mirror of
https://github.com/RayLabsHQ/gitea-mirror.git
synced 2026-03-24 22:58:03 +03:00
Compare commits
113 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5d2462e5a0 | ||
|
|
0000a03ad6 | ||
|
|
d697cb2bc9 | ||
|
|
ddd071f7e5 | ||
|
|
4629ab4335 | ||
|
|
0f303c4b79 | ||
|
|
7c7c259d0a | ||
|
|
fe6bcc5288 | ||
|
|
e26ed3aa9c | ||
|
|
efb96b6e60 | ||
|
|
342cafed0e | ||
|
|
fc7c6b59d7 | ||
|
|
a77ec0447a | ||
|
|
82b5ac8160 | ||
|
|
299659eca2 | ||
|
|
6f53a3ed41 | ||
|
|
1bca7df5ab | ||
|
|
b5210c3916 | ||
|
|
755647e29c | ||
|
|
018c9d1a23 | ||
|
|
c89011819f | ||
|
|
c00d48199b | ||
|
|
de28469210 | ||
|
|
0e2f83fee0 | ||
|
|
1dd3dea231 | ||
|
|
db783c4225 | ||
|
|
8a4716bdbd | ||
|
|
9d37966c10 | ||
|
|
ac16ae56ea | ||
|
|
df3e665978 | ||
|
|
8a26764d2c | ||
|
|
ce365a706e | ||
|
|
be7daac5fb | ||
|
|
e32b7af5eb | ||
|
|
d0693206c3 | ||
|
|
b079070c30 | ||
|
|
e68e9c38a8 | ||
|
|
534150ecf9 | ||
|
|
98da7065e0 | ||
|
|
58e0194aa6 | ||
|
|
7864c46279 | ||
|
|
e3970e53e1 | ||
|
|
be46cfdffa | ||
|
|
2e00a610cb | ||
|
|
61841dd7a5 | ||
|
|
5aa0f3260d | ||
|
|
d0efa200d9 | ||
|
|
c26b5574e0 | ||
|
|
89a6372565 | ||
|
|
f40cad4713 | ||
|
|
855906d990 | ||
|
|
08da526ddd | ||
|
|
2395e14382 | ||
|
|
91c1703bb5 | ||
|
|
6a548e3dac | ||
|
|
f28ac8fa09 | ||
|
|
5e86670a5b | ||
|
|
62d43df2ad | ||
|
|
cb7510f79d | ||
|
|
08c6302bf6 | ||
|
|
6e6c3fa124 | ||
|
|
85b1867490 | ||
|
|
545a575e1a | ||
|
|
ef13fefb69 | ||
|
|
ed59849392 | ||
|
|
5eb160861d | ||
|
|
6829bcff91 | ||
|
|
b1ca8c46bf | ||
|
|
888089b2d5 | ||
|
|
fb60449dc2 | ||
|
|
25854b04f9 | ||
|
|
c34056555f | ||
|
|
f4074a37ad | ||
|
|
6146d41197 | ||
|
|
4cce5b7cfe | ||
|
|
bc89b17a4c | ||
|
|
d023b255a7 | ||
|
|
71cc961f5c | ||
|
|
9bc7bbe33f | ||
|
|
6cc03364fb | ||
|
|
d623d81a44 | ||
|
|
5cc4dcfb29 | ||
|
|
893fae27d3 | ||
|
|
29051f3503 | ||
|
|
0a3ad4e7f5 | ||
|
|
f4d391b240 | ||
|
|
8280c6b337 | ||
|
|
bebbda9465 | ||
|
|
2496d6f6e0 | ||
|
|
179083aec4 | ||
|
|
aa74984fb0 | ||
|
|
18ab4cd53a | ||
|
|
e94bb86b61 | ||
|
|
3993d679e6 | ||
|
|
83cae16319 | ||
|
|
99ebe1a400 | ||
|
|
204d803937 | ||
|
|
2a08ae0b21 | ||
|
|
8dc7ae8bfc | ||
|
|
a4dbb49006 | ||
|
|
6531a9325d | ||
|
|
ff44f0e537 | ||
|
|
dec34fc384 | ||
|
|
f5727daedb | ||
|
|
3857f2fd1a | ||
|
|
e951e97790 | ||
|
|
d0cade633a | ||
|
|
490059666f | ||
|
|
5852bb00f2 | ||
|
|
9968775210 | ||
|
|
0d63fd4dae | ||
|
|
109958342d | ||
|
|
491546a97c |
@@ -15,6 +15,7 @@ dist
|
||||
build
|
||||
.next
|
||||
out
|
||||
www
|
||||
|
||||
# Environment variables
|
||||
.env
|
||||
|
||||
23
.env.example
23
.env.example
@@ -18,9 +18,26 @@ DATABASE_URL=sqlite://data/gitea-mirror.db
|
||||
# Generate with: openssl rand -base64 32
|
||||
BETTER_AUTH_SECRET=change-this-to-a-secure-random-string-in-production
|
||||
BETTER_AUTH_URL=http://localhost:4321
|
||||
# PUBLIC_BETTER_AUTH_URL=https://your-domain.com # Optional: Set this if accessing from different origins (e.g., IP and domain)
|
||||
# ENCRYPTION_SECRET=optional-encryption-key-for-token-encryption # Generate with: openssl rand -base64 48
|
||||
|
||||
# ===========================================
|
||||
# REVERSE PROXY CONFIGURATION
|
||||
# ===========================================
|
||||
# REQUIRED when accessing Gitea Mirror through a reverse proxy (Nginx, Caddy, Traefik, etc.).
|
||||
# Without these, sign-in will fail with "invalid origin" errors and pages may appear blank.
|
||||
#
|
||||
# Set all three to your external URL, e.g.:
|
||||
# BETTER_AUTH_URL=https://gitea-mirror.example.com
|
||||
# PUBLIC_BETTER_AUTH_URL=https://gitea-mirror.example.com
|
||||
# BETTER_AUTH_TRUSTED_ORIGINS=https://gitea-mirror.example.com
|
||||
#
|
||||
# BETTER_AUTH_URL - Used server-side for auth callbacks and redirects
|
||||
# PUBLIC_BETTER_AUTH_URL - Used client-side (browser) for auth API calls
|
||||
# BETTER_AUTH_TRUSTED_ORIGINS - Comma-separated list of origins allowed to make auth requests
|
||||
# (e.g. https://gitea-mirror.example.com,https://alt.example.com)
|
||||
PUBLIC_BETTER_AUTH_URL=http://localhost:4321
|
||||
# BETTER_AUTH_TRUSTED_ORIGINS=
|
||||
|
||||
# ===========================================
|
||||
# DOCKER CONFIGURATION (Optional)
|
||||
# ===========================================
|
||||
@@ -47,6 +64,7 @@ DOCKER_TAG=latest
|
||||
# SKIP_FORKS=false
|
||||
# MIRROR_STARRED=false
|
||||
# STARRED_REPOS_ORG=starred # Organization name for starred repos
|
||||
# STARRED_REPOS_MODE=dedicated-org # dedicated-org | preserve-owner
|
||||
|
||||
# Organization Settings
|
||||
# MIRROR_ORGANIZATIONS=false
|
||||
@@ -66,6 +84,7 @@ DOCKER_TAG=latest
|
||||
|
||||
# Basic Gitea Settings
|
||||
# GITEA_URL=http://gitea:3000
|
||||
# GITEA_EXTERNAL_URL=https://gitea.example.com # Optional: used only for UI links
|
||||
# GITEA_TOKEN=your-local-gitea-token
|
||||
# GITEA_USERNAME=your-local-gitea-username
|
||||
# GITEA_ORGANIZATION=github-mirrors # Default organization for single-org strategy
|
||||
@@ -183,4 +202,4 @@ DOCKER_TAG=latest
|
||||
# ===========================================
|
||||
|
||||
# TLS/SSL Configuration
|
||||
# GITEA_SKIP_TLS_VERIFY=false # WARNING: Only use for testing
|
||||
# GITEA_SKIP_TLS_VERIFY=false # WARNING: Only use for testing
|
||||
|
||||
BIN
.github/screenshots/backup-strategy-ui.png
vendored
Normal file
BIN
.github/screenshots/backup-strategy-ui.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 34 KiB |
9
.github/workflows/README.md
vendored
9
.github/workflows/README.md
vendored
@@ -30,17 +30,22 @@ This workflow runs on all branches and pull requests. It:
|
||||
|
||||
### Docker Build and Push (`docker-build.yml`)
|
||||
|
||||
This workflow builds and pushes Docker images to GitHub Container Registry (ghcr.io), but only when changes are merged to the main branch.
|
||||
This workflow builds Docker images on pushes and pull requests, and pushes to GitHub Container Registry (ghcr.io) when permissions allow (main/tags and same-repo PRs).
|
||||
|
||||
**When it runs:**
|
||||
- On push to the main branch
|
||||
- On tag creation (v*)
|
||||
- On pull requests (build + scan; push only for same-repo PRs)
|
||||
|
||||
**Key features:**
|
||||
- Builds multi-architecture images (amd64 and arm64)
|
||||
- Pushes images only on main branch, not for PRs
|
||||
- Pushes images for main/tags and same-repo PRs
|
||||
- Skips registry push for fork PRs (avoids package write permission failures)
|
||||
- Uses build caching to speed up builds
|
||||
- Creates multiple tags for each image (latest, semver, sha)
|
||||
- Auto-syncs `package.json` version from `v*` tags during release builds
|
||||
- Validates release tags use semver format before building
|
||||
- After tag builds succeed, writes the same version back to `main/package.json`
|
||||
|
||||
### Docker Security Scan (`docker-scan.yml`)
|
||||
|
||||
|
||||
13
.github/workflows/astro-build-test.yml
vendored
13
.github/workflows/astro-build-test.yml
vendored
@@ -6,11 +6,15 @@ on:
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'docs/**'
|
||||
- 'www/**'
|
||||
- 'helm/**'
|
||||
pull_request:
|
||||
branches: [ '*' ]
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'docs/**'
|
||||
- 'www/**'
|
||||
- 'helm/**'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -20,6 +24,7 @@ jobs:
|
||||
build-and-test:
|
||||
name: Build and Test Astro Project
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 25
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
@@ -28,7 +33,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: '1.2.16'
|
||||
bun-version: '1.3.10'
|
||||
|
||||
- name: Check lockfile and install dependencies
|
||||
run: |
|
||||
@@ -43,6 +48,12 @@ jobs:
|
||||
|
||||
- name: Run tests
|
||||
run: bun test --coverage
|
||||
|
||||
- name: Check Drizzle migrations
|
||||
run: bun run db:check
|
||||
|
||||
- name: Validate migrations (SQLite lint + upgrade path)
|
||||
run: bun test:migrations
|
||||
|
||||
- name: Build Astro project
|
||||
run: bunx --bun astro build
|
||||
|
||||
75
.github/workflows/docker-build.yml
vendored
75
.github/workflows/docker-build.yml
vendored
@@ -36,6 +36,7 @@ env:
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 25
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -55,6 +56,7 @@ jobs:
|
||||
driver-opts: network=host
|
||||
|
||||
- name: Log into registry
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
@@ -75,13 +77,34 @@ jobs:
|
||||
id: tag_version
|
||||
run: |
|
||||
if [[ $GITHUB_REF == refs/tags/v* ]]; then
|
||||
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
echo "Using version tag: ${GITHUB_REF#refs/tags/}"
|
||||
TAG_VERSION="${GITHUB_REF#refs/tags/}"
|
||||
if [[ ! "$TAG_VERSION" =~ ^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$ ]]; then
|
||||
echo "::error::Release tag '${TAG_VERSION}' is invalid. Expected semver tag format like v1.2.3 or v1.2.3-rc.1"
|
||||
exit 1
|
||||
fi
|
||||
APP_VERSION="${TAG_VERSION#v}"
|
||||
echo "VERSION=${TAG_VERSION}" >> $GITHUB_OUTPUT
|
||||
echo "APP_VERSION=${APP_VERSION}" >> $GITHUB_OUTPUT
|
||||
echo "Using version tag: ${TAG_VERSION}"
|
||||
else
|
||||
echo "VERSION=latest" >> $GITHUB_OUTPUT
|
||||
echo "APP_VERSION=dev" >> $GITHUB_OUTPUT
|
||||
echo "No version tag, using 'latest'"
|
||||
fi
|
||||
|
||||
# Keep version files aligned automatically for tag-based releases
|
||||
- name: Sync app version from release tag
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
VERSION="${{ steps.tag_version.outputs.APP_VERSION }}"
|
||||
echo "Syncing package.json version to ${VERSION}"
|
||||
|
||||
jq --arg version "${VERSION}" '.version = $version' package.json > package.json.tmp
|
||||
mv package.json.tmp package.json
|
||||
|
||||
echo "Version sync diff (package.json):"
|
||||
git --no-pager diff -- package.json
|
||||
|
||||
# Extract metadata for Docker
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
@@ -105,7 +128,7 @@ jobs:
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
push: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
@@ -128,13 +151,14 @@ jobs:
|
||||
|
||||
# Wait for image to be available in registry
|
||||
- name: Wait for image availability
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||
run: |
|
||||
echo "Waiting for image to be available in registry..."
|
||||
sleep 5
|
||||
|
||||
# Add comment to PR with image details
|
||||
- name: Comment PR with image tag
|
||||
if: github.event_name == 'pull_request'
|
||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -229,8 +253,49 @@ jobs:
|
||||
|
||||
# Upload security scan results to GitHub Security tab
|
||||
- name: Upload Docker Scout scan results to GitHub Security tab
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
uses: github/codeql-action/upload-sarif@v4
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
with:
|
||||
sarif_file: scout-results.sarif
|
||||
|
||||
sync-version-main:
|
||||
name: Sync package.json version back to main
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
runs-on: ubuntu-latest
|
||||
needs: docker
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: Checkout default branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.repository.default_branch }}
|
||||
|
||||
- name: Update package.json version on main
|
||||
env:
|
||||
TAG_VERSION: ${{ github.ref_name }}
|
||||
TARGET_BRANCH: ${{ github.event.repository.default_branch }}
|
||||
run: |
|
||||
if [[ ! "$TAG_VERSION" =~ ^v[0-9]+\.[0-9]+\.[0-9]+([.-][0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$ ]]; then
|
||||
echo "::error::Release tag '${TAG_VERSION}' is invalid. Expected semver tag format like v1.2.3 or v1.2.3-rc.1"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
APP_VERSION="${TAG_VERSION#v}"
|
||||
echo "Syncing ${TARGET_BRANCH}/package.json to ${APP_VERSION}"
|
||||
|
||||
jq --arg version "${APP_VERSION}" '.version = $version' package.json > package.json.tmp
|
||||
mv package.json.tmp package.json
|
||||
|
||||
if git diff --quiet -- package.json; then
|
||||
echo "package.json on ${TARGET_BRANCH} already at ${APP_VERSION}; nothing to commit."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git add package.json
|
||||
git commit -m "chore: sync version to ${APP_VERSION}"
|
||||
git push origin "HEAD:${TARGET_BRANCH}"
|
||||
|
||||
285
.github/workflows/e2e-tests.yml
vendored
Normal file
285
.github/workflows/e2e-tests.yml
vendored
Normal file
@@ -0,0 +1,285 @@
|
||||
name: E2E Integration Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*"]
|
||||
paths-ignore:
|
||||
- "README.md"
|
||||
- "docs/**"
|
||||
- "CHANGELOG.md"
|
||||
- "LICENSE"
|
||||
- "www/**"
|
||||
- "helm/**"
|
||||
pull_request:
|
||||
branches: ["*"]
|
||||
paths-ignore:
|
||||
- "README.md"
|
||||
- "docs/**"
|
||||
- "CHANGELOG.md"
|
||||
- "LICENSE"
|
||||
- "www/**"
|
||||
- "helm/**"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
debug_enabled:
|
||||
description: "Enable debug logging"
|
||||
required: false
|
||||
default: "false"
|
||||
type: boolean
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
concurrency:
|
||||
group: e2e-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GITEA_PORT: 3333
|
||||
FAKE_GITHUB_PORT: 4580
|
||||
GIT_SERVER_PORT: 4590
|
||||
APP_PORT: 4321
|
||||
BUN_VERSION: "1.3.10"
|
||||
|
||||
jobs:
|
||||
e2e-tests:
|
||||
name: E2E Integration Tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 25
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
bun install
|
||||
echo "✓ Dependencies installed"
|
||||
|
||||
- name: Install Playwright
|
||||
run: |
|
||||
npx playwright install chromium
|
||||
npx playwright install-deps chromium
|
||||
echo "✓ Playwright ready"
|
||||
|
||||
- name: Create test git repositories
|
||||
run: |
|
||||
echo "Creating bare git repos for E2E testing..."
|
||||
bun run tests/e2e/create-test-repos.ts --output-dir tests/e2e/git-repos
|
||||
|
||||
if [ ! -f tests/e2e/git-repos/manifest.json ]; then
|
||||
echo "ERROR: Test git repos were not created (manifest.json missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✓ Test repos created:"
|
||||
cat tests/e2e/git-repos/manifest.json | jq -r '.repos[] | " • \(.owner)/\(.name) — \(.description)"'
|
||||
|
||||
- name: Start Gitea and git-server containers
|
||||
run: |
|
||||
echo "Starting containers via docker compose..."
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml up -d
|
||||
|
||||
# Wait for git-server
|
||||
echo "Waiting for git HTTP server..."
|
||||
for i in $(seq 1 30); do
|
||||
if curl -sf http://localhost:${{ env.GIT_SERVER_PORT }}/manifest.json > /dev/null 2>&1; then
|
||||
echo "✓ Git HTTP server is ready"
|
||||
break
|
||||
fi
|
||||
if [ $i -eq 30 ]; then
|
||||
echo "ERROR: Git HTTP server did not start"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs git-server
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Wait for Gitea
|
||||
echo "Waiting for Gitea to be ready..."
|
||||
for i in $(seq 1 60); do
|
||||
if curl -sf http://localhost:${{ env.GITEA_PORT }}/api/v1/version > /dev/null 2>&1; then
|
||||
version=$(curl -sf http://localhost:${{ env.GITEA_PORT }}/api/v1/version | jq -r '.version // "unknown"')
|
||||
echo "✓ Gitea is ready (version: $version)"
|
||||
break
|
||||
fi
|
||||
if [ $i -eq 60 ]; then
|
||||
echo "ERROR: Gitea did not become healthy within 120s"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs gitea-e2e --tail=30
|
||||
exit 1
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
|
||||
- name: Initialize database
|
||||
run: |
|
||||
bun run manage-db init
|
||||
echo "✓ Database initialized"
|
||||
|
||||
- name: Build application
|
||||
env:
|
||||
GH_API_URL: http://localhost:4580
|
||||
BETTER_AUTH_SECRET: e2e-test-secret
|
||||
run: |
|
||||
bun run build
|
||||
echo "✓ Build complete"
|
||||
|
||||
- name: Start fake GitHub API server
|
||||
run: |
|
||||
# Start with GIT_SERVER_URL pointing to the git-server container name
|
||||
# (Gitea will resolve it via Docker networking)
|
||||
PORT=${{ env.FAKE_GITHUB_PORT }} GIT_SERVER_URL="http://git-server" \
|
||||
npx tsx tests/e2e/fake-github-server.ts &
|
||||
echo $! > /tmp/fake-github.pid
|
||||
|
||||
echo "Waiting for fake GitHub API..."
|
||||
for i in $(seq 1 30); do
|
||||
if curl -sf http://localhost:${{ env.FAKE_GITHUB_PORT }}/___mgmt/health > /dev/null 2>&1; then
|
||||
echo "✓ Fake GitHub API is ready"
|
||||
break
|
||||
fi
|
||||
if [ $i -eq 30 ]; then
|
||||
echo "ERROR: Fake GitHub API did not start"
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Ensure clone URLs are set for the git-server container
|
||||
curl -sf -X POST http://localhost:${{ env.FAKE_GITHUB_PORT }}/___mgmt/set-clone-url \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"url": "http://git-server"}' || true
|
||||
echo "✓ Clone URLs configured for git-server container"
|
||||
|
||||
- name: Start gitea-mirror application
|
||||
env:
|
||||
GH_API_URL: http://localhost:4580
|
||||
BETTER_AUTH_SECRET: e2e-test-secret
|
||||
BETTER_AUTH_URL: http://localhost:4321
|
||||
DATABASE_URL: file:data/gitea-mirror.db
|
||||
HOST: 0.0.0.0
|
||||
PORT: ${{ env.APP_PORT }}
|
||||
NODE_ENV: production
|
||||
PRE_SYNC_BACKUP_ENABLED: "false"
|
||||
ENCRYPTION_SECRET: "e2e-encryption-secret-32char!!"
|
||||
run: |
|
||||
# Re-init DB in case build step cleared it
|
||||
bun run manage-db init 2>/dev/null || true
|
||||
|
||||
bun run start &
|
||||
echo $! > /tmp/app.pid
|
||||
|
||||
echo "Waiting for gitea-mirror app..."
|
||||
for i in $(seq 1 90); do
|
||||
if curl -sf http://localhost:${{ env.APP_PORT }}/api/health > /dev/null 2>&1 || \
|
||||
curl -sf -o /dev/null -w "%{http_code}" http://localhost:${{ env.APP_PORT }}/ 2>/dev/null | grep -q "^[23]"; then
|
||||
echo "✓ gitea-mirror app is ready"
|
||||
break
|
||||
fi
|
||||
if ! kill -0 $(cat /tmp/app.pid) 2>/dev/null; then
|
||||
echo "ERROR: App process died"
|
||||
exit 1
|
||||
fi
|
||||
if [ $i -eq 90 ]; then
|
||||
echo "ERROR: gitea-mirror app did not start within 180s"
|
||||
exit 1
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
|
||||
- name: Run E2E tests
|
||||
env:
|
||||
APP_URL: http://localhost:${{ env.APP_PORT }}
|
||||
GITEA_URL: http://localhost:${{ env.GITEA_PORT }}
|
||||
FAKE_GITHUB_URL: http://localhost:${{ env.FAKE_GITHUB_PORT }}
|
||||
GIT_SERVER_URL: http://localhost:${{ env.GIT_SERVER_PORT }}
|
||||
CI: true
|
||||
run: |
|
||||
mkdir -p tests/e2e/test-results
|
||||
npx playwright test \
|
||||
--config tests/e2e/playwright.config.ts \
|
||||
--reporter=github,html
|
||||
|
||||
- name: Diagnostic info on failure
|
||||
if: failure()
|
||||
run: |
|
||||
echo "═══════════════════════════════════════════════════════════"
|
||||
echo " Diagnostic Information"
|
||||
echo "═══════════════════════════════════════════════════════════"
|
||||
|
||||
echo ""
|
||||
echo "── Git server status ──"
|
||||
curl -sf http://localhost:${{ env.GIT_SERVER_PORT }}/manifest.json 2>/dev/null | jq . || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── Gitea status ──"
|
||||
curl -sf http://localhost:${{ env.GITEA_PORT }}/api/v1/version 2>/dev/null || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── Fake GitHub status ──"
|
||||
curl -sf http://localhost:${{ env.FAKE_GITHUB_PORT }}/___mgmt/health 2>/dev/null | jq . || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── App status ──"
|
||||
curl -sf http://localhost:${{ env.APP_PORT }}/api/health 2>/dev/null || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── Docker containers ──"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml ps 2>/dev/null || true
|
||||
|
||||
echo ""
|
||||
echo "── Gitea container logs (last 50 lines) ──"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs gitea-e2e --tail=50 2>/dev/null || echo "(no container)"
|
||||
|
||||
echo ""
|
||||
echo "── Git server logs (last 20 lines) ──"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs git-server --tail=20 2>/dev/null || echo "(no container)"
|
||||
|
||||
echo ""
|
||||
echo "── Running processes ──"
|
||||
ps aux | grep -E "(fake-github|astro|bun|node)" | grep -v grep || true
|
||||
|
||||
- name: Upload Playwright report
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: e2e-playwright-report
|
||||
path: tests/e2e/playwright-report/
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: e2e-test-results
|
||||
path: tests/e2e/test-results/
|
||||
retention-days: 14
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
# Stop background processes
|
||||
if [ -f /tmp/fake-github.pid ]; then
|
||||
kill $(cat /tmp/fake-github.pid) 2>/dev/null || true
|
||||
rm -f /tmp/fake-github.pid
|
||||
fi
|
||||
if [ -f /tmp/app.pid ]; then
|
||||
kill $(cat /tmp/app.pid) 2>/dev/null || true
|
||||
rm -f /tmp/app.pid
|
||||
fi
|
||||
|
||||
# Stop containers
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml down --volumes --remove-orphans 2>/dev/null || true
|
||||
|
||||
echo "✓ Cleanup complete"
|
||||
2
.github/workflows/helm-test.yml
vendored
2
.github/workflows/helm-test.yml
vendored
@@ -21,6 +21,7 @@ jobs:
|
||||
yamllint:
|
||||
name: Lint YAML
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 25
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
@@ -35,6 +36,7 @@ jobs:
|
||||
helm-template:
|
||||
name: Helm lint & template
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 25
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Helm
|
||||
|
||||
50
.github/workflows/nix-build.yml
vendored
Normal file
50
.github/workflows/nix-build.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: Nix Flake Check
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, nix]
|
||||
tags:
|
||||
- 'v*'
|
||||
paths:
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'bun.nix'
|
||||
- '.github/workflows/nix-build.yml'
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'bun.nix'
|
||||
- '.github/workflows/nix-build.yml'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 45
|
||||
env:
|
||||
NIX_CONFIG: |
|
||||
accept-flake-config = true
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Nix
|
||||
uses: DeterminateSystems/nix-installer-action@main
|
||||
|
||||
- name: Setup Nix Cache
|
||||
uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
|
||||
- name: Check flake
|
||||
run: nix flake check --accept-flake-config
|
||||
|
||||
- name: Show flake info
|
||||
run: nix flake show --accept-flake-config
|
||||
|
||||
- name: Build package
|
||||
if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')
|
||||
run: nix build --print-build-logs --accept-flake-config
|
||||
20
.gitignore
vendored
20
.gitignore
vendored
@@ -32,3 +32,23 @@ certs/*.pem
|
||||
certs/*.cer
|
||||
!certs/README.md
|
||||
|
||||
# Nix build artifacts
|
||||
result
|
||||
result-*
|
||||
.direnv/
|
||||
|
||||
# E2E test artifacts
|
||||
tests/e2e/test-results/
|
||||
tests/e2e/playwright-report/
|
||||
tests/e2e/.auth/
|
||||
tests/e2e/e2e-storage-state.json
|
||||
tests/e2e/.fake-github.pid
|
||||
tests/e2e/.app.pid
|
||||
tests/e2e/git-repos/
|
||||
|
||||
# Playwright
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
/playwright/.auth/
|
||||
|
||||
75
Dockerfile
75
Dockerfile
@@ -1,38 +1,74 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
|
||||
FROM oven/bun:1.2.23-alpine AS base
|
||||
FROM oven/bun:1.3.10-debian AS base
|
||||
WORKDIR /app
|
||||
RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite openssl ca-certificates
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3 make g++ gcc wget sqlite3 openssl ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# ----------------------------
|
||||
FROM base AS deps
|
||||
FROM base AS builder
|
||||
COPY package.json ./
|
||||
COPY bun.lock* ./
|
||||
RUN bun install --frozen-lockfile
|
||||
|
||||
# ----------------------------
|
||||
FROM deps AS builder
|
||||
COPY . .
|
||||
RUN bun run build
|
||||
RUN mkdir -p dist/scripts && \
|
||||
for script in scripts/*.ts; do \
|
||||
bun build "$script" --target=bun --outfile=dist/scripts/$(basename "${script%.ts}.js"); \
|
||||
done
|
||||
for script in scripts/*.ts; do \
|
||||
bun build "$script" --target=bun --outfile=dist/scripts/$(basename "${script%.ts}.js"); \
|
||||
done
|
||||
|
||||
# ----------------------------
|
||||
FROM deps AS pruner
|
||||
RUN bun install --production --frozen-lockfile
|
||||
FROM base AS pruner
|
||||
COPY package.json ./
|
||||
COPY bun.lock* ./
|
||||
RUN bun install --production --omit=peer --frozen-lockfile
|
||||
|
||||
# ----------------------------
|
||||
FROM base AS runner
|
||||
# Build git-lfs from source with patched Go to resolve Go stdlib CVEs
|
||||
FROM debian:trixie-slim AS git-lfs-builder
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget ca-certificates git make \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
ARG GO_VERSION=1.25.8
|
||||
ARG GIT_LFS_VERSION=3.7.1
|
||||
RUN ARCH="$(dpkg --print-architecture)" \
|
||||
&& wget -qO /tmp/go.tar.gz "https://go.dev/dl/go${GO_VERSION}.linux-${ARCH}.tar.gz" \
|
||||
&& tar -C /usr/local -xzf /tmp/go.tar.gz \
|
||||
&& rm /tmp/go.tar.gz
|
||||
ENV PATH="/usr/local/go/bin:/root/go/bin:${PATH}"
|
||||
# Force using our installed Go (not the version in go.mod toolchain directive)
|
||||
ENV GOTOOLCHAIN=local
|
||||
RUN git clone --branch "v${GIT_LFS_VERSION}" --depth 1 https://github.com/git-lfs/git-lfs.git /tmp/git-lfs \
|
||||
&& cd /tmp/git-lfs \
|
||||
&& go get golang.org/x/crypto@latest \
|
||||
&& go mod tidy \
|
||||
&& make \
|
||||
&& install -m 755 /tmp/git-lfs/bin/git-lfs /usr/local/bin/git-lfs
|
||||
|
||||
# ----------------------------
|
||||
FROM oven/bun:1.3.10-debian AS runner
|
||||
WORKDIR /app
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
git wget sqlite3 openssl ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
COPY --from=git-lfs-builder /usr/local/bin/git-lfs /usr/local/bin/git-lfs
|
||||
RUN git lfs install
|
||||
COPY --from=pruner /app/node_modules ./node_modules
|
||||
COPY --from=builder /app/dist ./dist
|
||||
COPY --from=builder /app/package.json ./package.json
|
||||
COPY --from=builder /app/docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
COPY --from=builder /app/scripts ./scripts
|
||||
COPY --from=builder /app/drizzle ./drizzle
|
||||
|
||||
# Remove build-only packages that are not needed at runtime
|
||||
# (esbuild, vite, rollup, tailwind, svgo — all only used during `astro build`)
|
||||
RUN rm -rf node_modules/esbuild node_modules/@esbuild \
|
||||
node_modules/rollup node_modules/@rollup \
|
||||
node_modules/vite node_modules/svgo \
|
||||
node_modules/@tailwindcss/vite \
|
||||
node_modules/tailwindcss
|
||||
|
||||
ENV NODE_ENV=production
|
||||
ENV HOST=0.0.0.0
|
||||
ENV PORT=4321
|
||||
@@ -40,12 +76,13 @@ ENV DATABASE_URL=file:data/gitea-mirror.db
|
||||
|
||||
# Create directories and setup permissions
|
||||
RUN mkdir -p /app/certs && \
|
||||
chmod +x ./docker-entrypoint.sh && \
|
||||
mkdir -p /app/data && \
|
||||
addgroup --system --gid 1001 nodejs && \
|
||||
adduser --system --uid 1001 gitea-mirror && \
|
||||
chown -R gitea-mirror:nodejs /app/data && \
|
||||
chown -R gitea-mirror:nodejs /app/certs
|
||||
chmod +x ./docker-entrypoint.sh && \
|
||||
mkdir -p /app/data && \
|
||||
groupadd --system --gid 1001 nodejs && \
|
||||
useradd --system --uid 1001 --gid 1001 --create-home --home-dir /home/gitea-mirror gitea-mirror && \
|
||||
chown -R gitea-mirror:nodejs /app/data && \
|
||||
chown -R gitea-mirror:nodejs /app/certs && \
|
||||
chown -R gitea-mirror:nodejs /home/gitea-mirror
|
||||
|
||||
USER gitea-mirror
|
||||
|
||||
@@ -55,4 +92,4 @@ EXPOSE 4321
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
|
||||
CMD wget --no-verbose --tries=1 --spider http://localhost:4321/api/health || exit 1
|
||||
|
||||
ENTRYPOINT ["./docker-entrypoint.sh"]
|
||||
ENTRYPOINT ["./docker-entrypoint.sh"]
|
||||
|
||||
189
NIX.md
Normal file
189
NIX.md
Normal file
@@ -0,0 +1,189 @@
|
||||
# Nix Deployment Quick Reference
|
||||
|
||||
## TL;DR
|
||||
|
||||
```bash
|
||||
# From GitHub (no clone needed!)
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Or from local clone
|
||||
nix run --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
```
|
||||
|
||||
Secrets auto-generate, database auto-initializes, and the web UI starts at http://localhost:4321.
|
||||
|
||||
**Note:** If you have flakes enabled in your nix config, you can omit `--extra-experimental-features 'nix-command flakes'`
|
||||
|
||||
---
|
||||
|
||||
## Installation Options
|
||||
|
||||
### 1. Run Without Installing (from GitHub)
|
||||
```bash
|
||||
# Latest version from main branch
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Pin to specific version
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
### 2. Install to Profile
|
||||
```bash
|
||||
# Install from GitHub
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Run the installed binary
|
||||
gitea-mirror
|
||||
```
|
||||
|
||||
### 3. Use Local Clone
|
||||
```bash
|
||||
# Clone and run
|
||||
git clone https://github.com/RayLabsHQ/gitea-mirror.git
|
||||
cd gitea-mirror
|
||||
nix run --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
```
|
||||
|
||||
### 4. NixOS System Service
|
||||
```nix
|
||||
# configuration.nix
|
||||
{
|
||||
inputs.gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
betterAuthUrl = "https://mirror.example.com"; # For production
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Development (Local Clone)
|
||||
```bash
|
||||
nix develop --extra-experimental-features 'nix-command flakes'
|
||||
# or
|
||||
direnv allow # Handles experimental features automatically
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Enable Flakes Permanently (Recommended)
|
||||
|
||||
To avoid typing `--extra-experimental-features` every time, add to `~/.config/nix/nix.conf`:
|
||||
```
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## What Gets Auto-Generated?
|
||||
|
||||
On first run, the wrapper automatically:
|
||||
|
||||
1. Creates `~/.local/share/gitea-mirror/` (or `$DATA_DIR`)
|
||||
2. Generates `BETTER_AUTH_SECRET` → `.better_auth_secret`
|
||||
3. Generates `ENCRYPTION_SECRET` → `.encryption_secret`
|
||||
4. Initializes SQLite database
|
||||
5. Runs startup recovery and repair scripts
|
||||
6. Starts the application
|
||||
|
||||
---
|
||||
|
||||
## Key Commands
|
||||
|
||||
```bash
|
||||
# Database management
|
||||
gitea-mirror-db init # Initialize database
|
||||
gitea-mirror-db check # Health check
|
||||
gitea-mirror-db fix # Fix issues
|
||||
|
||||
# Development (add --extra-experimental-features 'nix-command flakes' if needed)
|
||||
nix develop # Enter dev shell
|
||||
nix build # Build package
|
||||
nix flake check # Validate flake
|
||||
nix flake update # Update dependencies
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables
|
||||
|
||||
All vars from `docker-compose.alt.yml` are supported:
|
||||
|
||||
```bash
|
||||
DATA_DIR="$HOME/.local/share/gitea-mirror"
|
||||
PORT=4321
|
||||
HOST="0.0.0.0"
|
||||
BETTER_AUTH_URL="http://localhost:4321"
|
||||
|
||||
# Secrets (auto-generated if not set)
|
||||
BETTER_AUTH_SECRET=auto-generated
|
||||
ENCRYPTION_SECRET=auto-generated
|
||||
|
||||
# Concurrency (for perfect ordering, set both to 1)
|
||||
MIRROR_ISSUE_CONCURRENCY=3
|
||||
MIRROR_PULL_REQUEST_CONCURRENCY=5
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## NixOS Module Options
|
||||
|
||||
```nix
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
package = ...; # Override package
|
||||
dataDir = "/var/lib/gitea-mirror"; # Data location
|
||||
user = "gitea-mirror"; # Service user
|
||||
group = "gitea-mirror"; # Service group
|
||||
host = "0.0.0.0"; # Bind address
|
||||
port = 4321; # Listen port
|
||||
betterAuthUrl = "http://..."; # External URL
|
||||
betterAuthTrustedOrigins = "..."; # CORS origins
|
||||
mirrorIssueConcurrency = 3; # Concurrency
|
||||
mirrorPullRequestConcurrency = 5; # Concurrency
|
||||
environmentFile = null; # Optional secrets file
|
||||
openFirewall = true; # Open firewall
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Comparison: Docker vs Nix
|
||||
|
||||
| Feature | Docker | Nix |
|
||||
|---------|--------|-----|
|
||||
| **Config Required** | BETTER_AUTH_SECRET | None (auto-generated) |
|
||||
| **Startup** | `docker-compose up` | `nix run .#gitea-mirror` |
|
||||
| **Service** | Docker daemon | systemd (NixOS) |
|
||||
| **Updates** | `docker pull` | `nix flake update` |
|
||||
| **Reproducible** | Image-based | Hash-based |
|
||||
|
||||
---
|
||||
|
||||
## Full Documentation
|
||||
|
||||
- **[docs/NIX_DEPLOYMENT.md](docs/NIX_DEPLOYMENT.md)** - Complete deployment guide
|
||||
- NixOS module configuration
|
||||
- Home Manager integration
|
||||
- Production deployment examples
|
||||
- Migration from Docker
|
||||
- Troubleshooting guide
|
||||
|
||||
- **[docs/NIX_DISTRIBUTION.md](docs/NIX_DISTRIBUTION.md)** - Distribution guide for maintainers
|
||||
- How users consume the package
|
||||
- CI build caching
|
||||
- Releasing new versions
|
||||
- Submitting to nixpkgs
|
||||
|
||||
---
|
||||
|
||||
## Key Features
|
||||
|
||||
- **Zero-config deployment** - Runs immediately without setup
|
||||
- **Auto-secret generation** - Secure secrets created and persisted
|
||||
- **Startup recovery** - Handles interrupted jobs automatically
|
||||
- **Graceful shutdown** - Proper signal handling
|
||||
- **Health checks** - Built-in monitoring support
|
||||
- **Security hardening** - NixOS module includes systemd protections
|
||||
- **Docker parity** - Same behavior as `docker-compose.alt.yml`
|
||||
134
README.md
134
README.md
@@ -1,7 +1,7 @@
|
||||
<p align="center">
|
||||
<img src=".github/assets/logo.png" alt="Gitea Mirror Logo" width="120" />
|
||||
<h1>Gitea Mirror</h1>
|
||||
<p><i>Automatically mirror repositories from GitHub to your self-hosted Gitea instance.</i></p>
|
||||
<p><i>Automatically mirror repositories from GitHub to your self-hosted Gitea/Forgejo instance.</i></p>
|
||||
<p align="center">
|
||||
<a href="https://github.com/RayLabsHQ/gitea-mirror/releases/latest"><img src="https://img.shields.io/github/v/tag/RayLabsHQ/gitea-mirror?label=release" alt="release"/></a>
|
||||
<a href="https://github.com/RayLabsHQ/gitea-mirror/actions/workflows/astro-build-test.yml"><img src="https://img.shields.io/github/actions/workflow/status/RayLabsHQ/gitea-mirror/astro-build-test.yml?branch=main" alt="build"/></a>
|
||||
@@ -19,7 +19,7 @@ docker compose -f docker-compose.alt.yml up -d
|
||||
# Access at http://localhost:4321
|
||||
```
|
||||
|
||||
First user signup becomes admin. Configure GitHub and Gitea through the web interface!
|
||||
First user signup becomes admin. Configure GitHub and Gitea/Forgejo through the web interface!
|
||||
|
||||
<p align="center">
|
||||
<img src=".github/assets/dashboard.png" alt="Dashboard" width="600" />
|
||||
@@ -28,7 +28,7 @@ First user signup becomes admin. Configure GitHub and Gitea through the web inte
|
||||
|
||||
## ✨ Features
|
||||
|
||||
- 🔁 Mirror public, private, and starred GitHub repos to Gitea
|
||||
- 🔁 Mirror public, private, and starred GitHub repos to Gitea/Forgejo
|
||||
- 🏢 Mirror entire organizations with flexible strategies
|
||||
- 🎯 Custom destination control for repos and organizations
|
||||
- 📦 **Git LFS support** - Mirror large files with Git LFS
|
||||
@@ -40,6 +40,7 @@ First user signup becomes admin. Configure GitHub and Gitea through the web inte
|
||||
- 🔄 **Auto-discovery** - Automatically import new GitHub repositories (v3.4.0+)
|
||||
- 🧹 **Repository cleanup** - Auto-remove repos deleted from GitHub (v3.4.0+)
|
||||
- 🎯 **Proper mirror intervals** - Respects configured sync intervals (v3.4.0+)
|
||||
- 🛡️ **[Force-push protection](docs/FORCE_PUSH_PROTECTION.md)** - Smart detection with backup-on-demand or block-and-approve modes (Beta)
|
||||
- 🗑️ Automatic database cleanup with configurable retention
|
||||
- 🐳 Dockerized with multi-arch support (AMD64/ARM64)
|
||||
|
||||
@@ -112,7 +113,7 @@ docker compose up -d
|
||||
#### Using Pre-built Image Directly
|
||||
|
||||
```bash
|
||||
docker pull ghcr.io/raylabshq/gitea-mirror:v3.1.1
|
||||
docker pull ghcr.io/raylabshq/gitea-mirror:latest
|
||||
```
|
||||
|
||||
### Configuration Options
|
||||
@@ -150,6 +151,38 @@ bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/Proxmo
|
||||
|
||||
See the [Proxmox VE Community Scripts](https://community-scripts.github.io/ProxmoxVE/scripts?id=gitea-mirror) for more details.
|
||||
|
||||
### Nix/NixOS
|
||||
|
||||
Zero-configuration deployment with Nix:
|
||||
|
||||
```bash
|
||||
# Run immediately - no setup needed!
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Or build and run locally
|
||||
nix build --extra-experimental-features 'nix-command flakes'
|
||||
./result/bin/gitea-mirror
|
||||
|
||||
# Or install to profile
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
gitea-mirror
|
||||
```
|
||||
|
||||
**NixOS users** - add to your configuration:
|
||||
```nix
|
||||
{
|
||||
inputs.gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
Secrets auto-generate, database auto-initializes. See [NIX.md](NIX.md) for quick reference or [docs/NIX_DEPLOYMENT.md](docs/NIX_DEPLOYMENT.md) for full documentation.
|
||||
|
||||
### Manual Installation
|
||||
|
||||
```bash
|
||||
@@ -166,31 +199,31 @@ bun run dev
|
||||
1. **First Time Setup**
|
||||
- Navigate to http://localhost:4321
|
||||
- Create admin account (first user signup)
|
||||
- Configure GitHub and Gitea connections
|
||||
- Configure GitHub and Gitea/Forgejo connections
|
||||
|
||||
2. **Mirror Strategies**
|
||||
- **Preserve Structure**: Maintains GitHub organization structure
|
||||
- **Single Organization**: All repos go to one Gitea organization
|
||||
- **Flat User**: All repos under your Gitea user account
|
||||
- **Single Organization**: All repos go to one Gitea/Forgejo organization
|
||||
- **Flat User**: All repos under your Gitea/Forgejo user account
|
||||
- **Mixed Mode**: Personal repos in one org, organization repos preserve structure
|
||||
|
||||
3. **Customization**
|
||||
- Click edit buttons on organization cards to set custom destinations
|
||||
- Override individual repository destinations in the table view
|
||||
- Starred repositories automatically go to a dedicated organization
|
||||
- Starred repositories can go to a dedicated org or preserve source owner/org paths
|
||||
|
||||
## Advanced Features
|
||||
|
||||
### Git LFS (Large File Storage)
|
||||
Mirror Git LFS objects along with your repositories:
|
||||
- Enable "Mirror LFS" option in Settings → Mirror Options
|
||||
- Requires Gitea server with LFS enabled (`LFS_START_SERVER = true`)
|
||||
- Requires Gitea/Forgejo server with LFS enabled (`LFS_START_SERVER = true`)
|
||||
- Requires Git v2.1.2+ on the server
|
||||
|
||||
### Metadata Mirroring
|
||||
Transfer complete repository metadata from GitHub to Gitea:
|
||||
Transfer complete repository metadata from GitHub to Gitea/Forgejo:
|
||||
- **Issues** - Mirror all issues with comments and labels
|
||||
- **Pull Requests** - Transfer PR discussions to Gitea
|
||||
- **Pull Requests** - Transfer PR discussions to Gitea/Forgejo
|
||||
- **Labels** - Preserve repository labels
|
||||
- **Milestones** - Keep project milestones
|
||||
- **Wiki** - Mirror wiki content
|
||||
@@ -210,7 +243,7 @@ Gitea Mirror provides powerful automatic synchronization features:
|
||||
#### Features (v3.4.0+)
|
||||
- **Auto-discovery**: Automatically discovers and imports new GitHub repositories
|
||||
- **Repository cleanup**: Removes repositories that no longer exist in GitHub
|
||||
- **Proper intervals**: Mirrors respect your configured sync intervals (not Gitea's default 24h)
|
||||
- **Proper intervals**: Mirrors respect your configured sync intervals (not Gitea/Forgejo's default 24h)
|
||||
- **Smart scheduling**: Only syncs repositories that need updating
|
||||
- **Auto-start on boot** (v3.5.3+): Automatically imports and mirrors all repositories when `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` is set - no manual clicks required!
|
||||
|
||||
@@ -221,7 +254,7 @@ Navigate to the Configuration page and enable "Automatic Syncing" with your pref
|
||||
|
||||
**🚀 Set it and forget it!** With these environment variables, Gitea Mirror will automatically:
|
||||
1. **Import** all your GitHub repositories on startup (no manual import needed!)
|
||||
2. **Mirror** them to Gitea immediately
|
||||
2. **Mirror** them to Gitea/Forgejo immediately
|
||||
3. **Keep them synchronized** based on your interval
|
||||
4. **Auto-discover** new repos you create/star on GitHub
|
||||
5. **Clean up** repos you delete from GitHub
|
||||
@@ -250,15 +283,17 @@ CLEANUP_DRY_RUN=false # Set to true to test without changes
|
||||
**Important Notes**:
|
||||
- **Auto-Start**: When `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` is set, the service automatically imports all GitHub repositories and mirrors them on startup. No manual "Import" or "Mirror" button clicks required!
|
||||
- The scheduler checks every minute for tasks to run. The `GITEA_MIRROR_INTERVAL` determines how often each repository is actually synced. For example, with `8h`, each repo syncs every 8 hours from its last successful sync.
|
||||
- **Large repo bootstrap**: For first-time mirroring of large repositories (especially with metadata/LFS), avoid very short intervals (for example `5m`). Start with a longer interval (`1h` to `8h`) or temporarily disable scheduling during the initial import/mirror run, then enable your regular interval after the first pass completes.
|
||||
- **Why this matters**: If your Gitea/Forgejo instance takes a long time to complete migrations/imports, aggressive schedules can cause repeated retries and duplicate-looking mirror attempts.
|
||||
|
||||
**🛡️ Backup Protection Features**:
|
||||
- **No Accidental Deletions**: Repository cleanup is automatically skipped if GitHub is inaccessible (account deleted, banned, or API errors)
|
||||
- **Archive Never Deletes Data**: The `archive` action preserves all repository data:
|
||||
- Regular repositories: Made read-only using Gitea's archive feature
|
||||
- Mirror repositories: Renamed with `archived-` prefix (Gitea API limitation prevents archiving mirrors)
|
||||
- Regular repositories: Made read-only using Gitea/Forgejo's archive feature
|
||||
- Mirror repositories: Renamed with `archived-` prefix (Gitea/Forgejo API limitation prevents archiving mirrors)
|
||||
- Failed operations: Repository remains fully accessible even if marking as archived fails
|
||||
- **Manual Sync on Demand**: Archived mirrors stay in Gitea with automatic syncs disabled; trigger `Manual Sync` from the Repositories page whenever you need fresh data.
|
||||
- **The Whole Point of Backups**: Your Gitea mirrors are preserved even when GitHub sources disappear - that's why you have backups!
|
||||
- **Manual Sync on Demand**: Archived mirrors stay in Gitea/Forgejo with automatic syncs disabled; trigger `Manual Sync` from the Repositories page whenever you need fresh data.
|
||||
- **The Whole Point of Backups**: Your Gitea/Forgejo mirrors are preserved even when GitHub sources disappear - that's why you have backups!
|
||||
- **Strongly Recommended**: Always use `CLEANUP_ORPHANED_REPO_ACTION=archive` (default) instead of `delete`
|
||||
|
||||
## Troubleshooting
|
||||
@@ -267,6 +302,40 @@ CLEANUP_DRY_RUN=false # Set to true to test without changes
|
||||
|
||||
If using a reverse proxy (e.g., nginx proxy manager) and experiencing issues with JavaScript files not loading properly, try enabling HTTP/2 support in your proxy configuration. While not required by the application, some proxy configurations may have better compatibility with HTTP/2 enabled. See [issue #43](https://github.com/RayLabsHQ/gitea-mirror/issues/43) for reference.
|
||||
|
||||
### Mirror Token Rotation (GitHub Token Changed)
|
||||
|
||||
For existing pull-mirror repositories, changing the GitHub token in Gitea Mirror does not always update stored mirror credentials in Gitea/Forgejo for already-created repositories.
|
||||
|
||||
If sync logs show authentication failures (for example `terminal prompts disabled`), do one of the following:
|
||||
|
||||
1. In Gitea/Forgejo, open repository **Settings → Mirror Settings** and update the mirror authorization password/token.
|
||||
2. Or delete and re-mirror the repository so it is recreated with current credentials.
|
||||
|
||||
### Re-sync Metadata After Changing Mirror Options
|
||||
|
||||
If you enable metadata options (issues/PRs/labels/milestones/releases) after repositories were already mirrored:
|
||||
|
||||
1. Go to **Repositories**, select the repositories, and click **Sync** to run a fresh sync pass.
|
||||
2. For a full metadata refresh, use **Re-run Metadata** on selected repositories. This clears metadata sync state for those repos and immediately starts Sync.
|
||||
3. If some repositories still miss metadata, reset metadata sync state in SQLite and sync again:
|
||||
|
||||
```bash
|
||||
sqlite3 data/gitea-mirror.db "UPDATE repositories SET metadata = NULL;"
|
||||
```
|
||||
|
||||
This clears per-repository metadata completion flags so the next sync can re-run metadata import steps.
|
||||
|
||||
### Mirror Interval vs Gitea/Forgejo `MIN_INTERVAL`
|
||||
|
||||
Gitea Mirror treats the interval configured in **Configuration** (or `GITEA_MIRROR_INTERVAL`) as the source of truth and applies it to mirrored repositories during sync.
|
||||
|
||||
If your Gitea/Forgejo server has `mirror.MIN_INTERVAL` set to a higher value (for example `24h`) and Gitea Mirror is set lower (for example `8h`), sync/mirror operations can fail when updating mirror settings.
|
||||
|
||||
To avoid this:
|
||||
|
||||
1. Set Gitea Mirror interval to a value greater than or equal to your server `MIN_INTERVAL`.
|
||||
2. Do not rely on manual per-repository mirror interval edits in Gitea/Forgejo, as they will be overwritten on sync.
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
@@ -287,13 +356,13 @@ bun run build
|
||||
|
||||
- **Frontend**: Astro, React, Shadcn UI, Tailwind CSS v4
|
||||
- **Backend**: Bun runtime, SQLite, Drizzle ORM
|
||||
- **APIs**: GitHub (Octokit), Gitea REST API
|
||||
- **APIs**: GitHub (Octokit), Gitea/Forgejo REST API
|
||||
- **Auth**: Better Auth with session-based authentication
|
||||
|
||||
## Security
|
||||
|
||||
### Token Encryption
|
||||
- All GitHub and Gitea API tokens are encrypted at rest using AES-256-GCM
|
||||
- All GitHub and Gitea/Forgejo API tokens are encrypted at rest using AES-256-GCM
|
||||
- Encryption is automatic and transparent to users
|
||||
- Set `ENCRYPTION_SECRET` environment variable for production deployments
|
||||
- Falls back to `BETTER_AUTH_SECRET` if not set
|
||||
@@ -303,6 +372,20 @@ bun run build
|
||||
- Never stored in plaintext
|
||||
- Secure cookie-based session management
|
||||
|
||||
### Admin Password Recovery (CLI)
|
||||
If email delivery is not configured, an admin with server access can reset a user password from the command line:
|
||||
|
||||
```bash
|
||||
bun run reset-password -- --email=user@example.com --new-password='new-secure-password'
|
||||
```
|
||||
|
||||
What this does:
|
||||
- Updates the credential password hash for the matching user
|
||||
- Creates a credential account if one does not already exist
|
||||
- Invalidates all active sessions for that user (forces re-login)
|
||||
|
||||
Use this only from trusted server/admin environments.
|
||||
|
||||
## Authentication
|
||||
|
||||
Gitea Mirror supports multiple authentication methods. **Email/password authentication is the default and always enabled.**
|
||||
@@ -373,13 +456,13 @@ Gitea Mirror can also act as an OIDC provider for other applications. Register O
|
||||
## Known Limitations
|
||||
|
||||
### Pull Request Mirroring Implementation
|
||||
Pull requests **cannot be created as actual PRs** in Gitea due to API limitations. Instead, they are mirrored as **enriched issues** with comprehensive metadata.
|
||||
Pull requests **cannot be created as actual PRs** in Gitea/Forgejo due to API limitations. Instead, they are mirrored as **enriched issues** with comprehensive metadata.
|
||||
|
||||
**Why real PR mirroring isn't possible:**
|
||||
- Gitea's API doesn't support creating pull requests from external sources
|
||||
- Gitea/Forgejo's API doesn't support creating pull requests from external sources
|
||||
- Real PRs require actual Git branches with commits to exist in the repository
|
||||
- Would require complex branch synchronization and commit replication
|
||||
- The mirror relationship is one-way (GitHub → Gitea) for repository content
|
||||
- The mirror relationship is one-way (GitHub → Gitea/Forgejo) for repository content
|
||||
|
||||
**How we handle Pull Requests:**
|
||||
PRs are mirrored as issues with rich metadata including:
|
||||
@@ -393,7 +476,7 @@ PRs are mirrored as issues with rich metadata including:
|
||||
- 🔀 Base and head branch information
|
||||
- ✅ Merge status tracking
|
||||
|
||||
This approach preserves all important PR information while working within Gitea's API constraints. The PRs appear in Gitea's issue tracker with clear visual distinction and comprehensive details.
|
||||
This approach preserves all important PR information while working within Gitea/Forgejo's API constraints. The PRs appear in the issue tracker with clear visual distinction and comprehensive details.
|
||||
|
||||
## Contributing
|
||||
|
||||
@@ -401,7 +484,7 @@ Contributions are welcome! Please read our [Contributing Guidelines](CONTRIBUTIN
|
||||
|
||||
## License
|
||||
|
||||
GNU General Public License v3.0 - see [LICENSE](LICENSE) file for details.
|
||||
GNU Affero General Public License v3.0 (AGPL-3.0) - see [LICENSE](LICENSE) file for details.
|
||||
|
||||
## Star History
|
||||
|
||||
@@ -416,7 +499,8 @@ GNU General Public License v3.0 - see [LICENSE](LICENSE) file for details.
|
||||
## Support
|
||||
|
||||
- 📖 [Documentation](https://github.com/RayLabsHQ/gitea-mirror/tree/main/docs)
|
||||
- 🔐 [Custom CA Certificates](docs/CA_CERTIFICATES.md)
|
||||
- 🔐 [Environment Variables](docs/ENVIRONMENT_VARIABLES.md)
|
||||
- 🛡️ [Force-Push Protection](docs/FORCE_PUSH_PROTECTION.md)
|
||||
- 🐛 [Report Issues](https://github.com/RayLabsHQ/gitea-mirror/issues)
|
||||
- 💬 [Discussions](https://github.com/RayLabsHQ/gitea-mirror/discussions)
|
||||
- 🔧 [Proxmox VE Script](https://community-scripts.github.io/ProxmoxVE/scripts?id=gitea-mirror)
|
||||
|
||||
@@ -3,4 +3,7 @@
|
||||
timeout = 5000
|
||||
|
||||
# Preload the setup file
|
||||
preload = ["./src/tests/setup.bun.ts"]
|
||||
preload = ["./src/tests/setup.bun.ts"]
|
||||
|
||||
# Only run tests in src/ directory (excludes tests/e2e/ which are Playwright tests)
|
||||
root = "./src/"
|
||||
@@ -18,6 +18,10 @@ services:
|
||||
- BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET} # Min 32 chars, required for sessions
|
||||
- BETTER_AUTH_URL=${BETTER_AUTH_URL:-http://localhost:4321}
|
||||
- BETTER_AUTH_TRUSTED_ORIGINS=${BETTER_AUTH_TRUSTED_ORIGINS:-http://localhost:4321}
|
||||
# REVERSE PROXY: If accessing via a reverse proxy, set all three to your external URL:
|
||||
# BETTER_AUTH_URL=https://gitea-mirror.example.com
|
||||
# PUBLIC_BETTER_AUTH_URL=https://gitea-mirror.example.com
|
||||
# BETTER_AUTH_TRUSTED_ORIGINS=https://gitea-mirror.example.com
|
||||
|
||||
# === CORE SETTINGS ===
|
||||
# These are technically required but have working defaults
|
||||
|
||||
@@ -32,6 +32,13 @@ services:
|
||||
- PORT=4321
|
||||
- BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET:-your-secret-key-change-this-in-production}
|
||||
- BETTER_AUTH_URL=${BETTER_AUTH_URL:-http://localhost:4321}
|
||||
# REVERSE PROXY: If you access Gitea Mirror through a reverse proxy (e.g. Nginx, Caddy, Traefik),
|
||||
# you MUST set these three variables to your external URL. Example:
|
||||
# BETTER_AUTH_URL=https://gitea-mirror.example.com
|
||||
# PUBLIC_BETTER_AUTH_URL=https://gitea-mirror.example.com
|
||||
# BETTER_AUTH_TRUSTED_ORIGINS=https://gitea-mirror.example.com
|
||||
- PUBLIC_BETTER_AUTH_URL=${PUBLIC_BETTER_AUTH_URL:-http://localhost:4321}
|
||||
- BETTER_AUTH_TRUSTED_ORIGINS=${BETTER_AUTH_TRUSTED_ORIGINS:-}
|
||||
# Optional: ENCRYPTION_SECRET will be auto-generated if not provided
|
||||
# - ENCRYPTION_SECRET=${ENCRYPTION_SECRET:-}
|
||||
# GitHub/Gitea Mirror Config
|
||||
|
||||
@@ -139,16 +139,29 @@ fi
|
||||
|
||||
# Initialize configuration from environment variables if provided
|
||||
echo "Checking for environment configuration..."
|
||||
if [ -f "dist/scripts/startup-env-config.js" ]; then
|
||||
echo "Loading configuration from environment variables..."
|
||||
bun dist/scripts/startup-env-config.js
|
||||
ENV_CONFIG_EXIT_CODE=$?
|
||||
elif [ -f "scripts/startup-env-config.ts" ]; then
|
||||
echo "Loading configuration from environment variables..."
|
||||
bun scripts/startup-env-config.ts
|
||||
ENV_CONFIG_EXIT_CODE=$?
|
||||
|
||||
# Only run the env config script if relevant env vars are set
|
||||
# This avoids spawning a heavy Bun process on memory-constrained systems
|
||||
HAS_ENV_CONFIG=false
|
||||
if [ -n "$GITHUB_USERNAME" ] || [ -n "$GITHUB_TOKEN" ] || [ -n "$GITEA_URL" ] || [ -n "$GITEA_USERNAME" ] || [ -n "$GITEA_TOKEN" ]; then
|
||||
HAS_ENV_CONFIG=true
|
||||
fi
|
||||
|
||||
if [ "$HAS_ENV_CONFIG" = "true" ]; then
|
||||
if [ -f "dist/scripts/startup-env-config.js" ]; then
|
||||
echo "Loading configuration from environment variables..."
|
||||
bun dist/scripts/startup-env-config.js || ENV_CONFIG_EXIT_CODE=$?
|
||||
ENV_CONFIG_EXIT_CODE=${ENV_CONFIG_EXIT_CODE:-0}
|
||||
elif [ -f "scripts/startup-env-config.ts" ]; then
|
||||
echo "Loading configuration from environment variables..."
|
||||
bun scripts/startup-env-config.ts || ENV_CONFIG_EXIT_CODE=$?
|
||||
ENV_CONFIG_EXIT_CODE=${ENV_CONFIG_EXIT_CODE:-0}
|
||||
else
|
||||
echo "Environment configuration script not found. Skipping."
|
||||
ENV_CONFIG_EXIT_CODE=0
|
||||
fi
|
||||
else
|
||||
echo "Environment configuration script not found. Skipping."
|
||||
echo "No GitHub/Gitea environment variables found, skipping env config initialization."
|
||||
ENV_CONFIG_EXIT_CODE=0
|
||||
fi
|
||||
|
||||
@@ -161,17 +174,15 @@ fi
|
||||
|
||||
# Run startup recovery to handle any interrupted jobs
|
||||
echo "Running startup recovery..."
|
||||
RECOVERY_EXIT_CODE=0
|
||||
if [ -f "dist/scripts/startup-recovery.js" ]; then
|
||||
echo "Running startup recovery using compiled script..."
|
||||
bun dist/scripts/startup-recovery.js --timeout=30000
|
||||
RECOVERY_EXIT_CODE=$?
|
||||
bun dist/scripts/startup-recovery.js --timeout=30000 || RECOVERY_EXIT_CODE=$?
|
||||
elif [ -f "scripts/startup-recovery.ts" ]; then
|
||||
echo "Running startup recovery using TypeScript script..."
|
||||
bun scripts/startup-recovery.ts --timeout=30000
|
||||
RECOVERY_EXIT_CODE=$?
|
||||
bun scripts/startup-recovery.ts --timeout=30000 || RECOVERY_EXIT_CODE=$?
|
||||
else
|
||||
echo "Warning: Startup recovery script not found. Skipping recovery."
|
||||
RECOVERY_EXIT_CODE=0
|
||||
fi
|
||||
|
||||
# Log recovery result
|
||||
@@ -185,17 +196,15 @@ fi
|
||||
|
||||
# Run repository status repair to fix any inconsistent mirroring states
|
||||
echo "Running repository status repair..."
|
||||
REPAIR_EXIT_CODE=0
|
||||
if [ -f "dist/scripts/repair-mirrored-repos.js" ]; then
|
||||
echo "Running repository repair using compiled script..."
|
||||
bun dist/scripts/repair-mirrored-repos.js --startup
|
||||
REPAIR_EXIT_CODE=$?
|
||||
bun dist/scripts/repair-mirrored-repos.js --startup || REPAIR_EXIT_CODE=$?
|
||||
elif [ -f "scripts/repair-mirrored-repos.ts" ]; then
|
||||
echo "Running repository repair using TypeScript script..."
|
||||
bun scripts/repair-mirrored-repos.ts --startup
|
||||
REPAIR_EXIT_CODE=$?
|
||||
bun scripts/repair-mirrored-repos.ts --startup || REPAIR_EXIT_CODE=$?
|
||||
else
|
||||
echo "Warning: Repository repair script not found. Skipping repair."
|
||||
REPAIR_EXIT_CODE=0
|
||||
fi
|
||||
|
||||
# Log repair result
|
||||
|
||||
@@ -310,26 +310,25 @@ bunx tsc --noEmit
|
||||
|
||||
## Release Process
|
||||
|
||||
1. **Update version**:
|
||||
```bash
|
||||
npm version patch # or minor/major
|
||||
```
|
||||
1. **Choose release version** (`X.Y.Z`) and update `CHANGELOG.md`
|
||||
|
||||
2. **Update CHANGELOG.md**
|
||||
|
||||
3. **Build and test**:
|
||||
2. **Build and test**:
|
||||
```bash
|
||||
bun run build
|
||||
bun test
|
||||
```
|
||||
|
||||
4. **Create release**:
|
||||
3. **Create release tag** (semver format required):
|
||||
```bash
|
||||
git tag v2.23.0
|
||||
git push origin v2.23.0
|
||||
git tag vX.Y.Z
|
||||
git push origin vX.Y.Z
|
||||
```
|
||||
|
||||
5. **Create GitHub release**
|
||||
4. **Create GitHub release**
|
||||
|
||||
5. **CI version sync (automatic)**:
|
||||
- On `v*` tags, release CI updates `package.json` version in the build context from the tag (`vX.Y.Z` -> `X.Y.Z`), so Docker release images always report the correct app version.
|
||||
- After the release build succeeds, CI commits the same `package.json` version back to `main` automatically.
|
||||
|
||||
## Contributing
|
||||
|
||||
@@ -349,6 +348,6 @@ git push origin v2.23.0
|
||||
|
||||
## Getting Help
|
||||
|
||||
- Check existing [issues](https://github.com/yourusername/gitea-mirror/issues)
|
||||
- Join [discussions](https://github.com/yourusername/gitea-mirror/discussions)
|
||||
- Read the [FAQ](./FAQ.md)
|
||||
- Check existing [issues](https://github.com/RayLabsHQ/gitea-mirror/issues)
|
||||
- Join [discussions](https://github.com/RayLabsHQ/gitea-mirror/discussions)
|
||||
- Review project docs in [docs/README.md](./README.md)
|
||||
|
||||
@@ -62,6 +62,7 @@ Settings for connecting to and configuring GitHub repository sources.
|
||||
| `SKIP_FORKS` | Skip forked repositories | `false` | `true`, `false` |
|
||||
| `MIRROR_STARRED` | Mirror starred repositories | `false` | `true`, `false` |
|
||||
| `STARRED_REPOS_ORG` | Organization name for starred repos | `starred` | Any string |
|
||||
| `STARRED_REPOS_MODE` | How starred repos are mirrored | `dedicated-org` | `dedicated-org`, `preserve-owner` |
|
||||
|
||||
### Organization Settings
|
||||
|
||||
@@ -77,6 +78,7 @@ Settings for connecting to and configuring GitHub repository sources.
|
||||
| Variable | Description | Default | Options |
|
||||
|----------|-------------|---------|---------|
|
||||
| `SKIP_STARRED_ISSUES` | Enable lightweight mode for starred repos (skip issues) | `false` | `true`, `false` |
|
||||
| `AUTO_MIRROR_STARRED` | Automatically mirror starred repos during scheduled syncs and "Mirror All". When `false`, starred repos are imported for browsing but must be mirrored individually. | `false` | `true`, `false` |
|
||||
|
||||
## Gitea Configuration
|
||||
|
||||
@@ -87,6 +89,7 @@ Settings for the destination Gitea instance.
|
||||
| Variable | Description | Default | Options |
|
||||
|----------|-------------|---------|---------|
|
||||
| `GITEA_URL` | Gitea instance URL | - | Valid URL |
|
||||
| `GITEA_EXTERNAL_URL` | Optional external/browser URL used for dashboard links. API and mirroring still use `GITEA_URL`. | - | Valid URL |
|
||||
| `GITEA_TOKEN` | Gitea access token | - | - |
|
||||
| `GITEA_USERNAME` | Gitea username | - | - |
|
||||
| `GITEA_ORGANIZATION` | Default organization for single-org strategy | `github-mirrors` | Any string |
|
||||
|
||||
183
docs/FORCE_PUSH_PROTECTION.md
Normal file
183
docs/FORCE_PUSH_PROTECTION.md
Normal file
@@ -0,0 +1,183 @@
|
||||
# Force-Push Protection
|
||||
|
||||
This document describes the smart force-push protection system introduced in gitea-mirror v3.11.0+.
|
||||
|
||||
## The Problem
|
||||
|
||||
GitHub repositories can be force-pushed at any time — rewriting history, deleting branches, or replacing commits entirely. When gitea-mirror syncs a force-pushed repo, the old history in Gitea is silently overwritten. Files, commits, and branches disappear with no way to recover them.
|
||||
|
||||
The original workaround (`backupBeforeSync: true`) created a full git bundle backup before **every** sync. This doesn't scale — a user with 100+ GiB of mirrors would need up to 2 TB of backup storage with default retention settings, even though force-pushes are rare.
|
||||
|
||||
## Solution: Smart Detection
|
||||
|
||||
Instead of backing up everything every time, the system detects force-pushes **before** they happen and only acts when needed.
|
||||
|
||||
### How Detection Works
|
||||
|
||||
Before each sync, the app compares branch SHAs between Gitea (the mirror) and GitHub (the source):
|
||||
|
||||
1. **Fetch branches from both sides** — lightweight API calls to get branch names and their latest commit SHAs
|
||||
2. **Compare each branch**:
|
||||
- SHAs match → nothing changed, no action needed
|
||||
- SHAs differ → check if the change is a normal push or a force-push
|
||||
3. **Ancestry check** — for branches with different SHAs, call GitHub's compare API to determine if the new SHA is a descendant of the old one:
|
||||
- **Fast-forward** (new SHA descends from old) → normal push, safe to sync
|
||||
- **Diverged** (histories split) → force-push detected
|
||||
- **404** (old SHA doesn't exist on GitHub anymore) → history was rewritten, force-push detected
|
||||
- **Branch deleted on GitHub** → flagged as destructive change
|
||||
|
||||
### What Happens on Detection
|
||||
|
||||
Depends on the configured strategy (see below):
|
||||
- **Backup strategies** (`always`, `on-force-push`): create a git bundle snapshot, then sync
|
||||
- **Block strategy** (`block-on-force-push`): halt the sync, mark the repo as `pending-approval`, wait for user action
|
||||
|
||||
### Fail-Open Design
|
||||
|
||||
If detection itself fails (GitHub rate limits, network errors, API outages), sync proceeds normally. Detection never blocks a sync due to its own failure. Individual branch check failures are skipped — one flaky branch doesn't affect the others.
|
||||
|
||||
## Backup Strategies
|
||||
|
||||
Configure via **Settings → GitHub Configuration → Destructive Update Protection**.
|
||||
|
||||
| Strategy | What It Does | Storage Cost | Best For |
|
||||
|---|---|---|---|
|
||||
| **Disabled** | No detection, no backups | Zero | Repos you don't care about losing |
|
||||
| **Always Backup** | Snapshot before every sync (original behavior) | High | Small mirror sets, maximum safety |
|
||||
| **Smart** (default) | Detect force-pushes, backup only when found | Near-zero normally | Most users — efficient protection |
|
||||
| **Block & Approve** | Detect force-pushes, block sync until approved | Zero | Critical repos needing manual review |
|
||||
|
||||
### Strategy Details
|
||||
|
||||
#### Disabled
|
||||
|
||||
Syncs proceed without any detection or backup. If a force-push happens on GitHub, the mirror silently overwrites.
|
||||
|
||||
#### Always Backup
|
||||
|
||||
Creates a git bundle snapshot before every sync regardless of whether a force-push occurred. This is the legacy behavior (equivalent to the old `backupBeforeSync: true`). Safe but expensive for large mirror sets.
|
||||
|
||||
#### Smart (`on-force-push`) — Recommended
|
||||
|
||||
Runs the force-push detection before each sync. On normal days (no force-pushes), syncs proceed without any backup overhead. When a force-push is detected, a snapshot is created before the sync runs.
|
||||
|
||||
This gives you protection when it matters with near-zero cost when it doesn't.
|
||||
|
||||
#### Block & Approve (`block-on-force-push`)
|
||||
|
||||
Runs detection and, when a force-push is found, **blocks the sync entirely**. The repository is marked as `pending-approval` and excluded from future scheduled syncs until you take action:
|
||||
|
||||
- **Approve**: creates a backup first, then syncs (safe)
|
||||
- **Dismiss**: clears the flag and resumes normal syncing (no backup)
|
||||
|
||||
Use this for repos where you want manual control over destructive changes.
|
||||
|
||||
## Additional Settings
|
||||
|
||||
These appear when any non-disabled strategy is selected:
|
||||
|
||||
### Snapshot Retention Count
|
||||
|
||||
How many backup snapshots to keep per repository. Oldest snapshots are deleted when this limit is exceeded. Default: **5**.
|
||||
|
||||
### Snapshot Retention Days
|
||||
|
||||
Maximum age (in days) for backup snapshots. Bundles older than this are deleted during retention enforcement, though at least one bundle is always kept. Set to `0` to disable time-based retention. Default: **30**.
|
||||
|
||||
### Snapshot Directory
|
||||
|
||||
Where git bundle backups are stored. Default: **`data/repo-backups`**. Bundles are organized as `<directory>/<owner>/<repo>/<timestamp>.bundle`.
|
||||
|
||||
### Block Sync on Snapshot Failure
|
||||
|
||||
Available for **Always Backup** and **Smart** strategies. When enabled, if the snapshot creation fails (disk full, permissions error, etc.), the sync is also blocked. When disabled, sync continues even if the snapshot couldn't be created.
|
||||
|
||||
Recommended: **enabled** if you rely on backups for recovery.
|
||||
|
||||
## Backward Compatibility
|
||||
|
||||
The old `backupBeforeSync` boolean is still recognized:
|
||||
|
||||
| Old Setting | New Equivalent |
|
||||
|---|---|
|
||||
| `backupBeforeSync: true` | `backupStrategy: "on-force-push"` |
|
||||
| `backupBeforeSync: false` | `backupStrategy: "disabled"` |
|
||||
| Neither set | `backupStrategy: "on-force-push"` (new default) |
|
||||
|
||||
Existing configurations are automatically mapped. The old field is deprecated but will continue to work.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
No new environment variables are required. The backup strategy is configured through the web UI and stored in the database alongside other config.
|
||||
|
||||
## API
|
||||
|
||||
### Approve/Dismiss Blocked Repos
|
||||
|
||||
When using the `block-on-force-push` strategy, repos that are blocked can be managed via the API:
|
||||
|
||||
```bash
|
||||
# Approve sync (creates backup first, then syncs)
|
||||
curl -X POST http://localhost:4321/api/job/approve-sync \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Cookie: <session>" \
|
||||
-d '{"repositoryIds": ["<id>"], "action": "approve"}'
|
||||
|
||||
# Dismiss (clear the block, resume normal syncing)
|
||||
curl -X POST http://localhost:4321/api/job/approve-sync \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Cookie: <session>" \
|
||||
-d '{"repositoryIds": ["<id>"], "action": "dismiss"}'
|
||||
```
|
||||
|
||||
Blocked repos also show an **Approve** / **Dismiss** button in the repository table UI.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Key Files
|
||||
|
||||
| File | Purpose |
|
||||
|---|---|
|
||||
| `src/lib/utils/force-push-detection.ts` | Core detection: fetch branches, compare SHAs, check ancestry |
|
||||
| `src/lib/repo-backup.ts` | Strategy resolver, backup decision logic, bundle creation |
|
||||
| `src/lib/gitea-enhanced.ts` | Sync flow integration (calls detection + backup before mirror-sync) |
|
||||
| `src/pages/api/job/approve-sync.ts` | Approve/dismiss API endpoint |
|
||||
| `src/components/config/GitHubConfigForm.tsx` | Strategy selector UI |
|
||||
| `src/components/repositories/RepositoryTable.tsx` | Pending-approval badge + action buttons |
|
||||
|
||||
### Detection Flow
|
||||
|
||||
```
|
||||
syncGiteaRepoEnhanced()
|
||||
│
|
||||
├─ Resolve backup strategy (config → backupStrategy → backupBeforeSync → default)
|
||||
│
|
||||
├─ If strategy needs detection ("on-force-push" or "block-on-force-push"):
|
||||
│ │
|
||||
│ ├─ fetchGiteaBranches() — GET /api/v1/repos/{owner}/{repo}/branches
|
||||
│ ├─ fetchGitHubBranches() — octokit.paginate(repos.listBranches)
|
||||
│ │
|
||||
│ └─ For each Gitea branch where SHA differs:
|
||||
│ └─ checkAncestry() — octokit.repos.compareCommits()
|
||||
│ ├─ "ahead" or "identical" → fast-forward (safe)
|
||||
│ ├─ "diverged" or "behind" → force-push detected
|
||||
│ └─ 404/422 → old SHA gone → force-push detected
|
||||
│
|
||||
├─ If "block-on-force-push" + detected:
|
||||
│ └─ Set repo status to "pending-approval", return early
|
||||
│
|
||||
├─ If backup needed (always, or on-force-push + detected):
|
||||
│ └─ Create git bundle snapshot
|
||||
│
|
||||
└─ Proceed to mirror-sync
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Repos stuck in "pending-approval"**: Use the Approve or Dismiss buttons in the repository table, or call the approve-sync API endpoint.
|
||||
|
||||
**Detection always skipped**: Check the activity log for skip reasons. Common causes: Gitea repo not yet mirrored (first sync), GitHub API rate limits, network errors. All are fail-open by design.
|
||||
|
||||
**Backups consuming too much space**: Lower the retention count, or switch from "Always Backup" to "Smart" which only creates backups on actual force-pushes.
|
||||
|
||||
**False positives**: The detection compares branch-by-branch. A rebase (which is a force-push) will correctly trigger detection. If you routinely rebase branches, consider using "Smart" instead of "Block & Approve" to avoid constant approval prompts.
|
||||
486
docs/NIX_DEPLOYMENT.md
Normal file
486
docs/NIX_DEPLOYMENT.md
Normal file
@@ -0,0 +1,486 @@
|
||||
# Nix Deployment Guide
|
||||
|
||||
This guide covers deploying Gitea Mirror using Nix flakes. The Nix deployment follows the same minimal configuration philosophy as `docker-compose.alt.yml` - secrets are auto-generated, and everything else can be configured via the web UI.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Nix 2.4+ installed
|
||||
- For NixOS module: NixOS 23.05+
|
||||
|
||||
### Enable Flakes (Recommended)
|
||||
|
||||
To enable flakes permanently and avoid typing flags, add to `/etc/nix/nix.conf` or `~/.config/nix/nix.conf`:
|
||||
```
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
**Note:** If you don't enable flakes globally, add `--extra-experimental-features 'nix-command flakes'` to all nix commands shown below.
|
||||
|
||||
## Quick Start (Zero Configuration!)
|
||||
|
||||
### Run Immediately - No Setup Required
|
||||
|
||||
```bash
|
||||
# Run directly from the flake (local)
|
||||
nix run --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
|
||||
# Or from GitHub (once published)
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# If you have flakes enabled globally, simply:
|
||||
nix run .#gitea-mirror
|
||||
```
|
||||
|
||||
That's it! On first run:
|
||||
- Secrets (`BETTER_AUTH_SECRET` and `ENCRYPTION_SECRET`) are auto-generated
|
||||
- Database is automatically created and initialized
|
||||
- Startup recovery and repair scripts run automatically
|
||||
- Access the web UI at http://localhost:4321
|
||||
|
||||
Everything else (GitHub credentials, Gitea settings, mirror options) is configured through the web interface after signup.
|
||||
|
||||
### Development Environment
|
||||
|
||||
```bash
|
||||
# Enter development shell with all dependencies
|
||||
nix develop --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Or use direnv for automatic environment loading (handles flags automatically)
|
||||
echo "use flake" > .envrc
|
||||
direnv allow
|
||||
```
|
||||
|
||||
### Build and Install
|
||||
|
||||
```bash
|
||||
# Build the package
|
||||
nix build --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Run the built package
|
||||
./result/bin/gitea-mirror
|
||||
|
||||
# Install to your profile
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
```
|
||||
|
||||
## What Happens on First Run?
|
||||
|
||||
Following the same pattern as the Docker deployment, the Nix package automatically:
|
||||
|
||||
1. **Creates data directory**: `~/.local/share/gitea-mirror` (or `$DATA_DIR`)
|
||||
2. **Generates secrets** (stored securely in data directory):
|
||||
- `BETTER_AUTH_SECRET` - Session authentication (32-char hex)
|
||||
- `ENCRYPTION_SECRET` - Token encryption (48-char base64)
|
||||
3. **Initializes database**: SQLite database with Drizzle migrations
|
||||
4. **Runs startup scripts**:
|
||||
- Environment configuration loader
|
||||
- Crash recovery for interrupted jobs
|
||||
- Repository status repair
|
||||
5. **Starts the application** with graceful shutdown handling
|
||||
|
||||
## NixOS Module - Minimal Deployment
|
||||
|
||||
### Simplest Possible Configuration
|
||||
|
||||
Add to your NixOS configuration (`/etc/nixos/configuration.nix`):
|
||||
|
||||
```nix
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
};
|
||||
|
||||
outputs = { nixpkgs, gitea-mirror, ... }: {
|
||||
nixosConfigurations.your-hostname = nixpkgs.lib.nixosSystem {
|
||||
system = "x86_64-linux";
|
||||
modules = [
|
||||
gitea-mirror.nixosModules.default
|
||||
{
|
||||
# That's it! Just enable the service
|
||||
services.gitea-mirror.enable = true;
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
Apply with:
|
||||
```bash
|
||||
sudo nixos-rebuild switch
|
||||
```
|
||||
|
||||
Access at http://localhost:4321, sign up (first user is admin), and configure everything via the web UI.
|
||||
|
||||
### Production Configuration
|
||||
|
||||
For production with custom domain and firewall:
|
||||
|
||||
```nix
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
host = "0.0.0.0";
|
||||
port = 4321;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
betterAuthTrustedOrigins = "https://mirror.example.com";
|
||||
openFirewall = true;
|
||||
};
|
||||
|
||||
# Optional: Use with nginx reverse proxy
|
||||
services.nginx = {
|
||||
enable = true;
|
||||
virtualHosts."mirror.example.com" = {
|
||||
locations."/" = {
|
||||
proxyPass = "http://127.0.0.1:4321";
|
||||
proxyWebsockets = true;
|
||||
};
|
||||
enableACME = true;
|
||||
forceSSL = true;
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced: Manual Secret Management
|
||||
|
||||
If you prefer to manage secrets manually (e.g., with sops-nix or agenix):
|
||||
|
||||
1. Create a secrets file:
|
||||
```bash
|
||||
# /var/lib/gitea-mirror/secrets.env
|
||||
BETTER_AUTH_SECRET=your-32-character-minimum-secret-key-here
|
||||
ENCRYPTION_SECRET=your-encryption-secret-here
|
||||
```
|
||||
|
||||
2. Reference it in your configuration:
|
||||
```nix
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
environmentFile = "/var/lib/gitea-mirror/secrets.env";
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### Full Configuration Options
|
||||
|
||||
```nix
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
package = gitea-mirror.packages.x86_64-linux.default; # Override package
|
||||
dataDir = "/var/lib/gitea-mirror";
|
||||
user = "gitea-mirror";
|
||||
group = "gitea-mirror";
|
||||
host = "0.0.0.0";
|
||||
port = 4321;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
betterAuthTrustedOrigins = "https://mirror.example.com";
|
||||
|
||||
# Concurrency controls (match docker-compose.alt.yml)
|
||||
mirrorIssueConcurrency = 3; # Set to 1 for perfect chronological order
|
||||
mirrorPullRequestConcurrency = 5; # Set to 1 for perfect chronological order
|
||||
|
||||
environmentFile = null; # Optional secrets file
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Service Management (NixOS)
|
||||
|
||||
```bash
|
||||
# Start the service
|
||||
sudo systemctl start gitea-mirror
|
||||
|
||||
# Stop the service
|
||||
sudo systemctl stop gitea-mirror
|
||||
|
||||
# Restart the service
|
||||
sudo systemctl restart gitea-mirror
|
||||
|
||||
# Check status
|
||||
sudo systemctl status gitea-mirror
|
||||
|
||||
# View logs
|
||||
sudo journalctl -u gitea-mirror -f
|
||||
|
||||
# Health check
|
||||
curl http://localhost:4321/api/health
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
All variables from `docker-compose.alt.yml` are supported:
|
||||
|
||||
```bash
|
||||
# === AUTO-GENERATED (Don't set unless you want specific values) ===
|
||||
BETTER_AUTH_SECRET # Auto-generated, stored in data dir
|
||||
ENCRYPTION_SECRET # Auto-generated, stored in data dir
|
||||
|
||||
# === CORE SETTINGS (Have good defaults) ===
|
||||
DATA_DIR="$HOME/.local/share/gitea-mirror"
|
||||
DATABASE_URL="file:$DATA_DIR/gitea-mirror.db"
|
||||
HOST="0.0.0.0"
|
||||
PORT="4321"
|
||||
NODE_ENV="production"
|
||||
|
||||
# === BETTER AUTH (Override for custom domains) ===
|
||||
BETTER_AUTH_URL="http://localhost:4321"
|
||||
BETTER_AUTH_TRUSTED_ORIGINS="http://localhost:4321"
|
||||
PUBLIC_BETTER_AUTH_URL="http://localhost:4321"
|
||||
|
||||
# === CONCURRENCY CONTROLS ===
|
||||
MIRROR_ISSUE_CONCURRENCY=3 # Default: 3 (set to 1 for perfect order)
|
||||
MIRROR_PULL_REQUEST_CONCURRENCY=5 # Default: 5 (set to 1 for perfect order)
|
||||
|
||||
# === CONFIGURE VIA WEB UI (Not needed at startup) ===
|
||||
# GitHub credentials, Gitea settings, mirror options, scheduling, etc.
|
||||
# All configured after signup through the web interface
|
||||
```
|
||||
|
||||
## Database Management
|
||||
|
||||
The Nix package includes a database management helper:
|
||||
|
||||
```bash
|
||||
# Initialize database (done automatically on first run)
|
||||
gitea-mirror-db init
|
||||
|
||||
# Check database health
|
||||
gitea-mirror-db check
|
||||
|
||||
# Fix database issues
|
||||
gitea-mirror-db fix
|
||||
|
||||
# Reset users
|
||||
gitea-mirror-db reset-users
|
||||
```
|
||||
|
||||
## Home Manager Integration
|
||||
|
||||
For single-user deployments:
|
||||
|
||||
```nix
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
  gitea-mirror = (builtins.getFlake "github:RayLabsHQ/gitea-mirror").packages.${pkgs.system}.default;
|
||||
in {
|
||||
home.packages = [ gitea-mirror ];
|
||||
|
||||
# Optional: Run as user service
|
||||
systemd.user.services.gitea-mirror = {
|
||||
Unit = {
|
||||
Description = "Gitea Mirror Service";
|
||||
After = [ "network.target" ];
|
||||
};
|
||||
|
||||
Service = {
|
||||
Type = "simple";
|
||||
ExecStart = "${gitea-mirror}/bin/gitea-mirror";
|
||||
Restart = "always";
|
||||
Environment = [
|
||||
"DATA_DIR=%h/.local/share/gitea-mirror"
|
||||
"HOST=127.0.0.1"
|
||||
"PORT=4321"
|
||||
];
|
||||
};
|
||||
|
||||
Install = {
|
||||
WantedBy = [ "default.target" ];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Docker Image from Nix (Optional)
|
||||
|
||||
You can also use Nix to create a Docker image:
|
||||
|
||||
```nix
|
||||
# Add to flake.nix packages section
|
||||
dockerImage = pkgs.dockerTools.buildLayeredImage {
|
||||
name = "gitea-mirror";
|
||||
tag = "latest";
|
||||
contents = [ self.packages.${system}.default pkgs.cacert pkgs.openssl ];
|
||||
config = {
|
||||
Cmd = [ "${self.packages.${system}.default}/bin/gitea-mirror" ];
|
||||
ExposedPorts = { "4321/tcp" = {}; };
|
||||
Env = [
|
||||
"DATA_DIR=/data"
|
||||
"DATABASE_URL=file:/data/gitea-mirror.db"
|
||||
];
|
||||
Volumes = { "/data" = {}; };
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
Build and load:
|
||||
```bash
|
||||
nix build --extra-experimental-features 'nix-command flakes' .#dockerImage
|
||||
docker load < result
|
||||
docker run -p 4321:4321 -v gitea-mirror-data:/data gitea-mirror:latest
|
||||
```
|
||||
|
||||
## Comparison: Docker vs Nix
|
||||
|
||||
Both deployment methods follow the same philosophy:
|
||||
|
||||
| Feature | Docker Compose | Nix |
|
||||
|---------|---------------|-----|
|
||||
| **Configuration** | Minimal (only BETTER_AUTH_SECRET) | Zero config (auto-generated) |
|
||||
| **Secret Generation** | Auto-generated & persisted | Auto-generated & persisted |
|
||||
| **Database Init** | Automatic on first run | Automatic on first run |
|
||||
| **Startup Scripts** | Runs recovery/repair/env-config | Runs recovery/repair/env-config |
|
||||
| **Graceful Shutdown** | Signal handling in entrypoint | Signal handling in wrapper |
|
||||
| **Health Check** | Docker healthcheck | systemd timer (optional) |
|
||||
| **Updates** | `docker pull` | `nix flake update && nixos-rebuild` |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Check Auto-Generated Secrets
|
||||
```bash
|
||||
# For standalone
|
||||
cat ~/.local/share/gitea-mirror/.better_auth_secret
|
||||
cat ~/.local/share/gitea-mirror/.encryption_secret
|
||||
|
||||
# For NixOS service
|
||||
sudo cat /var/lib/gitea-mirror/.better_auth_secret
|
||||
sudo cat /var/lib/gitea-mirror/.encryption_secret
|
||||
```
|
||||
|
||||
### Database Issues
|
||||
```bash
|
||||
# Check if database exists
|
||||
ls -la ~/.local/share/gitea-mirror/gitea-mirror.db
|
||||
|
||||
# Reinitialize (deletes all data!)
|
||||
rm ~/.local/share/gitea-mirror/gitea-mirror.db
|
||||
gitea-mirror-db init
|
||||
```
|
||||
|
||||
### Permission Issues (NixOS)
|
||||
```bash
|
||||
sudo chown -R gitea-mirror:gitea-mirror /var/lib/gitea-mirror
|
||||
sudo chmod 700 /var/lib/gitea-mirror
|
||||
```
|
||||
|
||||
### Port Already in Use
|
||||
```bash
|
||||
# Change port
|
||||
export PORT=8080
|
||||
gitea-mirror
|
||||
|
||||
# Or in NixOS config
|
||||
services.gitea-mirror.port = 8080;
|
||||
```
|
||||
|
||||
### View Startup Logs
|
||||
```bash
|
||||
# Standalone (verbose output on console)
|
||||
gitea-mirror
|
||||
|
||||
# NixOS service
|
||||
sudo journalctl -u gitea-mirror -f --since "5 minutes ago"
|
||||
```
|
||||
|
||||
## Updating
|
||||
|
||||
### Standalone Installation
|
||||
```bash
|
||||
# Update flake lock
|
||||
nix flake update --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Rebuild
|
||||
nix build --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Or update profile
|
||||
nix profile upgrade --extra-experimental-features 'nix-command flakes' gitea-mirror
|
||||
```
|
||||
|
||||
### NixOS
|
||||
```bash
|
||||
# Update input
|
||||
sudo nix flake lock --update-input gitea-mirror --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Rebuild system
|
||||
sudo nixos-rebuild switch --flake .#your-hostname
|
||||
```
|
||||
|
||||
## Migration from Docker
|
||||
|
||||
To migrate from Docker to Nix while keeping your data:
|
||||
|
||||
1. **Stop Docker container:**
|
||||
```bash
|
||||
docker-compose -f docker-compose.alt.yml down
|
||||
```
|
||||
|
||||
2. **Copy data directory:**
|
||||
```bash
|
||||
# For standalone
|
||||
cp -r ./data ~/.local/share/gitea-mirror
|
||||
|
||||
# For NixOS
|
||||
sudo cp -r ./data /var/lib/gitea-mirror
|
||||
sudo chown -R gitea-mirror:gitea-mirror /var/lib/gitea-mirror
|
||||
```
|
||||
|
||||
3. **Copy secrets (if you want to keep them):**
|
||||
```bash
|
||||
# Extract from Docker volume
|
||||
docker run --rm -v gitea-mirror_data:/data alpine \
|
||||
cat /data/.better_auth_secret > better_auth_secret
|
||||
docker run --rm -v gitea-mirror_data:/data alpine \
|
||||
cat /data/.encryption_secret > encryption_secret
|
||||
|
||||
# Copy to new location
|
||||
cp better_auth_secret ~/.local/share/gitea-mirror/.better_auth_secret
|
||||
cp encryption_secret ~/.local/share/gitea-mirror/.encryption_secret
|
||||
chmod 600 ~/.local/share/gitea-mirror/.*_secret
|
||||
```
|
||||
|
||||
4. **Start Nix version:**
|
||||
```bash
|
||||
gitea-mirror
|
||||
```
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
Example GitHub Actions workflow (see `.github/workflows/nix-build.yml`):
|
||||
|
||||
```yaml
|
||||
name: Nix Build
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DeterminateSystems/nix-installer-action@main
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: nix flake check
|
||||
- run: nix build --print-build-logs
|
||||
```
|
||||
|
||||
This uses:
|
||||
- **Determinate Nix Installer** - Fast, reliable Nix installation with flakes enabled by default
|
||||
- **Magic Nix Cache** - Free caching using GitHub Actions cache (no account needed)
|
||||
|
||||
## Resources
|
||||
|
||||
- [Nix Manual](https://nixos.org/manual/nix/stable/)
|
||||
- [NixOS Options Search](https://search.nixos.org/options)
|
||||
- [Nix Pills Tutorial](https://nixos.org/guides/nix-pills/)
|
||||
- [Project Documentation](../README.md)
|
||||
- [Docker Deployment](../docker-compose.alt.yml) - Equivalent minimal config
|
||||
322
docs/NIX_DISTRIBUTION.md
Normal file
322
docs/NIX_DISTRIBUTION.md
Normal file
@@ -0,0 +1,322 @@
|
||||
# Nix Package Distribution Guide
|
||||
|
||||
This guide explains how Gitea Mirror is distributed via Nix and how users can consume it.
|
||||
|
||||
## Distribution Methods
|
||||
|
||||
### Method 1: Direct GitHub Usage (Zero Infrastructure)
|
||||
|
||||
**No CI, releases, or setup needed!** Users can consume directly from GitHub:
|
||||
|
||||
```bash
|
||||
# Latest from main branch
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Pin to specific commit
|
||||
nix run github:RayLabsHQ/gitea-mirror/abc123def
|
||||
|
||||
# Pin to git tag
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
**How it works:**
|
||||
1. Nix fetches the repository from GitHub
|
||||
2. Nix reads `flake.nix` and `flake.lock`
|
||||
3. Nix builds the package locally on the user's machine
|
||||
4. Package is cached in `/nix/store` for reuse
|
||||
|
||||
**Pros:**
|
||||
- Zero infrastructure needed
|
||||
- Works immediately after pushing code
|
||||
- Users always get reproducible builds
|
||||
|
||||
**Cons:**
|
||||
- Users must build from source (slower first time)
|
||||
- Requires build dependencies (Bun, etc.)
|
||||
|
||||
---
|
||||
|
||||
### Method 2: CI Build Caching
|
||||
|
||||
The GitHub Actions workflow uses **Magic Nix Cache** (by Determinate Systems) to cache builds:
|
||||
|
||||
- **Zero configuration required** - no accounts or tokens needed
|
||||
- **Automatic** - CI workflow handles everything
|
||||
- **Uses GitHub Actions cache** - fast, reliable, free
|
||||
|
||||
#### How It Works:
|
||||
|
||||
1. GitHub Actions builds the package on each push/PR
|
||||
2. Build artifacts are cached in GitHub Actions cache
|
||||
3. Subsequent builds reuse cached dependencies (faster CI)
|
||||
|
||||
Note: This caches CI builds. Users still build locally, but the flake.lock ensures reproducibility.
|
||||
|
||||
---
|
||||
|
||||
### Method 3: nixpkgs Submission (Official Distribution)
|
||||
|
||||
Submit to the official Nix package repository for maximum visibility.
|
||||
|
||||
#### Process:
|
||||
|
||||
1. **Prepare package** (already done with `flake.nix`)
|
||||
2. **Test thoroughly**
|
||||
3. **Submit PR to nixpkgs:** https://github.com/NixOS/nixpkgs
|
||||
|
||||
#### User Experience:
|
||||
|
||||
```bash
|
||||
# After acceptance into nixpkgs
|
||||
nix run nixpkgs#gitea-mirror
|
||||
|
||||
# NixOS configuration
|
||||
environment.systemPackages = [ pkgs.gitea-mirror ];
|
||||
```
|
||||
|
||||
**Pros:**
|
||||
- Maximum discoverability (official repo)
|
||||
- Trusted by Nix community
|
||||
- Included in NixOS search
|
||||
- Binary caching by cache.nixos.org
|
||||
|
||||
**Cons:**
|
||||
- Submission/review process
|
||||
- Must follow nixpkgs guidelines
|
||||
- Updates require PRs
|
||||
|
||||
---
|
||||
|
||||
## Current Distribution Strategy
|
||||
|
||||
### Phase 1: Direct GitHub (Immediate) ✅
|
||||
|
||||
Already working! Users can:
|
||||
|
||||
```bash
|
||||
nix run github:RayLabsHQ/gitea-mirror
|
||||
```
|
||||
|
||||
### Phase 2: CI Build Validation ✅
|
||||
|
||||
GitHub Actions workflow validates builds on every push/PR:
|
||||
|
||||
- Uses Magic Nix Cache for fast CI builds
|
||||
- Tests on both Linux and macOS
|
||||
- No setup required - works automatically
|
||||
|
||||
### Phase 3: Version Releases (Optional)
|
||||
|
||||
Tag releases for version pinning:
|
||||
|
||||
```bash
|
||||
git tag vX.Y.Z
|
||||
git push origin vX.Y.Z
|
||||
|
||||
# Users can then pin:
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
### Phase 4: nixpkgs Submission (Long Term)
|
||||
|
||||
Once package is stable and well-tested, submit to nixpkgs.
|
||||
|
||||
---
|
||||
|
||||
## User Documentation
|
||||
|
||||
### For Users: How to Install
|
||||
|
||||
Add this to your `docs/NIX_DEPLOYMENT.md`:
|
||||
|
||||
#### Option 1: Direct Install (No Configuration)
|
||||
|
||||
```bash
|
||||
# Run immediately
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Install to profile
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
```
|
||||
|
||||
#### Option 2: Pin to Specific Version
|
||||
|
||||
```bash
|
||||
# Pin to git tag
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
|
||||
# Pin to commit
|
||||
nix run github:RayLabsHQ/gitea-mirror/abc123def
|
||||
|
||||
# Lock in flake.nix
|
||||
inputs.gitea-mirror.url = "github:RayLabsHQ/gitea-mirror/vX.Y.Z";
|
||||
```
|
||||
|
||||
#### Option 3: NixOS Configuration
|
||||
|
||||
```nix
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
# Or pin to version:
|
||||
# gitea-mirror.url = "github:RayLabsHQ/gitea-mirror/vX.Y.Z";
|
||||
};
|
||||
|
||||
outputs = { nixpkgs, gitea-mirror, ... }: {
|
||||
nixosConfigurations.your-host = nixpkgs.lib.nixosSystem {
|
||||
modules = [
|
||||
gitea-mirror.nixosModules.default
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Maintaining the Distribution
|
||||
|
||||
### Releasing New Versions
|
||||
|
||||
```bash
|
||||
# 1. Update version in package.json
|
||||
vim package.json # Update version field
|
||||
|
||||
# 2. Update flake.nix version (line 17)
|
||||
vim flake.nix # Update version = "X.Y.Z";
|
||||
|
||||
# 3. Commit changes
|
||||
git add package.json flake.nix
|
||||
git commit -m "chore: bump version to vX.Y.Z"
|
||||
|
||||
# 4. Create git tag
|
||||
git tag vX.Y.Z
|
||||
git push origin main
|
||||
git push origin vX.Y.Z
|
||||
|
||||
# 5. GitHub Actions builds and caches automatically
|
||||
```
|
||||
|
||||
Users can then pin to the new version:
|
||||
```bash
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
### Updating Flake Lock
|
||||
|
||||
The `flake.lock` file pins all dependencies. Update it periodically:
|
||||
|
||||
```bash
|
||||
# Update all inputs
|
||||
nix flake update
|
||||
|
||||
# Update specific input
|
||||
nix flake lock --update-input nixpkgs
|
||||
|
||||
# Test after update
|
||||
nix build
|
||||
nix flake check
|
||||
|
||||
# Commit the updated lock file
|
||||
git add flake.lock
|
||||
git commit -m "chore: update flake dependencies"
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting Distribution Issues
|
||||
|
||||
### Users Report Build Failures
|
||||
|
||||
1. **Check GitHub Actions:** Ensure CI is passing
|
||||
2. **Test locally:** `nix flake check`
|
||||
3. **Check flake.lock:** May need update if dependencies changed
|
||||
|
||||
### CI Cache Not Working
|
||||
|
||||
1. **Check workflow logs:** Review GitHub Actions for errors
|
||||
2. **Clear cache:** GitHub Actions → Caches → Delete relevant cache
|
||||
3. **Verify flake.lock:** May need `nix flake update` if dependencies changed
|
||||
|
||||
### Version Pinning Not Working
|
||||
|
||||
```bash
|
||||
# Verify tag exists
|
||||
git tag -l
|
||||
|
||||
# Ensure tag is pushed
|
||||
git ls-remote --tags origin
|
||||
|
||||
# Test specific tag
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Advanced: Custom Binary Cache
|
||||
|
||||
If you prefer to self-host a binary cache instead of relying on CI caching:
|
||||
|
||||
### Option 1: S3-Compatible Storage
|
||||
|
||||
```bash
|
||||
# Generate signing key
|
||||
nix-store --generate-binary-cache-key cache.example.com cache-priv-key.pem cache-pub-key.pem
|
||||
|
||||
# Push to S3
|
||||
nix copy --to s3://my-nix-cache?region=us-east-1 $(nix-build)
|
||||
```
|
||||
|
||||
Users configure:
|
||||
```ini
|
||||
substituters = https://my-bucket.s3.amazonaws.com/nix-cache
|
||||
trusted-public-keys = cache.example.com:BASE64_PUBLIC_KEY
|
||||
```
|
||||
|
||||
### Option 2: Self-Hosted Nix Store
|
||||
|
||||
Run `nix-serve` on your server:
|
||||
|
||||
```bash
|
||||
# On server
|
||||
nix-serve -p 8080
|
||||
|
||||
# Behind nginx/caddy
|
||||
proxy_pass http://localhost:8080;
|
||||
```
|
||||
|
||||
Users configure:
|
||||
```ini
|
||||
substituters = https://cache.example.com
|
||||
trusted-public-keys = YOUR_KEY
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Comparison: Distribution Methods
|
||||
|
||||
| Method | Setup Time | User Speed | Cost | Discoverability |
|
||||
|--------|-----------|------------|------|-----------------|
|
||||
| Direct GitHub | 0 min | Slow (build) | Free | Low |
|
||||
| nixpkgs | Hours/days | Fast (binary) | Free | High |
|
||||
| Self-hosted cache | 30+ min | Fast (binary) | Server cost | Low |
|
||||
|
||||
**Current approach:** Direct GitHub consumption with CI validation using Magic Nix Cache. Users build locally (reproducible via flake.lock). Consider **nixpkgs** submission for maximum reach once the package is mature.
|
||||
|
||||
---
|
||||
|
||||
## Resources
|
||||
|
||||
- [Nix Flakes Documentation](https://nixos.wiki/wiki/Flakes)
|
||||
- [Magic Nix Cache](https://github.com/DeterminateSystems/magic-nix-cache-action)
|
||||
- [nixpkgs Contributing Guide](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md)
|
||||
- [Nix Binary Cache Setup](https://nixos.org/manual/nix/stable/package-management/binary-cache-substituter.html)
|
||||
88
docs/NOTIFICATIONS.md
Normal file
88
docs/NOTIFICATIONS.md
Normal file
@@ -0,0 +1,88 @@
|
||||
# Notifications
|
||||
|
||||
Gitea Mirror supports push notifications for mirror events. You can be alerted when jobs succeed, fail, or when new repositories are discovered.
|
||||
|
||||
## Supported Providers
|
||||
|
||||
### 1. Ntfy.sh (Direct)
|
||||
|
||||
[Ntfy.sh](https://ntfy.sh) is a simple HTTP-based pub-sub notification service. You can use the public server at `https://ntfy.sh` or self-host your own instance.
|
||||
|
||||
**Setup (public server):**
|
||||
1. Go to **Configuration > Notifications**
|
||||
2. Enable notifications and select **Ntfy.sh** as the provider
|
||||
3. Set the **Topic** to a unique name (e.g., `my-gitea-mirror-abc123`)
|
||||
4. Leave the Server URL as `https://ntfy.sh`
|
||||
5. Subscribe to the same topic on your phone or desktop using the [ntfy app](https://ntfy.sh/docs/subscribe/phone/)
|
||||
|
||||
**Setup (self-hosted):**
|
||||
1. Deploy ntfy using Docker: `docker run -p 8080:80 binwiederhier/ntfy serve`
|
||||
2. Set the **Server URL** to your instance (e.g., `http://localhost:8080` from the host, or `http://ntfy:80` from another container on the same Docker network)
|
||||
3. If authentication is enabled, provide an **Access token**
|
||||
4. Set your **Topic** name
|
||||
|
||||
**Priority levels:**
|
||||
- `min` / `low` / `default` / `high` / `urgent`
|
||||
- Error notifications automatically use `high` priority regardless of the default setting
|
||||
|
||||
### 2. Apprise API (Aggregator)
|
||||
|
||||
[Apprise](https://github.com/caronc/apprise-api) is a notification aggregator that supports 100+ services (Slack, Discord, Telegram, Email, Pushover, and many more) through a single API.
|
||||
|
||||
**Setup:**
|
||||
1. Deploy the Apprise API server:
|
||||
```yaml
|
||||
# docker-compose.yml
|
||||
services:
|
||||
apprise:
|
||||
image: caronc/apprise:latest
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
- apprise-config:/config
|
||||
volumes:
|
||||
apprise-config:
|
||||
```
|
||||
2. Configure your notification services in Apprise (via its web UI at `http://localhost:8000` or API)
|
||||
3. Create a configuration token/key in Apprise
|
||||
4. In Gitea Mirror, go to **Configuration > Notifications**
|
||||
5. Enable notifications and select **Apprise API**
|
||||
6. Set the **Server URL** to your Apprise instance (e.g., `http://apprise:8000`)
|
||||
7. Enter the **Token/path** you created in step 3
|
||||
|
||||
**Tag filtering:**
|
||||
- Optionally set a **Tag** to only notify specific Apprise services
|
||||
- Leave empty to notify all configured services
|
||||
|
||||
## Event Types
|
||||
|
||||
| Event | Default | Description |
|
||||
|-------|---------|-------------|
|
||||
| Sync errors | On | A mirror job failed |
|
||||
| Sync success | Off | A mirror job completed successfully |
|
||||
| New repo discovered | Off | A new GitHub repo was auto-imported during scheduled sync |
|
||||
|
||||
## Testing
|
||||
|
||||
Use the **Send Test Notification** button on the Notifications settings page to verify your configuration. The test sends a sample success notification to your configured provider.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Notifications not arriving:**
|
||||
- Check that notifications are enabled in the settings
|
||||
- Verify the provider configuration (URL, topic/token)
|
||||
- Use the Test button to check connectivity
|
||||
- Check the server logs for `[NotificationService]` messages
|
||||
|
||||
**Ntfy authentication errors:**
|
||||
- Ensure your access token is correct
|
||||
- If self-hosting, verify the ntfy server allows the topic
|
||||
|
||||
**Apprise connection refused:**
|
||||
- Verify the Apprise API server is running and accessible from the Gitea Mirror container
|
||||
- If using Docker, ensure both containers are on the same network
|
||||
- Check the Apprise server logs for errors
|
||||
|
||||
**Tokens and security:**
|
||||
- Notification tokens (ntfy access tokens, Apprise tokens) are encrypted at rest using the same AES-256-GCM encryption as GitHub/Gitea tokens
|
||||
- Tokens are decrypted only when sending notifications or displaying in the settings UI
|
||||
@@ -7,6 +7,8 @@ This folder contains engineering and operations references for the open-source G
|
||||
### Core workflow
|
||||
- **[DEVELOPMENT_WORKFLOW.md](./DEVELOPMENT_WORKFLOW.md)** – Set up a local environment, run scripts, and understand the repo layout (app + marketing site).
|
||||
- **[ENVIRONMENT_VARIABLES.md](./ENVIRONMENT_VARIABLES.md)** – Complete reference for every configuration flag supported by the app and Docker images.
|
||||
- **[NIX_DEPLOYMENT.md](./NIX_DEPLOYMENT.md)** – User-facing deployment guide for Nix and NixOS.
|
||||
- **[NIX_DISTRIBUTION.md](./NIX_DISTRIBUTION.md)** – Maintainer notes for packaging, releases, and distribution strategy.
|
||||
|
||||
### Reliability & recovery
|
||||
- **[GRACEFUL_SHUTDOWN.md](./GRACEFUL_SHUTDOWN.md)** – How signal handling, shutdown coordination, and job persistence work in v3.
|
||||
@@ -32,8 +34,6 @@ The first user you create locally becomes the administrator. All other configura
|
||||
## Contributing & support
|
||||
|
||||
- 🎯 Contribution guide: [../CONTRIBUTING.md](../CONTRIBUTING.md)
|
||||
- 📘 Code of conduct: [../CODE_OF_CONDUCT.md](../CODE_OF_CONDUCT.md)
|
||||
- 🐞 Issues & feature requests: <https://github.com/RayLabsHQ/gitea-mirror/issues>
|
||||
- 💬 Discussions: <https://github.com/RayLabsHQ/gitea-mirror/discussions>
|
||||
|
||||
Security disclosures should follow the process in [../SECURITY.md](../SECURITY.md).
|
||||
- 🔐 Security policy & advisories: <https://github.com/RayLabsHQ/gitea-mirror/security>
|
||||
|
||||
BIN
docs/images/add-repo-target-org.png
Normal file
BIN
docs/images/add-repo-target-org.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 22 KiB |
149
drizzle/0009_nervous_tyger_tiger.sql
Normal file
149
drizzle/0009_nervous_tyger_tiger.sql
Normal file
@@ -0,0 +1,149 @@
|
||||
-- Migration 0009: add the `imported_at` column to `repositories`.
-- SQLite cannot ALTER TABLE to add a column with a non-constant default,
-- so we use the standard recreate pattern: build `__new_repositories`,
-- copy and backfill data, drop the old table, rename, and restore indexes.
-- NOTE(review): drizzle usually wraps table recreation in
-- `PRAGMA foreign_keys=OFF/ON`; confirm the migration runner disables FK
-- enforcement before the DROP TABLE below.
CREATE TABLE `__new_repositories` (
	`id` text PRIMARY KEY NOT NULL,
	`user_id` text NOT NULL,
	`config_id` text NOT NULL,
	`name` text NOT NULL,
	`full_name` text NOT NULL,
	`normalized_full_name` text NOT NULL,
	`url` text NOT NULL,
	`clone_url` text NOT NULL,
	`owner` text NOT NULL,
	`organization` text,
	`mirrored_location` text DEFAULT '',
	`is_private` integer DEFAULT false NOT NULL,
	`is_fork` integer DEFAULT false NOT NULL,
	`forked_from` text,
	`has_issues` integer DEFAULT false NOT NULL,
	`is_starred` integer DEFAULT false NOT NULL,
	`is_archived` integer DEFAULT false NOT NULL,
	`size` integer DEFAULT 0 NOT NULL,
	`has_lfs` integer DEFAULT false NOT NULL,
	`has_submodules` integer DEFAULT false NOT NULL,
	`language` text,
	`description` text,
	`default_branch` text NOT NULL,
	`visibility` text DEFAULT 'public' NOT NULL,
	`status` text DEFAULT 'imported' NOT NULL,
	`last_mirrored` integer,
	`error_message` text,
	`destination_org` text,
	`metadata` text,
	`imported_at` integer DEFAULT (unixepoch()) NOT NULL,
	`created_at` integer DEFAULT (unixepoch()) NOT NULL,
	`updated_at` integer DEFAULT (unixepoch()) NOT NULL,
	FOREIGN KEY (`user_id`) REFERENCES `users`(`id`) ON UPDATE no action ON DELETE no action,
	FOREIGN KEY (`config_id`) REFERENCES `configs`(`id`) ON UPDATE no action ON DELETE no action
);
--> statement-breakpoint
-- Copy all rows. `imported_at` is backfilled from the earliest matching
-- 'imported' mirror job (by id, or by normalized/plain name when the job
-- has no repository_id), falling back to the row's created_at, then to now.
INSERT INTO `__new_repositories` (
	`id`,
	`user_id`,
	`config_id`,
	`name`,
	`full_name`,
	`normalized_full_name`,
	`url`,
	`clone_url`,
	`owner`,
	`organization`,
	`mirrored_location`,
	`is_private`,
	`is_fork`,
	`forked_from`,
	`has_issues`,
	`is_starred`,
	`is_archived`,
	`size`,
	`has_lfs`,
	`has_submodules`,
	`language`,
	`description`,
	`default_branch`,
	`visibility`,
	`status`,
	`last_mirrored`,
	`error_message`,
	`destination_org`,
	`metadata`,
	`imported_at`,
	`created_at`,
	`updated_at`
)
SELECT
	`repositories`.`id`,
	`repositories`.`user_id`,
	`repositories`.`config_id`,
	`repositories`.`name`,
	`repositories`.`full_name`,
	`repositories`.`normalized_full_name`,
	`repositories`.`url`,
	`repositories`.`clone_url`,
	`repositories`.`owner`,
	`repositories`.`organization`,
	`repositories`.`mirrored_location`,
	`repositories`.`is_private`,
	`repositories`.`is_fork`,
	`repositories`.`forked_from`,
	`repositories`.`has_issues`,
	`repositories`.`is_starred`,
	`repositories`.`is_archived`,
	`repositories`.`size`,
	`repositories`.`has_lfs`,
	`repositories`.`has_submodules`,
	`repositories`.`language`,
	`repositories`.`description`,
	`repositories`.`default_branch`,
	`repositories`.`visibility`,
	`repositories`.`status`,
	`repositories`.`last_mirrored`,
	`repositories`.`error_message`,
	`repositories`.`destination_org`,
	`repositories`.`metadata`,
	COALESCE(
		(
			SELECT MIN(`mj`.`timestamp`)
			FROM `mirror_jobs` `mj`
			WHERE `mj`.`user_id` = `repositories`.`user_id`
				AND `mj`.`status` = 'imported'
				AND (
					(`mj`.`repository_id` IS NOT NULL AND `mj`.`repository_id` = `repositories`.`id`)
					OR (
						`mj`.`repository_id` IS NULL
						AND `mj`.`repository_name` IS NOT NULL
						AND (
							lower(trim(`mj`.`repository_name`)) = `repositories`.`normalized_full_name`
							OR lower(trim(`mj`.`repository_name`)) = lower(trim(`repositories`.`name`))
						)
					)
				)
		),
		`repositories`.`created_at`,
		unixepoch()
	) AS `imported_at`,
	`repositories`.`created_at`,
	`repositories`.`updated_at`
FROM `repositories`;
--> statement-breakpoint
DROP TABLE `repositories`;
--> statement-breakpoint
ALTER TABLE `__new_repositories` RENAME TO `repositories`;
--> statement-breakpoint
-- Recreate every index that existed on the old table, plus the new
-- (user_id, imported_at) index used for recent-import queries.
CREATE INDEX `idx_repositories_user_id` ON `repositories` (`user_id`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_config_id` ON `repositories` (`config_id`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_status` ON `repositories` (`status`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_owner` ON `repositories` (`owner`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_organization` ON `repositories` (`organization`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_is_fork` ON `repositories` (`is_fork`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_is_starred` ON `repositories` (`is_starred`);
--> statement-breakpoint
CREATE INDEX `idx_repositories_user_imported_at` ON `repositories` (`user_id`,`imported_at`);
--> statement-breakpoint
CREATE UNIQUE INDEX `uniq_repositories_user_full_name` ON `repositories` (`user_id`,`full_name`);
--> statement-breakpoint
CREATE UNIQUE INDEX `uniq_repositories_user_normalized_full_name` ON `repositories` (`user_id`,`normalized_full_name`);
9
drizzle/0010_mirrored_location_index.sql
Normal file
9
drizzle/0010_mirrored_location_index.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
-- Migration 0010: indexes over (user_id, mirrored_location).
-- Non-unique index speeds up the lookups performed by name-collision
-- detection.
CREATE INDEX IF NOT EXISTS `idx_repositories_mirrored_location` ON `repositories` (`user_id`, `mirrored_location`);

-- Unique partial index: no two repositories owned by the same user may
-- claim the same non-empty mirroredLocation. This closes a race during
-- concurrent batch mirroring of starred repos that share a name.
CREATE UNIQUE INDEX IF NOT EXISTS `uniq_repositories_user_mirrored_location`
ON `repositories` (`user_id`, `mirrored_location`)
WHERE `mirrored_location` != '';
1
drizzle/0011_notification_config.sql
Normal file
1
drizzle/0011_notification_config.sql
Normal file
@@ -0,0 +1 @@
|
||||
-- Migration 0011: add per-user notification settings to `configs` as a
-- JSON text column. The default disables notifications entirely and, once
-- enabled, alerts only on sync errors.
ALTER TABLE `configs` ADD `notification_config` text DEFAULT '{"enabled":false,"provider":"ntfy","notifyOnSyncError":true,"notifyOnSyncSuccess":false,"notifyOnNewRepo":false}' NOT NULL;
2022
drizzle/meta/0009_snapshot.json
Normal file
2022
drizzle/meta/0009_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
2030
drizzle/meta/0011_snapshot.json
Normal file
2030
drizzle/meta/0011_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -64,6 +64,27 @@
|
||||
"when": 1761802056073,
|
||||
"tag": "0008_serious_thena",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 9,
|
||||
"version": "6",
|
||||
"when": 1773542995732,
|
||||
"tag": "0009_nervous_tyger_tiger",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 10,
|
||||
"version": "6",
|
||||
"when": 1774054800000,
|
||||
"tag": "0010_mirrored_location_index",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 11,
|
||||
"version": "6",
|
||||
"when": 1774058400000,
|
||||
"tag": "0011_notification_config",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
170
flake.lock
generated
Normal file
170
flake.lock
generated
Normal file
@@ -0,0 +1,170 @@
|
||||
{
|
||||
"nodes": {
|
||||
"bun2nix": {
|
||||
"inputs": {
|
||||
"flake-parts": "flake-parts",
|
||||
"import-tree": "import-tree",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"systems": "systems",
|
||||
"treefmt-nix": "treefmt-nix"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1770895533,
|
||||
"narHash": "sha256-v3QaK9ugy9bN9RXDnjw0i2OifKmz2NnKM82agtqm/UY=",
|
||||
"owner": "nix-community",
|
||||
"repo": "bun2nix",
|
||||
"rev": "c843f477b15f51151f8c6bcc886954699440a6e1",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "bun2nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-parts": {
|
||||
"inputs": {
|
||||
"nixpkgs-lib": "nixpkgs-lib"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1769996383,
|
||||
"narHash": "sha256-AnYjnFWgS49RlqX7LrC4uA+sCCDBj0Ry/WOJ5XWAsa0=",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "flake-parts",
|
||||
"rev": "57928607ea566b5db3ad13af0e57e921e6b12381",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "hercules-ci",
|
||||
"repo": "flake-parts",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems_2"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"import-tree": {
|
||||
"locked": {
|
||||
"lastModified": 1763762820,
|
||||
"narHash": "sha256-ZvYKbFib3AEwiNMLsejb/CWs/OL/srFQ8AogkebEPF0=",
|
||||
"owner": "vic",
|
||||
"repo": "import-tree",
|
||||
"rev": "3c23749d8013ec6daa1d7255057590e9ca726646",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "vic",
|
||||
"repo": "import-tree",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1761672384,
|
||||
"narHash": "sha256-o9KF3DJL7g7iYMZq9SWgfS1BFlNbsm6xplRjVlOCkXI=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "08dacfca559e1d7da38f3cf05f1f45ee9bfd213c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-lib": {
|
||||
"locked": {
|
||||
"lastModified": 1769909678,
|
||||
"narHash": "sha256-cBEymOf4/o3FD5AZnzC3J9hLbiZ+QDT/KDuyHXVJOpM=",
|
||||
"owner": "nix-community",
|
||||
"repo": "nixpkgs.lib",
|
||||
"rev": "72716169fe93074c333e8d0173151350670b824c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "nixpkgs.lib",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"bun2nix": "bun2nix",
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems_2": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"treefmt-nix": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"bun2nix",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1770228511,
|
||||
"narHash": "sha256-wQ6NJSuFqAEmIg2VMnLdCnUc0b7vslUohqqGGD+Fyxk=",
|
||||
"owner": "numtide",
|
||||
"repo": "treefmt-nix",
|
||||
"rev": "337a4fe074be1042a35086f15481d763b8ddc0e7",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "treefmt-nix",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
468
flake.nix
Normal file
468
flake.nix
Normal file
@@ -0,0 +1,468 @@
|
||||
{
|
||||
description = "Gitea Mirror - Self-hosted GitHub to Gitea mirroring service";
|
||||
|
||||
nixConfig = {
|
||||
extra-substituters = [
|
||||
"https://nix-community.cachix.org"
|
||||
];
|
||||
extra-trusted-public-keys = [
|
||||
"nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs="
|
||||
];
|
||||
};
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
bun2nix = {
|
||||
url = "github:nix-community/bun2nix";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, flake-utils, bun2nix }:
|
||||
let
|
||||
forEachSystem = flake-utils.lib.eachDefaultSystem;
|
||||
in
|
||||
(forEachSystem (system:
|
||||
let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
b2n = bun2nix.packages.${system}.default;
|
||||
|
||||
# Build the application
|
||||
gitea-mirror = pkgs.stdenv.mkDerivation {
|
||||
pname = "gitea-mirror";
|
||||
version = "3.9.6";
|
||||
|
||||
src = ./.;
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkgs.bun
|
||||
b2n.hook
|
||||
];
|
||||
|
||||
buildInputs = with pkgs; [
|
||||
sqlite
|
||||
openssl
|
||||
];
|
||||
|
||||
bunDeps = b2n.fetchBunDeps {
|
||||
bunNix = ./bun.nix;
|
||||
};
|
||||
|
||||
# bun2nix defaults to isolated installs on Linux, which can be
|
||||
# very slow in CI for larger dependency trees and may appear stuck.
|
||||
# Use hoisted linker and fail fast on lockfile drift.
|
||||
bunInstallFlags = if pkgs.stdenv.hostPlatform.isDarwin then [
|
||||
"--linker=hoisted"
|
||||
"--backend=copyfile"
|
||||
"--frozen-lockfile"
|
||||
"--no-progress"
|
||||
] else [
|
||||
"--linker=hoisted"
|
||||
"--frozen-lockfile"
|
||||
"--no-progress"
|
||||
];
|
||||
|
||||
# Let the bun2nix hook handle dependency installation via the
|
||||
# pre-fetched cache, but skip its default build/check/install
|
||||
# phases since we have custom ones.
|
||||
dontUseBunBuild = true;
|
||||
dontUseBunCheck = true;
|
||||
dontUseBunInstall = true;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
export HOME=$TMPDIR
|
||||
|
||||
# The bun2nix cache is in the read-only Nix store, but bunx/astro
|
||||
# may try to write to it at build time. Copy the cache to a
|
||||
# writable location.
|
||||
if [ -n "$BUN_INSTALL_CACHE_DIR" ] && [ -d "$BUN_INSTALL_CACHE_DIR" ]; then
|
||||
WRITABLE_CACHE="$TMPDIR/bun-cache"
|
||||
cp -rL "$BUN_INSTALL_CACHE_DIR" "$WRITABLE_CACHE" 2>/dev/null || true
|
||||
chmod -R u+w "$WRITABLE_CACHE" 2>/dev/null || true
|
||||
export BUN_INSTALL_CACHE_DIR="$WRITABLE_CACHE"
|
||||
fi
|
||||
|
||||
# Build the Astro application
|
||||
bun run build
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
mkdir -p $out/lib/gitea-mirror
|
||||
mkdir -p $out/bin
|
||||
|
||||
# Copy the built application
|
||||
cp -r dist $out/lib/gitea-mirror/
|
||||
cp -r node_modules $out/lib/gitea-mirror/
|
||||
cp -r scripts $out/lib/gitea-mirror/
|
||||
cp -r src $out/lib/gitea-mirror/
|
||||
cp -r drizzle $out/lib/gitea-mirror/
|
||||
cp package.json $out/lib/gitea-mirror/
|
||||
cp tsconfig.json $out/lib/gitea-mirror/
|
||||
|
||||
# Create entrypoint script that matches Docker behavior
|
||||
cat > $out/bin/gitea-mirror <<'EOF'
|
||||
#!${pkgs.bash}/bin/bash
|
||||
set -e
|
||||
|
||||
# === DEFAULT CONFIGURATION ===
|
||||
# These match docker-compose.alt.yml defaults
|
||||
export DATA_DIR=''${DATA_DIR:-"$HOME/.local/share/gitea-mirror"}
|
||||
export DATABASE_URL=''${DATABASE_URL:-"file:$DATA_DIR/gitea-mirror.db"}
|
||||
export HOST=''${HOST:-"0.0.0.0"}
|
||||
export PORT=''${PORT:-"4321"}
|
||||
export NODE_ENV=''${NODE_ENV:-"production"}
|
||||
|
||||
# Better Auth configuration
|
||||
export BETTER_AUTH_URL=''${BETTER_AUTH_URL:-"http://localhost:4321"}
|
||||
export BETTER_AUTH_TRUSTED_ORIGINS=''${BETTER_AUTH_TRUSTED_ORIGINS:-"http://localhost:4321"}
|
||||
export PUBLIC_BETTER_AUTH_URL=''${PUBLIC_BETTER_AUTH_URL:-"http://localhost:4321"}
|
||||
|
||||
# Concurrency settings (match docker-compose.alt.yml)
|
||||
export MIRROR_ISSUE_CONCURRENCY=''${MIRROR_ISSUE_CONCURRENCY:-3}
|
||||
export MIRROR_PULL_REQUEST_CONCURRENCY=''${MIRROR_PULL_REQUEST_CONCURRENCY:-5}
|
||||
|
||||
# Create data directory
|
||||
mkdir -p "$DATA_DIR"
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
APP_DIR="$SCRIPT_DIR/../lib/gitea-mirror"
|
||||
|
||||
# The app uses process.cwd()/data for the database, but the Nix store
|
||||
# is read-only. Create a writable working directory with symlinks to
|
||||
# the app files and a real data directory.
|
||||
WORK_DIR="$DATA_DIR/.workdir"
|
||||
mkdir -p "$WORK_DIR"
|
||||
for item in dist node_modules scripts src drizzle package.json tsconfig.json; do
|
||||
ln -sfn "$APP_DIR/$item" "$WORK_DIR/$item"
|
||||
done
|
||||
ln -sfn "$DATA_DIR" "$WORK_DIR/data"
|
||||
cd "$WORK_DIR"
|
||||
|
||||
# === AUTO-GENERATE SECRETS ===
|
||||
BETTER_AUTH_SECRET_FILE="$DATA_DIR/.better_auth_secret"
|
||||
ENCRYPTION_SECRET_FILE="$DATA_DIR/.encryption_secret"
|
||||
|
||||
# Generate BETTER_AUTH_SECRET if not provided
|
||||
if [ -z "$BETTER_AUTH_SECRET" ]; then
|
||||
if [ -f "$BETTER_AUTH_SECRET_FILE" ]; then
|
||||
echo "Using previously generated BETTER_AUTH_SECRET"
|
||||
export BETTER_AUTH_SECRET=$(cat "$BETTER_AUTH_SECRET_FILE")
|
||||
else
|
||||
echo "Generating a secure random BETTER_AUTH_SECRET"
|
||||
GENERATED_SECRET=$(${pkgs.openssl}/bin/openssl rand -hex 32)
|
||||
export BETTER_AUTH_SECRET="$GENERATED_SECRET"
|
||||
echo "$GENERATED_SECRET" > "$BETTER_AUTH_SECRET_FILE"
|
||||
chmod 600 "$BETTER_AUTH_SECRET_FILE"
|
||||
echo "✅ BETTER_AUTH_SECRET generated and saved to $BETTER_AUTH_SECRET_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Generate ENCRYPTION_SECRET if not provided
|
||||
if [ -z "$ENCRYPTION_SECRET" ]; then
|
||||
if [ -f "$ENCRYPTION_SECRET_FILE" ]; then
|
||||
echo "Using previously generated ENCRYPTION_SECRET"
|
||||
export ENCRYPTION_SECRET=$(cat "$ENCRYPTION_SECRET_FILE")
|
||||
else
|
||||
echo "Generating a secure random ENCRYPTION_SECRET"
|
||||
GENERATED_ENCRYPTION_SECRET=$(${pkgs.openssl}/bin/openssl rand -base64 36)
|
||||
export ENCRYPTION_SECRET="$GENERATED_ENCRYPTION_SECRET"
|
||||
echo "$GENERATED_ENCRYPTION_SECRET" > "$ENCRYPTION_SECRET_FILE"
|
||||
chmod 600 "$ENCRYPTION_SECRET_FILE"
|
||||
echo "✅ ENCRYPTION_SECRET generated and saved to $ENCRYPTION_SECRET_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# === DATABASE INITIALIZATION ===
|
||||
DB_PATH=$(echo "$DATABASE_URL" | ${pkgs.gnused}/bin/sed 's|^file:||')
|
||||
if [ ! -f "$DB_PATH" ]; then
|
||||
echo "Database not found. It will be created and initialized via Drizzle migrations on first app startup..."
|
||||
touch "$DB_PATH"
|
||||
else
|
||||
echo "Database already exists, Drizzle will check for pending migrations on startup..."
|
||||
fi
|
||||
|
||||
# === STARTUP SCRIPTS ===
|
||||
# Initialize configuration from environment variables
|
||||
echo "Checking for environment configuration..."
|
||||
if [ -f "scripts/startup-env-config.ts" ]; then
|
||||
echo "Loading configuration from environment variables..."
|
||||
${pkgs.bun}/bin/bun scripts/startup-env-config.ts && \
|
||||
echo "✅ Environment configuration loaded successfully" || \
|
||||
echo "⚠️ Environment configuration loading completed with warnings"
|
||||
fi
|
||||
|
||||
# Run startup recovery
|
||||
echo "Running startup recovery..."
|
||||
if [ -f "scripts/startup-recovery.ts" ]; then
|
||||
${pkgs.bun}/bin/bun scripts/startup-recovery.ts --timeout=30000 && \
|
||||
echo "✅ Startup recovery completed successfully" || \
|
||||
echo "⚠️ Startup recovery completed with warnings"
|
||||
fi
|
||||
|
||||
# Run repository status repair
|
||||
echo "Running repository status repair..."
|
||||
if [ -f "scripts/repair-mirrored-repos.ts" ]; then
|
||||
${pkgs.bun}/bin/bun scripts/repair-mirrored-repos.ts --startup && \
|
||||
echo "✅ Repository status repair completed successfully" || \
|
||||
echo "⚠️ Repository status repair completed with warnings"
|
||||
fi
|
||||
|
||||
# === SIGNAL HANDLING ===
|
||||
shutdown_handler() {
|
||||
echo "🛑 Received shutdown signal, forwarding to application..."
|
||||
if [ ! -z "$APP_PID" ]; then
|
||||
kill -TERM "$APP_PID" 2>/dev/null || true
|
||||
wait "$APP_PID" 2>/dev/null || true
|
||||
fi
|
||||
exit 0
|
||||
}
|
||||
|
||||
trap 'shutdown_handler' TERM INT HUP
|
||||
|
||||
# === START APPLICATION ===
|
||||
echo "Starting Gitea Mirror..."
|
||||
echo "Access the web interface at $BETTER_AUTH_URL"
|
||||
${pkgs.bun}/bin/bun dist/server/entry.mjs &
|
||||
APP_PID=$!
|
||||
|
||||
wait "$APP_PID"
|
||||
EOF
|
||||
chmod +x $out/bin/gitea-mirror
|
||||
|
||||
# Create database management helper
|
||||
cat > $out/bin/gitea-mirror-db <<'EOF'
|
||||
#!${pkgs.bash}/bin/bash
|
||||
export DATA_DIR=''${DATA_DIR:-"$HOME/.local/share/gitea-mirror"}
|
||||
mkdir -p "$DATA_DIR"
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
cd "$SCRIPT_DIR/../lib/gitea-mirror"
|
||||
exec ${pkgs.bun}/bin/bun scripts/manage-db.ts "$@"
|
||||
EOF
|
||||
chmod +x $out/bin/gitea-mirror-db
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = with pkgs.lib; {
|
||||
description = "Self-hosted GitHub to Gitea mirroring service";
|
||||
homepage = "https://github.com/RayLabsHQ/gitea-mirror";
|
||||
license = licenses.mit;
|
||||
maintainers = [ ];
|
||||
platforms = platforms.linux ++ platforms.darwin;
|
||||
};
|
||||
};
|
||||
|
||||
in
|
||||
{
|
||||
packages = {
|
||||
default = gitea-mirror;
|
||||
gitea-mirror = gitea-mirror;
|
||||
};
|
||||
|
||||
# Development shell
|
||||
devShells.default = pkgs.mkShell {
|
||||
buildInputs = with pkgs; [
|
||||
bun
|
||||
sqlite
|
||||
openssl
|
||||
b2n
|
||||
];
|
||||
|
||||
shellHook = ''
|
||||
echo "🚀 Gitea Mirror development environment"
|
||||
echo ""
|
||||
echo "Quick start:"
|
||||
echo " bun install # Install dependencies"
|
||||
echo " bun run dev # Start development server"
|
||||
echo " bun run build # Build for production"
|
||||
echo ""
|
||||
echo "Nix packaging:"
|
||||
echo " bun2nix -o bun.nix # Regenerate bun.nix after dependency changes"
|
||||
echo " nix build # Build the package"
|
||||
echo ""
|
||||
echo "Database:"
|
||||
echo " bun run manage-db init # Initialize database"
|
||||
echo " bun run db:studio # Open Drizzle Studio"
|
||||
'';
|
||||
};
|
||||
|
||||
}
|
||||
)) // {
|
||||
nixosModules.default = { config, lib, pkgs, ... }:
|
||||
with lib;
|
||||
let
|
||||
cfg = config.services.gitea-mirror;
|
||||
in {
|
||||
options.services.gitea-mirror = {
|
||||
enable = mkEnableOption "Gitea Mirror service";
|
||||
|
||||
package = mkOption {
|
||||
type = types.package;
|
||||
default = self.packages.${pkgs.system}.default;
|
||||
description = "The Gitea Mirror package to use";
|
||||
};
|
||||
|
||||
dataDir = mkOption {
|
||||
type = types.path;
|
||||
default = "/var/lib/gitea-mirror";
|
||||
description = "Directory to store data and database";
|
||||
};
|
||||
|
||||
user = mkOption {
|
||||
type = types.str;
|
||||
default = "gitea-mirror";
|
||||
description = "User account under which Gitea Mirror runs";
|
||||
};
|
||||
|
||||
group = mkOption {
|
||||
type = types.str;
|
||||
default = "gitea-mirror";
|
||||
description = "Group under which Gitea Mirror runs";
|
||||
};
|
||||
|
||||
host = mkOption {
|
||||
type = types.str;
|
||||
default = "0.0.0.0";
|
||||
description = "Host to bind to";
|
||||
};
|
||||
|
||||
port = mkOption {
|
||||
type = types.port;
|
||||
default = 4321;
|
||||
description = "Port to listen on";
|
||||
};
|
||||
|
||||
betterAuthUrl = mkOption {
|
||||
type = types.str;
|
||||
default = "http://localhost:4321";
|
||||
description = "Better Auth URL (external URL of the service)";
|
||||
};
|
||||
|
||||
betterAuthTrustedOrigins = mkOption {
|
||||
type = types.str;
|
||||
default = "http://localhost:4321";
|
||||
description = "Comma-separated list of trusted origins for Better Auth";
|
||||
};
|
||||
|
||||
mirrorIssueConcurrency = mkOption {
|
||||
type = types.int;
|
||||
default = 3;
|
||||
description = "Number of concurrent issue mirror operations (set to 1 for perfect ordering)";
|
||||
};
|
||||
|
||||
mirrorPullRequestConcurrency = mkOption {
|
||||
type = types.int;
|
||||
default = 5;
|
||||
description = "Number of concurrent PR mirror operations (set to 1 for perfect ordering)";
|
||||
};
|
||||
|
||||
environmentFile = mkOption {
|
||||
type = types.nullOr types.path;
|
||||
default = null;
|
||||
description = ''
|
||||
Path to file containing environment variables.
|
||||
Only needed if you want to set BETTER_AUTH_SECRET or ENCRYPTION_SECRET manually.
|
||||
Otherwise, secrets will be auto-generated and stored in the data directory.
|
||||
|
||||
Example:
|
||||
BETTER_AUTH_SECRET=your-32-character-secret-here
|
||||
ENCRYPTION_SECRET=your-encryption-secret-here
|
||||
'';
|
||||
};
|
||||
|
||||
openFirewall = mkOption {
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = "Open the firewall for the specified port";
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
users.users.${cfg.user} = {
|
||||
isSystemUser = true;
|
||||
group = cfg.group;
|
||||
home = cfg.dataDir;
|
||||
createHome = true;
|
||||
};
|
||||
|
||||
users.groups.${cfg.group} = {};
|
||||
|
||||
systemd.services.gitea-mirror = {
|
||||
description = "Gitea Mirror - GitHub to Gitea mirroring service";
|
||||
after = [ "network.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
environment = {
|
||||
DATA_DIR = cfg.dataDir;
|
||||
DATABASE_URL = "file:${cfg.dataDir}/gitea-mirror.db";
|
||||
HOST = cfg.host;
|
||||
PORT = toString cfg.port;
|
||||
NODE_ENV = "production";
|
||||
BETTER_AUTH_URL = cfg.betterAuthUrl;
|
||||
BETTER_AUTH_TRUSTED_ORIGINS = cfg.betterAuthTrustedOrigins;
|
||||
PUBLIC_BETTER_AUTH_URL = cfg.betterAuthUrl;
|
||||
MIRROR_ISSUE_CONCURRENCY = toString cfg.mirrorIssueConcurrency;
|
||||
MIRROR_PULL_REQUEST_CONCURRENCY = toString cfg.mirrorPullRequestConcurrency;
|
||||
};
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
User = cfg.user;
|
||||
Group = cfg.group;
|
||||
ExecStart = "${cfg.package}/bin/gitea-mirror";
|
||||
Restart = "always";
|
||||
RestartSec = "10s";
|
||||
|
||||
# Security hardening
|
||||
NoNewPrivileges = true;
|
||||
PrivateTmp = true;
|
||||
ProtectSystem = "strict";
|
||||
ProtectHome = true;
|
||||
ReadWritePaths = [ cfg.dataDir ];
|
||||
|
||||
# Graceful shutdown
|
||||
TimeoutStopSec = "30s";
|
||||
KillMode = "mixed";
|
||||
KillSignal = "SIGTERM";
|
||||
} // optionalAttrs (cfg.environmentFile != null) {
|
||||
EnvironmentFile = cfg.environmentFile;
|
||||
};
|
||||
};
|
||||
|
||||
# Health check timer (optional monitoring)
|
||||
systemd.timers.gitea-mirror-healthcheck = {
|
||||
description = "Gitea Mirror health check timer";
|
||||
wantedBy = [ "timers.target" ];
|
||||
timerConfig = {
|
||||
OnBootSec = "5min";
|
||||
OnUnitActiveSec = "5min";
|
||||
};
|
||||
};
|
||||
|
||||
systemd.services.gitea-mirror-healthcheck = {
|
||||
description = "Gitea Mirror health check";
|
||||
after = [ "gitea-mirror.service" ];
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
ExecStart = "${pkgs.bash}/bin/bash -c '${pkgs.curl}/bin/curl -f http://127.0.0.1:${toString cfg.port}/api/health || true'";
|
||||
User = "nobody";
|
||||
};
|
||||
};
|
||||
|
||||
networking.firewall = mkIf cfg.openFirewall {
|
||||
allowedTCPPorts = [ cfg.port ];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# Overlay for adding to nixpkgs
|
||||
overlays.default = final: prev: {
|
||||
gitea-mirror = self.packages.${final.system}.default;
|
||||
};
|
||||
};
|
||||
}
|
||||
96
package.json
96
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "gitea-mirror",
|
||||
"type": "module",
|
||||
"version": "3.9.0",
|
||||
"version": "3.13.3",
|
||||
"engines": {
|
||||
"bun": ">=1.2.9"
|
||||
},
|
||||
@@ -16,6 +16,7 @@
|
||||
"check-db": "bun scripts/manage-db.ts check",
|
||||
"fix-db": "bun scripts/manage-db.ts fix",
|
||||
"reset-users": "bun scripts/manage-db.ts reset-users",
|
||||
"reset-password": "bun scripts/manage-db.ts reset-password",
|
||||
"db:generate": "bun drizzle-kit generate",
|
||||
"db:migrate": "bun drizzle-kit migrate",
|
||||
"db:push": "bun drizzle-kit push",
|
||||
@@ -33,83 +34,96 @@
|
||||
"start": "bun dist/server/entry.mjs",
|
||||
"start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
|
||||
"test": "bun test",
|
||||
"test:migrations": "bun scripts/validate-migrations.ts",
|
||||
"test:watch": "bun test --watch",
|
||||
"test:coverage": "bun test --coverage",
|
||||
"test:e2e": "bash tests/e2e/run-e2e.sh",
|
||||
"test:e2e:ci": "bash tests/e2e/run-e2e.sh --ci",
|
||||
"test:e2e:keep": "bash tests/e2e/run-e2e.sh --keep",
|
||||
"test:e2e:cleanup": "bash tests/e2e/cleanup.sh",
|
||||
"astro": "bunx --bun astro"
|
||||
},
|
||||
"overrides": {
|
||||
"@esbuild-kit/esm-loader": "npm:tsx@^4.20.5",
|
||||
"devalue": "^5.3.2"
|
||||
"@esbuild-kit/esm-loader": "npm:tsx@^4.21.0",
|
||||
"devalue": "^5.6.4",
|
||||
"fast-xml-parser": "^5.5.5",
|
||||
"node-forge": "^1.3.3",
|
||||
"svgo": "^4.0.1",
|
||||
"rollup": ">=4.59.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@astrojs/check": "^0.9.5",
|
||||
"@astrojs/mdx": "4.3.7",
|
||||
"@astrojs/node": "9.5.0",
|
||||
"@astrojs/react": "^4.4.0",
|
||||
"@better-auth/sso": "1.4.0-beta.12",
|
||||
"@octokit/plugin-throttling": "^11.0.2",
|
||||
"@octokit/rest": "^22.0.0",
|
||||
"@astrojs/check": "^0.9.7",
|
||||
"@astrojs/mdx": "5.0.0",
|
||||
"@astrojs/node": "10.0.1",
|
||||
"@astrojs/react": "^5.0.0",
|
||||
"@better-auth/sso": "1.5.5",
|
||||
"@octokit/plugin-throttling": "^11.0.3",
|
||||
"@octokit/rest": "^22.0.1",
|
||||
"@radix-ui/react-accordion": "^1.2.12",
|
||||
"@radix-ui/react-avatar": "^1.1.10",
|
||||
"@radix-ui/react-avatar": "^1.1.11",
|
||||
"@radix-ui/react-checkbox": "^1.3.3",
|
||||
"@radix-ui/react-collapsible": "^1.1.12",
|
||||
"@radix-ui/react-dialog": "^1.1.15",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.16",
|
||||
"@radix-ui/react-hover-card": "^1.1.15",
|
||||
"@radix-ui/react-label": "^2.1.7",
|
||||
"@radix-ui/react-label": "^2.1.8",
|
||||
"@radix-ui/react-popover": "^1.1.15",
|
||||
"@radix-ui/react-progress": "^1.1.7",
|
||||
"@radix-ui/react-progress": "^1.1.8",
|
||||
"@radix-ui/react-radio-group": "^1.3.8",
|
||||
"@radix-ui/react-scroll-area": "^1.2.10",
|
||||
"@radix-ui/react-select": "^2.2.6",
|
||||
"@radix-ui/react-separator": "^1.1.7",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-separator": "^1.1.8",
|
||||
"@radix-ui/react-slot": "^1.2.4",
|
||||
"@radix-ui/react-switch": "^1.2.6",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tailwindcss/vite": "^4.1.15",
|
||||
"@tanstack/react-virtual": "^3.13.12",
|
||||
"@tailwindcss/vite": "^4.2.1",
|
||||
"@tanstack/react-table": "^8.21.3",
|
||||
"@tanstack/react-virtual": "^3.13.19",
|
||||
"@types/canvas-confetti": "^1.9.0",
|
||||
"@types/react": "^19.2.2",
|
||||
"@types/react-dom": "^19.2.2",
|
||||
"astro": "^5.14.8",
|
||||
"bcryptjs": "^3.0.2",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"astro": "^6.0.4",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"better-auth": "1.5.5",
|
||||
"buffer": "^6.0.3",
|
||||
"better-auth": "1.4.0-beta.13",
|
||||
"canvas-confetti": "^1.9.3",
|
||||
"canvas-confetti": "^1.9.4",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"dotenv": "^17.2.3",
|
||||
"drizzle-orm": "^0.44.6",
|
||||
"dotenv": "^17.3.1",
|
||||
"drizzle-orm": "^0.45.1",
|
||||
"fuse.js": "^7.1.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"lucide-react": "^0.546.0",
|
||||
"jsonwebtoken": "^9.0.3",
|
||||
"lucide-react": "^0.577.0",
|
||||
"nanoid": "^5.1.6",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "^19.2.4",
|
||||
"react-dom": "^19.2.4",
|
||||
"react-icons": "^5.5.0",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"tailwindcss": "^4.1.15",
|
||||
"tailwind-merge": "^3.5.0",
|
||||
"tailwindcss": "^4.2.1",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "^5.9.3",
|
||||
"uuid": "^13.0.0",
|
||||
"vaul": "^1.1.2",
|
||||
"zod": "^4.1.12"
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.58.2",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@types/bcryptjs": "^3.0.0",
|
||||
"@types/bun": "^1.3.0",
|
||||
"@types/bun": "^1.3.10",
|
||||
"@types/jsonwebtoken": "^9.0.10",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@vitejs/plugin-react": "^5.0.4",
|
||||
"drizzle-kit": "^0.31.5",
|
||||
"jsdom": "^26.1.0",
|
||||
"tsx": "^4.20.6",
|
||||
"vitest": "^3.2.4"
|
||||
"@types/node": "^25.5.0",
|
||||
"@types/uuid": "^11.0.0",
|
||||
"@vitejs/plugin-react": "^6.0.1",
|
||||
"drizzle-kit": "^0.31.9",
|
||||
"jsdom": "^28.1.0",
|
||||
"tsx": "^4.21.0",
|
||||
"vitest": "^4.1.0"
|
||||
},
|
||||
"packageManager": "bun@1.2.23"
|
||||
"packageManager": "bun@1.3.10"
|
||||
}
|
||||
|
||||
@@ -4,9 +4,9 @@ import { Database } from "bun:sqlite";
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite";
|
||||
import { migrate } from "drizzle-orm/bun-sqlite/migrator";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { users, configs, repositories, organizations, mirrorJobs, events } from "../src/lib/db/schema";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { users, configs, repositories, organizations, mirrorJobs, events, accounts, sessions } from "../src/lib/db/schema";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { hashPassword } from "better-auth/crypto";
|
||||
|
||||
// Command line arguments
|
||||
const args = process.argv.slice(2);
|
||||
@@ -194,6 +194,92 @@ async function fixDatabase() {
|
||||
console.log("✅ Database location fixed");
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset a single user's password (admin recovery flow)
|
||||
*/
|
||||
async function resetPassword() {
|
||||
const emailArg = args.find((arg) => arg.startsWith("--email="));
|
||||
const passwordArg = args.find((arg) => arg.startsWith("--new-password="));
|
||||
const email = emailArg?.split("=")[1]?.trim().toLowerCase();
|
||||
const newPassword = passwordArg?.split("=")[1];
|
||||
|
||||
if (!email || !newPassword) {
|
||||
console.log("❌ Missing required arguments");
|
||||
console.log("Usage:");
|
||||
console.log(" bun run manage-db reset-password --email=user@example.com --new-password='new-secure-password'");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (newPassword.length < 8) {
|
||||
console.log("❌ Password must be at least 8 characters");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(dbPath)) {
|
||||
console.log("❌ Database does not exist");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const sqlite = new Database(dbPath);
|
||||
const db = drizzle({ client: sqlite });
|
||||
|
||||
try {
|
||||
const user = await db.query.users.findFirst({
|
||||
where: eq(users.email, email),
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
console.log(`❌ No user found for email: ${email}`);
|
||||
sqlite.close();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const hashedPassword = await hashPassword(newPassword);
|
||||
const now = new Date();
|
||||
|
||||
const credentialAccount = await db.query.accounts.findFirst({
|
||||
where: and(
|
||||
eq(accounts.userId, user.id),
|
||||
eq(accounts.providerId, "credential"),
|
||||
),
|
||||
});
|
||||
|
||||
if (credentialAccount) {
|
||||
await db
|
||||
.update(accounts)
|
||||
.set({
|
||||
password: hashedPassword,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(accounts.id, credentialAccount.id));
|
||||
} else {
|
||||
await db.insert(accounts).values({
|
||||
id: uuidv4(),
|
||||
accountId: user.id,
|
||||
userId: user.id,
|
||||
providerId: "credential",
|
||||
password: hashedPassword,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
});
|
||||
}
|
||||
|
||||
const deletedSessions = await db
|
||||
.delete(sessions)
|
||||
.where(eq(sessions.userId, user.id))
|
||||
.returning({ id: sessions.id });
|
||||
|
||||
console.log(`✅ Password reset for ${email}`);
|
||||
console.log(`🔒 Cleared ${deletedSessions.length} active session(s)`);
|
||||
|
||||
sqlite.close();
|
||||
} catch (error) {
|
||||
console.error("❌ Error resetting password:", error);
|
||||
sqlite.close();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Auto mode - check and initialize if needed
|
||||
*/
|
||||
@@ -224,6 +310,9 @@ switch (command) {
|
||||
case "cleanup":
|
||||
await cleanupDatabase();
|
||||
break;
|
||||
case "reset-password":
|
||||
await resetPassword();
|
||||
break;
|
||||
case "auto":
|
||||
await autoMode();
|
||||
break;
|
||||
@@ -233,7 +322,8 @@ switch (command) {
|
||||
console.log(" check - Check database status");
|
||||
console.log(" fix - Fix database location issues");
|
||||
console.log(" reset-users - Remove all users and related data");
|
||||
console.log(" reset-password - Reset one user's password and clear sessions");
|
||||
console.log(" cleanup - Remove all database files");
|
||||
console.log(" auto - Auto initialize if needed");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,33 +15,40 @@ import { repoStatusEnum } from "@/types/Repository";
|
||||
const isDryRun = process.argv.includes("--dry-run");
|
||||
const specificRepo = process.argv.find(arg => arg.startsWith("--repo-name="))?.split("=")[1];
|
||||
const isStartupMode = process.argv.includes("--startup");
|
||||
const requestTimeoutMs = parsePositiveInteger(process.env.GITEA_REPAIR_REQUEST_TIMEOUT_MS, 15000);
|
||||
const progressInterval = parsePositiveInteger(process.env.GITEA_REPAIR_PROGRESS_INTERVAL, 100);
|
||||
|
||||
async function checkRepoInGitea(config: any, owner: string, repoName: string): Promise<boolean> {
|
||||
try {
|
||||
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
|
||||
return false;
|
||||
}
|
||||
type GiteaLookupResult = {
|
||||
exists: boolean;
|
||||
details: any | null;
|
||||
timedOut: boolean;
|
||||
error: string | null;
|
||||
};
|
||||
|
||||
const response = await fetch(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${owner}/${repoName}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `token ${config.giteaConfig.token}`,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return response.ok;
|
||||
} catch (error) {
|
||||
console.error(`Error checking repo ${owner}/${repoName} in Gitea:`, error);
|
||||
return false;
|
||||
function parsePositiveInteger(value: string | undefined, fallback: number): number {
|
||||
const parsed = Number.parseInt(value ?? "", 10);
|
||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
||||
return fallback;
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
async function getRepoDetailsFromGitea(config: any, owner: string, repoName: string): Promise<any> {
|
||||
function isTimeoutError(error: unknown): boolean {
|
||||
if (!(error instanceof Error)) {
|
||||
return false;
|
||||
}
|
||||
return error.name === "TimeoutError" || error.name === "AbortError";
|
||||
}
|
||||
|
||||
async function getRepoDetailsFromGitea(config: any, owner: string, repoName: string): Promise<GiteaLookupResult> {
|
||||
try {
|
||||
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
|
||||
return null;
|
||||
return {
|
||||
exists: false,
|
||||
details: null,
|
||||
timedOut: false,
|
||||
error: "Missing Gitea URL or token in config",
|
||||
};
|
||||
}
|
||||
|
||||
const response = await fetch(
|
||||
@@ -50,16 +57,41 @@ async function getRepoDetailsFromGitea(config: any, owner: string, repoName: str
|
||||
headers: {
|
||||
Authorization: `token ${config.giteaConfig.token}`,
|
||||
},
|
||||
signal: AbortSignal.timeout(requestTimeoutMs),
|
||||
}
|
||||
);
|
||||
|
||||
if (response.ok) {
|
||||
return await response.json();
|
||||
return {
|
||||
exists: true,
|
||||
details: await response.json(),
|
||||
timedOut: false,
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
return null;
|
||||
|
||||
if (response.status === 404) {
|
||||
return {
|
||||
exists: false,
|
||||
details: null,
|
||||
timedOut: false,
|
||||
error: null,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
exists: false,
|
||||
details: null,
|
||||
timedOut: false,
|
||||
error: `Gitea API returned HTTP ${response.status}`,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error getting repo details for ${owner}/${repoName}:`, error);
|
||||
return null;
|
||||
return {
|
||||
exists: false,
|
||||
details: null,
|
||||
timedOut: isTimeoutError(error),
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -99,6 +131,8 @@ async function repairMirroredRepositories() {
|
||||
.from(repositories)
|
||||
.where(whereConditions);
|
||||
|
||||
const totalRepos = repos.length;
|
||||
|
||||
if (repos.length === 0) {
|
||||
if (!isStartupMode) {
|
||||
console.log("✅ No repositories found that need repair");
|
||||
@@ -109,13 +143,25 @@ async function repairMirroredRepositories() {
|
||||
if (!isStartupMode) {
|
||||
console.log(`📋 Found ${repos.length} repositories to check:`);
|
||||
console.log("");
|
||||
} else {
|
||||
console.log(`Checking ${totalRepos} repositories for status inconsistencies...`);
|
||||
console.log(`Request timeout: ${requestTimeoutMs}ms | Progress interval: every ${progressInterval} repositories`);
|
||||
}
|
||||
|
||||
const startedAt = Date.now();
|
||||
const configCache = new Map<string, any>();
|
||||
let checkedCount = 0;
|
||||
let repairedCount = 0;
|
||||
let skippedCount = 0;
|
||||
let errorCount = 0;
|
||||
let timeoutCount = 0;
|
||||
let giteaErrorCount = 0;
|
||||
let giteaErrorSamples = 0;
|
||||
let startupSkipWarningCount = 0;
|
||||
|
||||
for (const repo of repos) {
|
||||
checkedCount++;
|
||||
|
||||
if (!isStartupMode) {
|
||||
console.log(`🔍 Checking repository: ${repo.name}`);
|
||||
console.log(` Current status: ${repo.status}`);
|
||||
@@ -124,13 +170,29 @@ async function repairMirroredRepositories() {
|
||||
|
||||
try {
|
||||
// Get user configuration
|
||||
const config = await db
|
||||
.select()
|
||||
.from(configs)
|
||||
.where(eq(configs.id, repo.configId))
|
||||
.limit(1);
|
||||
const configKey = String(repo.configId);
|
||||
let userConfig = configCache.get(configKey);
|
||||
|
||||
if (config.length === 0) {
|
||||
if (!userConfig) {
|
||||
const config = await db
|
||||
.select()
|
||||
.from(configs)
|
||||
.where(eq(configs.id, repo.configId))
|
||||
.limit(1);
|
||||
|
||||
if (config.length === 0) {
|
||||
if (!isStartupMode) {
|
||||
console.log(` ❌ No configuration found for repository`);
|
||||
}
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
userConfig = config[0];
|
||||
configCache.set(configKey, userConfig);
|
||||
}
|
||||
|
||||
if (!userConfig) {
|
||||
if (!isStartupMode) {
|
||||
console.log(` ❌ No configuration found for repository`);
|
||||
}
|
||||
@@ -138,7 +200,6 @@ async function repairMirroredRepositories() {
|
||||
continue;
|
||||
}
|
||||
|
||||
const userConfig = config[0];
|
||||
const giteaUsername = userConfig.giteaConfig?.defaultOwner;
|
||||
|
||||
if (!giteaUsername) {
|
||||
@@ -153,25 +214,59 @@ async function repairMirroredRepositories() {
|
||||
let existsInGitea = false;
|
||||
let actualOwner = giteaUsername;
|
||||
let giteaRepoDetails = null;
|
||||
let repoRequestTimedOut = false;
|
||||
let repoRequestErrored = false;
|
||||
|
||||
// First check user location
|
||||
existsInGitea = await checkRepoInGitea(userConfig, giteaUsername, repo.name);
|
||||
if (existsInGitea) {
|
||||
giteaRepoDetails = await getRepoDetailsFromGitea(userConfig, giteaUsername, repo.name);
|
||||
const userLookup = await getRepoDetailsFromGitea(userConfig, giteaUsername, repo.name);
|
||||
existsInGitea = userLookup.exists;
|
||||
giteaRepoDetails = userLookup.details;
|
||||
|
||||
if (userLookup.timedOut) {
|
||||
timeoutCount++;
|
||||
repoRequestTimedOut = true;
|
||||
} else if (userLookup.error) {
|
||||
giteaErrorCount++;
|
||||
repoRequestErrored = true;
|
||||
if (!isStartupMode || giteaErrorSamples < 3) {
|
||||
console.log(` ⚠️ Gitea lookup issue for ${giteaUsername}/${repo.name}: ${userLookup.error}`);
|
||||
giteaErrorSamples++;
|
||||
}
|
||||
}
|
||||
|
||||
// If not found in user location and repo has organization, check organization
|
||||
if (!existsInGitea && repo.organization) {
|
||||
existsInGitea = await checkRepoInGitea(userConfig, repo.organization, repo.name);
|
||||
const orgLookup = await getRepoDetailsFromGitea(userConfig, repo.organization, repo.name);
|
||||
existsInGitea = orgLookup.exists;
|
||||
if (existsInGitea) {
|
||||
actualOwner = repo.organization;
|
||||
giteaRepoDetails = await getRepoDetailsFromGitea(userConfig, repo.organization, repo.name);
|
||||
giteaRepoDetails = orgLookup.details;
|
||||
}
|
||||
|
||||
if (orgLookup.timedOut) {
|
||||
timeoutCount++;
|
||||
repoRequestTimedOut = true;
|
||||
} else if (orgLookup.error) {
|
||||
giteaErrorCount++;
|
||||
repoRequestErrored = true;
|
||||
if (!isStartupMode || giteaErrorSamples < 3) {
|
||||
console.log(` ⚠️ Gitea lookup issue for ${repo.organization}/${repo.name}: ${orgLookup.error}`);
|
||||
giteaErrorSamples++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!existsInGitea) {
|
||||
if (!isStartupMode) {
|
||||
console.log(` ⏭️ Repository not found in Gitea - skipping`);
|
||||
} else if (repoRequestTimedOut || repoRequestErrored) {
|
||||
if (startupSkipWarningCount < 3) {
|
||||
console.log(` ⚠️ Skipping ${repo.name}; Gitea was slow/unreachable during lookup`);
|
||||
startupSkipWarningCount++;
|
||||
if (startupSkipWarningCount === 3) {
|
||||
console.log(` ℹ️ Additional slow/unreachable lookup warnings suppressed; progress logs will continue`);
|
||||
}
|
||||
}
|
||||
}
|
||||
skippedCount++;
|
||||
continue;
|
||||
@@ -241,22 +336,43 @@ async function repairMirroredRepositories() {
|
||||
|
||||
if (!isStartupMode) {
|
||||
console.log("");
|
||||
} else if (checkedCount % progressInterval === 0 || checkedCount === totalRepos) {
|
||||
const elapsedSeconds = Math.floor((Date.now() - startedAt) / 1000);
|
||||
console.log(
|
||||
`Repair progress: ${checkedCount}/${totalRepos} checked | repaired=${repairedCount}, skipped=${skippedCount}, errors=${errorCount}, timeouts=${timeoutCount} | elapsed=${elapsedSeconds}s`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (isStartupMode) {
|
||||
// In startup mode, only log if there were repairs or errors
|
||||
const elapsedSeconds = Math.floor((Date.now() - startedAt) / 1000);
|
||||
console.log(
|
||||
`Repository repair summary: checked=${checkedCount}, repaired=${repairedCount}, skipped=${skippedCount}, errors=${errorCount}, timeouts=${timeoutCount}, elapsed=${elapsedSeconds}s`
|
||||
);
|
||||
if (repairedCount > 0) {
|
||||
console.log(`Repaired ${repairedCount} repository status inconsistencies`);
|
||||
}
|
||||
if (errorCount > 0) {
|
||||
console.log(`Warning: ${errorCount} repositories had errors during repair`);
|
||||
}
|
||||
if (timeoutCount > 0) {
|
||||
console.log(
|
||||
`Warning: ${timeoutCount} Gitea API requests timed out. Increase GITEA_REPAIR_REQUEST_TIMEOUT_MS if your Gitea instance is under heavy load.`
|
||||
);
|
||||
}
|
||||
if (giteaErrorCount > 0) {
|
||||
console.log(`Warning: ${giteaErrorCount} Gitea API requests failed with non-timeout errors.`);
|
||||
}
|
||||
} else {
|
||||
console.log("📊 Repair Summary:");
|
||||
console.log(` Checked: ${checkedCount}`);
|
||||
console.log(` Repaired: ${repairedCount}`);
|
||||
console.log(` Skipped: ${skippedCount}`);
|
||||
console.log(` Errors: ${errorCount}`);
|
||||
console.log(` Timeouts: ${timeoutCount}`);
|
||||
if (giteaErrorCount > 0) {
|
||||
console.log(` Gitea API Errors: ${giteaErrorCount}`);
|
||||
}
|
||||
|
||||
if (isDryRun && repairedCount > 0) {
|
||||
console.log("");
|
||||
|
||||
265
scripts/validate-migrations.ts
Normal file
265
scripts/validate-migrations.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { Database } from "bun:sqlite";
|
||||
import { readFileSync } from "fs";
|
||||
import path from "path";
|
||||
|
||||
type JournalEntry = {
|
||||
idx: number;
|
||||
tag: string;
|
||||
when: number;
|
||||
breakpoints: boolean;
|
||||
};
|
||||
|
||||
type Migration = {
|
||||
entry: JournalEntry;
|
||||
statements: string[];
|
||||
};
|
||||
|
||||
type UpgradeFixture = {
|
||||
seed: (db: Database) => void;
|
||||
verify: (db: Database) => void;
|
||||
};
|
||||
|
||||
type TableInfoRow = {
|
||||
cid: number;
|
||||
name: string;
|
||||
type: string;
|
||||
notnull: number;
|
||||
dflt_value: string | null;
|
||||
pk: number;
|
||||
};
|
||||
|
||||
const migrationsFolder = path.join(process.cwd(), "drizzle");
|
||||
const migrations = loadMigrations();
|
||||
const latestMigration = migrations.at(-1);
|
||||
|
||||
/**
|
||||
* Known SQLite limitations that Drizzle-kit's auto-generated migrations
|
||||
* can violate. Each rule is checked against every SQL statement.
|
||||
*/
|
||||
const SQLITE_LINT_RULES: { pattern: RegExp; message: string }[] = [
|
||||
{
|
||||
pattern: /ALTER\s+TABLE\s+\S+\s+ADD\s+(?:COLUMN\s+)?\S+[^;]*DEFAULT\s*\(/i,
|
||||
message:
|
||||
"ALTER TABLE ADD COLUMN with an expression default (e.g. DEFAULT (unixepoch())) " +
|
||||
"is not allowed in SQLite. Use the table-recreation pattern instead " +
|
||||
"(CREATE new table, INSERT SELECT, DROP old, RENAME).",
|
||||
},
|
||||
{
|
||||
pattern: /ALTER\s+TABLE\s+\S+\s+ADD\s+(?:COLUMN\s+)?\S+[^;]*DEFAULT\s+CURRENT_(TIME|DATE|TIMESTAMP)\b/i,
|
||||
message:
|
||||
"ALTER TABLE ADD COLUMN with DEFAULT CURRENT_TIME/CURRENT_DATE/CURRENT_TIMESTAMP " +
|
||||
"is not allowed in SQLite. Use the table-recreation pattern instead.",
|
||||
},
|
||||
];
|
||||
|
||||
function loadMigrations(): Migration[] {
|
||||
const journalPath = path.join(migrationsFolder, "meta", "_journal.json");
|
||||
const journal = JSON.parse(readFileSync(journalPath, "utf8")) as {
|
||||
entries: JournalEntry[];
|
||||
};
|
||||
|
||||
return journal.entries.map((entry) => {
|
||||
const migrationPath = path.join(migrationsFolder, `${entry.tag}.sql`);
|
||||
const statements = readFileSync(migrationPath, "utf8")
|
||||
.split("--> statement-breakpoint")
|
||||
.map((statement) => statement.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
return { entry, statements };
|
||||
});
|
||||
}
|
||||
|
||||
function assert(condition: unknown, message: string): asserts condition {
|
||||
if (!condition) {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
function runMigration(db: Database, migration: Migration) {
|
||||
db.run("BEGIN");
|
||||
|
||||
try {
|
||||
for (const statement of migration.statements) {
|
||||
db.run(statement);
|
||||
}
|
||||
|
||||
db.run("COMMIT");
|
||||
} catch (error) {
|
||||
try {
|
||||
db.run("ROLLBACK");
|
||||
} catch {
|
||||
// Ignore rollback errors so the original failure is preserved.
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function runMigrations(db: Database, selectedMigrations: Migration[]) {
|
||||
for (const migration of selectedMigrations) {
|
||||
runMigration(db, migration);
|
||||
}
|
||||
}
|
||||
|
||||
function seedPre0009Database(db: Database) {
|
||||
// Seed every existing table so ALTER TABLE paths run against non-empty data.
|
||||
db.run("INSERT INTO users (id, email, username, name) VALUES ('u1', 'u1@example.com', 'user1', 'User One')");
|
||||
db.run("INSERT INTO configs (id, user_id, name, github_config, gitea_config, schedule_config, cleanup_config) VALUES ('c1', 'u1', 'Default', '{}', '{}', '{}', '{}')");
|
||||
db.run("INSERT INTO accounts (id, account_id, user_id, provider_id, access_token, refresh_token, id_token, access_token_expires_at, refresh_token_expires_at, scope) VALUES ('acct1', 'acct-1', 'u1', 'github', 'access-token', 'refresh-token', 'id-token', 2000, 3000, 'repo')");
|
||||
db.run("INSERT INTO events (id, user_id, channel, payload) VALUES ('evt1', 'u1', 'sync', '{\"status\":\"queued\"}')");
|
||||
db.run("INSERT INTO mirror_jobs (id, user_id, repository_id, repository_name, status, message, timestamp) VALUES ('job1', 'u1', 'r1', 'owner/repo', 'imported', 'Imported repository', 900)");
|
||||
db.run("INSERT INTO organizations (id, user_id, config_id, name, avatar_url, public_repository_count, private_repository_count, fork_repository_count) VALUES ('org1', 'u1', 'c1', 'Example Org', 'https://example.com/org.png', 1, 0, 0)");
|
||||
db.run("INSERT INTO repositories (id, user_id, config_id, name, full_name, normalized_full_name, url, clone_url, owner, organization, default_branch, created_at, updated_at, metadata) VALUES ('r1', 'u1', 'c1', 'repo', 'owner/repo', 'owner/repo', 'https://example.com/repo', 'https://example.com/repo.git', 'owner', 'Example Org', 'main', 1000, 1100, '{\"issues\":true}')");
|
||||
db.run("INSERT INTO sessions (id, token, user_id, expires_at) VALUES ('sess1', 'session-token', 'u1', 4000)");
|
||||
db.run("INSERT INTO verification_tokens (id, token, identifier, type, expires_at) VALUES ('vt1', 'verify-token', 'u1@example.com', 'email', 5000)");
|
||||
db.run("INSERT INTO verifications (id, identifier, value, expires_at) VALUES ('ver1', 'u1@example.com', '123456', 6000)");
|
||||
db.run("INSERT INTO oauth_applications (id, client_id, client_secret, name, redirect_urls, type, user_id) VALUES ('app1', 'client-1', 'secret-1', 'Example App', '[\"https://example.com/callback\"]', 'confidential', 'u1')");
|
||||
db.run("INSERT INTO oauth_access_tokens (id, access_token, refresh_token, access_token_expires_at, refresh_token_expires_at, client_id, user_id, scopes) VALUES ('oat1', 'oauth-access-token', 'oauth-refresh-token', 7000, 8000, 'client-1', 'u1', '[\"repo\"]')");
|
||||
db.run("INSERT INTO oauth_consent (id, user_id, client_id, scopes, consent_given) VALUES ('consent1', 'u1', 'client-1', '[\"repo\"]', true)");
|
||||
db.run("INSERT INTO sso_providers (id, issuer, domain, oidc_config, user_id, provider_id) VALUES ('sso1', 'https://issuer.example.com', 'example.com', '{}', 'u1', 'provider-1')");
|
||||
db.run("INSERT INTO rate_limits (id, user_id, provider, `limit`, remaining, used, reset, retry_after, status, last_checked) VALUES ('rl1', 'u1', 'github', 5000, 4999, 1, 9000, NULL, 'ok', 8500)");
|
||||
}
|
||||
|
||||
function verify0009Migration(db: Database) {
|
||||
const repositoryColumns = db.query("PRAGMA table_info(repositories)").all() as TableInfoRow[];
|
||||
const importedAtColumn = repositoryColumns.find((column) => column.name === "imported_at");
|
||||
|
||||
assert(importedAtColumn, "Expected repositories.imported_at column to exist after migration");
|
||||
assert(importedAtColumn.notnull === 1, "Expected repositories.imported_at to be NOT NULL");
|
||||
assert(importedAtColumn.dflt_value === "unixepoch()", `Expected repositories.imported_at default to be unixepoch(), got ${importedAtColumn.dflt_value ?? "null"}`);
|
||||
|
||||
const existingRepo = db.query("SELECT imported_at FROM repositories WHERE id = 'r1'").get() as { imported_at: number } | null;
|
||||
assert(existingRepo?.imported_at === 900, `Expected existing repository imported_at to backfill from mirror_jobs timestamp 900, got ${existingRepo?.imported_at ?? "null"}`);
|
||||
|
||||
db.run("INSERT INTO repositories (id, user_id, config_id, name, full_name, normalized_full_name, url, clone_url, owner, default_branch) VALUES ('r2', 'u1', 'c1', 'repo-two', 'owner/repo-two', 'owner/repo-two', 'https://example.com/repo-two', 'https://example.com/repo-two.git', 'owner', 'main')");
|
||||
const newRepo = db.query("SELECT imported_at FROM repositories WHERE id = 'r2'").get() as { imported_at: number } | null;
|
||||
assert(typeof newRepo?.imported_at === "number" && newRepo.imported_at > 0, "Expected new repository insert to receive imported_at from the column default");
|
||||
|
||||
const importedAtIndex = db
|
||||
.query("SELECT name FROM sqlite_master WHERE type = 'index' AND tbl_name = 'repositories' AND name = 'idx_repositories_user_imported_at'")
|
||||
.get() as { name: string } | null;
|
||||
assert(importedAtIndex?.name === "idx_repositories_user_imported_at", "Expected repositories imported_at index to exist after migration");
|
||||
}
|
||||
|
||||
function seedPre0010Database(db: any) {
|
||||
// Seed a repo row to verify index creation doesn't break existing data
|
||||
seedPre0009Database(db);
|
||||
}
|
||||
|
||||
function verify0010Migration(db: any) {
|
||||
const indexes = db.prepare(
|
||||
"SELECT name FROM sqlite_master WHERE type='index' AND name='uniq_repositories_user_mirrored_location'"
|
||||
).all();
|
||||
if (indexes.length === 0) {
|
||||
throw new Error("Missing unique partial index uniq_repositories_user_mirrored_location");
|
||||
}
|
||||
|
||||
const lookupIdx = db.prepare(
|
||||
"SELECT name FROM sqlite_master WHERE type='index' AND name='idx_repositories_mirrored_location'"
|
||||
).all();
|
||||
if (lookupIdx.length === 0) {
|
||||
throw new Error("Missing lookup index idx_repositories_mirrored_location");
|
||||
}
|
||||
}
|
||||
|
||||
function seedPre0011Database(db: any) {
|
||||
seedPre0009Database(db);
|
||||
runMigration(db, migrations.find((m) => m.entry.tag === "0009_nervous_tyger_tiger")!);
|
||||
runMigration(db, migrations.find((m) => m.entry.tag === "0010_mirrored_location_index")!);
|
||||
}
|
||||
|
||||
function verify0011Migration(db: any) {
|
||||
const configColumns = db.query("PRAGMA table_info(configs)").all() as TableInfoRow[];
|
||||
const notificationConfigColumn = configColumns.find((column: any) => column.name === "notification_config");
|
||||
|
||||
assert(notificationConfigColumn, "Expected configs.notification_config column to exist after migration");
|
||||
assert(notificationConfigColumn.notnull === 1, "Expected configs.notification_config to be NOT NULL");
|
||||
assert(
|
||||
notificationConfigColumn.dflt_value !== null,
|
||||
"Expected configs.notification_config to have a default value",
|
||||
);
|
||||
|
||||
const existingConfig = db.query("SELECT notification_config FROM configs WHERE id = 'c1'").get() as { notification_config: string } | null;
|
||||
assert(existingConfig, "Expected existing config row to still exist");
|
||||
const parsed = JSON.parse(existingConfig.notification_config);
|
||||
assert(parsed.enabled === false, "Expected default notification_config.enabled to be false");
|
||||
assert(parsed.provider === "ntfy", "Expected default notification_config.provider to be 'ntfy'");
|
||||
}
|
||||
|
||||
const latestUpgradeFixtures: Record<string, UpgradeFixture> = {
|
||||
"0009_nervous_tyger_tiger": {
|
||||
seed: seedPre0009Database,
|
||||
verify: verify0009Migration,
|
||||
},
|
||||
"0010_mirrored_location_index": {
|
||||
seed: seedPre0010Database,
|
||||
verify: verify0010Migration,
|
||||
},
|
||||
"0011_notification_config": {
|
||||
seed: seedPre0011Database,
|
||||
verify: verify0011Migration,
|
||||
},
|
||||
};
|
||||
|
||||
function lintMigrations(selectedMigrations: Migration[]) {
|
||||
const violations: string[] = [];
|
||||
|
||||
for (const migration of selectedMigrations) {
|
||||
for (const statement of migration.statements) {
|
||||
for (const rule of SQLITE_LINT_RULES) {
|
||||
if (rule.pattern.test(statement)) {
|
||||
violations.push(`[${migration.entry.tag}] ${rule.message}\n Statement: ${statement.slice(0, 120)}...`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert(
|
||||
violations.length === 0,
|
||||
`SQLite lint found ${violations.length} violation(s):\n\n${violations.join("\n\n")}`,
|
||||
);
|
||||
}
|
||||
|
||||
function validateMigrations() {
|
||||
assert(latestMigration, "No migrations found in drizzle/meta/_journal.json");
|
||||
|
||||
// Lint all migrations for known SQLite pitfalls before running anything.
|
||||
lintMigrations(migrations);
|
||||
|
||||
const emptyDb = new Database(":memory:");
|
||||
try {
|
||||
runMigrations(emptyDb, migrations);
|
||||
} finally {
|
||||
emptyDb.close();
|
||||
}
|
||||
|
||||
const upgradeFixture = latestUpgradeFixtures[latestMigration.entry.tag];
|
||||
assert(
|
||||
upgradeFixture,
|
||||
`Missing upgrade fixture for latest migration ${latestMigration.entry.tag}. Add one in scripts/validate-migrations.ts.`,
|
||||
);
|
||||
|
||||
const upgradeDb = new Database(":memory:");
|
||||
try {
|
||||
runMigrations(upgradeDb, migrations.slice(0, -1));
|
||||
upgradeFixture.seed(upgradeDb);
|
||||
runMigration(upgradeDb, latestMigration);
|
||||
upgradeFixture.verify(upgradeDb);
|
||||
} finally {
|
||||
upgradeDb.close();
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Validated ${migrations.length} migrations from scratch and upgrade path for ${latestMigration.entry.tag}.`,
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
validateMigrations();
|
||||
} catch (error) {
|
||||
console.error("Migration validation failed:");
|
||||
console.error(error instanceof Error ? error.stack ?? error.message : String(error));
|
||||
process.exit(1);
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { GitHubConfigForm } from './GitHubConfigForm';
|
||||
import { GiteaConfigForm } from './GiteaConfigForm';
|
||||
import { AutomationSettings } from './AutomationSettings';
|
||||
import { SSOSettings } from './SSOSettings';
|
||||
import { NotificationSettings } from './NotificationSettings';
|
||||
import type {
|
||||
ConfigApiResponse,
|
||||
GiteaConfig,
|
||||
@@ -13,6 +14,7 @@ import type {
|
||||
DatabaseCleanupConfig,
|
||||
MirrorOptions,
|
||||
AdvancedOptions,
|
||||
NotificationConfig,
|
||||
} from '@/types/config';
|
||||
import { Button } from '../ui/button';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
@@ -30,6 +32,7 @@ type ConfigState = {
|
||||
cleanupConfig: DatabaseCleanupConfig;
|
||||
mirrorOptions: MirrorOptions;
|
||||
advancedOptions: AdvancedOptions;
|
||||
notificationConfig: NotificationConfig;
|
||||
};
|
||||
|
||||
export function ConfigTabs() {
|
||||
@@ -42,12 +45,19 @@ export function ConfigTabs() {
|
||||
},
|
||||
giteaConfig: {
|
||||
url: '',
|
||||
externalUrl: '',
|
||||
username: '',
|
||||
token: '',
|
||||
organization: 'github-mirrors',
|
||||
visibility: 'public',
|
||||
starredReposOrg: 'starred',
|
||||
starredReposMode: 'dedicated-org',
|
||||
preserveOrgStructure: false,
|
||||
backupStrategy: "on-force-push",
|
||||
backupRetentionCount: 5,
|
||||
backupRetentionDays: 30,
|
||||
backupDirectory: 'data/repo-backups',
|
||||
blockSyncOnBackupFailure: true,
|
||||
},
|
||||
scheduleConfig: {
|
||||
enabled: false, // Don't set defaults here - will be loaded from API
|
||||
@@ -77,6 +87,14 @@ export function ConfigTabs() {
|
||||
advancedOptions: {
|
||||
skipForks: false,
|
||||
starredCodeOnly: false,
|
||||
autoMirrorStarred: false,
|
||||
},
|
||||
notificationConfig: {
|
||||
enabled: false,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: false,
|
||||
notifyOnNewRepo: false,
|
||||
},
|
||||
});
|
||||
const { user } = useAuth();
|
||||
@@ -87,10 +105,12 @@ export function ConfigTabs() {
|
||||
const [isAutoSavingCleanup, setIsAutoSavingCleanup] = useState<boolean>(false);
|
||||
const [isAutoSavingGitHub, setIsAutoSavingGitHub] = useState<boolean>(false);
|
||||
const [isAutoSavingGitea, setIsAutoSavingGitea] = useState<boolean>(false);
|
||||
const [isAutoSavingNotification, setIsAutoSavingNotification] = useState<boolean>(false);
|
||||
const autoSaveScheduleTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const autoSaveCleanupTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const autoSaveGitHubTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const autoSaveGiteaTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const autoSaveNotificationTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
|
||||
const isConfigFormValid = (): boolean => {
|
||||
const { githubConfig, giteaConfig } = config;
|
||||
@@ -117,19 +137,31 @@ export function ConfigTabs() {
|
||||
if (!user?.id) return;
|
||||
setIsSyncing(true);
|
||||
try {
|
||||
const result = await apiRequest<{ success: boolean; message?: string }>(
|
||||
const result = await apiRequest<{ success: boolean; message?: string; failedOrgs?: string[]; recoveredOrgs?: number }>(
|
||||
`/sync?userId=${user.id}`,
|
||||
{ method: 'POST' },
|
||||
);
|
||||
result.success
|
||||
? toast.success(
|
||||
'GitHub data imported successfully! Head to the Repositories page to start mirroring.',
|
||||
)
|
||||
: toast.error(
|
||||
`Failed to import GitHub data: ${
|
||||
result.message || 'Unknown error'
|
||||
}`,
|
||||
if (result.success) {
|
||||
toast.success(
|
||||
'GitHub data imported successfully! Head to the Repositories page to start mirroring.',
|
||||
);
|
||||
if (result.failedOrgs && result.failedOrgs.length > 0) {
|
||||
toast.warning(
|
||||
`${result.failedOrgs.length} org${result.failedOrgs.length > 1 ? 's' : ''} failed to import (${result.failedOrgs.join(', ')}). Check the Organizations tab for details.`,
|
||||
);
|
||||
}
|
||||
if (result.recoveredOrgs && result.recoveredOrgs > 0) {
|
||||
toast.success(
|
||||
`${result.recoveredOrgs} previously failed org${result.recoveredOrgs > 1 ? 's' : ''} recovered successfully.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
toast.error(
|
||||
`Failed to import GitHub data: ${
|
||||
result.message || 'Unknown error'
|
||||
}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error(
|
||||
`Error importing GitHub data: ${
|
||||
@@ -440,6 +472,55 @@ export function ConfigTabs() {
|
||||
}
|
||||
}, [user?.id, config.githubConfig, config.giteaConfig, config.scheduleConfig, config.cleanupConfig, config.mirrorOptions]);
|
||||
|
||||
// Auto-save function for notification config changes
|
||||
const autoSaveNotificationConfig = useCallback(async (notifConfig: NotificationConfig) => {
|
||||
if (!user?.id) return;
|
||||
|
||||
// Clear any existing timeout
|
||||
if (autoSaveNotificationTimeoutRef.current) {
|
||||
clearTimeout(autoSaveNotificationTimeoutRef.current);
|
||||
}
|
||||
|
||||
// Debounce the auto-save to prevent excessive API calls
|
||||
autoSaveNotificationTimeoutRef.current = setTimeout(async () => {
|
||||
setIsAutoSavingNotification(true);
|
||||
|
||||
const reqPayload = {
|
||||
userId: user.id!,
|
||||
githubConfig: config.githubConfig,
|
||||
giteaConfig: config.giteaConfig,
|
||||
scheduleConfig: config.scheduleConfig,
|
||||
cleanupConfig: config.cleanupConfig,
|
||||
mirrorOptions: config.mirrorOptions,
|
||||
advancedOptions: config.advancedOptions,
|
||||
notificationConfig: notifConfig,
|
||||
};
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/config', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(reqPayload),
|
||||
});
|
||||
const result: SaveConfigApiResponse = await response.json();
|
||||
|
||||
if (result.success) {
|
||||
// Silent success - no toast for auto-save
|
||||
invalidateConfigCache();
|
||||
} else {
|
||||
showErrorToast(
|
||||
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
||||
toast
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setIsAutoSavingNotification(false);
|
||||
}
|
||||
}, 500); // 500ms debounce
|
||||
}, [user?.id, config.githubConfig, config.giteaConfig, config.scheduleConfig, config.cleanupConfig, config.mirrorOptions, config.advancedOptions]);
|
||||
|
||||
// Cleanup timeouts on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
@@ -455,6 +536,9 @@ export function ConfigTabs() {
|
||||
if (autoSaveGiteaTimeoutRef.current) {
|
||||
clearTimeout(autoSaveGiteaTimeoutRef.current);
|
||||
}
|
||||
if (autoSaveNotificationTimeoutRef.current) {
|
||||
clearTimeout(autoSaveNotificationTimeoutRef.current);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
@@ -486,6 +570,8 @@ export function ConfigTabs() {
|
||||
},
|
||||
advancedOptions:
|
||||
response.advancedOptions || config.advancedOptions,
|
||||
notificationConfig:
|
||||
(response as any).notificationConfig || config.notificationConfig,
|
||||
});
|
||||
|
||||
}
|
||||
@@ -615,9 +701,10 @@ export function ConfigTabs() {
|
||||
|
||||
{/* Content section - Tabs layout */}
|
||||
<Tabs defaultValue="connections" className="space-y-4">
|
||||
<TabsList className="grid w-full grid-cols-3">
|
||||
<TabsList className="grid w-full grid-cols-4">
|
||||
<TabsTrigger value="connections">Connections</TabsTrigger>
|
||||
<TabsTrigger value="automation">Automation</TabsTrigger>
|
||||
<TabsTrigger value="notifications">Notifications</TabsTrigger>
|
||||
<TabsTrigger value="sso">Authentication</TabsTrigger>
|
||||
</TabsList>
|
||||
|
||||
@@ -654,9 +741,20 @@ export function ConfigTabs() {
|
||||
: update,
|
||||
}))
|
||||
}
|
||||
giteaConfig={config.giteaConfig}
|
||||
setGiteaConfig={update =>
|
||||
setConfig(prev => ({
|
||||
...prev,
|
||||
giteaConfig:
|
||||
typeof update === 'function'
|
||||
? update(prev.giteaConfig)
|
||||
: update,
|
||||
}))
|
||||
}
|
||||
onAutoSave={autoSaveGitHubConfig}
|
||||
onMirrorOptionsAutoSave={autoSaveMirrorOptions}
|
||||
onAdvancedOptionsAutoSave={autoSaveAdvancedOptions}
|
||||
onGiteaAutoSave={autoSaveGiteaConfig}
|
||||
isAutoSaving={isAutoSavingGitHub}
|
||||
/>
|
||||
<GiteaConfigForm
|
||||
@@ -694,6 +792,17 @@ export function ConfigTabs() {
|
||||
/>
|
||||
</TabsContent>
|
||||
|
||||
<TabsContent value="notifications" className="space-y-4">
|
||||
<NotificationSettings
|
||||
notificationConfig={config.notificationConfig}
|
||||
onNotificationChange={(newConfig) => {
|
||||
setConfig(prev => ({ ...prev, notificationConfig: newConfig }));
|
||||
autoSaveNotificationConfig(newConfig);
|
||||
}}
|
||||
isAutoSaving={isAutoSavingNotification}
|
||||
/>
|
||||
</TabsContent>
|
||||
|
||||
<TabsContent value="sso" className="space-y-4">
|
||||
<SSOSettings />
|
||||
</TabsContent>
|
||||
|
||||
@@ -7,10 +7,11 @@ import {
|
||||
CardTitle,
|
||||
} from "@/components/ui/card";
|
||||
import { githubApi } from "@/lib/api";
|
||||
import type { GitHubConfig, MirrorOptions, AdvancedOptions } from "@/types/config";
|
||||
import type { GitHubConfig, MirrorOptions, AdvancedOptions, GiteaConfig, BackupStrategy } from "@/types/config";
|
||||
import { Input } from "../ui/input";
|
||||
import { toast } from "sonner";
|
||||
import { Info } from "lucide-react";
|
||||
import { Info, ShieldAlert } from "lucide-react";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { GitHubMirrorSettings } from "./GitHubMirrorSettings";
|
||||
import { Separator } from "../ui/separator";
|
||||
import {
|
||||
@@ -26,23 +27,29 @@ interface GitHubConfigFormProps {
|
||||
setMirrorOptions: React.Dispatch<React.SetStateAction<MirrorOptions>>;
|
||||
advancedOptions: AdvancedOptions;
|
||||
setAdvancedOptions: React.Dispatch<React.SetStateAction<AdvancedOptions>>;
|
||||
giteaConfig?: GiteaConfig;
|
||||
setGiteaConfig?: React.Dispatch<React.SetStateAction<GiteaConfig>>;
|
||||
onAutoSave?: (githubConfig: GitHubConfig) => Promise<void>;
|
||||
onMirrorOptionsAutoSave?: (mirrorOptions: MirrorOptions) => Promise<void>;
|
||||
onAdvancedOptionsAutoSave?: (advancedOptions: AdvancedOptions) => Promise<void>;
|
||||
onGiteaAutoSave?: (giteaConfig: GiteaConfig) => Promise<void>;
|
||||
isAutoSaving?: boolean;
|
||||
}
|
||||
|
||||
export function GitHubConfigForm({
|
||||
config,
|
||||
setConfig,
|
||||
config,
|
||||
setConfig,
|
||||
mirrorOptions,
|
||||
setMirrorOptions,
|
||||
advancedOptions,
|
||||
setAdvancedOptions,
|
||||
onAutoSave,
|
||||
giteaConfig,
|
||||
setGiteaConfig,
|
||||
onAutoSave,
|
||||
onMirrorOptionsAutoSave,
|
||||
onAdvancedOptionsAutoSave,
|
||||
isAutoSaving
|
||||
onGiteaAutoSave,
|
||||
isAutoSaving
|
||||
}: GitHubConfigFormProps) {
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
@@ -202,7 +209,161 @@ export function GitHubConfigForm({
|
||||
if (onAdvancedOptionsAutoSave) onAdvancedOptionsAutoSave(newOptions);
|
||||
}}
|
||||
/>
|
||||
|
||||
|
||||
{giteaConfig && setGiteaConfig && (
|
||||
<>
|
||||
<Separator />
|
||||
|
||||
<div className="space-y-4">
|
||||
<h3 className="text-sm font-medium flex items-center gap-2">
|
||||
<ShieldAlert className="h-4 w-4 text-primary" />
|
||||
Destructive Update Protection
|
||||
<Badge variant="secondary" className="ml-2 text-[10px] px-1.5 py-0">BETA</Badge>
|
||||
</h3>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Choose how to handle force-pushes or rewritten upstream history on GitHub.
|
||||
</p>
|
||||
|
||||
<div className="grid grid-cols-2 md:grid-cols-4 gap-2">
|
||||
{([
|
||||
{
|
||||
value: "disabled",
|
||||
label: "Disabled",
|
||||
desc: "No detection or backups",
|
||||
},
|
||||
{
|
||||
value: "always",
|
||||
label: "Always Backup",
|
||||
desc: "Snapshot before every sync (high disk usage)",
|
||||
},
|
||||
{
|
||||
value: "on-force-push",
|
||||
label: "Smart",
|
||||
desc: "Backup only on force-push",
|
||||
},
|
||||
{
|
||||
value: "block-on-force-push",
|
||||
label: "Block & Approve",
|
||||
desc: "Require approval on force-push",
|
||||
},
|
||||
] as const).map((opt) => {
|
||||
const isSelected = (giteaConfig.backupStrategy ?? "on-force-push") === opt.value;
|
||||
return (
|
||||
<button
|
||||
key={opt.value}
|
||||
type="button"
|
||||
onClick={() => {
|
||||
const newConfig = { ...giteaConfig, backupStrategy: opt.value as BackupStrategy };
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className={`flex flex-col items-start gap-1 rounded-lg border p-3 text-left text-sm transition-colors ${
|
||||
isSelected
|
||||
? "border-primary bg-primary/5 ring-1 ring-primary"
|
||||
: "border-input hover:bg-accent hover:text-accent-foreground"
|
||||
}`}
|
||||
>
|
||||
<span className="font-medium">{opt.label}</span>
|
||||
<span className="text-xs text-muted-foreground">{opt.desc}</span>
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
{(giteaConfig.backupStrategy ?? "on-force-push") !== "disabled" && (
|
||||
<>
|
||||
<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
|
||||
<div>
|
||||
<label htmlFor="backup-retention" className="block text-sm font-medium mb-1.5">
|
||||
Snapshot retention count
|
||||
</label>
|
||||
<input
|
||||
id="backup-retention"
|
||||
name="backupRetentionCount"
|
||||
type="number"
|
||||
min={1}
|
||||
value={giteaConfig.backupRetentionCount ?? 5}
|
||||
onChange={(e) => {
|
||||
const newConfig = {
|
||||
...giteaConfig,
|
||||
backupRetentionCount: Math.max(1, Number.parseInt(e.target.value, 10) || 5),
|
||||
};
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label htmlFor="backup-retention-days" className="block text-sm font-medium mb-1.5">
|
||||
Snapshot retention days
|
||||
</label>
|
||||
<input
|
||||
id="backup-retention-days"
|
||||
name="backupRetentionDays"
|
||||
type="number"
|
||||
min={0}
|
||||
value={giteaConfig.backupRetentionDays ?? 30}
|
||||
onChange={(e) => {
|
||||
const newConfig = {
|
||||
...giteaConfig,
|
||||
backupRetentionDays: Math.max(0, Number.parseInt(e.target.value, 10) || 0),
|
||||
};
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">0 = no time-based limit</p>
|
||||
</div>
|
||||
<div>
|
||||
<label htmlFor="backup-directory" className="block text-sm font-medium mb-1.5">
|
||||
Snapshot directory
|
||||
</label>
|
||||
<input
|
||||
id="backup-directory"
|
||||
name="backupDirectory"
|
||||
type="text"
|
||||
value={giteaConfig.backupDirectory || "data/repo-backups"}
|
||||
onChange={(e) => {
|
||||
const newConfig = { ...giteaConfig, backupDirectory: e.target.value };
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
placeholder="data/repo-backups"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{((giteaConfig.backupStrategy ?? "on-force-push") === "always" ||
|
||||
(giteaConfig.backupStrategy ?? "on-force-push") === "on-force-push") && (
|
||||
<label className="flex items-start gap-3 text-sm">
|
||||
<input
|
||||
name="blockSyncOnBackupFailure"
|
||||
type="checkbox"
|
||||
checked={Boolean(giteaConfig.blockSyncOnBackupFailure)}
|
||||
onChange={(e) => {
|
||||
const newConfig = { ...giteaConfig, blockSyncOnBackupFailure: e.target.checked };
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="mt-0.5 rounded border-input"
|
||||
/>
|
||||
<span>
|
||||
Block sync when snapshot fails
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Recommended for backup-first behavior. If disabled, sync continues even when snapshot creation fails.
|
||||
</p>
|
||||
</span>
|
||||
</label>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Mobile: Show button at bottom */}
|
||||
<Button
|
||||
type="button"
|
||||
|
||||
@@ -287,6 +287,31 @@ export function GitHubMirrorSettings({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Auto-mirror starred repos toggle */}
|
||||
{githubConfig.mirrorStarred && (
|
||||
<div className="mt-4">
|
||||
<div className="flex items-start space-x-3">
|
||||
<Checkbox
|
||||
id="auto-mirror-starred"
|
||||
checked={advancedOptions.autoMirrorStarred ?? false}
|
||||
onCheckedChange={(checked) => handleAdvancedChange('autoMirrorStarred', !!checked)}
|
||||
/>
|
||||
<div className="space-y-0.5 flex-1">
|
||||
<Label
|
||||
htmlFor="auto-mirror-starred"
|
||||
className="text-sm font-normal cursor-pointer flex items-center gap-2"
|
||||
>
|
||||
<Star className="h-3.5 w-3.5" />
|
||||
Auto-mirror new starred repositories
|
||||
</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
When disabled, starred repos are imported for browsing but not automatically mirrored. You can still mirror individual repos manually.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Duplicate name handling for starred repos */}
|
||||
{githubConfig.mirrorStarred && (
|
||||
<div className="mt-4 space-y-2">
|
||||
@@ -377,14 +402,13 @@ export function GitHubMirrorSettings({
|
||||
id="release-limit"
|
||||
type="number"
|
||||
min="1"
|
||||
max="100"
|
||||
value={mirrorOptions.releaseLimit || 10}
|
||||
onChange={(e) => {
|
||||
const value = parseInt(e.target.value) || 10;
|
||||
const clampedValue = Math.min(100, Math.max(1, value));
|
||||
const clampedValue = Math.max(1, value);
|
||||
handleMirrorChange('releaseLimit', clampedValue);
|
||||
}}
|
||||
className="w-16 px-2 py-1 text-xs border border-input rounded bg-background text-foreground"
|
||||
className="w-20 px-2 py-1 text-xs border border-input rounded bg-background text-foreground"
|
||||
/>
|
||||
<span className="text-xs text-muted-foreground">releases</span>
|
||||
</div>
|
||||
|
||||
@@ -100,9 +100,14 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
);
|
||||
}
|
||||
|
||||
const normalizedValue =
|
||||
type === "checkbox"
|
||||
? checked
|
||||
: value;
|
||||
|
||||
const newConfig = {
|
||||
...config,
|
||||
[name]: type === "checkbox" ? checked : value,
|
||||
[name]: normalizedValue,
|
||||
};
|
||||
setConfig(newConfig);
|
||||
|
||||
@@ -195,6 +200,27 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="gitea-external-url"
|
||||
className="block text-sm font-medium mb-1.5"
|
||||
>
|
||||
Gitea External URL (optional)
|
||||
</label>
|
||||
<input
|
||||
id="gitea-external-url"
|
||||
name="externalUrl"
|
||||
type="url"
|
||||
value={config.externalUrl || ""}
|
||||
onChange={handleChange}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
placeholder="https://gitea.example.com"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
Used only for dashboard links. API sync still uses Gitea URL.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="gitea-token"
|
||||
@@ -224,6 +250,7 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
strategy={mirrorStrategy}
|
||||
destinationOrg={config.organization}
|
||||
starredReposOrg={config.starredReposOrg}
|
||||
starredReposMode={config.starredReposMode}
|
||||
onStrategyChange={setMirrorStrategy}
|
||||
githubUsername={githubUsername}
|
||||
giteaUsername={config.username}
|
||||
@@ -235,6 +262,7 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
strategy={mirrorStrategy}
|
||||
destinationOrg={config.organization}
|
||||
starredReposOrg={config.starredReposOrg}
|
||||
starredReposMode={config.starredReposMode}
|
||||
personalReposOrg={config.personalReposOrg}
|
||||
visibility={config.visibility}
|
||||
onDestinationOrgChange={(org) => {
|
||||
@@ -247,6 +275,11 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
setConfig(newConfig);
|
||||
if (onAutoSave) onAutoSave(newConfig);
|
||||
}}
|
||||
onStarredReposModeChange={(mode) => {
|
||||
const newConfig = { ...config, starredReposMode: mode };
|
||||
setConfig(newConfig);
|
||||
if (onAutoSave) onAutoSave(newConfig);
|
||||
}}
|
||||
onPersonalReposOrgChange={(org) => {
|
||||
const newConfig = { ...config, personalReposOrg: org };
|
||||
setConfig(newConfig);
|
||||
@@ -258,7 +291,7 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
if (onAutoSave) onAutoSave(newConfig);
|
||||
}}
|
||||
/>
|
||||
|
||||
|
||||
{/* Mobile: Show button at bottom */}
|
||||
<Button
|
||||
type="button"
|
||||
|
||||
394
src/components/config/NotificationSettings.tsx
Normal file
394
src/components/config/NotificationSettings.tsx
Normal file
@@ -0,0 +1,394 @@
|
||||
import { useState } from "react";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Bell, Activity, Send } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import type { NotificationConfig } from "@/types/config";
|
||||
|
||||
interface NotificationSettingsProps {
|
||||
notificationConfig: NotificationConfig;
|
||||
onNotificationChange: (config: NotificationConfig) => void;
|
||||
isAutoSaving?: boolean;
|
||||
}
|
||||
|
||||
export function NotificationSettings({
|
||||
notificationConfig,
|
||||
onNotificationChange,
|
||||
isAutoSaving,
|
||||
}: NotificationSettingsProps) {
|
||||
const [isTesting, setIsTesting] = useState(false);
|
||||
|
||||
const handleTestNotification = async () => {
|
||||
setIsTesting(true);
|
||||
try {
|
||||
const resp = await fetch("/api/notifications/test", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ notificationConfig }),
|
||||
});
|
||||
const result = await resp.json();
|
||||
if (result.success) {
|
||||
toast.success("Test notification sent successfully!");
|
||||
} else {
|
||||
toast.error(`Test failed: ${result.error || "Unknown error"}`);
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error(
|
||||
`Test failed: ${error instanceof Error ? error.message : String(error)}`
|
||||
);
|
||||
} finally {
|
||||
setIsTesting(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Card className="w-full">
|
||||
<CardHeader>
|
||||
<CardTitle className="text-lg font-semibold flex items-center gap-2">
|
||||
<Bell className="h-5 w-5" />
|
||||
Notifications
|
||||
{isAutoSaving && (
|
||||
<Activity className="h-4 w-4 animate-spin text-muted-foreground ml-2" />
|
||||
)}
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
|
||||
<CardContent className="space-y-6">
|
||||
{/* Enable/disable toggle */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="space-y-0.5">
|
||||
<Label htmlFor="notifications-enabled" className="text-sm font-medium cursor-pointer">
|
||||
Enable notifications
|
||||
</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Receive alerts when mirror jobs complete or fail
|
||||
</p>
|
||||
</div>
|
||||
<Switch
|
||||
id="notifications-enabled"
|
||||
checked={notificationConfig.enabled}
|
||||
onCheckedChange={(checked) =>
|
||||
onNotificationChange({ ...notificationConfig, enabled: checked })
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{notificationConfig.enabled && (
|
||||
<>
|
||||
{/* Provider selector */}
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="notification-provider" className="text-sm font-medium">
|
||||
Notification provider
|
||||
</Label>
|
||||
<Select
|
||||
value={notificationConfig.provider}
|
||||
onValueChange={(value: "ntfy" | "apprise") =>
|
||||
onNotificationChange({ ...notificationConfig, provider: value })
|
||||
}
|
||||
>
|
||||
<SelectTrigger id="notification-provider">
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value="ntfy">Ntfy.sh</SelectItem>
|
||||
<SelectItem value="apprise">Apprise API</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
{/* Ntfy configuration */}
|
||||
{notificationConfig.provider === "ntfy" && (
|
||||
<div className="space-y-4 p-4 border border-border rounded-lg bg-card/50">
|
||||
<h3 className="text-sm font-medium">Ntfy.sh Settings</h3>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="ntfy-url" className="text-sm">
|
||||
Server URL
|
||||
</Label>
|
||||
<Input
|
||||
id="ntfy-url"
|
||||
type="url"
|
||||
placeholder="https://ntfy.sh"
|
||||
value={notificationConfig.ntfy?.url || "https://ntfy.sh"}
|
||||
onChange={(e) =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
ntfy: {
|
||||
...notificationConfig.ntfy!,
|
||||
url: e.target.value,
|
||||
topic: notificationConfig.ntfy?.topic || "",
|
||||
priority: notificationConfig.ntfy?.priority || "default",
|
||||
},
|
||||
})
|
||||
}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Use https://ntfy.sh for the public server or your self-hosted instance URL
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="ntfy-topic" className="text-sm">
|
||||
Topic <span className="text-destructive">*</span>
|
||||
</Label>
|
||||
<Input
|
||||
id="ntfy-topic"
|
||||
placeholder="gitea-mirror"
|
||||
value={notificationConfig.ntfy?.topic || ""}
|
||||
onChange={(e) =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
ntfy: {
|
||||
...notificationConfig.ntfy!,
|
||||
url: notificationConfig.ntfy?.url || "https://ntfy.sh",
|
||||
topic: e.target.value,
|
||||
priority: notificationConfig.ntfy?.priority || "default",
|
||||
},
|
||||
})
|
||||
}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Choose a unique topic name. Anyone with the topic name can subscribe.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="ntfy-token" className="text-sm">
|
||||
Access token (optional)
|
||||
</Label>
|
||||
<Input
|
||||
id="ntfy-token"
|
||||
type="password"
|
||||
placeholder="tk_..."
|
||||
value={notificationConfig.ntfy?.token || ""}
|
||||
onChange={(e) =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
ntfy: {
|
||||
...notificationConfig.ntfy!,
|
||||
url: notificationConfig.ntfy?.url || "https://ntfy.sh",
|
||||
topic: notificationConfig.ntfy?.topic || "",
|
||||
token: e.target.value,
|
||||
priority: notificationConfig.ntfy?.priority || "default",
|
||||
},
|
||||
})
|
||||
}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Required if your ntfy server uses authentication
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="ntfy-priority" className="text-sm">
|
||||
Default priority
|
||||
</Label>
|
||||
<Select
|
||||
value={notificationConfig.ntfy?.priority || "default"}
|
||||
onValueChange={(value: "min" | "low" | "default" | "high" | "urgent") =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
ntfy: {
|
||||
...notificationConfig.ntfy!,
|
||||
url: notificationConfig.ntfy?.url || "https://ntfy.sh",
|
||||
topic: notificationConfig.ntfy?.topic || "",
|
||||
priority: value,
|
||||
},
|
||||
})
|
||||
}
|
||||
>
|
||||
<SelectTrigger id="ntfy-priority">
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value="min">Min</SelectItem>
|
||||
<SelectItem value="low">Low</SelectItem>
|
||||
<SelectItem value="default">Default</SelectItem>
|
||||
<SelectItem value="high">High</SelectItem>
|
||||
<SelectItem value="urgent">Urgent</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Error notifications always use "high" priority regardless of this setting
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Apprise configuration */}
|
||||
{notificationConfig.provider === "apprise" && (
|
||||
<div className="space-y-4 p-4 border border-border rounded-lg bg-card/50">
|
||||
<h3 className="text-sm font-medium">Apprise API Settings</h3>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="apprise-url" className="text-sm">
|
||||
Server URL <span className="text-destructive">*</span>
|
||||
</Label>
|
||||
<Input
|
||||
id="apprise-url"
|
||||
type="url"
|
||||
placeholder="http://apprise:8000"
|
||||
value={notificationConfig.apprise?.url || ""}
|
||||
onChange={(e) =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
apprise: {
|
||||
...notificationConfig.apprise!,
|
||||
url: e.target.value,
|
||||
token: notificationConfig.apprise?.token || "",
|
||||
},
|
||||
})
|
||||
}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
URL of your Apprise API server (e.g., http://apprise:8000)
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="apprise-token" className="text-sm">
|
||||
Token / path <span className="text-destructive">*</span>
|
||||
</Label>
|
||||
<Input
|
||||
id="apprise-token"
|
||||
placeholder="gitea-mirror"
|
||||
value={notificationConfig.apprise?.token || ""}
|
||||
onChange={(e) =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
apprise: {
|
||||
...notificationConfig.apprise!,
|
||||
url: notificationConfig.apprise?.url || "",
|
||||
token: e.target.value,
|
||||
},
|
||||
})
|
||||
}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
The Apprise API configuration token or key
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="apprise-tag" className="text-sm">
|
||||
Tag filter (optional)
|
||||
</Label>
|
||||
<Input
|
||||
id="apprise-tag"
|
||||
placeholder="all"
|
||||
value={notificationConfig.apprise?.tag || ""}
|
||||
onChange={(e) =>
|
||||
onNotificationChange({
|
||||
...notificationConfig,
|
||||
apprise: {
|
||||
...notificationConfig.apprise!,
|
||||
url: notificationConfig.apprise?.url || "",
|
||||
token: notificationConfig.apprise?.token || "",
|
||||
tag: e.target.value,
|
||||
},
|
||||
})
|
||||
}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Optional tag to filter which Apprise services receive notifications
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Event toggles */}
|
||||
<div className="space-y-4 p-4 border border-border rounded-lg bg-card/50">
|
||||
<h3 className="text-sm font-medium">Notification Events</h3>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="space-y-0.5">
|
||||
<Label htmlFor="notify-sync-error" className="text-sm font-normal cursor-pointer">
|
||||
Sync errors
|
||||
</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Notify when a mirror job fails
|
||||
</p>
|
||||
</div>
|
||||
<Switch
|
||||
id="notify-sync-error"
|
||||
checked={notificationConfig.notifyOnSyncError}
|
||||
onCheckedChange={(checked) =>
|
||||
onNotificationChange({ ...notificationConfig, notifyOnSyncError: checked })
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="space-y-0.5">
|
||||
<Label htmlFor="notify-sync-success" className="text-sm font-normal cursor-pointer">
|
||||
Sync success
|
||||
</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Notify when a mirror job completes successfully
|
||||
</p>
|
||||
</div>
|
||||
<Switch
|
||||
id="notify-sync-success"
|
||||
checked={notificationConfig.notifyOnSyncSuccess}
|
||||
onCheckedChange={(checked) =>
|
||||
onNotificationChange({ ...notificationConfig, notifyOnSyncSuccess: checked })
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="space-y-0.5">
|
||||
<Label htmlFor="notify-new-repo" className="text-sm font-normal cursor-pointer text-muted-foreground">
|
||||
New repository discovered (coming soon)
|
||||
</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Notify when a new GitHub repository is auto-imported
|
||||
</p>
|
||||
</div>
|
||||
<Switch
|
||||
id="notify-new-repo"
|
||||
checked={notificationConfig.notifyOnNewRepo}
|
||||
disabled
|
||||
onCheckedChange={(checked) =>
|
||||
onNotificationChange({ ...notificationConfig, notifyOnNewRepo: checked })
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Test button */}
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
variant="outline"
|
||||
onClick={handleTestNotification}
|
||||
disabled={isTesting}
|
||||
>
|
||||
{isTesting ? (
|
||||
<>
|
||||
<Activity className="h-4 w-4 animate-spin mr-2" />
|
||||
Sending...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Send className="h-4 w-4 mr-2" />
|
||||
Send Test Notification
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@@ -9,16 +9,18 @@ import {
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { MirrorStrategy, GiteaOrgVisibility } from "@/types/config";
|
||||
import type { MirrorStrategy, GiteaOrgVisibility, StarredReposMode } from "@/types/config";
|
||||
|
||||
interface OrganizationConfigurationProps {
|
||||
strategy: MirrorStrategy;
|
||||
destinationOrg?: string;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
personalReposOrg?: string;
|
||||
visibility: GiteaOrgVisibility;
|
||||
onDestinationOrgChange: (org: string) => void;
|
||||
onStarredReposOrgChange: (org: string) => void;
|
||||
onStarredReposModeChange: (mode: StarredReposMode) => void;
|
||||
onPersonalReposOrgChange: (org: string) => void;
|
||||
onVisibilityChange: (visibility: GiteaOrgVisibility) => void;
|
||||
}
|
||||
@@ -33,13 +35,19 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
strategy,
|
||||
destinationOrg,
|
||||
starredReposOrg,
|
||||
starredReposMode,
|
||||
personalReposOrg,
|
||||
visibility,
|
||||
onDestinationOrgChange,
|
||||
onStarredReposOrgChange,
|
||||
onStarredReposModeChange,
|
||||
onPersonalReposOrgChange,
|
||||
onVisibilityChange,
|
||||
}) => {
|
||||
const activeStarredMode = starredReposMode || "dedicated-org";
|
||||
const showStarredReposOrgInput = activeStarredMode === "dedicated-org";
|
||||
const showDestinationOrgInput = strategy === "single-org" || strategy === "mixed";
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
@@ -49,38 +57,94 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
</h4>
|
||||
</div>
|
||||
|
||||
{/* First row - Organization inputs with consistent layout */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
{/* Left column - always shows starred repos org */}
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="starredReposOrg" className="text-sm font-normal flex items-center gap-2">
|
||||
<Star className="h-3.5 w-3.5" />
|
||||
Starred Repos Organization
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Info className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Starred repositories will be organized separately in this organization</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</Label>
|
||||
<Input
|
||||
id="starredReposOrg"
|
||||
value={starredReposOrg || ""}
|
||||
onChange={(e) => onStarredReposOrgChange(e.target.value)}
|
||||
placeholder="starred"
|
||||
className=""
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
Keep starred repos organized separately
|
||||
<div className="space-y-2">
|
||||
<Label className="text-sm font-normal flex items-center gap-2">
|
||||
Starred Repository Destination
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Info className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Choose whether starred repos use one org or keep their source Owner/Org paths</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</Label>
|
||||
<div className="rounded-lg border bg-muted/20 p-2">
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 gap-2">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => onStarredReposModeChange("dedicated-org")}
|
||||
aria-pressed={activeStarredMode === "dedicated-org"}
|
||||
className={cn(
|
||||
"text-left px-3 py-2 rounded-md border text-sm transition-all",
|
||||
activeStarredMode === "dedicated-org"
|
||||
? "bg-accent border-accent-foreground/30 ring-1 ring-accent-foreground/20 font-medium shadow-sm"
|
||||
: "bg-background hover:bg-accent/50 border-input"
|
||||
)}
|
||||
>
|
||||
Dedicated Organization
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => onStarredReposModeChange("preserve-owner")}
|
||||
aria-pressed={activeStarredMode === "preserve-owner"}
|
||||
className={cn(
|
||||
"text-left px-3 py-2 rounded-md border text-sm transition-all",
|
||||
activeStarredMode === "preserve-owner"
|
||||
? "bg-accent border-accent-foreground/30 ring-1 ring-accent-foreground/20 font-medium shadow-sm"
|
||||
: "bg-background hover:bg-accent/50 border-input"
|
||||
)}
|
||||
>
|
||||
Preserve Source Owner/Org
|
||||
</button>
|
||||
</div>
|
||||
<p className="mt-2 px-1 text-xs text-muted-foreground">
|
||||
{
|
||||
activeStarredMode === "dedicated-org"
|
||||
? "All starred repositories go to a single destination organization."
|
||||
: "Starred repositories keep their original GitHub Owner/Org destination."
|
||||
}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Right column - shows destination org for single-org/mixed, personal repos org for preserve, empty div for others */}
|
||||
{strategy === "single-org" || strategy === "mixed" ? (
|
||||
{/* First row - Organization inputs */}
|
||||
{(showStarredReposOrgInput || showDestinationOrgInput) && (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
{showStarredReposOrgInput ? (
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="starredReposOrg" className="text-sm font-normal flex items-center gap-2">
|
||||
<Star className="h-3.5 w-3.5" />
|
||||
Starred Repos Organization
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Info className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Starred repositories will be organized separately in this organization</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</Label>
|
||||
<Input
|
||||
id="starredReposOrg"
|
||||
value={starredReposOrg || ""}
|
||||
onChange={(e) => onStarredReposOrgChange(e.target.value)}
|
||||
placeholder="starred"
|
||||
className=""
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
Keep starred repos organized separately
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="hidden md:block" />
|
||||
)}
|
||||
|
||||
{showDestinationOrgInput ? (
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="destinationOrg" className="text-sm font-normal flex items-center gap-2">
|
||||
{strategy === "mixed" ? "Personal Repos Organization" : "Destination Organization"}
|
||||
@@ -114,10 +178,11 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
}
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="hidden md:block" />
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<div className="hidden md:block" />
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Second row - Organization Visibility (always shown) */}
|
||||
<div className="space-y-2">
|
||||
@@ -172,4 +237,3 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
HoverCardTrigger,
|
||||
} from "@/components/ui/hover-card";
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { StarredReposMode } from "@/types/config";
|
||||
|
||||
export type MirrorStrategy = "preserve" | "single-org" | "flat-user" | "mixed";
|
||||
|
||||
@@ -15,6 +16,7 @@ interface OrganizationStrategyProps {
|
||||
strategy: MirrorStrategy;
|
||||
destinationOrg?: string;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
onStrategyChange: (strategy: MirrorStrategy) => void;
|
||||
githubUsername?: string;
|
||||
giteaUsername?: string;
|
||||
@@ -76,13 +78,18 @@ const MappingPreview: React.FC<{
|
||||
config: typeof strategyConfig.preserve;
|
||||
destinationOrg?: string;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
githubUsername?: string;
|
||||
giteaUsername?: string;
|
||||
}> = ({ strategy, config, destinationOrg, starredReposOrg, githubUsername, giteaUsername }) => {
|
||||
}> = ({ strategy, config, destinationOrg, starredReposOrg, starredReposMode, githubUsername, giteaUsername }) => {
|
||||
const displayGithubUsername = githubUsername || "<username>";
|
||||
const displayGiteaUsername = giteaUsername || "<username>";
|
||||
const isGithubPlaceholder = !githubUsername;
|
||||
const isGiteaPlaceholder = !giteaUsername;
|
||||
const starredDestination =
|
||||
(starredReposMode || "dedicated-org") === "preserve-owner"
|
||||
? "awesome/starred-repo"
|
||||
: `${starredReposOrg || "starred"}/starred-repo`;
|
||||
|
||||
if (strategy === "preserve") {
|
||||
return (
|
||||
@@ -122,7 +129,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -168,7 +175,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -214,7 +221,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -260,7 +267,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -275,6 +282,7 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
strategy,
|
||||
destinationOrg,
|
||||
starredReposOrg,
|
||||
starredReposMode,
|
||||
onStrategyChange,
|
||||
githubUsername,
|
||||
giteaUsername,
|
||||
@@ -339,7 +347,7 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
<span className="text-xs font-medium">Starred Repositories</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground pl-5">
|
||||
Always go to the configured starred repos organization and cannot be overridden.
|
||||
Follow your starred-repo mode and cannot be overridden per repository.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -415,6 +423,7 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
config={config}
|
||||
destinationOrg={destinationOrg}
|
||||
starredReposOrg={starredReposOrg}
|
||||
starredReposMode={starredReposMode}
|
||||
githubUsername={githubUsername}
|
||||
giteaUsername={giteaUsername}
|
||||
/>
|
||||
@@ -434,4 +443,4 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
</RadioGroup>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
@@ -4,6 +4,7 @@ import { GitFork } from "lucide-react";
|
||||
import { SiGithub, SiGitea } from "react-icons/si";
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
import { getStatusColor } from "@/lib/utils";
|
||||
import { buildGiteaWebUrl } from "@/lib/gitea-url";
|
||||
import { useGiteaConfig } from "@/hooks/useGiteaConfig";
|
||||
|
||||
interface RepositoryListProps {
|
||||
@@ -15,10 +16,6 @@ export function RepositoryList({ repositories }: RepositoryListProps) {
|
||||
|
||||
// Helper function to construct Gitea repository URL
|
||||
const getGiteaRepoUrl = (repository: Repository): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Only provide Gitea links for repositories that have been or are being mirrored
|
||||
const validStatuses = ['mirroring', 'mirrored', 'syncing', 'synced'];
|
||||
if (!validStatuses.includes(repository.status)) {
|
||||
@@ -37,12 +34,7 @@ export function RepositoryList({ repositories }: RepositoryListProps) {
|
||||
repoPath = `${owner}/${repository.name}`;
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
|
||||
return `${baseUrl}/${repoPath}`;
|
||||
return buildGiteaWebUrl(giteaConfig, repoPath);
|
||||
};
|
||||
|
||||
return (
|
||||
|
||||
@@ -159,7 +159,7 @@ function AppWithProviders({ page: initialPage }: AppProps) {
|
||||
{currentPage === "activity-log" && <ActivityLog />}
|
||||
</section>
|
||||
</div>
|
||||
<Toaster />
|
||||
<Toaster position="top-center" />
|
||||
</main>
|
||||
</NavigationContext.Provider>
|
||||
);
|
||||
|
||||
@@ -9,6 +9,7 @@ import type { FilterParams } from "@/types/filter";
|
||||
import Fuse from "fuse.js";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { buildGiteaWebUrl } from "@/lib/gitea-url";
|
||||
import { MirrorDestinationEditor } from "./MirrorDestinationEditor";
|
||||
import { useGiteaConfig } from "@/hooks/useGiteaConfig";
|
||||
import {
|
||||
@@ -67,10 +68,6 @@ export function OrganizationList({
|
||||
|
||||
// Helper function to construct Gitea organization URL
|
||||
const getGiteaOrgUrl = (organization: Organization): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Only provide Gitea links for organizations that have been mirrored
|
||||
const validStatuses = ['mirroring', 'mirrored'];
|
||||
if (!validStatuses.includes(organization.status || '')) {
|
||||
@@ -83,12 +80,7 @@ export function OrganizationList({
|
||||
return null;
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
|
||||
return `${baseUrl}/${orgName}`;
|
||||
return buildGiteaWebUrl(giteaConfig, orgName);
|
||||
};
|
||||
|
||||
const handleUpdateDestination = async (orgId: string, newDestination: string | null) => {
|
||||
@@ -247,6 +239,11 @@ export function OrganizationList({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Error message for failed orgs */}
|
||||
{org.status === "failed" && org.errorMessage && (
|
||||
<p className="text-xs text-destructive line-clamp-2">{org.errorMessage}</p>
|
||||
)}
|
||||
|
||||
{/* Destination override section */}
|
||||
<div>
|
||||
<MirrorDestinationEditor
|
||||
@@ -303,6 +300,13 @@ export function OrganizationList({
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Error message for failed orgs */}
|
||||
{org.status === "failed" && org.errorMessage && (
|
||||
<div className="mb-4 p-3 rounded-md bg-destructive/10 border border-destructive/20">
|
||||
<p className="text-sm text-destructive">{org.errorMessage}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Repository statistics */}
|
||||
<div className="mb-4">
|
||||
<div className="flex items-center gap-4 text-sm">
|
||||
@@ -312,7 +316,7 @@ export function OrganizationList({
|
||||
{org.repositoryCount === 1 ? "repository" : "repositories"}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
|
||||
{/* Repository breakdown - only show non-zero counts */}
|
||||
{(() => {
|
||||
const counts = [];
|
||||
@@ -325,7 +329,7 @@ export function OrganizationList({
|
||||
if (org.forkRepositoryCount && org.forkRepositoryCount > 0) {
|
||||
counts.push(`${org.forkRepositoryCount} ${org.forkRepositoryCount === 1 ? 'fork' : 'forks'}`);
|
||||
}
|
||||
|
||||
|
||||
return counts.length > 0 ? (
|
||||
<div className="flex items-center gap-3 text-xs text-muted-foreground">
|
||||
{counts.map((count, index) => (
|
||||
@@ -414,7 +418,7 @@ export function OrganizationList({
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
|
||||
{/* Dropdown menu for additional actions */}
|
||||
{org.status !== "mirroring" && (
|
||||
<DropdownMenu>
|
||||
@@ -425,7 +429,7 @@ export function OrganizationList({
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align="end">
|
||||
{org.status !== "ignored" && (
|
||||
<DropdownMenuItem
|
||||
<DropdownMenuItem
|
||||
onClick={() => org.id && onIgnore && onIgnore({ orgId: org.id, ignore: true })}
|
||||
>
|
||||
<Ban className="h-4 w-4 mr-2" />
|
||||
@@ -448,7 +452,7 @@ export function OrganizationList({
|
||||
</DropdownMenu>
|
||||
)}
|
||||
</div>
|
||||
|
||||
|
||||
<div className="flex items-center gap-2 justify-center">
|
||||
{(() => {
|
||||
const giteaUrl = getGiteaOrgUrl(org);
|
||||
|
||||
@@ -18,10 +18,12 @@ interface AddRepositoryDialogProps {
|
||||
repo,
|
||||
owner,
|
||||
force,
|
||||
destinationOrg,
|
||||
}: {
|
||||
repo: string;
|
||||
owner: string;
|
||||
force?: boolean;
|
||||
destinationOrg?: string;
|
||||
}) => Promise<void>;
|
||||
}
|
||||
|
||||
@@ -32,6 +34,7 @@ export default function AddRepositoryDialog({
|
||||
}: AddRepositoryDialogProps) {
|
||||
const [repo, setRepo] = useState<string>("");
|
||||
const [owner, setOwner] = useState<string>("");
|
||||
const [destinationOrg, setDestinationOrg] = useState<string>("");
|
||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||
const [error, setError] = useState<string>("");
|
||||
|
||||
@@ -40,6 +43,7 @@ export default function AddRepositoryDialog({
|
||||
setError("");
|
||||
setRepo("");
|
||||
setOwner("");
|
||||
setDestinationOrg("");
|
||||
}
|
||||
}, [isDialogOpen]);
|
||||
|
||||
@@ -54,11 +58,16 @@ export default function AddRepositoryDialog({
|
||||
try {
|
||||
setIsLoading(true);
|
||||
|
||||
await onAddRepository({ repo, owner });
|
||||
await onAddRepository({
|
||||
repo,
|
||||
owner,
|
||||
destinationOrg: destinationOrg.trim() || undefined,
|
||||
});
|
||||
|
||||
setError("");
|
||||
setRepo("");
|
||||
setOwner("");
|
||||
setDestinationOrg("");
|
||||
setIsDialogOpen(false);
|
||||
} catch (err: any) {
|
||||
setError(err?.message || "Failed to add repository.");
|
||||
@@ -124,6 +133,27 @@ export default function AddRepositoryDialog({
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="destinationOrg"
|
||||
className="block text-sm font-medium mb-1.5"
|
||||
>
|
||||
Target Organization{" "}
|
||||
<span className="text-muted-foreground font-normal">
|
||||
(optional)
|
||||
</span>
|
||||
</label>
|
||||
<input
|
||||
id="destinationOrg"
|
||||
type="text"
|
||||
value={destinationOrg}
|
||||
onChange={(e) => setDestinationOrg(e.target.value)}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
placeholder="Gitea org or user (uses default strategy if empty)"
|
||||
autoComplete="off"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{error && <p className="text-sm text-red-500 mt-1">{error}</p>}
|
||||
</div>
|
||||
|
||||
|
||||
@@ -28,9 +28,16 @@ export function InlineDestinationEditor({
|
||||
|
||||
// Determine the default destination based on repository properties and config
|
||||
const getDefaultDestination = () => {
|
||||
// Starred repos always go to the configured starredReposOrg
|
||||
if (repository.isStarred && giteaConfig?.starredReposOrg) {
|
||||
return giteaConfig.starredReposOrg;
|
||||
// Starred repos can use either dedicated org or preserved source owner
|
||||
if (repository.isStarred) {
|
||||
const starredReposMode = giteaConfig?.starredReposMode || "dedicated-org";
|
||||
if (starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
if (giteaConfig?.starredReposOrg) {
|
||||
return giteaConfig.starredReposOrg;
|
||||
}
|
||||
return "starred";
|
||||
}
|
||||
|
||||
// Check mirror strategy
|
||||
@@ -60,7 +67,7 @@ export function InlineDestinationEditor({
|
||||
const defaultDestination = getDefaultDestination();
|
||||
const currentDestination = repository.destinationOrg || defaultDestination;
|
||||
const hasOverride = repository.destinationOrg && repository.destinationOrg !== defaultDestination;
|
||||
const isStarredRepo = repository.isStarred && giteaConfig?.starredReposOrg;
|
||||
const isStarredRepo = repository.isStarred;
|
||||
|
||||
useEffect(() => {
|
||||
if (isEditing && inputRef.current) {
|
||||
@@ -184,4 +191,4 @@ export function InlineDestinationEditor({
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,24 +44,35 @@ import { toast } from "sonner";
|
||||
import type { SyncRepoRequest, SyncRepoResponse } from "@/types/sync";
|
||||
import { OwnerCombobox, OrganizationCombobox } from "./RepositoryComboboxes";
|
||||
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
|
||||
import type { ResetMetadataRequest, ResetMetadataResponse } from "@/types/reset-metadata";
|
||||
import AddRepositoryDialog from "./AddRepositoryDialog";
|
||||
|
||||
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
|
||||
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
||||
import { useNavigation } from "@/components/layout/MainLayout";
|
||||
|
||||
const REPOSITORY_SORT_OPTIONS = [
|
||||
{ value: "imported-desc", label: "Recently Imported" },
|
||||
{ value: "imported-asc", label: "Oldest Imported" },
|
||||
{ value: "updated-desc", label: "Recently Updated" },
|
||||
{ value: "updated-asc", label: "Oldest Updated" },
|
||||
{ value: "name-asc", label: "Name (A-Z)" },
|
||||
{ value: "name-desc", label: "Name (Z-A)" },
|
||||
] as const;
|
||||
|
||||
export default function Repository() {
|
||||
const [repositories, setRepositories] = useState<Repository[]>([]);
|
||||
const [isInitialLoading, setIsInitialLoading] = useState(true);
|
||||
const { user } = useAuth();
|
||||
const { registerRefreshCallback, isLiveEnabled } = useLiveRefresh();
|
||||
const { isGitHubConfigured, isFullyConfigured } = useConfigStatus();
|
||||
const { isGitHubConfigured, isFullyConfigured, autoMirrorStarred, githubOwner } = useConfigStatus();
|
||||
const { navigationKey } = useNavigation();
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: "",
|
||||
status: "",
|
||||
organization: "",
|
||||
owner: "",
|
||||
sort: "imported-desc",
|
||||
});
|
||||
const [isDialogOpen, setIsDialogOpen] = useState<boolean>(false);
|
||||
const [selectedRepoIds, setSelectedRepoIds] = useState<Set<string>>(new Set());
|
||||
@@ -232,10 +243,12 @@ export default function Repository() {
|
||||
// Filter out repositories that are already mirroring, mirrored, or ignored
|
||||
const eligibleRepos = repositories.filter(
|
||||
(repo) =>
|
||||
repo.status !== "mirroring" &&
|
||||
repo.status !== "mirrored" &&
|
||||
repo.status !== "mirroring" &&
|
||||
repo.status !== "mirrored" &&
|
||||
repo.status !== "ignored" && // Skip ignored repositories
|
||||
repo.id
|
||||
repo.id &&
|
||||
// Skip starred repos from other owners when autoMirrorStarred is disabled
|
||||
!(repo.isStarred && !autoMirrorStarred && repo.owner !== githubOwner)
|
||||
);
|
||||
|
||||
if (eligibleRepos.length === 0) {
|
||||
@@ -291,7 +304,7 @@ export default function Repository() {
|
||||
|
||||
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
const eligibleRepos = selectedRepos.filter(
|
||||
repo => repo.status === "imported" || repo.status === "failed"
|
||||
repo => repo.status === "imported" || repo.status === "failed" || repo.status === "pending-approval"
|
||||
);
|
||||
|
||||
if (eligibleRepos.length === 0) {
|
||||
@@ -300,7 +313,7 @@ export default function Repository() {
|
||||
}
|
||||
|
||||
const repoIds = eligibleRepos.map(repo => repo.id as string);
|
||||
|
||||
|
||||
setLoadingRepoIds(prev => {
|
||||
const newSet = new Set(prev);
|
||||
repoIds.forEach(id => newSet.add(id));
|
||||
@@ -378,6 +391,67 @@ export default function Repository() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleBulkRerunMetadata = async () => {
|
||||
if (selectedRepoIds.size === 0) return;
|
||||
|
||||
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
const eligibleRepos = selectedRepos.filter(
|
||||
repo => ["mirrored", "synced", "archived"].includes(repo.status)
|
||||
);
|
||||
|
||||
if (eligibleRepos.length === 0) {
|
||||
toast.info("No eligible repositories to re-run metadata in selection");
|
||||
return;
|
||||
}
|
||||
|
||||
const repoIds = eligibleRepos.map(repo => repo.id as string);
|
||||
|
||||
setLoadingRepoIds(prev => {
|
||||
const newSet = new Set(prev);
|
||||
repoIds.forEach(id => newSet.add(id));
|
||||
return newSet;
|
||||
});
|
||||
|
||||
try {
|
||||
const resetPayload: ResetMetadataRequest = {
|
||||
userId: user?.id || "",
|
||||
repositoryIds: repoIds,
|
||||
};
|
||||
|
||||
const resetResponse = await apiRequest<ResetMetadataResponse>("/job/reset-metadata", {
|
||||
method: "POST",
|
||||
data: resetPayload,
|
||||
});
|
||||
|
||||
if (!resetResponse.success) {
|
||||
showErrorToast(resetResponse.error || "Failed to reset metadata state", toast);
|
||||
return;
|
||||
}
|
||||
|
||||
const syncResponse = await apiRequest<SyncRepoResponse>("/job/sync-repo", {
|
||||
method: "POST",
|
||||
data: { userId: user?.id, repositoryIds: repoIds },
|
||||
});
|
||||
|
||||
if (syncResponse.success) {
|
||||
toast.success(`Re-running metadata for ${repoIds.length} repositories`);
|
||||
setRepositories(prevRepos =>
|
||||
prevRepos.map(repo => {
|
||||
const updated = syncResponse.repositories.find(r => r.id === repo.id);
|
||||
return updated ? updated : repo;
|
||||
})
|
||||
);
|
||||
setSelectedRepoIds(new Set());
|
||||
} else {
|
||||
showErrorToast(syncResponse.error || "Error starting metadata re-sync", toast);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setLoadingRepoIds(new Set());
|
||||
}
|
||||
};
|
||||
|
||||
const handleBulkRetry = async () => {
|
||||
if (selectedRepoIds.size === 0) return;
|
||||
|
||||
@@ -632,14 +706,90 @@ export default function Repository() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleApproveSyncAction = async ({ repoId }: { repoId: string }) => {
|
||||
try {
|
||||
if (!user || !user.id) return;
|
||||
setLoadingRepoIds((prev) => new Set(prev).add(repoId));
|
||||
|
||||
const response = await apiRequest<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
repositories: Repository[];
|
||||
}>("/job/approve-sync", {
|
||||
method: "POST",
|
||||
data: { repositoryIds: [repoId], action: "approve" },
|
||||
});
|
||||
|
||||
if (response.success) {
|
||||
toast.success("Sync approved — backup + sync started");
|
||||
setRepositories((prevRepos) =>
|
||||
prevRepos.map((repo) => {
|
||||
const updated = response.repositories.find((r) => r.id === repo.id);
|
||||
return updated ? updated : repo;
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
showErrorToast(response.error || "Error approving sync", toast);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setLoadingRepoIds((prev) => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(repoId);
|
||||
return newSet;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleDismissSyncAction = async ({ repoId }: { repoId: string }) => {
|
||||
try {
|
||||
if (!user || !user.id) return;
|
||||
setLoadingRepoIds((prev) => new Set(prev).add(repoId));
|
||||
|
||||
const response = await apiRequest<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
repositories: Repository[];
|
||||
}>("/job/approve-sync", {
|
||||
method: "POST",
|
||||
data: { repositoryIds: [repoId], action: "dismiss" },
|
||||
});
|
||||
|
||||
if (response.success) {
|
||||
toast.success("Force-push alert dismissed");
|
||||
setRepositories((prevRepos) =>
|
||||
prevRepos.map((repo) => {
|
||||
const updated = response.repositories.find((r) => r.id === repo.id);
|
||||
return updated ? updated : repo;
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
showErrorToast(response.error || "Error dismissing alert", toast);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setLoadingRepoIds((prev) => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(repoId);
|
||||
return newSet;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleAddRepository = async ({
|
||||
repo,
|
||||
owner,
|
||||
force = false,
|
||||
destinationOrg,
|
||||
}: {
|
||||
repo: string;
|
||||
owner: string;
|
||||
force?: boolean;
|
||||
destinationOrg?: string;
|
||||
}) => {
|
||||
if (!user || !user.id) {
|
||||
return;
|
||||
@@ -674,6 +824,7 @@ export default function Repository() {
|
||||
repo: trimmedRepo,
|
||||
owner: trimmedOwner,
|
||||
force,
|
||||
...(destinationOrg ? { destinationOrg } : {}),
|
||||
};
|
||||
|
||||
const response = await apiRequest<AddRepositoriesApiResponse>(
|
||||
@@ -798,7 +949,7 @@ export default function Repository() {
|
||||
const actions = [];
|
||||
|
||||
// Check if any selected repos can be mirrored
|
||||
if (selectedRepos.some(repo => repo.status === "imported" || repo.status === "failed")) {
|
||||
if (selectedRepos.some(repo => repo.status === "imported" || repo.status === "failed" || repo.status === "pending-approval")) {
|
||||
actions.push('mirror');
|
||||
}
|
||||
|
||||
@@ -806,6 +957,10 @@ export default function Repository() {
|
||||
if (selectedRepos.some(repo => repo.status === "mirrored" || repo.status === "synced")) {
|
||||
actions.push('sync');
|
||||
}
|
||||
|
||||
if (selectedRepos.some(repo => ["mirrored", "synced", "archived"].includes(repo.status))) {
|
||||
actions.push('rerun-metadata');
|
||||
}
|
||||
|
||||
// Check if any selected repos are failed
|
||||
if (selectedRepos.some(repo => repo.status === "failed")) {
|
||||
@@ -832,8 +987,9 @@ export default function Repository() {
|
||||
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
|
||||
return {
|
||||
mirror: selectedRepos.filter(repo => repo.status === "imported" || repo.status === "failed").length,
|
||||
mirror: selectedRepos.filter(repo => repo.status === "imported" || repo.status === "failed" || repo.status === "pending-approval").length,
|
||||
sync: selectedRepos.filter(repo => repo.status === "mirrored" || repo.status === "synced").length,
|
||||
rerunMetadata: selectedRepos.filter(repo => ["mirrored", "synced", "archived"].includes(repo.status)).length,
|
||||
retry: selectedRepos.filter(repo => repo.status === "failed").length,
|
||||
ignore: selectedRepos.filter(repo => repo.status !== "ignored").length,
|
||||
include: selectedRepos.filter(repo => repo.status === "ignored").length,
|
||||
@@ -853,6 +1009,7 @@ export default function Repository() {
|
||||
status: "",
|
||||
organization: "",
|
||||
owner: "",
|
||||
sort: filter.sort || "imported-desc",
|
||||
});
|
||||
};
|
||||
|
||||
@@ -993,6 +1150,33 @@ export default function Repository() {
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
{/* Sort Filter */}
|
||||
<div className="space-y-2">
|
||||
<label className="text-sm font-medium flex items-center gap-2">
|
||||
<span className="text-muted-foreground">Sort</span>
|
||||
</label>
|
||||
<Select
|
||||
value={filter.sort || "imported-desc"}
|
||||
onValueChange={(value) =>
|
||||
setFilter((prev) => ({
|
||||
...prev,
|
||||
sort: value,
|
||||
}))
|
||||
}
|
||||
>
|
||||
<SelectTrigger className="w-full h-10">
|
||||
<SelectValue placeholder="Sort repositories" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{REPOSITORY_SORT_OPTIONS.map((option) => (
|
||||
<SelectItem key={option.value} value={option.value}>
|
||||
{option.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<DrawerFooter className="gap-2 px-4 pt-2 pb-4 border-t">
|
||||
@@ -1095,6 +1279,27 @@ export default function Repository() {
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
<Select
|
||||
value={filter.sort || "imported-desc"}
|
||||
onValueChange={(value) =>
|
||||
setFilter((prev) => ({
|
||||
...prev,
|
||||
sort: value,
|
||||
}))
|
||||
}
|
||||
>
|
||||
<SelectTrigger className="w-[190px] h-10">
|
||||
<SelectValue placeholder="Sort repositories" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{REPOSITORY_SORT_OPTIONS.map((option) => (
|
||||
<SelectItem key={option.value} value={option.value}>
|
||||
{option.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
@@ -1157,6 +1362,18 @@ export default function Repository() {
|
||||
Sync ({actionCounts.sync})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('rerun-metadata') && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="default"
|
||||
onClick={handleBulkRerunMetadata}
|
||||
disabled={loadingRepoIds.size > 0}
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Re-run Metadata ({actionCounts.rerunMetadata})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('retry') && (
|
||||
<Button
|
||||
@@ -1240,6 +1457,18 @@ export default function Repository() {
|
||||
<span className="hidden sm:inline">Sync </span>({actionCounts.sync})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('rerun-metadata') && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleBulkRerunMetadata}
|
||||
disabled={loadingRepoIds.size > 0}
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Re-run Metadata ({actionCounts.rerunMetadata})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('retry') && (
|
||||
<Button
|
||||
@@ -1315,6 +1544,8 @@ export default function Repository() {
|
||||
await fetchRepositories(false);
|
||||
}}
|
||||
onDelete={handleRequestDeleteRepository}
|
||||
onApproveSync={handleApproveSyncAction}
|
||||
onDismissSync={handleDismissSyncAction}
|
||||
/>
|
||||
)}
|
||||
|
||||
|
||||
@@ -1,11 +1,20 @@
|
||||
import { useMemo, useRef } from "react";
|
||||
import Fuse from "fuse.js";
|
||||
import {
|
||||
getCoreRowModel,
|
||||
getFilteredRowModel,
|
||||
getSortedRowModel,
|
||||
useReactTable,
|
||||
type ColumnDef,
|
||||
type ColumnFiltersState,
|
||||
type SortingState,
|
||||
} from "@tanstack/react-table";
|
||||
import { useVirtualizer } from "@tanstack/react-virtual";
|
||||
import { FlipHorizontal, GitFork, RefreshCw, RotateCcw, Star, Lock, Ban, Check, ChevronDown, Trash2 } from "lucide-react";
|
||||
import { FlipHorizontal, GitFork, RefreshCw, RotateCcw, Star, Lock, Ban, Check, ChevronDown, Trash2, X } from "lucide-react";
|
||||
import { SiGithub, SiGitea } from "react-icons/si";
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { formatDate, formatLastSyncTime, getStatusColor } from "@/lib/utils";
|
||||
import { formatLastSyncTime } from "@/lib/utils";
|
||||
import { buildGiteaWebUrl } from "@/lib/gitea-url";
|
||||
import type { FilterParams } from "@/types/filter";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { useGiteaConfig } from "@/hooks/useGiteaConfig";
|
||||
@@ -42,6 +51,32 @@ interface RepositoryTableProps {
|
||||
onSelectionChange: (selectedIds: Set<string>) => void;
|
||||
onRefresh?: () => Promise<void>;
|
||||
onDelete?: (repoId: string) => void;
|
||||
onApproveSync?: ({ repoId }: { repoId: string }) => Promise<void>;
|
||||
onDismissSync?: ({ repoId }: { repoId: string }) => Promise<void>;
|
||||
}
|
||||
|
||||
function getTimestamp(value: Date | string | null | undefined): number {
|
||||
if (!value) return 0;
|
||||
const timestamp = new Date(value).getTime();
|
||||
return Number.isNaN(timestamp) ? 0 : timestamp;
|
||||
}
|
||||
|
||||
function getTableSorting(sortOrder: string | undefined): SortingState {
|
||||
switch (sortOrder ?? "imported-desc") {
|
||||
case "imported-asc":
|
||||
return [{ id: "importedAt", desc: false }];
|
||||
case "updated-desc":
|
||||
return [{ id: "updatedAt", desc: true }];
|
||||
case "updated-asc":
|
||||
return [{ id: "updatedAt", desc: false }];
|
||||
case "name-asc":
|
||||
return [{ id: "fullName", desc: false }];
|
||||
case "name-desc":
|
||||
return [{ id: "fullName", desc: true }];
|
||||
case "imported-desc":
|
||||
default:
|
||||
return [{ id: "importedAt", desc: true }];
|
||||
}
|
||||
}
|
||||
|
||||
export default function RepositoryTable({
|
||||
@@ -59,6 +94,8 @@ export default function RepositoryTable({
|
||||
onSelectionChange,
|
||||
onRefresh,
|
||||
onDelete,
|
||||
onApproveSync,
|
||||
onDismissSync,
|
||||
}: RepositoryTableProps) {
|
||||
const tableParentRef = useRef<HTMLDivElement>(null);
|
||||
const { giteaConfig } = useGiteaConfig();
|
||||
@@ -88,10 +125,6 @@ export default function RepositoryTable({
|
||||
|
||||
// Helper function to construct Gitea repository URL
|
||||
const getGiteaRepoUrl = (repository: Repository): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Only provide Gitea links for repositories that have been or are being mirrored
|
||||
const validStatuses = ['mirroring', 'mirrored', 'syncing', 'synced', 'archived'];
|
||||
if (!validStatuses.includes(repository.status)) {
|
||||
@@ -108,48 +141,92 @@ export default function RepositoryTable({
|
||||
repoPath = `${owner}/${repository.name}`;
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
|
||||
return `${baseUrl}/${repoPath}`;
|
||||
return buildGiteaWebUrl(giteaConfig, repoPath);
|
||||
};
|
||||
|
||||
const hasAnyFilter = Object.values(filter).some(
|
||||
(val) => val?.toString().trim() !== ""
|
||||
);
|
||||
const hasAnyFilter = [
|
||||
filter.searchTerm,
|
||||
filter.status,
|
||||
filter.owner,
|
||||
filter.organization,
|
||||
].some((val) => val?.toString().trim() !== "");
|
||||
|
||||
const filteredRepositories = useMemo(() => {
|
||||
let result = repositories;
|
||||
const columnFilters = useMemo<ColumnFiltersState>(() => {
|
||||
const next: ColumnFiltersState = [];
|
||||
|
||||
if (filter.status) {
|
||||
result = result.filter((repo) => repo.status === filter.status);
|
||||
next.push({ id: "status", value: filter.status });
|
||||
}
|
||||
|
||||
if (filter.owner) {
|
||||
result = result.filter((repo) => repo.owner === filter.owner);
|
||||
next.push({ id: "owner", value: filter.owner });
|
||||
}
|
||||
|
||||
if (filter.organization) {
|
||||
result = result.filter(
|
||||
(repo) => repo.organization === filter.organization
|
||||
);
|
||||
next.push({ id: "organization", value: filter.organization });
|
||||
}
|
||||
|
||||
if (filter.searchTerm) {
|
||||
const fuse = new Fuse(result, {
|
||||
keys: ["name", "fullName", "owner", "organization"],
|
||||
threshold: 0.3,
|
||||
});
|
||||
result = fuse.search(filter.searchTerm).map((res) => res.item);
|
||||
}
|
||||
return next;
|
||||
}, [filter.status, filter.owner, filter.organization]);
|
||||
|
||||
return result;
|
||||
}, [repositories, filter]);
|
||||
const sorting = useMemo(() => getTableSorting(filter.sort), [filter.sort]);
|
||||
|
||||
const columns = useMemo<ColumnDef<Repository>[]>(
|
||||
() => [
|
||||
{
|
||||
id: "fullName",
|
||||
accessorFn: (row) => row.fullName,
|
||||
},
|
||||
{
|
||||
id: "owner",
|
||||
accessorFn: (row) => row.owner,
|
||||
filterFn: "equalsString",
|
||||
},
|
||||
{
|
||||
id: "organization",
|
||||
accessorFn: (row) => row.organization ?? "",
|
||||
filterFn: "equalsString",
|
||||
},
|
||||
{
|
||||
id: "status",
|
||||
accessorFn: (row) => row.status,
|
||||
filterFn: "equalsString",
|
||||
},
|
||||
{
|
||||
id: "importedAt",
|
||||
accessorFn: (row) => getTimestamp(row.importedAt),
|
||||
enableGlobalFilter: false,
|
||||
enableColumnFilter: false,
|
||||
},
|
||||
{
|
||||
id: "updatedAt",
|
||||
accessorFn: (row) => getTimestamp(row.updatedAt),
|
||||
enableGlobalFilter: false,
|
||||
enableColumnFilter: false,
|
||||
},
|
||||
],
|
||||
[]
|
||||
);
|
||||
|
||||
const table = useReactTable({
|
||||
data: repositories,
|
||||
columns,
|
||||
state: {
|
||||
globalFilter: filter.searchTerm ?? "",
|
||||
columnFilters,
|
||||
sorting,
|
||||
},
|
||||
getCoreRowModel: getCoreRowModel(),
|
||||
getFilteredRowModel: getFilteredRowModel(),
|
||||
getSortedRowModel: getSortedRowModel(),
|
||||
});
|
||||
|
||||
const visibleRepositories = table
|
||||
.getRowModel()
|
||||
.rows.map((row) => row.original);
|
||||
|
||||
const rowVirtualizer = useVirtualizer({
|
||||
count: filteredRepositories.length,
|
||||
count: visibleRepositories.length,
|
||||
getScrollElement: () => tableParentRef.current,
|
||||
estimateSize: () => 65,
|
||||
overscan: 5,
|
||||
@@ -158,7 +235,11 @@ export default function RepositoryTable({
|
||||
// Selection handlers
|
||||
const handleSelectAll = (checked: boolean) => {
|
||||
if (checked) {
|
||||
const allIds = new Set(filteredRepositories.map(repo => repo.id).filter((id): id is string => !!id));
|
||||
const allIds = new Set(
|
||||
visibleRepositories
|
||||
.map((repo) => repo.id)
|
||||
.filter((id): id is string => !!id)
|
||||
);
|
||||
onSelectionChange(allIds);
|
||||
} else {
|
||||
onSelectionChange(new Set());
|
||||
@@ -175,8 +256,9 @@ export default function RepositoryTable({
|
||||
onSelectionChange(newSelection);
|
||||
};
|
||||
|
||||
const isAllSelected = filteredRepositories.length > 0 &&
|
||||
filteredRepositories.every(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
const isAllSelected =
|
||||
visibleRepositories.length > 0 &&
|
||||
visibleRepositories.every((repo) => repo.id && selectedRepoIds.has(repo.id));
|
||||
const isPartiallySelected = selectedRepoIds.size > 0 && !isAllSelected;
|
||||
|
||||
// Mobile card layout for repository
|
||||
@@ -231,7 +313,7 @@ export default function RepositoryTable({
|
||||
|
||||
{/* Status & Last Mirrored */}
|
||||
<div className="flex items-center justify-between">
|
||||
<Badge
|
||||
<Badge
|
||||
className={`capitalize
|
||||
${repo.status === 'imported' ? 'bg-yellow-500/10 text-yellow-600 hover:bg-yellow-500/20 dark:text-yellow-400' :
|
||||
repo.status === 'mirrored' || repo.status === 'synced' ? 'bg-green-500/10 text-green-600 hover:bg-green-500/20 dark:text-green-400' :
|
||||
@@ -239,13 +321,14 @@ export default function RepositoryTable({
|
||||
repo.status === 'failed' ? 'bg-red-500/10 text-red-600 hover:bg-red-500/20 dark:text-red-400' :
|
||||
repo.status === 'ignored' ? 'bg-gray-500/10 text-gray-600 hover:bg-gray-500/20 dark:text-gray-400' :
|
||||
repo.status === 'skipped' ? 'bg-orange-500/10 text-orange-600 hover:bg-orange-500/20 dark:text-orange-400' :
|
||||
repo.status === 'pending-approval' ? 'bg-amber-500/10 text-amber-600 hover:bg-amber-500/20 dark:text-amber-400' :
|
||||
'bg-muted hover:bg-muted/80'}`}
|
||||
variant="secondary"
|
||||
>
|
||||
{repo.status}
|
||||
</Badge>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{formatLastSyncTime(repo.lastMirrored)}
|
||||
{formatLastSyncTime(repo.lastMirrored ?? null)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -316,7 +399,40 @@ export default function RepositoryTable({
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{repo.status === "pending-approval" && (
|
||||
<div className="flex gap-2 w-full">
|
||||
<Button
|
||||
size="default"
|
||||
variant="default"
|
||||
onClick={() => repo.id && onApproveSync?.({ repoId: repo.id })}
|
||||
disabled={isLoading}
|
||||
className="flex-1 h-10"
|
||||
>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<Check className="h-4 w-4 mr-2 animate-spin" />
|
||||
Approving...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Check className="h-4 w-4 mr-2" />
|
||||
Approve Sync
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
<Button
|
||||
size="default"
|
||||
variant="outline"
|
||||
onClick={() => repo.id && onDismissSync?.({ repoId: repo.id })}
|
||||
disabled={isLoading}
|
||||
className="flex-1 h-10"
|
||||
>
|
||||
<X className="h-4 w-4 mr-2" />
|
||||
Dismiss
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Ignore/Include button */}
|
||||
{repo.status === "ignored" ? (
|
||||
<Button
|
||||
@@ -341,7 +457,7 @@ export default function RepositoryTable({
|
||||
Ignore Repository
|
||||
</Button>
|
||||
)}
|
||||
|
||||
|
||||
{/* External links */}
|
||||
<div className="flex gap-2">
|
||||
<Button variant="outline" size="default" className="flex-1 h-10 min-w-0" asChild>
|
||||
@@ -472,7 +588,7 @@ export default function RepositoryTable({
|
||||
{hasAnyFilter && (
|
||||
<div className="mb-4 flex items-center gap-2">
|
||||
<span className="text-sm text-muted-foreground">
|
||||
Showing {filteredRepositories.length} of {repositories.length} repositories
|
||||
Showing {visibleRepositories.length} of {repositories.length} repositories
|
||||
</span>
|
||||
<Button
|
||||
variant="ghost"
|
||||
@@ -483,6 +599,7 @@ export default function RepositoryTable({
|
||||
status: "",
|
||||
organization: "",
|
||||
owner: "",
|
||||
sort: filter.sort || "imported-desc",
|
||||
})
|
||||
}
|
||||
>
|
||||
@@ -491,7 +608,7 @@ export default function RepositoryTable({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{filteredRepositories.length === 0 ? (
|
||||
{visibleRepositories.length === 0 ? (
|
||||
<div className="text-center py-8">
|
||||
<p className="text-muted-foreground">
|
||||
{hasAnyFilter
|
||||
@@ -512,12 +629,12 @@ export default function RepositoryTable({
|
||||
className="h-5 w-5"
|
||||
/>
|
||||
<span className="text-sm font-medium">
|
||||
Select All ({filteredRepositories.length})
|
||||
Select All ({visibleRepositories.length})
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Repository cards */}
|
||||
{filteredRepositories.map((repo) => (
|
||||
{visibleRepositories.map((repo) => (
|
||||
<RepositoryCard key={repo.id} repo={repo} />
|
||||
))}
|
||||
</div>
|
||||
@@ -563,13 +680,14 @@ export default function RepositoryTable({
|
||||
position: "relative",
|
||||
}}
|
||||
>
|
||||
{rowVirtualizer.getVirtualItems().map((virtualRow, index) => {
|
||||
const repo = filteredRepositories[virtualRow.index];
|
||||
{rowVirtualizer.getVirtualItems().map((virtualRow) => {
|
||||
const repo = visibleRepositories[virtualRow.index];
|
||||
if (!repo) return null;
|
||||
const isLoading = loadingRepoIds.has(repo.id ?? "");
|
||||
|
||||
return (
|
||||
<div
|
||||
key={index}
|
||||
key={virtualRow.key}
|
||||
ref={rowVirtualizer.measureElement}
|
||||
style={{
|
||||
position: "absolute",
|
||||
@@ -632,7 +750,7 @@ export default function RepositoryTable({
|
||||
{/* Last Mirrored */}
|
||||
<div className="h-full p-3 flex items-center flex-[1]">
|
||||
<p className="text-sm">
|
||||
{formatLastSyncTime(repo.lastMirrored)}
|
||||
{formatLastSyncTime(repo.lastMirrored ?? null)}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -642,7 +760,7 @@ export default function RepositoryTable({
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Badge
|
||||
<Badge
|
||||
variant="destructive"
|
||||
className="cursor-help capitalize"
|
||||
>
|
||||
@@ -655,7 +773,7 @@ export default function RepositoryTable({
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
) : (
|
||||
<Badge
|
||||
<Badge
|
||||
className={`capitalize
|
||||
${repo.status === 'imported' ? 'bg-yellow-500/10 text-yellow-600 hover:bg-yellow-500/20 dark:text-yellow-400' :
|
||||
repo.status === 'mirrored' || repo.status === 'synced' ? 'bg-green-500/10 text-green-600 hover:bg-green-500/20 dark:text-green-400' :
|
||||
@@ -663,6 +781,7 @@ export default function RepositoryTable({
|
||||
repo.status === 'failed' ? 'bg-red-500/10 text-red-600 hover:bg-red-500/20 dark:text-red-400' :
|
||||
repo.status === 'ignored' ? 'bg-gray-500/10 text-gray-600 hover:bg-gray-500/20 dark:text-gray-400' :
|
||||
repo.status === 'skipped' ? 'bg-orange-500/10 text-orange-600 hover:bg-orange-500/20 dark:text-orange-400' :
|
||||
repo.status === 'pending-approval' ? 'bg-amber-500/10 text-amber-600 hover:bg-amber-500/20 dark:text-amber-400' :
|
||||
'bg-muted hover:bg-muted/80'}`}
|
||||
variant="secondary"
|
||||
>
|
||||
@@ -680,6 +799,8 @@ export default function RepositoryTable({
|
||||
onRetry={() => onRetry({ repoId: repo.id ?? "" })}
|
||||
onSkip={(skip) => onSkip({ repoId: repo.id ?? "", skip })}
|
||||
onDelete={onDelete && repo.id ? () => onDelete(repo.id as string) : undefined}
|
||||
onApproveSync={onApproveSync ? () => onApproveSync({ repoId: repo.id ?? "" }) : undefined}
|
||||
onDismissSync={onDismissSync ? () => onDismissSync({ repoId: repo.id ?? "" }) : undefined}
|
||||
/>
|
||||
</div>
|
||||
{/* Links */}
|
||||
@@ -743,7 +864,7 @@ export default function RepositoryTable({
|
||||
<div className={`h-1.5 w-1.5 rounded-full ${isLiveActive ? 'bg-emerald-500' : 'bg-primary'}`} />
|
||||
<span className="text-sm font-medium text-foreground">
|
||||
{hasAnyFilter
|
||||
? `Showing ${filteredRepositories.length} of ${repositories.length} repositories`
|
||||
? `Showing ${visibleRepositories.length} of ${repositories.length} repositories`
|
||||
: `${repositories.length} ${repositories.length === 1 ? 'repository' : 'repositories'} total`}
|
||||
</span>
|
||||
</div>
|
||||
@@ -791,6 +912,8 @@ function RepoActionButton({
|
||||
onRetry,
|
||||
onSkip,
|
||||
onDelete,
|
||||
onApproveSync,
|
||||
onDismissSync,
|
||||
}: {
|
||||
repo: { id: string; status: string };
|
||||
isLoading: boolean;
|
||||
@@ -799,7 +922,36 @@ function RepoActionButton({
|
||||
onRetry: () => void;
|
||||
onSkip: (skip: boolean) => void;
|
||||
onDelete?: () => void;
|
||||
onApproveSync?: () => void;
|
||||
onDismissSync?: () => void;
|
||||
}) {
|
||||
// For pending-approval repos, show approve/dismiss actions
|
||||
if (repo.status === "pending-approval") {
|
||||
return (
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="default"
|
||||
size="sm"
|
||||
disabled={isLoading}
|
||||
onClick={onApproveSync}
|
||||
className="min-w-[70px]"
|
||||
>
|
||||
<Check className="h-4 w-4 mr-1" />
|
||||
Approve
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
disabled={isLoading}
|
||||
onClick={onDismissSync}
|
||||
>
|
||||
<X className="h-4 w-4 mr-1" />
|
||||
Dismiss
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// For ignored repos, show an "Include" action
|
||||
if (repo.status === "ignored") {
|
||||
return (
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
import { defineCollection, z } from 'astro:content';
|
||||
|
||||
// Export empty collections since docs have been moved
|
||||
export const collections = {};
|
||||
@@ -9,6 +9,8 @@ interface ConfigStatus {
|
||||
isFullyConfigured: boolean;
|
||||
isLoading: boolean;
|
||||
error: string | null;
|
||||
autoMirrorStarred: boolean;
|
||||
githubOwner: string;
|
||||
}
|
||||
|
||||
// Cache to prevent duplicate API calls across components
|
||||
@@ -33,6 +35,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured: false,
|
||||
isLoading: true,
|
||||
error: null,
|
||||
autoMirrorStarred: false,
|
||||
githubOwner: '',
|
||||
});
|
||||
|
||||
// Track if this hook has already checked config to prevent multiple calls
|
||||
@@ -46,6 +50,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured: false,
|
||||
isLoading: false,
|
||||
error: 'No user found',
|
||||
autoMirrorStarred: false,
|
||||
githubOwner: '',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -78,6 +84,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
autoMirrorStarred: configResponse?.advancedOptions?.autoMirrorStarred ?? false,
|
||||
githubOwner: configResponse?.githubConfig?.username ?? '',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -119,6 +127,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
autoMirrorStarred: configResponse?.advancedOptions?.autoMirrorStarred ?? false,
|
||||
githubOwner: configResponse?.githubConfig?.username ?? '',
|
||||
});
|
||||
|
||||
hasCheckedRef.current = true;
|
||||
@@ -129,6 +139,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured: false,
|
||||
isLoading: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to check configuration',
|
||||
autoMirrorStarred: false,
|
||||
githubOwner: '',
|
||||
});
|
||||
hasCheckedRef.current = true;
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ const FILTER_KEYS: (keyof FilterParams)[] = [
|
||||
"membershipRole",
|
||||
"owner",
|
||||
"organization",
|
||||
"sort",
|
||||
"type",
|
||||
"name",
|
||||
];
|
||||
|
||||
@@ -91,35 +91,17 @@ export const giteaApi = {
|
||||
|
||||
// Health API
|
||||
export interface HealthResponse {
|
||||
status: "ok" | "error";
|
||||
status: "ok" | "error" | "degraded";
|
||||
timestamp: string;
|
||||
version: string;
|
||||
latestVersion: string;
|
||||
updateAvailable: boolean;
|
||||
database: {
|
||||
connected: boolean;
|
||||
message: string;
|
||||
};
|
||||
system: {
|
||||
uptime: {
|
||||
startTime: string;
|
||||
uptimeMs: number;
|
||||
formatted: string;
|
||||
};
|
||||
memory: {
|
||||
rss: string;
|
||||
heapTotal: string;
|
||||
heapUsed: string;
|
||||
external: string;
|
||||
systemTotal: string;
|
||||
systemFree: string;
|
||||
};
|
||||
os: {
|
||||
platform: string;
|
||||
version: string;
|
||||
arch: string;
|
||||
};
|
||||
env: string;
|
||||
recovery?: {
|
||||
status: string;
|
||||
jobsNeedingRecovery: number;
|
||||
};
|
||||
error?: string;
|
||||
}
|
||||
|
||||
66
src/lib/auth-guards.test.ts
Normal file
66
src/lib/auth-guards.test.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { describe, expect, mock, test } from "bun:test";
|
||||
|
||||
const getSessionMock = mock(async () => null);
|
||||
|
||||
mock.module("@/lib/auth", () => ({
|
||||
auth: {
|
||||
api: {
|
||||
getSession: getSessionMock,
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
import { requireAuthenticatedUserId } from "./auth-guards";
|
||||
|
||||
describe("requireAuthenticatedUserId", () => {
|
||||
test("returns user id from locals session without calling auth api", async () => {
|
||||
getSessionMock.mockImplementation(async () => {
|
||||
throw new Error("should not be called");
|
||||
});
|
||||
|
||||
const result = await requireAuthenticatedUserId({
|
||||
request: new Request("http://localhost/test"),
|
||||
locals: {
|
||||
session: { userId: "local-user-id" },
|
||||
} as any,
|
||||
});
|
||||
|
||||
expect("userId" in result).toBe(true);
|
||||
if ("userId" in result) {
|
||||
expect(result.userId).toBe("local-user-id");
|
||||
}
|
||||
});
|
||||
|
||||
test("returns user id from auth session when locals are empty", async () => {
|
||||
getSessionMock.mockImplementation(async () => ({
|
||||
user: { id: "session-user-id" },
|
||||
session: { id: "session-id" },
|
||||
}));
|
||||
|
||||
const result = await requireAuthenticatedUserId({
|
||||
request: new Request("http://localhost/test"),
|
||||
locals: {} as any,
|
||||
});
|
||||
|
||||
expect("userId" in result).toBe(true);
|
||||
if ("userId" in result) {
|
||||
expect(result.userId).toBe("session-user-id");
|
||||
}
|
||||
});
|
||||
|
||||
test("returns unauthorized response when auth lookup throws", async () => {
|
||||
getSessionMock.mockImplementation(async () => {
|
||||
throw new Error("session provider unavailable");
|
||||
});
|
||||
|
||||
const result = await requireAuthenticatedUserId({
|
||||
request: new Request("http://localhost/test"),
|
||||
locals: {} as any,
|
||||
});
|
||||
|
||||
expect("response" in result).toBe(true);
|
||||
if ("response" in result) {
|
||||
expect(result.response.status).toBe(401);
|
||||
}
|
||||
});
|
||||
});
|
||||
45
src/lib/auth-guards.ts
Normal file
45
src/lib/auth-guards.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import type { APIContext } from "astro";
|
||||
import { auth } from "@/lib/auth";
|
||||
|
||||
function unauthorizedResponse() {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Unauthorized",
|
||||
}),
|
||||
{
|
||||
status: 401,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures request is authenticated and returns the authenticated user ID.
|
||||
* Never trust client-provided userId for authorization decisions.
|
||||
*/
|
||||
export async function requireAuthenticatedUserId(
|
||||
context: Pick<APIContext, "request" | "locals">
|
||||
): Promise<{ userId: string } | { response: Response }> {
|
||||
const localUserId =
|
||||
context.locals?.session?.userId || context.locals?.user?.id;
|
||||
|
||||
if (localUserId) {
|
||||
return { userId: localUserId };
|
||||
}
|
||||
|
||||
let session: Awaited<ReturnType<typeof auth.api.getSession>> | null = null;
|
||||
try {
|
||||
session = await auth.api.getSession({
|
||||
headers: context.request.headers,
|
||||
});
|
||||
} catch {
|
||||
return { response: unauthorizedResponse() };
|
||||
}
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return { response: unauthorizedResponse() };
|
||||
}
|
||||
|
||||
return { userId: session.user.id };
|
||||
}
|
||||
119
src/lib/auth-origins.test.ts
Normal file
119
src/lib/auth-origins.test.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
|
||||
import { resolveTrustedOrigins } from "./auth";
|
||||
|
||||
// Helper to create a mock Request with specific headers
|
||||
function mockRequest(headers: Record<string, string>): Request {
|
||||
return new Request("http://localhost:4321/api/auth/sign-in", {
|
||||
headers: new Headers(headers),
|
||||
});
|
||||
}
|
||||
|
||||
describe("resolveTrustedOrigins", () => {
|
||||
const savedEnv: Record<string, string | undefined> = {};
|
||||
|
||||
beforeEach(() => {
|
||||
// Save and clear relevant env vars
|
||||
for (const key of ["BETTER_AUTH_URL", "BETTER_AUTH_TRUSTED_ORIGINS"]) {
|
||||
savedEnv[key] = process.env[key];
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore env vars
|
||||
for (const [key, val] of Object.entries(savedEnv)) {
|
||||
if (val === undefined) delete process.env[key];
|
||||
else process.env[key] = val;
|
||||
}
|
||||
});
|
||||
|
||||
test("includes localhost defaults when called without request", async () => {
|
||||
const origins = await resolveTrustedOrigins();
|
||||
expect(origins).toContain("http://localhost:4321");
|
||||
expect(origins).toContain("http://localhost:8080");
|
||||
});
|
||||
|
||||
test("includes BETTER_AUTH_URL from env", async () => {
|
||||
process.env.BETTER_AUTH_URL = "https://gitea-mirror.example.com";
|
||||
const origins = await resolveTrustedOrigins();
|
||||
expect(origins).toContain("https://gitea-mirror.example.com");
|
||||
});
|
||||
|
||||
test("includes BETTER_AUTH_TRUSTED_ORIGINS (comma-separated)", async () => {
|
||||
process.env.BETTER_AUTH_TRUSTED_ORIGINS = "https://a.example.com, https://b.example.com";
|
||||
const origins = await resolveTrustedOrigins();
|
||||
expect(origins).toContain("https://a.example.com");
|
||||
expect(origins).toContain("https://b.example.com");
|
||||
});
|
||||
|
||||
test("skips invalid URLs in env vars", async () => {
|
||||
process.env.BETTER_AUTH_URL = "not-a-url";
|
||||
process.env.BETTER_AUTH_TRUSTED_ORIGINS = "also-invalid, https://valid.example.com";
|
||||
const origins = await resolveTrustedOrigins();
|
||||
expect(origins).not.toContain("not-a-url");
|
||||
expect(origins).not.toContain("also-invalid");
|
||||
expect(origins).toContain("https://valid.example.com");
|
||||
});
|
||||
|
||||
test("auto-detects origin from x-forwarded-host + x-forwarded-proto", async () => {
|
||||
const req = mockRequest({
|
||||
"x-forwarded-host": "gitea-mirror.mydomain.tld",
|
||||
"x-forwarded-proto": "https",
|
||||
});
|
||||
const origins = await resolveTrustedOrigins(req);
|
||||
expect(origins).toContain("https://gitea-mirror.mydomain.tld");
|
||||
});
|
||||
|
||||
test("falls back to host header when x-forwarded-host is absent", async () => {
|
||||
const req = mockRequest({
|
||||
host: "myserver.local:4321",
|
||||
});
|
||||
const origins = await resolveTrustedOrigins(req);
|
||||
expect(origins).toContain("http://myserver.local:4321");
|
||||
});
|
||||
|
||||
test("handles multi-value x-forwarded-host (chained proxies)", async () => {
|
||||
const req = mockRequest({
|
||||
"x-forwarded-host": "external.example.com, internal.proxy.local",
|
||||
"x-forwarded-proto": "https",
|
||||
});
|
||||
const origins = await resolveTrustedOrigins(req);
|
||||
expect(origins).toContain("https://external.example.com");
|
||||
expect(origins).not.toContain("https://internal.proxy.local");
|
||||
});
|
||||
|
||||
test("handles multi-value x-forwarded-proto (chained proxies)", async () => {
|
||||
const req = mockRequest({
|
||||
"x-forwarded-host": "gitea.example.com",
|
||||
"x-forwarded-proto": "https, http",
|
||||
});
|
||||
const origins = await resolveTrustedOrigins(req);
|
||||
expect(origins).toContain("https://gitea.example.com");
|
||||
// Should NOT create an origin with "https, http" as proto
|
||||
expect(origins).not.toContain("https, http://gitea.example.com");
|
||||
});
|
||||
|
||||
test("rejects invalid x-forwarded-proto values", async () => {
|
||||
const req = mockRequest({
|
||||
"x-forwarded-host": "gitea.example.com",
|
||||
"x-forwarded-proto": "ftp",
|
||||
});
|
||||
const origins = await resolveTrustedOrigins(req);
|
||||
expect(origins).not.toContain("ftp://gitea.example.com");
|
||||
});
|
||||
|
||||
test("deduplicates origins", async () => {
|
||||
process.env.BETTER_AUTH_URL = "http://localhost:4321";
|
||||
const origins = await resolveTrustedOrigins();
|
||||
const count = origins.filter(o => o === "http://localhost:4321").length;
|
||||
expect(count).toBe(1);
|
||||
});
|
||||
|
||||
test("defaults proto to http when x-forwarded-proto is absent", async () => {
|
||||
const req = mockRequest({
|
||||
"x-forwarded-host": "gitea.internal",
|
||||
});
|
||||
const origins = await resolveTrustedOrigins(req);
|
||||
expect(origins).toContain("http://gitea.internal");
|
||||
});
|
||||
});
|
||||
113
src/lib/auth.ts
113
src/lib/auth.ts
@@ -6,6 +6,72 @@ import { db, users } from "./db";
|
||||
import * as schema from "./db/schema";
|
||||
import { eq } from "drizzle-orm";
|
||||
|
||||
/**
|
||||
* Resolves the list of trusted origins for Better Auth CSRF validation.
|
||||
* Exported for testing. Called per-request with the incoming Request,
|
||||
* or at startup with no request (static origins only).
|
||||
*/
|
||||
export async function resolveTrustedOrigins(request?: Request): Promise<string[]> {
|
||||
const origins: string[] = [
|
||||
"http://localhost:4321",
|
||||
"http://localhost:8080", // Keycloak
|
||||
];
|
||||
|
||||
// Add the primary URL from BETTER_AUTH_URL
|
||||
const primaryUrl = process.env.BETTER_AUTH_URL;
|
||||
if (primaryUrl && typeof primaryUrl === 'string' && primaryUrl.trim() !== '') {
|
||||
try {
|
||||
const validatedUrl = new URL(primaryUrl.trim());
|
||||
origins.push(validatedUrl.origin);
|
||||
} catch {
|
||||
// Skip if invalid
|
||||
}
|
||||
}
|
||||
|
||||
// Add additional trusted origins from environment
|
||||
if (process.env.BETTER_AUTH_TRUSTED_ORIGINS) {
|
||||
const additionalOrigins = process.env.BETTER_AUTH_TRUSTED_ORIGINS
|
||||
.split(',')
|
||||
.map(o => o.trim())
|
||||
.filter(o => o !== '');
|
||||
|
||||
for (const origin of additionalOrigins) {
|
||||
try {
|
||||
const validatedUrl = new URL(origin);
|
||||
origins.push(validatedUrl.origin);
|
||||
} catch {
|
||||
console.warn(`Invalid trusted origin: ${origin}, skipping`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Auto-detect origin from the incoming request's Host header when running
|
||||
// behind a reverse proxy. Helps with Better Auth's per-request CSRF check.
|
||||
if (request?.headers) {
|
||||
// Take first value only — headers can be comma-separated in chained proxy setups
|
||||
const rawHost = request.headers.get("x-forwarded-host") || request.headers.get("host");
|
||||
const host = rawHost?.split(",")[0].trim();
|
||||
if (host) {
|
||||
const rawProto = request.headers.get("x-forwarded-proto") || "http";
|
||||
const proto = rawProto.split(",")[0].trim().toLowerCase();
|
||||
if (proto === "http" || proto === "https") {
|
||||
try {
|
||||
const detected = new URL(`${proto}://${host}`);
|
||||
origins.push(detected.origin);
|
||||
} catch {
|
||||
// Malformed header, ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const uniqueOrigins = [...new Set(origins.filter(Boolean))];
|
||||
if (!request) {
|
||||
console.info("Trusted origins (static):", uniqueOrigins);
|
||||
}
|
||||
return uniqueOrigins;
|
||||
}
|
||||
|
||||
export const auth = betterAuth({
|
||||
// Database configuration
|
||||
database: drizzleAdapter(db, {
|
||||
@@ -43,48 +109,11 @@ export const auth = betterAuth({
|
||||
})(),
|
||||
basePath: "/api/auth", // Specify the base path for auth endpoints
|
||||
|
||||
// Trusted origins - this is how we support multiple access URLs
|
||||
trustedOrigins: (() => {
|
||||
const origins: string[] = [
|
||||
"http://localhost:4321",
|
||||
"http://localhost:8080", // Keycloak
|
||||
];
|
||||
|
||||
// Add the primary URL from BETTER_AUTH_URL
|
||||
const primaryUrl = process.env.BETTER_AUTH_URL;
|
||||
if (primaryUrl && typeof primaryUrl === 'string' && primaryUrl.trim() !== '') {
|
||||
try {
|
||||
const validatedUrl = new URL(primaryUrl.trim());
|
||||
origins.push(validatedUrl.origin);
|
||||
} catch {
|
||||
// Skip if invalid
|
||||
}
|
||||
}
|
||||
|
||||
// Add additional trusted origins from environment
|
||||
// This is where users can specify multiple access URLs
|
||||
if (process.env.BETTER_AUTH_TRUSTED_ORIGINS) {
|
||||
const additionalOrigins = process.env.BETTER_AUTH_TRUSTED_ORIGINS
|
||||
.split(',')
|
||||
.map(o => o.trim())
|
||||
.filter(o => o !== '');
|
||||
|
||||
// Validate each additional origin
|
||||
for (const origin of additionalOrigins) {
|
||||
try {
|
||||
const validatedUrl = new URL(origin);
|
||||
origins.push(validatedUrl.origin);
|
||||
} catch {
|
||||
console.warn(`Invalid trusted origin: ${origin}, skipping`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove duplicates and empty strings, then return
|
||||
const uniqueOrigins = [...new Set(origins.filter(Boolean))];
|
||||
console.info('Trusted origins:', uniqueOrigins);
|
||||
return uniqueOrigins;
|
||||
})(),
|
||||
// Trusted origins - this is how we support multiple access URLs.
|
||||
// Uses the function form so that the origin can be auto-detected from
|
||||
// the incoming request's Host / X-Forwarded-* headers, which makes the
|
||||
// app work behind a reverse proxy without manual env var configuration.
|
||||
trustedOrigins: (request?: Request) => resolveTrustedOrigins(request),
|
||||
|
||||
// Authentication methods
|
||||
emailAndPassword: {
|
||||
|
||||
@@ -19,8 +19,23 @@ export const ENV = {
|
||||
},
|
||||
|
||||
// Better Auth secret for authentication
|
||||
BETTER_AUTH_SECRET:
|
||||
process.env.BETTER_AUTH_SECRET || "your-secret-key-change-this-in-production",
|
||||
get BETTER_AUTH_SECRET(): string {
|
||||
const secret = process.env.BETTER_AUTH_SECRET;
|
||||
const knownInsecureDefaults = [
|
||||
"your-secret-key-change-this-in-production",
|
||||
"dev-only-insecure-secret-do-not-use-in-production",
|
||||
];
|
||||
if (!secret || knownInsecureDefaults.includes(secret)) {
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
console.error(
|
||||
"\x1b[31m[SECURITY WARNING]\x1b[0m BETTER_AUTH_SECRET is missing or using an insecure default. " +
|
||||
"Set a strong secret: openssl rand -base64 32"
|
||||
);
|
||||
}
|
||||
return secret || "dev-only-insecure-secret-do-not-use-in-production";
|
||||
}
|
||||
return secret;
|
||||
},
|
||||
|
||||
// Server host and port
|
||||
HOST: process.env.HOST || "localhost",
|
||||
|
||||
@@ -35,13 +35,54 @@ if (process.env.NODE_ENV !== "test") {
|
||||
// Create drizzle instance with the SQLite client
|
||||
db = drizzle({ client: sqlite });
|
||||
|
||||
/**
|
||||
* Fix migration records that were marked as applied but whose DDL actually
|
||||
* failed (e.g. the v3.13.0 release where ALTER TABLE with expression default
|
||||
* was rejected by SQLite). Without this, Drizzle skips the migration on
|
||||
* retry because it thinks it already ran.
|
||||
*
|
||||
* Drizzle tracks migrations by `created_at` (= journal timestamp) and only
|
||||
* looks at the most recent record. If the last recorded timestamp is >= the
|
||||
* failed migration's timestamp but the expected column is missing, we delete
|
||||
* stale records so the migration re-runs.
|
||||
*/
|
||||
function repairFailedMigrations() {
|
||||
try {
|
||||
const migrationsTableExists = sqlite
|
||||
.query("SELECT name FROM sqlite_master WHERE type='table' AND name='__drizzle_migrations'")
|
||||
.get();
|
||||
|
||||
if (!migrationsTableExists) return;
|
||||
|
||||
// Migration 0009 journal timestamp (from drizzle/meta/_journal.json)
|
||||
const MIGRATION_0009_TIMESTAMP = 1773542995732;
|
||||
|
||||
const lastMigration = sqlite
|
||||
.query("SELECT id, created_at FROM __drizzle_migrations ORDER BY created_at DESC LIMIT 1")
|
||||
.get() as { id: number; created_at: number } | null;
|
||||
|
||||
if (!lastMigration || Number(lastMigration.created_at) < MIGRATION_0009_TIMESTAMP) return;
|
||||
|
||||
// Migration 0009 is recorded as applied — verify the column actually exists
|
||||
const columns = sqlite.query("PRAGMA table_info(repositories)").all() as { name: string }[];
|
||||
const hasImportedAt = columns.some((c) => c.name === "imported_at");
|
||||
|
||||
if (!hasImportedAt) {
|
||||
console.log("🔧 Detected failed migration 0009 (imported_at column missing). Removing stale record so it can re-run...");
|
||||
sqlite.prepare("DELETE FROM __drizzle_migrations WHERE created_at >= ?").run(MIGRATION_0009_TIMESTAMP);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn("⚠️ Migration repair check failed (non-fatal):", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run Drizzle migrations
|
||||
*/
|
||||
function runDrizzleMigrations() {
|
||||
try {
|
||||
console.log("🔄 Checking for pending migrations...");
|
||||
|
||||
|
||||
// Check if migrations table exists
|
||||
const migrationsTableExists = sqlite
|
||||
.query("SELECT name FROM sqlite_master WHERE type='table' AND name='__drizzle_migrations'")
|
||||
@@ -51,9 +92,12 @@ if (process.env.NODE_ENV !== "test") {
|
||||
console.log("📦 First time setup - running initial migrations...");
|
||||
}
|
||||
|
||||
// Fix any migrations that were recorded but actually failed (e.g. v3.13.0 bug)
|
||||
repairFailedMigrations();
|
||||
|
||||
// Run migrations using Drizzle migrate function
|
||||
migrate(db, { migrationsFolder: "./drizzle" });
|
||||
|
||||
|
||||
console.log("✅ Database migrations completed successfully");
|
||||
} catch (error) {
|
||||
console.error("❌ Error running migrations:", error);
|
||||
|
||||
26
src/lib/db/migrations.test.ts
Normal file
26
src/lib/db/migrations.test.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
function decodeOutput(output: ArrayBufferLike | Uint8Array | null | undefined) {
|
||||
if (!output) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return Buffer.from(output as ArrayBufferLike).toString("utf8");
|
||||
}
|
||||
|
||||
test("migration validation script passes", () => {
|
||||
const result = Bun.spawnSync({
|
||||
cmd: ["bun", "scripts/validate-migrations.ts"],
|
||||
cwd: process.cwd(),
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const stdout = decodeOutput(result.stdout);
|
||||
const stderr = decodeOutput(result.stderr);
|
||||
|
||||
expect(
|
||||
result.exitCode,
|
||||
`Migration validation script failed.\nstdout:\n${stdout}\nstderr:\n${stderr}`,
|
||||
).toBe(0);
|
||||
});
|
||||
@@ -25,15 +25,25 @@ export const githubConfigSchema = z.object({
|
||||
includePublic: z.boolean().default(true),
|
||||
includeOrganizations: z.array(z.string()).default([]),
|
||||
starredReposOrg: z.string().optional(),
|
||||
starredReposMode: z.enum(["dedicated-org", "preserve-owner"]).default("dedicated-org"),
|
||||
mirrorStrategy: z.enum(["preserve", "single-org", "flat-user", "mixed"]).default("preserve"),
|
||||
defaultOrg: z.string().optional(),
|
||||
starredCodeOnly: z.boolean().default(false),
|
||||
autoMirrorStarred: z.boolean().default(false),
|
||||
skipStarredIssues: z.boolean().optional(), // Deprecated: kept for backward compatibility, use starredCodeOnly instead
|
||||
starredDuplicateStrategy: z.enum(["suffix", "prefix", "owner-org"]).default("suffix").optional(),
|
||||
});
|
||||
|
||||
export const backupStrategyEnum = z.enum([
|
||||
"disabled",
|
||||
"always",
|
||||
"on-force-push",
|
||||
"block-on-force-push",
|
||||
]);
|
||||
|
||||
export const giteaConfigSchema = z.object({
|
||||
url: z.url(),
|
||||
externalUrl: z.url().optional(),
|
||||
token: z.string(),
|
||||
defaultOwner: z.string(),
|
||||
organization: z.string().optional(),
|
||||
@@ -63,6 +73,12 @@ export const giteaConfigSchema = z.object({
|
||||
mirrorPullRequests: z.boolean().default(false),
|
||||
mirrorLabels: z.boolean().default(false),
|
||||
mirrorMilestones: z.boolean().default(false),
|
||||
backupStrategy: backupStrategyEnum.default("on-force-push"),
|
||||
backupBeforeSync: z.boolean().default(true), // Deprecated: kept for backward compat, use backupStrategy
|
||||
backupRetentionCount: z.number().int().min(1).default(5),
|
||||
backupRetentionDays: z.number().int().min(0).default(30),
|
||||
backupDirectory: z.string().optional(),
|
||||
blockSyncOnBackupFailure: z.boolean().default(true),
|
||||
});
|
||||
|
||||
export const scheduleConfigSchema = z.object({
|
||||
@@ -106,6 +122,31 @@ export const cleanupConfigSchema = z.object({
|
||||
nextRun: z.coerce.date().optional(),
|
||||
});
|
||||
|
||||
export const ntfyConfigSchema = z.object({
|
||||
url: z.string().default("https://ntfy.sh"),
|
||||
topic: z.string().default(""),
|
||||
token: z.string().optional(),
|
||||
priority: z.enum(["min", "low", "default", "high", "urgent"]).default("default"),
|
||||
});
|
||||
|
||||
export const appriseConfigSchema = z.object({
|
||||
url: z.string().default(""),
|
||||
token: z.string().default(""),
|
||||
tag: z.string().optional(),
|
||||
});
|
||||
|
||||
export const notificationConfigSchema = z.object({
|
||||
enabled: z.boolean().default(false),
|
||||
provider: z.enum(["ntfy", "apprise"]).default("ntfy"),
|
||||
notifyOnSyncError: z.boolean().default(true),
|
||||
notifyOnSyncSuccess: z.boolean().default(false),
|
||||
notifyOnNewRepo: z.boolean().default(false),
|
||||
ntfy: ntfyConfigSchema.optional(),
|
||||
apprise: appriseConfigSchema.optional(),
|
||||
});
|
||||
|
||||
export type NotificationConfig = z.infer<typeof notificationConfigSchema>;
|
||||
|
||||
export const configSchema = z.object({
|
||||
id: z.string(),
|
||||
userId: z.string(),
|
||||
@@ -159,12 +200,14 @@ export const repositorySchema = z.object({
|
||||
"syncing",
|
||||
"synced",
|
||||
"archived",
|
||||
"pending-approval", // Blocked by force-push detection, needs manual approval
|
||||
])
|
||||
.default("imported"),
|
||||
lastMirrored: z.coerce.date().optional().nullable(),
|
||||
errorMessage: z.string().optional().nullable(),
|
||||
destinationOrg: z.string().optional().nullable(),
|
||||
metadata: z.string().optional().nullable(), // JSON string for metadata sync state
|
||||
importedAt: z.coerce.date(),
|
||||
createdAt: z.coerce.date(),
|
||||
updatedAt: z.coerce.date(),
|
||||
});
|
||||
@@ -190,6 +233,7 @@ export const mirrorJobSchema = z.object({
|
||||
"syncing",
|
||||
"synced",
|
||||
"archived",
|
||||
"pending-approval",
|
||||
])
|
||||
.default("imported"),
|
||||
message: z.string(),
|
||||
@@ -318,6 +362,11 @@ export const configs = sqliteTable("configs", {
|
||||
.$type<z.infer<typeof cleanupConfigSchema>>()
|
||||
.notNull(),
|
||||
|
||||
notificationConfig: text("notification_config", { mode: "json" })
|
||||
.$type<z.infer<typeof notificationConfigSchema>>()
|
||||
.notNull()
|
||||
.default(sql`'{"enabled":false,"provider":"ntfy","notifyOnSyncError":true,"notifyOnSyncSuccess":false,"notifyOnNewRepo":false}'`),
|
||||
|
||||
createdAt: integer("created_at", { mode: "timestamp" })
|
||||
.notNull()
|
||||
.default(sql`(unixepoch())`),
|
||||
@@ -378,6 +427,9 @@ export const repositories = sqliteTable("repositories", {
|
||||
destinationOrg: text("destination_org"),
|
||||
|
||||
metadata: text("metadata"), // JSON string storing metadata sync state (issues, PRs, releases, etc.)
|
||||
importedAt: integer("imported_at", { mode: "timestamp" })
|
||||
.notNull()
|
||||
.default(sql`(unixepoch())`),
|
||||
|
||||
createdAt: integer("created_at", { mode: "timestamp" })
|
||||
.notNull()
|
||||
@@ -393,8 +445,10 @@ export const repositories = sqliteTable("repositories", {
|
||||
index("idx_repositories_organization").on(table.organization),
|
||||
index("idx_repositories_is_fork").on(table.isForked),
|
||||
index("idx_repositories_is_starred").on(table.isStarred),
|
||||
index("idx_repositories_user_imported_at").on(table.userId, table.importedAt),
|
||||
uniqueIndex("uniq_repositories_user_full_name").on(table.userId, table.fullName),
|
||||
uniqueIndex("uniq_repositories_user_normalized_full_name").on(table.userId, table.normalizedFullName),
|
||||
index("idx_repositories_mirrored_location").on(table.userId, table.mirroredLocation),
|
||||
]);
|
||||
|
||||
export const mirrorJobs = sqliteTable("mirror_jobs", {
|
||||
|
||||
@@ -22,11 +22,14 @@ interface EnvConfig {
|
||||
preserveOrgStructure?: boolean;
|
||||
onlyMirrorOrgs?: boolean;
|
||||
starredCodeOnly?: boolean;
|
||||
autoMirrorStarred?: boolean;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: 'dedicated-org' | 'preserve-owner';
|
||||
mirrorStrategy?: 'preserve' | 'single-org' | 'flat-user' | 'mixed';
|
||||
};
|
||||
gitea: {
|
||||
url?: string;
|
||||
externalUrl?: string;
|
||||
username?: string;
|
||||
token?: string;
|
||||
organization?: string;
|
||||
@@ -111,11 +114,14 @@ function parseEnvConfig(): EnvConfig {
|
||||
preserveOrgStructure: process.env.PRESERVE_ORG_STRUCTURE === 'true',
|
||||
onlyMirrorOrgs: process.env.ONLY_MIRROR_ORGS === 'true',
|
||||
starredCodeOnly: process.env.SKIP_STARRED_ISSUES === 'true',
|
||||
autoMirrorStarred: process.env.AUTO_MIRROR_STARRED === 'true',
|
||||
starredReposOrg: process.env.STARRED_REPOS_ORG,
|
||||
starredReposMode: process.env.STARRED_REPOS_MODE as 'dedicated-org' | 'preserve-owner',
|
||||
mirrorStrategy: process.env.MIRROR_STRATEGY as 'preserve' | 'single-org' | 'flat-user' | 'mixed',
|
||||
},
|
||||
gitea: {
|
||||
url: process.env.GITEA_URL,
|
||||
externalUrl: process.env.GITEA_EXTERNAL_URL,
|
||||
username: process.env.GITEA_USERNAME,
|
||||
token: process.env.GITEA_TOKEN,
|
||||
organization: process.env.GITEA_ORGANIZATION,
|
||||
@@ -256,14 +262,17 @@ export async function initializeConfigFromEnv(): Promise<void> {
|
||||
includePublic: envConfig.github.publicRepositories ?? existingConfig?.[0]?.githubConfig?.includePublic ?? true,
|
||||
includeOrganizations: envConfig.github.mirrorOrganizations ? [] : (existingConfig?.[0]?.githubConfig?.includeOrganizations ?? []),
|
||||
starredReposOrg: envConfig.github.starredReposOrg || existingConfig?.[0]?.githubConfig?.starredReposOrg || 'starred',
|
||||
starredReposMode: envConfig.github.starredReposMode || existingConfig?.[0]?.githubConfig?.starredReposMode || 'dedicated-org',
|
||||
mirrorStrategy,
|
||||
defaultOrg: envConfig.gitea.organization || existingConfig?.[0]?.githubConfig?.defaultOrg || 'github-mirrors',
|
||||
starredCodeOnly: envConfig.github.starredCodeOnly ?? existingConfig?.[0]?.githubConfig?.starredCodeOnly ?? false,
|
||||
autoMirrorStarred: envConfig.github.autoMirrorStarred ?? existingConfig?.[0]?.githubConfig?.autoMirrorStarred ?? false,
|
||||
};
|
||||
|
||||
// Build Gitea config
|
||||
const giteaConfig = {
|
||||
url: envConfig.gitea.url || existingConfig?.[0]?.giteaConfig?.url || '',
|
||||
externalUrl: envConfig.gitea.externalUrl || existingConfig?.[0]?.giteaConfig?.externalUrl || undefined,
|
||||
token: envConfig.gitea.token ? encrypt(envConfig.gitea.token) : existingConfig?.[0]?.giteaConfig?.token || '',
|
||||
defaultOwner: envConfig.gitea.username || existingConfig?.[0]?.giteaConfig?.defaultOwner || '',
|
||||
organization: envConfig.gitea.organization || existingConfig?.[0]?.giteaConfig?.organization || undefined,
|
||||
|
||||
@@ -13,6 +13,11 @@ const mockMirrorGitRepoPullRequestsToGitea = mock(() => Promise.resolve());
|
||||
const mockMirrorGitRepoLabelsToGitea = mock(() => Promise.resolve());
|
||||
const mockMirrorGitRepoMilestonesToGitea = mock(() => Promise.resolve());
|
||||
const mockGetGiteaRepoOwnerAsync = mock(() => Promise.resolve("starred"));
|
||||
const mockCreatePreSyncBundleBackup = mock(() =>
|
||||
Promise.resolve({ bundlePath: "/tmp/mock.bundle" })
|
||||
);
|
||||
let mockShouldCreatePreSyncBackup = false;
|
||||
let mockShouldBlockSyncOnBackupFailure = true;
|
||||
|
||||
// Mock the database module
|
||||
const mockDb = {
|
||||
@@ -28,8 +33,14 @@ const mockDb = {
|
||||
|
||||
mock.module("@/lib/db", () => ({
|
||||
db: mockDb,
|
||||
users: {},
|
||||
configs: {},
|
||||
organizations: {},
|
||||
mirrorJobs: {},
|
||||
repositories: {}
|
||||
repositories: {},
|
||||
events: {},
|
||||
accounts: {},
|
||||
sessions: {},
|
||||
}));
|
||||
|
||||
// Mock config encryption
|
||||
@@ -235,6 +246,12 @@ mock.module("@/lib/http-client", () => ({
|
||||
HttpError: MockHttpError
|
||||
}));
|
||||
|
||||
mock.module("@/lib/repo-backup", () => ({
|
||||
createPreSyncBundleBackup: mockCreatePreSyncBundleBackup,
|
||||
shouldCreatePreSyncBackup: () => mockShouldCreatePreSyncBackup,
|
||||
shouldBlockSyncOnBackupFailure: () => mockShouldBlockSyncOnBackupFailure,
|
||||
}));
|
||||
|
||||
// Now import the modules we're testing
|
||||
import {
|
||||
getGiteaRepoInfo,
|
||||
@@ -264,6 +281,15 @@ describe("Enhanced Gitea Operations", () => {
|
||||
mockMirrorGitRepoMilestonesToGitea.mockClear();
|
||||
mockGetGiteaRepoOwnerAsync.mockClear();
|
||||
mockGetGiteaRepoOwnerAsync.mockImplementation(() => Promise.resolve("starred"));
|
||||
mockHttpGet.mockClear();
|
||||
mockHttpPost.mockClear();
|
||||
mockHttpDelete.mockClear();
|
||||
mockCreatePreSyncBundleBackup.mockClear();
|
||||
mockCreatePreSyncBundleBackup.mockImplementation(() =>
|
||||
Promise.resolve({ bundlePath: "/tmp/mock.bundle" })
|
||||
);
|
||||
mockShouldCreatePreSyncBackup = false;
|
||||
mockShouldBlockSyncOnBackupFailure = true;
|
||||
// Reset tracking variables
|
||||
orgCheckCount = 0;
|
||||
orgTestContext = "";
|
||||
@@ -529,6 +555,125 @@ describe("Enhanced Gitea Operations", () => {
|
||||
expect(releaseCall.octokit).toBeDefined();
|
||||
});
|
||||
|
||||
test("blocks sync when pre-sync snapshot fails and blocking is enabled", async () => {
|
||||
mockShouldCreatePreSyncBackup = true;
|
||||
mockShouldBlockSyncOnBackupFailure = true;
|
||||
mockCreatePreSyncBundleBackup.mockImplementation(() =>
|
||||
Promise.reject(new Error("simulated backup failure"))
|
||||
);
|
||||
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
githubConfig: {
|
||||
username: "testuser",
|
||||
token: "github-token",
|
||||
privateRepositories: false,
|
||||
mirrorStarred: true,
|
||||
},
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "encrypted-token",
|
||||
defaultOwner: "testuser",
|
||||
mirrorReleases: false,
|
||||
backupStrategy: "always",
|
||||
blockSyncOnBackupFailure: true,
|
||||
},
|
||||
};
|
||||
|
||||
const repository: Repository = {
|
||||
id: "repo456",
|
||||
name: "mirror-repo",
|
||||
fullName: "user/mirror-repo",
|
||||
owner: "user",
|
||||
cloneUrl: "https://github.com/user/mirror-repo.git",
|
||||
isPrivate: false,
|
||||
isStarred: true,
|
||||
status: repoStatusEnum.parse("mirrored"),
|
||||
visibility: "public",
|
||||
userId: "user123",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
await expect(
|
||||
syncGiteaRepoEnhanced(
|
||||
{ config, repository },
|
||||
{
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
|
||||
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
|
||||
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
|
||||
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
|
||||
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
|
||||
}
|
||||
)
|
||||
).rejects.toThrow("Snapshot failed; sync blocked to protect history.");
|
||||
|
||||
const mirrorSyncCalls = mockHttpPost.mock.calls.filter((call) =>
|
||||
String(call[0]).includes("/mirror-sync")
|
||||
);
|
||||
expect(mirrorSyncCalls.length).toBe(0);
|
||||
});
|
||||
|
||||
test("continues sync when pre-sync snapshot fails and blocking is disabled", async () => {
|
||||
mockShouldCreatePreSyncBackup = true;
|
||||
mockShouldBlockSyncOnBackupFailure = false;
|
||||
mockCreatePreSyncBundleBackup.mockImplementation(() =>
|
||||
Promise.reject(new Error("simulated backup failure"))
|
||||
);
|
||||
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
githubConfig: {
|
||||
username: "testuser",
|
||||
token: "github-token",
|
||||
privateRepositories: false,
|
||||
mirrorStarred: true,
|
||||
},
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "encrypted-token",
|
||||
defaultOwner: "testuser",
|
||||
mirrorReleases: false,
|
||||
backupBeforeSync: true,
|
||||
blockSyncOnBackupFailure: false,
|
||||
},
|
||||
};
|
||||
|
||||
const repository: Repository = {
|
||||
id: "repo457",
|
||||
name: "mirror-repo",
|
||||
fullName: "user/mirror-repo",
|
||||
owner: "user",
|
||||
cloneUrl: "https://github.com/user/mirror-repo.git",
|
||||
isPrivate: false,
|
||||
isStarred: true,
|
||||
status: repoStatusEnum.parse("mirrored"),
|
||||
visibility: "public",
|
||||
userId: "user123",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
const result = await syncGiteaRepoEnhanced(
|
||||
{ config, repository },
|
||||
{
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
|
||||
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
|
||||
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
|
||||
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
|
||||
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
|
||||
}
|
||||
);
|
||||
|
||||
expect(result).toEqual({ success: true });
|
||||
const mirrorSyncCalls = mockHttpPost.mock.calls.filter((call) =>
|
||||
String(call[0]).includes("/mirror-sync")
|
||||
);
|
||||
expect(mirrorSyncCalls.length).toBe(1);
|
||||
});
|
||||
|
||||
test("mirrors metadata components when enabled and not previously synced", async () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
@@ -587,7 +732,7 @@ describe("Enhanced Gitea Operations", () => {
|
||||
expect(mockMirrorGitRepoLabelsToGitea).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("skips metadata mirroring when components already synced", async () => {
|
||||
test("continues incremental issue and PR syncing when metadata was previously synced", async () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
githubConfig: {
|
||||
@@ -647,8 +792,8 @@ describe("Enhanced Gitea Operations", () => {
|
||||
);
|
||||
|
||||
expect(mockMirrorGitHubReleasesToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoIssuesToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoPullRequestsToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoIssuesToGitea).toHaveBeenCalledTimes(1);
|
||||
expect(mockMirrorGitRepoPullRequestsToGitea).toHaveBeenCalledTimes(1);
|
||||
expect(mockMirrorGitRepoLabelsToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoMilestonesToGitea).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -15,6 +15,16 @@ import { httpPost, httpGet, httpPatch, HttpError } from "./http-client";
|
||||
import { db, repositories } from "./db";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
import {
|
||||
createPreSyncBundleBackup,
|
||||
shouldCreatePreSyncBackup,
|
||||
shouldBlockSyncOnBackupFailure,
|
||||
resolveBackupStrategy,
|
||||
shouldBackupForStrategy,
|
||||
shouldBlockSyncForStrategy,
|
||||
strategyNeedsDetection,
|
||||
} from "./repo-backup";
|
||||
import { detectForcePush } from "./utils/force-push-detection";
|
||||
import {
|
||||
parseRepositoryMetadataState,
|
||||
serializeRepositoryMetadataState,
|
||||
@@ -250,9 +260,12 @@ export async function getOrCreateGiteaOrgEnhanced({
|
||||
export async function syncGiteaRepoEnhanced({
|
||||
config,
|
||||
repository,
|
||||
skipForcePushDetection,
|
||||
}: {
|
||||
config: Partial<Config>;
|
||||
repository: Repository;
|
||||
/** When true, skip force-push detection and blocking (used by approve-sync). */
|
||||
skipForcePushDetection?: boolean;
|
||||
}, deps?: SyncDependencies): Promise<any> {
|
||||
try {
|
||||
if (!config.userId || !config.giteaConfig?.url || !config.giteaConfig?.token) {
|
||||
@@ -313,6 +326,141 @@ export async function syncGiteaRepoEnhanced({
|
||||
throw new Error(`Repository ${repository.name} is not a mirror. Cannot sync.`);
|
||||
}
|
||||
|
||||
// ---- Smart backup strategy with force-push detection ----
|
||||
const backupStrategy = resolveBackupStrategy(config);
|
||||
let forcePushDetected = false;
|
||||
|
||||
if (backupStrategy !== "disabled") {
|
||||
// Run force-push detection if the strategy requires it
|
||||
// (skip when called from approve-sync to avoid re-blocking)
|
||||
if (strategyNeedsDetection(backupStrategy) && !skipForcePushDetection) {
|
||||
try {
|
||||
const decryptedGithubToken = decryptedConfig.githubConfig?.token;
|
||||
if (decryptedGithubToken) {
|
||||
const fpOctokit = new Octokit({ auth: decryptedGithubToken });
|
||||
const detectionResult = await detectForcePush({
|
||||
giteaUrl: config.giteaConfig.url,
|
||||
giteaToken: decryptedConfig.giteaConfig.token,
|
||||
giteaOwner: repoOwner,
|
||||
giteaRepo: repository.name,
|
||||
octokit: fpOctokit,
|
||||
githubOwner: repository.owner,
|
||||
githubRepo: repository.name,
|
||||
});
|
||||
|
||||
forcePushDetected = detectionResult.detected;
|
||||
|
||||
if (detectionResult.skipped) {
|
||||
console.log(
|
||||
`[Sync] Force-push detection skipped for ${repository.name}: ${detectionResult.skipReason}`,
|
||||
);
|
||||
} else if (forcePushDetected) {
|
||||
const branchNames = detectionResult.affectedBranches
|
||||
.map((b) => `${b.name} (${b.reason})`)
|
||||
.join(", ");
|
||||
console.warn(
|
||||
`[Sync] Force-push detected on ${repository.name}: ${branchNames}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.log(
|
||||
`[Sync] Skipping force-push detection for ${repository.name}: no GitHub token`,
|
||||
);
|
||||
}
|
||||
} catch (detectionError) {
|
||||
// Fail-open: detection errors should never block sync
|
||||
console.warn(
|
||||
`[Sync] Force-push detection failed for ${repository.name}, proceeding with sync: ${
|
||||
detectionError instanceof Error ? detectionError.message : String(detectionError)
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if sync should be blocked (block-on-force-push mode)
|
||||
if (shouldBlockSyncForStrategy(backupStrategy, forcePushDetected)) {
|
||||
const branchInfo = `Force-push detected; sync blocked for manual approval.`;
|
||||
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
status: "pending-approval",
|
||||
updatedAt: new Date(),
|
||||
errorMessage: branchInfo,
|
||||
})
|
||||
.where(eq(repositories.id, repository.id!));
|
||||
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Sync blocked for ${repository.name}: force-push detected`,
|
||||
details: branchInfo,
|
||||
status: "pending-approval",
|
||||
});
|
||||
|
||||
console.warn(`[Sync] Sync blocked for ${repository.name}: pending manual approval`);
|
||||
return { blocked: true, reason: branchInfo };
|
||||
}
|
||||
|
||||
// Create backup if strategy says so
|
||||
if (shouldBackupForStrategy(backupStrategy, forcePushDetected)) {
|
||||
const cloneUrl =
|
||||
repoInfo.clone_url ||
|
||||
`${config.giteaConfig.url.replace(/\/$/, "")}/${repoOwner}/${repository.name}.git`;
|
||||
|
||||
try {
|
||||
const backupResult = await createPreSyncBundleBackup({
|
||||
config,
|
||||
owner: repoOwner,
|
||||
repoName: repository.name,
|
||||
cloneUrl,
|
||||
force: true, // Strategy already decided to backup; skip legacy gate
|
||||
});
|
||||
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Snapshot created for ${repository.name}`,
|
||||
details: `Pre-sync snapshot created at ${backupResult.bundlePath}.`,
|
||||
status: "syncing",
|
||||
});
|
||||
} catch (backupError) {
|
||||
const errorMessage =
|
||||
backupError instanceof Error ? backupError.message : String(backupError);
|
||||
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Snapshot failed for ${repository.name}`,
|
||||
details: `Pre-sync snapshot failed: ${errorMessage}`,
|
||||
status: "failed",
|
||||
});
|
||||
|
||||
if (shouldBlockSyncOnBackupFailure(config)) {
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
status: repoStatusEnum.parse("failed"),
|
||||
updatedAt: new Date(),
|
||||
errorMessage: `Snapshot failed; sync blocked to protect history. ${errorMessage}`,
|
||||
})
|
||||
.where(eq(repositories.id, repository.id!));
|
||||
|
||||
throw new Error(
|
||||
`Snapshot failed; sync blocked to protect history. ${errorMessage}`,
|
||||
);
|
||||
}
|
||||
|
||||
console.warn(
|
||||
`[Sync] Snapshot failed for ${repository.name}, continuing because blockSyncOnBackupFailure=false: ${errorMessage}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update mirror interval if needed
|
||||
if (config.giteaConfig?.mirrorInterval) {
|
||||
try {
|
||||
@@ -361,12 +509,10 @@ export async function syncGiteaRepoEnhanced({
|
||||
!!config.giteaConfig?.mirrorReleases && !skipMetadataForStarred;
|
||||
const shouldMirrorIssuesThisRun =
|
||||
!!config.giteaConfig?.mirrorIssues &&
|
||||
!skipMetadataForStarred &&
|
||||
!metadataState.components.issues;
|
||||
!skipMetadataForStarred;
|
||||
const shouldMirrorPullRequests =
|
||||
!!config.giteaConfig?.mirrorPullRequests &&
|
||||
!skipMetadataForStarred &&
|
||||
!metadataState.components.pullRequests;
|
||||
!skipMetadataForStarred;
|
||||
const shouldMirrorLabels =
|
||||
!!config.giteaConfig?.mirrorLabels &&
|
||||
!skipMetadataForStarred &&
|
||||
@@ -440,13 +586,6 @@ export async function syncGiteaRepoEnhanced({
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
config.giteaConfig?.mirrorIssues &&
|
||||
metadataState.components.issues
|
||||
) {
|
||||
console.log(
|
||||
`[Sync] Issues already mirrored for ${repository.name}; skipping`
|
||||
);
|
||||
}
|
||||
|
||||
if (shouldMirrorPullRequests) {
|
||||
@@ -477,13 +616,6 @@ export async function syncGiteaRepoEnhanced({
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
config.giteaConfig?.mirrorPullRequests &&
|
||||
metadataState.components.pullRequests
|
||||
) {
|
||||
console.log(
|
||||
`[Sync] Pull requests already mirrored for ${repository.name}; skipping`
|
||||
);
|
||||
}
|
||||
|
||||
if (shouldMirrorLabels) {
|
||||
@@ -587,12 +719,12 @@ export async function syncGiteaRepoEnhanced({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Successfully synced repository: ${repository.name}`,
|
||||
details: `Repository ${repository.name} was synced with Gitea.`,
|
||||
message: `Sync requested for repository: ${repository.name}`,
|
||||
details: `Mirror sync was requested for ${repository.name}.`,
|
||||
status: "synced",
|
||||
});
|
||||
|
||||
console.log(`[Sync] Repository ${repository.name} synced successfully`);
|
||||
console.log(`[Sync] Mirror sync requested for repository ${repository.name}`);
|
||||
return response.data;
|
||||
} catch (syncError) {
|
||||
if (syncError instanceof HttpError && syncError.status === 400) {
|
||||
|
||||
@@ -24,9 +24,14 @@ mock.module("@/lib/db", () => {
|
||||
values: mock(() => Promise.resolve())
|
||||
}))
|
||||
},
|
||||
users: {},
|
||||
configs: {},
|
||||
repositories: {},
|
||||
organizations: {},
|
||||
events: {}
|
||||
events: {},
|
||||
mirrorJobs: {},
|
||||
accounts: {},
|
||||
sessions: {},
|
||||
};
|
||||
});
|
||||
|
||||
@@ -59,10 +64,16 @@ const mockGetOrCreateGiteaOrg = mock(async ({ orgName, config }: any) => {
|
||||
|
||||
const mockMirrorGitHubOrgRepoToGiteaOrg = mock(async () => {});
|
||||
const mockIsRepoPresentInGitea = mock(async () => false);
|
||||
const mockMirrorGithubRepoToGitea = mock(async () => {});
|
||||
const mockGetGiteaRepoOwnerAsync = mock(async () => "starred");
|
||||
const mockGetGiteaRepoOwner = mock(() => "starred");
|
||||
|
||||
mock.module("./gitea", () => ({
|
||||
getOrCreateGiteaOrg: mockGetOrCreateGiteaOrg,
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mockMirrorGitHubOrgRepoToGiteaOrg,
|
||||
mirrorGithubRepoToGitea: mockMirrorGithubRepoToGitea,
|
||||
getGiteaRepoOwner: mockGetGiteaRepoOwner,
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
isRepoPresentInGitea: mockIsRepoPresentInGitea
|
||||
}));
|
||||
|
||||
@@ -226,4 +237,4 @@ describe("Starred Repository Error Handling", () => {
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
45
src/lib/gitea-url.test.ts
Normal file
45
src/lib/gitea-url.test.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import { buildGiteaWebUrl, getGiteaWebBaseUrl } from "@/lib/gitea-url";
|
||||
|
||||
describe("getGiteaWebBaseUrl", () => {
|
||||
it("prefers externalUrl when both urls are present", () => {
|
||||
const baseUrl = getGiteaWebBaseUrl({
|
||||
url: "http://gitea:3000",
|
||||
externalUrl: "https://git.example.com",
|
||||
});
|
||||
|
||||
expect(baseUrl).toBe("https://git.example.com");
|
||||
});
|
||||
|
||||
it("falls back to url when externalUrl is missing", () => {
|
||||
const baseUrl = getGiteaWebBaseUrl({
|
||||
url: "http://gitea:3000",
|
||||
});
|
||||
|
||||
expect(baseUrl).toBe("http://gitea:3000");
|
||||
});
|
||||
|
||||
it("trims a trailing slash", () => {
|
||||
const baseUrl = getGiteaWebBaseUrl({
|
||||
externalUrl: "https://git.example.com/",
|
||||
});
|
||||
|
||||
expect(baseUrl).toBe("https://git.example.com");
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildGiteaWebUrl", () => {
|
||||
it("builds a full repository url and removes leading path slashes", () => {
|
||||
const url = buildGiteaWebUrl(
|
||||
{ externalUrl: "https://git.example.com/" },
|
||||
"/org/repo"
|
||||
);
|
||||
|
||||
expect(url).toBe("https://git.example.com/org/repo");
|
||||
});
|
||||
|
||||
it("returns null when no gitea url is configured", () => {
|
||||
const url = buildGiteaWebUrl({}, "org/repo");
|
||||
expect(url).toBeNull();
|
||||
});
|
||||
});
|
||||
28
src/lib/gitea-url.ts
Normal file
28
src/lib/gitea-url.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
interface GiteaUrlConfig {
|
||||
url?: string | null;
|
||||
externalUrl?: string | null;
|
||||
}
|
||||
|
||||
export function getGiteaWebBaseUrl(
|
||||
config?: GiteaUrlConfig | null
|
||||
): string | null {
|
||||
const rawBaseUrl = config?.externalUrl || config?.url;
|
||||
if (!rawBaseUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return rawBaseUrl.endsWith("/") ? rawBaseUrl.slice(0, -1) : rawBaseUrl;
|
||||
}
|
||||
|
||||
export function buildGiteaWebUrl(
|
||||
config: GiteaUrlConfig | null | undefined,
|
||||
path: string
|
||||
): string | null {
|
||||
const baseUrl = getGiteaWebBaseUrl(config);
|
||||
if (!baseUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const normalizedPath = path.replace(/^\/+/, "");
|
||||
return normalizedPath ? `${baseUrl}/${normalizedPath}` : baseUrl;
|
||||
}
|
||||
@@ -27,8 +27,14 @@ mock.module("@/lib/db", () => {
|
||||
})
|
||||
})
|
||||
},
|
||||
users: {},
|
||||
configs: {},
|
||||
repositories: {},
|
||||
organizations: {}
|
||||
organizations: {},
|
||||
mirrorJobs: {},
|
||||
events: {},
|
||||
accounts: {},
|
||||
sessions: {},
|
||||
};
|
||||
});
|
||||
|
||||
@@ -55,8 +61,61 @@ mock.module("@/lib/http-client", () => {
|
||||
|
||||
// Mock the gitea module itself
|
||||
mock.module("./gitea", () => {
|
||||
const mockGetGiteaRepoOwner = mock(({ config, repository }: any) => {
|
||||
if (repository?.isStarred && config?.githubConfig?.starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
if (repository?.isStarred) {
|
||||
return config?.githubConfig?.starredReposOrg || "starred";
|
||||
}
|
||||
|
||||
const mirrorStrategy =
|
||||
config?.githubConfig?.mirrorStrategy ||
|
||||
(config?.giteaConfig?.preserveOrgStructure ? "preserve" : "flat-user");
|
||||
const configuredGitHubOwner =
|
||||
(config?.githubConfig?.owner || config?.githubConfig?.username || "")
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
const repoOwner = repository?.owner?.trim().toLowerCase();
|
||||
|
||||
switch (mirrorStrategy) {
|
||||
case "preserve":
|
||||
if (repository?.organization) {
|
||||
return repository.organization;
|
||||
}
|
||||
if (configuredGitHubOwner && repoOwner && repoOwner !== configuredGitHubOwner) {
|
||||
return repository.owner;
|
||||
}
|
||||
return config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
case "single-org":
|
||||
return config?.giteaConfig?.organization || config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
case "mixed":
|
||||
if (repository?.organization) return repository.organization;
|
||||
return config?.giteaConfig?.organization || config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
case "flat-user":
|
||||
default:
|
||||
return config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
}
|
||||
});
|
||||
const mockGetGiteaRepoOwnerAsync = mock(async ({ config, repository }: any) => {
|
||||
if (repository?.isStarred && config?.githubConfig?.starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
|
||||
if (repository?.destinationOrg) {
|
||||
return repository.destinationOrg;
|
||||
}
|
||||
|
||||
if (repository?.organization && mockDbSelectResult[0]?.destinationOrg) {
|
||||
return mockDbSelectResult[0].destinationOrg;
|
||||
}
|
||||
|
||||
return mockGetGiteaRepoOwner({ config, repository });
|
||||
});
|
||||
return {
|
||||
isRepoPresentInGitea: mockIsRepoPresentInGitea,
|
||||
getGiteaRepoOwner: mockGetGiteaRepoOwner,
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
mirrorGithubRepoToGitea: mock(async () => {}),
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
|
||||
};
|
||||
@@ -328,6 +387,7 @@ describe("Gitea Repository Mirroring", () => {
|
||||
describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
const baseConfig: Partial<Config> = {
|
||||
githubConfig: {
|
||||
owner: "testuser",
|
||||
username: "testuser",
|
||||
token: "token",
|
||||
preserveOrgStructure: false,
|
||||
@@ -342,6 +402,8 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
mirrorPublicOrgs: false,
|
||||
publicOrgs: [],
|
||||
starredCodeOnly: false,
|
||||
starredReposOrg: "starred",
|
||||
starredReposMode: "dedicated-org",
|
||||
mirrorStrategy: "preserve"
|
||||
},
|
||||
giteaConfig: {
|
||||
@@ -350,7 +412,6 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
token: "gitea-token",
|
||||
organization: "github-mirrors",
|
||||
visibility: "public",
|
||||
starredReposOrg: "starred",
|
||||
preserveVisibility: false
|
||||
}
|
||||
};
|
||||
@@ -390,8 +451,8 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
const repo = { ...baseRepo, isStarred: true };
|
||||
const configWithoutStarredOrg = {
|
||||
...baseConfig,
|
||||
giteaConfig: {
|
||||
...baseConfig.giteaConfig,
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig,
|
||||
starredReposOrg: undefined
|
||||
}
|
||||
};
|
||||
@@ -399,6 +460,34 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
expect(result).toBe("starred");
|
||||
});
|
||||
|
||||
test("starred repos preserve owner/org when starredReposMode is preserve-owner", () => {
|
||||
const repo = { ...baseRepo, isStarred: true, owner: "FOO", organization: "FOO", fullName: "FOO/BAR" };
|
||||
const configWithPreserveStarred = {
|
||||
...baseConfig,
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig!,
|
||||
starredReposMode: "preserve-owner" as const,
|
||||
},
|
||||
};
|
||||
|
||||
const result = getGiteaRepoOwner({ config: configWithPreserveStarred, repository: repo });
|
||||
expect(result).toBe("FOO");
|
||||
});
|
||||
|
||||
test("starred personal repos preserve owner when starredReposMode is preserve-owner", () => {
|
||||
const repo = { ...baseRepo, isStarred: true, owner: "alice", organization: undefined, fullName: "alice/demo" };
|
||||
const configWithPreserveStarred = {
|
||||
...baseConfig,
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig!,
|
||||
starredReposMode: "preserve-owner" as const,
|
||||
},
|
||||
};
|
||||
|
||||
const result = getGiteaRepoOwner({ config: configWithPreserveStarred, repository: repo });
|
||||
expect(result).toBe("alice");
|
||||
});
|
||||
|
||||
// Removed test for personalReposOrg as this field no longer exists
|
||||
|
||||
test("preserve strategy: personal repos fallback to username when no override", () => {
|
||||
@@ -407,6 +496,18 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
expect(result).toBe("giteauser");
|
||||
});
|
||||
|
||||
test("preserve strategy: personal repos owned by another user keep source owner namespace", () => {
|
||||
const repo = {
|
||||
...baseRepo,
|
||||
owner: "nice-user",
|
||||
fullName: "nice-user/test-repo",
|
||||
organization: undefined,
|
||||
isForked: true,
|
||||
};
|
||||
const result = getGiteaRepoOwner({ config: baseConfig, repository: repo });
|
||||
expect(result).toBe("nice-user");
|
||||
});
|
||||
|
||||
test("preserve strategy: org repos go to same org name", () => {
|
||||
const repo = { ...baseRepo, organization: "myorg" };
|
||||
const result = getGiteaRepoOwner({ config: baseConfig, repository: repo });
|
||||
@@ -492,4 +593,46 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
|
||||
expect(result).toBe("custom-org");
|
||||
});
|
||||
|
||||
test("getGiteaRepoOwnerAsync preserves starred owner when preserve-owner mode is enabled", async () => {
|
||||
const configWithUser: Partial<Config> = {
|
||||
...baseConfig,
|
||||
userId: "user-id",
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig!,
|
||||
starredReposMode: "preserve-owner",
|
||||
},
|
||||
};
|
||||
|
||||
const repo = { ...baseRepo, isStarred: true, owner: "FOO", organization: "FOO", fullName: "FOO/BAR" };
|
||||
|
||||
const result = await getGiteaRepoOwnerAsync({
|
||||
config: configWithUser,
|
||||
repository: repo,
|
||||
});
|
||||
|
||||
expect(result).toBe("FOO");
|
||||
});
|
||||
|
||||
test("getGiteaRepoOwnerAsync preserves external personal owner for preserve strategy", async () => {
|
||||
const configWithUser: Partial<Config> = {
|
||||
...baseConfig,
|
||||
userId: "user-id",
|
||||
};
|
||||
|
||||
const repo = {
|
||||
...baseRepo,
|
||||
owner: "nice-user",
|
||||
fullName: "nice-user/test-repo",
|
||||
organization: undefined,
|
||||
isForked: true,
|
||||
};
|
||||
|
||||
const result = await getGiteaRepoOwnerAsync({
|
||||
config: configWithUser,
|
||||
repository: repo,
|
||||
});
|
||||
|
||||
expect(result).toBe("nice-user");
|
||||
});
|
||||
});
|
||||
|
||||
1033
src/lib/gitea.ts
1033
src/lib/gitea.ts
File diff suppressed because it is too large
Load Diff
@@ -22,22 +22,30 @@ if (process.env.NODE_ENV !== "test") {
|
||||
// Fallback to base Octokit if .plugin is not present
|
||||
const MyOctokit: any = (Octokit as any)?.plugin?.call
|
||||
? (Octokit as any).plugin(throttling)
|
||||
: Octokit as any;
|
||||
: (Octokit as any);
|
||||
|
||||
/**
|
||||
* Creates an authenticated Octokit instance with rate limit tracking and throttling
|
||||
*/
|
||||
export function createGitHubClient(token: string, userId?: string, username?: string): Octokit {
|
||||
export function createGitHubClient(
|
||||
token: string,
|
||||
userId?: string,
|
||||
username?: string,
|
||||
): Octokit {
|
||||
// Create a proper User-Agent to identify our application
|
||||
// This helps GitHub understand our traffic patterns and can provide better rate limits
|
||||
const userAgent = username
|
||||
? `gitea-mirror/3.5.4 (user:${username})`
|
||||
const userAgent = username
|
||||
? `gitea-mirror/3.5.4 (user:${username})`
|
||||
: "gitea-mirror/3.5.4";
|
||||
|
||||
|
||||
// Support GH_API_URL (preferred) or GITHUB_API_URL (may conflict with GitHub Actions)
|
||||
// GitHub Actions sets GITHUB_API_URL to https://api.github.com by default
|
||||
const baseUrl = process.env.GH_API_URL || process.env.GITHUB_API_URL || "https://api.github.com";
|
||||
|
||||
const octokit = new MyOctokit({
|
||||
auth: token, // Always use token for authentication (5000 req/hr vs 60 for unauthenticated)
|
||||
userAgent, // Identify our application and user
|
||||
baseUrl: "https://api.github.com", // Explicitly set the API endpoint
|
||||
baseUrl, // Configurable for E2E testing
|
||||
log: {
|
||||
debug: () => {},
|
||||
info: console.log,
|
||||
@@ -52,14 +60,19 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
},
|
||||
},
|
||||
throttle: {
|
||||
onRateLimit: async (retryAfter: number, options: any, octokit: any, retryCount: number) => {
|
||||
onRateLimit: async (
|
||||
retryAfter: number,
|
||||
options: any,
|
||||
octokit: any,
|
||||
retryCount: number,
|
||||
) => {
|
||||
const isSearch = options.url.includes("/search/");
|
||||
const maxRetries = isSearch ? 5 : 3; // Search endpoints get more retries
|
||||
|
||||
|
||||
console.warn(
|
||||
`[GitHub] Rate limit hit for ${options.method} ${options.url}. Retry ${retryCount + 1}/${maxRetries}`
|
||||
`[GitHub] Rate limit hit for ${options.method} ${options.url}. Retry ${retryCount + 1}/${maxRetries}`,
|
||||
);
|
||||
|
||||
|
||||
// Update rate limit status and notify UI (if available)
|
||||
if (userId && RateLimitManager) {
|
||||
await RateLimitManager.updateFromResponse(userId, {
|
||||
@@ -68,7 +81,7 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
"x-ratelimit-reset": (Date.now() / 1000 + retryAfter).toString(),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if (userId && publishEvent) {
|
||||
await publishEvent({
|
||||
userId,
|
||||
@@ -83,22 +96,29 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Retry with exponential backoff
|
||||
if (retryCount < maxRetries) {
|
||||
console.log(`[GitHub] Waiting ${retryAfter}s before retry...`);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// Max retries reached
|
||||
console.error(`[GitHub] Max retries (${maxRetries}) reached for ${options.url}`);
|
||||
console.error(
|
||||
`[GitHub] Max retries (${maxRetries}) reached for ${options.url}`,
|
||||
);
|
||||
return false;
|
||||
},
|
||||
onSecondaryRateLimit: async (retryAfter: number, options: any, octokit: any, retryCount: number) => {
|
||||
onSecondaryRateLimit: async (
|
||||
retryAfter: number,
|
||||
options: any,
|
||||
octokit: any,
|
||||
retryCount: number,
|
||||
) => {
|
||||
console.warn(
|
||||
`[GitHub] Secondary rate limit hit for ${options.method} ${options.url}`
|
||||
`[GitHub] Secondary rate limit hit for ${options.method} ${options.url}`,
|
||||
);
|
||||
|
||||
|
||||
// Update status and notify UI (if available)
|
||||
if (userId && publishEvent) {
|
||||
await publishEvent({
|
||||
@@ -114,13 +134,15 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Retry up to 2 times for secondary rate limits
|
||||
if (retryCount < 2) {
|
||||
console.log(`[GitHub] Waiting ${retryAfter}s for secondary rate limit...`);
|
||||
console.log(
|
||||
`[GitHub] Waiting ${retryAfter}s for secondary rate limit...`,
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
return false;
|
||||
},
|
||||
// Throttle options to prevent hitting limits
|
||||
@@ -129,50 +151,57 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
retryAfterBaseValue: 1000, // Base retry in ms
|
||||
},
|
||||
});
|
||||
|
||||
// Add additional rate limit tracking if userId is provided and RateLimitManager is available
|
||||
|
||||
// Add rate limit tracking hooks if userId is provided and RateLimitManager is available
|
||||
if (userId && RateLimitManager) {
|
||||
octokit.hook.after("request", async (response: any, options: any) => {
|
||||
// Update rate limit from response headers
|
||||
octokit.hook.after("request", async (response: any, _options: any) => {
|
||||
if (response.headers) {
|
||||
await RateLimitManager.updateFromResponse(userId, response.headers);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
octokit.hook.error("request", async (error: any, options: any) => {
|
||||
// Handle rate limit errors
|
||||
if (error.status === 403 || error.status === 429) {
|
||||
const message = error.message || "";
|
||||
|
||||
if (message.includes("rate limit") || message.includes("API rate limit")) {
|
||||
console.error(`[GitHub] Rate limit error for user ${userId}: ${message}`);
|
||||
|
||||
|
||||
if (
|
||||
message.includes("rate limit") ||
|
||||
message.includes("API rate limit")
|
||||
) {
|
||||
console.error(
|
||||
`[GitHub] Rate limit error for user ${userId}: ${message}`,
|
||||
);
|
||||
|
||||
// Update rate limit status from error response (if available)
|
||||
if (error.response?.headers && RateLimitManager) {
|
||||
await RateLimitManager.updateFromResponse(userId, error.response.headers);
|
||||
await RateLimitManager.updateFromResponse(
|
||||
userId,
|
||||
error.response.headers,
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
// Create error event for UI (if available)
|
||||
if (publishEvent) {
|
||||
await publishEvent({
|
||||
userId,
|
||||
channel: "rate-limit",
|
||||
payload: {
|
||||
type: "error",
|
||||
provider: "github",
|
||||
error: message,
|
||||
endpoint: options.url,
|
||||
message: `Rate limit exceeded: ${message}`,
|
||||
},
|
||||
});
|
||||
channel: "rate-limit",
|
||||
payload: {
|
||||
type: "error",
|
||||
provider: "github",
|
||||
error: message,
|
||||
endpoint: options.url,
|
||||
message: `Rate limit exceeded: ${message}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
return octokit;
|
||||
}
|
||||
|
||||
@@ -213,7 +242,7 @@ export async function getGithubRepositories({
|
||||
try {
|
||||
const repos = await octokit.paginate(
|
||||
octokit.repos.listForAuthenticatedUser,
|
||||
{ per_page: 100 }
|
||||
{ per_page: 100 },
|
||||
);
|
||||
|
||||
const skipForks = config.githubConfig?.skipForks ?? false;
|
||||
@@ -254,9 +283,11 @@ export async function getGithubRepositories({
|
||||
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
|
||||
|
||||
status: "imported",
|
||||
isDisabled: repo.disabled ?? false,
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
|
||||
importedAt: new Date(),
|
||||
createdAt: repo.created_at ? new Date(repo.created_at) : new Date(),
|
||||
updatedAt: repo.updated_at ? new Date(repo.updated_at) : new Date(),
|
||||
}));
|
||||
@@ -264,7 +295,7 @@ export async function getGithubRepositories({
|
||||
throw new Error(
|
||||
`Error fetching repositories: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -275,13 +306,13 @@ export async function getGithubStarredRepositories({
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
config: Partial<Config>;
|
||||
}) {
|
||||
}): Promise<GitRepo[]> {
|
||||
try {
|
||||
const starredRepos = await octokit.paginate(
|
||||
octokit.activity.listReposStarredByAuthenticatedUser,
|
||||
{
|
||||
per_page: 100,
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
return starredRepos.map((repo) => ({
|
||||
@@ -314,9 +345,11 @@ export async function getGithubStarredRepositories({
|
||||
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
|
||||
|
||||
status: "imported",
|
||||
isDisabled: repo.disabled ?? false,
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
|
||||
importedAt: new Date(),
|
||||
createdAt: repo.created_at ? new Date(repo.created_at) : new Date(),
|
||||
updatedAt: repo.updated_at ? new Date(repo.updated_at) : new Date(),
|
||||
}));
|
||||
@@ -324,7 +357,7 @@ export async function getGithubStarredRepositories({
|
||||
throw new Error(
|
||||
`Error fetching starred repositories: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -338,7 +371,7 @@ export async function getGithubOrganizations({
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
config: Partial<Config>;
|
||||
}): Promise<GitOrg[]> {
|
||||
}): Promise<{ organizations: GitOrg[]; failedOrgs: { name: string; avatarUrl: string; reason: string }[] }> {
|
||||
try {
|
||||
const { data: orgs } = await octokit.orgs.listForAuthenticatedUser({
|
||||
per_page: 100,
|
||||
@@ -347,47 +380,66 @@ export async function getGithubOrganizations({
|
||||
// Get excluded organizations from environment variable
|
||||
const excludedOrgsEnv = process.env.GITHUB_EXCLUDED_ORGS;
|
||||
const excludedOrgs = excludedOrgsEnv
|
||||
? excludedOrgsEnv.split(',').map(org => org.trim().toLowerCase())
|
||||
? excludedOrgsEnv.split(",").map((org) => org.trim().toLowerCase())
|
||||
: [];
|
||||
|
||||
// Filter out excluded organizations
|
||||
const filteredOrgs = orgs.filter(org => {
|
||||
const filteredOrgs = orgs.filter((org) => {
|
||||
if (excludedOrgs.includes(org.login.toLowerCase())) {
|
||||
console.log(`Skipping organization ${org.login} - excluded via GITHUB_EXCLUDED_ORGS environment variable`);
|
||||
console.log(
|
||||
`Skipping organization ${org.login} - excluded via GITHUB_EXCLUDED_ORGS environment variable`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
const organizations = await Promise.all(
|
||||
const failedOrgs: { name: string; avatarUrl: string; reason: string }[] = [];
|
||||
const results = await Promise.all(
|
||||
filteredOrgs.map(async (org) => {
|
||||
const [{ data: orgDetails }, { data: membership }] = await Promise.all([
|
||||
octokit.orgs.get({ org: org.login }),
|
||||
octokit.orgs.getMembershipForAuthenticatedUser({ org: org.login }),
|
||||
]);
|
||||
try {
|
||||
const [{ data: orgDetails }, { data: membership }] = await Promise.all([
|
||||
octokit.orgs.get({ org: org.login }),
|
||||
octokit.orgs.getMembershipForAuthenticatedUser({ org: org.login }),
|
||||
]);
|
||||
|
||||
const totalRepos =
|
||||
orgDetails.public_repos + (orgDetails.total_private_repos ?? 0);
|
||||
const totalRepos =
|
||||
orgDetails.public_repos + (orgDetails.total_private_repos ?? 0);
|
||||
|
||||
return {
|
||||
name: org.login,
|
||||
avatarUrl: org.avatar_url,
|
||||
membershipRole: membership.role as MembershipRole,
|
||||
isIncluded: false,
|
||||
status: "imported" as RepoStatus,
|
||||
repositoryCount: totalRepos,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
})
|
||||
return {
|
||||
name: org.login,
|
||||
avatarUrl: org.avatar_url,
|
||||
membershipRole: membership.role as MembershipRole,
|
||||
isIncluded: false,
|
||||
status: "imported" as RepoStatus,
|
||||
repositoryCount: totalRepos,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
} catch (error: any) {
|
||||
// Capture organizations that return 403 (SAML enforcement, insufficient token scope, etc.)
|
||||
if (error?.status === 403) {
|
||||
const reason = error?.message || "access denied";
|
||||
console.warn(
|
||||
`Failed to import organization ${org.login} - ${reason}`,
|
||||
);
|
||||
failedOrgs.push({ name: org.login, avatarUrl: org.avatar_url, reason });
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
return organizations;
|
||||
return {
|
||||
organizations: results.filter((org): org is NonNullable<typeof org> => org !== null),
|
||||
failedOrgs,
|
||||
};
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Error fetching organizations: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -438,9 +490,11 @@ export async function getGithubOrganizationRepositories({
|
||||
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
|
||||
|
||||
status: "imported",
|
||||
isDisabled: repo.disabled ?? false,
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
|
||||
importedAt: new Date(),
|
||||
createdAt: repo.created_at ? new Date(repo.created_at) : new Date(),
|
||||
updatedAt: repo.updated_at ? new Date(repo.updated_at) : new Date(),
|
||||
}));
|
||||
@@ -448,7 +502,7 @@ export async function getGithubOrganizationRepositories({
|
||||
throw new Error(
|
||||
`Error fetching organization repositories: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import { db, mirrorJobs } from "./db";
|
||||
import { eq, and, or, lt, isNull } from "drizzle-orm";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { publishEvent } from "./events";
|
||||
import { triggerJobNotification } from "./notification-service";
|
||||
|
||||
export async function createMirrorJob({
|
||||
userId,
|
||||
@@ -19,6 +20,7 @@ export async function createMirrorJob({
|
||||
itemIds,
|
||||
inProgress,
|
||||
skipDuplicateEvent,
|
||||
skipNotification,
|
||||
}: {
|
||||
userId: string;
|
||||
organizationId?: string;
|
||||
@@ -34,6 +36,7 @@ export async function createMirrorJob({
|
||||
itemIds?: string[];
|
||||
inProgress?: boolean;
|
||||
skipDuplicateEvent?: boolean; // Option to skip event publishing for internal operations
|
||||
skipNotification?: boolean; // Option to skip push notifications for specific internal operations
|
||||
}) {
|
||||
const jobId = uuidv4();
|
||||
const currentTimestamp = new Date();
|
||||
@@ -67,7 +70,7 @@ export async function createMirrorJob({
|
||||
// Insert the job into the database
|
||||
await db.insert(mirrorJobs).values(job);
|
||||
|
||||
// Publish the event using SQLite instead of Redis (unless skipped)
|
||||
// Publish realtime status events unless explicitly skipped
|
||||
if (!skipDuplicateEvent) {
|
||||
const channel = `mirror-status:${userId}`;
|
||||
|
||||
@@ -89,6 +92,15 @@ export async function createMirrorJob({
|
||||
});
|
||||
}
|
||||
|
||||
// Trigger push notifications for terminal statuses (never blocks the mirror flow).
|
||||
// Keep this independent from skipDuplicateEvent so event-stream suppression does not
|
||||
// silently disable user-facing notifications.
|
||||
if (!skipNotification && (status === "failed" || status === "mirrored" || status === "synced")) {
|
||||
triggerJobNotification({ userId, status, repositoryName, organizationName, message, details }).catch(err => {
|
||||
console.error("[NotificationService] Background notification failed:", err);
|
||||
});
|
||||
}
|
||||
|
||||
return jobId;
|
||||
} catch (error) {
|
||||
console.error("Error creating mirror job:", error);
|
||||
|
||||
221
src/lib/notification-service.test.ts
Normal file
221
src/lib/notification-service.test.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
import { describe, test, expect, beforeEach, mock } from "bun:test";
|
||||
|
||||
// Mock fetch globally before importing the module
|
||||
let mockFetch: ReturnType<typeof mock>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockFetch = mock(() =>
|
||||
Promise.resolve(new Response("ok", { status: 200 }))
|
||||
);
|
||||
globalThis.fetch = mockFetch as any;
|
||||
});
|
||||
|
||||
// Mock encryption module
|
||||
mock.module("@/lib/utils/encryption", () => ({
|
||||
encrypt: (val: string) => val,
|
||||
decrypt: (val: string) => val,
|
||||
isEncrypted: () => false,
|
||||
}));
|
||||
|
||||
// Import after mocks are set up — db is already mocked via setup.bun.ts
|
||||
import { sendNotification, testNotification } from "./notification-service";
|
||||
import type { NotificationConfig } from "@/types/config";
|
||||
|
||||
describe("sendNotification", () => {
|
||||
test("sends ntfy notification when provider is ntfy", async () => {
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
ntfy: {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "test-topic",
|
||||
priority: "default",
|
||||
},
|
||||
};
|
||||
|
||||
await sendNotification(config, {
|
||||
title: "Test",
|
||||
message: "Test message",
|
||||
type: "sync_success",
|
||||
});
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
const [url] = mockFetch.mock.calls[0];
|
||||
expect(url).toBe("https://ntfy.sh/test-topic");
|
||||
});
|
||||
|
||||
test("sends apprise notification when provider is apprise", async () => {
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "apprise",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
apprise: {
|
||||
url: "http://apprise:8000",
|
||||
token: "my-token",
|
||||
},
|
||||
};
|
||||
|
||||
await sendNotification(config, {
|
||||
title: "Test",
|
||||
message: "Test message",
|
||||
type: "sync_success",
|
||||
});
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
const [url] = mockFetch.mock.calls[0];
|
||||
expect(url).toBe("http://apprise:8000/notify/my-token");
|
||||
});
|
||||
|
||||
test("does not throw when fetch fails", async () => {
|
||||
mockFetch = mock(() => Promise.reject(new Error("Network error")));
|
||||
globalThis.fetch = mockFetch as any;
|
||||
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
ntfy: {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "test-topic",
|
||||
priority: "default",
|
||||
},
|
||||
};
|
||||
|
||||
// Should not throw
|
||||
await sendNotification(config, {
|
||||
title: "Test",
|
||||
message: "Test message",
|
||||
type: "sync_success",
|
||||
});
|
||||
});
|
||||
|
||||
test("skips notification when ntfy topic is missing", async () => {
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
ntfy: {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "",
|
||||
priority: "default",
|
||||
},
|
||||
};
|
||||
|
||||
await sendNotification(config, {
|
||||
title: "Test",
|
||||
message: "Test message",
|
||||
type: "sync_success",
|
||||
});
|
||||
|
||||
expect(mockFetch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("skips notification when apprise URL is missing", async () => {
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "apprise",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
apprise: {
|
||||
url: "",
|
||||
token: "my-token",
|
||||
},
|
||||
};
|
||||
|
||||
await sendNotification(config, {
|
||||
title: "Test",
|
||||
message: "Test message",
|
||||
type: "sync_success",
|
||||
});
|
||||
|
||||
expect(mockFetch).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("testNotification", () => {
|
||||
test("returns success when notification is sent", async () => {
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
ntfy: {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "test-topic",
|
||||
priority: "default",
|
||||
},
|
||||
};
|
||||
|
||||
const result = await testNotification(config);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
test("returns error when topic is missing", async () => {
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
ntfy: {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "",
|
||||
priority: "default",
|
||||
},
|
||||
};
|
||||
|
||||
const result = await testNotification(config);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain("topic");
|
||||
});
|
||||
|
||||
test("returns error when fetch fails", async () => {
|
||||
mockFetch = mock(() =>
|
||||
Promise.resolve(new Response("bad request", { status: 400 }))
|
||||
);
|
||||
globalThis.fetch = mockFetch as any;
|
||||
|
||||
const config: NotificationConfig = {
|
||||
enabled: true,
|
||||
provider: "ntfy",
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
ntfy: {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "test-topic",
|
||||
priority: "default",
|
||||
},
|
||||
};
|
||||
|
||||
const result = await testNotification(config);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toBeDefined();
|
||||
});
|
||||
|
||||
test("returns error for unknown provider", async () => {
|
||||
const config = {
|
||||
enabled: true,
|
||||
provider: "unknown" as any,
|
||||
notifyOnSyncError: true,
|
||||
notifyOnSyncSuccess: true,
|
||||
notifyOnNewRepo: false,
|
||||
};
|
||||
|
||||
const result = await testNotification(config);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain("Unknown provider");
|
||||
});
|
||||
});
|
||||
165
src/lib/notification-service.ts
Normal file
165
src/lib/notification-service.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import type { NotificationConfig } from "@/types/config";
|
||||
import type { NotificationEvent } from "./providers/ntfy";
|
||||
import { sendNtfyNotification } from "./providers/ntfy";
|
||||
import { sendAppriseNotification } from "./providers/apprise";
|
||||
import { db, configs } from "@/lib/db";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { decrypt } from "@/lib/utils/encryption";
|
||||
|
||||
/**
|
||||
* Sends a notification using the configured provider.
|
||||
* NEVER throws -- all errors are caught and logged.
|
||||
*/
|
||||
export async function sendNotification(
|
||||
config: NotificationConfig,
|
||||
event: NotificationEvent,
|
||||
): Promise<void> {
|
||||
try {
|
||||
if (config.provider === "ntfy") {
|
||||
if (!config.ntfy?.topic) {
|
||||
console.warn("[NotificationService] Ntfy topic is not configured, skipping notification");
|
||||
return;
|
||||
}
|
||||
await sendNtfyNotification(config.ntfy, event);
|
||||
} else if (config.provider === "apprise") {
|
||||
if (!config.apprise?.url || !config.apprise?.token) {
|
||||
console.warn("[NotificationService] Apprise URL or token is not configured, skipping notification");
|
||||
return;
|
||||
}
|
||||
await sendAppriseNotification(config.apprise, event);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("[NotificationService] Failed to send notification:", error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a test notification and returns the result.
|
||||
* Unlike sendNotification, this propagates the success/error status
|
||||
* so the UI can display the outcome.
|
||||
*/
|
||||
export async function testNotification(
|
||||
notificationConfig: NotificationConfig,
|
||||
): Promise<{ success: boolean; error?: string }> {
|
||||
const event: NotificationEvent = {
|
||||
title: "Gitea Mirror - Test Notification",
|
||||
message: "This is a test notification from Gitea Mirror. If you see this, notifications are working correctly!",
|
||||
type: "sync_success",
|
||||
};
|
||||
|
||||
try {
|
||||
if (notificationConfig.provider === "ntfy") {
|
||||
if (!notificationConfig.ntfy?.topic) {
|
||||
return { success: false, error: "Ntfy topic is required" };
|
||||
}
|
||||
await sendNtfyNotification(notificationConfig.ntfy, event);
|
||||
} else if (notificationConfig.provider === "apprise") {
|
||||
if (!notificationConfig.apprise?.url || !notificationConfig.apprise?.token) {
|
||||
return { success: false, error: "Apprise URL and token are required" };
|
||||
}
|
||||
await sendAppriseNotification(notificationConfig.apprise, event);
|
||||
} else {
|
||||
return { success: false, error: `Unknown provider: ${notificationConfig.provider}` };
|
||||
}
|
||||
return { success: true };
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
return { success: false, error: message };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the user's notification config from the database and triggers
|
||||
* a notification if the event type matches the user's preferences.
|
||||
*
|
||||
* NEVER throws -- all errors are caught and logged. This function is
|
||||
* designed to be called fire-and-forget from the mirror job system.
|
||||
*/
|
||||
export async function triggerJobNotification({
|
||||
userId,
|
||||
status,
|
||||
repositoryName,
|
||||
organizationName,
|
||||
message,
|
||||
details,
|
||||
}: {
|
||||
userId: string;
|
||||
status: string;
|
||||
repositoryName?: string | null;
|
||||
organizationName?: string | null;
|
||||
message?: string;
|
||||
details?: string;
|
||||
}): Promise<void> {
|
||||
try {
|
||||
// Only trigger for terminal statuses
|
||||
if (status !== "failed" && status !== "mirrored" && status !== "synced") {
|
||||
return;
|
||||
}
|
||||
|
||||
// Fetch user's config from database
|
||||
const configResults = await db
|
||||
.select()
|
||||
.from(configs)
|
||||
.where(eq(configs.userId, userId))
|
||||
.limit(1);
|
||||
|
||||
if (configResults.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const userConfig = configResults[0];
|
||||
const notificationConfig = userConfig.notificationConfig as NotificationConfig | undefined;
|
||||
|
||||
if (!notificationConfig?.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check event type against user preferences
|
||||
const isError = status === "failed";
|
||||
const isSuccess = status === "mirrored" || status === "synced";
|
||||
|
||||
if (isError && !notificationConfig.notifyOnSyncError) {
|
||||
return;
|
||||
}
|
||||
if (isSuccess && !notificationConfig.notifyOnSyncSuccess) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Only decrypt the active provider's token to avoid failures from stale
|
||||
// credentials on the inactive provider dropping the entire notification
|
||||
const decryptedConfig = { ...notificationConfig };
|
||||
if (decryptedConfig.provider === "ntfy" && decryptedConfig.ntfy?.token) {
|
||||
decryptedConfig.ntfy = {
|
||||
...decryptedConfig.ntfy,
|
||||
token: decrypt(decryptedConfig.ntfy.token),
|
||||
};
|
||||
}
|
||||
if (decryptedConfig.provider === "apprise" && decryptedConfig.apprise?.token) {
|
||||
decryptedConfig.apprise = {
|
||||
...decryptedConfig.apprise,
|
||||
token: decrypt(decryptedConfig.apprise.token),
|
||||
};
|
||||
}
|
||||
|
||||
// Build event
|
||||
const repoLabel = repositoryName || organizationName || "Unknown";
|
||||
const eventType: NotificationEvent["type"] = isError ? "sync_error" : "sync_success";
|
||||
|
||||
const event: NotificationEvent = {
|
||||
title: isError
|
||||
? `Mirror Failed: ${repoLabel}`
|
||||
: `Mirror Success: ${repoLabel}`,
|
||||
message: [
|
||||
message || `Repository ${repoLabel} ${isError ? "failed to mirror" : "mirrored successfully"}`,
|
||||
details ? `\nDetails: ${details}` : "",
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(""),
|
||||
type: eventType,
|
||||
};
|
||||
|
||||
await sendNotification(decryptedConfig, event);
|
||||
} catch (error) {
|
||||
console.error("[NotificationService] Background notification failed:", error);
|
||||
}
|
||||
}
|
||||
98
src/lib/providers/apprise.test.ts
Normal file
98
src/lib/providers/apprise.test.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { describe, test, expect, beforeEach, mock } from "bun:test";
|
||||
import { sendAppriseNotification } from "./apprise";
|
||||
import type { NotificationEvent } from "./ntfy";
|
||||
import type { AppriseConfig } from "@/types/config";
|
||||
|
||||
describe("sendAppriseNotification", () => {
|
||||
let mockFetch: ReturnType<typeof mock>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockFetch = mock(() =>
|
||||
Promise.resolve(new Response("ok", { status: 200 }))
|
||||
);
|
||||
globalThis.fetch = mockFetch as any;
|
||||
});
|
||||
|
||||
const baseConfig: AppriseConfig = {
|
||||
url: "http://apprise:8000",
|
||||
token: "gitea-mirror",
|
||||
};
|
||||
|
||||
const baseEvent: NotificationEvent = {
|
||||
title: "Test Notification",
|
||||
message: "This is a test",
|
||||
type: "sync_success",
|
||||
};
|
||||
|
||||
test("constructs correct URL from config", async () => {
|
||||
await sendAppriseNotification(baseConfig, baseEvent);
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
const [url] = mockFetch.mock.calls[0];
|
||||
expect(url).toBe("http://apprise:8000/notify/gitea-mirror");
|
||||
});
|
||||
|
||||
test("strips trailing slash from URL", async () => {
|
||||
await sendAppriseNotification(
|
||||
{ ...baseConfig, url: "http://apprise:8000/" },
|
||||
baseEvent
|
||||
);
|
||||
|
||||
const [url] = mockFetch.mock.calls[0];
|
||||
expect(url).toBe("http://apprise:8000/notify/gitea-mirror");
|
||||
});
|
||||
|
||||
test("sends correct JSON body format", async () => {
|
||||
await sendAppriseNotification(baseConfig, baseEvent);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
expect(opts.headers["Content-Type"]).toBe("application/json");
|
||||
|
||||
const body = JSON.parse(opts.body);
|
||||
expect(body.title).toBe("Test Notification");
|
||||
expect(body.body).toBe("This is a test");
|
||||
expect(body.type).toBe("success");
|
||||
});
|
||||
|
||||
test("maps sync_error to failure type", async () => {
|
||||
const errorEvent: NotificationEvent = {
|
||||
...baseEvent,
|
||||
type: "sync_error",
|
||||
};
|
||||
await sendAppriseNotification(baseConfig, errorEvent);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
const body = JSON.parse(opts.body);
|
||||
expect(body.type).toBe("failure");
|
||||
});
|
||||
|
||||
test("includes tag when configured", async () => {
|
||||
await sendAppriseNotification(
|
||||
{ ...baseConfig, tag: "urgent" },
|
||||
baseEvent
|
||||
);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
const body = JSON.parse(opts.body);
|
||||
expect(body.tag).toBe("urgent");
|
||||
});
|
||||
|
||||
test("omits tag when not configured", async () => {
|
||||
await sendAppriseNotification(baseConfig, baseEvent);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
const body = JSON.parse(opts.body);
|
||||
expect(body.tag).toBeUndefined();
|
||||
});
|
||||
|
||||
test("throws on non-200 response", async () => {
|
||||
mockFetch = mock(() =>
|
||||
Promise.resolve(new Response("server error", { status: 500 }))
|
||||
);
|
||||
globalThis.fetch = mockFetch as any;
|
||||
|
||||
expect(
|
||||
sendAppriseNotification(baseConfig, baseEvent)
|
||||
).rejects.toThrow("Apprise error: 500");
|
||||
});
|
||||
});
|
||||
15
src/lib/providers/apprise.ts
Normal file
15
src/lib/providers/apprise.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import type { AppriseConfig } from "@/types/config";
|
||||
import type { NotificationEvent } from "./ntfy";
|
||||
|
||||
export async function sendAppriseNotification(config: AppriseConfig, event: NotificationEvent): Promise<void> {
|
||||
const url = `${config.url.replace(/\/$/, "")}/notify/${config.token}`;
|
||||
const headers: Record<string, string> = { "Content-Type": "application/json" };
|
||||
const body = JSON.stringify({
|
||||
title: event.title,
|
||||
body: event.message,
|
||||
type: event.type === "sync_error" ? "failure" : "success",
|
||||
tag: config.tag || undefined,
|
||||
});
|
||||
const resp = await fetch(url, { method: "POST", body, headers });
|
||||
if (!resp.ok) throw new Error(`Apprise error: ${resp.status} ${await resp.text()}`);
|
||||
}
|
||||
95
src/lib/providers/ntfy.test.ts
Normal file
95
src/lib/providers/ntfy.test.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import { describe, test, expect, beforeEach, mock } from "bun:test";
|
||||
import { sendNtfyNotification, type NotificationEvent } from "./ntfy";
|
||||
import type { NtfyConfig } from "@/types/config";
|
||||
|
||||
describe("sendNtfyNotification", () => {
|
||||
let mockFetch: ReturnType<typeof mock>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockFetch = mock(() =>
|
||||
Promise.resolve(new Response("ok", { status: 200 }))
|
||||
);
|
||||
globalThis.fetch = mockFetch as any;
|
||||
});
|
||||
|
||||
const baseConfig: NtfyConfig = {
|
||||
url: "https://ntfy.sh",
|
||||
topic: "gitea-mirror",
|
||||
priority: "default",
|
||||
};
|
||||
|
||||
const baseEvent: NotificationEvent = {
|
||||
title: "Test Notification",
|
||||
message: "This is a test",
|
||||
type: "sync_success",
|
||||
};
|
||||
|
||||
test("constructs correct URL from config", async () => {
|
||||
await sendNtfyNotification(baseConfig, baseEvent);
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
const [url] = mockFetch.mock.calls[0];
|
||||
expect(url).toBe("https://ntfy.sh/gitea-mirror");
|
||||
});
|
||||
|
||||
test("strips trailing slash from URL", async () => {
|
||||
await sendNtfyNotification(
|
||||
{ ...baseConfig, url: "https://ntfy.sh/" },
|
||||
baseEvent
|
||||
);
|
||||
|
||||
const [url] = mockFetch.mock.calls[0];
|
||||
expect(url).toBe("https://ntfy.sh/gitea-mirror");
|
||||
});
|
||||
|
||||
test("includes Authorization header when token is present", async () => {
|
||||
await sendNtfyNotification(
|
||||
{ ...baseConfig, token: "tk_secret" },
|
||||
baseEvent
|
||||
);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
expect(opts.headers["Authorization"]).toBe("Bearer tk_secret");
|
||||
});
|
||||
|
||||
test("does not include Authorization header when no token", async () => {
|
||||
await sendNtfyNotification(baseConfig, baseEvent);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
expect(opts.headers["Authorization"]).toBeUndefined();
|
||||
});
|
||||
|
||||
test("uses high priority for sync_error events", async () => {
|
||||
const errorEvent: NotificationEvent = {
|
||||
...baseEvent,
|
||||
type: "sync_error",
|
||||
};
|
||||
await sendNtfyNotification(baseConfig, errorEvent);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
expect(opts.headers["Priority"]).toBe("high");
|
||||
expect(opts.headers["Tags"]).toBe("warning");
|
||||
});
|
||||
|
||||
test("uses config priority for non-error events", async () => {
|
||||
await sendNtfyNotification(
|
||||
{ ...baseConfig, priority: "low" },
|
||||
baseEvent
|
||||
);
|
||||
|
||||
const [, opts] = mockFetch.mock.calls[0];
|
||||
expect(opts.headers["Priority"]).toBe("low");
|
||||
expect(opts.headers["Tags"]).toBe("white_check_mark");
|
||||
});
|
||||
|
||||
test("throws on non-200 response", async () => {
|
||||
mockFetch = mock(() =>
|
||||
Promise.resolve(new Response("rate limited", { status: 429 }))
|
||||
);
|
||||
globalThis.fetch = mockFetch as any;
|
||||
|
||||
expect(
|
||||
sendNtfyNotification(baseConfig, baseEvent)
|
||||
).rejects.toThrow("Ntfy error: 429");
|
||||
});
|
||||
});
|
||||
21
src/lib/providers/ntfy.ts
Normal file
21
src/lib/providers/ntfy.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import type { NtfyConfig } from "@/types/config";
|
||||
|
||||
export interface NotificationEvent {
|
||||
title: string;
|
||||
message: string;
|
||||
type: "sync_error" | "sync_success" | "new_repo";
|
||||
}
|
||||
|
||||
export async function sendNtfyNotification(config: NtfyConfig, event: NotificationEvent): Promise<void> {
|
||||
const url = `${config.url.replace(/\/$/, "")}/${config.topic}`;
|
||||
const headers: Record<string, string> = {
|
||||
"Title": event.title,
|
||||
"Priority": event.type === "sync_error" ? "high" : (config.priority || "default"),
|
||||
"Tags": event.type === "sync_error" ? "warning" : "white_check_mark",
|
||||
};
|
||||
if (config.token) {
|
||||
headers["Authorization"] = `Bearer ${config.token}`;
|
||||
}
|
||||
const resp = await fetch(url, { method: "POST", body: event.message, headers });
|
||||
if (!resp.ok) throw new Error(`Ntfy error: ${resp.status} ${await resp.text()}`);
|
||||
}
|
||||
248
src/lib/repo-backup.test.ts
Normal file
248
src/lib/repo-backup.test.ts
Normal file
@@ -0,0 +1,248 @@
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, test } from "bun:test";
|
||||
import type { Config } from "@/types/config";
|
||||
import {
|
||||
resolveBackupPaths,
|
||||
resolveBackupStrategy,
|
||||
shouldBackupForStrategy,
|
||||
shouldBlockSyncForStrategy,
|
||||
strategyNeedsDetection,
|
||||
} from "@/lib/repo-backup";
|
||||
|
||||
describe("resolveBackupPaths", () => {
|
||||
let originalBackupDirEnv: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
originalBackupDirEnv = process.env.PRE_SYNC_BACKUP_DIR;
|
||||
delete process.env.PRE_SYNC_BACKUP_DIR;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (originalBackupDirEnv === undefined) {
|
||||
delete process.env.PRE_SYNC_BACKUP_DIR;
|
||||
} else {
|
||||
process.env.PRE_SYNC_BACKUP_DIR = originalBackupDirEnv;
|
||||
}
|
||||
});
|
||||
|
||||
test("returns absolute paths when backupDirectory is relative", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {
|
||||
backupDirectory: "data/repo-backups",
|
||||
} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot, repoBackupDir } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "RayLabsHQ",
|
||||
repoName: "gitea-mirror",
|
||||
});
|
||||
|
||||
expect(path.isAbsolute(backupRoot)).toBe(true);
|
||||
expect(path.isAbsolute(repoBackupDir)).toBe(true);
|
||||
expect(repoBackupDir).toBe(
|
||||
path.join(backupRoot, "user-123", "RayLabsHQ", "gitea-mirror")
|
||||
);
|
||||
});
|
||||
|
||||
test("returns absolute paths when backupDirectory is already absolute", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {
|
||||
backupDirectory: "/data/repo-backups",
|
||||
} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot, repoBackupDir } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "owner",
|
||||
repoName: "repo",
|
||||
});
|
||||
|
||||
expect(backupRoot).toBe("/data/repo-backups");
|
||||
expect(path.isAbsolute(repoBackupDir)).toBe(true);
|
||||
});
|
||||
|
||||
test("falls back to cwd-based path when no backupDirectory is set", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "owner",
|
||||
repoName: "repo",
|
||||
});
|
||||
|
||||
expect(path.isAbsolute(backupRoot)).toBe(true);
|
||||
expect(backupRoot).toBe(
|
||||
path.resolve(process.cwd(), "data", "repo-backups")
|
||||
);
|
||||
});
|
||||
|
||||
test("uses PRE_SYNC_BACKUP_DIR env var when config has no backupDirectory", () => {
|
||||
process.env.PRE_SYNC_BACKUP_DIR = "custom/backup/path";
|
||||
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "owner",
|
||||
repoName: "repo",
|
||||
});
|
||||
|
||||
expect(path.isAbsolute(backupRoot)).toBe(true);
|
||||
expect(backupRoot).toBe(path.resolve("custom/backup/path"));
|
||||
});
|
||||
|
||||
test("sanitizes owner and repoName in path segments", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {
|
||||
backupDirectory: "/backups",
|
||||
} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { repoBackupDir } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "org/with-slash",
|
||||
repoName: "repo name!",
|
||||
});
|
||||
|
||||
expect(repoBackupDir).toBe(
|
||||
path.join("/backups", "user-123", "org_with-slash", "repo_name_")
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// ---- Backup strategy resolver tests ----
|
||||
|
||||
function makeConfig(overrides: Record<string, any> = {}): Partial<Config> {
|
||||
return {
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "tok",
|
||||
...overrides,
|
||||
},
|
||||
} as Partial<Config>;
|
||||
}
|
||||
|
||||
const envKeysToClean = ["PRE_SYNC_BACKUP_STRATEGY", "PRE_SYNC_BACKUP_ENABLED"];
|
||||
|
||||
describe("resolveBackupStrategy", () => {
|
||||
let savedEnv: Record<string, string | undefined> = {};
|
||||
|
||||
beforeEach(() => {
|
||||
savedEnv = {};
|
||||
for (const key of envKeysToClean) {
|
||||
savedEnv[key] = process.env[key];
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
for (const [key, value] of Object.entries(savedEnv)) {
|
||||
if (value === undefined) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test("returns explicit backupStrategy when set", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "always" }))).toBe("always");
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "disabled" }))).toBe("disabled");
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "on-force-push" }))).toBe("on-force-push");
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "block-on-force-push" }))).toBe("block-on-force-push");
|
||||
});
|
||||
|
||||
test("maps backupBeforeSync: true → 'on-force-push' (backward compat, prevents silent always-backup)", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({ backupBeforeSync: true }))).toBe("on-force-push");
|
||||
});
|
||||
|
||||
test("maps backupBeforeSync: false → 'disabled' (backward compat)", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({ backupBeforeSync: false }))).toBe("disabled");
|
||||
});
|
||||
|
||||
test("prefers explicit backupStrategy over backupBeforeSync", () => {
|
||||
expect(
|
||||
resolveBackupStrategy(
|
||||
makeConfig({ backupStrategy: "on-force-push", backupBeforeSync: true }),
|
||||
),
|
||||
).toBe("on-force-push");
|
||||
});
|
||||
|
||||
test("falls back to PRE_SYNC_BACKUP_STRATEGY env var", () => {
|
||||
process.env.PRE_SYNC_BACKUP_STRATEGY = "block-on-force-push";
|
||||
expect(resolveBackupStrategy(makeConfig({}))).toBe("block-on-force-push");
|
||||
});
|
||||
|
||||
test("falls back to PRE_SYNC_BACKUP_ENABLED env var (legacy)", () => {
|
||||
process.env.PRE_SYNC_BACKUP_ENABLED = "false";
|
||||
expect(resolveBackupStrategy(makeConfig({}))).toBe("disabled");
|
||||
});
|
||||
|
||||
test("defaults to 'on-force-push' when nothing is configured", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({}))).toBe("on-force-push");
|
||||
});
|
||||
|
||||
test("handles empty giteaConfig gracefully", () => {
|
||||
expect(resolveBackupStrategy({})).toBe("on-force-push");
|
||||
});
|
||||
});
|
||||
|
||||
describe("shouldBackupForStrategy", () => {
|
||||
test("disabled → never backup", () => {
|
||||
expect(shouldBackupForStrategy("disabled", false)).toBe(false);
|
||||
expect(shouldBackupForStrategy("disabled", true)).toBe(false);
|
||||
});
|
||||
|
||||
test("always → always backup", () => {
|
||||
expect(shouldBackupForStrategy("always", false)).toBe(true);
|
||||
expect(shouldBackupForStrategy("always", true)).toBe(true);
|
||||
});
|
||||
|
||||
test("on-force-push → backup only when detected", () => {
|
||||
expect(shouldBackupForStrategy("on-force-push", false)).toBe(false);
|
||||
expect(shouldBackupForStrategy("on-force-push", true)).toBe(true);
|
||||
});
|
||||
|
||||
test("block-on-force-push → backup only when detected", () => {
|
||||
expect(shouldBackupForStrategy("block-on-force-push", false)).toBe(false);
|
||||
expect(shouldBackupForStrategy("block-on-force-push", true)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("shouldBlockSyncForStrategy", () => {
|
||||
test("only block-on-force-push + detected returns true", () => {
|
||||
expect(shouldBlockSyncForStrategy("block-on-force-push", true)).toBe(true);
|
||||
});
|
||||
|
||||
test("block-on-force-push without detection does not block", () => {
|
||||
expect(shouldBlockSyncForStrategy("block-on-force-push", false)).toBe(false);
|
||||
});
|
||||
|
||||
test("other strategies never block", () => {
|
||||
expect(shouldBlockSyncForStrategy("disabled", true)).toBe(false);
|
||||
expect(shouldBlockSyncForStrategy("always", true)).toBe(false);
|
||||
expect(shouldBlockSyncForStrategy("on-force-push", true)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("strategyNeedsDetection", () => {
|
||||
test("returns true for detection-based strategies", () => {
|
||||
expect(strategyNeedsDetection("on-force-push")).toBe(true);
|
||||
expect(strategyNeedsDetection("block-on-force-push")).toBe(true);
|
||||
});
|
||||
|
||||
test("returns false for non-detection strategies", () => {
|
||||
expect(strategyNeedsDetection("disabled")).toBe(false);
|
||||
expect(strategyNeedsDetection("always")).toBe(false);
|
||||
});
|
||||
});
|
||||
313
src/lib/repo-backup.ts
Normal file
313
src/lib/repo-backup.ts
Normal file
@@ -0,0 +1,313 @@
|
||||
import { mkdir, mkdtemp, readdir, rm, stat } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { Config, BackupStrategy } from "@/types/config";
|
||||
import { decryptConfigTokens } from "./utils/config-encryption";
|
||||
|
||||
const TRUE_VALUES = new Set(["1", "true", "yes", "on"]);
|
||||
|
||||
function parseBoolean(value: string | undefined, fallback: boolean): boolean {
|
||||
if (value === undefined) return fallback;
|
||||
return TRUE_VALUES.has(value.trim().toLowerCase());
|
||||
}
|
||||
|
||||
function parsePositiveInt(value: string | undefined, fallback: number): number {
|
||||
if (!value) return fallback;
|
||||
const parsed = Number.parseInt(value, 10);
|
||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
||||
return fallback;
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function sanitizePathSegment(input: string): string {
|
||||
return input.replace(/[^a-zA-Z0-9._-]/g, "_");
|
||||
}
|
||||
|
||||
function buildTimestamp(): string {
|
||||
// Example: 2026-02-25T18-34-22-123Z
|
||||
return new Date().toISOString().replace(/[:.]/g, "-");
|
||||
}
|
||||
|
||||
function buildAuthenticatedCloneUrl(cloneUrl: string, token: string): string {
|
||||
const parsed = new URL(cloneUrl);
|
||||
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||
return cloneUrl;
|
||||
}
|
||||
|
||||
parsed.username = process.env.PRE_SYNC_BACKUP_GIT_USERNAME || "oauth2";
|
||||
parsed.password = token;
|
||||
return parsed.toString();
|
||||
}
|
||||
|
||||
function maskToken(text: string, token: string): string {
|
||||
if (!token) return text;
|
||||
return text.split(token).join("***");
|
||||
}
|
||||
|
||||
async function runGit(args: string[], tokenToMask: string): Promise<void> {
|
||||
const proc = Bun.spawn({
|
||||
cmd: ["git", ...args],
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
new Response(proc.stdout).text(),
|
||||
new Response(proc.stderr).text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
const details = [stdout, stderr].filter(Boolean).join("\n").trim();
|
||||
const safeDetails = maskToken(details, tokenToMask);
|
||||
throw new Error(`git command failed: ${safeDetails || "unknown git error"}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function enforceRetention(
|
||||
repoBackupDir: string,
|
||||
keepCount: number,
|
||||
retentionDays: number = 0,
|
||||
): Promise<void> {
|
||||
const entries = await readdir(repoBackupDir);
|
||||
const bundleFiles = entries
|
||||
.filter((name) => name.endsWith(".bundle"))
|
||||
.map((name) => path.join(repoBackupDir, name));
|
||||
|
||||
if (bundleFiles.length === 0) return;
|
||||
|
||||
const filesWithMtime = await Promise.all(
|
||||
bundleFiles.map(async (filePath) => ({
|
||||
filePath,
|
||||
mtimeMs: (await stat(filePath)).mtimeMs,
|
||||
}))
|
||||
);
|
||||
|
||||
filesWithMtime.sort((a, b) => b.mtimeMs - a.mtimeMs);
|
||||
|
||||
const toDelete = new Set<string>();
|
||||
|
||||
// Count-based retention: keep only the N most recent
|
||||
if (filesWithMtime.length > keepCount) {
|
||||
for (const entry of filesWithMtime.slice(keepCount)) {
|
||||
toDelete.add(entry.filePath);
|
||||
}
|
||||
}
|
||||
|
||||
// Time-based retention: delete bundles older than retentionDays
|
||||
if (retentionDays > 0) {
|
||||
const cutoffMs = Date.now() - retentionDays * 86_400_000;
|
||||
for (const entry of filesWithMtime) {
|
||||
if (entry.mtimeMs < cutoffMs) {
|
||||
toDelete.add(entry.filePath);
|
||||
}
|
||||
}
|
||||
// Always keep at least 1 bundle even if it's old
|
||||
if (toDelete.size === filesWithMtime.length && filesWithMtime.length > 0) {
|
||||
toDelete.delete(filesWithMtime[0].filePath);
|
||||
}
|
||||
}
|
||||
|
||||
if (toDelete.size > 0) {
|
||||
await Promise.all([...toDelete].map((fp) => rm(fp, { force: true })));
|
||||
}
|
||||
}
|
||||
|
||||
export function isPreSyncBackupEnabled(): boolean {
|
||||
return parseBoolean(process.env.PRE_SYNC_BACKUP_ENABLED, true);
|
||||
}
|
||||
|
||||
export function shouldCreatePreSyncBackup(config: Partial<Config>): boolean {
|
||||
const configSetting = config.giteaConfig?.backupBeforeSync;
|
||||
const fallback = isPreSyncBackupEnabled();
|
||||
return configSetting === undefined ? fallback : Boolean(configSetting);
|
||||
}
|
||||
|
||||
export function shouldBlockSyncOnBackupFailure(config: Partial<Config>): boolean {
|
||||
const configSetting = config.giteaConfig?.blockSyncOnBackupFailure;
|
||||
return configSetting === undefined ? true : Boolean(configSetting);
|
||||
}
|
||||
|
||||
// ---- Backup strategy resolver ----
|
||||
|
||||
const VALID_STRATEGIES = new Set<BackupStrategy>([
|
||||
"disabled",
|
||||
"always",
|
||||
"on-force-push",
|
||||
"block-on-force-push",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Resolve the effective backup strategy from config, falling back through:
|
||||
* 1. `backupStrategy` field (new)
|
||||
* 2. `backupBeforeSync` boolean (deprecated, backward compat)
|
||||
* 3. `PRE_SYNC_BACKUP_STRATEGY` env var
|
||||
* 4. `PRE_SYNC_BACKUP_ENABLED` env var (legacy)
|
||||
* 5. Default: `"on-force-push"`
|
||||
*/
|
||||
export function resolveBackupStrategy(config: Partial<Config>): BackupStrategy {
|
||||
// 1. Explicit backupStrategy field
|
||||
const explicit = config.giteaConfig?.backupStrategy;
|
||||
if (explicit && VALID_STRATEGIES.has(explicit as BackupStrategy)) {
|
||||
return explicit as BackupStrategy;
|
||||
}
|
||||
|
||||
// 2. Legacy backupBeforeSync boolean → map to strategy
|
||||
// Note: backupBeforeSync: true now maps to "on-force-push" (not "always")
|
||||
// because mappers default backupBeforeSync to true, causing every legacy config
|
||||
// to silently resolve to "always" and create full git bundles on every sync.
|
||||
const legacy = config.giteaConfig?.backupBeforeSync;
|
||||
if (legacy !== undefined) {
|
||||
return legacy ? "on-force-push" : "disabled";
|
||||
}
|
||||
|
||||
// 3. Env var (new)
|
||||
const envStrategy = process.env.PRE_SYNC_BACKUP_STRATEGY?.trim().toLowerCase();
|
||||
if (envStrategy && VALID_STRATEGIES.has(envStrategy as BackupStrategy)) {
|
||||
return envStrategy as BackupStrategy;
|
||||
}
|
||||
|
||||
// 4. Env var (legacy)
|
||||
const envEnabled = process.env.PRE_SYNC_BACKUP_ENABLED;
|
||||
if (envEnabled !== undefined) {
|
||||
return parseBoolean(envEnabled, true) ? "always" : "disabled";
|
||||
}
|
||||
|
||||
// 5. Default
|
||||
return "on-force-push";
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether a backup should be created for the given strategy and
|
||||
* force-push detection result.
|
||||
*/
|
||||
export function shouldBackupForStrategy(
|
||||
strategy: BackupStrategy,
|
||||
forcePushDetected: boolean,
|
||||
): boolean {
|
||||
switch (strategy) {
|
||||
case "disabled":
|
||||
return false;
|
||||
case "always":
|
||||
return true;
|
||||
case "on-force-push":
|
||||
case "block-on-force-push":
|
||||
return forcePushDetected;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether sync should be blocked (requires manual approval).
|
||||
* Only `block-on-force-push` with an actual detection blocks sync.
|
||||
*/
|
||||
export function shouldBlockSyncForStrategy(
|
||||
strategy: BackupStrategy,
|
||||
forcePushDetected: boolean,
|
||||
): boolean {
|
||||
return strategy === "block-on-force-push" && forcePushDetected;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true when the strategy requires running force-push detection
|
||||
* before deciding on backup / block behavior.
|
||||
*/
|
||||
export function strategyNeedsDetection(strategy: BackupStrategy): boolean {
|
||||
return strategy === "on-force-push" || strategy === "block-on-force-push";
|
||||
}
|
||||
|
||||
export function resolveBackupPaths({
|
||||
config,
|
||||
owner,
|
||||
repoName,
|
||||
}: {
|
||||
config: Partial<Config>;
|
||||
owner: string;
|
||||
repoName: string;
|
||||
}): { backupRoot: string; repoBackupDir: string } {
|
||||
let backupRoot =
|
||||
config.giteaConfig?.backupDirectory?.trim() ||
|
||||
process.env.PRE_SYNC_BACKUP_DIR?.trim() ||
|
||||
path.join(process.cwd(), "data", "repo-backups");
|
||||
|
||||
// Ensure backupRoot is absolute - relative paths break git bundle creation
|
||||
// because git runs with -C mirrorClonePath and interprets relative paths from there.
|
||||
// Always use path.resolve() which guarantees an absolute path, rather than a
|
||||
// conditional check that can miss edge cases (e.g., NixOS systemd services).
|
||||
backupRoot = path.resolve(backupRoot);
|
||||
|
||||
const repoBackupDir = path.join(
|
||||
backupRoot,
|
||||
sanitizePathSegment(config.userId || "unknown-user"),
|
||||
sanitizePathSegment(owner),
|
||||
sanitizePathSegment(repoName)
|
||||
);
|
||||
|
||||
return { backupRoot, repoBackupDir };
|
||||
}
|
||||
|
||||
export async function createPreSyncBundleBackup({
|
||||
config,
|
||||
owner,
|
||||
repoName,
|
||||
cloneUrl,
|
||||
force,
|
||||
}: {
|
||||
config: Partial<Config>;
|
||||
owner: string;
|
||||
repoName: string;
|
||||
cloneUrl: string;
|
||||
/** When true, skip the legacy shouldCreatePreSyncBackup check.
|
||||
* Used by the strategy-driven path which has already decided to backup. */
|
||||
force?: boolean;
|
||||
}): Promise<{ bundlePath: string }> {
|
||||
if (!force && !shouldCreatePreSyncBackup(config)) {
|
||||
throw new Error("Pre-sync backup is disabled.");
|
||||
}
|
||||
|
||||
if (!config.giteaConfig?.token) {
|
||||
throw new Error("Gitea token is required for pre-sync backup.");
|
||||
}
|
||||
|
||||
const decryptedConfig = decryptConfigTokens(config as Config);
|
||||
const giteaToken = decryptedConfig.giteaConfig?.token;
|
||||
if (!giteaToken) {
|
||||
throw new Error("Decrypted Gitea token is required for pre-sync backup.");
|
||||
}
|
||||
|
||||
const { repoBackupDir } = resolveBackupPaths({ config, owner, repoName });
|
||||
const retention = Math.max(
|
||||
1,
|
||||
Number.isFinite(config.giteaConfig?.backupRetentionCount)
|
||||
? Number(config.giteaConfig?.backupRetentionCount)
|
||||
: parsePositiveInt(process.env.PRE_SYNC_BACKUP_KEEP_COUNT, 5)
|
||||
);
|
||||
const retentionDays = Math.max(
|
||||
0,
|
||||
Number.isFinite(config.giteaConfig?.backupRetentionDays)
|
||||
? Number(config.giteaConfig?.backupRetentionDays)
|
||||
: parsePositiveInt(process.env.PRE_SYNC_BACKUP_RETENTION_DAYS, 30)
|
||||
);
|
||||
|
||||
await mkdir(repoBackupDir, { recursive: true });
|
||||
|
||||
const tmpDir = await mkdtemp(path.join(os.tmpdir(), "gitea-mirror-backup-"));
|
||||
const mirrorClonePath = path.join(tmpDir, "repo.git");
|
||||
// path.resolve guarantees an absolute path, critical because git -C changes
|
||||
// the working directory and would misinterpret a relative bundlePath
|
||||
const bundlePath = path.resolve(repoBackupDir, `${buildTimestamp()}.bundle`);
|
||||
|
||||
try {
|
||||
const authCloneUrl = buildAuthenticatedCloneUrl(cloneUrl, giteaToken);
|
||||
|
||||
await runGit(["clone", "--mirror", authCloneUrl, mirrorClonePath], giteaToken);
|
||||
await runGit(["-C", mirrorClonePath, "bundle", "create", bundlePath, "--all"], giteaToken);
|
||||
|
||||
await enforceRetention(repoBackupDir, retention, retentionDays);
|
||||
return { bundlePath };
|
||||
} finally {
|
||||
await rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
17
src/lib/repo-eligibility.test.ts
Normal file
17
src/lib/repo-eligibility.test.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import { isMirrorableGitHubRepo } from "@/lib/repo-eligibility";
|
||||
|
||||
describe("isMirrorableGitHubRepo", () => {
|
||||
it("returns false for disabled repos", () => {
|
||||
expect(isMirrorableGitHubRepo({ isDisabled: true })).toBe(false);
|
||||
});
|
||||
|
||||
it("returns true for enabled repos", () => {
|
||||
expect(isMirrorableGitHubRepo({ isDisabled: false })).toBe(true);
|
||||
});
|
||||
|
||||
it("returns true when disabled flag is absent", () => {
|
||||
expect(isMirrorableGitHubRepo({})).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
6
src/lib/repo-eligibility.ts
Normal file
6
src/lib/repo-eligibility.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { GitRepo } from "@/types/Repository";
|
||||
|
||||
export function isMirrorableGitHubRepo(repo: Pick<GitRepo, "isDisabled">): boolean {
|
||||
return repo.isDisabled !== true;
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ function sampleRepo(overrides: Partial<GitRepo> = {}): GitRepo {
|
||||
status: 'imported',
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
importedAt: new Date(),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
@@ -56,6 +56,7 @@ export function normalizeGitRepoToInsert(
|
||||
status: 'imported',
|
||||
lastMirrored: repo.lastMirrored ?? null,
|
||||
errorMessage: repo.errorMessage ?? null,
|
||||
importedAt: repo.importedAt || new Date(),
|
||||
createdAt: repo.createdAt || new Date(),
|
||||
updatedAt: repo.updatedAt || new Date(),
|
||||
};
|
||||
|
||||
@@ -10,6 +10,7 @@ import { createGitHubClient, getGithubRepositories, getGithubStarredRepositories
|
||||
import { createGiteaClient, deleteGiteaRepo, archiveGiteaRepo, getGiteaRepoOwnerAsync, checkRepoLocation } from '@/lib/gitea';
|
||||
import { getDecryptedGitHubToken, getDecryptedGiteaToken } from '@/lib/utils/config-encryption';
|
||||
import { publishEvent } from '@/lib/events';
|
||||
import { isMirrorableGitHubRepo } from '@/lib/repo-eligibility';
|
||||
|
||||
let cleanupInterval: NodeJS.Timeout | null = null;
|
||||
let isCleanupRunning = false;
|
||||
@@ -59,7 +60,9 @@ async function identifyOrphanedRepositories(config: any): Promise<any[]> {
|
||||
return [];
|
||||
}
|
||||
|
||||
const githubRepoFullNames = new Set(allGithubRepos.map(repo => repo.fullName));
|
||||
const githubReposByFullName = new Map(
|
||||
allGithubRepos.map((repo) => [repo.fullName, repo] as const)
|
||||
);
|
||||
|
||||
// Get all repositories from our database
|
||||
const dbRepos = await db
|
||||
@@ -70,18 +73,30 @@ async function identifyOrphanedRepositories(config: any): Promise<any[]> {
|
||||
// Only identify repositories as orphaned if we successfully accessed GitHub
|
||||
// This prevents false positives when GitHub is down or account is inaccessible
|
||||
const orphanedRepos = dbRepos.filter(repo => {
|
||||
const isOrphaned = !githubRepoFullNames.has(repo.fullName);
|
||||
if (!isOrphaned) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Skip repositories we've already archived/preserved
|
||||
if (repo.status === 'archived' || repo.isArchived) {
|
||||
console.log(`[Repository Cleanup] Skipping ${repo.fullName} - already archived`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
// If starred repos are not being fetched from GitHub, we can't determine
|
||||
// if a starred repo is orphaned - skip it to prevent data loss
|
||||
if (repo.isStarred && !config.githubConfig?.includeStarred) {
|
||||
console.log(`[Repository Cleanup] Skipping starred repo ${repo.fullName} - starred repos not being fetched from GitHub`);
|
||||
return false;
|
||||
}
|
||||
|
||||
const githubRepo = githubReposByFullName.get(repo.fullName);
|
||||
if (!githubRepo) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!isMirrorableGitHubRepo(githubRepo)) {
|
||||
console.log(`[Repository Cleanup] Preserving ${repo.fullName} - repository is disabled on GitHub`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
if (orphanedRepos.length > 0) {
|
||||
|
||||
68
src/lib/repository-sorting.test.ts
Normal file
68
src/lib/repository-sorting.test.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
import { sortRepositories } from "@/lib/repository-sorting";
|
||||
|
||||
function makeRepo(overrides: Partial<Repository>): Repository {
|
||||
return {
|
||||
id: "id",
|
||||
userId: "user-1",
|
||||
configId: "config-1",
|
||||
name: "repo",
|
||||
fullName: "owner/repo",
|
||||
normalizedFullName: "owner/repo",
|
||||
url: "https://github.com/owner/repo",
|
||||
cloneUrl: "https://github.com/owner/repo.git",
|
||||
owner: "owner",
|
||||
organization: null,
|
||||
mirroredLocation: "",
|
||||
isPrivate: false,
|
||||
isForked: false,
|
||||
forkedFrom: null,
|
||||
hasIssues: true,
|
||||
isStarred: false,
|
||||
isArchived: false,
|
||||
size: 1,
|
||||
hasLFS: false,
|
||||
hasSubmodules: false,
|
||||
language: null,
|
||||
description: null,
|
||||
defaultBranch: "main",
|
||||
visibility: "public",
|
||||
status: "imported",
|
||||
lastMirrored: null,
|
||||
errorMessage: null,
|
||||
destinationOrg: null,
|
||||
metadata: null,
|
||||
importedAt: new Date("2026-01-01T00:00:00.000Z"),
|
||||
createdAt: new Date("2020-01-01T00:00:00.000Z"),
|
||||
updatedAt: new Date("2026-01-01T00:00:00.000Z"),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("sortRepositories", () => {
|
||||
test("defaults to recently imported first", () => {
|
||||
const repos = [
|
||||
makeRepo({ id: "a", fullName: "owner/a", importedAt: new Date("2026-01-01T00:00:00.000Z") }),
|
||||
makeRepo({ id: "b", fullName: "owner/b", importedAt: new Date("2026-03-01T00:00:00.000Z") }),
|
||||
makeRepo({ id: "c", fullName: "owner/c", importedAt: new Date("2025-12-01T00:00:00.000Z") }),
|
||||
];
|
||||
|
||||
const sorted = sortRepositories(repos, undefined);
|
||||
expect(sorted.map((repo) => repo.id)).toEqual(["b", "a", "c"]);
|
||||
});
|
||||
|
||||
test("supports name and updated sorting", () => {
|
||||
const repos = [
|
||||
makeRepo({ id: "a", fullName: "owner/zeta", updatedAt: new Date("2026-01-01T00:00:00.000Z") }),
|
||||
makeRepo({ id: "b", fullName: "owner/alpha", updatedAt: new Date("2026-03-01T00:00:00.000Z") }),
|
||||
makeRepo({ id: "c", fullName: "owner/middle", updatedAt: new Date("2025-12-01T00:00:00.000Z") }),
|
||||
];
|
||||
|
||||
const nameSorted = sortRepositories(repos, "name-asc");
|
||||
expect(nameSorted.map((repo) => repo.id)).toEqual(["b", "c", "a"]);
|
||||
|
||||
const updatedSorted = sortRepositories(repos, "updated-desc");
|
||||
expect(updatedSorted.map((repo) => repo.id)).toEqual(["b", "a", "c"]);
|
||||
});
|
||||
});
|
||||
40
src/lib/repository-sorting.ts
Normal file
40
src/lib/repository-sorting.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
|
||||
export type RepositorySortOrder =
|
||||
| "imported-desc"
|
||||
| "imported-asc"
|
||||
| "updated-desc"
|
||||
| "updated-asc"
|
||||
| "name-asc"
|
||||
| "name-desc";
|
||||
|
||||
function getTimestamp(value: Date | string | null | undefined): number {
|
||||
if (!value) return 0;
|
||||
const timestamp = new Date(value).getTime();
|
||||
return Number.isNaN(timestamp) ? 0 : timestamp;
|
||||
}
|
||||
|
||||
export function sortRepositories(
|
||||
repositories: Repository[],
|
||||
sortOrder: string | undefined,
|
||||
): Repository[] {
|
||||
const order = (sortOrder ?? "imported-desc") as RepositorySortOrder;
|
||||
|
||||
return [...repositories].sort((a, b) => {
|
||||
switch (order) {
|
||||
case "imported-asc":
|
||||
return getTimestamp(a.importedAt) - getTimestamp(b.importedAt);
|
||||
case "updated-desc":
|
||||
return getTimestamp(b.updatedAt) - getTimestamp(a.updatedAt);
|
||||
case "updated-asc":
|
||||
return getTimestamp(a.updatedAt) - getTimestamp(b.updatedAt);
|
||||
case "name-asc":
|
||||
return a.fullName.localeCompare(b.fullName, undefined, { sensitivity: "base" });
|
||||
case "name-desc":
|
||||
return b.fullName.localeCompare(a.fullName, undefined, { sensitivity: "base" });
|
||||
case "imported-desc":
|
||||
default:
|
||||
return getTimestamp(b.importedAt) - getTimestamp(a.importedAt);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -12,6 +12,8 @@ import { parseInterval, formatDuration } from '@/lib/utils/duration-parser';
|
||||
import type { Repository } from '@/lib/db/schema';
|
||||
import { repoStatusEnum, repositoryVisibilityEnum } from '@/types/Repository';
|
||||
import { mergeGitReposPreferStarred, normalizeGitRepoToInsert, calcBatchSizeForInsert } from '@/lib/repo-utils';
|
||||
import { isMirrorableGitHubRepo } from '@/lib/repo-eligibility';
|
||||
import { createMirrorJob } from '@/lib/helpers';
|
||||
|
||||
let schedulerInterval: NodeJS.Timeout | null = null;
|
||||
let isSchedulerRunning = false;
|
||||
@@ -96,6 +98,7 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
: Promise.resolve([]),
|
||||
]);
|
||||
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
|
||||
const mirrorableGithubRepos = allGithubRepos.filter(isMirrorableGitHubRepo);
|
||||
|
||||
// Check for new repositories
|
||||
const existingRepos = await db
|
||||
@@ -104,7 +107,7 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
.where(eq(repositories.userId, userId));
|
||||
|
||||
const existingRepoNames = new Set(existingRepos.map(r => r.normalizedFullName));
|
||||
const newRepos = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
const newRepos = mirrorableGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
|
||||
if (newRepos.length > 0) {
|
||||
console.log(`[Scheduler] Found ${newRepos.length} new repositories for user ${userId}`);
|
||||
@@ -126,9 +129,26 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
.onConflictDoNothing({ target: [repositories.userId, repositories.normalizedFullName] });
|
||||
}
|
||||
console.log(`[Scheduler] Successfully imported ${newRepos.length} new repositories for user ${userId}`);
|
||||
|
||||
// Log activity for each newly imported repo
|
||||
for (const repo of newRepos) {
|
||||
const sourceLabel = repo.isStarred ? 'starred' : 'owned';
|
||||
await createMirrorJob({
|
||||
userId,
|
||||
repositoryName: repo.fullName,
|
||||
message: `Auto-imported ${sourceLabel} repository: ${repo.fullName}`,
|
||||
details: `Repository ${repo.fullName} was discovered and imported during scheduled sync.`,
|
||||
status: 'imported',
|
||||
skipDuplicateEvent: true,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.log(`[Scheduler] No new repositories found for user ${userId}`);
|
||||
}
|
||||
const skippedDisabledCount = allGithubRepos.length - mirrorableGithubRepos.length;
|
||||
if (skippedDisabledCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedDisabledCount} disabled GitHub repositories for user ${userId}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[Scheduler] Failed to auto-import repositories for user ${userId}:`, error);
|
||||
}
|
||||
@@ -170,7 +190,7 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
if (scheduleConfig.autoMirror) {
|
||||
try {
|
||||
console.log(`[Scheduler] Auto-mirror enabled - checking for repositories to mirror for user ${userId}...`);
|
||||
const reposNeedingMirror = await db
|
||||
let reposNeedingMirror = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
@@ -184,6 +204,19 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
)
|
||||
);
|
||||
|
||||
// Filter out starred repos from auto-mirror when autoMirrorStarred is disabled
|
||||
if (!config.githubConfig?.autoMirrorStarred) {
|
||||
const githubOwner = config.githubConfig?.owner || '';
|
||||
const beforeCount = reposNeedingMirror.length;
|
||||
reposNeedingMirror = reposNeedingMirror.filter(
|
||||
repo => !repo.isStarred || repo.owner === githubOwner
|
||||
);
|
||||
const skippedCount = beforeCount - reposNeedingMirror.length;
|
||||
if (skippedCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedCount} starred repositories from auto-mirror (autoMirrorStarred is disabled)`);
|
||||
}
|
||||
}
|
||||
|
||||
if (reposNeedingMirror.length > 0) {
|
||||
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need initial mirroring`);
|
||||
|
||||
@@ -274,11 +307,29 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
});
|
||||
}
|
||||
|
||||
// Log pending-approval repos that are excluded from sync
|
||||
try {
|
||||
const pendingApprovalRepos = await db
|
||||
.select({ id: repositories.id })
|
||||
.from(repositories)
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
eq(repositories.status, 'pending-approval')
|
||||
)
|
||||
);
|
||||
if (pendingApprovalRepos.length > 0) {
|
||||
console.log(`[Scheduler] ${pendingApprovalRepos.length} repositories pending approval (force-push detected) for user ${userId} — skipping sync for those`);
|
||||
}
|
||||
} catch {
|
||||
// Non-critical logging, ignore errors
|
||||
}
|
||||
|
||||
if (reposToSync.length === 0) {
|
||||
console.log(`[Scheduler] No repositories to sync for user ${userId}`);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
console.log(`[Scheduler] Syncing ${reposToSync.length} repositories for user ${userId}`);
|
||||
|
||||
// Process repositories in batches
|
||||
@@ -429,6 +480,7 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
: Promise.resolve([]),
|
||||
]);
|
||||
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
|
||||
const mirrorableGithubRepos = allGithubRepos.filter(isMirrorableGitHubRepo);
|
||||
|
||||
// Check for new repositories
|
||||
const existingRepos = await db
|
||||
@@ -437,7 +489,7 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
.where(eq(repositories.userId, config.userId));
|
||||
|
||||
const existingRepoNames = new Set(existingRepos.map(r => r.normalizedFullName));
|
||||
const reposToImport = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
const reposToImport = mirrorableGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
|
||||
if (reposToImport.length > 0) {
|
||||
console.log(`[Scheduler] Importing ${reposToImport.length} repositories for user ${config.userId}...`);
|
||||
@@ -459,10 +511,27 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
.onConflictDoNothing({ target: [repositories.userId, repositories.normalizedFullName] });
|
||||
}
|
||||
console.log(`[Scheduler] Successfully imported ${reposToImport.length} repositories`);
|
||||
|
||||
// Log activity for each newly imported repo
|
||||
for (const repo of reposToImport) {
|
||||
const sourceLabel = repo.isStarred ? 'starred' : 'owned';
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryName: repo.fullName,
|
||||
message: `Auto-imported ${sourceLabel} repository: ${repo.fullName}`,
|
||||
details: `Repository ${repo.fullName} was discovered and imported during auto-start.`,
|
||||
status: 'imported',
|
||||
skipDuplicateEvent: true,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.log(`[Scheduler] No new repositories to import for user ${config.userId}`);
|
||||
}
|
||||
|
||||
const skippedDisabledCount = allGithubRepos.length - mirrorableGithubRepos.length;
|
||||
if (skippedDisabledCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedDisabledCount} disabled GitHub repositories for user ${config.userId}`);
|
||||
}
|
||||
|
||||
// Check if we already have mirrored repositories (indicating this isn't first run)
|
||||
const mirroredRepos = await db
|
||||
.select()
|
||||
@@ -505,8 +574,34 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
}
|
||||
|
||||
// Step 2: Trigger mirror for all repositories that need mirroring
|
||||
// Only auto-mirror if autoMirror is enabled in schedule config
|
||||
if (!config.scheduleConfig?.autoMirror) {
|
||||
console.log(`[Scheduler] Step 2: Skipping initial mirror - autoMirror is disabled for user ${config.userId}`);
|
||||
|
||||
// Still update schedule config timestamps
|
||||
const currentTime2 = new Date();
|
||||
const intervalSource2 = config.scheduleConfig?.interval ||
|
||||
config.giteaConfig?.mirrorInterval ||
|
||||
'8h';
|
||||
const interval2 = parseScheduleInterval(intervalSource2);
|
||||
const nextRun2 = new Date(currentTime2.getTime() + interval2);
|
||||
|
||||
await db.update(configs).set({
|
||||
scheduleConfig: {
|
||||
...config.scheduleConfig,
|
||||
enabled: true,
|
||||
lastRun: currentTime2,
|
||||
nextRun: nextRun2,
|
||||
},
|
||||
updatedAt: currentTime2,
|
||||
}).where(eq(configs.id, config.id));
|
||||
|
||||
console.log(`[Scheduler] Scheduling enabled for user ${config.userId}, next sync at ${nextRun2.toISOString()}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(`[Scheduler] Step 2: Triggering mirror for repositories that need mirroring...`);
|
||||
const reposNeedingMirror = await db
|
||||
let reposNeedingMirror = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
@@ -519,7 +614,20 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
|
||||
// Filter out starred repos from auto-mirror when autoMirrorStarred is disabled
|
||||
if (!config.githubConfig?.autoMirrorStarred) {
|
||||
const githubOwner = config.githubConfig?.owner || '';
|
||||
const beforeCount = reposNeedingMirror.length;
|
||||
reposNeedingMirror = reposNeedingMirror.filter(
|
||||
repo => !repo.isStarred || repo.owner === githubOwner
|
||||
);
|
||||
const skippedCount = beforeCount - reposNeedingMirror.length;
|
||||
if (skippedCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedCount} starred repositories from initial auto-mirror (autoMirrorStarred is disabled)`);
|
||||
}
|
||||
}
|
||||
|
||||
if (reposNeedingMirror.length > 0) {
|
||||
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need mirroring`);
|
||||
|
||||
|
||||
@@ -92,8 +92,13 @@ async function preCreateOrganizations({
|
||||
// Get unique organization names
|
||||
const orgNames = new Set<string>();
|
||||
|
||||
// Add starred repos org
|
||||
if (config.githubConfig?.starredReposOrg) {
|
||||
const starredReposMode = config.githubConfig?.starredReposMode || "dedicated-org";
|
||||
|
||||
if (starredReposMode === "preserve-owner") {
|
||||
for (const repo of repositories) {
|
||||
orgNames.add(repo.organization || repo.owner);
|
||||
}
|
||||
} else if (config.githubConfig?.starredReposOrg) {
|
||||
orgNames.add(config.githubConfig.starredReposOrg);
|
||||
} else {
|
||||
orgNames.add("starred");
|
||||
@@ -129,7 +134,11 @@ async function processStarredRepository({
|
||||
octokit: Octokit;
|
||||
strategyConfig: ReturnType<typeof getMirrorStrategyConfig>;
|
||||
}): Promise<void> {
|
||||
const starredOrg = config.githubConfig?.starredReposOrg || "starred";
|
||||
const starredReposMode = config.githubConfig?.starredReposMode || "dedicated-org";
|
||||
const starredOrg =
|
||||
starredReposMode === "preserve-owner"
|
||||
? repository.organization || repository.owner
|
||||
: config.githubConfig?.starredReposOrg || "starred";
|
||||
|
||||
// Check if repository exists in Gitea
|
||||
const existingRepo = await getGiteaRepoInfo({
|
||||
@@ -257,7 +266,11 @@ export async function syncStarredRepositories({
|
||||
if (error instanceof Error && error.message.includes("not a mirror")) {
|
||||
console.warn(`Repository ${repository.name} is not a mirror, handling...`);
|
||||
|
||||
const starredOrg = config.githubConfig?.starredReposOrg || "starred";
|
||||
const starredReposMode = config.githubConfig?.starredReposMode || "dedicated-org";
|
||||
const starredOrg =
|
||||
starredReposMode === "preserve-owner"
|
||||
? repository.organization || repository.owner
|
||||
: config.githubConfig?.starredReposOrg || "starred";
|
||||
const repoInfo = await getGiteaRepoInfo({
|
||||
config,
|
||||
owner: starredOrg,
|
||||
@@ -287,4 +300,4 @@ export async function syncStarredRepositories({
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -169,4 +169,31 @@ describe("parseErrorMessage", () => {
|
||||
expect(result.description).toBeUndefined();
|
||||
expect(result.isStructured).toBe(false);
|
||||
});
|
||||
|
||||
test("adds trusted origins guidance for invalid origin errors", () => {
|
||||
const errorMessage = "Invalid Origin: https://mirror.example.com";
|
||||
|
||||
const result = parseErrorMessage(errorMessage);
|
||||
|
||||
expect(result.title).toBe("Invalid Origin");
|
||||
expect(result.description).toContain("BETTER_AUTH_TRUSTED_ORIGINS");
|
||||
expect(result.description).toContain("https://mirror.example.com");
|
||||
expect(result.isStructured).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("showErrorToast", () => {
|
||||
test("shows invalid origin guidance in toast description", () => {
|
||||
const calls: any[] = [];
|
||||
const toast = {
|
||||
error: (...args: any[]) => calls.push(args),
|
||||
};
|
||||
|
||||
showErrorToast("Invalid Origin: http://10.10.20.45:4321", toast);
|
||||
|
||||
expect(calls).toHaveLength(1);
|
||||
expect(calls[0][0]).toBe("Invalid Origin");
|
||||
expect(calls[0][1].description).toContain("BETTER_AUTH_TRUSTED_ORIGINS");
|
||||
expect(calls[0][1].description).toContain("http://10.10.20.45:4321");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -11,9 +11,11 @@ export function cn(...inputs: ClassValue[]) {
|
||||
|
||||
export function generateRandomString(length: number): string {
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
|
||||
const randomValues = new Uint32Array(length);
|
||||
crypto.getRandomValues(randomValues);
|
||||
let result = '';
|
||||
for (let i = 0; i < length; i++) {
|
||||
result += chars.charAt(Math.floor(Math.random() * chars.length));
|
||||
result += chars.charAt(randomValues[i] % chars.length);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -86,6 +88,30 @@ export interface ParsedErrorMessage {
|
||||
isStructured: boolean;
|
||||
}
|
||||
|
||||
function getInvalidOriginGuidance(title: string, description?: string): ParsedErrorMessage | null {
|
||||
const fullMessage = `${title} ${description ?? ""}`.trim();
|
||||
if (!/invalid origin/i.test(fullMessage)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const urlMatch = fullMessage.match(/https?:\/\/[^\s'")]+/i);
|
||||
let originHint = "this URL";
|
||||
|
||||
if (urlMatch) {
|
||||
try {
|
||||
originHint = new URL(urlMatch[0]).origin;
|
||||
} catch {
|
||||
originHint = urlMatch[0];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
title: "Invalid Origin",
|
||||
description: `Add ${originHint} to BETTER_AUTH_TRUSTED_ORIGINS and restart the app.`,
|
||||
isStructured: true,
|
||||
};
|
||||
}
|
||||
|
||||
export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
// Handle Error objects
|
||||
if (error instanceof Error) {
|
||||
@@ -102,29 +128,32 @@ export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
if (typeof parsed === "object" && parsed !== null) {
|
||||
// Format 1: { error: "message", errorType: "type", troubleshooting: "info" }
|
||||
if (parsed.error) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: parsed.error,
|
||||
description: parsed.troubleshooting || parsed.errorType || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
// Format 2: { title: "title", description: "desc" }
|
||||
if (parsed.title) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: parsed.title,
|
||||
description: parsed.description || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
// Format 3: { message: "msg", details: "details" }
|
||||
if (parsed.message) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: parsed.message,
|
||||
description: parsed.details || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
@@ -132,11 +161,12 @@ export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
}
|
||||
|
||||
// Plain string message
|
||||
return {
|
||||
const formatted = {
|
||||
title: error,
|
||||
description: undefined,
|
||||
isStructured: false,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
// Handle objects directly
|
||||
@@ -144,36 +174,40 @@ export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
const errorObj = error as any;
|
||||
|
||||
if (errorObj.error) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: errorObj.error,
|
||||
description: errorObj.troubleshooting || errorObj.errorType || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
if (errorObj.title) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: errorObj.title,
|
||||
description: errorObj.description || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
if (errorObj.message) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: errorObj.message,
|
||||
description: errorObj.details || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback for unknown types
|
||||
return {
|
||||
const fallback = {
|
||||
title: String(error),
|
||||
description: undefined,
|
||||
isStructured: false,
|
||||
};
|
||||
return getInvalidOriginGuidance(fallback.title, fallback.description) || fallback;
|
||||
}
|
||||
|
||||
// Enhanced toast helper that parses structured error messages
|
||||
@@ -248,6 +282,8 @@ export const getStatusColor = (status: string): string => {
|
||||
return "bg-orange-500"; // Deleting
|
||||
case "deleted":
|
||||
return "bg-gray-600"; // Deleted
|
||||
case "pending-approval":
|
||||
return "bg-amber-500"; // Needs manual approval
|
||||
default:
|
||||
return "bg-gray-400"; // Unknown/neutral
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ export interface DefaultConfigOptions {
|
||||
githubToken?: string;
|
||||
githubUsername?: string;
|
||||
giteaUrl?: string;
|
||||
giteaExternalUrl?: string;
|
||||
giteaToken?: string;
|
||||
giteaUsername?: string;
|
||||
scheduleEnabled?: boolean;
|
||||
@@ -38,6 +39,8 @@ export async function createDefaultConfig({ userId, envOverrides = {} }: Default
|
||||
const githubToken = envOverrides.githubToken || process.env.GITHUB_TOKEN || "";
|
||||
const githubUsername = envOverrides.githubUsername || process.env.GITHUB_USERNAME || "";
|
||||
const giteaUrl = envOverrides.giteaUrl || process.env.GITEA_URL || "";
|
||||
const giteaExternalUrl =
|
||||
envOverrides.giteaExternalUrl || process.env.GITEA_EXTERNAL_URL || "";
|
||||
const giteaToken = envOverrides.giteaToken || process.env.GITEA_TOKEN || "";
|
||||
const giteaUsername = envOverrides.giteaUsername || process.env.GITEA_USERNAME || "";
|
||||
|
||||
@@ -71,11 +74,13 @@ export async function createDefaultConfig({ userId, envOverrides = {} }: Default
|
||||
includePublic: true,
|
||||
includeOrganizations: [],
|
||||
starredReposOrg: "starred",
|
||||
starredReposMode: "dedicated-org",
|
||||
mirrorStrategy: "preserve",
|
||||
defaultOrg: "github-mirrors",
|
||||
},
|
||||
giteaConfig: {
|
||||
url: giteaUrl,
|
||||
externalUrl: giteaExternalUrl || undefined,
|
||||
token: giteaToken ? encrypt(giteaToken) : "",
|
||||
defaultOwner: giteaUsername,
|
||||
mirrorInterval: "8h",
|
||||
@@ -88,6 +93,12 @@ export async function createDefaultConfig({ userId, envOverrides = {} }: Default
|
||||
forkStrategy: "reference",
|
||||
issueConcurrency: 3,
|
||||
pullRequestConcurrency: 5,
|
||||
backupStrategy: "on-force-push",
|
||||
backupBeforeSync: true, // Deprecated: kept for backward compat
|
||||
backupRetentionCount: 5,
|
||||
backupRetentionDays: 30,
|
||||
backupDirectory: "data/repo-backups",
|
||||
blockSyncOnBackupFailure: true,
|
||||
},
|
||||
include: [],
|
||||
exclude: [],
|
||||
|
||||
@@ -48,6 +48,7 @@ export function mapUiToDbConfig(
|
||||
|
||||
// Starred repos organization
|
||||
starredReposOrg: giteaConfig.starredReposOrg,
|
||||
starredReposMode: giteaConfig.starredReposMode || "dedicated-org",
|
||||
|
||||
// Mirror strategy
|
||||
mirrorStrategy: giteaConfig.mirrorStrategy || "preserve",
|
||||
@@ -55,11 +56,13 @@ export function mapUiToDbConfig(
|
||||
|
||||
// Advanced options
|
||||
starredCodeOnly: advancedOptions.starredCodeOnly,
|
||||
autoMirrorStarred: advancedOptions.autoMirrorStarred ?? false,
|
||||
};
|
||||
|
||||
// Map Gitea config to match database schema
|
||||
const dbGiteaConfig: DbGiteaConfig = {
|
||||
url: giteaConfig.url,
|
||||
externalUrl: giteaConfig.externalUrl?.trim() || undefined,
|
||||
token: giteaConfig.token,
|
||||
defaultOwner: giteaConfig.username, // Map username to defaultOwner
|
||||
organization: giteaConfig.organization, // Add organization field
|
||||
@@ -98,6 +101,12 @@ export function mapUiToDbConfig(
|
||||
mirrorPullRequests: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.pullRequests,
|
||||
mirrorLabels: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.labels,
|
||||
mirrorMilestones: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.milestones,
|
||||
backupStrategy: giteaConfig.backupStrategy || "on-force-push",
|
||||
backupBeforeSync: giteaConfig.backupBeforeSync ?? true,
|
||||
backupRetentionCount: giteaConfig.backupRetentionCount ?? 5,
|
||||
backupRetentionDays: giteaConfig.backupRetentionDays ?? 30,
|
||||
backupDirectory: giteaConfig.backupDirectory?.trim() || undefined,
|
||||
blockSyncOnBackupFailure: giteaConfig.blockSyncOnBackupFailure ?? true,
|
||||
};
|
||||
|
||||
return {
|
||||
@@ -126,16 +135,26 @@ export function mapDbToUiConfig(dbConfig: any): {
|
||||
// Map from database Gitea config to UI fields
|
||||
const giteaConfig: GiteaConfig = {
|
||||
url: dbConfig.giteaConfig?.url || "",
|
||||
externalUrl: dbConfig.giteaConfig?.externalUrl || "",
|
||||
username: dbConfig.giteaConfig?.defaultOwner || "", // Map defaultOwner to username
|
||||
token: dbConfig.giteaConfig?.token || "",
|
||||
organization: dbConfig.githubConfig?.defaultOrg || "github-mirrors", // Get from GitHub config
|
||||
visibility: dbConfig.giteaConfig?.visibility === "default" ? "public" : dbConfig.giteaConfig?.visibility || "public",
|
||||
starredReposOrg: dbConfig.githubConfig?.starredReposOrg || "starred", // Get from GitHub config
|
||||
starredReposMode: dbConfig.githubConfig?.starredReposMode || "dedicated-org", // Get from GitHub config
|
||||
preserveOrgStructure: dbConfig.giteaConfig?.preserveVisibility || false, // Map preserveVisibility
|
||||
mirrorStrategy: dbConfig.githubConfig?.mirrorStrategy || "preserve", // Get from GitHub config
|
||||
personalReposOrg: undefined, // Not stored in current schema
|
||||
issueConcurrency: dbConfig.giteaConfig?.issueConcurrency ?? 3,
|
||||
pullRequestConcurrency: dbConfig.giteaConfig?.pullRequestConcurrency ?? 5,
|
||||
backupStrategy: dbConfig.giteaConfig?.backupStrategy ||
|
||||
// Respect legacy backupBeforeSync: false → "disabled" mapping on round-trip
|
||||
(dbConfig.giteaConfig?.backupBeforeSync === false ? "disabled" : "on-force-push"),
|
||||
backupBeforeSync: dbConfig.giteaConfig?.backupBeforeSync ?? true,
|
||||
backupRetentionCount: dbConfig.giteaConfig?.backupRetentionCount ?? 5,
|
||||
backupRetentionDays: dbConfig.giteaConfig?.backupRetentionDays ?? 30,
|
||||
backupDirectory: dbConfig.giteaConfig?.backupDirectory || "data/repo-backups",
|
||||
blockSyncOnBackupFailure: dbConfig.giteaConfig?.blockSyncOnBackupFailure ?? true,
|
||||
};
|
||||
|
||||
// Map mirror options from various database fields
|
||||
@@ -158,6 +177,7 @@ export function mapDbToUiConfig(dbConfig: any): {
|
||||
skipForks: !(dbConfig.githubConfig?.includeForks ?? true), // Invert includeForks to get skipForks
|
||||
// Support both old (skipStarredIssues) and new (starredCodeOnly) field names for backward compatibility
|
||||
starredCodeOnly: dbConfig.githubConfig?.starredCodeOnly ?? (dbConfig.githubConfig as any)?.skipStarredIssues ?? false,
|
||||
autoMirrorStarred: dbConfig.githubConfig?.autoMirrorStarred ?? false,
|
||||
};
|
||||
|
||||
return {
|
||||
|
||||
@@ -160,10 +160,23 @@ export function generateSecureToken(length: number = 32): string {
|
||||
}
|
||||
|
||||
/**
|
||||
* Hashes a value using SHA-256 (for non-reversible values like API keys for comparison)
|
||||
* Hashes a value using SHA-256 with a random salt (for non-reversible values like API keys)
|
||||
* @param value The value to hash
|
||||
* @returns Hex encoded hash
|
||||
* @returns Salt and hash in format "salt:hash"
|
||||
*/
|
||||
export function hashValue(value: string): string {
|
||||
return crypto.createHash('sha256').update(value).digest('hex');
|
||||
const salt = crypto.randomBytes(16).toString('hex');
|
||||
const hash = crypto.createHash('sha256').update(salt + value).digest('hex');
|
||||
return `${salt}:${hash}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Verifies a value against a salted hash produced by hashValue()
|
||||
* Uses constant-time comparison to prevent timing attacks
|
||||
*/
|
||||
export function verifyHash(value: string, saltedHash: string): boolean {
|
||||
const [salt, expectedHash] = saltedHash.split(':');
|
||||
if (!salt || !expectedHash) return false;
|
||||
const actualHash = crypto.createHash('sha256').update(salt + value).digest('hex');
|
||||
return crypto.timingSafeEqual(Buffer.from(actualHash, 'hex'), Buffer.from(expectedHash, 'hex'));
|
||||
}
|
||||
319
src/lib/utils/force-push-detection.test.ts
Normal file
319
src/lib/utils/force-push-detection.test.ts
Normal file
@@ -0,0 +1,319 @@
|
||||
import { describe, expect, it, mock } from "bun:test";
|
||||
import {
|
||||
detectForcePush,
|
||||
fetchGitHubBranches,
|
||||
checkAncestry,
|
||||
type BranchInfo,
|
||||
} from "./force-push-detection";
|
||||
|
||||
// ---- Helpers ----
|
||||
|
||||
function makeOctokit(overrides: Record<string, any> = {}) {
|
||||
return {
|
||||
repos: {
|
||||
listBranches: mock(() => Promise.resolve({ data: [] })),
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "ahead" } }),
|
||||
),
|
||||
...overrides.repos,
|
||||
},
|
||||
paginate: mock(async (_method: any, params: any) => {
|
||||
// Default: return whatever the test wired into _githubBranches
|
||||
return overrides._githubBranches ?? [];
|
||||
}),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
// ---- fetchGitHubBranches ----
|
||||
|
||||
describe("fetchGitHubBranches", () => {
|
||||
it("maps Octokit paginated response to BranchInfo[]", async () => {
|
||||
const octokit = makeOctokit({
|
||||
_githubBranches: [
|
||||
{ name: "main", commit: { sha: "aaa" } },
|
||||
{ name: "dev", commit: { sha: "bbb" } },
|
||||
],
|
||||
});
|
||||
|
||||
const result = await fetchGitHubBranches({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
});
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "dev", sha: "bbb" },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
// ---- checkAncestry ----
|
||||
|
||||
describe("checkAncestry", () => {
|
||||
it("returns true for fast-forward (ahead)", async () => {
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "ahead" } }),
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "old",
|
||||
headSha: "new",
|
||||
});
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it("returns true for identical", async () => {
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "identical" } }),
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "same",
|
||||
headSha: "same",
|
||||
});
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it("returns false for diverged", async () => {
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "diverged" } }),
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "old",
|
||||
headSha: "new",
|
||||
});
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false when API returns 404 (old SHA gone)", async () => {
|
||||
const error404 = Object.assign(new Error("Not Found"), { status: 404 });
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() => Promise.reject(error404)),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "gone",
|
||||
headSha: "new",
|
||||
});
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("throws on transient errors (fail-open for caller)", async () => {
|
||||
const error500 = Object.assign(new Error("Internal Server Error"), { status: 500 });
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() => Promise.reject(error500)),
|
||||
},
|
||||
});
|
||||
|
||||
expect(
|
||||
checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "old",
|
||||
headSha: "new",
|
||||
}),
|
||||
).rejects.toThrow("Internal Server Error");
|
||||
});
|
||||
});
|
||||
|
||||
// ---- detectForcePush ----
|
||||
// Uses _deps injection to avoid fragile global fetch mocking.
|
||||
|
||||
describe("detectForcePush", () => {
|
||||
const baseArgs = {
|
||||
giteaUrl: "https://gitea.example.com",
|
||||
giteaToken: "tok",
|
||||
giteaOwner: "org",
|
||||
giteaRepo: "repo",
|
||||
githubOwner: "user",
|
||||
githubRepo: "repo",
|
||||
};
|
||||
|
||||
function makeDeps(overrides: {
|
||||
giteaBranches?: BranchInfo[] | Error;
|
||||
githubBranches?: BranchInfo[] | Error;
|
||||
ancestryResult?: boolean;
|
||||
} = {}) {
|
||||
return {
|
||||
fetchGiteaBranches: mock(async () => {
|
||||
if (overrides.giteaBranches instanceof Error) throw overrides.giteaBranches;
|
||||
return overrides.giteaBranches ?? [];
|
||||
}) as any,
|
||||
fetchGitHubBranches: mock(async () => {
|
||||
if (overrides.githubBranches instanceof Error) throw overrides.githubBranches;
|
||||
return overrides.githubBranches ?? [];
|
||||
}) as any,
|
||||
checkAncestry: mock(async () => overrides.ancestryResult ?? true) as any,
|
||||
};
|
||||
}
|
||||
|
||||
const dummyOctokit = {} as any;
|
||||
|
||||
it("skips when Gitea has no branches (first mirror)", async () => {
|
||||
const deps = makeDeps({ giteaBranches: [] });
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("No Gitea branches");
|
||||
});
|
||||
|
||||
it("returns no detection when all SHAs match", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "dev", sha: "bbb" },
|
||||
],
|
||||
githubBranches: [
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "dev", sha: "bbb" },
|
||||
],
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(false);
|
||||
expect(result.affectedBranches).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("detects deleted branch", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "old-branch", sha: "ccc" },
|
||||
],
|
||||
githubBranches: [{ name: "main", sha: "aaa" }],
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(true);
|
||||
expect(result.affectedBranches).toHaveLength(1);
|
||||
expect(result.affectedBranches[0]).toEqual({
|
||||
name: "old-branch",
|
||||
reason: "deleted",
|
||||
giteaSha: "ccc",
|
||||
githubSha: null,
|
||||
});
|
||||
});
|
||||
|
||||
it("returns no detection for fast-forward", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "old-sha" }],
|
||||
githubBranches: [{ name: "main", sha: "new-sha" }],
|
||||
ancestryResult: true, // fast-forward
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.affectedBranches).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("detects diverged branch", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "old-sha" }],
|
||||
githubBranches: [{ name: "main", sha: "rewritten-sha" }],
|
||||
ancestryResult: false, // diverged
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(true);
|
||||
expect(result.affectedBranches).toHaveLength(1);
|
||||
expect(result.affectedBranches[0]).toEqual({
|
||||
name: "main",
|
||||
reason: "diverged",
|
||||
giteaSha: "old-sha",
|
||||
githubSha: "rewritten-sha",
|
||||
});
|
||||
});
|
||||
|
||||
it("detects force-push when ancestry check fails (old SHA gone)", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "old-sha" }],
|
||||
githubBranches: [{ name: "main", sha: "new-sha" }],
|
||||
ancestryResult: false, // checkAncestry returns false on error
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(true);
|
||||
expect(result.affectedBranches).toHaveLength(1);
|
||||
expect(result.affectedBranches[0].reason).toBe("diverged");
|
||||
});
|
||||
|
||||
it("skips when Gitea API returns 404", async () => {
|
||||
const { HttpError } = await import("@/lib/http-client");
|
||||
const deps = makeDeps({
|
||||
giteaBranches: new HttpError("not found", 404, "Not Found"),
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("not found");
|
||||
});
|
||||
|
||||
it("skips when Gitea API returns server error", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: new Error("HTTP 500: internal error"),
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("Failed to fetch Gitea branches");
|
||||
});
|
||||
|
||||
it("skips when GitHub API fails", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "aaa" }],
|
||||
githubBranches: new Error("rate limited"),
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("Failed to fetch GitHub branches");
|
||||
});
|
||||
});
|
||||
286
src/lib/utils/force-push-detection.ts
Normal file
286
src/lib/utils/force-push-detection.ts
Normal file
@@ -0,0 +1,286 @@
|
||||
/**
|
||||
* Force-push detection module.
|
||||
*
|
||||
* Compares branch SHAs between a Gitea mirror and GitHub source to detect
|
||||
* branches that were deleted, rewritten, or force-pushed.
|
||||
*
|
||||
* **Fail-open**: If detection itself fails (API errors, rate limits, etc.),
|
||||
* the result indicates no force-push so sync proceeds normally. Detection
|
||||
* should never block sync due to its own failure.
|
||||
*/
|
||||
|
||||
import type { Octokit } from "@octokit/rest";
|
||||
import { httpGet, HttpError } from "@/lib/http-client";
|
||||
|
||||
// ---- Types ----
|
||||
|
||||
export interface BranchInfo {
|
||||
name: string;
|
||||
sha: string;
|
||||
}
|
||||
|
||||
export type ForcePushReason = "deleted" | "diverged" | "non-fast-forward";
|
||||
|
||||
export interface AffectedBranch {
|
||||
name: string;
|
||||
reason: ForcePushReason;
|
||||
giteaSha: string;
|
||||
githubSha: string | null; // null when branch was deleted
|
||||
}
|
||||
|
||||
export interface ForcePushDetectionResult {
|
||||
detected: boolean;
|
||||
affectedBranches: AffectedBranch[];
|
||||
/** True when detection could not run (API error, etc.) */
|
||||
skipped: boolean;
|
||||
skipReason?: string;
|
||||
}
|
||||
|
||||
const NO_FORCE_PUSH: ForcePushDetectionResult = {
|
||||
detected: false,
|
||||
affectedBranches: [],
|
||||
skipped: false,
|
||||
};
|
||||
|
||||
function skippedResult(reason: string): ForcePushDetectionResult {
|
||||
return {
|
||||
detected: false,
|
||||
affectedBranches: [],
|
||||
skipped: true,
|
||||
skipReason: reason,
|
||||
};
|
||||
}
|
||||
|
||||
// ---- Branch fetching ----
|
||||
|
||||
/**
|
||||
* Fetch all branches from a Gitea repository (paginated).
|
||||
*/
|
||||
export async function fetchGiteaBranches({
|
||||
giteaUrl,
|
||||
giteaToken,
|
||||
owner,
|
||||
repo,
|
||||
}: {
|
||||
giteaUrl: string;
|
||||
giteaToken: string;
|
||||
owner: string;
|
||||
repo: string;
|
||||
}): Promise<BranchInfo[]> {
|
||||
const branches: BranchInfo[] = [];
|
||||
let page = 1;
|
||||
const perPage = 50;
|
||||
|
||||
while (true) {
|
||||
const url = `${giteaUrl}/api/v1/repos/${owner}/${repo}/branches?page=${page}&limit=${perPage}`;
|
||||
const response = await httpGet<Array<{ name: string; commit: { id: string } }>>(
|
||||
url,
|
||||
{ Authorization: `token ${giteaToken}` },
|
||||
);
|
||||
|
||||
if (!Array.isArray(response.data) || response.data.length === 0) break;
|
||||
|
||||
for (const b of response.data) {
|
||||
branches.push({ name: b.name, sha: b.commit.id });
|
||||
}
|
||||
|
||||
if (response.data.length < perPage) break;
|
||||
page++;
|
||||
}
|
||||
|
||||
return branches;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch all branches from a GitHub repository (paginated via Octokit).
|
||||
*/
|
||||
export async function fetchGitHubBranches({
|
||||
octokit,
|
||||
owner,
|
||||
repo,
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
owner: string;
|
||||
repo: string;
|
||||
}): Promise<BranchInfo[]> {
|
||||
const data = await octokit.paginate(octokit.repos.listBranches, {
|
||||
owner,
|
||||
repo,
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
return data.map((b) => ({ name: b.name, sha: b.commit.sha }));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the transition from `baseSha` to `headSha` on the same branch
|
||||
* is a fast-forward (i.e. `baseSha` is an ancestor of `headSha`).
|
||||
*
|
||||
* Returns `true` when the change is safe (fast-forward) and `false` when it
|
||||
* is a confirmed force-push (404 = old SHA garbage-collected from GitHub).
|
||||
*
|
||||
* Throws on transient errors (rate limits, network issues) so the caller
|
||||
* can decide how to handle them (fail-open: skip that branch).
|
||||
*/
|
||||
export async function checkAncestry({
|
||||
octokit,
|
||||
owner,
|
||||
repo,
|
||||
baseSha,
|
||||
headSha,
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
owner: string;
|
||||
repo: string;
|
||||
baseSha: string;
|
||||
headSha: string;
|
||||
}): Promise<boolean> {
|
||||
try {
|
||||
const { data } = await octokit.repos.compareCommits({
|
||||
owner,
|
||||
repo,
|
||||
base: baseSha,
|
||||
head: headSha,
|
||||
});
|
||||
// "ahead" means headSha is strictly ahead of baseSha → fast-forward.
|
||||
// "behind" or "diverged" means the branch was rewritten.
|
||||
return data.status === "ahead" || data.status === "identical";
|
||||
} catch (error: any) {
|
||||
// 404 / 422 = old SHA no longer exists on GitHub → confirmed force-push.
|
||||
if (error?.status === 404 || error?.status === 422) {
|
||||
return false;
|
||||
}
|
||||
// Any other error (rate limit, network) → rethrow so caller can
|
||||
// handle it as fail-open (skip branch) rather than false-positive.
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// ---- Main detection ----
|
||||
|
||||
/**
|
||||
* Compare branch SHAs between Gitea and GitHub to detect force-pushes.
|
||||
*
|
||||
* The function is intentionally fail-open: any error during detection returns
|
||||
* a "skipped" result so that sync can proceed normally.
|
||||
*/
|
||||
export async function detectForcePush({
|
||||
giteaUrl,
|
||||
giteaToken,
|
||||
giteaOwner,
|
||||
giteaRepo,
|
||||
octokit,
|
||||
githubOwner,
|
||||
githubRepo,
|
||||
_deps,
|
||||
}: {
|
||||
giteaUrl: string;
|
||||
giteaToken: string;
|
||||
giteaOwner: string;
|
||||
giteaRepo: string;
|
||||
octokit: Octokit;
|
||||
githubOwner: string;
|
||||
githubRepo: string;
|
||||
/** @internal — test-only dependency injection */
|
||||
_deps?: {
|
||||
fetchGiteaBranches: typeof fetchGiteaBranches;
|
||||
fetchGitHubBranches: typeof fetchGitHubBranches;
|
||||
checkAncestry: typeof checkAncestry;
|
||||
};
|
||||
}): Promise<ForcePushDetectionResult> {
|
||||
const deps = _deps ?? { fetchGiteaBranches, fetchGitHubBranches, checkAncestry };
|
||||
|
||||
// 1. Fetch Gitea branches
|
||||
let giteaBranches: BranchInfo[];
|
||||
try {
|
||||
giteaBranches = await deps.fetchGiteaBranches({
|
||||
giteaUrl,
|
||||
giteaToken,
|
||||
owner: giteaOwner,
|
||||
repo: giteaRepo,
|
||||
});
|
||||
} catch (error) {
|
||||
// Gitea 404 = repo not yet mirrored, skip detection
|
||||
if (error instanceof HttpError && error.status === 404) {
|
||||
return skippedResult("Gitea repository not found (first mirror?)");
|
||||
}
|
||||
return skippedResult(
|
||||
`Failed to fetch Gitea branches: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
|
||||
// First-time mirror: no Gitea branches → nothing to compare
|
||||
if (giteaBranches.length === 0) {
|
||||
return skippedResult("No Gitea branches found (first mirror?)");
|
||||
}
|
||||
|
||||
// 2. Fetch GitHub branches
|
||||
let githubBranches: BranchInfo[];
|
||||
try {
|
||||
githubBranches = await deps.fetchGitHubBranches({
|
||||
octokit,
|
||||
owner: githubOwner,
|
||||
repo: githubRepo,
|
||||
});
|
||||
} catch (error) {
|
||||
return skippedResult(
|
||||
`Failed to fetch GitHub branches: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const githubBranchMap = new Map(githubBranches.map((b) => [b.name, b.sha]));
|
||||
|
||||
// 3. Compare each Gitea branch against GitHub
|
||||
const affected: AffectedBranch[] = [];
|
||||
|
||||
for (const giteaBranch of giteaBranches) {
|
||||
const githubSha = githubBranchMap.get(giteaBranch.name);
|
||||
|
||||
if (githubSha === undefined) {
|
||||
// Branch was deleted on GitHub
|
||||
affected.push({
|
||||
name: giteaBranch.name,
|
||||
reason: "deleted",
|
||||
giteaSha: giteaBranch.sha,
|
||||
githubSha: null,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
// Same SHA → no change
|
||||
if (githubSha === giteaBranch.sha) continue;
|
||||
|
||||
// SHAs differ → check if it's a fast-forward
|
||||
try {
|
||||
const isFastForward = await deps.checkAncestry({
|
||||
octokit,
|
||||
owner: githubOwner,
|
||||
repo: githubRepo,
|
||||
baseSha: giteaBranch.sha,
|
||||
headSha: githubSha,
|
||||
});
|
||||
|
||||
if (!isFastForward) {
|
||||
affected.push({
|
||||
name: giteaBranch.name,
|
||||
reason: "diverged",
|
||||
giteaSha: giteaBranch.sha,
|
||||
githubSha,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Individual branch check failure → skip that branch (fail-open)
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (affected.length === 0) {
|
||||
return NO_FORCE_PUSH;
|
||||
}
|
||||
|
||||
return {
|
||||
detected: true,
|
||||
affectedBranches: affected,
|
||||
skipped: false,
|
||||
};
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user