Compare commits

..

24 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Arunavo Ray | 2b78a6a4a8 | v3.5.4 | 2025-09-07 19:11:50 +05:30 |
| Arunavo Ray | c2f6e73054 | Testing Authentik SSO Issues | 2025-09-07 19:09:00 +05:30 |
| Arunavo Ray | c4b353aae8 | Added docs around scheduling using corn | 2025-09-07 16:51:51 +05:30 |
| Arunavo Ray | 4a54cf9009 | v3.5.3 | 2025-09-07 16:29:43 +05:30 |
| Arunavo Ray | fab4efd93a | Auto-start on boot | 2025-09-07 16:29:23 +05:30 |
| Arunavo Ray | 9f21cd6b1a | Addressing concerns of Issue #85 and #86 | 2025-09-07 15:25:48 +05:30 |
| Arunavo Ray | 9ef6017a23 | v3.5.2 | 2025-09-07 13:55:43 +05:30 |
| Arunavo Ray | 502796371f | Attempt to address #84 | 2025-09-07 13:55:20 +05:30 |
| Arunavo Ray | b956b71c5f | Fixed #87 where the Release Notes was missing | 2025-09-07 13:14:41 +05:30 |
| Arunavo Ray | 26b82e0f65 | Added AGENTS.md | 2025-09-07 11:46:14 +05:30 |
| Arunavo Ray | 7c124a37d7 | v3.5.1 | 2025-08-30 00:47:59 +05:30 |
| Arunavo Ray | 3e14edc571 | fixed default overide | 2025-08-30 00:47:33 +05:30 |
| Arunavo Ray | a188869cae | "Automatic Mirroring" changed to "Automatic Syncing" | 2025-08-30 00:37:56 +05:30 |
| Arunavo Ray | afac3b5ddc | UI tweek | 2025-08-29 21:16:19 +05:30 |
| Arunavo Ray | 2ce4bb4373 | update env doc | 2025-08-29 20:43:49 +05:30 |
| Arunavo Ray | 5c9a3afaae | updates to auth url | 2025-08-29 20:43:25 +05:30 |
| Arunavo Ray | de4e111095 | type fix | 2025-08-29 20:42:56 +05:30 |
| Arunavo Ray | 8c4d9508c7 | Add provider modal optimised | 2025-08-29 19:17:40 +05:30 |
| Arunavo Ray | 921eb5e07d | util | 2025-08-29 19:08:48 +05:30 |
| Arunavo Ray | ac1b09f7a1 | UI updates | 2025-08-29 19:08:39 +05:30 |
| Arunavo Ray | 9ee67ce77d | made time more user readable | 2025-08-29 18:32:22 +05:30 |
| Arunavo Ray | 92db61a2c9 | v3.5.0 | 2025-08-29 18:11:49 +05:30 |
| Arunavo Ray | cbf6e11de3 | Env var updates | 2025-08-29 18:11:26 +05:30 |
| Arunavo Ray | 18855f09c4 | Imporved a bunch of things in Mirror and sync Automation | 2025-08-29 17:49:44 +05:30 |

34 changed files with 1350 additions and 507 deletions

View File

@@ -18,6 +18,7 @@ DATABASE_URL=sqlite://data/gitea-mirror.db
# Generate with: openssl rand -base64 32
BETTER_AUTH_SECRET=change-this-to-a-secure-random-string-in-production
BETTER_AUTH_URL=http://localhost:4321
# PUBLIC_BETTER_AUTH_URL=https://your-domain.com # Optional: Set this if accessing from different origins (e.g., IP and domain)
# ENCRYPTION_SECRET=optional-encryption-key-for-token-encryption # Generate with: openssl rand -base64 48
# ===========================================
@@ -94,6 +95,7 @@ DOCKER_TAG=latest
# Release and Metadata
# MIRROR_RELEASES=false # Mirror GitHub releases
# RELEASE_LIMIT=10 # Maximum number of releases to mirror per repository
# MIRROR_WIKI=false # Mirror wiki content
# Issue Tracking (requires MIRROR_METADATA=true)
@@ -109,8 +111,10 @@ DOCKER_TAG=latest
# ===========================================
# Basic Schedule Settings
# SCHEDULE_ENABLED=false
# SCHEDULE_ENABLED=false # When true, auto-imports and mirrors all repos on startup (v3.5.3+)
# SCHEDULE_INTERVAL=3600 # Interval in seconds or cron expression (e.g., "0 2 * * *")
# GITEA_MIRROR_INTERVAL=8h # Mirror sync interval (5m, 30m, 1h, 8h, 24h, 1d, 7d) - also triggers auto-start
# AUTO_IMPORT_REPOS=true # Automatically discover and import new GitHub repositories during syncs
# DELAY=3600 # Legacy: same as SCHEDULE_INTERVAL, kept for backward compatibility
# Execution Settings
@@ -148,11 +152,11 @@ DOCKER_TAG=latest
# CLEANUP_ENABLED=false
# CLEANUP_RETENTION_DAYS=7 # Days to keep events
# Repository Cleanup
# Repository Cleanup (v3.4.0+)
# CLEANUP_DELETE_FROM_GITEA=false # Delete repos from Gitea
# CLEANUP_DELETE_IF_NOT_IN_GITHUB=true # Delete if not in GitHub - automatically enables cleanup
# CLEANUP_DELETE_IF_NOT_IN_GITHUB=false # Auto-remove repos that no longer exist in GitHub
# CLEANUP_ORPHANED_REPO_ACTION=archive # Options: skip, archive, delete
# CLEANUP_DRY_RUN=true # Test mode without actual deletion
# CLEANUP_DRY_RUN=true # Test mode without actual deletion (set to false for production)
# Protected Repositories (comma-separated)
# CLEANUP_PROTECTED_REPOS=important-repo,critical-project

AGENTS.md — new file (46 lines added)
View File

@@ -0,0 +1,46 @@
# Repository Guidelines
## Project Structure & Module Organization
- `src/` app code
- `components/` (React, PascalCase files), `pages/` (Astro/API routes), `lib/` (domain + utilities, kebab-case), `hooks/`, `layouts/`, `styles/`, `tests/`, `types/`, `data/`, `content/`.
- `scripts/` operational TS scripts (DB init, recovery): e.g., `scripts/manage-db.ts`.
- `drizzle/` SQL migrations; `data/` runtime SQLite (`gitea-mirror.db`).
- `public/` static assets; `dist/` build output.
- Key config: `astro.config.mjs`, `tsconfig.json` (alias `@/* → src/*`), `bunfig.toml` (test preload), `.env(.example)`.
## Build, Test, and Development Commands
- Prereq: Bun `>= 1.2.9` (see `package.json`).
- Setup: `bun run setup` install deps and init DB.
- Dev: `bun run dev` start Astro dev server.
- Build: `bun run build` produce `dist/`.
- Preview/Start: `bun run preview` (static preview) or `bun run start` (SSR entry).
- Database: `bun run db:generate|migrate|push|studio` and `bun run manage-db init|check|fix|reset-users`.
- Tests: `bun test` | `bun run test:watch` | `bun run test:coverage`.
- Docker: see `docker-compose.yml` and variants in repo root.
## Coding Style & Naming Conventions
- Language: TypeScript, Astro, React.
- Indentation: 2 spaces; keep existing semicolon/quote style in touched files.
- Components: PascalCase `.tsx` in `src/components/` (e.g., `MainLayout.tsx`).
- Modules/utils: kebab-case in `src/lib/` (e.g., `gitea-enhanced.ts`).
- Imports: prefer alias `@/…` (configured in `tsconfig.json`).
- Do not introduce new lint/format configs; follow current patterns.
## Testing Guidelines
- Runner: Bun test (`bun:test`) with preload `src/tests/setup.bun.ts` (see `bunfig.toml`).
- Location/Names: `**/*.test.ts(x)` under `src/**` (examples in `src/lib/**`).
- Scope: add unit tests for new logic and API route tests for handlers.
- Aim for meaningful coverage on DB, auth, and mirroring paths.
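A minimal `bun:test` example in this style (the module under test, `parseInterval`, and its path are illustrative, not actual project files):

```typescript
// src/lib/utils/interval.test.ts — illustrative location following the conventions above
import { describe, expect, test } from "bun:test";
import { parseInterval } from "@/lib/utils/interval"; // hypothetical helper under test

describe("parseInterval", () => {
  test("converts duration strings to seconds", () => {
    expect(parseInterval("8h")).toBe(8 * 3600);
  });

  test("passes plain numeric values through", () => {
    expect(parseInterval("3600")).toBe(3600);
  });
});
```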
## Commit & Pull Request Guidelines
- Commits: short, imperative, scoped when helpful (e.g., `lib: fix token parsing`, `ui: align buttons`).
- PRs must include:
- Summary, rationale, and testing steps/commands.
- Linked issues (e.g., `Closes #123`).
- Screenshots/gifs for UI changes.
- Notes on DB/migration or .env impacts; update `docs/`/CHANGELOG if applicable.
## Security & Configuration Tips
- Never commit secrets. Copy `.env.example` to `.env` and fill values; prefer `bun run startup-env-config` to validate.
- SQLite files live in `data/`; avoid committing generated DBs.
- Certificates (if used) reside in `certs/`; manage locally or via Docker secrets.

View File

@@ -208,6 +208,24 @@ Repositories can have the following statuses:
- **deleting**: Repository being deleted
- **deleted**: Repository deleted
### Scheduling and Synchronization (Issue #72 Fixes)
#### Fixed Issues
1. **Mirror Interval Bug**: Added `mirror_interval` parameter to Gitea API calls when creating mirrors (previously defaulted to 24h)
2. **Auto-Discovery**: Scheduler now automatically discovers and imports new GitHub repositories
3. **Interval Updates**: Sync operations now update existing mirrors' intervals to match configuration
4. **Repository Cleanup**: Integrated automatic cleanup of orphaned repositories (repos removed from GitHub)
#### Environment Variables for Auto-Import
- **AUTO_IMPORT_REPOS**: Set to `false` to disable automatic repository discovery (default: enabled)
#### How Scheduling Works
- **Scheduler Service**: Runs every minute to check for scheduled tasks
- **Sync Interval**: Configured via `GITEA_MIRROR_INTERVAL` or UI (e.g., "8h", "30m", "1d")
- **Auto-Import**: Checks GitHub for new repositories during each scheduled sync
- **Auto-Cleanup**: Removes repositories that no longer exist in GitHub (if enabled)
- **Mirror Interval Update**: Updates Gitea's internal mirror interval during sync operations
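A rough sketch of that per-minute scheduler decision (types and names are illustrative, not the project's actual implementation):

```typescript
// Hypothetical sketch: decide on each scheduler tick whether a repository is due for a sync.
interface RepoSyncState {
  fullName: string;
  lastSyncedAt: Date | null;
}

function isDueForSync(repo: RepoSyncState, intervalMs: number, now = new Date()): boolean {
  if (!repo.lastSyncedAt) return true; // never synced yet: pick it up on the first tick
  return now.getTime() - repo.lastSyncedAt.getTime() >= intervalMs;
}

// Runs roughly once per minute; due repos get a sync job, and new GitHub repos are imported first.
async function schedulerTick(repos: RepoSyncState[], intervalMs: number, autoImport: boolean) {
  if (autoImport) {
    // discover and import new GitHub repositories before syncing (omitted)
  }
  for (const repo of repos) {
    if (isDueForSync(repo, intervalMs)) {
      // enqueue a mirror/sync job for this repository (omitted)
    }
  }
}
```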
### Authentication Configuration
#### SSO Provider Configuration

View File

@@ -40,7 +40,10 @@ First user signup becomes admin. Configure GitHub and Gitea through the web inte
- 🚫 **Repository ignore** - Mark specific repos to skip
- 🔐 Secure authentication with Better Auth (email/password, SSO, OIDC)
- 📊 Real-time dashboard with activity logs
- ⏱️ Scheduled automatic mirroring with flexible intervals
- ⏱️ Scheduled automatic mirroring with configurable intervals
- 🔄 **Auto-discovery** - Automatically import new GitHub repositories (v3.4.0+)
- 🧹 **Repository cleanup** - Auto-remove repos deleted from GitHub (v3.4.0+)
- 🎯 **Proper mirror intervals** - Respects configured sync intervals (v3.4.0+)
- 🗑️ Automatic database cleanup with configurable retention
- 🐳 Dockerized with multi-arch support (AMD64/ARM64)
@@ -204,25 +207,62 @@ Enable in Settings → Mirror Options → Mirror metadata
- **Automatic Cleanup** - Configure retention period for activity logs
- **Scheduled Sync** - Set custom intervals for automatic mirroring
### Automatic Mirroring
### Automatic Syncing & Synchronization
Gitea Mirror can automatically sync your repositories at regular intervals. There are two ways to configure this:
Gitea Mirror provides powerful automatic synchronization features:
#### Via Web Interface (Recommended)
Navigate to the Configuration page and enable "Automatic Mirroring" with your preferred interval (e.g., every 6 hours, daily, etc.).
#### Features (v3.4.0+)
- **Auto-discovery**: Automatically discovers and imports new GitHub repositories
- **Repository cleanup**: Removes repositories that no longer exist in GitHub
- **Proper intervals**: Mirrors respect your configured sync intervals (not Gitea's default 24h)
- **Smart scheduling**: Only syncs repositories that need updating
- **Auto-start on boot** (v3.5.3+): Automatically imports and mirrors all repositories when `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` is set - no manual clicks required!
#### Via Environment Variables
Set `GITEA_MIRROR_INTERVAL` to automatically enable scheduled mirroring:
#### Configuration via Web Interface (Recommended)
Navigate to the Configuration page and enable "Automatic Syncing" with your preferred interval.
#### Configuration via Environment Variables
**🚀 Set it and forget it!** With these environment variables, Gitea Mirror will automatically:
1. **Import** all your GitHub repositories on startup (no manual import needed!)
2. **Mirror** them to Gitea immediately
3. **Keep them synchronized** based on your interval
4. **Auto-discover** new repos you create/star on GitHub
5. **Clean up** repos you delete from GitHub
```bash
# Examples of supported formats:
# Option 1: Enable automatic scheduling (triggers auto-start)
SCHEDULE_ENABLED=true
SCHEDULE_INTERVAL=3600 # Check every hour (or use cron: "0 * * * *")
# Option 2: Set mirror interval (also triggers auto-start)
GITEA_MIRROR_INTERVAL=8h # Every 8 hours
GITEA_MIRROR_INTERVAL=30m # Every 30 minutes
GITEA_MIRROR_INTERVAL=1d # Daily
GITEA_MIRROR_INTERVAL=86400 # Every 86400 seconds (24 hours)
# Other examples: 5m, 30m, 1h, 24h, 1d, 7d
# Advanced: Use cron expressions for specific times
SCHEDULE_INTERVAL="0 2 * * *" # Daily at 2 AM (optimize bandwidth usage)
# Auto-import new repositories (default: true)
AUTO_IMPORT_REPOS=true
# Auto-cleanup orphaned repositories
CLEANUP_DELETE_IF_NOT_IN_GITHUB=true
CLEANUP_ORPHANED_REPO_ACTION=archive # 'archive' (recommended) or 'delete'
CLEANUP_DRY_RUN=false # Set to true to test without changes
```
When this variable is set, the scheduler is automatically enabled and runs at the specified interval. The timer starts from the last successful sync, not from container startup.
**Important Notes**:
- **Auto-Start**: When `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` is set, the service automatically imports all GitHub repositories and mirrors them on startup. No manual "Import" or "Mirror" button clicks required!
- The scheduler checks every minute for tasks to run. The `GITEA_MIRROR_INTERVAL` determines how often each repository is actually synced. For example, with `8h`, each repo syncs every 8 hours from its last successful sync.
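For illustration only, converting a value such as `8h` or `30m` to milliseconds could look like this (a sketch, not the project's actual parser):

```typescript
// Sketch: convert "5m", "8h", "1d" or a bare number of seconds ("86400") to milliseconds.
function intervalToMs(value: string): number {
  const match = value.trim().match(/^(\d+)([smhd]?)$/);
  if (!match) throw new Error(`Unsupported interval: ${value}`);
  const factors: Record<string, number> = { s: 1_000, m: 60_000, h: 3_600_000, d: 86_400_000 };
  const unit = match[2] || "s"; // bare numbers are treated as seconds
  return Number(match[1]) * factors[unit];
}

// intervalToMs("8h")    === 28_800_000
// intervalToMs("86400") === 86_400_000 (24 hours)
```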
**🛡️ Backup Protection Features**:
- **No Accidental Deletions**: Repository cleanup is automatically skipped if GitHub is inaccessible (account deleted, banned, or API errors)
- **Archive Never Deletes Data**: The `archive` action preserves all repository data:
  - Regular repositories: Made read-only using Gitea's archive feature
  - Mirror repositories: Renamed with `[ARCHIVED]` prefix (Gitea API limitation prevents archiving mirrors)
  - Failed operations: Repository remains fully accessible even if marking as archived fails
- **The Whole Point of Backups**: Your Gitea mirrors are preserved even when GitHub sources disappear - that's why you have backups!
- **Strongly Recommended**: Always use `CLEANUP_ORPHANED_REPO_ACTION=archive` (default) instead of `delete`
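A simplified sketch of that decision flow (option names mirror the environment variables above; the logic is illustrative):

```typescript
// Sketch: how an orphaned-repository action might be resolved safely.
type OrphanAction = "skip" | "archive" | "delete";

function resolveOrphanAction(opts: {
  githubReachable: boolean;       // false on 404/403/network errors while checking GitHub
  configuredAction: OrphanAction; // CLEANUP_ORPHANED_REPO_ACTION
  dryRun: boolean;                // CLEANUP_DRY_RUN
}): OrphanAction | "noop" {
  if (!opts.githubReachable) return "noop"; // never touch backups when GitHub can't be verified
  if (opts.dryRun) return "noop";           // dry run: report what would happen, change nothing
  return opts.configuredAction;             // "archive" (recommended) preserves all data; "delete" does not
}
```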
## Troubleshooting

View File

@@ -8,7 +8,7 @@
"@astrojs/mdx": "4.3.4",
"@astrojs/node": "9.4.3",
"@astrojs/react": "^4.3.0",
"@better-auth/sso": "^1.3.7",
"@better-auth/sso": "^1.3.8",
"@octokit/rest": "^22.0.0",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-avatar": "^1.1.10",
@@ -34,7 +34,7 @@
"@types/react-dom": "^19.1.9",
"astro": "^5.13.4",
"bcryptjs": "^3.0.2",
"better-auth": "^1.3.7",
"better-auth": "^1.3.8",
"canvas-confetti": "^1.9.3",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
@@ -147,7 +147,7 @@
"@babel/types": ["@babel/types@7.28.2", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ=="],
"@better-auth/sso": ["@better-auth/sso@1.3.7", "", { "dependencies": { "@better-fetch/fetch": "^1.1.18", "better-auth": "^1.3.7", "fast-xml-parser": "^5.2.5", "jose": "^5.9.6", "oauth2-mock-server": "^7.2.0", "samlify": "^2.10.0" }, "peerDependencies": { "zod": "^3.25.0 || ^4.0.0" } }, "sha512-MTwBiNash7HN0nLtQiL1tvYgWBn6GjYj6EYvtrQeb0/+UW0tjBDgsl39ojiFFSWGuT0gxPv+ij8tQNaFmQ1+2g=="],
"@better-auth/sso": ["@better-auth/sso@1.3.8", "", { "dependencies": { "@better-fetch/fetch": "^1.1.18", "fast-xml-parser": "^5.2.5", "jose": "^5.10.0", "oauth2-mock-server": "^7.2.1", "samlify": "^2.10.1", "zod": "^4.1.5" }, "peerDependencies": { "better-auth": "1.3.8" } }, "sha512-ohJl4uTRwVACu8840A5Ys/z2jus/vEsCrWvOj/RannsZ6CxQAjr8utYYXXs6lVn08ynOcuT4m0OsYRbrw7a42g=="],
"@better-auth/utils": ["@better-auth/utils@0.2.6", "", { "dependencies": { "uncrypto": "^0.1.3" } }, "sha512-3y/vaL5Ox33dBwgJ6ub3OPkVqr6B5xL2kgxNHG8eHZuryLyG/4JSPGqjbdRSgjuy9kALUZYDFl+ORIAxlWMSuA=="],
@@ -683,7 +683,7 @@
"before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="],
"better-auth": ["better-auth@1.3.7", "", { "dependencies": { "@better-auth/utils": "0.2.6", "@better-fetch/fetch": "^1.1.18", "@noble/ciphers": "^0.6.0", "@noble/hashes": "^1.8.0", "@simplewebauthn/browser": "^13.1.2", "@simplewebauthn/server": "^13.1.2", "better-call": "^1.0.13", "defu": "^6.1.4", "jose": "^5.10.0", "kysely": "^0.28.5", "nanostores": "^0.11.4" }, "peerDependencies": { "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0", "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["react", "react-dom"] }, "sha512-/1fEyx2SGgJQM5ujozDCh9eJksnVkNU/J7Fk/tG5Y390l8nKbrPvqiFlCjlMM+scR+UABJbQzA6An7HT50LHyQ=="],
"better-auth": ["better-auth@1.3.8", "", { "dependencies": { "@better-auth/utils": "0.2.6", "@better-fetch/fetch": "^1.1.18", "@noble/ciphers": "^0.6.0", "@noble/hashes": "^1.8.0", "@simplewebauthn/browser": "^13.1.2", "@simplewebauthn/server": "^13.1.2", "better-call": "1.0.16", "defu": "^6.1.4", "jose": "^5.10.0", "kysely": "^0.28.5", "nanostores": "^0.11.4", "zod": "^4.1.5" }, "peerDependencies": { "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0" }, "optionalPeers": ["react", "react-dom"] }, "sha512-uRFzHbWkhr8eWNy+BJwyMnrZPOvQjwrcLND3nc6jusRteYA9cjeRGElgCPTWTIyWUfzaQ708Lb5Mdq9Gv41Qpw=="],
"better-call": ["better-call@1.0.16", "", { "dependencies": { "@better-fetch/fetch": "^1.1.4", "rou3": "^0.5.1", "set-cookie-parser": "^2.7.1", "uncrypto": "^0.1.3" } }, "sha512-42dgJ1rOtc0anOoxjXPOWuel/Z/4aeO7EJ2SiXNwvlkySSgjXhNjAjTMWa8DL1nt6EXS3jl3VKC3mPsU/lUgVA=="],

View File

@@ -1,5 +1,7 @@
# Gitea Mirror alternate deployment configuration
# Standard deployment with host path and minimal environments
# Minimal Gitea Mirror deployment
# Only includes what CANNOT be configured via the Web UI
# Everything else can be set up through the web interface after deployment
services:
gitea-mirror:
image: ghcr.io/raylabshq/gitea-mirror:latest
@@ -11,17 +13,43 @@ services:
volumes:
- ./data:/app/data
environment:
# For a complete list of all supported environment variables, see:
# docs/ENVIRONMENT_VARIABLES.md or .env.example
# === ABSOLUTELY REQUIRED ===
# This MUST be set and CANNOT be changed via UI
- BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET} # Min 32 chars, required for sessions
# === CORE SETTINGS ===
# These are technically required but have working defaults
- NODE_ENV=production
- DATABASE_URL=file:data/gitea-mirror.db
- HOST=0.0.0.0
- PORT=4321
- BETTER_AUTH_URL=http://localhost:4321
- BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET:-your-secret-key-change-this-in-production}
- BETTER_AUTH_URL=${BETTER_AUTH_URL:-http://localhost:4321}
healthcheck:
test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/api/health"]
interval: 30s
timeout: 10s
retries: 5
start_period: 15s
# === QUICK START ===
#
# 1. Create a .env file with only ONE required variable:
# BETTER_AUTH_SECRET=your-32-character-minimum-secret-key-here
#
# 2. Run:
# docker-compose -f docker-compose.alt.yml up -d
#
# 3. Access at http://localhost:4321
#
# 4. Sign up for an account (first user becomes admin)
#
# 5. Configure everything else through the web UI:
# - GitHub credentials
# - Gitea credentials
# - Mirror settings
# - Scheduling options
# - Auto-import settings
# - Cleanup preferences
#
# That's it! Everything else can be configured via the web interface.

View File

@@ -1,174 +0,0 @@
version: "3.8"
services:
# PostgreSQL database for Authentik
authentik-db:
image: postgres:15-alpine
container_name: authentik-db
restart: unless-stopped
environment:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: authentik-db-password
POSTGRES_DB: authentik
volumes:
- authentik-db-data:/var/lib/postgresql/data
networks:
- authentik-net
healthcheck:
test: ["CMD-SHELL", "pg_isready -U authentik"]
interval: 10s
timeout: 5s
retries: 5
# Redis cache for Authentik
authentik-redis:
image: redis:7-alpine
container_name: authentik-redis
restart: unless-stopped
command: redis-server --save 60 1 --loglevel warning
volumes:
- authentik-redis-data:/data
networks:
- authentik-net
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 10s
timeout: 5s
retries: 5
# Authentik Server
authentik-server:
image: ghcr.io/goauthentik/server:2024.2
container_name: authentik-server
restart: unless-stopped
command: server
environment:
# Core Settings
AUTHENTIK_SECRET_KEY: "change-me-to-a-random-50-char-string-for-production"
AUTHENTIK_ERROR_REPORTING__ENABLED: false
# Database
AUTHENTIK_POSTGRESQL__HOST: authentik-db
AUTHENTIK_POSTGRESQL__USER: authentik
AUTHENTIK_POSTGRESQL__NAME: authentik
AUTHENTIK_POSTGRESQL__PASSWORD: authentik-db-password
# Redis
AUTHENTIK_REDIS__HOST: authentik-redis
# Email (optional - for testing, uses console backend)
AUTHENTIK_EMAIL__HOST: localhost
AUTHENTIK_EMAIL__PORT: 25
AUTHENTIK_EMAIL__USE_TLS: false
AUTHENTIK_EMAIL__USE_SSL: false
AUTHENTIK_EMAIL__TIMEOUT: 10
AUTHENTIK_EMAIL__FROM: authentik@localhost
# Log Level
AUTHENTIK_LOG_LEVEL: info
# Disable analytics
AUTHENTIK_DISABLE_UPDATE_CHECK: true
AUTHENTIK_DISABLE_STARTUP_ANALYTICS: true
# Default admin user (only created on first run)
AUTHENTIK_BOOTSTRAP_PASSWORD: admin-password
AUTHENTIK_BOOTSTRAP_TOKEN: initial-admin-token
AUTHENTIK_BOOTSTRAP_EMAIL: admin@example.com
volumes:
- authentik-media:/media
- authentik-templates:/templates
ports:
- "9000:9000" # HTTP
- "9443:9443" # HTTPS (if configured)
networks:
- authentik-net
- gitea-mirror-net
depends_on:
authentik-db:
condition: service_healthy
authentik-redis:
condition: service_healthy
# Authentik Worker (background tasks)
authentik-worker:
image: ghcr.io/goauthentik/server:2024.2
container_name: authentik-worker
restart: unless-stopped
command: worker
environment:
# Same environment as server
AUTHENTIK_SECRET_KEY: "change-me-to-a-random-50-char-string-for-production"
AUTHENTIK_ERROR_REPORTING__ENABLED: false
AUTHENTIK_POSTGRESQL__HOST: authentik-db
AUTHENTIK_POSTGRESQL__USER: authentik
AUTHENTIK_POSTGRESQL__NAME: authentik
AUTHENTIK_POSTGRESQL__PASSWORD: authentik-db-password
AUTHENTIK_REDIS__HOST: authentik-redis
AUTHENTIK_EMAIL__HOST: localhost
AUTHENTIK_EMAIL__PORT: 25
AUTHENTIK_EMAIL__USE_TLS: false
AUTHENTIK_EMAIL__USE_SSL: false
AUTHENTIK_EMAIL__TIMEOUT: 10
AUTHENTIK_EMAIL__FROM: authentik@localhost
AUTHENTIK_LOG_LEVEL: info
AUTHENTIK_DISABLE_UPDATE_CHECK: true
AUTHENTIK_DISABLE_STARTUP_ANALYTICS: true
volumes:
- authentik-media:/media
- authentik-templates:/templates
networks:
- authentik-net
depends_on:
authentik-db:
condition: service_healthy
authentik-redis:
condition: service_healthy
# Gitea Mirror Application (uncomment to run together)
# gitea-mirror:
# build: .
# # OR use pre-built image:
# # image: ghcr.io/raylabshq/gitea-mirror:latest
# container_name: gitea-mirror
# restart: unless-stopped
# environment:
# # Core Settings
# BETTER_AUTH_URL: http://localhost:4321
# BETTER_AUTH_TRUSTED_ORIGINS: http://localhost:4321,http://localhost:9000
# BETTER_AUTH_SECRET: "your-32-character-secret-key-here"
#
# # GitHub Settings (configure as needed)
# GITHUB_USERNAME: ${GITHUB_USERNAME}
# GITHUB_TOKEN: ${GITHUB_TOKEN}
#
# # Gitea Settings (configure as needed)
# GITEA_URL: ${GITEA_URL}
# GITEA_USERNAME: ${GITEA_USERNAME}
# GITEA_TOKEN: ${GITEA_TOKEN}
# volumes:
# - ./data:/app/data
# ports:
# - "4321:4321"
# networks:
# - gitea-mirror-net
# depends_on:
# - authentik-server
volumes:
authentik-db-data:
name: authentik-db-data
authentik-redis-data:
name: authentik-redis-data
authentik-media:
name: authentik-media
authentik-templates:
name: authentik-templates
networks:
authentik-net:
name: authentik-net
driver: bridge
gitea-mirror-net:
name: gitea-mirror-net
driver: bridge

View File

@@ -1,130 +0,0 @@
version: "3.8"
services:
# PostgreSQL database for Keycloak
keycloak-db:
image: postgres:15-alpine
container_name: keycloak-db
restart: unless-stopped
environment:
POSTGRES_DB: keycloak
POSTGRES_USER: keycloak
POSTGRES_PASSWORD: keycloak-db-password
volumes:
- keycloak-db-data:/var/lib/postgresql/data
networks:
- keycloak-net
healthcheck:
test: ["CMD-SHELL", "pg_isready -U keycloak"]
interval: 10s
timeout: 5s
retries: 5
# Keycloak Identity Provider
keycloak:
image: quay.io/keycloak/keycloak:23.0
container_name: keycloak
restart: unless-stopped
command: start-dev # Use 'start' for production with HTTPS
environment:
# Admin credentials
KEYCLOAK_ADMIN: admin
KEYCLOAK_ADMIN_PASSWORD: admin-password
# Database configuration
KC_DB: postgres
KC_DB_URL_HOST: keycloak-db
KC_DB_URL_DATABASE: keycloak
KC_DB_USERNAME: keycloak
KC_DB_PASSWORD: keycloak-db-password
# HTTP settings
KC_HTTP_ENABLED: true
KC_HTTP_PORT: 8080
KC_HOSTNAME_STRICT: false
KC_HOSTNAME_STRICT_HTTPS: false
KC_PROXY: edge # If behind a proxy
# Development settings (remove for production)
KC_HOSTNAME: localhost
KC_HOSTNAME_PORT: 8080
KC_HOSTNAME_ADMIN: localhost
# Features
KC_FEATURES: token-exchange,admin-fine-grained-authz
# Health and metrics
KC_HEALTH_ENABLED: true
KC_METRICS_ENABLED: true
# Log level
KC_LOG_LEVEL: INFO
# Uncomment for debug logging
# KC_LOG_LEVEL: DEBUG
# QUARKUS_LOG_CATEGORY__ORG_KEYCLOAK_SERVICES: DEBUG
ports:
- "8080:8080" # HTTP
- "8443:8443" # HTTPS (if configured)
- "9000:9000" # Management
networks:
- keycloak-net
- gitea-mirror-net
depends_on:
keycloak-db:
condition: service_healthy
volumes:
# For custom themes (optional)
- keycloak-themes:/opt/keycloak/themes
# For importing realm configurations
- ./keycloak-realm-export.json:/opt/keycloak/data/import/realm.json:ro
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8080/health/ready"]
interval: 15s
timeout: 10s
retries: 10
start_period: 60s
# Gitea Mirror Application (uncomment to run together)
# gitea-mirror:
# build: .
# # OR use pre-built image:
# # image: ghcr.io/raylabshq/gitea-mirror:latest
# container_name: gitea-mirror
# restart: unless-stopped
# environment:
# # Core Settings
# BETTER_AUTH_URL: http://localhost:4321
# BETTER_AUTH_TRUSTED_ORIGINS: http://localhost:4321,http://localhost:8080
# BETTER_AUTH_SECRET: "your-32-character-secret-key-here"
#
# # GitHub Settings (configure as needed)
# GITHUB_USERNAME: ${GITHUB_USERNAME}
# GITHUB_TOKEN: ${GITHUB_TOKEN}
#
# # Gitea Settings (configure as needed)
# GITEA_URL: ${GITEA_URL}
# GITEA_USERNAME: ${GITEA_USERNAME}
# GITEA_TOKEN: ${GITEA_TOKEN}
# volumes:
# - ./data:/app/data
# ports:
# - "4321:4321"
# networks:
# - gitea-mirror-net
# depends_on:
# keycloak:
# condition: service_healthy
volumes:
keycloak-db-data:
name: keycloak-db-data
keycloak-themes:
name: keycloak-themes
networks:
keycloak-net:
name: keycloak-net
driver: bridge
gitea-mirror-net:
name: gitea-mirror-net
driver: bridge

View File

@@ -53,6 +53,14 @@ services:
- GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
- GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
- DELAY=${DELAY:-3600}
# Scheduling and Sync Configuration (Issue #72 fixes)
- SCHEDULE_ENABLED=${SCHEDULE_ENABLED:-false}
- GITEA_MIRROR_INTERVAL=${GITEA_MIRROR_INTERVAL:-8h}
- AUTO_IMPORT_REPOS=${AUTO_IMPORT_REPOS:-true}
# Repository Cleanup Configuration
- CLEANUP_DELETE_IF_NOT_IN_GITHUB=${CLEANUP_DELETE_IF_NOT_IN_GITHUB:-false}
- CLEANUP_ORPHANED_REPO_ACTION=${CLEANUP_ORPHANED_REPO_ACTION:-archive}
- CLEANUP_DRY_RUN=${CLEANUP_DRY_RUN:-true}
# Optional: Skip TLS verification (insecure, use only for testing)
# - GITEA_SKIP_TLS_VERIFY=${GITEA_SKIP_TLS_VERIFY:-false}
# Header Authentication (for Reverse Proxy SSO)

View File

@@ -36,6 +36,7 @@ Essential application settings required for running Gitea Mirror.
| `DATABASE_URL` | Database connection URL | `sqlite://data/gitea-mirror.db` | No |
| `BETTER_AUTH_SECRET` | Secret key for session signing (generate with: `openssl rand -base64 32`) | - | Yes |
| `BETTER_AUTH_URL` | Primary base URL for authentication. This should be the main URL where your application is accessed. | `http://localhost:4321` | No |
| `PUBLIC_BETTER_AUTH_URL` | Client-side auth URL for multi-origin access. Set this to your primary domain when you need to access the app from different origins (e.g., both IP and domain). The client will use this URL for all auth requests instead of the current browser origin. | - | No |
| `BETTER_AUTH_TRUSTED_ORIGINS` | Trusted origins for authentication requests. Comma-separated list of URLs. Use this to specify additional access URLs (e.g., local IP + domain: `http://10.10.20.45:4321,https://gitea-mirror.mydomain.tld`), SSO providers, reverse proxies, etc. | - | No |
| `ENCRYPTION_SECRET` | Optional encryption key for tokens (generate with: `openssl rand -base64 48`) | - | No |
@@ -133,6 +134,7 @@ Control what content gets mirrored from GitHub to Gitea.
| Variable | Description | Default | Options |
|----------|-------------|---------|---------|
| `MIRROR_RELEASES` | Mirror GitHub releases | `false` | `true`, `false` |
| `RELEASE_LIMIT` | Maximum number of releases to mirror per repository | `10` | Number (1-100) |
| `MIRROR_WIKI` | Mirror wiki content | `false` | `true`, `false` |
| `MIRROR_METADATA` | Master toggle for metadata mirroring | `false` | `true`, `false` |
| `MIRROR_ISSUES` | Mirror issues (requires MIRROR_METADATA=true) | `false` | `true`, `false` |
@@ -148,10 +150,29 @@ Configure automatic scheduled mirroring.
| Variable | Description | Default | Options |
|----------|-------------|---------|---------|
| `SCHEDULE_ENABLED` | Enable automatic mirroring | `false` | `true`, `false` |
| `SCHEDULE_INTERVAL` | Interval in seconds or cron expression | `3600` | Number or cron string (e.g., `"0 2 * * *"`) |
| `SCHEDULE_ENABLED` | Enable automatic mirroring. **When set to `true`, automatically imports and mirrors all repositories on startup** (v3.5.3+) | `false` | `true`, `false` |
| `SCHEDULE_INTERVAL` | Interval in seconds or cron expression. **Supports cron syntax for scheduled runs** (e.g., `"0 2 * * *"` for 2 AM daily) | `3600` | Number (seconds) or cron string |
| `DELAY` | Legacy: same as SCHEDULE_INTERVAL | `3600` | Number (seconds) |
> **🚀 Auto-Start Feature (v3.5.3+)**
> Setting either `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` triggers auto-start functionality where the service will:
> 1. **Import** all GitHub repositories on startup
> 2. **Mirror** them to Gitea immediately
> 3. **Continue syncing** at the configured interval
> 4. **Auto-discover** new repositories
> 5. **Clean up** deleted repositories (if configured)
>
> This eliminates the need for manual button clicks - perfect for Docker/Kubernetes deployments!
> **⏰ Scheduling with Cron Expressions**
> Use cron expressions in `SCHEDULE_INTERVAL` to run at specific times:
> - `"0 2 * * *"` - Daily at 2 AM
> - `"0 */6 * * *"` - Every 6 hours
> - `"0 0 * * 0"` - Weekly on Sunday at midnight
> - `"0 3 * * 1-5"` - Weekdays at 3 AM (Monday-Friday)
>
> This is useful for optimizing bandwidth usage during low-activity periods.
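Distinguishing the two accepted forms of `SCHEDULE_INTERVAL` could be as simple as the following sketch (illustrative, not the project's code):

```typescript
// Sketch: SCHEDULE_INTERVAL accepts either a number of seconds or a 5-field cron expression.
type Schedule = { kind: "seconds"; value: number } | { kind: "cron"; expr: string };

function parseScheduleInterval(raw: string): Schedule {
  const value = raw.trim().replace(/^"+|"+$/g, ""); // tolerate quoted values from .env files
  if (/^\d+$/.test(value)) {
    return { kind: "seconds", value: Number(value) };
  }
  return { kind: "cron", expr: value }; // e.g. "0 2 * * *" for daily at 2 AM
}
```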
### Execution Settings
| Variable | Description | Default | Options |
@@ -173,6 +194,7 @@ Configure automatic scheduled mirroring.
| Variable | Description | Default | Options |
|----------|-------------|---------|---------|
| `AUTO_IMPORT_REPOS` | Automatically discover and import new GitHub repositories during scheduled syncs | `true` | `true`, `false` |
| `SCHEDULE_ONLY_MIRROR_UPDATED` | Only mirror repos with updates | `false` | `true`, `false` |
| `SCHEDULE_UPDATE_INTERVAL` | Check for updates interval (milliseconds) | `86400000` | Number |
| `SCHEDULE_SKIP_RECENTLY_MIRRORED` | Skip recently mirrored repos | `true` | `true`, `false` |
@@ -205,10 +227,25 @@ Configure automatic cleanup of old events and data.
|----------|-------------|---------|---------|
| `CLEANUP_DELETE_FROM_GITEA` | Delete repositories from Gitea | `false` | `true`, `false` |
| `CLEANUP_DELETE_IF_NOT_IN_GITHUB` | Delete repos not found in GitHub (automatically enables cleanup) | `true` | `true`, `false` |
| `CLEANUP_ORPHANED_REPO_ACTION` | Action for orphaned repositories | `archive` | `skip`, `archive`, `delete` |
| `CLEANUP_ORPHANED_REPO_ACTION` | Action for orphaned repositories. **Note**: `archive` is recommended to preserve backups | `archive` | `skip`, `archive`, `delete` |
| `CLEANUP_DRY_RUN` | Test mode without actual deletion | `true` | `true`, `false` |
| `CLEANUP_PROTECTED_REPOS` | Comma-separated list of protected repository names | - | Comma-separated strings |
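For example, a protected-repository check over that comma-separated list might look like this (hypothetical helper, shown only to illustrate the format):

```typescript
// Sketch: never clean up repositories named in CLEANUP_PROTECTED_REPOS.
function isProtectedRepo(repoName: string, protectedList = process.env.CLEANUP_PROTECTED_REPOS ?? ""): boolean {
  return protectedList
    .split(",")
    .map((name) => name.trim())
    .filter(Boolean)
    .includes(repoName);
}

// isProtectedRepo("important-repo", "important-repo,critical-project") === true
```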
**🛡️ Safety Features (Backup Protection)**:
- **GitHub Failures Don't Delete Backups**: Cleanup is automatically skipped if GitHub API returns errors (404, 403, connection issues)
- **Archive Never Deletes**: The `archive` action ALWAYS preserves repository data, it never deletes
- **Graceful Degradation**: If marking as archived fails, the repository remains fully accessible in Gitea
- **The Purpose of Backups**: Your mirrors are preserved even when GitHub sources disappear - that's the whole point!
**Archive Behavior (Aligned with Gitea API)**:
- **Regular repositories**: Uses Gitea's native archive feature (PATCH `/repos/{owner}/{repo}` with `archived: true`)
- Makes repository read-only while preserving all data
- **Mirror repositories**: Uses rename strategy (Gitea API returns 422 for archiving mirrors)
- Renamed with `[ARCHIVED]` prefix for clear identification
- Description updated with preservation notice and timestamp
- Mirror interval set to 8760h (1 year) to minimize sync attempts
- Repository remains fully accessible and cloneable
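A minimal sketch of the regular-repository path using the Gitea API endpoint mentioned above (URL, token handling, and error handling are illustrative; mirrors take the rename path instead, which is omitted here):

```typescript
// Sketch: archive a regular (non-mirror) repository via Gitea's API, making it read-only.
async function archiveGiteaRepo(giteaUrl: string, token: string, owner: string, repo: string): Promise<void> {
  const res = await fetch(`${giteaUrl}/api/v1/repos/${owner}/${repo}`, {
    method: "PATCH",
    headers: { Authorization: `token ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({ archived: true }),
  });
  if (!res.ok) {
    // Mirrors typically return 422 here, so the caller falls back to the rename strategy above.
    throw new Error(`Failed to archive ${owner}/${repo}: ${res.status}`);
  }
}
```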
### Execution Settings
| Variable | Description | Default | Options |
@@ -300,21 +337,28 @@ services:
### Multiple Access URLs
To allow access to Gitea Mirror through multiple URLs (e.g., local IP and public domain), use the `BETTER_AUTH_TRUSTED_ORIGINS` variable:
To allow access to Gitea Mirror through multiple URLs (e.g., local IP and public domain), you need to configure both server and client settings:
**Example Configuration:**
```bash
# Primary URL (required) - typically your public domain
# Primary URL (required) - where the auth server is hosted
BETTER_AUTH_URL=https://gitea-mirror.mydomain.tld
# Additional access URLs (optional) - local IPs, alternate domains
# Client-side URL (optional) - tells the browser where to send auth requests
# Set this to your primary domain when accessing from different origins
PUBLIC_BETTER_AUTH_URL=https://gitea-mirror.mydomain.tld
# Additional trusted origins (optional) - origins allowed to make auth requests
BETTER_AUTH_TRUSTED_ORIGINS=http://10.10.20.45:4321,http://192.168.1.100:4321
```
This setup allows you to:
- Access via local network IP: `http://10.10.20.45:4321`
- Access via public domain: `https://gitea-mirror.mydomain.tld`
- Both URLs will work for authentication and session management
- Auth requests from the IP will be sent to the domain (via `PUBLIC_BETTER_AUTH_URL`)
- Each origin requires separate login due to browser cookie isolation
**Important:** When accessing from different origins (IP vs domain), you'll need to log in separately on each origin as cookies cannot be shared across different origins for security reasons.
### Trusted Origins

View File

@@ -60,7 +60,7 @@ bun run dev
## Key Features
- 🔄 **Automatic Mirroring** - Keep repositories synchronized
- 🔄 **Automatic Syncing** - Keep repositories synchronized
- 🗂️ **Organization Support** - Mirror entire organizations
- ⭐ **Starred Repos** - Mirror your starred repositories
- 🔐 **Self-Hosted** - Full control over your data

View File

@@ -1,7 +1,7 @@
{
"name": "gitea-mirror",
"type": "module",
"version": "3.4.0",
"version": "3.5.4",
"engines": {
"bun": ">=1.2.9"
},
@@ -46,7 +46,7 @@
"@astrojs/mdx": "4.3.4",
"@astrojs/node": "9.4.3",
"@astrojs/react": "^4.3.0",
"@better-auth/sso": "^1.3.7",
"@better-auth/sso": "^1.3.8",
"@octokit/rest": "^22.0.0",
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-avatar": "^1.1.10",
@@ -72,7 +72,7 @@
"@types/react-dom": "^19.1.9",
"astro": "^5.13.4",
"bcryptjs": "^3.0.2",
"better-auth": "^1.3.7",
"better-auth": "^1.3.8",
"canvas-confetti": "^1.9.3",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",

View File

@@ -122,12 +122,12 @@ export function AutomationSettings({
<CardContent className="space-y-6">
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
{/* Automatic Mirroring Section */}
{/* Automatic Syncing Section */}
<div className="space-y-4 p-4 border border-border rounded-lg bg-card/50">
<div className="flex items-center justify-between">
<h3 className="text-sm font-medium flex items-center gap-2">
<RefreshCw className="h-4 w-4 text-primary" />
Automatic Mirroring
Automatic Syncing
</h3>
{isAutoSavingSchedule && (
<Activity className="h-4 w-4 animate-spin text-muted-foreground" />

View File

@@ -50,12 +50,12 @@ export function ConfigTabs() {
preserveOrgStructure: false,
},
scheduleConfig: {
enabled: true, // Default to enabled
interval: 86400, // Default to daily (24 hours)
enabled: false, // Don't set defaults here - will be loaded from API
interval: 0, // Will be replaced with actual value from API
},
cleanupConfig: {
enabled: true, // Default to enabled
retentionDays: 604800, // 7 days in seconds - Default retention period
enabled: false, // Don't set defaults here - will be loaded from API
retentionDays: 0, // Will be replaced with actual value from API
},
mirrorOptions: {
mirrorReleases: false,

View File

@@ -372,8 +372,8 @@ export function SSOSettings() {
Add Provider
</Button>
</DialogTrigger>
<DialogContent className="max-w-2xl">
<DialogHeader>
<DialogContent className="max-w-2xl max-h-[90vh] md:max-h-[85vh] lg:max-h-[90vh] overflow-hidden flex flex-col">
<DialogHeader className="flex-shrink-0">
<DialogTitle>{editingProvider ? 'Edit SSO Provider' : 'Add SSO Provider'}</DialogTitle>
<DialogDescription>
{editingProvider
@@ -381,14 +381,15 @@ export function SSOSettings() {
: 'Configure an external identity provider for user authentication'}
</DialogDescription>
</DialogHeader>
<Tabs value={providerType} onValueChange={(value) => setProviderType(value as 'oidc' | 'saml')}>
<TabsList className="grid w-full grid-cols-2">
<TabsTrigger value="oidc">OIDC / OAuth2</TabsTrigger>
<TabsTrigger value="saml">SAML 2.0</TabsTrigger>
</TabsList>
{/* Common Fields */}
<div className="space-y-4 mt-4">
<div className="flex-1 overflow-y-auto px-1 -mx-1">
<Tabs value={providerType} onValueChange={(value) => setProviderType(value as 'oidc' | 'saml')}>
<TabsList className="grid w-full grid-cols-2 sticky top-0 z-10 bg-background">
<TabsTrigger value="oidc">OIDC / OAuth2</TabsTrigger>
<TabsTrigger value="saml">SAML 2.0</TabsTrigger>
</TabsList>
{/* Common Fields */}
<div className="space-y-4 mt-4">
<div className="grid grid-cols-2 gap-4">
<div className="space-y-2">
<Label htmlFor="providerId">Provider ID</Label>
@@ -569,7 +570,8 @@ export function SSOSettings() {
</Alert>
</TabsContent>
</Tabs>
<DialogFooter>
</div>
<DialogFooter className="flex-shrink-0 pt-4 border-t">
<Button
variant="outline"
onClick={() => {

View File

@@ -83,7 +83,7 @@ export function ScheduleConfigForm({
htmlFor="enabled"
className="select-none ml-2 block text-sm font-medium"
>
Enable Automatic Mirroring
Enable Automatic Syncing
</label>
</div>
@@ -93,7 +93,7 @@ export function ScheduleConfigForm({
htmlFor="interval"
className="block text-sm font-medium mb-1.5"
>
Mirroring Interval
Sync Interval
</label>
<Select
@@ -122,7 +122,7 @@ export function ScheduleConfigForm({
</Select>
<p className="text-xs text-muted-foreground mt-1">
How often the mirroring process should run.
How often the sync process should run.
</p>
<div className="mt-2 p-2 bg-muted/50 rounded-md">
<p className="text-xs text-muted-foreground">

View File

@@ -16,6 +16,46 @@ import { usePageVisibility } from "@/hooks/usePageVisibility";
import { useConfigStatus } from "@/hooks/useConfigStatus";
import { useNavigation } from "@/components/layout/MainLayout";
// Helper function to format last sync time
function formatLastSyncTime(date: Date | null): string {
if (!date) return "Never";
const now = new Date();
const syncDate = new Date(date);
const diffMs = now.getTime() - syncDate.getTime();
const diffMins = Math.floor(diffMs / 60000);
const diffHours = Math.floor(diffMs / 3600000);
const diffDays = Math.floor(diffMs / 86400000);
// Show relative time for recent syncs
if (diffMins < 1) return "Just now";
if (diffMins < 60) return `${diffMins} min ago`;
if (diffHours < 24) return `${diffHours} hr${diffHours === 1 ? '' : 's'} ago`;
if (diffDays < 7) return `${diffDays} day${diffDays === 1 ? '' : 's'} ago`;
// For older syncs, show week count
const diffWeeks = Math.floor(diffDays / 7);
if (diffWeeks < 4) return `${diffWeeks} week${diffWeeks === 1 ? '' : 's'} ago`;
// For even older, show month count
const diffMonths = Math.floor(diffDays / 30);
return `${diffMonths} month${diffMonths === 1 ? '' : 's'} ago`;
}
// Helper function to format full timestamp
function formatFullTimestamp(date: Date | null): string {
if (!date) return "";
return new Date(date).toLocaleString("en-US", {
month: "2-digit",
day: "2-digit",
year: "2-digit",
hour: "2-digit",
minute: "2-digit",
hour12: true
}).replace(',', '');
}
export function Dashboard() {
const { user } = useAuth();
const { registerRefreshCallback } = useLiveRefresh();
@@ -236,19 +276,9 @@ export function Dashboard() {
/>
<StatusCard
title="Last Sync"
value={
lastSync
? new Date(lastSync).toLocaleString("en-US", {
month: "2-digit",
day: "2-digit",
year: "2-digit",
hour: "2-digit",
minute: "2-digit",
})
: "N/A"
}
value={formatLastSyncTime(lastSync)}
icon={<Clock className="h-4 w-4" />}
description="Last successful sync"
description={formatFullTimestamp(lastSync)}
/>
</div>

View File

@@ -7,7 +7,7 @@ import { toast } from "sonner";
import { Skeleton } from "@/components/ui/skeleton";
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
import { useConfigStatus } from "@/hooks/useConfigStatus";
import { Menu, LogOut } from "lucide-react";
import { Menu, LogOut, PanelRightOpen, PanelRightClose } from "lucide-react";
import {
DropdownMenu,
DropdownMenuContent,
@@ -19,9 +19,12 @@ interface HeaderProps {
currentPage?: "dashboard" | "repositories" | "organizations" | "configuration" | "activity-log";
onNavigate?: (page: string) => void;
onMenuClick: () => void;
onToggleCollapse?: () => void;
isSidebarCollapsed?: boolean;
isSidebarOpen?: boolean;
}
export function Header({ currentPage, onNavigate, onMenuClick }: HeaderProps) {
export function Header({ currentPage, onNavigate, onMenuClick, onToggleCollapse, isSidebarCollapsed, isSidebarOpen }: HeaderProps) {
const { user, logout, isLoading } = useAuth();
const { isLiveEnabled, toggleLive } = useLiveRefresh();
const { isFullyConfigured, isLoading: configLoading } = useConfigStatus();
@@ -63,18 +66,38 @@ export function Header({ currentPage, onNavigate, onMenuClick }: HeaderProps) {
return (
<header className="border-b bg-background">
<div className="flex h-[4.5rem] items-center justify-between px-4 sm:px-6">
<div className="flex items-center gap-2">
{/* Hamburger Menu Button - Mobile Only */}
<div className="flex items-center lg:gap-12 md:gap-6 gap-4">
{/* Sidebar Toggle - Mobile uses slide-in, Medium uses collapse */}
<Button
variant="outline"
size="lg"
className="lg:hidden"
size="icon"
className="md:hidden h-10 w-10"
onClick={onMenuClick}
>
<Menu className="h-5 w-5" />
{isSidebarOpen ? (
<PanelRightOpen className="h-5 w-5" />
) : (
<PanelRightClose className="h-5 w-5" />
)}
<span className="sr-only">Toggle menu</span>
</Button>
{/* Sidebar Collapse Toggle - Only on medium screens (768px - 1280px) */}
<Button
variant="ghost"
size="icon"
className="hidden md:flex xl:hidden h-10 w-10"
onClick={onToggleCollapse}
title={isSidebarCollapsed ? "Expand sidebar" : "Collapse sidebar"}
>
{isSidebarCollapsed ? (
<PanelRightClose className="h-5 w-5" />
) : (
<PanelRightOpen className="h-5 w-5" />
)}
<span className="sr-only">Toggle sidebar</span>
</Button>
<button
onClick={() => {
if (currentPage !== 'dashboard') {

View File

@@ -45,6 +45,13 @@ function AppWithProviders({ page: initialPage }: AppProps) {
const [currentPage, setCurrentPage] = useState<AppProps['page']>(initialPage);
const [navigationKey, setNavigationKey] = useState(0);
const [sidebarOpen, setSidebarOpen] = useState(false);
const [sidebarCollapsed, setSidebarCollapsed] = useState(() => {
// Check if we're on medium screens (768px - 1280px)
if (typeof window !== 'undefined') {
return window.innerWidth >= 768 && window.innerWidth < 1280;
}
return false;
});
useRepoSync({
userId: user?.id,
@@ -83,6 +90,23 @@ function AppWithProviders({ page: initialPage }: AppProps) {
return () => window.removeEventListener('popstate', handlePopState);
}, []);
// Handle window resize to auto-collapse sidebar on medium screens
useEffect(() => {
const handleResize = () => {
const width = window.innerWidth;
// Auto-collapse on medium screens (768px - 1280px)
if (width >= 768 && width < 1280) {
setSidebarCollapsed(true);
} else if (width >= 1280) {
// Expand on large screens
setSidebarCollapsed(false);
}
};
window.addEventListener('resize', handleResize);
return () => window.removeEventListener('resize', handleResize);
}, []);
// Show loading state only during initial auth/config loading
const isInitialLoading = authLoading || (configLoading && !user);
@@ -113,14 +137,21 @@ function AppWithProviders({ page: initialPage }: AppProps) {
currentPage={currentPage}
onNavigate={handleNavigation}
onMenuClick={() => setSidebarOpen(!sidebarOpen)}
onToggleCollapse={() => setSidebarCollapsed(!sidebarCollapsed)}
isSidebarCollapsed={sidebarCollapsed}
isSidebarOpen={sidebarOpen}
/>
<div className="flex flex-1 relative">
<Sidebar
onNavigate={handleNavigation}
isOpen={sidebarOpen}
isCollapsed={sidebarCollapsed}
onClose={() => setSidebarOpen(false)}
onToggleCollapse={() => setSidebarCollapsed(!sidebarCollapsed)}
/>
<section className="flex-1 p-4 sm:p-6 overflow-y-auto h-[calc(100dvh-4.55rem)] w-full lg:w-[calc(100%-16rem)]">
<section className={`flex-1 p-4 sm:p-6 overflow-y-auto h-[calc(100dvh-4.55rem)] w-full transition-all duration-200 ${
sidebarCollapsed ? 'md:w-[calc(100%-5rem)] xl:w-[calc(100%-16rem)]' : 'md:w-[calc(100%-16rem)]'
}`}>
{currentPage === "dashboard" && <Dashboard />}
{currentPage === "repositories" && <Repository />}
{currentPage === "organizations" && <Organization />}

View File

@@ -3,15 +3,23 @@ import { cn } from "@/lib/utils";
import { ExternalLink } from "lucide-react";
import { links } from "@/data/Sidebar";
import { VersionInfo } from "./VersionInfo";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
interface SidebarProps {
className?: string;
onNavigate?: (page: string) => void;
isOpen: boolean;
isCollapsed?: boolean;
onClose: () => void;
onToggleCollapse?: () => void;
}
export function Sidebar({ className, onNavigate, isOpen, onClose }: SidebarProps) {
export function Sidebar({ className, onNavigate, isOpen, isCollapsed = false, onClose, onToggleCollapse }: SidebarProps) {
const [currentPath, setCurrentPath] = useState<string>("");
useEffect(() => {
@@ -53,7 +61,7 @@ export function Sidebar({ className, onNavigate, isOpen, onClose }: SidebarProps
onNavigate?.(pageName);
// Close sidebar on mobile after navigation
if (window.innerWidth < 1024) {
if (window.innerWidth < 768) {
onClose();
}
};
@@ -63,7 +71,7 @@ export function Sidebar({ className, onNavigate, isOpen, onClose }: SidebarProps
{/* Mobile Backdrop */}
{isOpen && (
<div
className="fixed inset-0 backdrop-blur-sm z-40 lg:hidden"
className="fixed inset-0 backdrop-blur-sm z-40 md:hidden"
onClick={onClose}
/>
)}
@@ -71,54 +79,126 @@ export function Sidebar({ className, onNavigate, isOpen, onClose }: SidebarProps
{/* Sidebar */}
<aside
className={cn(
"fixed lg:static inset-y-0 left-0 z-50 w-64 bg-background border-r flex flex-col h-full lg:h-[calc(100vh-4.5rem)] transition-transform duration-200 ease-in-out lg:translate-x-0",
"fixed md:static inset-y-0 left-0 z-50 bg-background border-r flex flex-col h-full md:h-[calc(100vh-4.5rem)] transition-all duration-200 ease-in-out md:translate-x-0",
isOpen ? "translate-x-0" : "-translate-x-full",
isCollapsed ? "md:w-20 xl:w-64" : "w-64",
className
)}
>
<div className="flex flex-col h-full">
<nav className="flex flex-col gap-y-1 lg:gap-y-1 pl-2 pr-3 pt-4 flex-shrink-0">
<nav className={cn(
"flex flex-col pt-4 flex-shrink-0",
isCollapsed
? "md:gap-y-2 md:items-center md:px-2 xl:gap-y-1 xl:items-stretch xl:pl-2 xl:pr-3 gap-y-1 pl-2 pr-3"
: "gap-y-1 pl-2 pr-3"
)}>
{links.map((link, index) => {
const isActive = currentPath === link.href;
const Icon = link.icon;
return (
const button = (
<button
key={index}
onClick={(e) => handleNavigation(link.href, e)}
className={cn(
"flex items-center gap-3 rounded-md px-3 py-3 lg:py-2 text-sm lg:text-sm font-medium transition-colors w-full text-left",
"flex items-center rounded-md text-sm font-medium transition-colors w-full",
isCollapsed
? "md:h-12 md:w-12 md:justify-center md:p-0 xl:h-auto xl:w-full xl:justify-start xl:px-3 xl:py-2 h-auto px-3 py-3"
: "px-3 py-3 md:py-2",
isActive
? "bg-primary text-primary-foreground"
: "text-muted-foreground hover:bg-accent hover:text-accent-foreground"
)}
>
<Icon className="h-5 w-5 lg:h-4 lg:w-4" />
{link.label}
<Icon className={cn(
"flex-shrink-0",
isCollapsed
? "md:h-5 md:w-5 md:mr-0 xl:h-4 xl:w-4 xl:mr-3 h-5 w-5 mr-3"
: "h-5 w-5 md:h-4 md:w-4 mr-3"
)} />
<span className={cn(
"transition-all duration-200",
isCollapsed ? "md:hidden xl:inline" : "inline"
)}>
{link.label}
</span>
</button>
);
// Wrap in tooltip when collapsed on medium screens
if (isCollapsed) {
return (
<TooltipProvider key={index}>
<Tooltip delayDuration={0}>
<TooltipTrigger asChild>
{button}
</TooltipTrigger>
<TooltipContent side="right" className="hidden md:block xl:hidden">
{link.label}
</TooltipContent>
</Tooltip>
</TooltipProvider>
);
}
return button;
})}
</nav>
<div className="flex-1 min-h-0" />
<div className="px-4 py-4 flex-shrink-0">
<div className="rounded-md bg-muted p-3 lg:p-3">
<h4 className="text-sm font-medium mb-2">Need Help?</h4>
<p className="text-xs text-muted-foreground mb-3 lg:mb-2">
Check out the documentation for help with setup and configuration.
</p>
<a
href="/docs"
target="_blank"
rel="noopener noreferrer"
className="inline-flex items-center gap-1.5 text-xs lg:text-xs text-primary hover:underline py-2 lg:py-0"
>
Documentation
<ExternalLink className="h-3.5 w-3.5 lg:h-3 lg:w-3" />
</a>
<div className={cn(
"py-4 flex-shrink-0",
isCollapsed ? "md:px-2 xl:px-4 px-4" : "px-4"
)}>
<div className={cn(
"rounded-md bg-muted transition-all duration-200",
isCollapsed ? "md:p-0 xl:p-3 p-3" : "p-3"
)}>
<div className={cn(
isCollapsed ? "md:hidden xl:block" : "block"
)}>
<h4 className="text-sm font-medium mb-2">Need Help?</h4>
<p className="text-xs text-muted-foreground mb-3 md:mb-2">
Check out the documentation for help with setup and configuration.
</p>
<a
href="/docs"
target="_blank"
rel="noopener noreferrer"
className="inline-flex items-center gap-1.5 text-xs md:text-xs text-primary hover:underline py-2 md:py-0"
>
Documentation
<ExternalLink className="h-3.5 w-3.5 md:h-3 md:w-3" />
</a>
</div>
{/* Icon-only help button for collapsed state on medium screens */}
<TooltipProvider>
<Tooltip delayDuration={0}>
<TooltipTrigger asChild>
<a
href="/docs"
target="_blank"
rel="noopener noreferrer"
className={cn(
"flex items-center justify-center rounded-md hover:bg-accent transition-colors",
isCollapsed ? "md:h-12 md:w-12 xl:hidden hidden" : "hidden"
)}
>
<ExternalLink className="h-5 w-5" />
</a>
</TooltipTrigger>
<TooltipContent side="right">
Documentation
</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
<div className={cn(
isCollapsed ? "md:hidden xl:block" : "block"
)}>
<VersionInfo />
</div>
<VersionInfo />
</div>
</div>
</aside>

View File

@@ -228,17 +228,17 @@ export function OrganizationList({
{(() => {
const parts = [];
if (org.publicRepositoryCount && org.publicRepositoryCount > 0) {
parts.push(`${org.publicRepositoryCount}pub`);
parts.push(`${org.publicRepositoryCount} pub`);
}
if (org.privateRepositoryCount && org.privateRepositoryCount > 0) {
parts.push(`${org.privateRepositoryCount}priv`);
parts.push(`${org.privateRepositoryCount} priv`);
}
if (org.forkRepositoryCount && org.forkRepositoryCount > 0) {
parts.push(`${org.forkRepositoryCount}fork`);
parts.push(`${org.forkRepositoryCount} fork`);
}
return parts.length > 0 ? (
<span className="ml-1">({parts.join('/')})</span>
<span className="ml-1">({parts.join(' | ')})</span>
) : null;
})()}
</div>

View File

@@ -5,7 +5,7 @@ import { FlipHorizontal, GitFork, RefreshCw, RotateCcw, Star, Lock, Ban, Check,
import { SiGithub, SiGitea } from "react-icons/si";
import type { Repository } from "@/lib/db/schema";
import { Button } from "@/components/ui/button";
import { formatDate, getStatusColor } from "@/lib/utils";
import { formatDate, formatLastSyncTime, getStatusColor } from "@/lib/utils";
import type { FilterParams } from "@/types/filter";
import { Skeleton } from "@/components/ui/skeleton";
import { useGiteaConfig } from "@/hooks/useGiteaConfig";
@@ -242,7 +242,7 @@ export default function RepositoryTable({
{repo.status}
</Badge>
<span className="text-xs text-muted-foreground">
{repo.lastMirrored ? formatDate(repo.lastMirrored) : "Never mirrored"}
{formatLastSyncTime(repo.lastMirrored)}
</span>
</div>
</div>
@@ -410,7 +410,7 @@ export default function RepositoryTable({
<div className="h-full p-3 flex items-center justify-center flex-[0.3]">
<Skeleton className="h-4 w-4" />
</div>
<div className="h-full p-3 text-sm font-medium flex-[2.5]">
<div className="h-full py-3 text-sm font-medium flex-[2.3]">
Repository
</div>
<div className="h-full p-3 text-sm font-medium flex-[1]">Owner</div>
@@ -437,7 +437,7 @@ export default function RepositoryTable({
<div className="h-full p-3 flex items-center justify-center flex-[0.3]">
<Skeleton className="h-4 w-4" />
</div>
<div className="h-full p-3 flex-[2.5]">
<div className="h-full p-3 flex-[2.3]">
<Skeleton className="h-5 w-48" />
<Skeleton className="h-3 w-24 mt-1" />
</div>
@@ -530,7 +530,7 @@ export default function RepositoryTable({
aria-label="Select all repositories"
/>
</div>
<div className="h-full p-3 text-sm font-medium flex-[2.5]">
<div className="h-full py-3 text-sm font-medium flex-[2.3]">
Repository
</div>
<div className="h-full p-3 text-sm font-medium flex-[1]">Owner</div>
@@ -588,7 +588,7 @@ export default function RepositoryTable({
</div>
{/* Repository */}
<div className="h-full py-3 flex items-center gap-2 flex-[2.5]">
<div className="h-full py-3 flex items-center gap-2 flex-[2.3]">
<div className="flex-1">
<div className="font-medium flex items-center gap-1">
{repo.name}
@@ -629,9 +629,7 @@ export default function RepositoryTable({
{/* Last Mirrored */}
<div className="h-full p-3 flex items-center flex-[1]">
<p className="text-sm">
{repo.lastMirrored
? formatDate(new Date(repo.lastMirrored))
: "Never"}
{formatLastSyncTime(repo.lastMirrored)}
</p>
</div>

View File

@@ -4,9 +4,35 @@ import { ssoClient } from "@better-auth/sso/client";
import type { Session as BetterAuthSession, User as BetterAuthUser } from "better-auth";
export const authClient = createAuthClient({
// The base URL is optional when running on the same domain
// Better Auth will use the current domain by default
baseURL: typeof window !== 'undefined' ? window.location.origin : 'http://localhost:4321',
// Use PUBLIC_BETTER_AUTH_URL if set (for multi-origin access), otherwise use current origin
// This allows the client to connect to the auth server even when accessed from different origins
baseURL: (() => {
let url: string | undefined;
// Check for public environment variable first (for client-side access)
if (typeof import.meta !== 'undefined' && import.meta.env?.PUBLIC_BETTER_AUTH_URL) {
url = import.meta.env.PUBLIC_BETTER_AUTH_URL;
}
// Validate and clean the URL if provided
if (url && typeof url === 'string' && url.trim() !== '') {
try {
// Validate URL format and remove trailing slash
const validatedUrl = new URL(url.trim());
return validatedUrl.origin; // Use origin to ensure clean URL without path
} catch (e) {
console.warn(`Invalid PUBLIC_BETTER_AUTH_URL: ${url}, falling back to default`);
}
}
// Fall back to current origin if running in browser
if (typeof window !== 'undefined' && window.location?.origin) {
return window.location.origin;
}
// Default for SSR - always return a valid URL
return 'http://localhost:4321';
})(),
basePath: '/api/auth', // Explicitly set the base path
plugins: [
oidcClient(),

View File

@@ -19,42 +19,71 @@ export const auth = betterAuth({
// Base URL configuration - use the primary URL (Better Auth only supports single baseURL)
baseURL: (() => {
const url = process.env.BETTER_AUTH_URL || "http://localhost:4321";
const url = process.env.BETTER_AUTH_URL;
const defaultUrl = "http://localhost:4321";
// Check if URL is provided and not empty
if (!url || typeof url !== 'string' || url.trim() === '') {
console.info('BETTER_AUTH_URL not set, using default:', defaultUrl);
return defaultUrl;
}
try {
// Validate URL format
new URL(url);
return url;
} catch {
console.warn(`Invalid BETTER_AUTH_URL: ${url}, falling back to localhost`);
return "http://localhost:4321";
// Validate URL format and ensure it's a proper origin
const validatedUrl = new URL(url.trim());
const cleanUrl = validatedUrl.origin; // Use origin to ensure no trailing paths
console.info('Using BETTER_AUTH_URL:', cleanUrl);
return cleanUrl;
} catch (e) {
console.error(`Invalid BETTER_AUTH_URL format: "${url}"`);
console.error('Error:', e);
console.info('Falling back to default:', defaultUrl);
return defaultUrl;
}
})(),
basePath: "/api/auth", // Specify the base path for auth endpoints
// Trusted origins - this is how we support multiple access URLs
trustedOrigins: (() => {
const origins = [
const origins: string[] = [
"http://localhost:4321",
"http://localhost:8080", // Keycloak
];
// Add the primary URL from BETTER_AUTH_URL
const primaryUrl = process.env.BETTER_AUTH_URL || "http://localhost:4321";
try {
new URL(primaryUrl);
origins.push(primaryUrl);
} catch {
// Skip if invalid
const primaryUrl = process.env.BETTER_AUTH_URL;
if (primaryUrl && typeof primaryUrl === 'string' && primaryUrl.trim() !== '') {
try {
const validatedUrl = new URL(primaryUrl.trim());
origins.push(validatedUrl.origin);
} catch {
// Skip if invalid
}
}
// Add additional trusted origins from environment
// This is where users can specify multiple access URLs
if (process.env.BETTER_AUTH_TRUSTED_ORIGINS) {
origins.push(...process.env.BETTER_AUTH_TRUSTED_ORIGINS.split(',').map(o => o.trim()));
const additionalOrigins = process.env.BETTER_AUTH_TRUSTED_ORIGINS
.split(',')
.map(o => o.trim())
.filter(o => o !== '');
// Validate each additional origin
for (const origin of additionalOrigins) {
try {
const validatedUrl = new URL(origin);
origins.push(validatedUrl.origin);
} catch {
console.warn(`Invalid trusted origin: ${origin}, skipping`);
}
}
}
// Remove duplicates and return
return [...new Set(origins.filter(Boolean))];
// Remove duplicates and empty strings, then return
const uniqueOrigins = [...new Set(origins.filter(Boolean))];
console.info('Trusted origins:', uniqueOrigins);
return uniqueOrigins;
})(),
// Authentication methods
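As a worked example of the server-side origin handling above (all values hypothetical): with BETTER_AUTH_URL=https://mirror.example.com and BETTER_AUTH_TRUSTED_ORIGINS="http://192.168.1.10:4321, https://git.example.com/, not-a-url", the trusted origins become the two localhost defaults plus https://mirror.example.com, http://192.168.1.10:4321, and https://git.example.com; the malformed entry is skipped with a warning. A condensed sketch of that parsing step:

// Illustrative values only; mirrors the BETTER_AUTH_TRUSTED_ORIGINS validation above
const raw = "http://192.168.1.10:4321, https://git.example.com/, not-a-url";
const extraOrigins: string[] = [];
for (const origin of raw.split(",").map(o => o.trim()).filter(o => o !== "")) {
  try {
    extraOrigins.push(new URL(origin).origin); // trailing slashes and paths are dropped
  } catch {
    console.warn(`Invalid trusted origin: ${origin}, skipping`);
  }
}
// extraOrigins => ["http://192.168.1.10:4321", "https://git.example.com"]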

View File

@@ -133,6 +133,7 @@ function parseEnvConfig(): EnvConfig {
mirrorLabels: process.env.MIRROR_LABELS === 'true',
mirrorMilestones: process.env.MIRROR_MILESTONES === 'true',
mirrorMetadata: process.env.MIRROR_METADATA === 'true',
releaseLimit: process.env.RELEASE_LIMIT ? parseInt(process.env.RELEASE_LIMIT, 10) : undefined,
},
schedule: {
enabled: process.env.SCHEDULE_ENABLED === 'true' ||
@@ -271,6 +272,7 @@ export async function initializeConfigFromEnv(): Promise<void> {
forkStrategy: envConfig.gitea.forkStrategy || existingConfig?.[0]?.giteaConfig?.forkStrategy || 'reference',
// Mirror metadata options
mirrorReleases: envConfig.mirror.mirrorReleases ?? existingConfig?.[0]?.giteaConfig?.mirrorReleases ?? false,
releaseLimit: envConfig.mirror.releaseLimit ?? existingConfig?.[0]?.giteaConfig?.releaseLimit ?? 10,
mirrorMetadata: envConfig.mirror.mirrorMetadata ?? (envConfig.mirror.mirrorIssues || envConfig.mirror.mirrorPullRequests || envConfig.mirror.mirrorLabels || envConfig.mirror.mirrorMilestones) ?? existingConfig?.[0]?.giteaConfig?.mirrorMetadata ?? false,
mirrorIssues: envConfig.mirror.mirrorIssues ?? existingConfig?.[0]?.giteaConfig?.mirrorIssues ?? false,
mirrorPullRequests: envConfig.mirror.mirrorPullRequests ?? existingConfig?.[0]?.giteaConfig?.mirrorPullRequests ?? false,
@@ -299,6 +301,7 @@ export async function initializeConfigFromEnv(): Promise<void> {
updateInterval: envConfig.schedule.updateInterval ?? existingConfig?.[0]?.scheduleConfig?.updateInterval ?? 86400000,
skipRecentlyMirrored: envConfig.schedule.skipRecentlyMirrored ?? existingConfig?.[0]?.scheduleConfig?.skipRecentlyMirrored ?? true,
recentThreshold: envConfig.schedule.recentThreshold ?? existingConfig?.[0]?.scheduleConfig?.recentThreshold ?? 3600000,
autoImport: process.env.AUTO_IMPORT_REPOS !== 'false', // New field for auto-importing new repositories
lastRun: existingConfig?.[0]?.scheduleConfig?.lastRun || undefined,
nextRun: existingConfig?.[0]?.scheduleConfig?.nextRun || undefined,
};
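The practical effect of the two new fields above (hypothetical values): RELEASE_LIMIT=25 yields releaseLimit: 25, an unset RELEASE_LIMIT leaves it undefined so the stored or default value (10) wins, and AUTO_IMPORT_REPOS is treated as true unless explicitly set to "false". A minimal sketch of the same parsing, assuming the documented variable names:

// Standalone illustration of how the two new environment variables are read
const releaseLimit = process.env.RELEASE_LIMIT
  ? parseInt(process.env.RELEASE_LIMIT, 10)
  : undefined;                                                // undefined falls back to stored config or 10
const autoImport = process.env.AUTO_IMPORT_REPOS !== "false"; // true unless explicitly disabled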

View File

@@ -10,7 +10,7 @@ import type { Config } from "@/types/config";
import type { Repository } from "./db/schema";
import { createMirrorJob } from "./helpers";
import { decryptConfigTokens } from "./utils/config-encryption";
import { httpPost, httpGet, HttpError } from "./http-client";
import { httpPost, httpGet, httpPatch, HttpError } from "./http-client";
import { db, repositories } from "./db";
import { eq } from "drizzle-orm";
import { repoStatusEnum } from "@/types/Repository";
@@ -299,6 +299,23 @@ export async function syncGiteaRepoEnhanced({
throw new Error(`Repository ${repository.name} is not a mirror. Cannot sync.`);
}
// Update mirror interval if needed
if (config.giteaConfig?.mirrorInterval) {
try {
console.log(`[Sync] Updating mirror interval for ${repository.name} to ${config.giteaConfig.mirrorInterval}`);
const updateUrl = `${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}`;
await httpPatch(updateUrl, {
mirror_interval: config.giteaConfig.mirrorInterval,
}, {
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
});
console.log(`[Sync] Successfully updated mirror interval for ${repository.name}`);
} catch (updateError) {
console.warn(`[Sync] Failed to update mirror interval for ${repository.name}:`, updateError);
// Continue with sync even if interval update fails
}
}
// Perform the sync
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}/mirror-sync`;

View File

@@ -7,7 +7,7 @@ import { membershipRoleEnum } from "@/types/organizations";
import { Octokit } from "@octokit/rest";
import type { Config } from "@/types/config";
import type { Organization, Repository } from "./db/schema";
import { httpPost, httpGet, httpDelete, httpPut } from "./http-client";
import { httpPost, httpGet, httpDelete, httpPut, httpPatch } from "./http-client";
import { createMirrorJob } from "./helpers";
import { db, organizations, repositories } from "./db";
import { eq, and } from "drizzle-orm";
@@ -417,6 +417,7 @@ export const mirrorGithubRepoToGitea = async ({
clone_addr: cloneAddress,
repo_name: repository.name,
mirror: true,
mirror_interval: config.giteaConfig?.mirrorInterval || "8h", // Set mirror interval
wiki: config.giteaConfig?.wiki || false, // will mirror wiki if it exists
lfs: config.giteaConfig?.lfs || false, // Enable LFS mirroring if configured
private: repository.isPrivate,
@@ -711,6 +712,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
uid: giteaOrgId,
repo_name: repository.name,
mirror: true,
mirror_interval: config.giteaConfig?.mirrorInterval || "8h", // Set mirror interval
wiki: config.giteaConfig?.wiki || false, // will mirror wiki if it exists
lfs: config.giteaConfig?.lfs || false, // Enable LFS mirroring if configured
private: repository.isPrivate,
@@ -1433,20 +1435,54 @@ export async function mirrorGitHubReleasesToGitea({
}
).catch(() => null);
const releaseNote = release.body || "";
if (existingReleasesResponse) {
console.log(`[Releases] Release ${release.tag_name} already exists, skipping`);
skippedCount++;
// Update existing release if the changelog/body differs
const existingRelease = existingReleasesResponse.data;
const existingNote = existingRelease.body || "";
if (existingNote !== releaseNote || existingRelease.name !== (release.name || release.tag_name)) {
console.log(`[Releases] Updating existing release ${release.tag_name} with new changelog/title`);
await httpPut(
`${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}/releases/${existingRelease.id}`,
{
tag_name: release.tag_name,
target: release.target_commitish,
title: release.name || release.tag_name,
body: releaseNote,
draft: release.draft,
prerelease: release.prerelease,
},
{
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
if (releaseNote) {
console.log(`[Releases] Updated changelog for ${release.tag_name} (${releaseNote.length} characters)`);
}
mirroredCount++;
} else {
console.log(`[Releases] Release ${release.tag_name} already up-to-date, skipping`);
skippedCount++;
}
continue;
}
// Create the release
// Create new release with changelog/body content
if (releaseNote) {
console.log(`[Releases] Including changelog for ${release.tag_name} (${releaseNote.length} characters)`);
}
const createReleaseResponse = await httpPost(
`${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}/releases`,
{
tag_name: release.tag_name,
target: release.target_commitish,
title: release.name || release.tag_name,
note: release.body || "",
body: releaseNote,
draft: release.draft,
prerelease: release.prerelease,
},
@@ -1505,13 +1541,14 @@ export async function mirrorGitHubReleasesToGitea({
}
mirroredCount++;
console.log(`[Releases] Successfully mirrored release: ${release.tag_name}`);
const noteInfo = releaseNote ? ` with ${releaseNote.length} character changelog` : " without changelog";
console.log(`[Releases] Successfully mirrored release: ${release.tag_name}${noteInfo}`);
} catch (error) {
console.error(`[Releases] Failed to mirror release ${release.tag_name}: ${error instanceof Error ? error.message : String(error)}`);
}
}
console.log(`✅ Mirrored ${mirroredCount} new releases to Gitea (${skippedCount} already existed)`);
console.log(`✅ Mirrored/Updated ${mirroredCount} releases to Gitea (${skippedCount} already up-to-date)`);
}
export async function mirrorGitRepoPullRequestsToGitea({
@@ -1979,6 +2016,12 @@ export async function deleteGiteaRepo(
/**
* Archive a repository in Gitea
*
* IMPORTANT: This function NEVER deletes data. It only marks repositories as archived.
* - For regular repos: Uses Gitea's archive feature (makes read-only)
* - For mirror repos: Renames with [ARCHIVED] prefix (Gitea doesn't allow archiving mirrors)
*
* This ensures backups are preserved even when the GitHub source disappears.
*/
export async function archiveGiteaRepo(
client: { url: string; token: string },
@@ -1986,24 +2029,115 @@ export async function archiveGiteaRepo(
repo: string
): Promise<void> {
try {
const response = await httpPut(
// First, check if this is a mirror repository
const repoResponse = await httpGet(
`${client.url}/api/v1/repos/${owner}/${repo}`,
{
archived: true,
},
{
Authorization: `token ${client.token}`,
'Content-Type': 'application/json',
}
);
if (response.status >= 400) {
throw new Error(`Failed to archive repository ${owner}/${repo}: ${response.status} ${response.statusText}`);
if (!repoResponse.data) {
console.warn(`[Archive] Repository ${owner}/${repo} not found in Gitea. Skipping.`);
return;
}
console.log(`Successfully archived repository ${owner}/${repo} in Gitea`);
if (repoResponse.data?.mirror) {
console.log(`[Archive] Repository ${owner}/${repo} is a mirror. Using safe rename strategy.`);
// IMPORTANT: Gitea API doesn't allow archiving mirror repositories
// According to Gitea source code, attempting to archive a mirror returns:
// "repo is a mirror, cannot archive/un-archive" (422 Unprocessable Entity)
//
// Our solution: Rename the repo to clearly mark it as orphaned
// This preserves all data while indicating the repo is no longer actively synced
const currentName = repoResponse.data.name;
// Skip if already marked as archived
if (currentName.startsWith('[ARCHIVED]')) {
console.log(`[Archive] Repository ${owner}/${repo} already marked as archived. Skipping.`);
return;
}
const archivedName = `[ARCHIVED] ${currentName}`;
const currentDesc = repoResponse.data.description || '';
const archiveNotice = `\n\n⚠️ ARCHIVED: Original GitHub repository no longer exists. Preserved as backup on ${new Date().toISOString()}`;
// Only add notice if not already present
const newDescription = currentDesc.includes('⚠️ ARCHIVED:')
? currentDesc
: currentDesc + archiveNotice;
const renameResponse = await httpPatch(
`${client.url}/api/v1/repos/${owner}/${repo}`,
{
name: archivedName,
description: newDescription,
},
{
Authorization: `token ${client.token}`,
'Content-Type': 'application/json',
}
);
if (renameResponse.status >= 400) {
// If rename fails, log but don't throw - data is still preserved
console.error(`[Archive] Failed to rename mirror repository ${owner}/${repo}: ${renameResponse.status}`);
console.log(`[Archive] Repository ${owner}/${repo} remains accessible but not marked as archived`);
return;
}
console.log(`[Archive] Successfully marked mirror repository ${owner}/${repo} as archived (renamed to ${archivedName})`);
// Also try to reduce sync frequency to prevent unnecessary API calls
// This is optional - if it fails, the repo is still preserved
try {
await httpPatch(
`${client.url}/api/v1/repos/${owner}/${archivedName}`,
{
mirror_interval: "8760h", // 1 year - minimizes sync attempts
},
{
Authorization: `token ${client.token}`,
'Content-Type': 'application/json',
}
);
console.log(`[Archive] Reduced sync frequency for ${owner}/${archivedName} to yearly`);
} catch (intervalError) {
// Non-critical - repo is still preserved even if we can't change interval
console.debug(`[Archive] Could not update mirror interval (non-critical):`, intervalError);
}
} else {
// For non-mirror repositories, use Gitea's native archive feature
// This makes the repository read-only but preserves all data
console.log(`[Archive] Archiving regular repository ${owner}/${repo}`);
const response = await httpPatch(
`${client.url}/api/v1/repos/${owner}/${repo}`,
{
archived: true,
},
{
Authorization: `token ${client.token}`,
'Content-Type': 'application/json',
}
);
if (response.status >= 400) {
// If archive fails, log but data is still preserved in Gitea
console.error(`[Archive] Failed to archive repository ${owner}/${repo}: ${response.status}`);
console.log(`[Archive] Repository ${owner}/${repo} remains accessible but not marked as archived`);
return;
}
console.log(`[Archive] Successfully archived repository ${owner}/${repo} (now read-only)`);
}
} catch (error) {
console.error(`Error archiving repository ${owner}/${repo}:`, error);
throw error;
// Even on error, the repository data is preserved in Gitea
// We just couldn't mark it as archived
console.error(`[Archive] Could not mark repository ${owner}/${repo} as archived:`, error);
console.log(`[Archive] Repository ${owner}/${repo} data is preserved but not marked as archived`);
// Don't throw - we want cleanup to continue for other repos
}
}
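To summarize the archive behavior above with hypothetical names: a mirror called my-backup is renamed to "[ARCHIVED] my-backup" and its description gains the archived notice, while a regular repository is flagged archived: true; in both cases no data is removed. A condensed sketch of that decision, assuming a GiteaHttpClient instance that also exposes a get helper alongside the patch helper added later in this changeset:

import { GiteaHttpClient } from "./http-client";

// Illustrative decision logic only; the full implementation above also handles
// missing repos, duplicate notices, rename failures, and the yearly mirror_interval fallback.
async function markArchived(client: GiteaHttpClient, owner: string, repo: string): Promise<void> {
  const { data } = await client.get(`/api/v1/repos/${owner}/${repo}`);
  if (data?.mirror) {
    // Gitea refuses to archive mirrors, so rename instead and keep all data
    await client.patch(`/api/v1/repos/${owner}/${repo}`, {
      name: `[ARCHIVED] ${data.name}`,
      description: `${data.description || ""}\n\n⚠️ ARCHIVED: Original GitHub repository no longer exists.`,
    });
  } else {
    // Regular repositories support Gitea's native, read-only archive flag
    await client.patch(`/api/v1/repos/${owner}/${repo}`, { archived: true });
  }
}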

View File

@@ -178,6 +178,21 @@ export async function httpPut<T = any>(
});
}
/**
* PATCH request
*/
export async function httpPatch<T = any>(
url: string,
body?: any,
headers?: Record<string, string>
): Promise<HttpResponse<T>> {
return httpRequest<T>(url, {
method: 'PATCH',
headers,
body: body ? JSON.stringify(body) : undefined,
});
}
/**
* DELETE request
*/
@@ -220,6 +235,10 @@ export class GiteaHttpClient {
return httpPut<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
}
async patch<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
return httpPatch<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
}
async delete<T = any>(endpoint: string): Promise<HttpResponse<T>> {
return httpDelete<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
}
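The new PATCH helper is what the mirror-interval and archive changes above rely on. A minimal usage sketch with placeholder values:

import { httpPatch } from "./http-client";

// Update a repository's pull-mirror interval via the Gitea API (URL and token are placeholders)
await httpPatch(
  "https://gitea.example.com/api/v1/repos/backup/my-repo",
  { mirror_interval: "8h" },
  { Authorization: "token <gitea-token>" }
);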

View File

@@ -27,15 +27,37 @@ async function identifyOrphanedRepositories(config: any): Promise<any[]> {
const decryptedToken = getDecryptedGitHubToken(config);
const octokit = createGitHubClient(decryptedToken);
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
let allGithubRepos = [];
let githubApiAccessible = true;
try {
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
allGithubRepos = [...basicAndForkedRepos, ...starredRepos];
} catch (githubError: any) {
// Handle GitHub API errors gracefully
console.warn(`[Repository Cleanup] GitHub API error for user ${userId}: ${githubError.message}`);
// Check if it's a critical error (like account deleted/banned)
if (githubError.status === 404 || githubError.status === 403) {
console.error(`[Repository Cleanup] CRITICAL: GitHub account may be deleted/banned. Skipping cleanup to prevent data loss.`);
console.error(`[Repository Cleanup] Consider using CLEANUP_ORPHANED_REPO_ACTION=archive instead of delete for safety.`);
// Return empty array to skip cleanup entirely when GitHub account is inaccessible
return [];
}
// For other errors, also skip cleanup to be safe
console.error(`[Repository Cleanup] Skipping cleanup due to GitHub API error. This prevents accidental deletion of backups.`);
return [];
}
const allGithubRepos = [...basicAndForkedRepos, ...starredRepos];
const githubRepoFullNames = new Set(allGithubRepos.map(repo => repo.fullName));
// Get all repositories from our database
@@ -44,13 +66,19 @@ async function identifyOrphanedRepositories(config: any): Promise<any[]> {
.from(repositories)
.where(eq(repositories.userId, userId));
// Identify orphaned repositories
// Only identify repositories as orphaned if we successfully accessed GitHub
// This prevents false positives when GitHub is down or account is inaccessible
const orphanedRepos = dbRepos.filter(repo => !githubRepoFullNames.has(repo.fullName));
if (orphanedRepos.length > 0) {
console.log(`[Repository Cleanup] Found ${orphanedRepos.length} orphaned repositories for user ${userId}`);
}
return orphanedRepos;
} catch (error) {
console.error(`[Repository Cleanup] Error identifying orphaned repositories for user ${userId}:`, error);
throw error;
// Return empty array on error to prevent accidental deletions
return [];
}
}
@@ -348,6 +376,9 @@ export function isRepositoryCleanupServiceRunning(): boolean {
return cleanupInterval !== null;
}
// Export functions for use by scheduler
export { identifyOrphanedRepositories, handleOrphanedRepository };
/**
* Manually trigger repository cleanup for a specific user
*/
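The net effect of the error handling above is fail-safe cleanup: when the GitHub listing fails, or returns 403/404 (token revoked, account deleted or banned), no repositories are reported as orphaned, so nothing gets archived or deleted by mistake. A condensed restatement of that shape, with the parameter names being illustrative:

// Fail-safe orphan detection: never report orphans when the GitHub listing fails
async function findOrphans(
  dbRepos: { fullName: string }[],
  listGithubRepoNames: () => Promise<string[]>
): Promise<{ fullName: string }[]> {
  let githubNames: Set<string>;
  try {
    githubNames = new Set(await listGithubRepoNames());
  } catch {
    // GitHub unreachable or account inaccessible: skip cleanup rather than risk deleting backups
    return [];
  }
  return dbRepos.filter(repo => !githubNames.has(repo.fullName));
}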

View File

@@ -5,9 +5,8 @@
*/
import { db, configs, repositories } from '@/lib/db';
import { eq, and, or, lt, gte } from 'drizzle-orm';
import { syncGiteaRepo } from '@/lib/gitea';
import { createGitHubClient } from '@/lib/github';
import { eq, and, or } from 'drizzle-orm';
import { syncGiteaRepo, mirrorGithubRepoToGitea } from '@/lib/gitea';
import { getDecryptedGitHubToken } from '@/lib/utils/config-encryption';
import { parseInterval, formatDuration } from '@/lib/utils/duration-parser';
import type { Repository } from '@/lib/db/schema';
@@ -15,6 +14,7 @@ import { repoStatusEnum, repositoryVisibilityEnum } from '@/types/Repository';
let schedulerInterval: NodeJS.Timeout | null = null;
let isSchedulerRunning = false;
let hasPerformedAutoStart = false; // Track if we've already done auto-start
/**
* Parse schedule interval with enhanced support for duration strings, cron, and numbers
@@ -41,6 +41,12 @@ async function runScheduledSync(config: any): Promise<void> {
console.log(`[Scheduler] Running scheduled sync for user ${userId}`);
try {
// Check if tokens are configured before proceeding
if (!config.githubConfig?.token || !config.giteaConfig?.token) {
console.log(`[Scheduler] Skipping sync for user ${userId}: GitHub or Gitea tokens not configured`);
return;
}
// Update lastRun timestamp
const currentTime = new Date();
const scheduleConfig = config.scheduleConfig || {};
@@ -68,6 +74,110 @@ async function runScheduledSync(config: any): Promise<void> {
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
// Auto-discovery: Check for new GitHub repositories
if (scheduleConfig.autoImport !== false) {
console.log(`[Scheduler] Checking for new GitHub repositories for user ${userId}...`);
try {
const { getGithubRepositories, getGithubStarredRepositories } = await import('@/lib/github');
const { v4: uuidv4 } = await import('uuid');
const { getDecryptedGitHubToken } = await import('@/lib/utils/config-encryption');
// Create GitHub client
const decryptedToken = getDecryptedGitHubToken(config);
const { Octokit } = await import('@octokit/rest');
const octokit = new Octokit({ auth: decryptedToken });
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
const allGithubRepos = [...basicAndForkedRepos, ...starredRepos];
// Check for new repositories
const existingRepos = await db
.select({ fullName: repositories.fullName })
.from(repositories)
.where(eq(repositories.userId, userId));
const existingRepoNames = new Set(existingRepos.map(r => r.fullName));
const newRepos = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName));
if (newRepos.length > 0) {
console.log(`[Scheduler] Found ${newRepos.length} new repositories for user ${userId}`);
// Insert new repositories
const reposToInsert = newRepos.map(repo => ({
id: uuidv4(),
userId,
configId: config.id,
name: repo.name,
fullName: repo.fullName,
url: repo.url,
cloneUrl: repo.cloneUrl,
owner: repo.owner,
organization: repo.organization,
isPrivate: repo.isPrivate,
isForked: repo.isForked,
forkedFrom: repo.forkedFrom,
hasIssues: repo.hasIssues,
isStarred: repo.isStarred,
isArchived: repo.isArchived,
size: repo.size,
hasLFS: repo.hasLFS,
hasSubmodules: repo.hasSubmodules,
defaultBranch: repo.defaultBranch,
visibility: repo.visibility,
status: 'imported',
createdAt: new Date(),
updatedAt: new Date(),
}));
await db.insert(repositories).values(reposToInsert);
console.log(`[Scheduler] Successfully imported ${newRepos.length} new repositories for user ${userId}`);
} else {
console.log(`[Scheduler] No new repositories found for user ${userId}`);
}
} catch (error) {
console.error(`[Scheduler] Failed to auto-import repositories for user ${userId}:`, error);
}
}
// Auto-cleanup: Remove orphaned repositories (repos that no longer exist in GitHub)
if (config.cleanupConfig?.deleteIfNotInGitHub) {
console.log(`[Scheduler] Checking for orphaned repositories to cleanup for user ${userId}...`);
try {
const { identifyOrphanedRepositories, handleOrphanedRepository } = await import('@/lib/repository-cleanup-service');
const orphanedRepos = await identifyOrphanedRepositories(config);
if (orphanedRepos.length > 0) {
console.log(`[Scheduler] Found ${orphanedRepos.length} orphaned repositories for cleanup`);
for (const repo of orphanedRepos) {
try {
await handleOrphanedRepository(
config,
repo,
config.cleanupConfig.orphanedRepoAction || 'archive',
config.cleanupConfig.dryRun ?? false
);
console.log(`[Scheduler] Handled orphaned repository: ${repo.fullName}`);
} catch (error) {
console.error(`[Scheduler] Failed to handle orphaned repository ${repo.fullName}:`, error);
}
}
} else {
console.log(`[Scheduler] No orphaned repositories found for cleanup`);
}
} catch (error) {
console.error(`[Scheduler] Failed to cleanup orphaned repositories for user ${userId}:`, error);
}
}
// Get repositories to sync
let reposToSync = await db
.select()
@@ -176,6 +286,278 @@ async function syncSingleRepository(config: any, repo: any): Promise<void> {
}
}
/**
* Check if we should auto-start based on environment configuration
*/
async function checkAutoStartConfiguration(): Promise<boolean> {
// Don't auto-start more than once
if (hasPerformedAutoStart) {
return false;
}
try {
// Check if any configuration has scheduling enabled or mirror interval set
const activeConfigs = await db
.select()
.from(configs)
.where(eq(configs.isActive, true));
for (const config of activeConfigs) {
// Check if scheduling is enabled via environment
const scheduleEnabled = config.scheduleConfig?.enabled === true;
const hasMirrorInterval = !!config.giteaConfig?.mirrorInterval;
// If either SCHEDULE_ENABLED=true or GITEA_MIRROR_INTERVAL is set, we should auto-start
if (scheduleEnabled || hasMirrorInterval) {
console.log(`[Scheduler] Auto-start conditions met for user ${config.userId} (scheduleEnabled=${scheduleEnabled}, hasMirrorInterval=${hasMirrorInterval})`);
return true;
}
}
return false;
} catch (error) {
console.error('[Scheduler] Error checking auto-start configuration:', error);
return false;
}
}
/**
* Perform initial auto-start: import repositories and trigger mirror
*/
async function performInitialAutoStart(): Promise<void> {
hasPerformedAutoStart = true;
try {
console.log('[Scheduler] Performing initial auto-start...');
// Get all active configurations
const activeConfigs = await db
.select()
.from(configs)
.where(eq(configs.isActive, true));
for (const config of activeConfigs) {
// Skip if tokens are not configured
if (!config.githubConfig?.token || !config.giteaConfig?.token) {
console.log(`[Scheduler] Skipping auto-start for user ${config.userId}: tokens not configured`);
continue;
}
const scheduleEnabled = config.scheduleConfig?.enabled === true;
const hasMirrorInterval = !!config.giteaConfig?.mirrorInterval;
// Only process configs that have scheduling or mirror interval configured
if (!scheduleEnabled && !hasMirrorInterval) {
continue;
}
console.log(`[Scheduler] Auto-starting for user ${config.userId}...`);
try {
// Step 1: Import repositories from GitHub
console.log(`[Scheduler] Step 1: Importing repositories from GitHub for user ${config.userId}...`);
const { getGithubRepositories, getGithubStarredRepositories } = await import('@/lib/github');
const { v4: uuidv4 } = await import('uuid');
// Create GitHub client
const decryptedToken = getDecryptedGitHubToken(config);
const { Octokit } = await import('@octokit/rest');
const octokit = new Octokit({ auth: decryptedToken });
// Fetch GitHub data
const [basicAndForkedRepos, starredRepos] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
]);
const allGithubRepos = [...basicAndForkedRepos, ...starredRepos];
// Check for new repositories
const existingRepos = await db
.select({ fullName: repositories.fullName })
.from(repositories)
.where(eq(repositories.userId, config.userId));
const existingRepoNames = new Set(existingRepos.map(r => r.fullName));
const reposToImport = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName));
if (reposToImport.length > 0) {
console.log(`[Scheduler] Importing ${reposToImport.length} repositories for user ${config.userId}...`);
// Insert new repositories
const reposToInsert = reposToImport.map(repo => ({
id: uuidv4(),
userId: config.userId,
configId: config.id,
name: repo.name,
fullName: repo.fullName,
url: repo.url,
cloneUrl: repo.cloneUrl,
owner: repo.owner,
organization: repo.organization,
isPrivate: repo.isPrivate,
isForked: repo.isForked,
forkedFrom: repo.forkedFrom,
hasIssues: repo.hasIssues,
isStarred: repo.isStarred,
isArchived: repo.isArchived,
size: repo.size,
hasLFS: repo.hasLFS,
hasSubmodules: repo.hasSubmodules,
defaultBranch: repo.defaultBranch,
visibility: repo.visibility,
status: 'imported',
createdAt: new Date(),
updatedAt: new Date(),
}));
await db.insert(repositories).values(reposToInsert);
console.log(`[Scheduler] Successfully imported ${reposToImport.length} repositories`);
} else {
console.log(`[Scheduler] No new repositories to import for user ${config.userId}`);
}
// Check if we already have mirrored repositories (indicating this isn't first run)
const mirroredRepos = await db
.select()
.from(repositories)
.where(
and(
eq(repositories.userId, config.userId),
or(
eq(repositories.status, 'mirrored'),
eq(repositories.status, 'synced')
)
)
)
.limit(1);
// If we already have mirrored repos, skip the initial mirror (let regular sync handle it)
if (mirroredRepos.length > 0) {
console.log(`[Scheduler] User ${config.userId} already has mirrored repositories, skipping initial mirror (let regular sync handle updates)`);
// Still update the schedule config to indicate scheduling is active
const currentTime = new Date();
const intervalSource = config.scheduleConfig?.interval ||
config.giteaConfig?.mirrorInterval ||
'8h';
const interval = parseScheduleInterval(intervalSource);
const nextRun = new Date(currentTime.getTime() + interval);
await db.update(configs).set({
scheduleConfig: {
...config.scheduleConfig,
enabled: true,
lastRun: currentTime,
nextRun: nextRun,
},
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
console.log(`[Scheduler] Scheduling enabled for user ${config.userId}, next sync at ${nextRun.toISOString()}`);
continue;
}
// Step 2: Trigger mirror for all repositories that need mirroring
console.log(`[Scheduler] Step 2: Triggering mirror for repositories that need mirroring...`);
const reposNeedingMirror = await db
.select()
.from(repositories)
.where(
and(
eq(repositories.userId, config.userId),
or(
eq(repositories.status, 'imported'),
eq(repositories.status, 'pending'),
eq(repositories.status, 'failed')
)
)
);
if (reposNeedingMirror.length > 0) {
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need mirroring`);
// Reuse the octokit instance from above
// (octokit was already created in the import phase)
// Process repositories in batches
const batchSize = config.scheduleConfig?.batchSize || 5;
for (let i = 0; i < reposNeedingMirror.length; i += batchSize) {
const batch = reposNeedingMirror.slice(i, Math.min(i + batchSize, reposNeedingMirror.length));
console.log(`[Scheduler] Processing batch ${Math.floor(i / batchSize) + 1} of ${Math.ceil(reposNeedingMirror.length / batchSize)} (${batch.length} repos)`);
await Promise.all(
batch.map(async (repo) => {
try {
const repository: Repository = {
...repo,
status: repoStatusEnum.parse(repo.status),
organization: repo.organization ?? undefined,
lastMirrored: repo.lastMirrored ?? undefined,
errorMessage: repo.errorMessage ?? undefined,
mirroredLocation: repo.mirroredLocation || '',
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility),
};
await mirrorGithubRepoToGitea({
octokit,
repository,
config
});
console.log(`[Scheduler] Successfully mirrored repository: ${repo.fullName}`);
} catch (error) {
console.error(`[Scheduler] Failed to mirror repository ${repo.fullName}:`, error);
}
})
);
// Pause between batches if configured
if (i + batchSize < reposNeedingMirror.length) {
const pauseTime = config.scheduleConfig?.pauseBetweenBatches || 2000;
console.log(`[Scheduler] Pausing for ${pauseTime}ms before next batch...`);
await new Promise(resolve => setTimeout(resolve, pauseTime));
}
}
console.log(`[Scheduler] Completed initial mirror for ${reposNeedingMirror.length} repositories`);
} else {
console.log(`[Scheduler] No repositories need mirroring`);
}
// Update the schedule config to indicate we've run
const currentTime = new Date();
const intervalSource = config.scheduleConfig?.interval ||
config.giteaConfig?.mirrorInterval ||
'8h';
const interval = parseScheduleInterval(intervalSource);
const nextRun = new Date(currentTime.getTime() + interval);
await db.update(configs).set({
scheduleConfig: {
...config.scheduleConfig,
enabled: true, // Ensure scheduling is enabled
lastRun: currentTime,
nextRun: nextRun,
},
updatedAt: currentTime,
}).where(eq(configs.id, config.id));
console.log(`[Scheduler] Auto-start completed for user ${config.userId}, next sync at ${nextRun.toISOString()}`);
} catch (error) {
console.error(`[Scheduler] Failed to auto-start for user ${config.userId}:`, error);
}
}
console.log('[Scheduler] Initial auto-start completed');
} catch (error) {
console.error('[Scheduler] Failed to perform initial auto-start:', error);
}
}
/**
* Main scheduler loop
*/
@@ -202,25 +584,41 @@ async function schedulerLoop(): Promise<void> {
config.scheduleConfig?.enabled === true
);
if (enabledConfigs.length === 0) {
console.log(`[Scheduler] No configurations with scheduling enabled (found ${activeConfigs.length} active configs)`);
// Further filter configs that have valid tokens
const validConfigs = enabledConfigs.filter(config => {
const hasGitHubToken = !!config.githubConfig?.token;
const hasGiteaToken = !!config.giteaConfig?.token;
// Show details about why configs are not enabled
activeConfigs.forEach(config => {
const scheduleEnabled = config.scheduleConfig?.enabled;
const mirrorInterval = config.giteaConfig?.mirrorInterval;
console.log(`[Scheduler] User ${config.userId}: scheduleEnabled=${scheduleEnabled}, mirrorInterval=${mirrorInterval}`);
});
if (!hasGitHubToken || !hasGiteaToken) {
console.log(`[Scheduler] User ${config.userId}: Scheduling enabled but tokens missing (GitHub: ${hasGitHubToken}, Gitea: ${hasGiteaToken})`);
return false;
}
return true;
});
if (validConfigs.length === 0) {
if (enabledConfigs.length > 0) {
console.log(`[Scheduler] ${enabledConfigs.length} config(s) have scheduling enabled but lack required tokens`);
} else {
console.log(`[Scheduler] No configurations with scheduling enabled (found ${activeConfigs.length} active configs)`);
// Show details about why configs are not enabled
activeConfigs.forEach(config => {
const scheduleEnabled = config.scheduleConfig?.enabled;
const mirrorInterval = config.giteaConfig?.mirrorInterval;
console.log(`[Scheduler] User ${config.userId}: scheduleEnabled=${scheduleEnabled}, mirrorInterval=${mirrorInterval}`);
});
}
return;
}
console.log(`[Scheduler] Processing ${enabledConfigs.length} configurations with scheduling enabled (out of ${activeConfigs.length} total active configs)`);
console.log(`[Scheduler] Processing ${validConfigs.length} valid configurations (out of ${enabledConfigs.length} with scheduling enabled)`);
// Check each configuration to see if it's time to run
const currentTime = new Date();
for (const config of enabledConfigs) {
for (const config of validConfigs) {
const scheduleConfig = config.scheduleConfig || {};
// Check if it's time to run based on nextRun
@@ -242,7 +640,7 @@ async function schedulerLoop(): Promise<void> {
/**
* Start the scheduler service
*/
export function startSchedulerService(): void {
export async function startSchedulerService(): Promise<void> {
if (schedulerInterval) {
console.log('[Scheduler] Scheduler service is already running');
return;
@@ -250,6 +648,14 @@ export function startSchedulerService(): void {
console.log('[Scheduler] Starting scheduler service');
// Check if we should auto-start mirroring based on environment variables
const shouldAutoStart = await checkAutoStartConfiguration();
if (shouldAutoStart) {
console.log('[Scheduler] Auto-start detected from environment variables, triggering initial import and mirror...');
await performInitialAutoStart();
}
// Run immediately on start
schedulerLoop().catch(error => {
console.error('[Scheduler] Error during initial scheduler run:', error);
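In short, the scheduler's new auto-start path runs once at startup for any active config where scheduling is enabled or a mirror interval is configured, then imports missing repositories and mirrors them in batches. A compact restatement of the trigger condition (config shape as used above; the helper itself is illustrative):

// Auto-start fires when either SCHEDULE_ENABLED=true or GITEA_MIRROR_INTERVAL is set for the config
function shouldAutoStart(config: {
  scheduleConfig?: { enabled?: boolean };
  giteaConfig?: { mirrorInterval?: string };
}): boolean {
  const scheduleEnabled = config.scheduleConfig?.enabled === true;
  const hasMirrorInterval = !!config.giteaConfig?.mirrorInterval;
  return scheduleEnabled || hasMirrorInterval;
}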

View File

@@ -29,6 +29,31 @@ export function formatDate(date?: Date | string | null): string {
}).format(new Date(date));
}
export function formatLastSyncTime(date: Date | string | null): string {
if (!date) return "Never";
const now = new Date();
const syncDate = new Date(date);
const diffMs = now.getTime() - syncDate.getTime();
const diffMins = Math.floor(diffMs / 60000);
const diffHours = Math.floor(diffMs / 3600000);
const diffDays = Math.floor(diffMs / 86400000);
// Show relative time for recent syncs
if (diffMins < 1) return "Just now";
if (diffMins < 60) return `${diffMins} min ago`;
if (diffHours < 24) return `${diffHours} hr${diffHours === 1 ? '' : 's'} ago`;
if (diffDays < 7) return `${diffDays} day${diffDays === 1 ? '' : 's'} ago`;
// For older syncs, show week count
const diffWeeks = Math.floor(diffDays / 7);
if (diffWeeks < 4) return `${diffWeeks} week${diffWeeks === 1 ? '' : 's'} ago`;
// For even older, show month count
const diffMonths = Math.floor(diffDays / 30);
return `${diffMonths} month${diffMonths === 1 ? '' : 's'} ago`;
}
export function truncate(str: string, length: number): string {
if (str.length <= length) return str;
return str.slice(0, length) + "...";
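Given the thresholds above, sample outputs of the new helper look like this (inputs are relative to the current time; assume formatLastSyncTime is imported from this utils module):

formatLastSyncTime(null);                                      // "Never"
formatLastSyncTime(new Date(Date.now() - 30 * 1000));          // "Just now"  (< 1 minute)
formatLastSyncTime(new Date(Date.now() - 45 * 60 * 1000));     // "45 min ago"
formatLastSyncTime(new Date(Date.now() - 5 * 3600 * 1000));    // "5 hrs ago"
formatLastSyncTime(new Date(Date.now() - 3 * 86400 * 1000));   // "3 days ago"
formatLastSyncTime(new Date(Date.now() - 14 * 86400 * 1000));  // "2 weeks ago"
formatLastSyncTime(new Date(Date.now() - 90 * 86400 * 1000));  // "3 months ago"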

View File

@@ -8,6 +8,7 @@ import { setupSignalHandlers } from './lib/signal-handlers';
import { auth } from './lib/auth';
import { isHeaderAuthEnabled, authenticateWithHeaders } from './lib/auth-header';
import { initializeConfigFromEnv } from './lib/env-config-loader';
import { db, users } from './lib/db';
// Flag to track if recovery has been initialized
let recoveryInitialized = false;
@@ -17,6 +18,7 @@ let schedulerServiceStarted = false;
let repositoryCleanupServiceStarted = false;
let shutdownManagerInitialized = false;
let envConfigInitialized = false;
let envConfigCheckCount = 0; // Track attempts to avoid excessive checking
export const onRequest = defineMiddleware(async (context, next) => {
// First, try Better Auth session (cookie-based)
@@ -79,14 +81,31 @@ export const onRequest = defineMiddleware(async (context, next) => {
}
}
// Initialize configuration from environment variables (only once)
if (!envConfigInitialized) {
envConfigInitialized = true;
try {
await initializeConfigFromEnv();
} catch (error) {
console.error('⚠️ Failed to initialize configuration from environment:', error);
// Continue anyway - environment config is optional
// Initialize configuration from environment variables
// Optimized to minimize performance impact:
// - Once initialized, no checks are performed (envConfigInitialized = true)
// - Limits checks to first 100 requests to avoid DB queries on every request if no users exist
// - After user creation, env vars load on next request and flag is set permanently
if (!envConfigInitialized && envConfigCheckCount < 100) {
envConfigCheckCount++;
// Only check every 10th request after the first 10 to reduce DB load
const shouldCheck = envConfigCheckCount <= 10 || envConfigCheckCount % 10 === 0;
if (shouldCheck) {
try {
const hasUsers = await db.select().from(users).limit(1).then(u => u.length > 0);
if (hasUsers) {
// We have users now, try to initialize config
await initializeConfigFromEnv();
envConfigInitialized = true; // This ensures we never check again
console.log('✅ Environment configuration loaded after user creation');
}
} catch (error) {
console.error('⚠️ Failed to initialize configuration from environment:', error);
// Continue anyway - environment config is optional
}
}
}
@@ -160,7 +179,10 @@ export const onRequest = defineMiddleware(async (context, next) => {
if (recoveryInitialized && !schedulerServiceStarted) {
try {
console.log('Starting automatic mirror scheduler service...');
startSchedulerService();
// Start the scheduler service (now async)
startSchedulerService().catch(error => {
console.error('Error in scheduler service startup:', error);
});
// Register scheduler service shutdown callback
registerShutdownCallback(async () => {
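For clarity on the lazy environment-config check above: before any user exists, only requests 1 through 10 and then every 10th request up to the 100th trigger a database lookup; once the config loads (or after the 100th counted request) no further checks run until restart. A small sketch of that gating, with the function name being illustrative:

// Which request numbers trigger a DB check while env config is still uninitialized
function shouldCheckEnvConfig(requestCount: number): boolean {
  return requestCount <= 100 && (requestCount <= 10 || requestCount % 10 === 0);
}

// shouldCheckEnvConfig(3)   -> true
// shouldCheckEnvConfig(15)  -> false
// shouldCheckEnvConfig(40)  -> true
// shouldCheckEnvConfig(101) -> false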

View File

@@ -25,9 +25,34 @@ export async function POST(context: APIContext) {
);
}
// Validate issuer URL format
let validatedIssuer = issuer;
if (issuer && typeof issuer === 'string' && issuer.trim() !== '') {
try {
const issuerUrl = new URL(issuer.trim());
validatedIssuer = issuerUrl.toString().replace(/\/$/, ''); // Remove trailing slash
} catch (e) {
return new Response(
JSON.stringify({ error: `Invalid issuer URL format: ${issuer}` }),
{
status: 400,
headers: { "Content-Type": "application/json" },
}
);
}
} else {
return new Response(
JSON.stringify({ error: "Issuer URL cannot be empty" }),
{
status: 400,
headers: { "Content-Type": "application/json" },
}
);
}
let registrationBody: any = {
providerId,
issuer,
issuer: validatedIssuer,
domain,
organizationId,
};
@@ -91,14 +116,27 @@ export async function POST(context: APIContext) {
// Use provided scopes or default if not specified
const finalScopes = scopes || ["openid", "email", "profile"];
// Validate endpoint URLs if provided
const validateUrl = (url: string | undefined, name: string): string | undefined => {
if (!url) return undefined;
if (typeof url !== 'string' || url.trim() === '') return undefined;
try {
const validatedUrl = new URL(url.trim());
return validatedUrl.toString();
} catch (e) {
console.warn(`Invalid ${name} URL: ${url}, skipping`);
return undefined;
}
};
registrationBody.oidcConfig = {
clientId,
clientSecret,
authorizationEndpoint,
tokenEndpoint,
jwksEndpoint,
discoveryEndpoint,
userInfoEndpoint,
clientId: clientId || undefined,
clientSecret: clientSecret || undefined,
authorizationEndpoint: validateUrl(authorizationEndpoint, 'authorization endpoint'),
tokenEndpoint: validateUrl(tokenEndpoint, 'token endpoint'),
jwksEndpoint: validateUrl(jwksEndpoint, 'JWKS endpoint'),
discoveryEndpoint: validateUrl(discoveryEndpoint, 'discovery endpoint'),
userInfoEndpoint: validateUrl(userInfoEndpoint, 'userinfo endpoint'),
scopes: finalScopes,
pkce,
};

View File

@@ -10,26 +10,71 @@ export async function POST(context: APIContext) {
const { issuer } = await context.request.json();
if (!issuer) {
return new Response(JSON.stringify({ error: "Issuer URL is required" }), {
if (!issuer || typeof issuer !== 'string' || issuer.trim() === '') {
return new Response(JSON.stringify({ error: "Issuer URL is required and must be a valid string" }), {
status: 400,
headers: { "Content-Type": "application/json" },
});
}
// Ensure issuer URL ends without trailing slash for well-known discovery
const cleanIssuer = issuer.replace(/\/$/, "");
// Validate issuer URL format
let cleanIssuer: string;
try {
const issuerUrl = new URL(issuer.trim());
cleanIssuer = issuerUrl.toString().replace(/\/$/, ""); // Remove trailing slash
} catch (e) {
return new Response(
JSON.stringify({
error: "Invalid issuer URL format",
details: `The provided URL "${issuer}" is not a valid URL. For Authentik, use format: https://your-authentik-domain/application/o/<app-slug>/`
}),
{
status: 400,
headers: { "Content-Type": "application/json" },
}
);
}
const discoveryUrl = `${cleanIssuer}/.well-known/openid-configuration`;
try {
// Fetch OIDC discovery document
const response = await fetch(discoveryUrl);
// Fetch OIDC discovery document with timeout
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 10000); // 10 second timeout
let response: Response;
try {
response = await fetch(discoveryUrl, {
signal: controller.signal,
headers: {
'Accept': 'application/json',
}
});
} catch (fetchError) {
if (fetchError instanceof Error && fetchError.name === 'AbortError') {
throw new Error(`Request timeout: The OIDC provider at ${cleanIssuer} did not respond within 10 seconds`);
}
throw new Error(`Network error: Could not connect to ${cleanIssuer}. Please verify the URL is correct and accessible.`);
} finally {
clearTimeout(timeoutId);
}
if (!response.ok) {
throw new Error(`Failed to fetch discovery document: ${response.status}`);
if (response.status === 404) {
throw new Error(`OIDC discovery document not found at ${discoveryUrl}. For Authentik, ensure you're using the correct application slug in the URL.`);
} else if (response.status >= 500) {
throw new Error(`OIDC provider error (${response.status}): The server at ${cleanIssuer} returned an error.`);
} else {
throw new Error(`Failed to fetch discovery document (${response.status}): ${response.statusText}`);
}
}
const config = await response.json();
let config: any;
try {
config = await response.json();
} catch (parseError) {
throw new Error(`Invalid response: The discovery document from ${cleanIssuer} is not valid JSON.`);
}
// Extract the essential endpoints
const discoveredConfig = {