Mirror of https://github.com/RayLabsHQ/gitea-mirror.git, synced 2025-12-06 19:46:44 +03:00.

# Compare commits

23 commits (author and date columns were empty in the capture):

4ca4356ad1 · 3136a2120d · 615ebd5079 · 6e48d3f86c · c5de7e616d · 309f8c4341 · 0c596ac241 · 894be88a28 · 6ab7f0a5a0 · abe3113755 · f4bc28e6c2 · aaf8dc6fe4 · cda78bc0f5 · 9ccd656734 · 8b5c5d8ed2 · 1ab642c9e7 · 1eae725535 · 5bf52c806f · a15178d2cd · 32ef9124a7 · 161685b966 · 0cf95b2a0e · c896194aeb
## .github/workflows/astro-build-test.yml (4 changes, vendored)

```diff
@@ -38,10 +38,10 @@ jobs:
           bun install

       - name: Run tests
-        run: bunx vitest run
+        run: bun test --coverage

       - name: Build Astro project
-        run: bunx astro build
+        run: bunx --bun astro build

       - name: Upload build artifacts
         uses: actions/upload-artifact@v4
```
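The workflow now runs tests with Bun's built-in runner and builds Astro through Bun. A minimal sketch for reproducing the same steps on a local checkout (assuming Bun ≥ 1.2.9, per the `engines` field in package.json):

```bash
# Reproduce the CI steps from the workflow above
bun install                # install dependencies
bun test --coverage        # Jest-compatible Bun test runner, with coverage
bunx --bun astro build     # build the Astro project with Bun as the runtime
```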
## CHANGELOG.md (new file, 36 lines)

```markdown
# Changelog

All notable changes to the Gitea Mirror project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [2.5.2] - 2024-11-22

### Fixed

- Fixed version information in the health API for Docker deployments by setting the `npm_package_version` environment variable in the entrypoint script

## [2.5.1] - 2024-10-01

### Fixed

- Fixed the Docker entrypoint script to prevent unnecessary `bun install` on container startup
- Removed redundant dependency installation in Docker containers for pre-built images
- Fixed "PathAlreadyExists" errors during container initialization

### Changed

- Improved database initialization in the Docker entrypoint script
- Added additional checks for TypeScript versions of the database management scripts

## [2.5.0] - 2024-09-15

Initial public release with core functionality:

### Added

- GitHub to Gitea repository mirroring
- User authentication and management
- Dashboard with mirroring statistics
- Configuration management for mirroring settings
- Support for organization mirroring
- Automated mirroring with configurable schedules
- Docker multi-architecture support (amd64, arm64)
- LXC container deployment scripts
```
## Dockerfile

```diff
@@ -49,6 +49,6 @@ VOLUME /app/data
 EXPOSE 4321

 HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
-  CMD wget --no-verbose --tries=1 --spider http://localhost:4321/ || exit 1
+  CMD wget --no-verbose --tries=1 --spider http://localhost:4321/api/health || exit 1

 ENTRYPOINT ["./docker-entrypoint.sh"]
```
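Because the `HEALTHCHECK` now probes `/api/health`, Docker tracks the container's health state itself. A quick verification sketch — the container name `gitea-mirror` is illustrative:

```bash
# Health state derived from the HEALTHCHECK above: starting, healthy, or unhealthy
docker inspect --format '{{.State.Health.Status}}' gitea-mirror

# Recent probe results (exit codes and output), as JSON
docker inspect --format '{{json .State.Health.Log}}' gitea-mirror
```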
## README.md (46 changes)

````diff
@@ -19,8 +19,12 @@ docker compose --profile production up -d
 # Using Bun
 bun run setup && bun run dev

-# Using LXC on Proxmox
-curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-installer.sh | bash
+# Using LXC Containers
+# For Proxmox VE (online)
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
+
+# For local testing (offline-friendly)
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.sh
 ```

 See the [LXC Container Deployment Guide](scripts/README-lxc.md).
@@ -163,21 +167,37 @@ docker compose --profile production up -d

 See [Docker build documentation](./scripts/README-docker.md) for more details.

-##### Using LXC Containers (for Proxmox Homelab Setups)
+##### Using LXC Containers

-Gitea Mirror can be deployed on Proxmox LXC containers, which is ideal for homelab setups:
+Gitea Mirror offers two deployment options for LXC containers:
+
+**1. Proxmox VE (online, recommended for production)**

 ```bash
-# One-command installation on an Ubuntu 22.04 LXC container
-curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-installer.sh | bash
+# One-command installation on Proxmox VE
+# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
 ```

-The installer script:
-- Downloads the Gitea Mirror repository
-- Installs all dependencies including Bun
-- Builds the application
-- Sets up a systemd service
-- Starts the application
+**2. Local testing (offline-friendly, works on developer laptops)**
+
+```bash
+# Download the script
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-local.sh -o gitea-mirror-lxc-local.sh
+chmod +x gitea-mirror-lxc-local.sh
+
+# Run with your local repo directory
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./gitea-mirror-lxc-local.sh
+```
+
+Both scripts:
+- Set up a privileged Ubuntu 22.04 LXC container
+- Install the Bun runtime environment
+- Build the application
+- Configure a systemd service
+- Start the service automatically
+
+The application includes a health check endpoint at `/api/health` for monitoring.

 See the [LXC Container Deployment Guide](scripts/README-lxc.md) for detailed instructions.
@@ -379,7 +399,7 @@ docker compose -f docker-compose.dev.yml up -d
 - **Backend**: Bun
 - **Database**: SQLite (handles both data storage and event notifications)
 - **API Integration**: GitHub API (Octokit), Gitea API
-- **Deployment Options**: Docker containers, Proxmox LXC containers
+- **Deployment Options**: Docker containers, LXC containers (Proxmox VE and local testing)

 ## Contributing
````
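The Proxmox one-liner reads its container settings from environment variables, so the defaults can be overridden without editing the script. A sketch with illustrative values (the variable names come from the script's own override list):

```bash
# Env vars prefixed to bash are visible to the piped installer script
curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh \
  | CTID=120 CORES=2 MEMORY=2048 IP_CONF=dhcp bash
```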
## bun.lock (97 changes)

```diff
@@ -40,7 +40,6 @@
       "react-dom": "^19.1.0",
       "react-icons": "^5.5.0",
       "sonner": "^2.0.3",
-      "sqlite3": "^5.1.7",
       "superagent": "^10.2.1",
       "tailwind-merge": "^3.3.0",
       "tailwindcss": "^4.1.7",
@@ -637,7 +636,7 @@
     "chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],

-    "chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="],
+    "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],

     "ci-info": ["ci-info@4.2.0", "", {}, "sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg=="],
@@ -1187,7 +1186,7 @@
     "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],

-    "minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="],
+    "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],

     "minipass-collect": ["minipass-collect@1.0.2", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA=="],
@@ -1199,9 +1198,9 @@
     "minipass-sized": ["minipass-sized@1.0.3", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g=="],

-    "minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="],
+    "minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="],

-    "mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],
+    "mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="],

     "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="],
@@ -1491,7 +1490,7 @@
     "tapable": ["tapable@2.2.2", "", {}, "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg=="],

-    "tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="],
+    "tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="],

     "tar-fs": ["tar-fs@2.1.2", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA=="],
@@ -1643,7 +1642,7 @@
     "xxhash-wasm": ["xxhash-wasm@1.1.0", "", {}, "sha512-147y/6YNh+tlp6nd/2pWq38i9h6mz/EuQ6njIrmW8D1BS5nCqs0P6DG+m6zTGnNz5I+uhZ0SHxBs9BsPrwcKDA=="],

-    "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],

     "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
@@ -1667,7 +1666,7 @@
     "@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],

+    "@isaacs/fs-minipass/minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],
+    "@npmcli/move-file/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],

     "@octokit/plugin-paginate-rest/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
@@ -1675,8 +1674,6 @@
     "@rollup/pluginutils/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="],

-    "@tailwindcss/oxide/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="],

     "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.4.3", "", { "dependencies": { "@emnapi/wasi-threads": "1.0.2", "tslib": "^2.4.0" }, "bundled": true }, "sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g=="],

     "@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="],
@@ -1701,10 +1698,16 @@
     "boxen/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="],

+    "cacache/chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="],

     "cacache/lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="],

     "cacache/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

+    "cacache/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],

+    "cacache/tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="],

     "fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

     "gauge/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
@@ -1725,16 +1728,18 @@
     "minipass-fetch/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

+    "minipass-fetch/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="],

     "minipass-flush/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

     "minipass-pipeline/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

     "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

-    "minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

     "node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],

+    "node-gyp/tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="],

     "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],

     "pretty-format/ansi-styles": ["ansi-styles@5.2.0", "", {}, "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA=="],
@@ -1745,6 +1750,8 @@
     "socks-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="],

+    "sqlite3/tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="],

     "ssri/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],

     "strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="],
@@ -1761,34 +1768,80 @@
     "@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],

-    "@tailwindcss/oxide/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
-    "@tailwindcss/oxide/tar/minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],
-    "@tailwindcss/oxide/tar/minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="],
-    "@tailwindcss/oxide/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="],
-    "@tailwindcss/oxide/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],

     "ansi-align/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],

     "ansi-align/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],

+    "cacache/lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "cacache/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "cacache/tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="],
+    "cacache/tar/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="],
+    "cacache/tar/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "fs-minipass/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],

     "gauge/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],

     "make-fetch-happen/http-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="],

     "make-fetch-happen/https-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="],

+    "make-fetch-happen/lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "make-fetch-happen/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "minipass-collect/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "minipass-fetch/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "minipass-fetch/minizlib/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "minipass-flush/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "minipass-pipeline/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],

     "node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],

     "node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],

+    "node-gyp/tar/chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="],
+    "node-gyp/tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="],
+    "node-gyp/tar/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="],
+    "node-gyp/tar/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],
+    "node-gyp/tar/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],

     "send/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="],

+    "sqlite3/tar/chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="],
+    "sqlite3/tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="],
+    "sqlite3/tar/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="],
+    "sqlite3/tar/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="],
+    "sqlite3/tar/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+    "ssri/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],

     "wide-align/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],

     "wide-align/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],

+    "cacache/tar/minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],
+    "node-gyp/tar/minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],
+    "sqlite3/tar/minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="],
   }
 }
```
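Most of this churn is nested overrides pinning tar@6-era dependencies for older consumers while the top level moves to tar@7. To see which version of a package actually resolved, a sketch using Bun's package manager (run from the repo root):

```bash
# Print the resolved dependency tree from bun.lock; --all includes transitive deps
bun pm ls --all | grep -E 'tar@|minipass@|minizlib@'
```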
## docker-compose.yml

```diff
@@ -18,7 +18,7 @@ services:
       - DATABASE_URL=sqlite://data/gitea-mirror.db
       - DELAY=${DELAY:-3600}
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:4321/health"]
+      test: ["CMD", "curl", "-f", "http://localhost:4321/api/health"]
       interval: 1m
       timeout: 10s
       retries: 3
```
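Compose surfaces the same health state per service. Assuming the stack from this file is running and port 4321 is published to the host:

```bash
# Per-service status, including health from the healthcheck above
docker compose ps

# Probe the endpoint directly from the host
curl -f http://localhost:4321/api/health
```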
## docker-entrypoint.sh

```diff
@@ -5,11 +5,8 @@ set -e
 # Ensure data directory exists
 mkdir -p /app/data

-# If bun is available, run setup (for dev images)
-if command -v bun >/dev/null 2>&1; then
-  echo "Running bun setup (if needed)..."
-  bun run setup || true
-fi
+# Skip dependency installation entirely for pre-built images
+# Dependencies are already installed during the Docker build process

 # Initialize the database if it doesn't exist
 if [ ! -f "/app/data/gitea-mirror.db" ]; then
@@ -18,6 +15,8 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
     bun dist/scripts/init-db.js
   elif [ -f "dist/scripts/manage-db.js" ]; then
     bun dist/scripts/manage-db.js init
+  elif [ -f "scripts/manage-db.ts" ]; then
+    bun scripts/manage-db.ts init
   else
     echo "Warning: Could not find database initialization scripts in dist/scripts."
     echo "Creating and initializing database manually..."
@@ -111,9 +110,28 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
       status TEXT NOT NULL DEFAULT 'imported',
       message TEXT NOT NULL,
       timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+      -- New fields for job resilience
+      job_type TEXT NOT NULL DEFAULT 'mirror',
+      batch_id TEXT,
+      total_items INTEGER,
+      completed_items INTEGER DEFAULT 0,
+      item_ids TEXT, -- JSON array as text
+      completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+      in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+      started_at TIMESTAMP,
+      completed_at TIMESTAMP,
+      last_checkpoint TIMESTAMP,

       FOREIGN KEY (user_id) REFERENCES users(id)
     );

     CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);

     CREATE TABLE IF NOT EXISTS events (
       id TEXT PRIMARY KEY,
       user_id TEXT NOT NULL,
@@ -136,10 +154,29 @@ else
     bun dist/scripts/fix-db-issues.js
   elif [ -f "dist/scripts/manage-db.js" ]; then
     bun dist/scripts/manage-db.js fix
+  elif [ -f "scripts/manage-db.ts" ]; then
+    bun scripts/manage-db.ts fix
   fi

-  # Since the application is not used by anyone yet, we've removed the schema updates and migrations
-  echo "Database already exists, no migrations needed."
+  # Run database migrations
+  echo "Running database migrations..."
+
+  # Update mirror_jobs table with new columns for resilience
+  if [ -f "dist/scripts/update-mirror-jobs-table.js" ]; then
+    echo "Updating mirror_jobs table..."
+    bun dist/scripts/update-mirror-jobs-table.js
+  elif [ -f "scripts/update-mirror-jobs-table.ts" ]; then
+    echo "Updating mirror_jobs table using TypeScript script..."
+    bun scripts/update-mirror-jobs-table.ts
+  else
+    echo "Warning: Could not find mirror_jobs table update script."
+  fi
 fi

+# Extract version from package.json and set as environment variable
+if [ -f "package.json" ]; then
+  export npm_package_version=$(grep -o '"version": *"[^"]*"' package.json | cut -d'"' -f4)
+  echo "Setting application version: $npm_package_version"
+fi
+
 # Start the application
```
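The `grep | cut` pipeline keeps the entrypoint free of extra dependencies. Where `jq` happens to be available (it is not assumed by the entrypoint itself), an equivalent sketch:

```bash
# Equivalent version extraction with jq
export npm_package_version=$(jq -r .version package.json)

# Per CHANGELOG 2.5.2, the health API reports this version once the app is up
curl -s http://localhost:4321/api/health
```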
## docs/testing.md (new file, 127 lines)

````markdown
# Testing in Gitea Mirror

This document provides guidance on testing in the Gitea Mirror project.

## Current Status

The project now uses Bun's built-in test runner, which is Jest-compatible and provides a fast, reliable testing experience. We've migrated away from Vitest due to compatibility issues with Bun.

## Running Tests

To run tests, use the following commands:

```bash
# Run all tests
bun test

# Run tests in watch mode (automatically re-run when files change)
bun test --watch

# Run tests with coverage reporting
bun test --coverage
```

## Test File Naming Conventions

Bun's test runner automatically discovers test files that match the following patterns:

- `*.test.{js|jsx|ts|tsx}`
- `*_test.{js|jsx|ts|tsx}`
- `*.spec.{js|jsx|ts|tsx}`
- `*_spec.{js|jsx|ts|tsx}`

## Writing Tests

The project uses Bun's test runner with a Jest-compatible API. Here's an example test:

```typescript
// example.test.ts
import { describe, test, expect } from "bun:test";

describe("Example Test", () => {
  test("should pass", () => {
    expect(true).toBe(true);
  });
});
```

### Testing React Components

For testing React components, we use React Testing Library:

```typescript
// component.test.tsx
import { describe, test, expect } from "bun:test";
import { render, screen } from "@testing-library/react";
import MyComponent from "../components/MyComponent";

describe("MyComponent", () => {
  test("renders correctly", () => {
    render(<MyComponent />);
    expect(screen.getByText("Hello World")).toBeInTheDocument();
  });
});
```

## Test Setup

The test setup is defined in `src/tests/setup.bun.ts` and includes:

- Automatic cleanup after each test
- Setup for any global test environment needs

## Mocking

Bun's test runner provides built-in mocking capabilities:

```typescript
import { test, expect, mock } from "bun:test";

// Create a mock function
const mockFn = mock(() => "mocked value");

test("mock function", () => {
  const result = mockFn();
  expect(result).toBe("mocked value");
  expect(mockFn).toHaveBeenCalled();
});

// Mock a module
mock.module("./some-module", () => {
  return {
    someFunction: () => "mocked module function"
  };
});
```

## CI Integration

The CI workflow has been updated to use Bun's test runner. Tests are automatically run as part of the CI pipeline.

## Test Coverage

To generate test coverage reports, run:

```bash
bun test --coverage
```

This will generate a coverage report in the `coverage` directory.

## Types of Tests

The project includes several types of tests:

1. **Unit Tests**: Testing individual functions and utilities
2. **API Tests**: Testing API endpoints
3. **Component Tests**: Testing React components
4. **Integration Tests**: Testing how components work together

## Future Improvements

When expanding the test suite, consider:

1. Adding more comprehensive API endpoint tests
2. Increasing component test coverage
3. Setting up end-to-end tests with a tool like Playwright
4. Adding performance tests for critical paths
````
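Beyond the commands above, Bun's runner accepts file paths and name filters, which keeps feedback loops short during development; the path and pattern below are illustrative:

```bash
# Run a single test file
bun test src/tests/example.test.ts

# Run only tests whose names match a regex
bun test --test-name-pattern "mock function"
```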
## package.json (15 changes)

```diff
@@ -1,27 +1,29 @@
 {
   "name": "gitea-mirror",
   "type": "module",
-  "version": "2.0.0",
+  "version": "2.5.2",
   "engines": {
     "bun": ">=1.2.9"
   },
   "scripts": {
-    "setup": "bun install && bun run manage-db init",
+    "setup": "bun install && bun run manage-db init && bun run update-db",
     "dev": "bunx --bun astro dev",
-    "dev:clean": "bun run cleanup-db && bun run manage-db init && bunx --bun astro dev",
+    "dev:clean": "bun run cleanup-db && bun run manage-db init && bun run update-db && bunx --bun astro dev",
     "build": "bunx --bun astro build",
     "cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
     "manage-db": "bun scripts/manage-db.ts",
     "init-db": "bun scripts/manage-db.ts init",
+    "update-db": "bun scripts/update-mirror-jobs-table.ts",
     "check-db": "bun scripts/manage-db.ts check",
     "fix-db": "bun scripts/manage-db.ts fix",
     "reset-users": "bun scripts/manage-db.ts reset-users",
     "cleanup-events": "bun scripts/cleanup-events.ts",
     "preview": "bunx --bun astro preview",
     "start": "bun dist/server/entry.mjs",
-    "start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
-    "test": "bunx --bun vitest run",
-    "test:watch": "bunx --bun vitest",
+    "start:fresh": "bun run cleanup-db && bun run manage-db init && bun run update-db && bun dist/server/entry.mjs",
+    "test": "bun test",
+    "test:watch": "bun test --watch",
+    "test:coverage": "bun test --coverage",
     "astro": "bunx --bun astro"
   },
   "dependencies": {
@@ -61,7 +63,6 @@
     "react-dom": "^19.1.0",
     "react-icons": "^5.5.0",
     "sonner": "^2.0.3",
-    "sqlite3": "^5.1.7",
     "superagent": "^10.2.1",
     "tailwind-merge": "^3.3.0",
     "tailwindcss": "^4.1.7",
```
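With `update-db` folded into `setup`, a fresh checkout boots in two commands, straight from the scripts above:

```bash
bun run setup   # bun install + manage-db init + mirror_jobs table migration
bun run dev     # Astro dev server through Bun
```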
## scripts/README-lxc.md (rewritten)

````diff
@@ -1,109 +1,117 @@
 # LXC Container Deployment Guide

-This guide explains how to deploy the Gitea Mirror application on Proxmox LXC containers while keeping your existing Docker containers.
+## Overview
+
+Run **Gitea Mirror** in an isolated LXC container, either:

-## Prerequisites
+1. **Online, on a Proxmox VE host** – script pulls everything from GitHub
+2. **Offline / LAN-only, on a developer laptop** – script pushes your local checkout + Bun ZIP

-- Proxmox VE installed and configured
-- Basic knowledge of LXC containers and Proxmox
-- Access to Proxmox web interface or CLI
+---

-## Creating an LXC Container
+## 1. Proxmox VE (online, recommended for prod)

-1. In Proxmox web interface, create a new LXC container:
-   - Choose Ubuntu 22.04 as the template
-   - Allocate appropriate resources (2GB RAM, 2 CPU cores recommended)
-   - At least 10GB of disk space
-   - Configure networking as needed
+### Prerequisites
+* Proxmox VE node with the default `vmbr0` bridge
+* Root shell on the node
+* Ubuntu 22.04 LXC template present (`pveam update && pveam download ...`)

-2. Start the container and get a shell (either via Proxmox web console or SSH)
+### One-command install

-## Deploying Gitea Mirror
+```bash
+# optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
+sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
+```

-### Option 1: One-Command Installation (Recommended)
+What it does:

-This method allows you to install Gitea Mirror with a single command, without having to copy files manually:
+* Creates **privileged** CT `$CTID` with nesting enabled
+* Installs curl / git / Bun (official installer)
+* Clones & builds `arunavo4/gitea-mirror`
+* Writes a root-run systemd service and starts it
+* Prints the container IP + random `JWT_SECRET`

-1. SSH into your LXC container:
-   ```bash
-   ssh root@lxc-container-ip
-   ```
+Browse to:

-2. Run the installer script directly:
-   ```bash
-   curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-installer.sh | bash
-   ```
+```
+http://<container-ip>:4321
+```

-3. The installer will:
-   - Download the Gitea Mirror repository
-   - Install all dependencies including Bun
-   - Build the application
-   - Set up a systemd service
-   - Start the application
-   - Display access information
+---

-### Option 2: Manual Setup
+## 2. Local testing (LXD on a workstation, works offline)

-If you prefer to set up manually or the automatic script doesn't work for your environment:
+### Prerequisites

-1. Install dependencies:
-   ```bash
-   apt update
-   apt install -y curl git sqlite3 build-essential
-   ```
+* `lxd` installed (`sudo apt install lxd`; `lxd init --auto`)
+* Your repo cloned locally – e.g. `~/Development/gitea-mirror`
+* Bun ZIP downloaded once:
+  `https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip`

-2. Install Bun:
-   ```bash
-   curl -fsSL https://bun.sh/install | bash
-   export BUN_INSTALL="/root/.bun"
-   export PATH="$BUN_INSTALL/bin:$PATH"
-   ```
+### Offline installer script

-3. Clone or copy your project:
-   ```bash
-   git clone https://github.com/yourusername/gitea-mirror.git /opt/gitea-mirror
-   cd /opt/gitea-mirror
-   ```
+```bash
+git clone https://github.com/arunavo4/gitea-mirror.git   # if not already
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-local.sh -o gitea-mirror-lxc-local.sh
+chmod +x gitea-mirror-lxc-local.sh

-4. Build and initialize:
-   ```bash
-   bun install
-   bun run build
-   bun run manage-db init
-   ```
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror \
+     ./gitea-mirror-lxc-local.sh
+```

-5. Create a systemd service manually:
-   ```bash
-   nano /etc/systemd/system/gitea-mirror.service
-   # Add the service configuration as shown below:
+What it does:

-   [Unit]
-   Description=Gitea Mirror
-   After=network.target
+* Launches privileged LXC `gitea-test` (`lxc launch ubuntu:22.04 ...`)
+* Pushes **Bun ZIP** + tarred **local repo** into `/opt`
+* Unpacks, builds, initializes DB
+* Symlinks both `bun` and `bunx` → `/usr/local/bin`
+* Creates a root systemd unit and starts it

-   [Service]
-   Type=simple
-   WorkingDirectory=/opt/gitea-mirror
-   ExecStart=/root/.bun/bin/bun dist/server/entry.mjs
-   Restart=on-failure
-   RestartSec=10
-   User=gitea-mirror
-   Group=gitea-mirror
-   Environment=NODE_ENV=production
-   Environment=HOST=0.0.0.0
-   Environment=PORT=4321
-   Environment=DATABASE_URL=file:data/gitea-mirror.db
-   Environment=JWT_SECRET=your-secure-secret-key
+Access from host:

-   [Install]
-   WantedBy=multi-user.target
-   ```
+```
+http://$(lxc exec gitea-test -- hostname -I | awk '{print $1}'):4321
+```

-6. Enable and start the service:
-   ```bash
-   systemctl enable gitea-mirror.service
-   systemctl start gitea-mirror.service
-   ```
+(Optional) forward to host localhost:
+
+```bash
+sudo lxc config device add gitea-test mirror proxy \
+     listen=tcp:0.0.0.0:4321 connect=tcp:127.0.0.1:4321
+```
+
+---
+
+## Health-check endpoint
+
+Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:
+
+- System status and uptime
+- Database connectivity check
+- Memory usage statistics
+- Environment information
+
+You can use this endpoint for monitoring your deployment:
+
+```bash
+# Basic check (returns 200 OK if healthy)
+curl -I http://<container-ip>:4321/api/health
+
+# Detailed health information (JSON)
+curl http://<container-ip>:4321/api/health
+```
+
+---
+
+## Troubleshooting
+
+| Check          | Command                                               |
+| -------------- | ----------------------------------------------------- |
+| Service status | `systemctl status gitea-mirror`                       |
+| Live logs      | `journalctl -u gitea-mirror -f`                       |
+| Verify Bun     | `bun --version && bunx --version`                     |
+| DB perms       | `chown -R root:root /opt/gitea-mirror/data` (Proxmox) |
+
+---

 ## Connecting LXC and Docker Containers
@@ -121,32 +129,3 @@ If you need your LXC container to communicate with Docker containers:
 ```

 3. In Proxmox, edit the LXC container's network configuration to use this bridge.
-
-## Accessing the Application
-
-Once deployed, you can access the Gitea Mirror application at:
-```
-http://lxc-container-ip:4321
-```
-
-## Troubleshooting
-
-- Check service status:
-  ```bash
-  systemctl status gitea-mirror
-  ```
-
-- View logs:
-  ```bash
-  journalctl -u gitea-mirror -f
-  ```
-
-- If the service fails to start, check permissions on the data directory:
-  ```bash
-  chown -R gitea-mirror:gitea-mirror /opt/gitea-mirror/data
-  ```
-
-- Verify Bun is installed correctly:
-  ```bash
-  bun --version
-  ```
````
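The optional forward added with `lxc config device add` is ordinary LXD configuration and can be inspected or undone with standard commands (device name `mirror` as in the guide above):

```bash
# List devices on the container, including the port-forward proxy
lxc config device show gitea-test

# Remove the forward when it is no longer wanted
lxc config device remove gitea-test mirror
```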
## scripts/README.md (rewritten)

````diff
@@ -1,12 +1,14 @@
 # Scripts Directory

-This folder contains utility scripts for database management.
+This folder contains utility scripts for database management, event management, Docker builds, and LXC container deployment.

-## Database Management Tool (manage-db.ts)
+## Database Management
+
+### Database Management Tool (manage-db.ts)

 This is a consolidated database management tool that handles all database-related operations. It combines the functionality of the previous separate scripts into a single, more intelligent script that can check, fix, and initialize the database as needed.

-### Features
+#### Features

 - **Check Mode**: Validates the existence and integrity of the database
 - **Init Mode**: Creates the database only if it doesn't already exist
@@ -14,12 +16,12 @@ This is a consolidated database management tool that handles all database-relate
 - **Reset Users Mode**: Removes all users and their data
 - **Auto Mode**: Automatically checks, fixes, and initializes the database if needed

-## Running the Database Management Tool
+#### Running the Database Management Tool

 You can execute the database management tool using your package manager with various commands:

 ```bash
-# Checks database status (default action if no command is specified, equivalent to 'bun run check-db')
+# Checks database status (default action if no command is specified)
 bun run manage-db

 # Check database status
@@ -37,9 +39,6 @@ bun run db-auto
 # Reset all users (for testing signup flow)
 bun run reset-users

-# Update the database schema to the latest version
-bun run update-schema
-
 # Remove database files completely
 bun run cleanup-db
@@ -53,6 +52,70 @@ bun run dev:clean
 bun run start:fresh
 ```

-## Database File Location
+#### Database File Location

 The database file should be located in the `./data/gitea-mirror.db` directory. If the file is found in the root directory, the fix mode will move it to the correct location.
+
+## Event Management
+
+The following scripts help manage events in the SQLite database:
+
+### Event Inspection (check-events.ts)
+
+Displays all events currently stored in the database.
+
+```bash
+bun scripts/check-events.ts
+```
+
+### Event Cleanup (cleanup-events.ts)
+
+Removes old events from the database to prevent it from growing too large.
+
+```bash
+# Remove events older than 7 days (default)
+bun scripts/cleanup-events.ts
+
+# Remove events older than X days
+bun scripts/cleanup-events.ts 14
+```
+
+This script can be scheduled to run periodically (e.g., daily) using cron or another scheduler.
+
+### Mark Events as Read (mark-events-read.ts)
+
+Marks all unread events as read.
+
+```bash
+bun scripts/mark-events-read.ts
+```
+
+### Make Events Appear Older (make-events-old.ts)
+
+For testing purposes, this script modifies event timestamps to make them appear older.
+
+```bash
+bun scripts/make-events-old.ts
+```
+
+## Deployment Scripts
+
+### Docker Deployment
+
+- **build-docker.sh**: Builds the Docker image for the application
+- **docker-diagnostics.sh**: Provides diagnostic information for Docker deployments
+
+### LXC Container Deployment
+
+Two scripts are provided for deploying Gitea Mirror in LXC containers:
+
+1. **gitea-mirror-lxc-proxmox.sh**: For online deployment on a Proxmox VE host
+   - Pulls everything from GitHub
+   - Creates a privileged container with the application
+   - Sets up systemd service
+
+2. **gitea-mirror-lxc-local.sh**: For offline/LAN-only deployment on a developer laptop
+   - Pushes your local checkout + Bun ZIP to the container
+   - Useful for testing without internet access
+
+For detailed instructions on LXC deployment, see [README-lxc.md](./README-lxc.md).
````
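Since `cleanup-events.ts` is built for periodic runs, cron is the natural scheduler. A sketch assuming the repo lives at `/opt/gitea-mirror` and `bun` is on the crontab user's PATH (both assumptions):

```bash
# crontab entry: prune events older than the default 7 days, nightly at 03:00
0 3 * * * cd /opt/gitea-mirror && bun scripts/cleanup-events.ts >> /var/log/gitea-mirror-cleanup.log 2>&1
```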
## scripts/gitea-mirror-lxc-installer.sh (deleted file, 188 lines)

The previous self-contained installer, removed in this compare:

```bash
#!/bin/bash
# Gitea Mirror LXC Container Installer
# This is a self-contained script to install Gitea Mirror in an LXC container
# Usage: curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-installer.sh | bash

set -e

# Configuration variables - change these as needed
INSTALL_DIR="/opt/gitea-mirror"
REPO_URL="https://github.com/arunavo4/gitea-mirror.git"
SERVICE_USER="gitea-mirror"
PORT=4321

# Color codes for better readability
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Print banner
echo -e "${BLUE}"
echo "╔════════════════════════════════════════════════════════════╗"
echo "║                                                            ║"
echo "║             Gitea Mirror LXC Container Installer           ║"
echo "║                                                            ║"
echo "╚════════════════════════════════════════════════════════════╝"
echo -e "${NC}"

# Ensure script is run as root
if [ "$(id -u)" -ne 0 ]; then
  echo -e "${RED}This script must be run as root${NC}" >&2
  exit 1
fi

echo -e "${GREEN}Starting Gitea Mirror installation...${NC}"

# Check if we're in an LXC container
if [ -d /proc/vz ] && [ ! -d /proc/bc ]; then
  echo -e "${YELLOW}Running in an OpenVZ container. Some features may not work.${NC}"
elif [ -f /proc/1/environ ] && grep -q container=lxc /proc/1/environ; then
  echo -e "${GREEN}Running in an LXC container. Good!${NC}"
else
  echo -e "${YELLOW}Not running in a container. This script is designed for LXC containers.${NC}"
  read -p "Continue anyway? (y/n) " -n 1 -r
  echo
  if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo -e "${RED}Installation aborted.${NC}"
    exit 1
  fi
fi

# Install dependencies
echo -e "${BLUE}Step 1/7: Installing dependencies...${NC}"
apt update
apt install -y curl git sqlite3 build-essential openssl

# Create service user
echo -e "${BLUE}Step 2/7: Creating service user...${NC}"
if id "$SERVICE_USER" &>/dev/null; then
  echo -e "${YELLOW}User $SERVICE_USER already exists${NC}"
else
  useradd -m -s /bin/bash "$SERVICE_USER"
  echo -e "${GREEN}Created user $SERVICE_USER${NC}"
fi

# Install Bun
echo -e "${BLUE}Step 3/7: Installing Bun runtime...${NC}"
if command -v bun >/dev/null 2>&1; then
  echo -e "${YELLOW}Bun is already installed${NC}"
  bun --version
else
  echo -e "${GREEN}Installing Bun...${NC}"
  curl -fsSL https://bun.sh/install | bash
  export BUN_INSTALL=${BUN_INSTALL:-"/root/.bun"}
  export PATH="$BUN_INSTALL/bin:$PATH"
  echo -e "${GREEN}Bun installed successfully${NC}"
  bun --version
fi

# Clone repository
echo -e "${BLUE}Step 4/7: Downloading Gitea Mirror...${NC}"
if [ -d "$INSTALL_DIR" ]; then
  echo -e "${YELLOW}Directory $INSTALL_DIR already exists${NC}"
  read -p "Update existing installation? (y/n) " -n 1 -r
  echo
  if [[ $REPLY =~ ^[Yy]$ ]]; then
    cd "$INSTALL_DIR"
    git pull
    echo -e "${GREEN}Repository updated${NC}"
  else
    echo -e "${YELLOW}Using existing installation${NC}"
  fi
else
  echo -e "${GREEN}Cloning repository...${NC}"
  git clone "$REPO_URL" "$INSTALL_DIR"
  echo -e "${GREEN}Repository cloned to $INSTALL_DIR${NC}"
fi

# Set up application
echo -e "${BLUE}Step 5/7: Setting up application...${NC}"
cd "$INSTALL_DIR"

# Create data directory with proper permissions
mkdir -p data
chown -R "$SERVICE_USER:$SERVICE_USER" data

# Install dependencies and build
echo -e "${GREEN}Installing dependencies and building application...${NC}"
bun install
bun run build

# Initialize database if it doesn't exist
echo -e "${GREEN}Initializing database...${NC}"
if [ ! -f "data/gitea-mirror.db" ]; then
  bun run manage-db init
  chown "$SERVICE_USER:$SERVICE_USER" data/gitea-mirror.db
fi

# Generate a random JWT secret if not provided
JWT_SECRET=${JWT_SECRET:-$(openssl rand -hex 32)}

# Create systemd service
echo -e "${BLUE}Step 6/7: Creating systemd service...${NC}"

# Store Bun path in a variable for better maintainability
BUN_PATH=$(command -v bun)
echo -e "${GREEN}Using Bun from: $BUN_PATH${NC}"

cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target

[Service]
Type=simple
WorkingDirectory=$INSTALL_DIR
ExecStart=$BUN_PATH dist/server/entry.mjs
Restart=on-failure
RestartSec=10
User=$SERVICE_USER
Group=$SERVICE_USER
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=${JWT_SECRET}

[Install]
WantedBy=multi-user.target
SERVICE

# Start service
echo -e "${BLUE}Step 7/7: Starting service...${NC}"
systemctl daemon-reload
systemctl enable gitea-mirror.service
systemctl start gitea-mirror.service

# Check if service started successfully
if systemctl is-active --quiet gitea-mirror.service; then
  echo -e "${GREEN}Gitea Mirror service started successfully!${NC}"
else
  echo -e "${RED}Failed to start Gitea Mirror service. Check logs with: journalctl -u gitea-mirror${NC}"
  exit 1
fi

# Get IP address
IP_ADDRESS=$(hostname -I | awk '{print $1}')

# Print success message
echo -e "${GREEN}"
echo "╔════════════════════════════════════════════════════════════╗"
echo "║                                                            ║"
echo "║             Gitea Mirror Installation Complete             ║"
echo "║                                                            ║"
echo "╚════════════════════════════════════════════════════════════╝"
echo -e "${NC}"
echo -e "${GREEN}Gitea Mirror is now running at: http://$IP_ADDRESS:$PORT${NC}"
echo
echo -e "${YELLOW}Important security information:${NC}"
echo -e "JWT_SECRET: ${JWT_SECRET}"
echo -e "${YELLOW}Please save this JWT_SECRET in a secure location.${NC}"
echo
echo -e "${BLUE}To check service status:${NC} systemctl status gitea-mirror"
echo -e "${BLUE}To view logs:${NC} journalctl -u gitea-mirror -f"
echo -e "${BLUE}Data directory:${NC} $INSTALL_DIR/data"
echo
echo -e "${GREEN}Thank you for installing Gitea Mirror!${NC}"
```
## scripts/gitea-mirror-lxc-local.sh (new executable file, 86 lines)

```bash
#!/usr/bin/env bash
# gitea-mirror-lxc-local.sh (offline, local repo, verbose)

set -euo pipefail

CONTAINER="gitea-test"
IMAGE="ubuntu:22.04"
INSTALL_DIR="/opt/gitea-mirror"
PORT=4321
JWT_SECRET="$(openssl rand -hex 32)"

BUN_ZIP="/tmp/bun-linux-x64.zip"
BUN_URL="https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip"

LOCAL_REPO_DIR="${LOCAL_REPO_DIR:-./gitea-mirror}"
REPO_TAR="/tmp/gitea-mirror-local.tar.gz"

need() { command -v "$1" >/dev/null || { echo "Missing $1"; exit 1; }; }
need curl; need lxc; need tar; need unzip

# ── build host artefacts ────────────────────────────────────────────────
[[ -d $LOCAL_REPO_DIR ]] || { echo "❌ LOCAL_REPO_DIR not found"; exit 1; }
[[ -f $LOCAL_REPO_DIR/package.json ]] || { echo "❌ package.json missing"; exit 1; }
[[ -f $BUN_ZIP ]] || curl -L --retry 5 --retry-delay 5 -o "$BUN_ZIP" "$BUN_URL"
tar -czf "$REPO_TAR" -C "$(dirname "$LOCAL_REPO_DIR")" "$(basename "$LOCAL_REPO_DIR")"

# ── ensure container exists ─────────────────────────────────────────────
lxd init --auto >/dev/null 2>&1 || true
lxc info "$CONTAINER" >/dev/null 2>&1 || lxc launch "$IMAGE" "$CONTAINER"

echo "🔧 installing base packages…"
sudo lxc exec "$CONTAINER" -- bash -c 'set -ex; apt update; apt install -y unzip tar openssl sqlite3'

echo "⬆️  pushing artefacts…"
sudo lxc file push "$BUN_ZIP" "$CONTAINER/opt/"
sudo lxc file push "$REPO_TAR" "$CONTAINER/opt/"

echo "📦 unpacking Bun + repo…"
sudo lxc exec "$CONTAINER" -- bash -ex <<'IN'
cd /opt
# Bun
unzip -oq bun-linux-x64.zip -d bun
BIN=$(find /opt/bun -type f -name bun -perm -111 | head -n1)
ln -sf "$BIN" /usr/local/bin/bun    # bun
ln -sf "$BIN" /usr/local/bin/bunx   # bunx shim
# Repo
rm -rf /opt/gitea-mirror
mkdir -p /opt/gitea-mirror
tar -xzf gitea-mirror-local.tar.gz --strip-components=1 -C /opt/gitea-mirror
IN

echo "🏗️  bun install / build…"
sudo lxc exec "$CONTAINER" -- bash -ex <<'IN'
cd /opt/gitea-mirror
bun install
bun run build
bun run manage-db init
IN

echo "📝 systemd unit…"
sudo lxc exec "$CONTAINER" -- bash -ex <<IN
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target
[Service]
Type=simple
WorkingDirectory=$INSTALL_DIR
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
Restart=on-failure
RestartSec=10
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=$JWT_SECRET
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable gitea-mirror
systemctl restart gitea-mirror
IN

echo -e "\n✅ finished; service status:"
sudo lxc exec "$CONTAINER" -- systemctl status gitea-mirror --no-pager
```
scripts/gitea-mirror-lxc-proxmox.sh (new executable file, 97 lines)
@@ -0,0 +1,97 @@
#!/usr/bin/env bash
# gitea-mirror-lxc-proxmox.sh
# Fully online installer for a Proxmox LXC guest running Gitea Mirror + Bun.

set -euo pipefail

# ────── adjustable defaults ──────────────────────────────────────────────
CTID=${CTID:-106}                 # container ID
HOSTNAME=${HOSTNAME:-gitea-mirror}
STORAGE=${STORAGE:-local-lvm}     # where rootfs lives
DISK_SIZE=${DISK_SIZE:-8G}
CORES=${CORES:-2}
MEMORY=${MEMORY:-2048}            # MiB
BRIDGE=${BRIDGE:-vmbr0}
IP_CONF=${IP_CONF:-dhcp}          # or "192.168.1.240/24,gw=192.168.1.1"

PORT=4321
JWT_SECRET=$(openssl rand -hex 32)

REPO="https://github.com/arunavo4/gitea-mirror.git"
# ─────────────────────────────────────────────────────────────────────────

TEMPLATE='ubuntu-22.04-standard_22.04-1_amd64.tar.zst'
TEMPLATE_PATH="/var/lib/vz/template/cache/${TEMPLATE}"

echo "▶️ Ensuring template exists…"
if [[ ! -f $TEMPLATE_PATH ]]; then
  pveam update >/dev/null
  # Templates must live on a storage that accepts vztmpl content.
  # TEMPLATE_PATH above assumes the default "local" storage, so download
  # there rather than to $STORAGE (local-lvm cannot hold templates).
  pveam download local "$TEMPLATE"
fi

echo "▶️ Creating container $CTID (if missing)…"
if ! pct status "$CTID" &>/dev/null; then
  pct create "$CTID" "$TEMPLATE_PATH" \
    --rootfs "$STORAGE:$DISK_SIZE" \
    --hostname "$HOSTNAME" \
    --cores "$CORES" --memory "$MEMORY" \
    --net0 "name=eth0,bridge=$BRIDGE,ip=$IP_CONF" \
    --features nesting=1 \
    --unprivileged 0
fi

pct start "$CTID"

echo "▶️ Installing base packages inside CT $CTID…"
pct exec "$CTID" -- bash -c 'apt update && apt install -y curl git build-essential openssl sqlite3 unzip'

echo "▶️ Installing Bun runtime…"
pct exec "$CTID" -- bash -c '
  export BUN_INSTALL=/opt/bun
  curl -fsSL https://bun.sh/install | bash -s -- --yes
  ln -sf /opt/bun/bin/bun /usr/local/bin/bun
  ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
  bun --version
'

echo "▶️ Cloning & building Gitea Mirror…"
pct exec "$CTID" -- bash -c "
  git clone --depth=1 '$REPO' /opt/gitea-mirror || (cd /opt/gitea-mirror && git pull)
  cd /opt/gitea-mirror
  bun install
  bun run build
  bun run manage-db init
"

echo "▶️ Creating systemd service…"
pct exec "$CTID" -- bash -c "
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target
[Service]
Type=simple
WorkingDirectory=/opt/gitea-mirror
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
Restart=on-failure
RestartSec=10
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=$JWT_SECRET
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable gitea-mirror
systemctl restart gitea-mirror
"

echo -e "\n🔍 Service status:"
pct exec "$CTID" -- systemctl status gitea-mirror --no-pager | head -n15

GUEST_IP=$(pct exec "$CTID" -- hostname -I | awk '{print $1}')
echo -e "\n🌐 Browse to: http://$GUEST_IP:$PORT\n"
echo "🗝️ JWT_SECRET = $JWT_SECRET"
echo -e "\n✅ Done – Gitea Mirror is running in CT $CTID."
@@ -145,9 +145,31 @@ async function ensureTablesExist() {
        status TEXT NOT NULL DEFAULT 'imported',
        message TEXT NOT NULL,
        timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

        -- New fields for job resilience
        job_type TEXT NOT NULL DEFAULT 'mirror',
        batch_id TEXT,
        total_items INTEGER,
        completed_items INTEGER DEFAULT 0,
        item_ids TEXT, -- JSON array as text
        completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
        in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
        started_at TIMESTAMP,
        completed_at TIMESTAMP,
        last_checkpoint TIMESTAMP,

        FOREIGN KEY (user_id) REFERENCES users(id)
      )
    `);

    // Create indexes for better performance
    db.exec(`
      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
    `);
    break;
  case "events":
    db.exec(`
scripts/update-mirror-jobs-table.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
#!/usr/bin/env bun
/**
 * Script to update the mirror_jobs table with new columns for resilience
 */

import { Database } from "bun:sqlite";
import fs from "fs";
import path from "path";

// Define the database paths
const dataDir = path.join(process.cwd(), "data");
const dbPath = path.join(dataDir, "gitea-mirror.db");

// Ensure data directory exists
if (!fs.existsSync(dataDir)) {
  fs.mkdirSync(dataDir, { recursive: true });
  console.log(`Created data directory at ${dataDir}`);
}

// Check if database exists
if (!fs.existsSync(dbPath)) {
  console.error(`Database file not found at ${dbPath}`);
  console.error("Please run 'bun run init-db' first to create the database.");
  process.exit(1);
}

// Connect to the database
const db = new Database(dbPath);

// Enable foreign keys
db.exec("PRAGMA foreign_keys = ON;");

// Function to check if a column exists in a table
function columnExists(tableName: string, columnName: string): boolean {
  const result = db.query(
    `PRAGMA table_info(${tableName})`
  ).all() as { name: string }[];

  return result.some(column => column.name === columnName);
}

// Main function to update the mirror_jobs table
async function updateMirrorJobsTable() {
  console.log("Checking mirror_jobs table for missing columns...");

  // Start a transaction
  db.exec("BEGIN TRANSACTION;");

  try {
    // Check and add each new column if it doesn't exist
    const columnsToAdd = [
      { name: "job_type", definition: "TEXT NOT NULL DEFAULT 'mirror'" },
      { name: "batch_id", definition: "TEXT" },
      { name: "total_items", definition: "INTEGER" },
      { name: "completed_items", definition: "INTEGER DEFAULT 0" },
      { name: "item_ids", definition: "TEXT" }, // JSON array as text
      { name: "completed_item_ids", definition: "TEXT DEFAULT '[]'" }, // JSON array as text
      { name: "in_progress", definition: "INTEGER NOT NULL DEFAULT 0" }, // Boolean as integer
      { name: "started_at", definition: "TIMESTAMP" },
      { name: "completed_at", definition: "TIMESTAMP" },
      { name: "last_checkpoint", definition: "TIMESTAMP" }
    ];

    let columnsAdded = 0;

    for (const column of columnsToAdd) {
      if (!columnExists("mirror_jobs", column.name)) {
        console.log(`Adding column '${column.name}' to mirror_jobs table...`);
        db.exec(`ALTER TABLE mirror_jobs ADD COLUMN ${column.name} ${column.definition};`);
        columnsAdded++;
      }
    }

    // Commit the transaction
    db.exec("COMMIT;");

    if (columnsAdded > 0) {
      console.log(`✅ Added ${columnsAdded} new columns to mirror_jobs table.`);
    } else {
      console.log("✅ All required columns already exist in mirror_jobs table.");
    }

    // Create indexes for better performance
    console.log("Creating indexes for mirror_jobs table...");

    // Only create indexes if they don't exist
    const indexesResult = db.query(
      `SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='mirror_jobs'`
    ).all() as { name: string }[];

    const existingIndexes = indexesResult.map(idx => idx.name);

    const indexesToCreate = [
      { name: "idx_mirror_jobs_user_id", columns: "user_id" },
      { name: "idx_mirror_jobs_batch_id", columns: "batch_id" },
      { name: "idx_mirror_jobs_in_progress", columns: "in_progress" },
      { name: "idx_mirror_jobs_job_type", columns: "job_type" },
      { name: "idx_mirror_jobs_timestamp", columns: "timestamp" }
    ];

    let indexesCreated = 0;

    for (const index of indexesToCreate) {
      if (!existingIndexes.includes(index.name)) {
        console.log(`Creating index '${index.name}'...`);
        db.exec(`CREATE INDEX ${index.name} ON mirror_jobs(${index.columns});`);
        indexesCreated++;
      }
    }

    if (indexesCreated > 0) {
      console.log(`✅ Created ${indexesCreated} new indexes for mirror_jobs table.`);
    } else {
      console.log("✅ All required indexes already exist for mirror_jobs table.");
    }

    console.log("Mirror jobs table update completed successfully.");
  } catch (error) {
    // Rollback the transaction in case of error
    db.exec("ROLLBACK;");
    console.error("❌ Error updating mirror_jobs table:", error);
    process.exit(1);
  } finally {
    // Close the database connection
    db.close();
  }
}

// Run the update function
updateMirrorJobsTable().catch(error => {
  console.error("Unhandled error:", error);
  process.exit(1);
});
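The columns added by this migration are what make long-running jobs resumable: a worker records each finished item plus a timestamp, so a restarted process can see how far it got. A minimal sketch of that checkpoint write using bun:sqlite directly (the `recordCheckpoint` helper name is hypothetical; the application itself goes through Drizzle, as the hunks further down show):

```typescript
import { Database } from "bun:sqlite";

const db = new Database("data/gitea-mirror.db");

// Hypothetical helper: append one finished item to a job's checkpoint state.
// Column names match the mirror_jobs DDL above; JSON arrays are stored as text.
function recordCheckpoint(jobId: string, itemId: string) {
  const row = db
    .query(`SELECT completed_item_ids FROM mirror_jobs WHERE id = ?`)
    .get(jobId) as { completed_item_ids: string } | null;
  if (!row) return;

  const done: string[] = JSON.parse(row.completed_item_ids || "[]");
  if (!done.includes(itemId)) done.push(itemId);

  db.query(
    `UPDATE mirror_jobs
     SET completed_item_ids = ?, completed_items = ?, last_checkpoint = CURRENT_TIMESTAMP
     WHERE id = ?`
  ).run(JSON.stringify(done), done.length, jobId);
}
```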
@@ -2,6 +2,7 @@ import { useEffect, useState } from "react";
import { cn } from "@/lib/utils";
import { ExternalLink } from "lucide-react";
import { links } from "@/data/Sidebar";
import { VersionInfo } from "./VersionInfo";

interface SidebarProps {
  className?: string;
@@ -19,7 +20,7 @@ export function Sidebar({ className }: SidebarProps) {

  return (
    <aside className={cn("w-64 border-r bg-background", className)}>
      <div className="flex flex-col h-full py-4">
      <div className="flex flex-col h-full pt-4">
        <nav className="flex flex-col gap-y-1 pl-2 pr-3">
          {links.map((link, index) => {
            const isActive = currentPath === link.href;
@@ -59,6 +60,7 @@ export function Sidebar({ className }: SidebarProps) {
              <ExternalLink className="h-3 w-3" />
            </a>
          </div>
          <VersionInfo />
        </div>
      </div>
    </aside>
src/components/layout/VersionInfo.tsx (new file, 49 lines)
@@ -0,0 +1,49 @@
import { useEffect, useState } from "react";
import { healthApi } from "@/lib/api";

export function VersionInfo() {
  const [versionInfo, setVersionInfo] = useState<{
    current: string;
    latest: string;
    updateAvailable: boolean;
  }>({
    current: "loading...",
    latest: "",
    updateAvailable: false
  });

  useEffect(() => {
    const fetchVersion = async () => {
      try {
        const healthData = await healthApi.check();
        setVersionInfo({
          current: healthData.version || "unknown",
          latest: healthData.latestVersion || "unknown",
          updateAvailable: healthData.updateAvailable || false
        });
      } catch (error) {
        console.error("Failed to fetch version:", error);
        setVersionInfo({
          current: "unknown",
          latest: "",
          updateAvailable: false
        });
      }
    };

    fetchVersion();
  }, []);

  return (
    <div className="text-xs text-muted-foreground text-center pt-2 pb-3 border-t border-border mt-2">
      {versionInfo.updateAvailable ? (
        <div className="flex flex-col">
          <span>v{versionInfo.current}</span>
          <span className="text-primary">v{versionInfo.latest} available</span>
        </div>
      ) : (
        <span>v{versionInfo.current}</span>
      )}
    </div>
  );
}
@@ -2,7 +2,7 @@
title: "Architecture"
description: "Comprehensive overview of the Gitea Mirror application architecture."
order: 1
updatedDate: 2023-10-15
updatedDate: 2025-05-22
---

<div class="mb-6">
@@ -21,17 +21,18 @@ The application is built using:
- <span class="font-semibold text-foreground">Astro</span>: Web framework for the frontend
- <span class="font-semibold text-foreground">React</span>: Component library for interactive UI elements
- <span class="font-semibold text-foreground">Shadcn UI</span>: UI component library built on Tailwind CSS
- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration and state
- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration, state, and events
- <span class="font-semibold text-foreground">Bun</span>: Runtime environment for the backend
- <span class="font-semibold text-foreground">Drizzle ORM</span>: Type-safe ORM for database interactions

## Architecture Diagram

```mermaid
graph TD
    subgraph "Gitea Mirror"
        Frontend["Frontend<br/>(Astro)"]
        Frontend["Frontend<br/>(Astro + React)"]
        Backend["Backend<br/>(Bun)"]
        Database["Database<br/>(SQLite)"]
        Database["Database<br/>(SQLite + Drizzle)"]

        Frontend <--> Backend
        Backend <--> Database
@@ -70,14 +71,15 @@ The backend is built with Bun and provides API endpoints for the frontend to int
- Mirroring operations
- Database interactions

### Database (SQLite)
### Database (SQLite + Drizzle ORM)

SQLite is used for data persistence, storing:
SQLite with Bun's native SQLite driver is used for data persistence, with Drizzle ORM providing type-safe database interactions. The database stores:

- User accounts and authentication data
- GitHub and Gitea configuration
- Repository and organization information
- Mirroring job history and status
- Event notifications and their read status

## Data Flow

@@ -93,11 +95,30 @@ SQLite is used for data persistence, storing:
gitea-mirror/
├── src/                # Source code
│   ├── components/     # React components
│   ├── content/        # Documentation and content
│   ├── layouts/        # Astro layout components
│   ├── lib/            # Utility functions and database
│   ├── pages/          # Astro pages and API routes
│   └── styles/         # CSS and Tailwind styles
├── public/             # Static assets
├── data/               # Database and persistent data
└── docker/             # Docker configuration
├── docker/             # Docker configuration
└── scripts/            # Utility scripts for deployment and maintenance
    ├── gitea-mirror-lxc-proxmox.sh  # Proxmox LXC deployment script
    ├── gitea-mirror-lxc-local.sh    # Local LXC deployment script
    └── manage-db.ts                 # Database management tool
```

## Deployment Options

Gitea Mirror supports multiple deployment options:

1. **Docker**: Run as a containerized application using Docker and docker-compose
2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE or local workstations
3. **Native**: Run directly on the host system using Bun runtime

Each deployment method has its own advantages:

- **Docker**: Isolation, easy updates, consistent environment
- **LXC**: Lightweight virtualization, better performance than Docker, system-level isolation
- **Native**: Best performance, direct access to system resources
@@ -2,7 +2,7 @@
title: "Configuration"
description: "Guide to configuring Gitea Mirror for your environment."
order: 2
updatedDate: 2023-10-15
updatedDate: 2025-05-22
---

<div class="mb-6">
@@ -24,10 +24,10 @@ The following environment variables can be used to configure Gitea Mirror:
| Variable | Description | Default Value | Example |
|----------|-------------|---------------|---------|
| `NODE_ENV` | Runtime environment (development, production, test) | `development` | `production` |
| `DATABASE_URL` | SQLite database URL | `sqlite://data/gitea-mirror.db` | `sqlite://path/to/your/database.db` |
| `DATABASE_URL` | SQLite database URL | `file:data/gitea-mirror.db` | `file:path/to/your/database.db` |
| `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` |
| `HOST` | Server host | `localhost` | `0.0.0.0` |
| `PORT` | Server port | `3000` | `8080` |
| `PORT` | Server port | `4321` | `8080` |

### Important Security Note

@@ -118,3 +118,58 @@ Example patterns:
- `*` - All repositories
- `org-name/*` - All repositories in a specific organization
- `username/repo-name` - A specific repository

### Database Management

Gitea Mirror includes several database management tools that can be run from the command line:

```bash
# Initialize the database (only if it doesn't exist)
bun run init-db

# Check database status
bun run check-db

# Fix database location issues
bun run fix-db

# Reset all users (for testing signup flow)
bun run reset-users

# Remove database files completely
bun run cleanup-db
```

### Event Management

Events in Gitea Mirror (such as repository mirroring operations) are stored in the SQLite database. You can manage these events using the following scripts:

```bash
# View all events in the database
bun scripts/check-events.ts

# Clean up old events (default: older than 7 days)
bun scripts/cleanup-events.ts

# Mark all events as read
bun scripts/mark-events-read.ts
```

### Health Check Endpoint

Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:

- System status and uptime
- Database connectivity check
- Memory usage statistics
- Environment information

You can use this endpoint for monitoring your deployment:

```bash
# Basic check (returns 200 OK if healthy)
curl -I http://your-server:port/api/health

# Detailed health information (JSON)
curl http://your-server:port/api/health
```
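If you script against this endpoint, the JSON follows the `HealthResponse` shape defined in `src/lib/api.ts` later in this comparison. A minimal polling sketch in TypeScript (the base URL and interval are deployment-specific placeholders):

```typescript
// Minimal health poller sketch; adjust BASE_URL for your deployment.
const BASE_URL = "http://localhost:4321";

async function checkHealth(): Promise<void> {
  const res = await fetch(`${BASE_URL}/api/health`);
  if (!res.ok) {
    console.error(`Health check failed with HTTP ${res.status}`);
    return;
  }
  const health = await res.json();
  if (health.status !== "ok" || !health.database?.connected) {
    console.error("Gitea Mirror unhealthy:", health.error ?? health.database?.message);
  } else {
    console.log(`OK – up since ${health.system.uptime.startTime}`);
  }
}

// Poll every minute, e.g. run under `bun` on the monitoring host.
setInterval(checkHealth, 60_000);
```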
@@ -2,7 +2,7 @@
title: "Quick Start Guide"
description: "Get started with Gitea Mirror quickly."
order: 3
updatedDate: 2023-10-15
updatedDate: 2025-05-22
---

<div class="mb-6">
@@ -16,13 +16,16 @@ Before you begin, make sure you have:

1. <span class="font-semibold text-foreground">A GitHub account with a personal access token</span>
2. <span class="font-semibold text-foreground">A Gitea instance with an access token</span>
3. <span class="font-semibold text-foreground">Docker and docker-compose (recommended) or Bun 1.2.9+ installed</span>
3. <span class="font-semibold text-foreground">One of the following:</span>
   - Docker and docker-compose (for Docker deployment)
   - Bun 1.2.9+ (for native deployment)
   - Proxmox VE or LXD (for LXC container deployment)

## Installation Options

Choose the installation method that works best for your environment.

### Using Docker (Recommended)
### Using Docker (Recommended for most users)

Docker provides the easiest way to get started with minimal configuration.

@@ -39,7 +42,7 @@ Docker provides the easiest way to get started with minimal configuration.

3. Access the application at [http://localhost:4321](http://localhost:4321)

### Manual Installation
### Using Bun (Native Installation)

If you prefer to run the application directly on your system:

@@ -62,6 +65,11 @@ If you prefer to run the application directly on your system:
   bun run dev
   ```

   Note: For Bun-specific features, use:
   ```bash
   bunx --bun astro dev
   ```

   **Production Mode:**
   ```bash
   bun run build
@@ -70,6 +78,44 @@ If you prefer to run the application directly on your system:

4. Access the application at [http://localhost:4321](http://localhost:4321)

### Using LXC Containers (Recommended for server deployments)

#### Proxmox VE (Online Installation)

For deploying on a Proxmox VE host with internet access:

```bash
# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
```

This script:
- Creates a privileged LXC container
- Installs Bun and dependencies
- Clones and builds the application
- Sets up a systemd service

#### Local LXD (Offline-friendly Installation)

For testing on a local workstation or in environments without internet access:

1. Clone the repository locally:
   ```bash
   git clone https://github.com/arunavo4/gitea-mirror.git
   ```

2. Download the Bun installer once:
   ```bash
   curl -L -o /tmp/bun-linux-x64.zip https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip
   ```

3. Run the local LXC installer:
   ```bash
   sudo LOCAL_REPO_DIR=~/path/to/gitea-mirror ./gitea-mirror/scripts/gitea-mirror-lxc-local.sh
   ```

For more details on LXC deployment, see the [LXC Container Deployment Guide](https://github.com/arunavo4/gitea-mirror/blob/main/scripts/README-lxc.md).

## Initial Configuration

Follow these steps to configure Gitea Mirror for first use:
@@ -116,7 +162,12 @@ If you encounter any issues:
- Check the Activity Log for detailed error messages
- Verify your GitHub and Gitea tokens have the correct permissions
- Ensure your Gitea instance is accessible from the machine running Gitea Mirror
- For Docker installations, check container logs with `docker logs gitea-mirror`
- Check logs based on your deployment method:
  - Docker: `docker logs gitea-mirror`
  - Native: Check the terminal output or system logs
  - LXC: `systemctl status gitea-mirror` or `journalctl -u gitea-mirror -f`
- Use the health check endpoint to verify system status: `curl http://your-server:4321/api/health`
- For database issues, try the database management tools: `bun run check-db` or `bun run fix-db`

## Next Steps

@@ -125,3 +176,7 @@ After your initial setup:
- Explore the dashboard for an overview of your mirroring status
- Set up automatic mirroring schedules for hands-off operation
- Configure organization mirroring for team repositories
- Check out the [Configuration Guide](/configuration) for advanced settings
- Review the [Architecture Documentation](/architecture) to understand the system
- For server deployments, set up monitoring using the health check endpoint
- Consider setting up a cron job to clean up old events: `bun scripts/cleanup-events.ts`
@@ -88,3 +88,84 @@ export const giteaApi = {
      body: JSON.stringify({ url, token }),
    }),
};

// Health API
export interface HealthResponse {
  status: "ok" | "error";
  timestamp: string;
  version: string;
  latestVersion: string;
  updateAvailable: boolean;
  database: {
    connected: boolean;
    message: string;
  };
  system: {
    uptime: {
      startTime: string;
      uptimeMs: number;
      formatted: string;
    };
    memory: {
      rss: string;
      heapTotal: string;
      heapUsed: string;
      external: string;
      systemTotal: string;
      systemFree: string;
    };
    os: {
      platform: string;
      version: string;
      arch: string;
    };
    env: string;
  };
  error?: string;
}

export const healthApi = {
  check: async (): Promise<HealthResponse> => {
    try {
      const response = await fetch(`${API_BASE}/health`);

      if (!response.ok) {
        const errorData = await response.json().catch(() => ({
          status: "error",
          error: "Failed to parse error response",
        }));

        return {
          ...errorData,
          status: "error",
          timestamp: new Date().toISOString(),
        } as HealthResponse;
      }

      return await response.json();
    } catch (error) {
      return {
        status: "error",
        timestamp: new Date().toISOString(),
        error: error instanceof Error ? error.message : "Unknown error checking health",
        version: "unknown",
        latestVersion: "unknown",
        updateAvailable: false,
        database: { connected: false, message: "Failed to connect to API" },
        system: {
          uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
          memory: {
            rss: "N/A",
            heapTotal: "N/A",
            heapUsed: "N/A",
            external: "N/A",
            systemTotal: "N/A",
            systemFree: "N/A",
          },
          os: { platform: "", version: "", arch: "" },
          env: "",
        },
      };
    }
  },
};
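A minimal usage sketch of the client above, e.g. from a React effect or a one-off script; note that `healthApi.check()` never throws, since errors come back as a `status: "error"` payload:

```typescript
import { healthApi } from "@/lib/api";

const health = await healthApi.check();

if (health.status === "ok") {
  console.log(`Running v${health.version}, up ${health.system.uptime.formatted}`);
  if (health.updateAvailable) {
    console.log(`Update available: v${health.latestVersion}`);
  }
} else {
  console.error(`Health check failed: ${health.error}`);
}
```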
src/lib/db/index.test.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
import { drizzle } from "drizzle-orm/bun-sqlite";

// Silence console logs during tests
let originalConsoleLog: typeof console.log;

beforeAll(() => {
  // Save original console.log
  originalConsoleLog = console.log;
  // Replace with no-op function
  console.log = () => {};
});

afterAll(() => {
  // Restore original console.log
  console.log = originalConsoleLog;
});

// Mock the database module
mock.module("bun:sqlite", () => {
  return {
    Database: mock(function() {
      return {
        query: mock(() => ({
          all: mock(() => []),
          run: mock(() => ({}))
        }))
      };
    })
  };
});

// Mock the database tables
describe("Database Schema", () => {
  test("database connection can be created", async () => {
    // Import the db from the module
    const { db } = await import("./index");

    // Check that db is defined
    expect(db).toBeDefined();
  });
});
@@ -189,6 +189,18 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
  timestamp: integer("timestamp", { mode: "timestamp" })
    .notNull()
    .default(new Date()),

  // New fields for job resilience
  jobType: text("job_type").notNull().default("mirror"),
  batchId: text("batch_id"),
  totalItems: integer("total_items"),
  completedItems: integer("completed_items").default(0),
  itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
  completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
  inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
  startedAt: integer("started_at", { mode: "timestamp" }),
  completedAt: integer("completed_at", { mode: "timestamp" }),
  lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
});

export const organizations = sqliteTable("organizations", {
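With these columns in the Drizzle schema, resumable-job queries stay type-safe. For instance, finding stale in-progress jobs after a restart might look like this (a sketch; it assumes the `db` and `mirrorJobs` exports from this module and the standard drizzle-orm operators):

```typescript
import { and, eq, lt } from "drizzle-orm";
import { db, mirrorJobs } from "@/lib/db";

// Jobs still flagged in_progress whose last checkpoint is older than
// 10 minutes are candidates for recovery after a crash or container restart.
const cutoff = new Date(Date.now() - 10 * 60 * 1000);

const staleJobs = await db
  .select()
  .from(mirrorJobs)
  .where(and(eq(mirrorJobs.inProgress, true), lt(mirrorJobs.lastCheckpoint, cutoff)));
```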
@@ -111,6 +111,18 @@ export const mirrorJobSchema = z.object({
  status: repoStatusEnum.default("imported"),
  message: z.string(),
  timestamp: z.date().default(() => new Date()),

  // New fields for job resilience
  jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
  batchId: z.string().uuid().optional(),            // Group related jobs together
  totalItems: z.number().optional(),                // Total number of items to process
  completedItems: z.number().optional(),            // Number of items completed
  itemIds: z.array(z.string()).optional(),          // IDs of items to process
  completedItemIds: z.array(z.string()).optional(), // IDs of completed items
  inProgress: z.boolean().default(false),           // Whether the job is currently running
  startedAt: z.date().optional(),                   // When the job started
  completedAt: z.date().optional(),                 // When the job completed
  lastCheckpoint: z.date().optional(),              // Last time progress was saved
});

export type MirrorJob = z.infer<typeof mirrorJobSchema>;
src/lib/gitea.test.ts (new file, 120 lines)
@@ -0,0 +1,120 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { Octokit } from "@octokit/rest";
import { repoStatusEnum } from "@/types/Repository";

// Mock the isRepoPresentInGitea function
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      update: () => ({
        set: () => ({
          where: () => Promise.resolve()
        })
      })
    },
    repositories: {},
    organizations: {}
  };
});

// Mock the helpers module
mock.module("@/lib/helpers", () => {
  return {
    createMirrorJob: mock(() => Promise.resolve("job-id"))
  };
});

// Mock superagent
mock.module("superagent", () => {
  const mockPost = mock(() => ({
    set: () => ({
      set: () => ({
        send: () => Promise.resolve({ body: { id: 123 } })
      })
    })
  }));

  const mockGet = mock(() => ({
    set: () => Promise.resolve({ body: [] })
  }));

  return {
    post: mockPost,
    get: mockGet
  };
});

// Mock the gitea module itself
mock.module("./gitea", () => {
  return {
    isRepoPresentInGitea: mockIsRepoPresentInGitea,
    mirrorGithubRepoToGitea: mock(async () => {}),
    mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
  };
});

describe("Gitea Repository Mirroring", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
    // Import the mocked function
    const { mirrorGithubRepoToGitea } = await import("./gitea");

    // Create mock Octokit instance
    const octokit = {} as Octokit;

    // Create mock repository (private)
    const repository = {
      id: "repo-id",
      name: "test-repo",
      fullName: "testuser/test-repo",
      url: "https://github.com/testuser/test-repo",
      cloneUrl: "https://github.com/testuser/test-repo.git",
      owner: "testuser",
      isPrivate: true,
      status: repoStatusEnum.parse("imported")
    };

    // Create mock config
    const config = {
      id: "config-id",
      userId: "user-id",
      githubConfig: {
        token: "github-token",
        mirrorIssues: false
      },
      giteaConfig: {
        url: "https://gitea.example.com",
        token: "gitea-token",
        username: "giteauser"
      }
    };

    // Call the function
    await mirrorGithubRepoToGitea({
      octokit,
      repository: repository as any,
      config
    });

    // Check that the function was called
    expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
  });
});
src/lib/gitea.ts (129 lines changed)
@@ -601,11 +601,22 @@ export async function mirrorGitHubOrgToGitea({
    .from(repositories)
    .where(eq(repositories.organization, organization.name));

  for (const repo of orgRepos) {
    await mirrorGitHubRepoToGiteaOrg({
      octokit,
      config,
      repository: {
  if (orgRepos.length === 0) {
    console.log(`No repositories found for organization ${organization.name}`);
    return;
  }

  console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);

  // Import the processWithRetry function
  const { processWithRetry } = await import("@/lib/utils/concurrency");

  // Process repositories in parallel with concurrency control
  await processWithRetry(
    orgRepos,
    async (repo) => {
      // Prepare repository data
      const repoData = {
        ...repo,
        status: repo.status as RepoStatus,
        visibility: repo.visibility as RepositoryVisibility,
@@ -614,11 +625,37 @@ export async function mirrorGitHubOrgToGitea({
        organization: repo.organization ?? undefined,
        forkedFrom: repo.forkedFrom ?? undefined,
        mirroredLocation: repo.mirroredLocation || "",
      },
      };

      // Log the start of mirroring
      console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);

      // Mirror the repository
      await mirrorGitHubRepoToGiteaOrg({
        octokit,
        config,
        repository: repoData,
        giteaOrgId,
        orgName: organization.name,
      });

      return repo;
    },
    {
      concurrencyLimit: 3, // Process 3 repositories at a time
      maxRetries: 2,
      retryDelay: 2000,
      onProgress: (completed, total, result) => {
        const percentComplete = Math.round((completed / total) * 100);
        if (result) {
          console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
        }
      },
      onRetry: (repo, error, attempt) => {
        console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
      }
    }
  );

  console.log(`Organization ${organization.name} mirrored successfully`);

@@ -837,7 +874,15 @@ export const mirrorGitRepoIssuesToGitea = async ({
    (res) => res.data
  );

  console.log(`Mirroring ${issues.length} issues from ${repository.fullName}`);
  // Filter out pull requests
  const filteredIssues = issues.filter(issue => !(issue as any).pull_request);

  console.log(`Mirroring ${filteredIssues.length} issues from ${repository.fullName}`);

  if (filteredIssues.length === 0) {
    console.log(`No issues to mirror for ${repository.fullName}`);
    return;
  }

  // Get existing labels from Gitea
  const giteaLabelsRes = await superagent
@@ -851,11 +896,13 @@ export const mirrorGitRepoIssuesToGitea = async ({
    giteaLabels.map((label: any) => [label.name, label.id])
  );

  for (const issue of issues) {
    if ((issue as any).pull_request) {
      continue;
    }
  // Import the processWithRetry function
  const { processWithRetry } = await import("@/lib/utils/concurrency");

  // Process issues in parallel with concurrency control
  await processWithRetry(
    filteredIssues,
    async (issue) => {
      const githubLabelNames =
        issue.labels
          ?.map((l) => (typeof l === "string" ? l : l.name))
@@ -902,7 +949,7 @@ export const mirrorGitRepoIssuesToGitea = async ({
        labels: giteaLabelIds,
      };

      try {
        // Create the issue in Gitea
        const createdIssue = await superagent
          .post(
            `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
@@ -922,8 +969,11 @@ export const mirrorGitRepoIssuesToGitea = async ({
          (res) => res.data
        );

        for (const comment of comments) {
          try {
        // Process comments in parallel with concurrency control
        if (comments.length > 0) {
          await processWithRetry(
            comments,
            async (comment) => {
              await superagent
                .post(
                  `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
@@ -932,31 +982,36 @@ export const mirrorGitRepoIssuesToGitea = async ({
                .send({
                  body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
                });
          } catch (commentErr) {
            console.error(
              `Failed to copy comment to Gitea for issue "${issue.title}": ${
                commentErr instanceof Error
                  ? commentErr.message
                  : String(commentErr)
              }`
              return comment;
            },
            {
              concurrencyLimit: 5,
              maxRetries: 2,
              retryDelay: 1000,
              onRetry: (comment, error, attempt) => {
                console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
              }
            }
          );
        }

        return issue;
      },
      {
        concurrencyLimit: 3, // Process 3 issues at a time
        maxRetries: 2,
        retryDelay: 2000,
        onProgress: (completed, total, result) => {
          const percentComplete = Math.round((completed / total) * 100);
          if (result) {
            console.log(`Mirrored issue "${result.title}" (${completed}/${total}, ${percentComplete}%)`);
          }
        },
        onRetry: (issue, error, attempt) => {
          console.log(`Retrying issue "${issue.title}" (attempt ${attempt}): ${error.message}`);
        }
      }
      } catch (err) {
        if (err instanceof Error && (err as any).response) {
          console.error(
            `Failed to create issue "${issue.title}" in Gitea: ${err.message}`
          );
          console.error(
            `Response body: ${JSON.stringify((err as any).response.body)}`
          );
        } else {
          console.error(
            `Failed to create issue "${issue.title}" in Gitea: ${
              err instanceof Error ? err.message : String(err)
            }`
          );
        }
      }
    }

  console.log(`Completed mirroring ${filteredIssues.length} issues for ${repository.fullName}`);
};
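These hunks lean on `processWithRetry` from `@/lib/utils/concurrency`, whose implementation is not part of this comparison. Inferred from the call sites above and the tests further down, a minimal sketch might look like the following — a sketch only, not the project's actual implementation:

```typescript
// Hypothetical sketch of processWithRetry, inferred from its call sites.
// The real implementation lives in src/lib/utils/concurrency.ts and may differ.
export interface RetryOptions<T, R> {
  concurrencyLimit: number;
  maxRetries: number;
  retryDelay: number; // ms to wait between attempts
  onProgress?: (completed: number, total: number, result?: R) => void;
  onRetry?: (item: T, error: Error, attempt: number) => void;
}

export async function processWithRetry<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  options: RetryOptions<T, R>
): Promise<R[]> {
  const results: R[] = new Array(items.length);
  let completed = 0;
  let nextIndex = 0;

  async function runOne(index: number): Promise<void> {
    const item = items[index];
    for (let attempt = 1; ; attempt++) {
      try {
        const result = await worker(item);
        results[index] = result;
        completed++;
        options.onProgress?.(completed, items.length, result);
        return;
      } catch (err) {
        const error = err instanceof Error ? err : new Error(String(err));
        // Initial try plus maxRetries retries, then give up
        if (attempt > options.maxRetries) throw error;
        options.onRetry?.(item, error, attempt);
        await new Promise((resolve) => setTimeout(resolve, options.retryDelay));
      }
    }
  }

  // Simple worker pool: each lane keeps pulling the next index until done
  const lanes = Array.from(
    { length: Math.min(options.concurrencyLimit, items.length) },
    async () => {
      while (nextIndex < items.length) {
        const index = nextIndex++;
        await runOne(index);
      }
    }
  );

  await Promise.all(lanes);
  return results;
}
```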
@@ -12,6 +12,11 @@ export async function createMirrorJob({
  message,
  status,
  details,
  jobType,
  batchId,
  totalItems,
  itemIds,
  inProgress,
}: {
  userId: string;
  organizationId?: string;
@@ -21,6 +26,11 @@ export async function createMirrorJob({
  details?: string;
  message: string;
  status: RepoStatus;
  jobType?: "mirror" | "sync" | "retry";
  batchId?: string;
  totalItems?: number;
  itemIds?: string[];
  inProgress?: boolean;
}) {
  const jobId = uuidv4();
  const currentTimestamp = new Date();
@@ -32,11 +42,22 @@ export async function createMirrorJob({
    repositoryName,
    organizationId,
    organizationName,
    configId: uuidv4(),
    details,
    message: message,
    status: status,
    timestamp: currentTimestamp,

    // New resilience fields
    jobType: jobType || "mirror",
    batchId: batchId || undefined,
    totalItems: totalItems || undefined,
    completedItems: 0,
    itemIds: itemIds || undefined,
    completedItemIds: [],
    inProgress: inProgress !== undefined ? inProgress : false,
    startedAt: inProgress ? currentTimestamp : undefined,
    completedAt: undefined,
    lastCheckpoint: undefined,
  };

  try {
@@ -57,3 +78,186 @@ export async function createMirrorJob({
    throw new Error("Error creating mirror job");
  }
}

/**
 * Updates the progress of a mirror job
 */
export async function updateMirrorJobProgress({
  jobId,
  completedItemId,
  status,
  message,
  details,
  inProgress,
  isCompleted,
}: {
  jobId: string;
  completedItemId?: string;
  status?: RepoStatus;
  message?: string;
  details?: string;
  inProgress?: boolean;
  isCompleted?: boolean;
}) {
  try {
    // Get the current job (use drizzle's eq(), not a plain JS comparison,
    // which would never translate into a SQL filter)
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, jobId));

    if (!job) {
      throw new Error(`Mirror job with ID ${jobId} not found`);
    }

    // Update the job with new progress
    const updates: Record<string, any> = {
      lastCheckpoint: new Date(),
    };

    // Add completed item if provided
    if (completedItemId) {
      const completedItemIds = job.completedItemIds || [];
      if (!completedItemIds.includes(completedItemId)) {
        updates.completedItemIds = [...completedItemIds, completedItemId];
        updates.completedItems = (job.completedItems || 0) + 1;
      }
    }

    // Update status if provided
    if (status) {
      updates.status = status;
    }

    // Update message if provided
    if (message) {
      updates.message = message;
    }

    // Update details if provided
    if (details) {
      updates.details = details;
    }

    // Update in-progress status if provided
    if (inProgress !== undefined) {
      updates.inProgress = inProgress;
    }

    // Mark as completed if specified
    if (isCompleted) {
      updates.inProgress = false;
      updates.completedAt = new Date();
    }

    // Update the job in the database
    await db
      .update(mirrorJobs)
      .set(updates)
      .where(eq(mirrorJobs.id, jobId));

    // Publish the event
    const updatedJob = {
      ...job,
      ...updates,
    };

    await publishEvent({
      userId: job.userId,
      channel: `mirror-status:${job.userId}`,
      payload: updatedJob,
    });

    return updatedJob;
  } catch (error) {
    console.error("Error updating mirror job progress:", error);
    throw new Error("Error updating mirror job progress");
  }
}

/**
 * Finds interrupted jobs that need to be resumed
 */
export async function findInterruptedJobs() {
  try {
    // Find jobs that are marked as in-progress but haven't been updated recently
    const cutoffTime = new Date();
    cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates

    // Build the filter with drizzle's and/or/isNull/lt operators; plain
    // JS comparisons on column objects would not produce a WHERE clause
    const interruptedJobs = await db
      .select()
      .from(mirrorJobs)
      .where(
        and(
          eq(mirrorJobs.inProgress, true),
          or(
            isNull(mirrorJobs.lastCheckpoint),
            lt(mirrorJobs.lastCheckpoint, cutoffTime)
          )
        )
      );

    return interruptedJobs;
  } catch (error) {
    console.error("Error finding interrupted jobs:", error);
    return [];
  }
}

/**
 * Resumes an interrupted job
 */
export async function resumeInterruptedJob(job: any) {
  try {
    console.log(`Resuming interrupted job: ${job.id}`);

    // Skip if job doesn't have the necessary data to resume
    if (!job.itemIds || !job.completedItemIds) {
      console.log(`Cannot resume job ${job.id}: missing item data`);

      // Mark the job as failed
      await updateMirrorJobProgress({
        jobId: job.id,
        status: "failed",
        message: "Job interrupted and could not be resumed",
        details: "The job was interrupted and did not have enough information to resume",
        inProgress: false,
        isCompleted: true,
      });

      return null;
    }

    // Calculate remaining items
    const remainingItemIds = job.itemIds.filter(
      (id: string) => !job.completedItemIds.includes(id)
    );

    if (remainingItemIds.length === 0) {
      console.log(`Job ${job.id} has no remaining items, marking as completed`);

      // Mark the job as completed
      await updateMirrorJobProgress({
        jobId: job.id,
        status: "mirrored",
        message: "Job completed after resuming",
        inProgress: false,
        isCompleted: true,
      });

      return null;
    }

    // Update the job to show it's being resumed
    await updateMirrorJobProgress({
      jobId: job.id,
      message: `Resuming job with ${remainingItemIds.length} remaining items`,
      details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
      inProgress: true,
    });

    return {
      job,
      remainingItemIds,
    };
  } catch (error) {
    console.error(`Error resuming job ${job.id}:`, error);
    return null;
  }
}
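Taken together, these helpers give a worker a simple checkpoint protocol: create the job with the full item list, record each item as it finishes, then close the job out. A usage sketch (the repository IDs are illustrative, and it assumes `createMirrorJob` resolves to the new job's ID, as the test mock earlier in this diff suggests):

```typescript
import { createMirrorJob, updateMirrorJobProgress } from "@/lib/helpers";

const repoIds = ["repo-1", "repo-2", "repo-3"]; // illustrative IDs

// 1. Create the job up front so a crash can be detected and resumed later.
const jobId = await createMirrorJob({
  userId: "user-id",
  message: `Mirroring ${repoIds.length} repositories`,
  status: "imported",
  jobType: "mirror",
  totalItems: repoIds.length,
  itemIds: repoIds,
  inProgress: true,
});

// 2. Checkpoint after every item; completedItemIds is what recovery replays against.
for (const id of repoIds) {
  // ... mirror the repository here ...
  await updateMirrorJobProgress({ jobId, completedItemId: id });
}

// 3. Mark the job finished so findInterruptedJobs() never picks it up.
await updateMirrorJobProgress({
  jobId,
  status: "mirrored",
  message: "All repositories mirrored",
  isCompleted: true,
});
```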
src/lib/recovery.ts (new file, 224 lines)
@@ -0,0 +1,224 @@
/**
 * Recovery mechanism for interrupted jobs
 * This module handles detecting and resuming jobs that were interrupted by container restarts
 */

import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
import { db, repositories, organizations } from './db';
import { eq, inArray } from 'drizzle-orm';
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
import type { Repository } from './db/schema';

/**
 * Initialize the recovery system
 * This should be called when the application starts
 */
export async function initializeRecovery() {
  console.log('Initializing recovery system...');

  try {
    // Find interrupted jobs
    const interruptedJobs = await findInterruptedJobs();

    if (interruptedJobs.length === 0) {
      console.log('No interrupted jobs found.');
      return;
    }

    console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);

    // Process each interrupted job
    for (const job of interruptedJobs) {
      const resumeData = await resumeInterruptedJob(job);

      if (!resumeData) {
        console.log(`Job ${job.id} could not be resumed.`);
        continue;
      }

      const { job: updatedJob, remainingItemIds } = resumeData;

      // Handle different job types
      switch (updatedJob.jobType) {
        case 'mirror':
          await recoverMirrorJob(updatedJob, remainingItemIds);
          break;
        case 'sync':
          await recoverSyncJob(updatedJob, remainingItemIds);
          break;
        case 'retry':
          await recoverRetryJob(updatedJob, remainingItemIds);
          break;
        default:
          console.log(`Unknown job type: ${updatedJob.jobType}`);
      }
    }

    console.log('Recovery process completed.');
  } catch (error) {
    console.error('Error during recovery process:', error);
  }
}

/**
 * Recover a mirror job
 */
async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
  console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user
    const [config] = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (!config || !config.configId) {
      throw new Error('Config not found for user');
    }

    // Get repositories to process (inArray, since remainingItemIds is a list)
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      throw new Error('No repositories found for the remaining item IDs');
    }

    // Create GitHub client
    const octokit = createGitHubClient(config.githubConfig.token);

    // Process repositories with resilience
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare repository data
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse("imported"),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility),
          mirroredLocation: repo.mirroredLocation || "",
        };

        // Mirror the repository based on whether it's in an organization
        if (repo.organization && config.githubConfig.preserveOrgStructure) {
          await mirrorGitHubOrgRepoToGiteaOrg({
            config,
            octokit,
            orgName: repo.organization,
            repository: repoData,
          });
        } else {
          await mirrorGithubRepoToGitea({
            octokit,
            repository: repoData,
            config,
          });
        }

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'mirror',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 3,
        maxRetries: 2,
        retryDelay: 2000,
      }
    );
  } catch (error) {
    console.error(`Error recovering mirror job ${job.id}:`, error);
  }
}

/**
 * Recover a sync job
 */
async function recoverSyncJob(job: any, remainingItemIds: string[]) {
  // Implementation similar to recoverMirrorJob but for sync operations
  console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user
    const [config] = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (!config || !config.configId) {
      throw new Error('Config not found for user');
    }

    // Get repositories to process (inArray, since remainingItemIds is a list)
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      throw new Error('No repositories found for the remaining item IDs');
    }

    // Process repositories with resilience
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare repository data
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse(repo.status),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility),
        };

        // Sync the repository
        await syncGiteaRepo({
          config,
          repository: repoData,
        });

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'sync',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 5,
        maxRetries: 2,
        retryDelay: 2000,
      }
    );
  } catch (error) {
    console.error(`Error recovering sync job ${job.id}:`, error);
  }
}

/**
 * Recover a retry job
 */
async function recoverRetryJob(job: any, remainingItemIds: string[]) {
  // Implementation similar to recoverMirrorJob but for retry operations
  console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);

  // This would be similar to recoverMirrorJob but with retry-specific logic
  console.log('Retry job recovery not yet implemented');
}
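Where `initializeRecovery` gets called is not part of this comparison; one plausible wiring is to invoke it once when the server process boots (a sketch, with the module location hypothetical):

```typescript
// e.g. in a server entry or startup module (hypothetical location)
import { initializeRecovery } from "@/lib/recovery";

// Fire-and-forget on boot: recovery scans for stale in-progress jobs and
// resumes them without blocking the HTTP server from starting.
initializeRecovery().catch((error) => {
  console.error("Recovery failed to initialize:", error);
});
```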
src/lib/utils.test.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
import { describe, test, expect } from "bun:test";
import { jsonResponse, formatDate, truncate, safeParse } from "./utils";

describe("jsonResponse", () => {
  test("creates a Response with JSON content", () => {
    const data = { message: "Hello, world!" };
    const response = jsonResponse({ data });

    expect(response).toBeInstanceOf(Response);
    expect(response.status).toBe(200);
    expect(response.headers.get("Content-Type")).toBe("application/json");
  });

  test("uses the provided status code", () => {
    const data = { error: "Not found" };
    const response = jsonResponse({ data, status: 404 });

    expect(response.status).toBe(404);
  });

  test("correctly serializes complex objects", async () => {
    const now = new Date();
    const data = {
      message: "Complex object",
      date: now,
      nested: { foo: "bar" },
      array: [1, 2, 3]
    };

    const response = jsonResponse({ data });
    const responseBody = await response.json();

    expect(responseBody).toEqual({
      message: "Complex object",
      date: now.toISOString(),
      nested: { foo: "bar" },
      array: [1, 2, 3]
    });
  });
});

describe("formatDate", () => {
  test("formats a date object", () => {
    const date = new Date("2023-01-15T12:30:45Z");
    const formatted = formatDate(date);

    // The exact format might depend on the locale, so we'll check for parts
    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("formats a date string", () => {
    const dateStr = "2023-01-15T12:30:45Z";
    const formatted = formatDate(dateStr);

    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("returns 'Never' for null or undefined", () => {
    expect(formatDate(null)).toBe("Never");
    expect(formatDate(undefined)).toBe("Never");
  });
});

describe("truncate", () => {
  test("truncates a string that exceeds the length", () => {
    const str = "This is a long string that needs truncation";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("This is a ...");
    expect(truncated.length).toBe(13); // 10 chars + "..."
  });

  test("does not truncate a string that is shorter than the length", () => {
    const str = "Short";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("Short");
  });

  test("handles empty strings", () => {
    expect(truncate("", 10)).toBe("");
  });
});

describe("safeParse", () => {
  test("parses valid JSON strings", () => {
    const jsonStr = '{"name":"John","age":30}';
    const parsed = safeParse(jsonStr);

    expect(parsed).toEqual({ name: "John", age: 30 });
  });

  test("returns undefined for invalid JSON strings", () => {
    const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
    const parsed = safeParse(invalidJson);

    expect(parsed).toBeUndefined();
  });

  test("returns the original value for non-string inputs", () => {
    const obj = { name: "John", age: 30 };
    const parsed = safeParse(obj);

    expect(parsed).toBe(obj);
  });
});
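The tests above fully pin down the behavior of truncate and safeParse. For reference, implementations consistent with them could look like this sketch (assumed for illustration, not the actual contents of src/lib/utils.ts):

// Sketch of utils consistent with the tests above (assumptions, not the committed code).
export function truncate(str: string, length: number): string {
  // Append "..." only when the input exceeds the limit; short inputs pass through
  return str.length > length ? str.slice(0, length) + "..." : str;
}

export function safeParse(value: unknown): unknown {
  // Non-strings pass through untouched; invalid JSON yields undefined
  if (typeof value !== "string") return value;
  try {
    return JSON.parse(value);
  } catch {
    return undefined;
  }
}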
167
src/lib/utils/concurrency.test.ts
Normal file
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";

describe("processInParallel", () => {
  test("processes items in parallel with concurrency control", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    // Create a mock function to track execution
    const processItem = mock(async (item: number) => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return item * 2;
    });

    // Create a mock progress callback
    const onProgress = mock((completed: number, total: number, result?: number) => {
      // Progress tracking
    });

    // Process the items with a concurrency limit of 3
    const results = await processInParallel(
      items,
      processItem,
      3,
      onProgress
    );

    // Verify results
    expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

    // Verify that processItem was called for each item
    expect(processItem).toHaveBeenCalledTimes(10);

    // Verify that onProgress was called for each item
    expect(onProgress).toHaveBeenCalledTimes(10);

    // Verify the last call to onProgress had the correct completed/total values
    expect(onProgress.mock.calls[9][0]).toBe(10); // completed
    expect(onProgress.mock.calls[9][1]).toBe(10); // total
  });

  test("handles errors in processing", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5];

    // Create a mock function that throws an error for item 3
    const processItem = mock(async (item: number) => {
      if (item === 3) {
        throw new Error("Test error");
      }
      return item * 2;
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items
      const results = await processInParallel(items, processItem);

      // Verify results (should have 4 items, missing the one that errored)
      expect(results).toEqual([2, 4, 8, 10]);

      // Verify that processItem was called for each item
      expect(processItem).toHaveBeenCalledTimes(5);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});

describe("processWithRetry", () => {
  test("retries failed operations", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3];

    // Create a counter to track retry attempts
    const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };

    // Create a mock function that fails on first attempt for item 2
    const processItem = mock(async (item: number) => {
      attemptCounts[item]++;

      if (item === 2 && attemptCounts[item] === 1) {
        throw new Error("Temporary error");
      }

      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Process the items with retry
    const results = await processWithRetry(items, processItem, {
      maxRetries: 2,
      retryDelay: 10,
      onRetry,
    });

    // Verify results
    expect(results).toEqual([2, 4, 6]);

    // Verify that item 2 was retried once
    expect(attemptCounts[1]).toBe(1); // No retries
    expect(attemptCounts[2]).toBe(2); // One retry
    expect(attemptCounts[3]).toBe(1); // No retries

    // Verify that onRetry was called once
    expect(onRetry).toHaveBeenCalledTimes(1);
    expect(onRetry.mock.calls[0][0]).toBe(2); // item
    expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
  });

  test("gives up after max retries", async () => {
    // Create an array of numbers to process
    const items = [1, 2];

    // Create a mock function that always fails for item 2
    const processItem = mock(async (item: number) => {
      if (item === 2) {
        throw new Error("Persistent error");
      }
      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items with retry
      const results = await processWithRetry(items, processItem, {
        maxRetries: 2,
        retryDelay: 10,
        onRetry,
      });

      // Verify results (should have 1 item, missing the one that errored)
      expect(results).toEqual([2]);

      // Verify that onRetry was called twice (for 2 retry attempts)
      expect(onRetry).toHaveBeenCalledTimes(2);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
292
src/lib/utils/concurrency.ts
Normal file
@@ -0,0 +1,292 @@
/**
 * Utility for processing items in parallel with concurrency control
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param concurrencyLimit Maximum number of concurrent operations
 * @param onProgress Optional callback for progress updates
 * @returns Promise that resolves when all items are processed
 */
export async function processInParallel<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  concurrencyLimit: number = 5,
  onProgress?: (completed: number, total: number, result?: R) => void
): Promise<R[]> {
  const results: R[] = [];
  let completed = 0;
  const total = items.length;

  // Process items in batches to control concurrency
  for (let i = 0; i < total; i += concurrencyLimit) {
    const batch = items.slice(i, i + concurrencyLimit);

    const batchPromises = batch.map(async (item) => {
      try {
        const result = await processItem(item);
        completed++;

        if (onProgress) {
          onProgress(completed, total, result);
        }

        return result;
      } catch (error) {
        completed++;

        if (onProgress) {
          onProgress(completed, total);
        }

        throw error;
      }
    });

    // Wait for the current batch to complete before starting the next batch
    const batchResults = await Promise.allSettled(batchPromises);

    // Process results and handle errors
    for (const result of batchResults) {
      if (result.status === 'fulfilled') {
        results.push(result.value);
      } else {
        console.error('Error processing item:', result.reason);
      }
    }
  }

  return results;
}

/**
 * Utility for processing items in parallel with automatic retry for failed operations
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param options Configuration options
 * @returns Promise that resolves when all items are processed
 */
export async function processWithRetry<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    jobId?: string; // Optional job ID for checkpointing
    getItemId?: (item: T) => string; // Function to get a unique ID for each item
    onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
    checkpointInterval?: number; // How many items to process before checkpointing
  } = {}
): Promise<R[]> {
  const {
    concurrencyLimit = 5,
    maxRetries = 3,
    retryDelay = 1000,
    onProgress,
    onRetry,
    jobId,
    getItemId,
    onCheckpoint,
    checkpointInterval = 1 // Default to checkpointing after each item
  } = options;

  // Track checkpoint counter
  let itemsProcessedSinceLastCheckpoint = 0;

  // Wrap the process function with retry logic
  const processWithRetryLogic = async (item: T): Promise<R> => {
    let lastError: Error | null = null;

    for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
      try {
        const result = await processItem(item);

        // Handle checkpointing if enabled
        if (jobId && getItemId && onCheckpoint) {
          const itemId = getItemId(item);
          itemsProcessedSinceLastCheckpoint++;

          // Checkpoint based on the interval
          if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
            await onCheckpoint(jobId, itemId);
            itemsProcessedSinceLastCheckpoint = 0;
          }
        }

        return result;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));

        if (attempt <= maxRetries) {
          if (onRetry) {
            onRetry(item, lastError, attempt);
          }

          // Exponential backoff
          const delay = retryDelay * Math.pow(2, attempt - 1);
          await new Promise(resolve => setTimeout(resolve, delay));
        } else {
          throw lastError;
        }
      }
    }

    // This should never be reached due to the throw in the catch block
    throw lastError || new Error('Unknown error occurred');
  };

  const results = await processInParallel(
    items,
    processWithRetryLogic,
    concurrencyLimit,
    onProgress
  );

  // Final checkpoint if there are remaining items since the last checkpoint
  if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
    // We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
    await onCheckpoint(jobId, 'final');
  }

  return results;
}

/**
 * Process items in parallel with resilience to container restarts
 * This version supports resuming from a previous checkpoint
 */
export async function processWithResilience<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    userId: string; // Required for creating mirror jobs
    jobType: "mirror" | "sync" | "retry";
    getItemId: (item: T) => string; // Required function to get a unique ID for each item
    getItemName: (item: T) => string; // Required function to get a display name for each item
    checkpointInterval?: number;
    resumeFromJobId?: string; // Optional job ID to resume from
  }
): Promise<R[]> {
  const {
    userId,
    jobType,
    getItemId,
    getItemName,
    resumeFromJobId,
    checkpointInterval = 5,
    ...otherOptions
  } = options;

  // Import helpers for job management
  const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Define the checkpoint function
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const itemName = items.find(item => getItemId(item) === completedItemId)
      ? getItemName(items.find(item => getItemId(item) === completedItemId)!)
      : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    return results;
  } catch (error) {
    // Mark the job as failed
    await updateMirrorJobProgress({
      jobId,
      status: "failed",
      message: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: true,
    });

    throw error;
  }
}
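To illustrate how the helpers above compose, a usage sketch of processWithRetry follows; fetchRepoMetadata, the URLs, and the repo names are placeholders, not part of this diff:

// Illustrative usage of processWithRetry; fetchRepoMetadata is a hypothetical worker.
import { processWithRetry } from "@/lib/utils/concurrency";

async function fetchRepoMetadata(name: string): Promise<string> {
  const res = await fetch(`https://api.github.com/repos/${name}`);
  if (!res.ok) throw new Error(`HTTP ${res.status} for ${name}`);
  return name;
}

const names = ["owner/repo-a", "owner/repo-b", "owner/repo-c"];
const ok = await processWithRetry(names, fetchRepoMetadata, {
  concurrencyLimit: 2, // at most two requests in flight per batch
  maxRetries: 2,       // each item gets up to two retries
  retryDelay: 500,     // doubled on every attempt (exponential backoff)
  onRetry: (item, err, attempt) =>
    console.log(`retry ${attempt} for ${item}: ${err.message}`),
});
console.log(`${ok.length}/${names.length} items succeeded`);

Failed items are logged and dropped from the returned array rather than rejecting the whole run, which is why the call sites below treat the result as a best-effort batch.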
22
src/middleware.ts
Normal file
@@ -0,0 +1,22 @@
import { defineMiddleware } from 'astro:middleware';
import { initializeRecovery } from './lib/recovery';

// Flag to track if recovery has been initialized
let recoveryInitialized = false;

export const onRequest = defineMiddleware(async (context, next) => {
  // Initialize recovery system only once when the server starts
  if (!recoveryInitialized) {
    console.log('Initializing recovery system from middleware...');
    try {
      await initializeRecovery();
      console.log('Recovery system initialized successfully');
    } catch (error) {
      console.error('Error initializing recovery system:', error);
    }
    recoveryInitialized = true;
  }

  // Continue with the request
  return next();
});
187
src/pages/api/gitea/test-connection.test.ts
Normal file
@@ -0,0 +1,187 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import axios from "axios";

// Mock the POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing URL or token
  if (!body.url || !body.token) {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Gitea URL and token are required"
      }),
      { status: 400 }
    );
  }

  // Check for username mismatch
  if (body.username && body.username !== "giteauser") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Token belongs to giteauser, not " + body.username
      }),
      { status: 400 }
    );
  }

  // Handle invalid token
  if (body.token === "invalid-token") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Invalid Gitea token"
      }),
      { status: 401 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Successfully connected to Gitea as giteauser",
      user: {
        login: "giteauser",
        name: "Gitea User",
        avatar_url: "https://gitea.example.com/avatar.png"
      }
    }),
    { status: 200 }
  );
});

// Mock the module
mock.module("./test-connection", () => {
  return {
    POST: mockPOST
  };
});

// Import after mocking
import { POST } from "./test-connection";

describe("Gitea Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if url or token is missing", async () => {
    // Test missing URL
    const requestMissingUrl = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const responseMissingUrl = await POST({ request: requestMissingUrl } as any);

    expect(responseMissingUrl.status).toBe(400);

    const dataMissingUrl = await responseMissingUrl.json();
    expect(dataMissingUrl.success).toBe(false);
    expect(dataMissingUrl.message).toBe("Gitea URL and token are required");

    // Test missing token
    const requestMissingToken = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com"
      })
    });

    const responseMissingToken = await POST({ request: requestMissingToken } as any);

    expect(responseMissingToken.status).toBe(400);

    const dataMissingToken = await responseMissingToken.json();
    expect(dataMissingToken.success).toBe(false);
    expect(dataMissingToken.message).toBe("Gitea URL and token are required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to Gitea as giteauser");
    expect(data.user).toEqual({
      login: "giteauser",
      name: "Gitea User",
      avatar_url: "https://gitea.example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to giteauser, not differentuser");
  });

  test("handles authentication errors", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(401);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Invalid Gitea token");
  });
});
133
src/pages/api/github/test-connection.test.ts
Normal file
@@ -0,0 +1,133 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { POST } from "./test-connection";
import { Octokit } from "@octokit/rest";

// Mock the Octokit class
mock.module("@octokit/rest", () => {
  return {
    Octokit: mock(function() {
      return {
        users: {
          getAuthenticated: mock(() => Promise.resolve({
            data: {
              login: "testuser",
              name: "Test User",
              avatar_url: "https://example.com/avatar.png"
            }
          }))
        }
      };
    })
  };
});

describe("GitHub Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if token is missing", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({})
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("GitHub token is required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to GitHub as testuser");
    expect(data.user).toEqual({
      login: "testuser",
      name: "Test User",
      avatar_url: "https://example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to testuser, not differentuser");
  });

  test("handles authentication errors", async () => {
    // Mock Octokit to throw an error
    mock.module("@octokit/rest", () => {
      return {
        Octokit: mock(function() {
          return {
            users: {
              getAuthenticated: mock(() => Promise.reject(new Error("Bad credentials")))
            }
          };
        })
      };
    });

    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(500);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toContain("Bad credentials");
  });
});
154
src/pages/api/health.test.ts
Normal file
@@ -0,0 +1,154 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { GET } from "./health";
import * as dbModule from "@/lib/db";
import os from "os";

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      select: () => ({
        from: () => ({
          limit: () => Promise.resolve([{ test: 1 }])
        })
      })
    }
  };
});

// Mock the os functions individually
const originalPlatform = os.platform;
const originalVersion = os.version;
const originalArch = os.arch;
const originalTotalmem = os.totalmem;
const originalFreemem = os.freemem;

describe("Health API Endpoint", () => {
  beforeEach(() => {
    // Mock os functions
    os.platform = mock(() => "test-platform");
    os.version = mock(() => "test-version");
    os.arch = mock(() => "test-arch");
    os.totalmem = mock(() => 16 * 1024 * 1024 * 1024); // 16GB
    os.freemem = mock(() => 8 * 1024 * 1024 * 1024); // 8GB

    // Mock process.memoryUsage
    process.memoryUsage = mock(() => ({
      rss: 100 * 1024 * 1024, // 100MB
      heapTotal: 50 * 1024 * 1024, // 50MB
      heapUsed: 30 * 1024 * 1024, // 30MB
      external: 10 * 1024 * 1024, // 10MB
      arrayBuffers: 5 * 1024 * 1024, // 5MB
    }));

    // Mock process.env
    process.env.npm_package_version = "2.1.0";
  });

  afterEach(() => {
    // Restore original os functions
    os.platform = originalPlatform;
    os.version = originalVersion;
    os.arch = originalArch;
    os.totalmem = originalTotalmem;
    os.freemem = originalFreemem;
  });

  test("returns a successful health check response", async () => {
    const response = await GET({ request: new Request("http://localhost/api/health") } as any);

    expect(response.status).toBe(200);

    const data = await response.json();

    // Check the structure of the response
    expect(data.status).toBe("ok");
    expect(data.timestamp).toBeDefined();
    expect(data.version).toBe("2.1.0");

    // Check database status
    expect(data.database.connected).toBe(true);

    // Check system info
    expect(data.system.os.platform).toBe("test-platform");
    expect(data.system.os.version).toBe("test-version");
    expect(data.system.os.arch).toBe("test-arch");

    // Check memory info
    expect(data.system.memory.rss).toBe("100 MB");
    expect(data.system.memory.heapTotal).toBe("50 MB");
    expect(data.system.memory.heapUsed).toBe("30 MB");
    expect(data.system.memory.systemTotal).toBe("16 GB");
    expect(data.system.memory.systemFree).toBe("8 GB");

    // Check uptime
    expect(data.system.uptime.startTime).toBeDefined();
    expect(data.system.uptime.uptimeMs).toBeGreaterThanOrEqual(0);
    expect(data.system.uptime.formatted).toBeDefined();
  });

  test("handles database connection failures", async () => {
    // Mock database failure
    mock.module("@/lib/db", () => {
      return {
        db: {
          select: () => ({
            from: () => ({
              limit: () => Promise.reject(new Error("Database connection error"))
            })
          })
        }
      };
    });

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should still return 200 even with DB error, as the service itself is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should still be ok since the service is running
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
      expect(data.database.message).toBe("Database connection error");
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });

  test("handles database connection failures with status 200", async () => {
    // The health endpoint should return 200 even if the database is down,
    // as the service itself is still running

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should return 200 as the service is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should be ok
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
179
src/pages/api/health.ts
Normal file
@@ -0,0 +1,179 @@
import type { APIRoute } from "astro";
import { jsonResponse } from "@/lib/utils";
import { db } from "@/lib/db";
import { ENV } from "@/lib/config";
import os from "os";
import axios from "axios";

// Track when the server started
const serverStartTime = new Date();

// Cache for the latest version to avoid frequent GitHub API calls
interface VersionCache {
  latestVersion: string;
  timestamp: number;
}

let versionCache: VersionCache | null = null;
const CACHE_TTL = 3600000; // 1 hour in milliseconds

export const GET: APIRoute = async () => {
  try {
    // Check database connection by running a simple query
    const dbStatus = await checkDatabaseConnection();

    // Get system information
    const systemInfo = {
      uptime: getUptime(),
      memory: getMemoryUsage(),
      os: {
        platform: os.platform(),
        version: os.version(),
        arch: os.arch(),
      },
      env: ENV.NODE_ENV,
    };

    // Get current and latest versions
    const currentVersion = process.env.npm_package_version || "unknown";
    const latestVersion = await checkLatestVersion();

    // Build response
    const healthData = {
      status: "ok",
      timestamp: new Date().toISOString(),
      version: currentVersion,
      latestVersion: latestVersion,
      updateAvailable: latestVersion !== "unknown" &&
        currentVersion !== "unknown" &&
        latestVersion !== currentVersion,
      database: dbStatus,
      system: systemInfo,
    };

    return jsonResponse({
      data: healthData,
      status: 200,
    });
  } catch (error) {
    console.error("Health check failed:", error);

    return jsonResponse({
      data: {
        status: "error",
        timestamp: new Date().toISOString(),
        error: error instanceof Error ? error.message : "Unknown error",
        version: process.env.npm_package_version || "unknown",
        latestVersion: "unknown",
        updateAvailable: false,
      },
      status: 503, // Service Unavailable
    });
  }
};

/**
 * Check database connection by running a simple query
 */
async function checkDatabaseConnection() {
  try {
    // Run a simple query to check if the database is accessible
    const result = await db.select({ test: sql`1` }).from(sql`sqlite_master`).limit(1);

    return {
      connected: true,
      message: "Database connection successful",
    };
  } catch (error) {
    console.error("Database connection check failed:", error);

    return {
      connected: false,
      message: error instanceof Error ? error.message : "Database connection failed",
    };
  }
}

/**
 * Get server uptime information
 */
function getUptime() {
  const now = new Date();
  const uptimeMs = now.getTime() - serverStartTime.getTime();

  // Convert to human-readable format
  const seconds = Math.floor(uptimeMs / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);
  const days = Math.floor(hours / 24);

  return {
    startTime: serverStartTime.toISOString(),
    uptimeMs,
    formatted: `${days}d ${hours % 24}h ${minutes % 60}m ${seconds % 60}s`,
  };
}

/**
 * Get memory usage information
 */
function getMemoryUsage() {
  const memoryUsage = process.memoryUsage();

  return {
    rss: formatBytes(memoryUsage.rss),
    heapTotal: formatBytes(memoryUsage.heapTotal),
    heapUsed: formatBytes(memoryUsage.heapUsed),
    external: formatBytes(memoryUsage.external),
    systemTotal: formatBytes(os.totalmem()),
    systemFree: formatBytes(os.freemem()),
  };
}

/**
 * Format bytes to human-readable format
 */
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 Bytes';

  const k = 1024;
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));

  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}

/**
 * Check for the latest version from GitHub releases
 */
async function checkLatestVersion(): Promise<string> {
  // Return cached version if available and not expired
  if (versionCache && (Date.now() - versionCache.timestamp) < CACHE_TTL) {
    return versionCache.latestVersion;
  }

  try {
    // Fetch the latest release from GitHub
    const response = await axios.get(
      'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
      { headers: { 'Accept': 'application/vnd.github.v3+json' } }
    );

    // Extract version from tag_name (remove 'v' prefix if present)
    const latestVersion = response.data.tag_name.replace(/^v/, '');

    // Update cache
    versionCache = {
      latestVersion,
      timestamp: Date.now()
    };

    return latestVersion;
  } catch (error) {
    console.error('Failed to check for latest version:', error);
    return 'unknown';
  }
}

// Import sql tag for raw SQL queries (hoisted at module load, so usable above)
import { sql } from "drizzle-orm";
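A quick consumer sketch for the endpoint above, showing the response fields it exposes; the host and port are assumptions:

// Illustrative consumer of /api/health; localhost:4321 is an assumed dev address.
const res = await fetch("http://localhost:4321/api/health");
const health = await res.json();

// updateAvailable compares the running version against the latest GitHub release
if (health.updateAvailable) {
  console.log(`Update available: ${health.version} -> ${health.latestVersion}`);
}
console.log(`DB connected: ${health.database.connected}, uptime: ${health.system.uptime.formatted}`);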
109
src/pages/api/job/mirror-org.test.ts
Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or organizationIds
  if (!body.userId || !body.organizationIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or organizationIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Organization mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Organization Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 400 if organizationIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 200 and starts mirroring organizations", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Organization mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
src/pages/api/job/mirror-org.ts
@@ -6,6 +6,8 @@ import { createGitHubClient } from "@/lib/github";
import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
import { repoStatusEnum } from "@/types/Repository";
import { type MembershipRole } from "@/types/organizations";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -61,31 +63,72 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Fire async mirroring without blocking response
    // Fire async mirroring without blocking response, using parallel processing with resilience
    setTimeout(async () => {
      for (const org of orgs) {
      if (!config.githubConfig.token) {
        throw new Error("GitHub token is missing in config.");
      }

      // Create a single Octokit instance to be reused
      const octokit = createGitHubClient(config.githubConfig.token);

      try {
        await mirrorGitHubOrgToGitea({
          config,
          octokit,
          organization: {
      // Define the concurrency limit - adjust based on API rate limits
      // Using a lower concurrency for organizations since each org might contain many repos
      const CONCURRENCY_LIMIT = 2;

      // Generate a batch ID to group related organizations
      const batchId = uuidv4();

      // Process organizations in parallel with resilience to container restarts
      await processWithResilience(
        orgs,
        async (org) => {
          // Prepare organization data
          const orgData = {
            ...org,
            status: repoStatusEnum.parse("imported"),
            membershipRole: org.membershipRole as MembershipRole,
            lastMirrored: org.lastMirrored ?? undefined,
            errorMessage: org.errorMessage ?? undefined,
          },
          };

          // Log the start of mirroring
          console.log(`Starting mirror for organization: ${org.name}`);

          // Mirror the organization
          await mirrorGitHubOrgToGitea({
            config,
            octokit,
            organization: orgData,
          });
        } catch (error) {
          console.error(`Mirror failed for organization ${org.name}:`, error);

          return org;
        },
        {
          userId: config.userId || "",
          jobType: "mirror",
          batchId,
          getItemId: (org) => org.id,
          getItemName: (org) => org.name,
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 3000,
          checkpointInterval: 1, // Checkpoint after each organization
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Organization mirroring progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully mirrored organization: ${result.name}`);
            }
          },
          onRetry: (org, error, attempt) => {
            console.log(`Retrying organization ${org.name} (attempt ${attempt}): ${error.message}`);
          }
        }
      );

      console.log("All organization mirroring tasks completed");
    }, 0);

    const responsePayload: MirrorOrgResponse = {
109
src/pages/api/job/mirror-repo.test.ts
Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or repositoryIds
  if (!body.userId || !body.repositoryIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or repositoryIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Repository mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Repository Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 400 if repositoryIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 200 and starts mirroring repositories", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Repository mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
src/pages/api/job/mirror-repo.ts
@@ -8,6 +8,8 @@ import {
  mirrorGitHubOrgRepoToGiteaOrg,
} from "@/lib/gitea";
import { createGitHubClient } from "@/lib/github";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -63,52 +65,83 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Start async mirroring in background
    // Start async mirroring in background with parallel processing and resilience
    setTimeout(async () => {
      for (const repo of repos) {
      if (!config.githubConfig.token) {
        throw new Error("GitHub token is missing.");
      }

      // Create a single Octokit instance to be reused
      const octokit = createGitHubClient(config.githubConfig.token);

      try {
      // Define the concurrency limit - adjust based on API rate limits
      const CONCURRENCY_LIMIT = 3;

      // Generate a batch ID to group related repositories
      const batchId = uuidv4();

      // Process repositories in parallel with resilience to container restarts
      await processWithResilience(
        repos,
        async (repo) => {
          // Prepare repository data
          const repoData = {
            ...repo,
            status: repoStatusEnum.parse("imported"),
            organization: repo.organization ?? undefined,
            lastMirrored: repo.lastMirrored ?? undefined,
            errorMessage: repo.errorMessage ?? undefined,
            forkedFrom: repo.forkedFrom ?? undefined,
            visibility: repositoryVisibilityEnum.parse(repo.visibility),
            mirroredLocation: repo.mirroredLocation || "",
          };

          // Log the start of mirroring
          console.log(`Starting mirror for repository: ${repo.name}`);

          // Mirror the repository based on whether it's in an organization
          if (repo.organization && config.githubConfig.preserveOrgStructure) {
            await mirrorGitHubOrgRepoToGiteaOrg({
              config,
              octokit,
              orgName: repo.organization,
              repository: {
                ...repo,
                status: repoStatusEnum.parse("imported"),
                organization: repo.organization ?? undefined,
                lastMirrored: repo.lastMirrored ?? undefined,
                errorMessage: repo.errorMessage ?? undefined,
                forkedFrom: repo.forkedFrom ?? undefined,
                visibility: repositoryVisibilityEnum.parse(repo.visibility),
                mirroredLocation: repo.mirroredLocation || "",
              },
              repository: repoData,
            });
          } else {
            await mirrorGithubRepoToGitea({
              octokit,
              repository: {
                ...repo,
                status: repoStatusEnum.parse("imported"),
                organization: repo.organization ?? undefined,
                lastMirrored: repo.lastMirrored ?? undefined,
                errorMessage: repo.errorMessage ?? undefined,
                forkedFrom: repo.forkedFrom ?? undefined,
                visibility: repositoryVisibilityEnum.parse(repo.visibility),
                mirroredLocation: repo.mirroredLocation || "",
              },
              repository: repoData,
              config,
            });
          }
        } catch (error) {
          console.error(`Mirror failed for repo ${repo.name}:`, error);

          return repo;
        },
        {
          userId: config.userId || "",
          jobType: "mirror",
          batchId,
          getItemId: (repo) => repo.id,
          getItemName: (repo) => repo.name,
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 2000,
          checkpointInterval: 1, // Checkpoint after each repository
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Mirroring progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully mirrored repository: ${result.name}`);
            }
          },
          onRetry: (repo, error, attempt) => {
            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
          }
        }
      );

      console.log("All repository mirroring tasks completed");
    }, 0);

    const responsePayload: MirrorRepoResponse = {
@@ -10,6 +10,8 @@ import {
|
||||
import { createGitHubClient } from "@/lib/github";
|
||||
import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
|
||||
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
|
||||
import { processWithRetry } from "@/lib/utils/concurrency";
|
||||
import { createMirrorJob } from "@/lib/helpers";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
try {
|
||||
@@ -65,10 +67,21 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
);
|
||||
}
|
||||
|
||||
// Start background retry
|
||||
// Start background retry with parallel processing
|
||||
setTimeout(async () => {
|
||||
for (const repo of repos) {
|
||||
try {
|
||||
// Create a single Octokit instance to be reused if needed
|
||||
const octokit = config.githubConfig.token
|
||||
? createGitHubClient(config.githubConfig.token)
|
||||
: null;
|
||||
|
||||
// Define the concurrency limit - adjust based on API rate limits
|
||||
const CONCURRENCY_LIMIT = 3;
|
||||
|
||||
// Process repositories in parallel with retry capability
|
||||
await processWithRetry(
|
||||
repos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const visibility = repositoryVisibilityEnum.parse(repo.visibility);
|
||||
const status = repoStatusEnum.parse(repo.status);
|
||||
const repoData = {
|
||||
@@ -81,6 +94,20 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
};
|
||||
|
||||
// Log the start of retry operation
|
||||
console.log(`Starting retry for repository: ${repo.name}`);
|
||||
|
||||
// Create a mirror job entry to track progress
|
||||
await createMirrorJob({
|
||||
userId: config.userId || "",
|
||||
repositoryId: repo.id,
|
||||
repositoryName: repo.name,
|
||||
message: `Started retry operation for repository: ${repo.name}`,
|
||||
details: `Repository ${repo.name} is now in the retry queue.`,
|
||||
status: "imported",
|
||||
});
|
||||
|
||||
// Determine if the repository exists in Gitea
|
||||
let owner = getGiteaRepoOwner({
|
||||
config,
|
||||
repository: repoData,
|
||||
@@ -93,16 +120,21 @@ export const POST: APIRoute = async ({ request }) => {
          });

          if (present) {
            // If the repository exists, sync it
            await syncGiteaRepo({ config, repository: repoData });
            console.log(`Synced existing repo: ${repo.name}`);
          } else {
            // If the repository doesn't exist, mirror it
            if (!config.githubConfig.token) {
              throw new Error("GitHub token is missing.");
            }

            if (!octokit) {
              throw new Error("Octokit client is not initialized.");
            }

            console.log(`Importing repo: ${repo.name} ${owner}`);

            const octokit = createGitHubClient(config.githubConfig.token);
            if (repo.organization && config.githubConfig.preserveOrgStructure) {
              await mirrorGitHubOrgRepoToGiteaOrg({
                config,
@@ -124,10 +156,28 @@ export const POST: APIRoute = async ({ request }) => {
              });
            }
          }
        } catch (err) {
          console.error(`Failed to retry repo ${repo.name}:`, err);

          return repo;
        },
        {
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 2000,
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Retry progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully processed repository: ${result.name}`);
            }
          },
          onRetry: (repo, error, attempt) => {
            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
          }
        }
      );

      console.log("All repository retry tasks completed");
    }, 0);

    const responsePayload: RetryRepoResponse = {
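To make the control flow concrete, here is a minimal, self-contained sketch of the pattern processWithRetry is used for above — a fixed-size worker pool with per-item retries. This is an illustration under assumptions, not the project's implementation in "@/lib/utils/concurrency":

// Minimal sketch of a worker pool with per-item retries. With maxRetries = 2,
// each item gets one initial attempt plus up to two retries.
async function processWithRetrySketch<T, R>(
  items: T[],
  task: (item: T) => Promise<R>,
  opts: {
    concurrencyLimit: number;
    maxRetries: number;
    retryDelay: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
  }
): Promise<R[]> {
  const results: R[] = [];
  let completed = 0;
  let cursor = 0;

  // Each worker pulls the next unprocessed item until the list is drained.
  async function worker(): Promise<void> {
    while (cursor < items.length) {
      const item = items[cursor++];
      for (let attempt = 1; ; attempt++) {
        try {
          const result = await task(item);
          results.push(result);
          opts.onProgress?.(++completed, items.length, result);
          break;
        } catch (error) {
          if (attempt > opts.maxRetries) {
            // Give up on this item but keep the batch going.
            opts.onProgress?.(++completed, items.length);
            break;
          }
          opts.onRetry?.(item, error as Error, attempt);
          await new Promise((resolve) => setTimeout(resolve, opts.retryDelay));
        }
      }
    }
  }

  const poolSize = Math.min(opts.concurrencyLimit, items.length);
  await Promise.all(Array.from({ length: poolSize }, () => worker()));
  return results;
}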
@@ -5,6 +5,8 @@ import { eq, inArray } from "drizzle-orm";
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
import { syncGiteaRepo } from "@/lib/gitea";
import type { SyncRepoResponse } from "@/types/sync";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -60,13 +62,20 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Start async mirroring in background
    // Start async mirroring in background with parallel processing and resilience
    setTimeout(async () => {
      for (const repo of repos) {
        try {
          await syncGiteaRepo({
            config,
            repository: {
      // Define the concurrency limit - adjust based on API rate limits
      const CONCURRENCY_LIMIT = 5;

      // Generate a batch ID to group related repositories
      const batchId = uuidv4();

      // Process repositories in parallel with resilience to container restarts
      await processWithResilience(
        repos,
        async (repo) => {
          // Prepare repository data
          const repoData = {
            ...repo,
            status: repoStatusEnum.parse(repo.status),
            organization: repo.organization ?? undefined,
@@ -74,12 +83,44 @@ export const POST: APIRoute = async ({ request }) => {
            errorMessage: repo.errorMessage ?? undefined,
            forkedFrom: repo.forkedFrom ?? undefined,
            visibility: repositoryVisibilityEnum.parse(repo.visibility),
            },
          };

          // Log the start of syncing
          console.log(`Starting sync for repository: ${repo.name}`);

          // Sync the repository
          await syncGiteaRepo({
            config,
            repository: repoData,
          });
        } catch (error) {
          console.error(`Sync failed for repo ${repo.name}:`, error);

          return repo;
        },
        {
          userId: config.userId || "",
          jobType: "sync",
          batchId,
          getItemId: (repo) => repo.id,
          getItemName: (repo) => repo.name,
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 2000,
          checkpointInterval: 1, // Checkpoint after each repository
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Syncing progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully synced repository: ${result.name}`);
            }
          },
          onRetry: (repo, error, attempt) => {
            console.log(`Retrying sync for repository ${repo.name} (attempt ${attempt}): ${error.message}`);
          }
        }
      );

      console.log("All repository syncing tasks completed");
    }, 0);

    const responsePayload: SyncRepoResponse = {
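The extra options here (userId, jobType, batchId, getItemId, getItemName, checkpointInterval) suggest that processWithResilience persists per-batch progress so a sync interrupted by a container restart can resume instead of starting over. A hypothetical checkpoint record implied by those options — the real persistence schema belongs to the concurrency utilities and may differ:

// Hypothetical checkpoint record implied by the processWithResilience
// options above; illustration only.
interface BatchCheckpoint {
  batchId: string;            // groups one run's items (the uuidv4 above)
  userId: string;
  jobType: "sync";            // or other job types the utility supports
  totalItems: number;
  completedItemIds: string[]; // collected via getItemId; skipped on resume
  updatedAt: string;          // ISO timestamp of the last checkpoint write
}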
20
src/tests/setup.bun.ts
Normal file
@@ -0,0 +1,20 @@
/**
 * Bun test setup file
 * This file is automatically loaded before running tests
 */

import { afterEach, beforeEach } from "bun:test";

// Clean up after each test
afterEach(() => {
  // Add any cleanup logic here
});

// Setup before each test
beforeEach(() => {
  // Add any setup logic here
});

// Add DOM testing support if needed
// import { DOMParser } from "linkedom";
// global.DOMParser = DOMParser;
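The header comment says the file is loaded automatically; with Bun that is typically wired through the preload option, either on the command line (`bun test --preload ./src/tests/setup.bun.ts`) or via the `preload` key under `[test]` in bunfig.toml. Once preloaded, the hooks registered above wrap every test case, as in this minimal example (the test file itself is hypothetical):

// Example test file: with the setup file preloaded, its beforeEach/afterEach
// hooks run around each test case here.
import { describe, expect, test } from "bun:test";

describe("setup smoke test", () => {
  test("hooks wrap assertions without interfering", () => {
    expect(1 + 1).toBe(2);
  });
});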