Compare commits
23 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b34ed5595b | ||
|
|
cbc11155ef | ||
|
|
941f61830f | ||
|
|
5b60cffaae | ||
|
|
ede5b4dbe8 | ||
|
|
99336e2607 | ||
|
|
cba421d606 | ||
|
|
c4b9a82806 | ||
|
|
38e0fb33b9 | ||
|
|
22a4b71653 | ||
|
|
52568eda36 | ||
|
|
a84191f0a5 | ||
|
|
33829eda20 | ||
|
|
1e63fd2278 | ||
|
|
daf4ab6a93 | ||
|
|
4404af7d40 | ||
|
|
97ff8d190d | ||
|
|
3ff86de67d | ||
|
|
3d8bdff9af | ||
|
|
a28a766f8b | ||
|
|
7afe364a24 | ||
|
|
a4e771d3bd | ||
|
|
703156b15c |
BIN
.github/assets/activity.png
vendored
|
Before Width: | Height: | Size: 140 KiB After Width: | Height: | Size: 816 KiB |
BIN
.github/assets/configuration.png
vendored
|
Before Width: | Height: | Size: 164 KiB After Width: | Height: | Size: 945 KiB |
BIN
.github/assets/dashboard.png
vendored
|
Before Width: | Height: | Size: 194 KiB After Width: | Height: | Size: 943 KiB |
BIN
.github/assets/organisations.png
vendored
|
Before Width: | Height: | Size: 88 KiB After Width: | Height: | Size: 784 KiB |
BIN
.github/assets/repositories.png
vendored
|
Before Width: | Height: | Size: 170 KiB After Width: | Height: | Size: 970 KiB |
44
README.md
@@ -30,7 +30,7 @@ sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.
|
|||||||
See the [LXC Container Deployment Guide](scripts/README-lxc.md).
|
See the [LXC Container Deployment Guide](scripts/README-lxc.md).
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<img src=".github/assets/dashboard.png" alt="Dashboard" width="80%"/>
|
<img src=".github/assets/dashboard.png" alt="Dashboard" width="full"/>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
## ✨ Features
|
## ✨ Features
|
||||||
@@ -50,12 +50,12 @@ See the [LXC Container Deployment Guide](scripts/README-lxc.md).
|
|||||||
## 📸 Screenshots
|
## 📸 Screenshots
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<img src=".github/assets/repositories.png" width="45%"/>
|
<img src=".github/assets/repositories.png" width="49%"/>
|
||||||
<img src=".github/assets/organisations.png" width="45%"/>
|
<img src=".github/assets/organisations.png" width="49%"/>
|
||||||
</p>
|
</p>
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<img src=".github/assets/configuration.png" width="45%"/>
|
<img src=".github/assets/configuration.png" width="49%"/>
|
||||||
<img src=".github/assets/activity.png" width="45%"/>
|
<img src=".github/assets/activity.png" width="49%"/>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
### Dashboard
|
### Dashboard
|
||||||
@@ -69,7 +69,7 @@ Easily configure your GitHub and Gitea connections, set up automatic mirroring s
|
|||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
See the [Quick Start Guide](docs/quickstart.md) for detailed instructions on getting up and running quickly.
|
See the [Quick Start Guide](src/content/docs/quickstart.md) for detailed instructions on getting up and running quickly.
|
||||||
|
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
|
|
||||||
@@ -282,9 +282,39 @@ bun run reset-users
|
|||||||
bun run check-db
|
bun run check-db
|
||||||
```
|
```
|
||||||
|
|
||||||
|
##### Database Permissions for Direct Installation
|
||||||
|
|
||||||
|
> [!IMPORTANT]
|
||||||
|
> **If you're running the application directly** (not using Docker), you may encounter SQLite permission errors. This is because SQLite requires both read/write access to the database file and write access to the directory containing the database.
|
||||||
|
|
||||||
|
**Common Error:**
|
||||||
|
```
|
||||||
|
Error: [ERROR] SQLiteError: attempt to write a readonly database
|
||||||
|
```
|
||||||
|
|
||||||
|
**Solution:**
|
||||||
|
```bash
|
||||||
|
# Ensure the data directory exists and has proper permissions
|
||||||
|
mkdir -p data
|
||||||
|
chmod 755 data
|
||||||
|
|
||||||
|
# If the database file already exists, ensure it's writable
|
||||||
|
chmod 644 data/gitea-mirror.db
|
||||||
|
|
||||||
|
# Make sure the user running the application owns the data directory
|
||||||
|
chown -R $(whoami) data/
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why Docker doesn't have this issue:**
|
||||||
|
- Docker containers run with a dedicated user (`gitea-mirror`) that owns the `/app/data` directory
|
||||||
|
- The container setup ensures proper permissions are set during image build
|
||||||
|
- Volume mounts are handled by Docker with appropriate permissions
|
||||||
|
|
||||||
|
**Recommended approach:** Use Docker or Docker Compose for deployment to avoid permission issues entirely.
|
||||||
|
|
||||||
### Configuration
|
### Configuration
|
||||||
|
|
||||||
Gitea Mirror can be configured through environment variables or through the web UI. See the [Configuration Guide](docs/configuration.md) for more details.
|
Gitea Mirror can be configured through environment variables or through the web UI. See the [Configuration Guide](src/content/docs/configuration.md) for more details.
|
||||||
|
|
||||||
Key configuration options include:
|
Key configuration options include:
|
||||||
|
|
||||||
|
|||||||
@@ -232,6 +232,23 @@ else
|
|||||||
echo "❌ Startup recovery failed with exit code $RECOVERY_EXIT_CODE"
|
echo "❌ Startup recovery failed with exit code $RECOVERY_EXIT_CODE"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Function to handle shutdown signals
|
||||||
|
shutdown_handler() {
|
||||||
|
echo "🛑 Received shutdown signal, forwarding to application..."
|
||||||
|
if [ ! -z "$APP_PID" ]; then
|
||||||
|
kill -TERM "$APP_PID"
|
||||||
|
wait "$APP_PID"
|
||||||
|
fi
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Set up signal handlers
|
||||||
|
trap 'shutdown_handler' TERM INT HUP
|
||||||
|
|
||||||
# Start the application
|
# Start the application
|
||||||
echo "Starting Gitea Mirror..."
|
echo "Starting Gitea Mirror..."
|
||||||
exec bun ./dist/server/entry.mjs
|
bun ./dist/server/entry.mjs &
|
||||||
|
APP_PID=$!
|
||||||
|
|
||||||
|
# Wait for the application to finish
|
||||||
|
wait "$APP_PID"
|
||||||
|
|||||||
249
docs/GRACEFUL_SHUTDOWN.md
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
# Graceful Shutdown and Enhanced Job Recovery
|
||||||
|
|
||||||
|
This document describes the graceful shutdown and enhanced job recovery capabilities implemented in gitea-mirror v2.8.0+.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The gitea-mirror application now includes comprehensive graceful shutdown handling and enhanced job recovery mechanisms designed specifically for containerized environments. These features ensure:
|
||||||
|
|
||||||
|
- **No data loss** during container restarts or shutdowns
|
||||||
|
- **Automatic job resumption** after application restarts
|
||||||
|
- **Clean termination** of all active processes and connections
|
||||||
|
- **Container-aware design** optimized for Docker/LXC deployments
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
### 1. Graceful Shutdown Manager
|
||||||
|
|
||||||
|
The shutdown manager (`src/lib/shutdown-manager.ts`) provides centralized coordination of application termination:
|
||||||
|
|
||||||
|
#### Key Capabilities:
|
||||||
|
- **Active Job Tracking**: Monitors all running mirroring/sync jobs
|
||||||
|
- **State Persistence**: Saves job progress to database before shutdown
|
||||||
|
- **Callback System**: Allows services to register cleanup functions
|
||||||
|
- **Timeout Protection**: Prevents hanging shutdowns with configurable timeouts
|
||||||
|
- **Signal Coordination**: Works with signal handlers for proper container lifecycle
|
||||||
|
|
||||||
|
#### Configuration:
|
||||||
|
- **Shutdown Timeout**: 30 seconds maximum (configurable)
|
||||||
|
- **Job Save Timeout**: 10 seconds per job (configurable)
|
||||||
|
|
||||||
|
### 2. Signal Handlers
|
||||||
|
|
||||||
|
The signal handler system (`src/lib/signal-handlers.ts`) ensures proper response to container lifecycle events:
|
||||||
|
|
||||||
|
#### Supported Signals:
|
||||||
|
- **SIGTERM**: Docker stop, Kubernetes pod termination
|
||||||
|
- **SIGINT**: Ctrl+C, manual interruption
|
||||||
|
- **SIGHUP**: Terminal hangup, service reload
|
||||||
|
- **Uncaught Exceptions**: Emergency shutdown on critical errors
|
||||||
|
- **Unhandled Rejections**: Graceful handling of promise failures
|
||||||
|
|
||||||
|
### 3. Enhanced Job Recovery
|
||||||
|
|
||||||
|
Building on the existing recovery system, new enhancements include:
|
||||||
|
|
||||||
|
#### Shutdown-Aware Processing:
|
||||||
|
- Jobs check for shutdown signals during execution
|
||||||
|
- Automatic state saving when shutdown is detected
|
||||||
|
- Proper job status management (interrupted vs failed)
|
||||||
|
|
||||||
|
#### Container Integration:
|
||||||
|
- Docker entrypoint script forwards signals correctly
|
||||||
|
- Startup recovery runs before main application
|
||||||
|
- Recovery timeouts prevent startup delays
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Basic Operation
|
||||||
|
|
||||||
|
The graceful shutdown system is automatically initialized when the application starts. No manual configuration is required for basic operation.
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
|
||||||
|
Test the graceful shutdown functionality:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run the integration test
|
||||||
|
bun run test-shutdown
|
||||||
|
|
||||||
|
# Clean up test data
|
||||||
|
bun run test-shutdown-cleanup
|
||||||
|
|
||||||
|
# Run unit tests
|
||||||
|
bun test src/lib/shutdown-manager.test.ts
|
||||||
|
bun test src/lib/signal-handlers.test.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
### Manual Testing
|
||||||
|
|
||||||
|
1. **Start the application**:
|
||||||
|
```bash
|
||||||
|
bun run dev
|
||||||
|
# or in production
|
||||||
|
bun run start
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Start a mirroring job** through the web interface
|
||||||
|
|
||||||
|
3. **Send shutdown signal**:
|
||||||
|
```bash
|
||||||
|
# Send SIGTERM (recommended)
|
||||||
|
kill -TERM <process_id>
|
||||||
|
|
||||||
|
# Or use Ctrl+C for SIGINT
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Verify job state** is saved and can be resumed on restart
|
||||||
|
|
||||||
|
### Container Testing
|
||||||
|
|
||||||
|
Test with Docker:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build and run container
|
||||||
|
docker build -t gitea-mirror .
|
||||||
|
docker run -d --name test-shutdown gitea-mirror
|
||||||
|
|
||||||
|
# Start a job, then stop container
|
||||||
|
docker stop test-shutdown
|
||||||
|
|
||||||
|
# Restart and verify recovery
|
||||||
|
docker start test-shutdown
|
||||||
|
docker logs test-shutdown
|
||||||
|
```
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### Shutdown Flow
|
||||||
|
|
||||||
|
1. **Signal Reception**: Signal handlers detect termination request
|
||||||
|
2. **Shutdown Initiation**: Shutdown manager begins graceful termination
|
||||||
|
3. **Job State Saving**: All active jobs save current progress to database
|
||||||
|
4. **Service Cleanup**: Registered callbacks stop background services
|
||||||
|
5. **Connection Cleanup**: Database connections and resources are released
|
||||||
|
6. **Process Termination**: Application exits with appropriate code
|
||||||
|
|
||||||
|
### Job State Management
|
||||||
|
|
||||||
|
During shutdown, active jobs are updated with:
|
||||||
|
- `inProgress: false` - Mark as not currently running
|
||||||
|
- `lastCheckpoint: <timestamp>` - Record shutdown time
|
||||||
|
- `message: "Job interrupted by application shutdown - will resume on restart"`
|
||||||
|
- Status remains as `"imported"` (not `"failed"`) to enable recovery
|
||||||
|
|
||||||
|
### Recovery Integration
|
||||||
|
|
||||||
|
The existing recovery system automatically detects and resumes interrupted jobs:
|
||||||
|
- Jobs with `inProgress: false` and incomplete status are candidates for recovery
|
||||||
|
- Recovery runs during application startup (before serving requests)
|
||||||
|
- Jobs resume from their last checkpoint with remaining items
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Optional: Adjust shutdown timeout (default: 30000ms)
|
||||||
|
SHUTDOWN_TIMEOUT=30000
|
||||||
|
|
||||||
|
# Optional: Adjust job save timeout (default: 10000ms)
|
||||||
|
JOB_SAVE_TIMEOUT=10000
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Configuration
|
||||||
|
|
||||||
|
The Docker entrypoint script includes proper signal handling:
|
||||||
|
|
||||||
|
```dockerfile
|
||||||
|
# Signals are forwarded to the application process
|
||||||
|
# SIGTERM is handled gracefully with 30-second timeout
|
||||||
|
# Container stops cleanly without force-killing processes
|
||||||
|
```
|
||||||
|
|
||||||
|
### Kubernetes Configuration
|
||||||
|
|
||||||
|
For Kubernetes deployments, configure appropriate termination grace period:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Pod
|
||||||
|
spec:
|
||||||
|
terminationGracePeriodSeconds: 45 # Allow time for graceful shutdown
|
||||||
|
containers:
|
||||||
|
- name: gitea-mirror
|
||||||
|
# ... other configuration
|
||||||
|
```
|
||||||
|
|
||||||
|
## Monitoring and Debugging
|
||||||
|
|
||||||
|
### Logs
|
||||||
|
|
||||||
|
The application provides detailed logging during shutdown:
|
||||||
|
|
||||||
|
```
|
||||||
|
🛑 Graceful shutdown initiated by signal: SIGTERM
|
||||||
|
📊 Shutdown status: 2 active jobs, 1 callbacks
|
||||||
|
📝 Step 1: Saving active job states...
|
||||||
|
Saving state for job abc-123...
|
||||||
|
✅ Saved state for job abc-123
|
||||||
|
🔧 Step 2: Executing shutdown callbacks...
|
||||||
|
✅ Shutdown callback 1 completed
|
||||||
|
💾 Step 3: Closing database connections...
|
||||||
|
✅ Graceful shutdown completed successfully
|
||||||
|
```
|
||||||
|
|
||||||
|
### Status Endpoints
|
||||||
|
|
||||||
|
Check shutdown manager status via API:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Get current status (if application is running)
|
||||||
|
curl http://localhost:4321/api/health
|
||||||
|
```
|
||||||
|
|
||||||
|
### Troubleshooting
|
||||||
|
|
||||||
|
**Problem**: Jobs not resuming after restart
|
||||||
|
- **Check**: Startup recovery logs for errors
|
||||||
|
- **Verify**: Database contains interrupted jobs with correct status
|
||||||
|
- **Test**: Run `bun run startup-recovery` manually
|
||||||
|
|
||||||
|
**Problem**: Shutdown timeout reached
|
||||||
|
- **Check**: Job complexity and database performance
|
||||||
|
- **Adjust**: Increase `SHUTDOWN_TIMEOUT` environment variable
|
||||||
|
- **Monitor**: Database connection and disk I/O during shutdown
|
||||||
|
|
||||||
|
**Problem**: Container force-killed
|
||||||
|
- **Check**: Container orchestrator termination grace period
|
||||||
|
- **Adjust**: Increase grace period to allow shutdown completion
|
||||||
|
- **Monitor**: Application shutdown logs for timing issues
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### Development
|
||||||
|
- Always test graceful shutdown during development
|
||||||
|
- Use the provided test scripts to verify functionality
|
||||||
|
- Monitor logs for shutdown timing and job state persistence
|
||||||
|
|
||||||
|
### Production
|
||||||
|
- Set appropriate container termination grace periods
|
||||||
|
- Monitor shutdown logs for performance issues
|
||||||
|
- Use health checks to verify application readiness after restart
|
||||||
|
- Consider job complexity when planning maintenance windows
|
||||||
|
|
||||||
|
### Monitoring
|
||||||
|
- Track job recovery success rates
|
||||||
|
- Monitor shutdown duration metrics
|
||||||
|
- Alert on forced terminations or recovery failures
|
||||||
|
- Log analysis for shutdown pattern optimization
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
Planned improvements for future versions:
|
||||||
|
|
||||||
|
1. **Configurable Timeouts**: Environment variable configuration for all timeouts
|
||||||
|
2. **Shutdown Metrics**: Prometheus metrics for shutdown performance
|
||||||
|
3. **Progressive Shutdown**: Graceful degradation of service capabilities
|
||||||
|
4. **Job Prioritization**: Priority-based job saving during shutdown
|
||||||
|
5. **Health Check Integration**: Readiness probes during shutdown process
|
||||||
236
docs/SHUTDOWN_PROCESS.md
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
# Graceful Shutdown Process
|
||||||
|
|
||||||
|
This document details how the gitea-mirror application handles graceful shutdown during active mirroring operations, with specific focus on job interruption and recovery.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The graceful shutdown system is designed for **fast, clean termination** without waiting for long-running jobs to complete. It prioritizes **quick shutdown times** (under 30 seconds) while **preserving all progress** for seamless recovery.
|
||||||
|
|
||||||
|
## Key Principle
|
||||||
|
|
||||||
|
**The application does NOT wait for jobs to finish before shutting down.** Instead, it saves the current state and resumes after restart.
|
||||||
|
|
||||||
|
## Shutdown Scenario Example
|
||||||
|
|
||||||
|
### Initial State
|
||||||
|
- **Job**: Mirror 500 repositories
|
||||||
|
- **Progress**: 200 repositories completed
|
||||||
|
- **Remaining**: 300 repositories pending
|
||||||
|
- **Action**: User initiates shutdown (SIGTERM, Ctrl+C, Docker stop)
|
||||||
|
|
||||||
|
### Shutdown Process (Under 30 seconds)
|
||||||
|
|
||||||
|
#### Step 1: Signal Detection (Immediate)
|
||||||
|
```
|
||||||
|
📡 Received SIGTERM signal
|
||||||
|
🛑 Graceful shutdown initiated by signal: SIGTERM
|
||||||
|
📊 Shutdown status: 1 active jobs, 2 callbacks
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Step 2: Job State Saving (1-10 seconds)
|
||||||
|
```
|
||||||
|
📝 Step 1: Saving active job states...
|
||||||
|
Saving state for job abc-123...
|
||||||
|
✅ Saved state for job abc-123
|
||||||
|
```
|
||||||
|
|
||||||
|
**What gets saved:**
|
||||||
|
- `inProgress: false` - Mark job as not currently running
|
||||||
|
- `completedItems: 200` - Number of repos successfully mirrored
|
||||||
|
- `totalItems: 500` - Total repos in the job
|
||||||
|
- `completedItemIds: [repo1, repo2, ..., repo200]` - List of completed repos
|
||||||
|
- `itemIds: [repo1, repo2, ..., repo500]` - Full list of repos
|
||||||
|
- `lastCheckpoint: 2025-05-24T17:30:00Z` - Exact shutdown time
|
||||||
|
- `message: "Job interrupted by application shutdown - will resume on restart"`
|
||||||
|
- `status: "imported"` - Keeps status as resumable (not "failed")
|
||||||
|
|
||||||
|
#### Step 3: Service Cleanup (1-5 seconds)
|
||||||
|
```
|
||||||
|
🔧 Step 2: Executing shutdown callbacks...
|
||||||
|
🛑 Shutting down cleanup service...
|
||||||
|
✅ Cleanup service stopped
|
||||||
|
✅ Shutdown callback 1 completed
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Step 4: Clean Exit (Immediate)
|
||||||
|
```
|
||||||
|
💾 Step 3: Closing database connections...
|
||||||
|
✅ Graceful shutdown completed successfully
|
||||||
|
```
|
||||||
|
|
||||||
|
**Total shutdown time: ~15 seconds** (well under the 30-second limit)
|
||||||
|
|
||||||
|
## What Happens to the Remaining 300 Repos?
|
||||||
|
|
||||||
|
### During Shutdown
|
||||||
|
- **NOT processed** - The remaining 300 repos are not mirrored
|
||||||
|
- **NOT lost** - Their IDs are preserved in the job state
|
||||||
|
- **NOT marked as failed** - Job status remains "imported" for recovery
|
||||||
|
|
||||||
|
### After Restart
|
||||||
|
The recovery system automatically:
|
||||||
|
|
||||||
|
1. **Detects interrupted job** during startup
|
||||||
|
2. **Calculates remaining work**: 500 - 200 = 300 repos
|
||||||
|
3. **Extracts remaining repo IDs**: repos 201-500 from the original list
|
||||||
|
4. **Resumes processing** from exactly where it left off
|
||||||
|
5. **Continues until completion** of all 500 repos
|
||||||
|
|
||||||
|
## Timeout Configuration
|
||||||
|
|
||||||
|
### Shutdown Timeouts
|
||||||
|
```typescript
|
||||||
|
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds max shutdown time
|
||||||
|
const JOB_SAVE_TIMEOUT = 10000; // 10 seconds to save job state
|
||||||
|
```
|
||||||
|
|
||||||
|
### Timeout Behavior
|
||||||
|
- **Normal case**: Shutdown completes in 10-20 seconds
|
||||||
|
- **Slow database**: Up to 30 seconds allowed
|
||||||
|
- **Timeout exceeded**: Force exit with code 1
|
||||||
|
- **Container kill**: Orchestrator should allow 45+ seconds grace period
|
||||||
|
|
||||||
|
## Job State Persistence
|
||||||
|
|
||||||
|
### Database Schema
|
||||||
|
The `mirror_jobs` table stores complete job state:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Job identification
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
job_type TEXT NOT NULL DEFAULT 'mirror',
|
||||||
|
|
||||||
|
-- Progress tracking
|
||||||
|
total_items INTEGER,
|
||||||
|
completed_items INTEGER DEFAULT 0,
|
||||||
|
item_ids TEXT, -- JSON array of all repo IDs
|
||||||
|
completed_item_ids TEXT DEFAULT '[]', -- JSON array of completed repo IDs
|
||||||
|
|
||||||
|
-- State management
|
||||||
|
in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean: currently running
|
||||||
|
started_at TIMESTAMP,
|
||||||
|
completed_at TIMESTAMP,
|
||||||
|
last_checkpoint TIMESTAMP, -- Last progress save
|
||||||
|
|
||||||
|
-- Status and messaging
|
||||||
|
status TEXT NOT NULL DEFAULT 'imported',
|
||||||
|
message TEXT NOT NULL
|
||||||
|
```
|
||||||
|
|
||||||
|
### Recovery Query
|
||||||
|
The recovery system finds interrupted jobs:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT * FROM mirror_jobs
|
||||||
|
WHERE in_progress = 0
|
||||||
|
AND status = 'imported'
|
||||||
|
AND completed_at IS NULL
|
||||||
|
AND total_items > completed_items;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Shutdown-Aware Processing
|
||||||
|
|
||||||
|
### Concurrency Check
|
||||||
|
During job execution, each repo processing checks for shutdown:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Before processing each repository
|
||||||
|
if (isShuttingDown()) {
|
||||||
|
throw new Error('Processing interrupted by application shutdown');
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Checkpoint Intervals
|
||||||
|
Jobs save progress periodically (every 10 repos by default):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
checkpointInterval: 10, // Save progress every 10 repositories
|
||||||
|
```
|
||||||
|
|
||||||
|
This ensures minimal work loss even if shutdown occurs between checkpoints.
|
||||||
|
|
||||||
|
## Container Integration
|
||||||
|
|
||||||
|
### Docker Entrypoint
|
||||||
|
The Docker entrypoint properly forwards signals:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Set up signal handlers
|
||||||
|
trap 'shutdown_handler' TERM INT HUP
|
||||||
|
|
||||||
|
# Start application in background
|
||||||
|
bun ./dist/server/entry.mjs &
|
||||||
|
APP_PID=$!
|
||||||
|
|
||||||
|
# Wait for application to finish
|
||||||
|
wait "$APP_PID"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Kubernetes Configuration
|
||||||
|
Recommended pod configuration:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
apiVersion: v1
|
||||||
|
kind: Pod
|
||||||
|
spec:
|
||||||
|
terminationGracePeriodSeconds: 45 # Allow time for graceful shutdown
|
||||||
|
containers:
|
||||||
|
- name: gitea-mirror
|
||||||
|
# ... other configuration
|
||||||
|
```
|
||||||
|
|
||||||
|
## Monitoring and Logging
|
||||||
|
|
||||||
|
### Shutdown Logs
|
||||||
|
```
|
||||||
|
🛑 Graceful shutdown initiated by signal: SIGTERM
|
||||||
|
📊 Shutdown status: 1 active jobs, 2 callbacks
|
||||||
|
📝 Step 1: Saving active job states...
|
||||||
|
Saving state for 1 active jobs...
|
||||||
|
✅ Completed saving all active jobs
|
||||||
|
🔧 Step 2: Executing shutdown callbacks...
|
||||||
|
✅ Completed all shutdown callbacks
|
||||||
|
💾 Step 3: Closing database connections...
|
||||||
|
✅ Graceful shutdown completed successfully
|
||||||
|
```
|
||||||
|
|
||||||
|
### Recovery Logs
|
||||||
|
```
|
||||||
|
⚠️ Jobs found that need recovery. Starting recovery process...
|
||||||
|
Resuming job abc-123 with 300 remaining items...
|
||||||
|
✅ Recovery completed successfully
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### For Operations
|
||||||
|
1. **Monitor shutdown times** - Should complete under 30 seconds
|
||||||
|
2. **Check recovery logs** - Verify jobs resume correctly after restart
|
||||||
|
3. **Set appropriate grace periods** - Allow 45+ seconds in orchestrators
|
||||||
|
4. **Plan maintenance windows** - Jobs will resume but may take time to complete
|
||||||
|
|
||||||
|
### For Development
|
||||||
|
1. **Test shutdown scenarios** - Use `bun run test-shutdown`
|
||||||
|
2. **Monitor job progress** - Check checkpoint frequency and timing
|
||||||
|
3. **Verify recovery** - Ensure interrupted jobs resume correctly
|
||||||
|
4. **Handle edge cases** - Test shutdown during different job phases
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Shutdown Takes Too Long
|
||||||
|
- **Check**: Database performance during job state saving
|
||||||
|
- **Solution**: Increase `SHUTDOWN_TIMEOUT` environment variable
|
||||||
|
- **Monitor**: Job complexity and checkpoint frequency
|
||||||
|
|
||||||
|
### Jobs Don't Resume
|
||||||
|
- **Check**: Recovery logs for errors during startup
|
||||||
|
- **Verify**: Database contains interrupted jobs with correct status
|
||||||
|
- **Test**: Run `bun run startup-recovery` manually
|
||||||
|
|
||||||
|
### Container Force-Killed
|
||||||
|
- **Check**: Container orchestrator termination grace period
|
||||||
|
- **Increase**: Grace period to 45+ seconds
|
||||||
|
- **Monitor**: Application shutdown completion time
|
||||||
|
|
||||||
|
This design ensures **production-ready graceful shutdown** with **zero data loss** and **fast recovery times** suitable for modern containerized deployments.
|
||||||
14
package.json
@@ -1,19 +1,18 @@
|
|||||||
{
|
{
|
||||||
"name": "gitea-mirror",
|
"name": "gitea-mirror",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"version": "2.8.0",
|
"version": "2.10.0",
|
||||||
"engines": {
|
"engines": {
|
||||||
"bun": ">=1.2.9"
|
"bun": ">=1.2.9"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"setup": "bun install && bun run manage-db init && bun run update-db",
|
"setup": "bun install && bun run manage-db init",
|
||||||
"dev": "bunx --bun astro dev",
|
"dev": "bunx --bun astro dev",
|
||||||
"dev:clean": "bun run cleanup-db && bun run manage-db init && bun run update-db && bunx --bun astro dev",
|
"dev:clean": "bun run cleanup-db && bun run manage-db init && bunx --bun astro dev",
|
||||||
"build": "bunx --bun astro build",
|
"build": "bunx --bun astro build",
|
||||||
"cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
|
"cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
|
||||||
"manage-db": "bun scripts/manage-db.ts",
|
"manage-db": "bun scripts/manage-db.ts",
|
||||||
"init-db": "bun scripts/manage-db.ts init",
|
"init-db": "bun scripts/manage-db.ts init",
|
||||||
"update-db": "bun scripts/update-mirror-jobs-table.ts",
|
|
||||||
"check-db": "bun scripts/manage-db.ts check",
|
"check-db": "bun scripts/manage-db.ts check",
|
||||||
"fix-db": "bun scripts/manage-db.ts fix",
|
"fix-db": "bun scripts/manage-db.ts fix",
|
||||||
"reset-users": "bun scripts/manage-db.ts reset-users",
|
"reset-users": "bun scripts/manage-db.ts reset-users",
|
||||||
@@ -22,9 +21,11 @@
|
|||||||
"startup-recovery-force": "bun scripts/startup-recovery.ts --force",
|
"startup-recovery-force": "bun scripts/startup-recovery.ts --force",
|
||||||
"test-recovery": "bun scripts/test-recovery.ts",
|
"test-recovery": "bun scripts/test-recovery.ts",
|
||||||
"test-recovery-cleanup": "bun scripts/test-recovery.ts --cleanup",
|
"test-recovery-cleanup": "bun scripts/test-recovery.ts --cleanup",
|
||||||
|
"test-shutdown": "bun scripts/test-graceful-shutdown.ts",
|
||||||
|
"test-shutdown-cleanup": "bun scripts/test-graceful-shutdown.ts --cleanup",
|
||||||
"preview": "bunx --bun astro preview",
|
"preview": "bunx --bun astro preview",
|
||||||
"start": "bun dist/server/entry.mjs",
|
"start": "bun dist/server/entry.mjs",
|
||||||
"start:fresh": "bun run cleanup-db && bun run manage-db init && bun run update-db && bun dist/server/entry.mjs",
|
"start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
|
||||||
"test": "bun test",
|
"test": "bun test",
|
||||||
"test:watch": "bun test --watch",
|
"test:watch": "bun test --watch",
|
||||||
"test:coverage": "bun test --coverage",
|
"test:coverage": "bun test --coverage",
|
||||||
@@ -53,7 +54,6 @@
|
|||||||
"@types/react": "^19.1.4",
|
"@types/react": "^19.1.4",
|
||||||
"@types/react-dom": "^19.1.5",
|
"@types/react-dom": "^19.1.5",
|
||||||
"astro": "^5.7.13",
|
"astro": "^5.7.13",
|
||||||
"axios": "^1.9.0",
|
|
||||||
"bcryptjs": "^3.0.2",
|
"bcryptjs": "^3.0.2",
|
||||||
"canvas-confetti": "^1.9.3",
|
"canvas-confetti": "^1.9.3",
|
||||||
"class-variance-authority": "^0.7.1",
|
"class-variance-authority": "^0.7.1",
|
||||||
@@ -68,7 +68,6 @@
|
|||||||
"react-dom": "^19.1.0",
|
"react-dom": "^19.1.0",
|
||||||
"react-icons": "^5.5.0",
|
"react-icons": "^5.5.0",
|
||||||
"sonner": "^2.0.3",
|
"sonner": "^2.0.3",
|
||||||
"superagent": "^10.2.1",
|
|
||||||
"tailwind-merge": "^3.3.0",
|
"tailwind-merge": "^3.3.0",
|
||||||
"tailwindcss": "^4.1.7",
|
"tailwindcss": "^4.1.7",
|
||||||
"tw-animate-css": "^1.3.0",
|
"tw-animate-css": "^1.3.0",
|
||||||
@@ -80,7 +79,6 @@
|
|||||||
"@testing-library/react": "^16.3.0",
|
"@testing-library/react": "^16.3.0",
|
||||||
"@types/bcryptjs": "^3.0.0",
|
"@types/bcryptjs": "^3.0.0",
|
||||||
"@types/jsonwebtoken": "^9.0.9",
|
"@types/jsonwebtoken": "^9.0.9",
|
||||||
"@types/superagent": "^8.1.9",
|
|
||||||
"@types/uuid": "^10.0.0",
|
"@types/uuid": "^10.0.0",
|
||||||
"@vitejs/plugin-react": "^4.4.1",
|
"@vitejs/plugin-react": "^4.4.1",
|
||||||
"jsdom": "^26.1.0",
|
"jsdom": "^26.1.0",
|
||||||
|
|||||||
238
scripts/test-graceful-shutdown.ts
Normal file
@@ -0,0 +1,238 @@
|
|||||||
|
#!/usr/bin/env bun
|
||||||
|
/**
|
||||||
|
* Integration test for graceful shutdown functionality
|
||||||
|
*
|
||||||
|
* This script tests the complete graceful shutdown flow:
|
||||||
|
* 1. Starts a mock job
|
||||||
|
* 2. Initiates shutdown
|
||||||
|
* 3. Verifies job state is saved correctly
|
||||||
|
* 4. Tests recovery after restart
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* bun scripts/test-graceful-shutdown.ts [--cleanup]
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { db, mirrorJobs } from "../src/lib/db";
|
||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import {
|
||||||
|
initializeShutdownManager,
|
||||||
|
registerActiveJob,
|
||||||
|
unregisterActiveJob,
|
||||||
|
gracefulShutdown,
|
||||||
|
getShutdownStatus,
|
||||||
|
registerShutdownCallback
|
||||||
|
} from "../src/lib/shutdown-manager";
|
||||||
|
import { setupSignalHandlers, removeSignalHandlers } from "../src/lib/signal-handlers";
|
||||||
|
import { createMirrorJob } from "../src/lib/helpers";
|
||||||
|
|
||||||
|
// Test configuration
|
||||||
|
const TEST_USER_ID = "test-user-shutdown";
|
||||||
|
const TEST_JOB_PREFIX = "test-shutdown-job";
|
||||||
|
|
||||||
|
// Parse command line arguments
|
||||||
|
const args = process.argv.slice(2);
|
||||||
|
const shouldCleanup = args.includes('--cleanup');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test job for shutdown testing
|
||||||
|
*/
|
||||||
|
async function createTestJob(): Promise<string> {
|
||||||
|
console.log('📝 Creating test job...');
|
||||||
|
|
||||||
|
const jobId = await createMirrorJob({
|
||||||
|
userId: TEST_USER_ID,
|
||||||
|
message: 'Test job for graceful shutdown testing',
|
||||||
|
details: 'This job simulates a long-running mirroring operation',
|
||||||
|
status: "mirroring",
|
||||||
|
jobType: "mirror",
|
||||||
|
totalItems: 10,
|
||||||
|
itemIds: ['item-1', 'item-2', 'item-3', 'item-4', 'item-5'],
|
||||||
|
completedItemIds: ['item-1', 'item-2'], // Simulate partial completion
|
||||||
|
inProgress: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`✅ Created test job: ${jobId}`);
|
||||||
|
return jobId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Verify that job state was saved correctly during shutdown
|
||||||
|
*/
|
||||||
|
async function verifyJobState(jobId: string): Promise<boolean> {
|
||||||
|
console.log(`🔍 Verifying job state for ${jobId}...`);
|
||||||
|
|
||||||
|
const jobs = await db
|
||||||
|
.select()
|
||||||
|
.from(mirrorJobs)
|
||||||
|
.where(eq(mirrorJobs.id, jobId));
|
||||||
|
|
||||||
|
if (jobs.length === 0) {
|
||||||
|
console.error(`❌ Job ${jobId} not found in database`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const job = jobs[0];
|
||||||
|
|
||||||
|
// Check that the job was marked as interrupted
|
||||||
|
if (job.inProgress) {
|
||||||
|
console.error(`❌ Job ${jobId} is still marked as in progress`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!job.message?.includes('interrupted by application shutdown')) {
|
||||||
|
console.error(`❌ Job ${jobId} does not have shutdown message. Message: ${job.message}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!job.lastCheckpoint) {
|
||||||
|
console.error(`❌ Job ${jobId} does not have a checkpoint timestamp`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`✅ Job ${jobId} state verified correctly`);
|
||||||
|
console.log(` - In Progress: ${job.inProgress}`);
|
||||||
|
console.log(` - Message: ${job.message}`);
|
||||||
|
console.log(` - Last Checkpoint: ${job.lastCheckpoint}`);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test the graceful shutdown process
|
||||||
|
*/
|
||||||
|
async function testGracefulShutdown(): Promise<void> {
|
||||||
|
console.log('\n🧪 Testing Graceful Shutdown Process');
|
||||||
|
console.log('=====================================\n');
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Step 1: Initialize shutdown manager
|
||||||
|
console.log('Step 1: Initializing shutdown manager...');
|
||||||
|
initializeShutdownManager();
|
||||||
|
setupSignalHandlers();
|
||||||
|
|
||||||
|
// Step 2: Create and register a test job
|
||||||
|
console.log('\nStep 2: Creating and registering test job...');
|
||||||
|
const jobId = await createTestJob();
|
||||||
|
registerActiveJob(jobId);
|
||||||
|
|
||||||
|
// Step 3: Register a test shutdown callback
|
||||||
|
console.log('\nStep 3: Registering shutdown callback...');
|
||||||
|
let callbackExecuted = false;
|
||||||
|
registerShutdownCallback(async () => {
|
||||||
|
console.log('🔧 Test shutdown callback executed');
|
||||||
|
callbackExecuted = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Step 4: Check initial status
|
||||||
|
console.log('\nStep 4: Checking initial status...');
|
||||||
|
const initialStatus = getShutdownStatus();
|
||||||
|
console.log(` - Active jobs: ${initialStatus.activeJobs.length}`);
|
||||||
|
console.log(` - Registered callbacks: ${initialStatus.registeredCallbacks}`);
|
||||||
|
console.log(` - Shutdown in progress: ${initialStatus.inProgress}`);
|
||||||
|
|
||||||
|
// Step 5: Simulate graceful shutdown
|
||||||
|
console.log('\nStep 5: Simulating graceful shutdown...');
|
||||||
|
|
||||||
|
// Override process.exit to prevent actual exit during test
|
||||||
|
const originalExit = process.exit;
|
||||||
|
let exitCode: number | undefined;
|
||||||
|
process.exit = ((code?: number) => {
|
||||||
|
exitCode = code;
|
||||||
|
console.log(`🚪 Process.exit called with code: ${code}`);
|
||||||
|
// Don't actually exit during test
|
||||||
|
}) as any;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// This should save job state and execute callbacks
|
||||||
|
await gracefulShutdown('TEST_SIGNAL');
|
||||||
|
} catch (error) {
|
||||||
|
// Expected since we're not actually exiting
|
||||||
|
console.log(`⚠️ Graceful shutdown completed (exit intercepted)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Restore original process.exit
|
||||||
|
process.exit = originalExit;
|
||||||
|
|
||||||
|
// Step 6: Verify job state was saved
|
||||||
|
console.log('\nStep 6: Verifying job state was saved...');
|
||||||
|
const jobStateValid = await verifyJobState(jobId);
|
||||||
|
|
||||||
|
// Step 7: Verify callback was executed
|
||||||
|
console.log('\nStep 7: Verifying callback execution...');
|
||||||
|
if (callbackExecuted) {
|
||||||
|
console.log('✅ Shutdown callback was executed');
|
||||||
|
} else {
|
||||||
|
console.error('❌ Shutdown callback was not executed');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 8: Test results
|
||||||
|
console.log('\n📊 Test Results:');
|
||||||
|
console.log(` - Job state saved correctly: ${jobStateValid ? '✅' : '❌'}`);
|
||||||
|
console.log(` - Shutdown callback executed: ${callbackExecuted ? '✅' : '❌'}`);
|
||||||
|
console.log(` - Exit code: ${exitCode}`);
|
||||||
|
|
||||||
|
if (jobStateValid && callbackExecuted) {
|
||||||
|
console.log('\n🎉 All tests passed! Graceful shutdown is working correctly.');
|
||||||
|
} else {
|
||||||
|
console.error('\n❌ Some tests failed. Please check the implementation.');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('\n💥 Test failed with error:', error);
|
||||||
|
process.exit(1);
|
||||||
|
} finally {
|
||||||
|
// Clean up signal handlers
|
||||||
|
removeSignalHandlers();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up test data
|
||||||
|
*/
|
||||||
|
async function cleanupTestData(): Promise<void> {
|
||||||
|
console.log('🧹 Cleaning up test data...');
|
||||||
|
|
||||||
|
const result = await db
|
||||||
|
.delete(mirrorJobs)
|
||||||
|
.where(eq(mirrorJobs.userId, TEST_USER_ID));
|
||||||
|
|
||||||
|
console.log('✅ Test data cleaned up');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main test runner
|
||||||
|
*/
|
||||||
|
async function runTest(): Promise<void> {
|
||||||
|
console.log('🧪 Graceful Shutdown Integration Test');
|
||||||
|
console.log('====================================\n');
|
||||||
|
|
||||||
|
if (shouldCleanup) {
|
||||||
|
await cleanupTestData();
|
||||||
|
console.log('✅ Cleanup completed');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await testGracefulShutdown();
|
||||||
|
} finally {
|
||||||
|
// Always clean up test data
|
||||||
|
await cleanupTestData();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle process signals gracefully during testing
|
||||||
|
process.on('SIGINT', async () => {
|
||||||
|
console.log('\n⚠️ Test interrupted by SIGINT');
|
||||||
|
await cleanupTestData();
|
||||||
|
process.exit(130);
|
||||||
|
});
|
||||||
|
|
||||||
|
process.on('SIGTERM', async () => {
|
||||||
|
console.log('\n⚠️ Test interrupted by SIGTERM');
|
||||||
|
await cleanupTestData();
|
||||||
|
process.exit(143);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Run the test
|
||||||
|
runTest();
|
||||||
@@ -14,6 +14,7 @@ type MirrorJobWithKey = MirrorJob & { _rowKey: string };
|
|||||||
interface ActivityListProps {
|
interface ActivityListProps {
|
||||||
activities: MirrorJobWithKey[];
|
activities: MirrorJobWithKey[];
|
||||||
isLoading: boolean;
|
isLoading: boolean;
|
||||||
|
isLiveActive?: boolean;
|
||||||
filter: FilterParams;
|
filter: FilterParams;
|
||||||
setFilter: (filter: FilterParams) => void;
|
setFilter: (filter: FilterParams) => void;
|
||||||
}
|
}
|
||||||
@@ -21,6 +22,7 @@ interface ActivityListProps {
|
|||||||
export default function ActivityList({
|
export default function ActivityList({
|
||||||
activities,
|
activities,
|
||||||
isLoading,
|
isLoading,
|
||||||
|
isLiveActive = false,
|
||||||
filter,
|
filter,
|
||||||
setFilter,
|
setFilter,
|
||||||
}: ActivityListProps) {
|
}: ActivityListProps) {
|
||||||
@@ -120,18 +122,19 @@ export default function ActivityList({
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Card
|
<div className="flex flex-col border rounded-md">
|
||||||
ref={parentRef}
|
<Card
|
||||||
className='relative max-h-[calc(100dvh-191px)] overflow-y-auto rounded-md border'
|
ref={parentRef}
|
||||||
>
|
className='relative max-h-[calc(100dvh-231px)] overflow-y-auto rounded-none border-0'
|
||||||
<div
|
|
||||||
style={{
|
|
||||||
height: virtualizer.getTotalSize(),
|
|
||||||
position: 'relative',
|
|
||||||
width: '100%',
|
|
||||||
}}
|
|
||||||
>
|
>
|
||||||
{virtualizer.getVirtualItems().map((vRow) => {
|
<div
|
||||||
|
style={{
|
||||||
|
height: virtualizer.getTotalSize(),
|
||||||
|
position: 'relative',
|
||||||
|
width: '100%',
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{virtualizer.getVirtualItems().map((vRow) => {
|
||||||
const activity = filteredActivities[vRow.index];
|
const activity = filteredActivities[vRow.index];
|
||||||
const isExpanded = expandedItems.has(activity._rowKey);
|
const isExpanded = expandedItems.has(activity._rowKey);
|
||||||
|
|
||||||
@@ -213,5 +216,44 @@ export default function ActivityList({
|
|||||||
})}
|
})}
|
||||||
</div>
|
</div>
|
||||||
</Card>
|
</Card>
|
||||||
|
|
||||||
|
{/* Status Bar */}
|
||||||
|
<div className="h-[40px] flex items-center justify-between border-t bg-muted/30 px-3 relative">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<div className={`h-1.5 w-1.5 rounded-full ${isLiveActive ? 'bg-emerald-500' : 'bg-primary'}`} />
|
||||||
|
<span className="text-sm font-medium text-foreground">
|
||||||
|
{filteredActivities.length} {filteredActivities.length === 1 ? 'activity' : 'activities'} total
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Center - Live active indicator */}
|
||||||
|
{isLiveActive && (
|
||||||
|
<div className="flex items-center gap-1.5 absolute left-1/2 transform -translate-x-1/2">
|
||||||
|
<div
|
||||||
|
className="h-1 w-1 rounded-full bg-emerald-500"
|
||||||
|
style={{
|
||||||
|
animation: 'pulse 2s ease-in-out infinite'
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
<span className="text-xs text-emerald-600 dark:text-emerald-400 font-medium">
|
||||||
|
Live active
|
||||||
|
</span>
|
||||||
|
<div
|
||||||
|
className="h-1 w-1 rounded-full bg-emerald-500"
|
||||||
|
style={{
|
||||||
|
animation: 'pulse 2s ease-in-out infinite',
|
||||||
|
animationDelay: '1s'
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{(filter.searchTerm || filter.status || filter.type || filter.name) && (
|
||||||
|
<span className="text-xs text-muted-foreground">
|
||||||
|
Filters applied
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ import {
|
|||||||
DialogTitle,
|
DialogTitle,
|
||||||
DialogTrigger,
|
DialogTrigger,
|
||||||
} from '../ui/dialog';
|
} from '../ui/dialog';
|
||||||
import { apiRequest, formatDate } from '@/lib/utils';
|
import { apiRequest, formatDate, showErrorToast } from '@/lib/utils';
|
||||||
import { useAuth } from '@/hooks/useAuth';
|
import { useAuth } from '@/hooks/useAuth';
|
||||||
import type { MirrorJob } from '@/lib/db/schema';
|
import type { MirrorJob } from '@/lib/db/schema';
|
||||||
import type { ActivityApiResponse } from '@/types/activities';
|
import type { ActivityApiResponse } from '@/types/activities';
|
||||||
@@ -67,12 +67,12 @@ function deepClone<T>(obj: T): T {
|
|||||||
|
|
||||||
export function ActivityLog() {
|
export function ActivityLog() {
|
||||||
const { user } = useAuth();
|
const { user } = useAuth();
|
||||||
const { registerRefreshCallback } = useLiveRefresh();
|
const { registerRefreshCallback, isLiveEnabled } = useLiveRefresh();
|
||||||
const { isFullyConfigured } = useConfigStatus();
|
const { isFullyConfigured } = useConfigStatus();
|
||||||
const { navigationKey } = useNavigation();
|
const { navigationKey } = useNavigation();
|
||||||
|
|
||||||
const [activities, setActivities] = useState<MirrorJobWithKey[]>([]);
|
const [activities, setActivities] = useState<MirrorJobWithKey[]>([]);
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isInitialLoading, setIsInitialLoading] = useState(false);
|
||||||
const [showCleanupDialog, setShowCleanupDialog] = useState(false);
|
const [showCleanupDialog, setShowCleanupDialog] = useState(false);
|
||||||
|
|
||||||
// Ref to track if component is mounted to prevent state updates after unmount
|
// Ref to track if component is mounted to prevent state updates after unmount
|
||||||
@@ -138,11 +138,14 @@ export function ActivityLog() {
|
|||||||
|
|
||||||
/* ------------------------- initial fetch --------------------------- */
|
/* ------------------------- initial fetch --------------------------- */
|
||||||
|
|
||||||
const fetchActivities = useCallback(async () => {
|
const fetchActivities = useCallback(async (isLiveRefresh = false) => {
|
||||||
if (!user?.id) return false;
|
if (!user?.id) return false;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
setIsLoading(true);
|
// Set appropriate loading state based on refresh type
|
||||||
|
if (!isLiveRefresh) {
|
||||||
|
setIsInitialLoading(true);
|
||||||
|
}
|
||||||
|
|
||||||
const res = await apiRequest<ActivityApiResponse>(
|
const res = await apiRequest<ActivityApiResponse>(
|
||||||
`/activities?userId=${user.id}`,
|
`/activities?userId=${user.id}`,
|
||||||
@@ -150,7 +153,10 @@ export function ActivityLog() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (!res.success) {
|
if (!res.success) {
|
||||||
toast.error(res.message ?? 'Failed to fetch activities.');
|
// Only show error toast for manual refreshes to avoid spam during live updates
|
||||||
|
if (!isLiveRefresh) {
|
||||||
|
showErrorToast(res.message ?? 'Failed to fetch activities.', toast);
|
||||||
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -176,22 +182,23 @@ export function ActivityLog() {
|
|||||||
return true;
|
return true;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
if (isMountedRef.current) {
|
if (isMountedRef.current) {
|
||||||
toast.error(
|
// Only show error toast for manual refreshes to avoid spam during live updates
|
||||||
err instanceof Error ? err.message : 'Failed to fetch activities.',
|
if (!isLiveRefresh) {
|
||||||
);
|
showErrorToast(err, toast);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
} finally {
|
} finally {
|
||||||
if (isMountedRef.current) {
|
if (isMountedRef.current && !isLiveRefresh) {
|
||||||
setIsLoading(false);
|
setIsInitialLoading(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}, [user?.id]); // Only depend on user.id, not entire user object
|
}, [user?.id]); // Only depend on user.id, not entire user object
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Reset loading state when component becomes active
|
// Reset loading state when component becomes active
|
||||||
setIsLoading(true);
|
setIsInitialLoading(true);
|
||||||
fetchActivities();
|
fetchActivities(false); // Manual refresh, not live
|
||||||
}, [fetchActivities, navigationKey]); // Include navigationKey to trigger on navigation
|
}, [fetchActivities, navigationKey]); // Include navigationKey to trigger on navigation
|
||||||
|
|
||||||
// Register with global live refresh system
|
// Register with global live refresh system
|
||||||
@@ -203,7 +210,7 @@ export function ActivityLog() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const unregister = registerRefreshCallback(() => {
|
const unregister = registerRefreshCallback(() => {
|
||||||
fetchActivities();
|
fetchActivities(true); // Live refresh
|
||||||
});
|
});
|
||||||
|
|
||||||
return unregister;
|
return unregister;
|
||||||
@@ -301,10 +308,9 @@ export function ActivityLog() {
|
|||||||
if (!user?.id) return;
|
if (!user?.id) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
setIsLoading(true);
|
setIsInitialLoading(true);
|
||||||
setShowCleanupDialog(false);
|
setShowCleanupDialog(false);
|
||||||
|
|
||||||
// Use fetch directly to avoid potential axios issues
|
|
||||||
const response = await fetch('/api/activities/cleanup', {
|
const response = await fetch('/api/activities/cleanup', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
@@ -323,13 +329,13 @@ export function ActivityLog() {
|
|||||||
setActivities([]);
|
setActivities([]);
|
||||||
toast.success(`All activities cleaned up successfully. Deleted ${res.result.mirrorJobsDeleted} mirror jobs and ${res.result.eventsDeleted} events.`);
|
toast.success(`All activities cleaned up successfully. Deleted ${res.result.mirrorJobsDeleted} mirror jobs and ${res.result.eventsDeleted} events.`);
|
||||||
} else {
|
} else {
|
||||||
toast.error(res.error || 'Failed to cleanup activities.');
|
showErrorToast(res.error || 'Failed to cleanup activities.', toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error cleaning up activities:', error);
|
console.error('Error cleaning up activities:', error);
|
||||||
toast.error(error instanceof Error ? error.message : 'Failed to cleanup activities.');
|
showErrorToast(error, toast);
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsInitialLoading(false);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -430,7 +436,7 @@ export function ActivityLog() {
|
|||||||
<Button
|
<Button
|
||||||
variant="outline"
|
variant="outline"
|
||||||
size="icon"
|
size="icon"
|
||||||
onClick={() => fetchActivities()}
|
onClick={() => fetchActivities(false)} // Manual refresh, show loading skeleton
|
||||||
title="Refresh activity log"
|
title="Refresh activity log"
|
||||||
>
|
>
|
||||||
<RefreshCw className='h-4 w-4' />
|
<RefreshCw className='h-4 w-4' />
|
||||||
@@ -451,7 +457,8 @@ export function ActivityLog() {
|
|||||||
{/* activity list */}
|
{/* activity list */}
|
||||||
<ActivityList
|
<ActivityList
|
||||||
activities={applyLightFilter(activities)}
|
activities={applyLightFilter(activities)}
|
||||||
isLoading={isLoading || !connected}
|
isLoading={isInitialLoading || !connected}
|
||||||
|
isLiveActive={isLiveEnabled && isFullyConfigured}
|
||||||
filter={filter}
|
filter={filter}
|
||||||
setFilter={setFilter}
|
setFilter={setFilter}
|
||||||
/>
|
/>
|
||||||
@@ -472,9 +479,9 @@ export function ActivityLog() {
|
|||||||
<Button
|
<Button
|
||||||
variant="destructive"
|
variant="destructive"
|
||||||
onClick={confirmCleanup}
|
onClick={confirmCleanup}
|
||||||
disabled={isLoading}
|
disabled={isInitialLoading}
|
||||||
>
|
>
|
||||||
{isLoading ? 'Deleting...' : 'Delete All Activities'}
|
{isInitialLoading ? 'Deleting...' : 'Delete All Activities'}
|
||||||
</Button>
|
</Button>
|
||||||
</DialogFooter>
|
</DialogFooter>
|
||||||
</DialogContent>
|
</DialogContent>
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import { Button } from '@/components/ui/button';
|
|||||||
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
|
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
import { SiGitea } from 'react-icons/si';
|
import { SiGitea } from 'react-icons/si';
|
||||||
import { toast, Toaster } from 'sonner';
|
import { toast, Toaster } from 'sonner';
|
||||||
|
import { showErrorToast } from '@/lib/utils';
|
||||||
import { FlipHorizontal } from 'lucide-react';
|
import { FlipHorizontal } from 'lucide-react';
|
||||||
|
|
||||||
export function LoginForm() {
|
export function LoginForm() {
|
||||||
@@ -45,10 +46,10 @@ export function LoginForm() {
|
|||||||
window.location.href = '/';
|
window.location.href = '/';
|
||||||
}, 1000);
|
}, 1000);
|
||||||
} else {
|
} else {
|
||||||
toast.error(data.error || 'Login failed. Please try again.');
|
showErrorToast(data.error || 'Login failed. Please try again.', toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error('An error occurred while logging in. Please try again.');
|
showErrorToast(error, toast);
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { Button } from '@/components/ui/button';
|
|||||||
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
|
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
import { GitMerge } from 'lucide-react';
|
import { GitMerge } from 'lucide-react';
|
||||||
import { toast, Toaster } from 'sonner';
|
import { toast, Toaster } from 'sonner';
|
||||||
|
import { showErrorToast } from '@/lib/utils';
|
||||||
|
|
||||||
export function SignupForm() {
|
export function SignupForm() {
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
@@ -51,10 +52,10 @@ export function SignupForm() {
|
|||||||
window.location.href = '/';
|
window.location.href = '/';
|
||||||
}, 1500);
|
}, 1500);
|
||||||
} else {
|
} else {
|
||||||
toast.error(data.error || 'Failed to create account. Please try again.');
|
showErrorToast(data.error || 'Failed to create account. Please try again.', toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error('An error occurred while creating your account. Please try again.');
|
showErrorToast(error, toast);
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ import type {
|
|||||||
} from '@/types/config';
|
} from '@/types/config';
|
||||||
import { Button } from '../ui/button';
|
import { Button } from '../ui/button';
|
||||||
import { useAuth } from '@/hooks/useAuth';
|
import { useAuth } from '@/hooks/useAuth';
|
||||||
import { apiRequest } from '@/lib/utils';
|
import { apiRequest, showErrorToast } from '@/lib/utils';
|
||||||
import { RefreshCw } from 'lucide-react';
|
import { RefreshCw } from 'lucide-react';
|
||||||
import { toast } from 'sonner';
|
import { toast } from 'sonner';
|
||||||
import { Skeleton } from '@/components/ui/skeleton';
|
import { Skeleton } from '@/components/ui/skeleton';
|
||||||
@@ -53,17 +53,21 @@ export function ConfigTabs() {
|
|||||||
},
|
},
|
||||||
cleanupConfig: {
|
cleanupConfig: {
|
||||||
enabled: false,
|
enabled: false,
|
||||||
retentionDays: 7,
|
retentionDays: 604800, // 7 days in seconds
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const { user, refreshUser } = useAuth();
|
const { user } = useAuth();
|
||||||
const [isLoading, setIsLoading] = useState(true);
|
const [isLoading, setIsLoading] = useState(true);
|
||||||
const [isSyncing, setIsSyncing] = useState<boolean>(false);
|
const [isSyncing, setIsSyncing] = useState<boolean>(false);
|
||||||
const [isConfigSaved, setIsConfigSaved] = useState<boolean>(false);
|
|
||||||
const [isAutoSavingSchedule, setIsAutoSavingSchedule] = useState<boolean>(false);
|
const [isAutoSavingSchedule, setIsAutoSavingSchedule] = useState<boolean>(false);
|
||||||
const [isAutoSavingCleanup, setIsAutoSavingCleanup] = useState<boolean>(false);
|
const [isAutoSavingCleanup, setIsAutoSavingCleanup] = useState<boolean>(false);
|
||||||
|
const [isAutoSavingGitHub, setIsAutoSavingGitHub] = useState<boolean>(false);
|
||||||
|
const [isAutoSavingGitea, setIsAutoSavingGitea] = useState<boolean>(false);
|
||||||
const autoSaveScheduleTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
const autoSaveScheduleTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
const autoSaveCleanupTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
const autoSaveCleanupTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
|
const autoSaveGitHubTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
|
const autoSaveGiteaTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
|
|
||||||
const isConfigFormValid = (): boolean => {
|
const isConfigFormValid = (): boolean => {
|
||||||
const { githubConfig, giteaConfig } = config;
|
const { githubConfig, giteaConfig } = config;
|
||||||
@@ -91,7 +95,7 @@ export function ConfigTabs() {
|
|||||||
);
|
);
|
||||||
result.success
|
result.success
|
||||||
? toast.success(
|
? toast.success(
|
||||||
'GitHub data imported successfully! Head to the Dashboard to start mirroring repositories.',
|
'GitHub data imported successfully! Head to the Repositories page to start mirroring.',
|
||||||
)
|
)
|
||||||
: toast.error(
|
: toast.error(
|
||||||
`Failed to import GitHub data: ${
|
`Failed to import GitHub data: ${
|
||||||
@@ -109,47 +113,9 @@ export function ConfigTabs() {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleSaveConfig = async () => {
|
|
||||||
if (!user?.id) return;
|
|
||||||
const reqPayload: SaveConfigApiRequest = {
|
|
||||||
userId: user.id,
|
|
||||||
githubConfig: config.githubConfig,
|
|
||||||
giteaConfig: config.giteaConfig,
|
|
||||||
scheduleConfig: config.scheduleConfig,
|
|
||||||
cleanupConfig: config.cleanupConfig,
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
const response = await fetch('/api/config', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(reqPayload),
|
|
||||||
});
|
|
||||||
const result: SaveConfigApiResponse = await response.json();
|
|
||||||
if (result.success) {
|
|
||||||
await refreshUser();
|
|
||||||
setIsConfigSaved(true);
|
|
||||||
// Invalidate config cache so other components get fresh data
|
|
||||||
invalidateConfigCache();
|
|
||||||
toast.success(
|
|
||||||
'Configuration saved successfully! Now import your GitHub data to begin.',
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
toast.error(
|
|
||||||
`Failed to save configuration: ${result.message || 'Unknown error'}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
toast.error(
|
|
||||||
`An error occurred while saving the configuration: ${
|
|
||||||
error instanceof Error ? error.message : String(error)
|
|
||||||
}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Auto-save function specifically for schedule config changes
|
// Auto-save function specifically for schedule config changes
|
||||||
const autoSaveScheduleConfig = useCallback(async (scheduleConfig: ScheduleConfig) => {
|
const autoSaveScheduleConfig = useCallback(async (scheduleConfig: ScheduleConfig) => {
|
||||||
if (!user?.id || !isConfigSaved) return; // Only auto-save if config was previously saved
|
if (!user?.id) return;
|
||||||
|
|
||||||
// Clear any existing timeout
|
// Clear any existing timeout
|
||||||
if (autoSaveScheduleTimeoutRef.current) {
|
if (autoSaveScheduleTimeoutRef.current) {
|
||||||
@@ -181,28 +147,39 @@ export function ConfigTabs() {
|
|||||||
// Removed refreshUser() call to prevent page reload
|
// Removed refreshUser() call to prevent page reload
|
||||||
// Invalidate config cache so other components get fresh data
|
// Invalidate config cache so other components get fresh data
|
||||||
invalidateConfigCache();
|
invalidateConfigCache();
|
||||||
|
|
||||||
|
// Fetch updated config to get the recalculated nextRun time
|
||||||
|
try {
|
||||||
|
const updatedResponse = await apiRequest<ConfigApiResponse>(
|
||||||
|
`/config?userId=${user.id}`,
|
||||||
|
{ method: 'GET' },
|
||||||
|
);
|
||||||
|
if (updatedResponse && !updatedResponse.error) {
|
||||||
|
setConfig(prev => ({
|
||||||
|
...prev,
|
||||||
|
scheduleConfig: updatedResponse.scheduleConfig || prev.scheduleConfig,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
} catch (fetchError) {
|
||||||
|
console.warn('Failed to fetch updated config after auto-save:', fetchError);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
toast.error(
|
showErrorToast(
|
||||||
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
||||||
{ duration: 3000 }
|
toast
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
`Auto-save error: ${
|
|
||||||
error instanceof Error ? error.message : String(error)
|
|
||||||
}`,
|
|
||||||
{ duration: 3000 }
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
setIsAutoSavingSchedule(false);
|
setIsAutoSavingSchedule(false);
|
||||||
}
|
}
|
||||||
}, 500); // 500ms debounce
|
}, 500); // 500ms debounce
|
||||||
}, [user?.id, isConfigSaved, config.githubConfig, config.giteaConfig, config.cleanupConfig]);
|
}, [user?.id, config.githubConfig, config.giteaConfig, config.cleanupConfig]);
|
||||||
|
|
||||||
// Auto-save function specifically for cleanup config changes
|
// Auto-save function specifically for cleanup config changes
|
||||||
const autoSaveCleanupConfig = useCallback(async (cleanupConfig: DatabaseCleanupConfig) => {
|
const autoSaveCleanupConfig = useCallback(async (cleanupConfig: DatabaseCleanupConfig) => {
|
||||||
if (!user?.id || !isConfigSaved) return; // Only auto-save if config was previously saved
|
if (!user?.id) return;
|
||||||
|
|
||||||
// Clear any existing timeout
|
// Clear any existing timeout
|
||||||
if (autoSaveCleanupTimeoutRef.current) {
|
if (autoSaveCleanupTimeoutRef.current) {
|
||||||
@@ -233,24 +210,129 @@ export function ConfigTabs() {
|
|||||||
// Silent success - no toast for auto-save
|
// Silent success - no toast for auto-save
|
||||||
// Invalidate config cache so other components get fresh data
|
// Invalidate config cache so other components get fresh data
|
||||||
invalidateConfigCache();
|
invalidateConfigCache();
|
||||||
|
|
||||||
|
// Fetch updated config to get the recalculated nextRun time
|
||||||
|
try {
|
||||||
|
const updatedResponse = await apiRequest<ConfigApiResponse>(
|
||||||
|
`/config?userId=${user.id}`,
|
||||||
|
{ method: 'GET' },
|
||||||
|
);
|
||||||
|
if (updatedResponse && !updatedResponse.error) {
|
||||||
|
setConfig(prev => ({
|
||||||
|
...prev,
|
||||||
|
cleanupConfig: updatedResponse.cleanupConfig || prev.cleanupConfig,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
} catch (fetchError) {
|
||||||
|
console.warn('Failed to fetch updated config after auto-save:', fetchError);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
toast.error(
|
showErrorToast(
|
||||||
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
||||||
{ duration: 3000 }
|
toast
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
`Auto-save error: ${
|
|
||||||
error instanceof Error ? error.message : String(error)
|
|
||||||
}`,
|
|
||||||
{ duration: 3000 }
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
setIsAutoSavingCleanup(false);
|
setIsAutoSavingCleanup(false);
|
||||||
}
|
}
|
||||||
}, 500); // 500ms debounce
|
}, 500); // 500ms debounce
|
||||||
}, [user?.id, isConfigSaved, config.githubConfig, config.giteaConfig, config.scheduleConfig]);
|
}, [user?.id, config.githubConfig, config.giteaConfig, config.scheduleConfig]);
|
||||||
|
|
||||||
|
// Auto-save function specifically for GitHub config changes
|
||||||
|
const autoSaveGitHubConfig = useCallback(async (githubConfig: GitHubConfig) => {
|
||||||
|
if (!user?.id) return;
|
||||||
|
|
||||||
|
// Clear any existing timeout
|
||||||
|
if (autoSaveGitHubTimeoutRef.current) {
|
||||||
|
clearTimeout(autoSaveGitHubTimeoutRef.current);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debounce the auto-save to prevent excessive API calls
|
||||||
|
autoSaveGitHubTimeoutRef.current = setTimeout(async () => {
|
||||||
|
setIsAutoSavingGitHub(true);
|
||||||
|
|
||||||
|
const reqPayload: SaveConfigApiRequest = {
|
||||||
|
userId: user.id!,
|
||||||
|
githubConfig: githubConfig,
|
||||||
|
giteaConfig: config.giteaConfig,
|
||||||
|
scheduleConfig: config.scheduleConfig,
|
||||||
|
cleanupConfig: config.cleanupConfig,
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/config', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(reqPayload),
|
||||||
|
});
|
||||||
|
const result: SaveConfigApiResponse = await response.json();
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
// Silent success - no toast for auto-save
|
||||||
|
// Invalidate config cache so other components get fresh data
|
||||||
|
invalidateConfigCache();
|
||||||
|
} else {
|
||||||
|
showErrorToast(
|
||||||
|
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
||||||
|
toast
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showErrorToast(error, toast);
|
||||||
|
} finally {
|
||||||
|
setIsAutoSavingGitHub(false);
|
||||||
|
}
|
||||||
|
}, 500); // 500ms debounce
|
||||||
|
}, [user?.id, config.giteaConfig, config.scheduleConfig, config.cleanupConfig]);
|
||||||
|
|
||||||
|
// Auto-save function specifically for Gitea config changes
|
||||||
|
const autoSaveGiteaConfig = useCallback(async (giteaConfig: GiteaConfig) => {
|
||||||
|
if (!user?.id) return;
|
||||||
|
|
||||||
|
// Clear any existing timeout
|
||||||
|
if (autoSaveGiteaTimeoutRef.current) {
|
||||||
|
clearTimeout(autoSaveGiteaTimeoutRef.current);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debounce the auto-save to prevent excessive API calls
|
||||||
|
autoSaveGiteaTimeoutRef.current = setTimeout(async () => {
|
||||||
|
setIsAutoSavingGitea(true);
|
||||||
|
|
||||||
|
const reqPayload: SaveConfigApiRequest = {
|
||||||
|
userId: user.id!,
|
||||||
|
githubConfig: config.githubConfig,
|
||||||
|
giteaConfig: giteaConfig,
|
||||||
|
scheduleConfig: config.scheduleConfig,
|
||||||
|
cleanupConfig: config.cleanupConfig,
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/config', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(reqPayload),
|
||||||
|
});
|
||||||
|
const result: SaveConfigApiResponse = await response.json();
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
// Silent success - no toast for auto-save
|
||||||
|
// Invalidate config cache so other components get fresh data
|
||||||
|
invalidateConfigCache();
|
||||||
|
} else {
|
||||||
|
showErrorToast(
|
||||||
|
`Auto-save failed: ${result.message || 'Unknown error'}`,
|
||||||
|
toast
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showErrorToast(error, toast);
|
||||||
|
} finally {
|
||||||
|
setIsAutoSavingGitea(false);
|
||||||
|
}
|
||||||
|
}, 500); // 500ms debounce
|
||||||
|
}, [user?.id, config.githubConfig, config.scheduleConfig, config.cleanupConfig]);
|
||||||
|
|
||||||
// Cleanup timeouts on unmount
|
// Cleanup timeouts on unmount
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -261,6 +343,12 @@ export function ConfigTabs() {
|
|||||||
if (autoSaveCleanupTimeoutRef.current) {
|
if (autoSaveCleanupTimeoutRef.current) {
|
||||||
clearTimeout(autoSaveCleanupTimeoutRef.current);
|
clearTimeout(autoSaveCleanupTimeoutRef.current);
|
||||||
}
|
}
|
||||||
|
if (autoSaveGitHubTimeoutRef.current) {
|
||||||
|
clearTimeout(autoSaveGitHubTimeoutRef.current);
|
||||||
|
}
|
||||||
|
if (autoSaveGiteaTimeoutRef.current) {
|
||||||
|
clearTimeout(autoSaveGiteaTimeoutRef.current);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@@ -285,7 +373,7 @@ export function ConfigTabs() {
|
|||||||
cleanupConfig:
|
cleanupConfig:
|
||||||
response.cleanupConfig || config.cleanupConfig,
|
response.cleanupConfig || config.cleanupConfig,
|
||||||
});
|
});
|
||||||
if (response.id) setIsConfigSaved(true);
|
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.warn(
|
console.warn(
|
||||||
@@ -382,10 +470,10 @@ export function ConfigTabs() {
|
|||||||
<div className="flex gap-x-4">
|
<div className="flex gap-x-4">
|
||||||
<Button
|
<Button
|
||||||
onClick={handleImportGitHubData}
|
onClick={handleImportGitHubData}
|
||||||
disabled={isSyncing || !isConfigSaved}
|
disabled={isSyncing || !isConfigFormValid()}
|
||||||
title={
|
title={
|
||||||
!isConfigSaved
|
!isConfigFormValid()
|
||||||
? 'Save configuration first'
|
? 'Please fill all required GitHub and Gitea fields'
|
||||||
: isSyncing
|
: isSyncing
|
||||||
? 'Import in progress'
|
? 'Import in progress'
|
||||||
: 'Import GitHub Data'
|
: 'Import GitHub Data'
|
||||||
@@ -403,17 +491,6 @@ export function ConfigTabs() {
|
|||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
</Button>
|
</Button>
|
||||||
<Button
|
|
||||||
onClick={handleSaveConfig}
|
|
||||||
disabled={!isConfigFormValid()}
|
|
||||||
title={
|
|
||||||
!isConfigFormValid()
|
|
||||||
? 'Please fill all required fields'
|
|
||||||
: 'Save Configuration'
|
|
||||||
}
|
|
||||||
>
|
|
||||||
Save Configuration
|
|
||||||
</Button>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -431,6 +508,8 @@ export function ConfigTabs() {
|
|||||||
: update,
|
: update,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
onAutoSave={autoSaveGitHubConfig}
|
||||||
|
isAutoSaving={isAutoSavingGitHub}
|
||||||
/>
|
/>
|
||||||
<GiteaConfigForm
|
<GiteaConfigForm
|
||||||
config={config.giteaConfig}
|
config={config.giteaConfig}
|
||||||
@@ -443,6 +522,8 @@ export function ConfigTabs() {
|
|||||||
: update,
|
: update,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
onAutoSave={autoSaveGiteaConfig}
|
||||||
|
isAutoSaving={isAutoSavingGitea}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex gap-x-4">
|
<div className="flex gap-x-4">
|
||||||
|
|||||||
@@ -18,38 +18,79 @@ interface DatabaseCleanupConfigFormProps {
|
|||||||
isAutoSaving?: boolean;
|
isAutoSaving?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Helper to calculate cleanup interval in hours (should match backend logic)
|
||||||
|
function calculateCleanupInterval(retentionSeconds: number): number {
|
||||||
|
const retentionDays = retentionSeconds / (24 * 60 * 60);
|
||||||
|
if (retentionDays <= 1) {
|
||||||
|
return 6;
|
||||||
|
} else if (retentionDays <= 3) {
|
||||||
|
return 12;
|
||||||
|
} else if (retentionDays <= 7) {
|
||||||
|
return 24;
|
||||||
|
} else if (retentionDays <= 30) {
|
||||||
|
return 48;
|
||||||
|
} else {
|
||||||
|
return 168;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export function DatabaseCleanupConfigForm({
|
export function DatabaseCleanupConfigForm({
|
||||||
config,
|
config,
|
||||||
setConfig,
|
setConfig,
|
||||||
onAutoSave,
|
onAutoSave,
|
||||||
isAutoSaving = false,
|
isAutoSaving = false,
|
||||||
}: DatabaseCleanupConfigFormProps) {
|
}: DatabaseCleanupConfigFormProps) {
|
||||||
|
// Optimistically update nextRun when enabled or retention changes
|
||||||
const handleChange = (
|
const handleChange = (
|
||||||
e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>
|
e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>
|
||||||
) => {
|
) => {
|
||||||
const { name, value, type } = e.target;
|
const { name, value, type } = e.target;
|
||||||
const newConfig = {
|
let newConfig = {
|
||||||
...config,
|
...config,
|
||||||
[name]:
|
[name]: type === "checkbox" ? (e.target as HTMLInputElement).checked : value,
|
||||||
type === "checkbox" ? (e.target as HTMLInputElement).checked : value,
|
|
||||||
};
|
};
|
||||||
setConfig(newConfig);
|
|
||||||
|
|
||||||
// Trigger auto-save for cleanup config changes
|
// If enabling or changing retention, recalculate nextRun
|
||||||
|
if (
|
||||||
|
(name === "enabled" && (e.target as HTMLInputElement).checked) ||
|
||||||
|
(name === "retentionDays" && config.enabled)
|
||||||
|
) {
|
||||||
|
const now = new Date();
|
||||||
|
const retentionSeconds =
|
||||||
|
name === "retentionDays"
|
||||||
|
? Number(value)
|
||||||
|
: Number(newConfig.retentionDays);
|
||||||
|
const intervalHours = calculateCleanupInterval(retentionSeconds);
|
||||||
|
const nextRun = new Date(now.getTime() + intervalHours * 60 * 60 * 1000);
|
||||||
|
newConfig = {
|
||||||
|
...newConfig,
|
||||||
|
nextRun,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
// If disabling, clear nextRun
|
||||||
|
if (name === "enabled" && !(e.target as HTMLInputElement).checked) {
|
||||||
|
newConfig = {
|
||||||
|
...newConfig,
|
||||||
|
nextRun: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
setConfig(newConfig);
|
||||||
if (onAutoSave) {
|
if (onAutoSave) {
|
||||||
onAutoSave(newConfig);
|
onAutoSave(newConfig);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Predefined retention periods
|
// Predefined retention periods (in seconds, like schedule intervals)
|
||||||
const retentionOptions: { value: number; label: string }[] = [
|
const retentionOptions: { value: number; label: string }[] = [
|
||||||
{ value: 1, label: "1 day" },
|
{ value: 86400, label: "1 day" }, // 24 * 60 * 60
|
||||||
{ value: 3, label: "3 days" },
|
{ value: 259200, label: "3 days" }, // 3 * 24 * 60 * 60
|
||||||
{ value: 7, label: "7 days" },
|
{ value: 604800, label: "7 days" }, // 7 * 24 * 60 * 60
|
||||||
{ value: 14, label: "14 days" },
|
{ value: 1209600, label: "14 days" }, // 14 * 24 * 60 * 60
|
||||||
{ value: 30, label: "30 days" },
|
{ value: 2592000, label: "30 days" }, // 30 * 24 * 60 * 60
|
||||||
{ value: 60, label: "60 days" },
|
{ value: 5184000, label: "60 days" }, // 60 * 24 * 60 * 60
|
||||||
{ value: 90, label: "90 days" },
|
{ value: 7776000, label: "90 days" }, // 90 * 24 * 60 * 60
|
||||||
];
|
];
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -92,7 +133,7 @@ export function DatabaseCleanupConfigForm({
|
|||||||
{config.enabled && (
|
{config.enabled && (
|
||||||
<div>
|
<div>
|
||||||
<label className="block text-sm font-medium mb-2">
|
<label className="block text-sm font-medium mb-2">
|
||||||
Retention Period
|
Data Retention Period
|
||||||
</label>
|
</label>
|
||||||
|
|
||||||
<Select
|
<Select
|
||||||
@@ -123,22 +164,36 @@ export function DatabaseCleanupConfigForm({
|
|||||||
<p className="text-xs text-muted-foreground mt-1">
|
<p className="text-xs text-muted-foreground mt-1">
|
||||||
Activities and events older than this period will be automatically deleted.
|
Activities and events older than this period will be automatically deleted.
|
||||||
</p>
|
</p>
|
||||||
|
<div className="mt-2 p-2 bg-muted/50 rounded-md">
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
<strong>Cleanup Frequency:</strong> The cleanup process runs automatically at optimal intervals:
|
||||||
|
shorter retention periods trigger more frequent cleanups, longer periods trigger less frequent cleanups.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<div>
|
<div className="flex gap-x-4">
|
||||||
<label className="block text-sm font-medium mb-1">Last Run</label>
|
<div className="flex-1">
|
||||||
<div className="text-sm">
|
<label className="block text-sm font-medium mb-1">Last Cleanup</label>
|
||||||
{config.lastRun ? formatDate(config.lastRun) : "Never"}
|
<div className="text-sm">
|
||||||
|
{config.lastRun ? formatDate(config.lastRun) : "Never"}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{config.enabled && (
|
||||||
|
<div className="flex-1">
|
||||||
|
<label className="block text-sm font-medium mb-1">Next Cleanup</label>
|
||||||
|
<div className="text-sm">
|
||||||
|
{config.nextRun
|
||||||
|
? formatDate(config.nextRun)
|
||||||
|
: config.enabled
|
||||||
|
? "Calculating..."
|
||||||
|
: "Never"}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{config.nextRun && config.enabled && (
|
|
||||||
<div>
|
|
||||||
<label className="block text-sm font-medium mb-1">Next Run</label>
|
|
||||||
<div className="text-sm">{formatDate(config.nextRun)}</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|||||||
@@ -20,9 +20,11 @@ import { Tooltip, TooltipContent, TooltipTrigger } from "../ui/tooltip";
|
|||||||
interface GitHubConfigFormProps {
|
interface GitHubConfigFormProps {
|
||||||
config: GitHubConfig;
|
config: GitHubConfig;
|
||||||
setConfig: React.Dispatch<React.SetStateAction<GitHubConfig>>;
|
setConfig: React.Dispatch<React.SetStateAction<GitHubConfig>>;
|
||||||
|
onAutoSave?: (githubConfig: GitHubConfig) => Promise<void>;
|
||||||
|
isAutoSaving?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function GitHubConfigForm({ config, setConfig }: GitHubConfigFormProps) {
|
export function GitHubConfigForm({ config, setConfig, onAutoSave, isAutoSaving }: GitHubConfigFormProps) {
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
|
|
||||||
const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
@@ -43,10 +45,17 @@ export function GitHubConfigForm({ config, setConfig }: GitHubConfigFormProps) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
setConfig({
|
const newConfig = {
|
||||||
...config,
|
...config,
|
||||||
[name]: type === "checkbox" ? checked : value,
|
[name]: type === "checkbox" ? checked : value,
|
||||||
});
|
};
|
||||||
|
|
||||||
|
setConfig(newConfig);
|
||||||
|
|
||||||
|
// Auto-save for all field changes
|
||||||
|
if (onAutoSave) {
|
||||||
|
onAutoSave(newConfig);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const testConnection = async () => {
|
const testConnection = async () => {
|
||||||
|
|||||||
@@ -21,19 +21,27 @@ import { toast } from "sonner";
|
|||||||
interface GiteaConfigFormProps {
|
interface GiteaConfigFormProps {
|
||||||
config: GiteaConfig;
|
config: GiteaConfig;
|
||||||
setConfig: React.Dispatch<React.SetStateAction<GiteaConfig>>;
|
setConfig: React.Dispatch<React.SetStateAction<GiteaConfig>>;
|
||||||
|
onAutoSave?: (giteaConfig: GiteaConfig) => Promise<void>;
|
||||||
|
isAutoSaving?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function GiteaConfigForm({ config, setConfig }: GiteaConfigFormProps) {
|
export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving }: GiteaConfigFormProps) {
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
|
|
||||||
const handleChange = (
|
const handleChange = (
|
||||||
e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>
|
e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>
|
||||||
) => {
|
) => {
|
||||||
const { name, value } = e.target;
|
const { name, value } = e.target;
|
||||||
setConfig({
|
const newConfig = {
|
||||||
...config,
|
...config,
|
||||||
[name]: value,
|
[name]: value,
|
||||||
});
|
};
|
||||||
|
setConfig(newConfig);
|
||||||
|
|
||||||
|
// Auto-save for all field changes
|
||||||
|
if (onAutoSave) {
|
||||||
|
onAutoSave(newConfig);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const testConnection = async () => {
|
const testConnection = async () => {
|
||||||
|
|||||||
@@ -43,9 +43,6 @@ export function ScheduleConfigForm({
|
|||||||
|
|
||||||
// Predefined intervals
|
// Predefined intervals
|
||||||
const intervals: { value: number; label: string }[] = [
|
const intervals: { value: number; label: string }[] = [
|
||||||
// { value: 120, label: "2 minutes" }, //for testing
|
|
||||||
{ value: 900, label: "15 minutes" },
|
|
||||||
{ value: 1800, label: "30 minutes" },
|
|
||||||
{ value: 3600, label: "1 hour" },
|
{ value: 3600, label: "1 hour" },
|
||||||
{ value: 7200, label: "2 hours" },
|
{ value: 7200, label: "2 hours" },
|
||||||
{ value: 14400, label: "4 hours" },
|
{ value: 14400, label: "4 hours" },
|
||||||
@@ -127,22 +124,32 @@ export function ScheduleConfigForm({
|
|||||||
<p className="text-xs text-muted-foreground mt-1">
|
<p className="text-xs text-muted-foreground mt-1">
|
||||||
How often the mirroring process should run.
|
How often the mirroring process should run.
|
||||||
</p>
|
</p>
|
||||||
|
<div className="mt-2 p-2 bg-muted/50 rounded-md">
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
<strong>Sync Schedule:</strong> Repositories will be synchronized at the specified interval.
|
||||||
|
Choose shorter intervals for frequently updated repositories, longer intervals for stable ones.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<div>
|
<div className="flex gap-x-4">
|
||||||
<label className="block text-sm font-medium mb-1">Last Run</label>
|
<div className="flex-1">
|
||||||
<div className="text-sm">
|
<label className="block text-sm font-medium mb-1">Last Sync</label>
|
||||||
{config.lastRun ? formatDate(config.lastRun) : "Never"}
|
<div className="text-sm">
|
||||||
|
{config.lastRun ? formatDate(config.lastRun) : "Never"}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{config.enabled && (
|
||||||
|
<div className="flex-1">
|
||||||
|
<label className="block text-sm font-medium mb-1">Next Sync</label>
|
||||||
|
<div className="text-sm">
|
||||||
|
{config.nextRun ? formatDate(config.nextRun) : "Never"}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{config.nextRun && config.enabled && (
|
|
||||||
<div>
|
|
||||||
<label className="block text-sm font-medium mb-1">Next Run</label>
|
|
||||||
<div className="text-sm">{formatDate(config.nextRun)}</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import { GitFork, Clock, FlipHorizontal, Building2 } from "lucide-react";
|
|||||||
import { useCallback, useEffect, useRef, useState } from "react";
|
import { useCallback, useEffect, useRef, useState } from "react";
|
||||||
import type { MirrorJob, Organization, Repository } from "@/lib/db/schema";
|
import type { MirrorJob, Organization, Repository } from "@/lib/db/schema";
|
||||||
import { useAuth } from "@/hooks/useAuth";
|
import { useAuth } from "@/hooks/useAuth";
|
||||||
import { apiRequest } from "@/lib/utils";
|
import { apiRequest, showErrorToast } from "@/lib/utils";
|
||||||
import type { DashboardApiResponse } from "@/types/dashboard";
|
import type { DashboardApiResponse } from "@/types/dashboard";
|
||||||
import { useSSE } from "@/hooks/useSEE";
|
import { useSSE } from "@/hooks/useSEE";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
@@ -103,15 +103,11 @@ export function Dashboard() {
|
|||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error fetching dashboard data");
|
showErrorToast(response.error || "Error fetching dashboard data", toast);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error
|
|
||||||
? error.message
|
|
||||||
: "Error fetching dashboard data"
|
|
||||||
);
|
|
||||||
return false;
|
return false;
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
|
|||||||
@@ -81,6 +81,11 @@ export function RepositoryList({ repositories }: RepositoryListProps) {
|
|||||||
Private
|
Private
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
|
{repo.isForked && (
|
||||||
|
<span className="rounded-full bg-muted px-2 py-0.5 text-xs">
|
||||||
|
Fork
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
<div className="flex items-center gap-2 mt-1">
|
<div className="flex items-center gap-2 mt-1">
|
||||||
<span className="text-xs text-muted-foreground">
|
<span className="text-xs text-muted-foreground">
|
||||||
|
|||||||
@@ -24,8 +24,13 @@ export function Header({ currentPage, onNavigate }: HeaderProps) {
|
|||||||
// Determine button state and tooltip
|
// Determine button state and tooltip
|
||||||
const isLiveActive = isLiveEnabled && isFullyConfigured;
|
const isLiveActive = isLiveEnabled && isFullyConfigured;
|
||||||
const getTooltip = () => {
|
const getTooltip = () => {
|
||||||
if (!isFullyConfigured && !configLoading) {
|
if (configLoading) {
|
||||||
return 'Configure GitHub and Gitea settings to enable live refresh';
|
return 'Loading configuration...';
|
||||||
|
}
|
||||||
|
if (!isFullyConfigured) {
|
||||||
|
return isLiveEnabled
|
||||||
|
? 'Live refresh enabled but requires GitHub and Gitea configuration to function'
|
||||||
|
: 'Enable live refresh (requires GitHub and Gitea configuration)';
|
||||||
}
|
}
|
||||||
return isLiveEnabled ? 'Disable live refresh' : 'Enable live refresh';
|
return isLiveEnabled ? 'Disable live refresh' : 'Enable live refresh';
|
||||||
};
|
};
|
||||||
@@ -68,17 +73,18 @@ export function Header({ currentPage, onNavigate }: HeaderProps) {
|
|||||||
<Button
|
<Button
|
||||||
variant="outline"
|
variant="outline"
|
||||||
size="lg"
|
size="lg"
|
||||||
className={`flex items-center gap-2 ${!isFullyConfigured && !configLoading ? 'opacity-50 cursor-not-allowed' : ''}`}
|
className="flex items-center gap-2"
|
||||||
onClick={isFullyConfigured || configLoading ? toggleLive : undefined}
|
onClick={toggleLive}
|
||||||
title={getTooltip()}
|
title={getTooltip()}
|
||||||
disabled={!isFullyConfigured && !configLoading}
|
|
||||||
>
|
>
|
||||||
<div className={`w-3 h-3 rounded-full ${
|
<div className={`w-3 h-3 rounded-full ${
|
||||||
configLoading
|
configLoading
|
||||||
? 'bg-yellow-400 animate-pulse'
|
? 'bg-yellow-400 animate-pulse'
|
||||||
: isLiveActive
|
: isLiveActive
|
||||||
? 'bg-emerald-400 animate-pulse'
|
? 'bg-emerald-400 animate-pulse'
|
||||||
: 'bg-gray-500'
|
: isLiveEnabled
|
||||||
|
? 'bg-orange-400'
|
||||||
|
: 'bg-gray-500'
|
||||||
}`} />
|
}`} />
|
||||||
<span>LIVE</span>
|
<span>LIVE</span>
|
||||||
</Button>
|
</Button>
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import type { MirrorJob, Organization } from "@/lib/db/schema";
|
|||||||
import { OrganizationList } from "./OrganizationsList";
|
import { OrganizationList } from "./OrganizationsList";
|
||||||
import AddOrganizationDialog from "./AddOrganizationDialog";
|
import AddOrganizationDialog from "./AddOrganizationDialog";
|
||||||
import { useAuth } from "@/hooks/useAuth";
|
import { useAuth } from "@/hooks/useAuth";
|
||||||
import { apiRequest } from "@/lib/utils";
|
import { apiRequest, showErrorToast } from "@/lib/utils";
|
||||||
import {
|
import {
|
||||||
membershipRoleEnum,
|
membershipRoleEnum,
|
||||||
type AddOrganizationApiRequest,
|
type AddOrganizationApiRequest,
|
||||||
@@ -24,7 +24,6 @@ import type { MirrorOrgRequest, MirrorOrgResponse } from "@/types/mirror";
|
|||||||
import { useSSE } from "@/hooks/useSEE";
|
import { useSSE } from "@/hooks/useSEE";
|
||||||
import { useFilterParams } from "@/hooks/useFilterParams";
|
import { useFilterParams } from "@/hooks/useFilterParams";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
|
|
||||||
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
||||||
import { useNavigation } from "@/components/layout/MainLayout";
|
import { useNavigation } from "@/components/layout/MainLayout";
|
||||||
|
|
||||||
@@ -33,7 +32,6 @@ export function Organization() {
|
|||||||
const [isLoading, setIsLoading] = useState<boolean>(true);
|
const [isLoading, setIsLoading] = useState<boolean>(true);
|
||||||
const [isDialogOpen, setIsDialogOpen] = useState<boolean>(false);
|
const [isDialogOpen, setIsDialogOpen] = useState<boolean>(false);
|
||||||
const { user } = useAuth();
|
const { user } = useAuth();
|
||||||
const { registerRefreshCallback } = useLiveRefresh();
|
|
||||||
const { isGitHubConfigured } = useConfigStatus();
|
const { isGitHubConfigured } = useConfigStatus();
|
||||||
const { navigationKey } = useNavigation();
|
const { navigationKey } = useNavigation();
|
||||||
const { filter, setFilter } = useFilterParams({
|
const { filter, setFilter } = useFilterParams({
|
||||||
@@ -108,20 +106,6 @@ export function Organization() {
|
|||||||
fetchOrganizations();
|
fetchOrganizations();
|
||||||
}, [fetchOrganizations, navigationKey]); // Include navigationKey to trigger on navigation
|
}, [fetchOrganizations, navigationKey]); // Include navigationKey to trigger on navigation
|
||||||
|
|
||||||
// Register with global live refresh system
|
|
||||||
useEffect(() => {
|
|
||||||
// Only register for live refresh if GitHub is configured
|
|
||||||
if (!isGitHubConfigured) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const unregister = registerRefreshCallback(() => {
|
|
||||||
fetchOrganizations();
|
|
||||||
});
|
|
||||||
|
|
||||||
return unregister;
|
|
||||||
}, [registerRefreshCallback, fetchOrganizations, isGitHubConfigured]);
|
|
||||||
|
|
||||||
const handleRefresh = async () => {
|
const handleRefresh = async () => {
|
||||||
const success = await fetchOrganizations();
|
const success = await fetchOrganizations();
|
||||||
if (success) {
|
if (success) {
|
||||||
@@ -209,12 +193,10 @@ export function Organization() {
|
|||||||
searchTerm: org,
|
searchTerm: org,
|
||||||
}));
|
}));
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error adding organization");
|
showErrorToast(response.error || "Error adding organization", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error adding organization"
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
}
|
}
|
||||||
@@ -266,12 +248,10 @@ export function Organization() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error starting mirror jobs");
|
showErrorToast(response.error || "Error starting mirror jobs", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error starting mirror jobs"
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
// Reset loading states - we'll let the SSE updates handle status changes
|
// Reset loading states - we'll let the SSE updates handle status changes
|
||||||
setLoadingOrgIds(new Set());
|
setLoadingOrgIds(new Set());
|
||||||
|
|||||||
@@ -118,10 +118,38 @@ export function OrganizationList({
|
|||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<p className="text-sm text-muted-foreground mb-4">
|
<div className="text-sm text-muted-foreground mb-4">
|
||||||
{org.repositoryCount}{" "}
|
<div className="flex items-center justify-between">
|
||||||
{org.repositoryCount === 1 ? "repository" : "repositories"}
|
<span className="font-medium">
|
||||||
</p>
|
{org.repositoryCount}{" "}
|
||||||
|
{org.repositoryCount === 1 ? "repository" : "repositories"}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
{(org.publicRepositoryCount !== undefined ||
|
||||||
|
org.privateRepositoryCount !== undefined ||
|
||||||
|
org.forkRepositoryCount !== undefined) && (
|
||||||
|
<div className="flex gap-4 mt-2 text-xs">
|
||||||
|
{org.publicRepositoryCount !== undefined && (
|
||||||
|
<span className="flex items-center gap-1">
|
||||||
|
<div className="h-2 w-2 rounded-full bg-green-500" />
|
||||||
|
{org.publicRepositoryCount} public
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{org.privateRepositoryCount !== undefined && org.privateRepositoryCount > 0 && (
|
||||||
|
<span className="flex items-center gap-1">
|
||||||
|
<div className="h-2 w-2 rounded-full bg-orange-500" />
|
||||||
|
{org.privateRepositoryCount} private
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{org.forkRepositoryCount !== undefined && org.forkRepositoryCount > 0 && (
|
||||||
|
<span className="flex items-center gap-1">
|
||||||
|
<div className="h-2 w-2 rounded-full bg-blue-500" />
|
||||||
|
{org.forkRepositoryCount} fork{org.forkRepositoryCount !== 1 ? 's' : ''}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
type RepositoryApiResponse,
|
type RepositoryApiResponse,
|
||||||
type RepoStatus,
|
type RepoStatus,
|
||||||
} from "@/types/Repository";
|
} from "@/types/Repository";
|
||||||
import { apiRequest } from "@/lib/utils";
|
import { apiRequest, showErrorToast } from "@/lib/utils";
|
||||||
import {
|
import {
|
||||||
Select,
|
Select,
|
||||||
SelectContent,
|
SelectContent,
|
||||||
@@ -34,10 +34,10 @@ import { useNavigation } from "@/components/layout/MainLayout";
|
|||||||
|
|
||||||
export default function Repository() {
|
export default function Repository() {
|
||||||
const [repositories, setRepositories] = useState<Repository[]>([]);
|
const [repositories, setRepositories] = useState<Repository[]>([]);
|
||||||
const [isLoading, setIsLoading] = useState(true);
|
const [isInitialLoading, setIsInitialLoading] = useState(true);
|
||||||
const { user } = useAuth();
|
const { user } = useAuth();
|
||||||
const { registerRefreshCallback } = useLiveRefresh();
|
const { registerRefreshCallback, isLiveEnabled } = useLiveRefresh();
|
||||||
const { isGitHubConfigured } = useConfigStatus();
|
const { isGitHubConfigured, isFullyConfigured } = useConfigStatus();
|
||||||
const { navigationKey } = useNavigation();
|
const { navigationKey } = useNavigation();
|
||||||
const { filter, setFilter } = useFilterParams({
|
const { filter, setFilter } = useFilterParams({
|
||||||
searchTerm: "",
|
searchTerm: "",
|
||||||
@@ -80,17 +80,20 @@ export default function Repository() {
|
|||||||
onMessage: handleNewMessage,
|
onMessage: handleNewMessage,
|
||||||
});
|
});
|
||||||
|
|
||||||
const fetchRepositories = useCallback(async () => {
|
const fetchRepositories = useCallback(async (isLiveRefresh = false) => {
|
||||||
if (!user?.id) return;
|
if (!user?.id) return;
|
||||||
|
|
||||||
// Don't fetch repositories if GitHub is not configured or still loading config
|
// Don't fetch repositories if GitHub is not configured or still loading config
|
||||||
if (!isGitHubConfigured) {
|
if (!isGitHubConfigured) {
|
||||||
setIsLoading(false);
|
setIsInitialLoading(false);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
setIsLoading(true);
|
// Set appropriate loading state based on refresh type
|
||||||
|
if (!isLiveRefresh) {
|
||||||
|
setIsInitialLoading(true);
|
||||||
|
}
|
||||||
|
|
||||||
const response = await apiRequest<RepositoryApiResponse>(
|
const response = await apiRequest<RepositoryApiResponse>(
|
||||||
`/github/repositories?userId=${user.id}`,
|
`/github/repositories?userId=${user.id}`,
|
||||||
@@ -103,23 +106,29 @@ export default function Repository() {
|
|||||||
setRepositories(response.repositories);
|
setRepositories(response.repositories);
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error fetching repositories");
|
// Only show error toast for manual refreshes to avoid spam during live updates
|
||||||
|
if (!isLiveRefresh) {
|
||||||
|
showErrorToast(response.error || "Error fetching repositories", toast);
|
||||||
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
// Only show error toast for manual refreshes to avoid spam during live updates
|
||||||
error instanceof Error ? error.message : "Error fetching repositories"
|
if (!isLiveRefresh) {
|
||||||
);
|
showErrorToast(error, toast);
|
||||||
|
}
|
||||||
return false;
|
return false;
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
if (!isLiveRefresh) {
|
||||||
|
setIsInitialLoading(false);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}, [user?.id, isGitHubConfigured]); // Only depend on user.id, not entire user object
|
}, [user?.id, isGitHubConfigured]); // Only depend on user.id, not entire user object
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Reset loading state when component becomes active
|
// Reset loading state when component becomes active
|
||||||
setIsLoading(true);
|
setIsInitialLoading(true);
|
||||||
fetchRepositories();
|
fetchRepositories(false); // Manual refresh, not live
|
||||||
}, [fetchRepositories, navigationKey]); // Include navigationKey to trigger on navigation
|
}, [fetchRepositories, navigationKey]); // Include navigationKey to trigger on navigation
|
||||||
|
|
||||||
// Register with global live refresh system
|
// Register with global live refresh system
|
||||||
@@ -130,14 +139,14 @@ export default function Repository() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const unregister = registerRefreshCallback(() => {
|
const unregister = registerRefreshCallback(() => {
|
||||||
fetchRepositories();
|
fetchRepositories(true); // Live refresh
|
||||||
});
|
});
|
||||||
|
|
||||||
return unregister;
|
return unregister;
|
||||||
}, [registerRefreshCallback, fetchRepositories, isGitHubConfigured]);
|
}, [registerRefreshCallback, fetchRepositories, isGitHubConfigured]);
|
||||||
|
|
||||||
const handleRefresh = async () => {
|
const handleRefresh = async () => {
|
||||||
const success = await fetchRepositories();
|
const success = await fetchRepositories(false); // Manual refresh, show loading skeleton
|
||||||
if (success) {
|
if (success) {
|
||||||
toast.success("Repositories refreshed successfully.");
|
toast.success("Repositories refreshed successfully.");
|
||||||
}
|
}
|
||||||
@@ -173,12 +182,10 @@ export default function Repository() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error starting mirror job");
|
showErrorToast(response.error || "Error starting mirror job", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error starting mirror job"
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
setLoadingRepoIds((prev) => {
|
setLoadingRepoIds((prev) => {
|
||||||
const newSet = new Set(prev);
|
const newSet = new Set(prev);
|
||||||
@@ -237,12 +244,10 @@ export default function Repository() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error starting mirror jobs");
|
showErrorToast(response.error || "Error starting mirror jobs", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error starting mirror jobs"
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
// Reset loading states - we'll let the SSE updates handle status changes
|
// Reset loading states - we'll let the SSE updates handle status changes
|
||||||
setLoadingRepoIds(new Set());
|
setLoadingRepoIds(new Set());
|
||||||
@@ -276,12 +281,10 @@ export default function Repository() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error starting sync job");
|
showErrorToast(response.error || "Error starting sync job", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error starting sync job"
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
setLoadingRepoIds((prev) => {
|
setLoadingRepoIds((prev) => {
|
||||||
const newSet = new Set(prev);
|
const newSet = new Set(prev);
|
||||||
@@ -318,12 +321,10 @@ export default function Repository() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error retrying job");
|
showErrorToast(response.error || "Error retrying job", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error retrying job"
|
|
||||||
);
|
|
||||||
} finally {
|
} finally {
|
||||||
setLoadingRepoIds((prev) => {
|
setLoadingRepoIds((prev) => {
|
||||||
const newSet = new Set(prev);
|
const newSet = new Set(prev);
|
||||||
@@ -363,19 +364,17 @@ export default function Repository() {
|
|||||||
toast.success(`Repository added successfully`);
|
toast.success(`Repository added successfully`);
|
||||||
setRepositories((prevRepos) => [...prevRepos, response.repository]);
|
setRepositories((prevRepos) => [...prevRepos, response.repository]);
|
||||||
|
|
||||||
await fetchRepositories();
|
await fetchRepositories(false); // Manual refresh after adding repository
|
||||||
|
|
||||||
setFilter((prev) => ({
|
setFilter((prev) => ({
|
||||||
...prev,
|
...prev,
|
||||||
searchTerm: repo,
|
searchTerm: repo,
|
||||||
}));
|
}));
|
||||||
} else {
|
} else {
|
||||||
toast.error(response.error || "Error adding repository");
|
showErrorToast(response.error || "Error adding repository", toast);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toast.error(
|
showErrorToast(error, toast);
|
||||||
error instanceof Error ? error.message : "Error adding repository"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -463,7 +462,7 @@ export default function Repository() {
|
|||||||
<Button
|
<Button
|
||||||
variant="default"
|
variant="default"
|
||||||
onClick={handleMirrorAllRepos}
|
onClick={handleMirrorAllRepos}
|
||||||
disabled={isLoading || loadingRepoIds.size > 0}
|
disabled={isInitialLoading || loadingRepoIds.size > 0}
|
||||||
>
|
>
|
||||||
<FlipHorizontal className="h-4 w-4 mr-2" />
|
<FlipHorizontal className="h-4 w-4 mr-2" />
|
||||||
Mirror All
|
Mirror All
|
||||||
@@ -490,7 +489,8 @@ export default function Repository() {
|
|||||||
) : (
|
) : (
|
||||||
<RepositoryTable
|
<RepositoryTable
|
||||||
repositories={repositories}
|
repositories={repositories}
|
||||||
isLoading={isLoading || !connected}
|
isLoading={isInitialLoading || !connected}
|
||||||
|
isLiveActive={isLiveEnabled && isFullyConfigured}
|
||||||
filter={filter}
|
filter={filter}
|
||||||
setFilter={setFilter}
|
setFilter={setFilter}
|
||||||
onMirror={handleMirrorRepo}
|
onMirror={handleMirrorRepo}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { useMemo, useRef } from "react";
|
import { useMemo, useRef } from "react";
|
||||||
import Fuse from "fuse.js";
|
import Fuse from "fuse.js";
|
||||||
import { useVirtualizer } from "@tanstack/react-virtual";
|
import { useVirtualizer } from "@tanstack/react-virtual";
|
||||||
import { GitFork, RefreshCw, RotateCcw } from "lucide-react";
|
import { FlipHorizontal, GitFork, RefreshCw, RotateCcw } from "lucide-react";
|
||||||
import { SiGithub, SiGitea } from "react-icons/si";
|
import { SiGithub, SiGitea } from "react-icons/si";
|
||||||
import type { Repository } from "@/lib/db/schema";
|
import type { Repository } from "@/lib/db/schema";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
@@ -13,6 +13,7 @@ import { useGiteaConfig } from "@/hooks/useGiteaConfig";
|
|||||||
interface RepositoryTableProps {
|
interface RepositoryTableProps {
|
||||||
repositories: Repository[];
|
repositories: Repository[];
|
||||||
isLoading: boolean;
|
isLoading: boolean;
|
||||||
|
isLiveActive?: boolean;
|
||||||
filter: FilterParams;
|
filter: FilterParams;
|
||||||
setFilter: (filter: FilterParams) => void;
|
setFilter: (filter: FilterParams) => void;
|
||||||
onMirror: ({ repoId }: { repoId: string }) => Promise<void>;
|
onMirror: ({ repoId }: { repoId: string }) => Promise<void>;
|
||||||
@@ -24,6 +25,7 @@ interface RepositoryTableProps {
|
|||||||
export default function RepositoryTable({
|
export default function RepositoryTable({
|
||||||
repositories,
|
repositories,
|
||||||
isLoading,
|
isLoading,
|
||||||
|
isLiveActive = false,
|
||||||
filter,
|
filter,
|
||||||
setFilter,
|
setFilter,
|
||||||
onMirror,
|
onMirror,
|
||||||
@@ -247,6 +249,11 @@ export default function RepositoryTable({
|
|||||||
Private
|
Private
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
|
{repo.isForked && (
|
||||||
|
<span className="ml-2 rounded-full bg-muted px-2 py-0.5 text-xs">
|
||||||
|
Fork
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Owner */}
|
{/* Owner */}
|
||||||
@@ -345,15 +352,38 @@ export default function RepositoryTable({
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Status Bar */}
|
{/* Status Bar */}
|
||||||
<div className="h-[40px] flex items-center justify-between border-t bg-muted/30 px-3">
|
<div className="h-[40px] flex items-center justify-between border-t bg-muted/30 px-3 relative">
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<div className="h-1.5 w-1.5 rounded-full bg-primary" />
|
<div className={`h-1.5 w-1.5 rounded-full ${isLiveActive ? 'bg-emerald-500' : 'bg-primary'}`} />
|
||||||
<span className="text-sm font-medium text-foreground">
|
<span className="text-sm font-medium text-foreground">
|
||||||
{hasAnyFilter
|
{hasAnyFilter
|
||||||
? `Showing ${filteredRepositories.length} of ${repositories.length} repositories`
|
? `Showing ${filteredRepositories.length} of ${repositories.length} repositories`
|
||||||
: `${repositories.length} ${repositories.length === 1 ? 'repository' : 'repositories'} total`}
|
: `${repositories.length} ${repositories.length === 1 ? 'repository' : 'repositories'} total`}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Center - Live active indicator */}
|
||||||
|
{isLiveActive && (
|
||||||
|
<div className="flex items-center gap-1.5 absolute left-1/2 transform -translate-x-1/2">
|
||||||
|
<div
|
||||||
|
className="h-1 w-1 rounded-full bg-emerald-500"
|
||||||
|
style={{
|
||||||
|
animation: 'pulse 2s ease-in-out infinite'
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
<span className="text-xs text-emerald-600 dark:text-emerald-400 font-medium">
|
||||||
|
Live active
|
||||||
|
</span>
|
||||||
|
<div
|
||||||
|
className="h-1 w-1 rounded-full bg-emerald-500"
|
||||||
|
style={{
|
||||||
|
animation: 'pulse 2s ease-in-out infinite',
|
||||||
|
animationDelay: '1s'
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
{hasAnyFilter && (
|
{hasAnyFilter && (
|
||||||
<span className="text-xs text-muted-foreground">
|
<span className="text-xs text-muted-foreground">
|
||||||
Filters applied
|
Filters applied
|
||||||
@@ -393,7 +423,7 @@ function RepoActionButton({
|
|||||||
disabled ||= repo.status === "syncing";
|
disabled ||= repo.status === "syncing";
|
||||||
} else if (["imported", "mirroring"].includes(repo.status)) {
|
} else if (["imported", "mirroring"].includes(repo.status)) {
|
||||||
label = "Mirror";
|
label = "Mirror";
|
||||||
icon = <GitFork className="h-4 w-4 mr-1" />;
|
icon = <FlipHorizontal className="h-4 w-4 mr-1" />; // Don't change this icon to GitFork.
|
||||||
onClick = onMirror;
|
onClick = onMirror;
|
||||||
disabled ||= repo.status === "mirroring";
|
disabled ||= repo.status === "mirroring";
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -15,15 +15,39 @@ interface CleanupResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clean up old events and mirror jobs for a specific user
|
* Calculate cleanup interval in hours based on retention period
|
||||||
|
* For shorter retention periods, run more frequently
|
||||||
|
* For longer retention periods, run less frequently
|
||||||
|
* @param retentionSeconds - Retention period in seconds
|
||||||
*/
|
*/
|
||||||
async function cleanupForUser(userId: string, retentionDays: number): Promise<CleanupResult> {
|
export function calculateCleanupInterval(retentionSeconds: number): number {
|
||||||
try {
|
const retentionDays = retentionSeconds / (24 * 60 * 60); // Convert seconds to days
|
||||||
console.log(`Running cleanup for user ${userId} with ${retentionDays} days retention`);
|
|
||||||
|
|
||||||
// Calculate cutoff date
|
if (retentionDays <= 1) {
|
||||||
|
return 6; // Every 6 hours for 1 day retention
|
||||||
|
} else if (retentionDays <= 3) {
|
||||||
|
return 12; // Every 12 hours for 1-3 days retention
|
||||||
|
} else if (retentionDays <= 7) {
|
||||||
|
return 24; // Daily for 4-7 days retention
|
||||||
|
} else if (retentionDays <= 30) {
|
||||||
|
return 48; // Every 2 days for 8-30 days retention
|
||||||
|
} else {
|
||||||
|
return 168; // Weekly for 30+ days retention
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up old events and mirror jobs for a specific user
|
||||||
|
* @param retentionSeconds - Retention period in seconds
|
||||||
|
*/
|
||||||
|
async function cleanupForUser(userId: string, retentionSeconds: number): Promise<CleanupResult> {
|
||||||
|
try {
|
||||||
|
const retentionDays = retentionSeconds / (24 * 60 * 60); // Convert to days for logging
|
||||||
|
console.log(`Running cleanup for user ${userId} with ${retentionDays} days retention (${retentionSeconds} seconds)`);
|
||||||
|
|
||||||
|
// Calculate cutoff date using seconds
|
||||||
const cutoffDate = new Date();
|
const cutoffDate = new Date();
|
||||||
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
|
cutoffDate.setTime(cutoffDate.getTime() - retentionSeconds * 1000);
|
||||||
|
|
||||||
let eventsDeleted = 0;
|
let eventsDeleted = 0;
|
||||||
let mirrorJobsDeleted = 0;
|
let mirrorJobsDeleted = 0;
|
||||||
@@ -75,7 +99,9 @@ async function cleanupForUser(userId: string, retentionDays: number): Promise<Cl
|
|||||||
async function updateCleanupConfig(userId: string, cleanupConfig: any) {
|
async function updateCleanupConfig(userId: string, cleanupConfig: any) {
|
||||||
try {
|
try {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const nextRun = new Date(now.getTime() + 24 * 60 * 60 * 1000); // Next day
|
const retentionSeconds = cleanupConfig.retentionDays || 604800; // Default 7 days in seconds
|
||||||
|
const cleanupIntervalHours = calculateCleanupInterval(retentionSeconds);
|
||||||
|
const nextRun = new Date(now.getTime() + cleanupIntervalHours * 60 * 60 * 1000);
|
||||||
|
|
||||||
const updatedConfig = {
|
const updatedConfig = {
|
||||||
...cleanupConfig,
|
...cleanupConfig,
|
||||||
@@ -91,7 +117,8 @@ async function updateCleanupConfig(userId: string, cleanupConfig: any) {
|
|||||||
})
|
})
|
||||||
.where(eq(configs.userId, userId));
|
.where(eq(configs.userId, userId));
|
||||||
|
|
||||||
console.log(`Updated cleanup config for user ${userId}, next run: ${nextRun.toISOString()}`);
|
const retentionDays = retentionSeconds / (24 * 60 * 60);
|
||||||
|
console.log(`Updated cleanup config for user ${userId}, next run: ${nextRun.toISOString()} (${cleanupIntervalHours}h interval for ${retentionDays}d retention)`);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Error updating cleanup config for user ${userId}:`, error);
|
console.error(`Error updating cleanup config for user ${userId}:`, error);
|
||||||
}
|
}
|
||||||
@@ -116,7 +143,7 @@ export async function runAutomaticCleanup(): Promise<CleanupResult[]> {
|
|||||||
for (const config of userConfigs) {
|
for (const config of userConfigs) {
|
||||||
try {
|
try {
|
||||||
const cleanupConfig = config.cleanupConfig;
|
const cleanupConfig = config.cleanupConfig;
|
||||||
|
|
||||||
// Skip if cleanup is not enabled
|
// Skip if cleanup is not enabled
|
||||||
if (!cleanupConfig?.enabled) {
|
if (!cleanupConfig?.enabled) {
|
||||||
continue;
|
continue;
|
||||||
@@ -124,10 +151,10 @@ export async function runAutomaticCleanup(): Promise<CleanupResult[]> {
|
|||||||
|
|
||||||
// Check if it's time to run cleanup
|
// Check if it's time to run cleanup
|
||||||
const nextRun = cleanupConfig.nextRun ? new Date(cleanupConfig.nextRun) : null;
|
const nextRun = cleanupConfig.nextRun ? new Date(cleanupConfig.nextRun) : null;
|
||||||
|
|
||||||
// If nextRun is null or in the past, run cleanup
|
// If nextRun is null or in the past, run cleanup
|
||||||
if (!nextRun || now >= nextRun) {
|
if (!nextRun || now >= nextRun) {
|
||||||
const result = await cleanupForUser(config.userId, cleanupConfig.retentionDays || 7);
|
const result = await cleanupForUser(config.userId, cleanupConfig.retentionDays || 604800);
|
||||||
results.push(result);
|
results.push(result);
|
||||||
|
|
||||||
// Update the cleanup config with new run times
|
// Update the cleanup config with new run times
|
||||||
@@ -154,30 +181,41 @@ export async function runAutomaticCleanup(): Promise<CleanupResult[]> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Service state tracking
|
||||||
|
let cleanupIntervalId: NodeJS.Timeout | null = null;
|
||||||
|
let initialCleanupTimeoutId: NodeJS.Timeout | null = null;
|
||||||
|
let cleanupServiceRunning = false;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start the cleanup service with periodic execution
|
* Start the cleanup service with periodic execution
|
||||||
* This should be called when the application starts
|
* This should be called when the application starts
|
||||||
*/
|
*/
|
||||||
export function startCleanupService() {
|
export function startCleanupService() {
|
||||||
|
if (cleanupServiceRunning) {
|
||||||
|
console.log('⚠️ Cleanup service already running, skipping start');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
console.log('Starting background cleanup service...');
|
console.log('Starting background cleanup service...');
|
||||||
|
|
||||||
// Run cleanup every hour
|
// Run cleanup every hour
|
||||||
const CLEANUP_INTERVAL = 60 * 60 * 1000; // 1 hour in milliseconds
|
const CLEANUP_INTERVAL = 60 * 60 * 1000; // 1 hour in milliseconds
|
||||||
|
|
||||||
// Run initial cleanup after 5 minutes to allow app to fully start
|
// Run initial cleanup after 5 minutes to allow app to fully start
|
||||||
setTimeout(() => {
|
initialCleanupTimeoutId = setTimeout(() => {
|
||||||
runAutomaticCleanup().catch(error => {
|
runAutomaticCleanup().catch(error => {
|
||||||
console.error('Error in initial cleanup run:', error);
|
console.error('Error in initial cleanup run:', error);
|
||||||
});
|
});
|
||||||
}, 5 * 60 * 1000); // 5 minutes
|
}, 5 * 60 * 1000); // 5 minutes
|
||||||
|
|
||||||
// Set up periodic cleanup
|
// Set up periodic cleanup
|
||||||
setInterval(() => {
|
cleanupIntervalId = setInterval(() => {
|
||||||
runAutomaticCleanup().catch(error => {
|
runAutomaticCleanup().catch(error => {
|
||||||
console.error('Error in periodic cleanup run:', error);
|
console.error('Error in periodic cleanup run:', error);
|
||||||
});
|
});
|
||||||
}, CLEANUP_INTERVAL);
|
}, CLEANUP_INTERVAL);
|
||||||
|
|
||||||
|
cleanupServiceRunning = true;
|
||||||
console.log(`✅ Cleanup service started. Will run every ${CLEANUP_INTERVAL / 1000 / 60} minutes.`);
|
console.log(`✅ Cleanup service started. Will run every ${CLEANUP_INTERVAL / 1000 / 60} minutes.`);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -185,7 +223,36 @@ export function startCleanupService() {
|
|||||||
* Stop the cleanup service (for testing or shutdown)
|
* Stop the cleanup service (for testing or shutdown)
|
||||||
*/
|
*/
|
||||||
export function stopCleanupService() {
|
export function stopCleanupService() {
|
||||||
// Note: In a real implementation, you'd want to track the interval ID
|
if (!cleanupServiceRunning) {
|
||||||
// and clear it here. For now, this is a placeholder.
|
console.log('Cleanup service is not running');
|
||||||
console.log('Cleanup service stop requested (not implemented)');
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('🛑 Stopping cleanup service...');
|
||||||
|
|
||||||
|
// Clear the periodic interval
|
||||||
|
if (cleanupIntervalId) {
|
||||||
|
clearInterval(cleanupIntervalId);
|
||||||
|
cleanupIntervalId = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear the initial timeout
|
||||||
|
if (initialCleanupTimeoutId) {
|
||||||
|
clearTimeout(initialCleanupTimeoutId);
|
||||||
|
initialCleanupTimeoutId = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanupServiceRunning = false;
|
||||||
|
console.log('✅ Cleanup service stopped');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get cleanup service status
|
||||||
|
*/
|
||||||
|
export function getCleanupServiceStatus() {
|
||||||
|
return {
|
||||||
|
running: cleanupServiceRunning,
|
||||||
|
hasInterval: cleanupIntervalId !== null,
|
||||||
|
hasInitialTimeout: initialCleanupTimeoutId !== null,
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ export const configSchema = z.object({
|
|||||||
}),
|
}),
|
||||||
cleanupConfig: z.object({
|
cleanupConfig: z.object({
|
||||||
enabled: z.boolean().default(false),
|
enabled: z.boolean().default(false),
|
||||||
retentionDays: z.number().min(1).default(7), // in days
|
retentionDays: z.number().min(1).default(604800), // in seconds (default: 7 days)
|
||||||
lastRun: z.date().optional(),
|
lastRun: z.date().optional(),
|
||||||
nextRun: z.date().optional(),
|
nextRun: z.date().optional(),
|
||||||
}),
|
}),
|
||||||
@@ -152,6 +152,9 @@ export const organizationSchema = z.object({
|
|||||||
errorMessage: z.string().optional(),
|
errorMessage: z.string().optional(),
|
||||||
|
|
||||||
repositoryCount: z.number().default(0),
|
repositoryCount: z.number().default(0),
|
||||||
|
publicRepositoryCount: z.number().optional(),
|
||||||
|
privateRepositoryCount: z.number().optional(),
|
||||||
|
forkRepositoryCount: z.number().optional(),
|
||||||
|
|
||||||
createdAt: z.date().default(() => new Date()),
|
createdAt: z.date().default(() => new Date()),
|
||||||
updatedAt: z.date().default(() => new Date()),
|
updatedAt: z.date().default(() => new Date()),
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||||
import { Octokit } from "@octokit/rest";
|
import { Octokit } from "@octokit/rest";
|
||||||
import { repoStatusEnum } from "@/types/Repository";
|
import { repoStatusEnum } from "@/types/Repository";
|
||||||
|
import { getOrCreateGiteaOrg } from "./gitea";
|
||||||
|
|
||||||
// Mock the isRepoPresentInGitea function
|
// Mock the isRepoPresentInGitea function
|
||||||
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));
|
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));
|
||||||
@@ -27,23 +28,17 @@ mock.module("@/lib/helpers", () => {
|
|||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
// Mock superagent
|
// Mock http-client
|
||||||
mock.module("superagent", () => {
|
mock.module("@/lib/http-client", () => {
|
||||||
const mockPost = mock(() => ({
|
|
||||||
set: () => ({
|
|
||||||
set: () => ({
|
|
||||||
send: () => Promise.resolve({ body: { id: 123 } })
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}));
|
|
||||||
|
|
||||||
const mockGet = mock(() => ({
|
|
||||||
set: () => Promise.resolve({ body: [] })
|
|
||||||
}));
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
post: mockPost,
|
httpPost: mock(() => Promise.resolve({ data: { id: 123 }, status: 200, statusText: 'OK', headers: new Headers() })),
|
||||||
get: mockGet
|
httpGet: mock(() => Promise.resolve({ data: [], status: 200, statusText: 'OK', headers: new Headers() })),
|
||||||
|
HttpError: class MockHttpError extends Error {
|
||||||
|
constructor(message: string, public status: number, public statusText: string, public response?: string) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'HttpError';
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -117,4 +112,182 @@ describe("Gitea Repository Mirroring", () => {
|
|||||||
// Check that the function was called
|
// Check that the function was called
|
||||||
expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
|
expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("getOrCreateGiteaOrg handles JSON parsing errors gracefully", async () => {
|
||||||
|
// Mock fetch to return invalid JSON
|
||||||
|
const originalFetch = global.fetch;
|
||||||
|
global.fetch = mock(async (url: string) => {
|
||||||
|
if (url.includes("/api/v1/orgs/")) {
|
||||||
|
// Mock response that looks successful but has invalid JSON
|
||||||
|
return {
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
get: (name: string) => name === "content-type" ? "application/json" : null
|
||||||
|
},
|
||||||
|
json: () => Promise.reject(new Error("Unexpected token in JSON")),
|
||||||
|
text: () => Promise.resolve("Invalid JSON response"),
|
||||||
|
clone: function() {
|
||||||
|
return {
|
||||||
|
text: () => Promise.resolve("Invalid JSON response")
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} as any;
|
||||||
|
}
|
||||||
|
return originalFetch(url);
|
||||||
|
});
|
||||||
|
|
||||||
|
const config = {
|
||||||
|
userId: "user-id",
|
||||||
|
giteaConfig: {
|
||||||
|
url: "https://gitea.example.com",
|
||||||
|
token: "gitea-token"
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
await getOrCreateGiteaOrg({
|
||||||
|
orgName: "test-org",
|
||||||
|
config
|
||||||
|
});
|
||||||
|
// Should not reach here
|
||||||
|
expect(true).toBe(false);
|
||||||
|
} catch (error) {
|
||||||
|
// Should catch the JSON parsing error with a descriptive message
|
||||||
|
expect(error).toBeInstanceOf(Error);
|
||||||
|
expect((error as Error).message).toContain("Failed to parse JSON response from Gitea API");
|
||||||
|
} finally {
|
||||||
|
// Restore original fetch
|
||||||
|
global.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getOrCreateGiteaOrg handles non-JSON content-type gracefully", async () => {
|
||||||
|
// Mock fetch to return HTML instead of JSON
|
||||||
|
const originalFetch = global.fetch;
|
||||||
|
global.fetch = mock(async (url: string) => {
|
||||||
|
if (url.includes("/api/v1/orgs/")) {
|
||||||
|
return {
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
get: (name: string) => name === "content-type" ? "text/html" : null
|
||||||
|
},
|
||||||
|
text: () => Promise.resolve("<html><body>Error page</body></html>")
|
||||||
|
} as any;
|
||||||
|
}
|
||||||
|
return originalFetch(url);
|
||||||
|
});
|
||||||
|
|
||||||
|
const config = {
|
||||||
|
userId: "user-id",
|
||||||
|
giteaConfig: {
|
||||||
|
url: "https://gitea.example.com",
|
||||||
|
token: "gitea-token"
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
await getOrCreateGiteaOrg({
|
||||||
|
orgName: "test-org",
|
||||||
|
config
|
||||||
|
});
|
||||||
|
// Should not reach here
|
||||||
|
expect(true).toBe(false);
|
||||||
|
} catch (error) {
|
||||||
|
// Should catch the content-type error
|
||||||
|
expect(error).toBeInstanceOf(Error);
|
||||||
|
expect((error as Error).message).toContain("Invalid response format from Gitea API");
|
||||||
|
expect((error as Error).message).toContain("text/html");
|
||||||
|
} finally {
|
||||||
|
// Restore original fetch
|
||||||
|
global.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test("mirrorGitHubOrgToGitea handles empty organizations correctly", async () => {
|
||||||
|
// Mock the createMirrorJob function
|
||||||
|
const mockCreateMirrorJob = mock(() => Promise.resolve("job-id"));
|
||||||
|
|
||||||
|
// Mock the getOrCreateGiteaOrg function
|
||||||
|
const mockGetOrCreateGiteaOrg = mock(() => Promise.resolve("gitea-org-id"));
|
||||||
|
|
||||||
|
// Create a test version of the function with mocked dependencies
|
||||||
|
const testMirrorGitHubOrgToGitea = async ({
|
||||||
|
organization,
|
||||||
|
config,
|
||||||
|
}: {
|
||||||
|
organization: any;
|
||||||
|
config: any;
|
||||||
|
}) => {
|
||||||
|
// Simulate the function logic for empty organization
|
||||||
|
console.log(`Mirroring organization ${organization.name}`);
|
||||||
|
|
||||||
|
// Mock: get or create Gitea org
|
||||||
|
await mockGetOrCreateGiteaOrg();
|
||||||
|
|
||||||
|
// Mock: query the db with the org name and get the repos
|
||||||
|
const orgRepos: any[] = []; // Empty array to simulate no repositories
|
||||||
|
|
||||||
|
if (orgRepos.length === 0) {
|
||||||
|
console.log(`No repositories found for organization ${organization.name} - marking as successfully mirrored`);
|
||||||
|
} else {
|
||||||
|
console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
|
||||||
|
// Repository processing would happen here
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Organization ${organization.name} mirrored successfully`);
|
||||||
|
|
||||||
|
// Mock: Append log for "mirrored" status
|
||||||
|
await mockCreateMirrorJob({
|
||||||
|
userId: config.userId,
|
||||||
|
organizationId: organization.id,
|
||||||
|
organizationName: organization.name,
|
||||||
|
message: `Successfully mirrored organization: ${organization.name}`,
|
||||||
|
details: orgRepos.length === 0
|
||||||
|
? `Organization ${organization.name} was processed successfully (no repositories found).`
|
||||||
|
: `Organization ${organization.name} was mirrored to Gitea with ${orgRepos.length} repositories.`,
|
||||||
|
status: "mirrored",
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create mock organization
|
||||||
|
const organization = {
|
||||||
|
id: "org-id",
|
||||||
|
name: "empty-org",
|
||||||
|
status: "imported"
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create mock config
|
||||||
|
const config = {
|
||||||
|
id: "config-id",
|
||||||
|
userId: "user-id",
|
||||||
|
githubConfig: {
|
||||||
|
token: "github-token"
|
||||||
|
},
|
||||||
|
giteaConfig: {
|
||||||
|
url: "https://gitea.example.com",
|
||||||
|
token: "gitea-token"
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Call the test function
|
||||||
|
await testMirrorGitHubOrgToGitea({
|
||||||
|
organization,
|
||||||
|
config
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify that the mirror job was created with the correct details for empty org
|
||||||
|
expect(mockCreateMirrorJob).toHaveBeenCalledWith({
|
||||||
|
userId: "user-id",
|
||||||
|
organizationId: "org-id",
|
||||||
|
organizationName: "empty-org",
|
||||||
|
message: "Successfully mirrored organization: empty-org",
|
||||||
|
details: "Organization empty-org was processed successfully (no repositories found).",
|
||||||
|
status: "mirrored",
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify that getOrCreateGiteaOrg was called
|
||||||
|
expect(mockGetOrCreateGiteaOrg).toHaveBeenCalled();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
279
src/lib/gitea.ts
@@ -6,7 +6,7 @@ import {
|
|||||||
import { Octokit } from "@octokit/rest";
|
import { Octokit } from "@octokit/rest";
|
||||||
import type { Config } from "@/types/config";
|
import type { Config } from "@/types/config";
|
||||||
import type { Organization, Repository } from "./db/schema";
|
import type { Organization, Repository } from "./db/schema";
|
||||||
import superagent from "superagent";
|
import { httpPost, httpGet } from "./http-client";
|
||||||
import { createMirrorJob } from "./helpers";
|
import { createMirrorJob } from "./helpers";
|
||||||
import { db, organizations, repositories } from "./db";
|
import { db, organizations, repositories } from "./db";
|
||||||
import { eq } from "drizzle-orm";
|
import { eq } from "drizzle-orm";
|
||||||
@@ -181,19 +181,17 @@ export const mirrorGithubRepoToGitea = async ({
|
|||||||
|
|
||||||
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/migrate`;
|
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/migrate`;
|
||||||
|
|
||||||
const response = await superagent
|
const response = await httpPost(apiUrl, {
|
||||||
.post(apiUrl)
|
clone_addr: cloneAddress,
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
repo_name: repository.name,
|
||||||
.set("Content-Type", "application/json")
|
mirror: true,
|
||||||
.send({
|
private: repository.isPrivate,
|
||||||
clone_addr: cloneAddress,
|
repo_owner: config.giteaConfig.username,
|
||||||
repo_name: repository.name,
|
description: "",
|
||||||
mirror: true,
|
service: "git",
|
||||||
private: repository.isPrivate,
|
}, {
|
||||||
repo_owner: config.giteaConfig.username,
|
"Authorization": `token ${config.giteaConfig.token}`,
|
||||||
description: "",
|
});
|
||||||
service: "git",
|
|
||||||
});
|
|
||||||
|
|
||||||
// clone issues
|
// clone issues
|
||||||
if (config.githubConfig.mirrorIssues) {
|
if (config.githubConfig.mirrorIssues) {
|
||||||
@@ -229,7 +227,7 @@ export const mirrorGithubRepoToGitea = async ({
|
|||||||
status: "mirrored",
|
status: "mirrored",
|
||||||
});
|
});
|
||||||
|
|
||||||
return response.body;
|
return response.data;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(
|
console.error(
|
||||||
`Error while mirroring repository ${repository.name}: ${
|
`Error while mirroring repository ${repository.name}: ${
|
||||||
@@ -283,6 +281,8 @@ export async function getOrCreateGiteaOrg({
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
console.log(`Attempting to get or create Gitea organization: ${orgName}`);
|
||||||
|
|
||||||
const orgRes = await fetch(
|
const orgRes = await fetch(
|
||||||
`${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
|
`${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
|
||||||
{
|
{
|
||||||
@@ -293,13 +293,36 @@ export async function getOrCreateGiteaOrg({
|
|||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
console.log(`Get org response status: ${orgRes.status} for org: ${orgName}`);
|
||||||
|
|
||||||
if (orgRes.ok) {
|
if (orgRes.ok) {
|
||||||
const org = await orgRes.json();
|
// Check if response is actually JSON
|
||||||
// Note: Organization events are handled by the main mirroring process
|
const contentType = orgRes.headers.get("content-type");
|
||||||
// to avoid duplicate events
|
if (!contentType || !contentType.includes("application/json")) {
|
||||||
return org.id;
|
console.warn(`Expected JSON response but got content-type: ${contentType}`);
|
||||||
|
const responseText = await orgRes.text();
|
||||||
|
console.warn(`Response body: ${responseText}`);
|
||||||
|
throw new Error(`Invalid response format from Gitea API. Expected JSON but got: ${contentType}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone the response to handle potential JSON parsing errors
|
||||||
|
const orgResClone = orgRes.clone();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const org = await orgRes.json();
|
||||||
|
console.log(`Successfully retrieved existing org: ${orgName} with ID: ${org.id}`);
|
||||||
|
// Note: Organization events are handled by the main mirroring process
|
||||||
|
// to avoid duplicate events
|
||||||
|
return org.id;
|
||||||
|
} catch (jsonError) {
|
||||||
|
const responseText = await orgResClone.text();
|
||||||
|
console.error(`Failed to parse JSON response for existing org: ${responseText}`);
|
||||||
|
throw new Error(`Failed to parse JSON response from Gitea API: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
console.log(`Organization ${orgName} not found, attempting to create it`);
|
||||||
|
|
||||||
const createRes = await fetch(`${config.giteaConfig.url}/api/v1/orgs`, {
|
const createRes = await fetch(`${config.giteaConfig.url}/api/v1/orgs`, {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {
|
headers: {
|
||||||
@@ -314,21 +337,46 @@ export async function getOrCreateGiteaOrg({
|
|||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
console.log(`Create org response status: ${createRes.status} for org: ${orgName}`);
|
||||||
|
|
||||||
if (!createRes.ok) {
|
if (!createRes.ok) {
|
||||||
throw new Error(`Failed to create Gitea org: ${await createRes.text()}`);
|
const errorText = await createRes.text();
|
||||||
|
console.error(`Failed to create org ${orgName}. Status: ${createRes.status}, Response: ${errorText}`);
|
||||||
|
throw new Error(`Failed to create Gitea org: ${errorText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if response is actually JSON
|
||||||
|
const createContentType = createRes.headers.get("content-type");
|
||||||
|
if (!createContentType || !createContentType.includes("application/json")) {
|
||||||
|
console.warn(`Expected JSON response but got content-type: ${createContentType}`);
|
||||||
|
const responseText = await createRes.text();
|
||||||
|
console.warn(`Response body: ${responseText}`);
|
||||||
|
throw new Error(`Invalid response format from Gitea API. Expected JSON but got: ${createContentType}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Note: Organization creation events are handled by the main mirroring process
|
// Note: Organization creation events are handled by the main mirroring process
|
||||||
// to avoid duplicate events
|
// to avoid duplicate events
|
||||||
|
|
||||||
const newOrg = await createRes.json();
|
// Clone the response to handle potential JSON parsing errors
|
||||||
return newOrg.id;
|
const createResClone = createRes.clone();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const newOrg = await createRes.json();
|
||||||
|
console.log(`Successfully created new org: ${orgName} with ID: ${newOrg.id}`);
|
||||||
|
return newOrg.id;
|
||||||
|
} catch (jsonError) {
|
||||||
|
const responseText = await createResClone.text();
|
||||||
|
console.error(`Failed to parse JSON response for new org: ${responseText}`);
|
||||||
|
throw new Error(`Failed to parse JSON response from Gitea API: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}`);
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorMessage =
|
const errorMessage =
|
||||||
error instanceof Error
|
error instanceof Error
|
||||||
? error.message
|
? error.message
|
||||||
: "Unknown error occurred in getOrCreateGiteaOrg.";
|
: "Unknown error occurred in getOrCreateGiteaOrg.";
|
||||||
|
|
||||||
|
console.error(`Error in getOrCreateGiteaOrg for ${orgName}: ${errorMessage}`);
|
||||||
|
|
||||||
await createMirrorJob({
|
await createMirrorJob({
|
||||||
userId: config.userId,
|
userId: config.userId,
|
||||||
organizationId: orgId,
|
organizationId: orgId,
|
||||||
@@ -410,17 +458,15 @@ export async function mirrorGitHubRepoToGiteaOrg({
|
|||||||
|
|
||||||
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/migrate`;
|
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/migrate`;
|
||||||
|
|
||||||
const migrateRes = await superagent
|
const migrateRes = await httpPost(apiUrl, {
|
||||||
.post(apiUrl)
|
clone_addr: cloneAddress,
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
uid: giteaOrgId,
|
||||||
.set("Content-Type", "application/json")
|
repo_name: repository.name,
|
||||||
.send({
|
mirror: true,
|
||||||
clone_addr: cloneAddress,
|
private: repository.isPrivate,
|
||||||
uid: giteaOrgId,
|
}, {
|
||||||
repo_name: repository.name,
|
"Authorization": `token ${config.giteaConfig.token}`,
|
||||||
mirror: true,
|
});
|
||||||
private: repository.isPrivate,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Clone issues
|
// Clone issues
|
||||||
if (config.githubConfig?.mirrorIssues) {
|
if (config.githubConfig?.mirrorIssues) {
|
||||||
@@ -458,7 +504,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
|
|||||||
status: "mirrored",
|
status: "mirrored",
|
||||||
});
|
});
|
||||||
|
|
||||||
return migrateRes.body;
|
return migrateRes.data;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(
|
console.error(
|
||||||
`Error while mirroring repository ${repository.name}: ${
|
`Error while mirroring repository ${repository.name}: ${
|
||||||
@@ -583,60 +629,59 @@ export async function mirrorGitHubOrgToGitea({
|
|||||||
.where(eq(repositories.organization, organization.name));
|
.where(eq(repositories.organization, organization.name));
|
||||||
|
|
||||||
if (orgRepos.length === 0) {
|
if (orgRepos.length === 0) {
|
||||||
console.log(`No repositories found for organization ${organization.name}`);
|
console.log(`No repositories found for organization ${organization.name} - marking as successfully mirrored`);
|
||||||
return;
|
} else {
|
||||||
}
|
console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
|
||||||
|
|
||||||
console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
|
// Import the processWithRetry function
|
||||||
|
const { processWithRetry } = await import("@/lib/utils/concurrency");
|
||||||
|
|
||||||
// Import the processWithRetry function
|
// Process repositories in parallel with concurrency control
|
||||||
const { processWithRetry } = await import("@/lib/utils/concurrency");
|
await processWithRetry(
|
||||||
|
orgRepos,
|
||||||
|
async (repo) => {
|
||||||
|
// Prepare repository data
|
||||||
|
const repoData = {
|
||||||
|
...repo,
|
||||||
|
status: repo.status as RepoStatus,
|
||||||
|
visibility: repo.visibility as RepositoryVisibility,
|
||||||
|
lastMirrored: repo.lastMirrored ?? undefined,
|
||||||
|
errorMessage: repo.errorMessage ?? undefined,
|
||||||
|
organization: repo.organization ?? undefined,
|
||||||
|
forkedFrom: repo.forkedFrom ?? undefined,
|
||||||
|
mirroredLocation: repo.mirroredLocation || "",
|
||||||
|
};
|
||||||
|
|
||||||
// Process repositories in parallel with concurrency control
|
// Log the start of mirroring
|
||||||
await processWithRetry(
|
console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);
|
||||||
orgRepos,
|
|
||||||
async (repo) => {
|
|
||||||
// Prepare repository data
|
|
||||||
const repoData = {
|
|
||||||
...repo,
|
|
||||||
status: repo.status as RepoStatus,
|
|
||||||
visibility: repo.visibility as RepositoryVisibility,
|
|
||||||
lastMirrored: repo.lastMirrored ?? undefined,
|
|
||||||
errorMessage: repo.errorMessage ?? undefined,
|
|
||||||
organization: repo.organization ?? undefined,
|
|
||||||
forkedFrom: repo.forkedFrom ?? undefined,
|
|
||||||
mirroredLocation: repo.mirroredLocation || "",
|
|
||||||
};
|
|
||||||
|
|
||||||
// Log the start of mirroring
|
// Mirror the repository
|
||||||
console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);
|
await mirrorGitHubRepoToGiteaOrg({
|
||||||
|
octokit,
|
||||||
|
config,
|
||||||
|
repository: repoData,
|
||||||
|
giteaOrgId,
|
||||||
|
orgName: organization.name,
|
||||||
|
});
|
||||||
|
|
||||||
// Mirror the repository
|
return repo;
|
||||||
await mirrorGitHubRepoToGiteaOrg({
|
|
||||||
octokit,
|
|
||||||
config,
|
|
||||||
repository: repoData,
|
|
||||||
giteaOrgId,
|
|
||||||
orgName: organization.name,
|
|
||||||
});
|
|
||||||
|
|
||||||
return repo;
|
|
||||||
},
|
|
||||||
{
|
|
||||||
concurrencyLimit: 3, // Process 3 repositories at a time
|
|
||||||
maxRetries: 2,
|
|
||||||
retryDelay: 2000,
|
|
||||||
onProgress: (completed, total, result) => {
|
|
||||||
const percentComplete = Math.round((completed / total) * 100);
|
|
||||||
if (result) {
|
|
||||||
console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
onRetry: (repo, error, attempt) => {
|
{
|
||||||
console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
|
concurrencyLimit: 3, // Process 3 repositories at a time
|
||||||
|
maxRetries: 2,
|
||||||
|
retryDelay: 2000,
|
||||||
|
onProgress: (completed, total, result) => {
|
||||||
|
const percentComplete = Math.round((completed / total) * 100);
|
||||||
|
if (result) {
|
||||||
|
console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onRetry: (repo, error, attempt) => {
|
||||||
|
console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
);
|
||||||
);
|
}
|
||||||
|
|
||||||
console.log(`Organization ${organization.name} mirrored successfully`);
|
console.log(`Organization ${organization.name} mirrored successfully`);
|
||||||
|
|
||||||
@@ -657,7 +702,9 @@ export async function mirrorGitHubOrgToGitea({
|
|||||||
organizationId: organization.id,
|
organizationId: organization.id,
|
||||||
organizationName: organization.name,
|
organizationName: organization.name,
|
||||||
message: `Successfully mirrored organization: ${organization.name}`,
|
message: `Successfully mirrored organization: ${organization.name}`,
|
||||||
details: `Organization ${organization.name} was mirrored to Gitea.`,
|
details: orgRepos.length === 0
|
||||||
|
? `Organization ${organization.name} was processed successfully (no repositories found).`
|
||||||
|
: `Organization ${organization.name} was mirrored to Gitea with ${orgRepos.length} repositories.`,
|
||||||
status: repoStatusEnum.parse("mirrored"),
|
status: repoStatusEnum.parse("mirrored"),
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -751,9 +798,9 @@ export const syncGiteaRepo = async ({
|
|||||||
// Use the actual owner where the repo was found
|
// Use the actual owner where the repo was found
|
||||||
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/${actualOwner}/${repository.name}/mirror-sync`;
|
const apiUrl = `${config.giteaConfig.url}/api/v1/repos/${actualOwner}/${repository.name}/mirror-sync`;
|
||||||
|
|
||||||
const response = await superagent
|
const response = await httpPost(apiUrl, undefined, {
|
||||||
.post(apiUrl)
|
"Authorization": `token ${config.giteaConfig.token}`,
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`);
|
});
|
||||||
|
|
||||||
// Mark repo as "synced" in DB
|
// Mark repo as "synced" in DB
|
||||||
await db
|
await db
|
||||||
@@ -779,7 +826,7 @@ export const syncGiteaRepo = async ({
|
|||||||
|
|
||||||
console.log(`Repository ${repository.name} synced successfully`);
|
console.log(`Repository ${repository.name} synced successfully`);
|
||||||
|
|
||||||
return response.body;
|
return response.data;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(
|
console.error(
|
||||||
`Error while syncing repository ${repository.name}: ${
|
`Error while syncing repository ${repository.name}: ${
|
||||||
@@ -866,13 +913,14 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get existing labels from Gitea
|
// Get existing labels from Gitea
|
||||||
const giteaLabelsRes = await superagent
|
const giteaLabelsRes = await httpGet(
|
||||||
.get(
|
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`,
|
||||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
|
{
|
||||||
)
|
"Authorization": `token ${config.giteaConfig.token}`,
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`);
|
}
|
||||||
|
);
|
||||||
|
|
||||||
const giteaLabels = giteaLabelsRes.body;
|
const giteaLabels = giteaLabelsRes.data;
|
||||||
const labelMap = new Map<string, number>(
|
const labelMap = new Map<string, number>(
|
||||||
giteaLabels.map((label: any) => [label.name, label.id])
|
giteaLabels.map((label: any) => [label.name, label.id])
|
||||||
);
|
);
|
||||||
@@ -897,15 +945,16 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
|||||||
giteaLabelIds.push(labelMap.get(name)!);
|
giteaLabelIds.push(labelMap.get(name)!);
|
||||||
} else {
|
} else {
|
||||||
try {
|
try {
|
||||||
const created = await superagent
|
const created = await httpPost(
|
||||||
.post(
|
`${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`,
|
||||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
|
{ name, color: "#ededed" }, // Default color
|
||||||
)
|
{
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
"Authorization": `token ${config.giteaConfig!.token}`,
|
||||||
.send({ name, color: "#ededed" }); // Default color
|
}
|
||||||
|
);
|
||||||
|
|
||||||
labelMap.set(name, created.body.id);
|
labelMap.set(name, created.data.id);
|
||||||
giteaLabelIds.push(created.body.id);
|
giteaLabelIds.push(created.data.id);
|
||||||
} catch (labelErr) {
|
} catch (labelErr) {
|
||||||
console.error(
|
console.error(
|
||||||
`Failed to create label "${name}" in Gitea: ${labelErr}`
|
`Failed to create label "${name}" in Gitea: ${labelErr}`
|
||||||
@@ -931,12 +980,13 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Create the issue in Gitea
|
// Create the issue in Gitea
|
||||||
const createdIssue = await superagent
|
const createdIssue = await httpPost(
|
||||||
.post(
|
`${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`,
|
||||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
|
issuePayload,
|
||||||
)
|
{
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
"Authorization": `token ${config.giteaConfig!.token}`,
|
||||||
.send(issuePayload);
|
}
|
||||||
|
);
|
||||||
|
|
||||||
// Clone comments
|
// Clone comments
|
||||||
const comments = await octokit.paginate(
|
const comments = await octokit.paginate(
|
||||||
@@ -955,21 +1005,22 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
|||||||
await processWithRetry(
|
await processWithRetry(
|
||||||
comments,
|
comments,
|
||||||
async (comment) => {
|
async (comment) => {
|
||||||
await superagent
|
await httpPost(
|
||||||
.post(
|
`${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.data.number}/comments`,
|
||||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
|
{
|
||||||
)
|
|
||||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
|
||||||
.send({
|
|
||||||
body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
|
body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
|
||||||
});
|
},
|
||||||
|
{
|
||||||
|
"Authorization": `token ${config.giteaConfig!.token}`,
|
||||||
|
}
|
||||||
|
);
|
||||||
return comment;
|
return comment;
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
concurrencyLimit: 5,
|
concurrencyLimit: 5,
|
||||||
maxRetries: 2,
|
maxRetries: 2,
|
||||||
retryDelay: 1000,
|
retryDelay: 1000,
|
||||||
onRetry: (comment, error, attempt) => {
|
onRetry: (_comment, error, attempt) => {
|
||||||
console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
|
console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
204
src/lib/http-client.ts
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
/**
|
||||||
|
* HTTP client utility functions using fetch() for consistent error handling
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface HttpResponse<T = any> {
|
||||||
|
data: T;
|
||||||
|
status: number;
|
||||||
|
statusText: string;
|
||||||
|
headers: Headers;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class HttpError extends Error {
|
||||||
|
constructor(
|
||||||
|
message: string,
|
||||||
|
public status: number,
|
||||||
|
public statusText: string,
|
||||||
|
public response?: string
|
||||||
|
) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'HttpError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced fetch with consistent error handling and JSON parsing
|
||||||
|
*/
|
||||||
|
export async function httpRequest<T = any>(
|
||||||
|
url: string,
|
||||||
|
options: RequestInit = {}
|
||||||
|
): Promise<HttpResponse<T>> {
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, {
|
||||||
|
...options,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...options.headers,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clone response for error handling
|
||||||
|
const responseClone = response.clone();
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
let errorMessage = `HTTP ${response.status}: ${response.statusText}`;
|
||||||
|
let responseText = '';
|
||||||
|
|
||||||
|
try {
|
||||||
|
responseText = await responseClone.text();
|
||||||
|
if (responseText) {
|
||||||
|
errorMessage += ` - ${responseText}`;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore text parsing errors
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new HttpError(
|
||||||
|
errorMessage,
|
||||||
|
response.status,
|
||||||
|
response.statusText,
|
||||||
|
responseText
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check content type for JSON responses
|
||||||
|
const contentType = response.headers.get('content-type');
|
||||||
|
let data: T;
|
||||||
|
|
||||||
|
if (contentType && contentType.includes('application/json')) {
|
||||||
|
try {
|
||||||
|
data = await response.json();
|
||||||
|
} catch (jsonError) {
|
||||||
|
const responseText = await responseClone.text();
|
||||||
|
|
||||||
|
// Enhanced JSON parsing error logging
|
||||||
|
console.error("=== JSON PARSING ERROR ===");
|
||||||
|
console.error("URL:", url);
|
||||||
|
console.error("Status:", response.status, response.statusText);
|
||||||
|
console.error("Content-Type:", contentType);
|
||||||
|
console.error("Response length:", responseText.length);
|
||||||
|
console.error("Response preview (first 500 chars):", responseText.substring(0, 500));
|
||||||
|
console.error("JSON Error:", jsonError instanceof Error ? jsonError.message : String(jsonError));
|
||||||
|
console.error("========================");
|
||||||
|
|
||||||
|
throw new HttpError(
|
||||||
|
`Failed to parse JSON response from ${url}: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}. Response: ${responseText.substring(0, 200)}${responseText.length > 200 ? '...' : ''}`,
|
||||||
|
response.status,
|
||||||
|
response.statusText,
|
||||||
|
responseText
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// For non-JSON responses, return text as data
|
||||||
|
data = (await response.text()) as unknown as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
data,
|
||||||
|
status: response.status,
|
||||||
|
statusText: response.statusText,
|
||||||
|
headers: response.headers,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof HttpError) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle network errors, etc.
|
||||||
|
throw new HttpError(
|
||||||
|
`Network error: ${error instanceof Error ? error.message : String(error)}`,
|
||||||
|
0,
|
||||||
|
'Network Error'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET request
|
||||||
|
*/
|
||||||
|
export async function httpGet<T = any>(
|
||||||
|
url: string,
|
||||||
|
headers?: Record<string, string>
|
||||||
|
): Promise<HttpResponse<T>> {
|
||||||
|
return httpRequest<T>(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* POST request
|
||||||
|
*/
|
||||||
|
export async function httpPost<T = any>(
|
||||||
|
url: string,
|
||||||
|
body?: any,
|
||||||
|
headers?: Record<string, string>
|
||||||
|
): Promise<HttpResponse<T>> {
|
||||||
|
return httpRequest<T>(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers,
|
||||||
|
body: body ? JSON.stringify(body) : undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PUT request
|
||||||
|
*/
|
||||||
|
export async function httpPut<T = any>(
|
||||||
|
url: string,
|
||||||
|
body?: any,
|
||||||
|
headers?: Record<string, string>
|
||||||
|
): Promise<HttpResponse<T>> {
|
||||||
|
return httpRequest<T>(url, {
|
||||||
|
method: 'PUT',
|
||||||
|
headers,
|
||||||
|
body: body ? JSON.stringify(body) : undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DELETE request
|
||||||
|
*/
|
||||||
|
export async function httpDelete<T = any>(
|
||||||
|
url: string,
|
||||||
|
headers?: Record<string, string>
|
||||||
|
): Promise<HttpResponse<T>> {
|
||||||
|
return httpRequest<T>(url, {
|
||||||
|
method: 'DELETE',
|
||||||
|
headers,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gitea-specific HTTP client with authentication
|
||||||
|
*/
|
||||||
|
export class GiteaHttpClient {
|
||||||
|
constructor(
|
||||||
|
private baseUrl: string,
|
||||||
|
private token: string
|
||||||
|
) {}
|
||||||
|
|
||||||
|
private getHeaders(additionalHeaders?: Record<string, string>): Record<string, string> {
|
||||||
|
return {
|
||||||
|
'Authorization': `token ${this.token}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...additionalHeaders,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async get<T = any>(endpoint: string): Promise<HttpResponse<T>> {
|
||||||
|
return httpGet<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
|
||||||
|
}
|
||||||
|
|
||||||
|
async post<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
|
||||||
|
return httpPost<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
|
||||||
|
}
|
||||||
|
|
||||||
|
async put<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
|
||||||
|
return httpPut<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete<T = any>(endpoint: string): Promise<HttpResponse<T>> {
|
||||||
|
return httpDelete<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
|
||||||
|
}
|
||||||
|
}
|
||||||
240
src/lib/shutdown-manager.ts
Normal file
@@ -0,0 +1,240 @@
|
|||||||
|
/**
|
||||||
|
* Shutdown Manager for Graceful Application Termination
|
||||||
|
*
|
||||||
|
* This module provides centralized shutdown coordination for the gitea-mirror application.
|
||||||
|
* It ensures that:
|
||||||
|
* - In-progress jobs are properly saved to the database
|
||||||
|
* - Database connections are closed cleanly
|
||||||
|
* - Background services are stopped gracefully
|
||||||
|
* - No data loss occurs during container restarts
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { db, mirrorJobs } from './db';
|
||||||
|
import { eq, and } from 'drizzle-orm';
|
||||||
|
import type { MirrorJob } from './db/schema';
|
||||||
|
|
||||||
|
// Shutdown state tracking
|
||||||
|
let shutdownInProgress = false;
|
||||||
|
let shutdownStartTime: Date | null = null;
|
||||||
|
let shutdownCallbacks: Array<() => Promise<void>> = [];
|
||||||
|
let activeJobs = new Set<string>();
|
||||||
|
let shutdownTimeout: NodeJS.Timeout | null = null;
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds max shutdown time
|
||||||
|
const JOB_SAVE_TIMEOUT = 10000; // 10 seconds to save job state
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a callback to be executed during shutdown
|
||||||
|
*/
|
||||||
|
export function registerShutdownCallback(callback: () => Promise<void>): void {
|
||||||
|
shutdownCallbacks.push(callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register an active job that needs to be tracked during shutdown
|
||||||
|
*/
|
||||||
|
export function registerActiveJob(jobId: string): void {
|
||||||
|
activeJobs.add(jobId);
|
||||||
|
console.log(`Registered active job: ${jobId} (${activeJobs.size} total active jobs)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unregister a job when it completes normally
|
||||||
|
*/
|
||||||
|
export function unregisterActiveJob(jobId: string): void {
|
||||||
|
activeJobs.delete(jobId);
|
||||||
|
console.log(`Unregistered job: ${jobId} (${activeJobs.size} remaining active jobs)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if shutdown is currently in progress
|
||||||
|
*/
|
||||||
|
export function isShuttingDown(): boolean {
|
||||||
|
return shutdownInProgress;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get shutdown status information
|
||||||
|
*/
|
||||||
|
export function getShutdownStatus() {
|
||||||
|
return {
|
||||||
|
inProgress: shutdownInProgress,
|
||||||
|
startTime: shutdownStartTime,
|
||||||
|
activeJobs: Array.from(activeJobs),
|
||||||
|
registeredCallbacks: shutdownCallbacks.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save the current state of an active job to the database
|
||||||
|
*/
|
||||||
|
async function saveJobState(jobId: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
console.log(`Saving state for job ${jobId}...`);
|
||||||
|
|
||||||
|
// Update the job to mark it as interrupted but not failed
|
||||||
|
await db
|
||||||
|
.update(mirrorJobs)
|
||||||
|
.set({
|
||||||
|
inProgress: false,
|
||||||
|
lastCheckpoint: new Date(),
|
||||||
|
message: 'Job interrupted by application shutdown - will resume on restart',
|
||||||
|
})
|
||||||
|
.where(eq(mirrorJobs.id, jobId));
|
||||||
|
|
||||||
|
console.log(`✅ Saved state for job ${jobId}`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`❌ Failed to save state for job ${jobId}:`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save all active jobs to the database
|
||||||
|
*/
|
||||||
|
async function saveAllActiveJobs(): Promise<void> {
|
||||||
|
if (activeJobs.size === 0) {
|
||||||
|
console.log('No active jobs to save');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Saving state for ${activeJobs.size} active jobs...`);
|
||||||
|
|
||||||
|
const savePromises = Array.from(activeJobs).map(async (jobId) => {
|
||||||
|
try {
|
||||||
|
await Promise.race([
|
||||||
|
saveJobState(jobId),
|
||||||
|
new Promise<never>((_, reject) => {
|
||||||
|
setTimeout(() => reject(new Error(`Timeout saving job ${jobId}`)), JOB_SAVE_TIMEOUT);
|
||||||
|
})
|
||||||
|
]);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to save job ${jobId} within timeout:`, error);
|
||||||
|
// Continue with other jobs even if one fails
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
await Promise.allSettled(savePromises);
|
||||||
|
console.log('✅ Completed saving all active jobs');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute all registered shutdown callbacks
|
||||||
|
*/
|
||||||
|
async function executeShutdownCallbacks(): Promise<void> {
|
||||||
|
if (shutdownCallbacks.length === 0) {
|
||||||
|
console.log('No shutdown callbacks to execute');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Executing ${shutdownCallbacks.length} shutdown callbacks...`);
|
||||||
|
|
||||||
|
const callbackPromises = shutdownCallbacks.map(async (callback, index) => {
|
||||||
|
try {
|
||||||
|
await callback();
|
||||||
|
console.log(`✅ Shutdown callback ${index + 1} completed`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`❌ Shutdown callback ${index + 1} failed:`, error);
|
||||||
|
// Continue with other callbacks even if one fails
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
await Promise.allSettled(callbackPromises);
|
||||||
|
console.log('✅ Completed all shutdown callbacks');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Perform graceful shutdown of the application
|
||||||
|
*/
|
||||||
|
export async function gracefulShutdown(signal: string = 'UNKNOWN'): Promise<void> {
|
||||||
|
if (shutdownInProgress) {
|
||||||
|
console.log('⚠️ Shutdown already in progress, ignoring additional signal');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
shutdownInProgress = true;
|
||||||
|
shutdownStartTime = new Date();
|
||||||
|
|
||||||
|
console.log(`\n🛑 Graceful shutdown initiated by signal: ${signal}`);
|
||||||
|
console.log(`📊 Shutdown status: ${activeJobs.size} active jobs, ${shutdownCallbacks.length} callbacks`);
|
||||||
|
|
||||||
|
// Set up shutdown timeout
|
||||||
|
shutdownTimeout = setTimeout(() => {
|
||||||
|
console.error(`❌ Shutdown timeout reached (${SHUTDOWN_TIMEOUT}ms), forcing exit`);
|
||||||
|
process.exit(1);
|
||||||
|
}, SHUTDOWN_TIMEOUT);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Step 1: Save all active job states
|
||||||
|
console.log('\n📝 Step 1: Saving active job states...');
|
||||||
|
await saveAllActiveJobs();
|
||||||
|
|
||||||
|
// Step 2: Execute shutdown callbacks (stop services, close connections, etc.)
|
||||||
|
console.log('\n🔧 Step 2: Executing shutdown callbacks...');
|
||||||
|
await executeShutdownCallbacks();
|
||||||
|
|
||||||
|
// Step 3: Close database connections
|
||||||
|
console.log('\n💾 Step 3: Closing database connections...');
|
||||||
|
// Note: Drizzle with bun:sqlite doesn't require explicit connection closing
|
||||||
|
// but we'll add this for completeness and future database changes
|
||||||
|
|
||||||
|
console.log('\n✅ Graceful shutdown completed successfully');
|
||||||
|
|
||||||
|
// Clear the timeout since we completed successfully
|
||||||
|
if (shutdownTimeout) {
|
||||||
|
clearTimeout(shutdownTimeout);
|
||||||
|
shutdownTimeout = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exit with success code
|
||||||
|
process.exit(0);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('\n❌ Error during graceful shutdown:', error);
|
||||||
|
|
||||||
|
// Clear the timeout
|
||||||
|
if (shutdownTimeout) {
|
||||||
|
clearTimeout(shutdownTimeout);
|
||||||
|
shutdownTimeout = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exit with error code
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the shutdown manager
|
||||||
|
* This should be called early in the application lifecycle
|
||||||
|
*/
|
||||||
|
export function initializeShutdownManager(): void {
|
||||||
|
console.log('🔧 Initializing shutdown manager...');
|
||||||
|
|
||||||
|
// Reset state in case of re-initialization
|
||||||
|
shutdownInProgress = false;
|
||||||
|
shutdownStartTime = null;
|
||||||
|
activeJobs.clear();
|
||||||
|
shutdownCallbacks = []; // Reset callbacks too
|
||||||
|
|
||||||
|
// Clear any existing timeout
|
||||||
|
if (shutdownTimeout) {
|
||||||
|
clearTimeout(shutdownTimeout);
|
||||||
|
shutdownTimeout = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('✅ Shutdown manager initialized');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force immediate shutdown (for emergencies)
|
||||||
|
*/
|
||||||
|
export function forceShutdown(exitCode: number = 1): void {
|
||||||
|
console.error('🚨 Force shutdown requested');
|
||||||
|
|
||||||
|
if (shutdownTimeout) {
|
||||||
|
clearTimeout(shutdownTimeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
process.exit(exitCode);
|
||||||
|
}
|
||||||
141
src/lib/signal-handlers.ts
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
/**
|
||||||
|
* Signal Handlers for Graceful Shutdown
|
||||||
|
*
|
||||||
|
* This module sets up proper signal handling for container environments.
|
||||||
|
* It ensures the application responds correctly to SIGTERM, SIGINT, and other signals.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { gracefulShutdown, isShuttingDown } from './shutdown-manager';
|
||||||
|
|
||||||
|
// Track if signal handlers have been registered
|
||||||
|
let signalHandlersRegistered = false;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup signal handlers for graceful shutdown
|
||||||
|
* This should be called early in the application lifecycle
|
||||||
|
*/
|
||||||
|
export function setupSignalHandlers(): void {
|
||||||
|
if (signalHandlersRegistered) {
|
||||||
|
console.log('⚠️ Signal handlers already registered, skipping');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('🔧 Setting up signal handlers for graceful shutdown...');
|
||||||
|
|
||||||
|
// Handle SIGTERM (Docker stop, Kubernetes termination)
|
||||||
|
process.on('SIGTERM', () => {
|
||||||
|
console.log('\n📡 Received SIGTERM signal');
|
||||||
|
if (!isShuttingDown()) {
|
||||||
|
gracefulShutdown('SIGTERM').catch((error) => {
|
||||||
|
console.error('Error during SIGTERM shutdown:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle SIGINT (Ctrl+C)
|
||||||
|
process.on('SIGINT', () => {
|
||||||
|
console.log('\n📡 Received SIGINT signal');
|
||||||
|
if (!isShuttingDown()) {
|
||||||
|
gracefulShutdown('SIGINT').catch((error) => {
|
||||||
|
console.error('Error during SIGINT shutdown:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle SIGHUP (terminal hangup)
|
||||||
|
process.on('SIGHUP', () => {
|
||||||
|
console.log('\n📡 Received SIGHUP signal');
|
||||||
|
if (!isShuttingDown()) {
|
||||||
|
gracefulShutdown('SIGHUP').catch((error) => {
|
||||||
|
console.error('Error during SIGHUP shutdown:', error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle uncaught exceptions
|
||||||
|
process.on('uncaughtException', (error) => {
|
||||||
|
console.error('\n💥 Uncaught Exception:', error);
|
||||||
|
console.error('Stack trace:', error.stack);
|
||||||
|
|
||||||
|
if (!isShuttingDown()) {
|
||||||
|
console.log('Initiating emergency shutdown due to uncaught exception...');
|
||||||
|
gracefulShutdown('UNCAUGHT_EXCEPTION').catch((shutdownError) => {
|
||||||
|
console.error('Error during emergency shutdown:', shutdownError);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// If already shutting down, force exit
|
||||||
|
console.error('Uncaught exception during shutdown, forcing exit');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle unhandled promise rejections
|
||||||
|
process.on('unhandledRejection', (reason, promise) => {
|
||||||
|
console.error('\n💥 Unhandled Promise Rejection at:', promise);
|
||||||
|
console.error('Reason:', reason);
|
||||||
|
|
||||||
|
if (!isShuttingDown()) {
|
||||||
|
console.log('Initiating emergency shutdown due to unhandled rejection...');
|
||||||
|
gracefulShutdown('UNHANDLED_REJECTION').catch((shutdownError) => {
|
||||||
|
console.error('Error during emergency shutdown:', shutdownError);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// If already shutting down, force exit
|
||||||
|
console.error('Unhandled rejection during shutdown, forcing exit');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle process warnings (for debugging)
|
||||||
|
process.on('warning', (warning) => {
|
||||||
|
console.warn('⚠️ Process Warning:', warning.name);
|
||||||
|
console.warn('Message:', warning.message);
|
||||||
|
if (warning.stack) {
|
||||||
|
console.warn('Stack:', warning.stack);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
signalHandlersRegistered = true;
|
||||||
|
console.log('✅ Signal handlers registered successfully');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove signal handlers (for testing)
|
||||||
|
*/
|
||||||
|
export function removeSignalHandlers(): void {
|
||||||
|
if (!signalHandlersRegistered) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('🔧 Removing signal handlers...');
|
||||||
|
|
||||||
|
process.removeAllListeners('SIGTERM');
|
||||||
|
process.removeAllListeners('SIGINT');
|
||||||
|
process.removeAllListeners('SIGHUP');
|
||||||
|
process.removeAllListeners('uncaughtException');
|
||||||
|
process.removeAllListeners('unhandledRejection');
|
||||||
|
process.removeAllListeners('warning');
|
||||||
|
|
||||||
|
signalHandlersRegistered = false;
|
||||||
|
console.log('✅ Signal handlers removed');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Check if signal handlers are registered.
 *
 * @returns true after setupSignalHandlers() has run and until
 *          removeSignalHandlers() resets the flag.
 */
export function areSignalHandlersRegistered(): boolean {
  return signalHandlersRegistered;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send a test signal to the current process (for testing)
|
||||||
|
*/
|
||||||
|
export function sendTestSignal(signal: NodeJS.Signals = 'SIGTERM'): void {
|
||||||
|
console.log(`🧪 Sending test signal: ${signal}`);
|
||||||
|
process.kill(process.pid, signal);
|
||||||
|
}
|
||||||
@@ -1,35 +1,35 @@
|
|||||||
import { describe, test, expect } from "bun:test";
|
import { describe, test, expect } from "bun:test";
|
||||||
import { jsonResponse, formatDate, truncate, safeParse } from "./utils";
|
import { jsonResponse, formatDate, truncate, safeParse, parseErrorMessage, showErrorToast } from "./utils";
|
||||||
|
|
||||||
describe("jsonResponse", () => {
|
describe("jsonResponse", () => {
|
||||||
test("creates a Response with JSON content", () => {
|
test("creates a Response with JSON content", () => {
|
||||||
const data = { message: "Hello, world!" };
|
const data = { message: "Hello, world!" };
|
||||||
const response = jsonResponse({ data });
|
const response = jsonResponse({ data });
|
||||||
|
|
||||||
expect(response).toBeInstanceOf(Response);
|
expect(response).toBeInstanceOf(Response);
|
||||||
expect(response.status).toBe(200);
|
expect(response.status).toBe(200);
|
||||||
expect(response.headers.get("Content-Type")).toBe("application/json");
|
expect(response.headers.get("Content-Type")).toBe("application/json");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("uses the provided status code", () => {
|
test("uses the provided status code", () => {
|
||||||
const data = { error: "Not found" };
|
const data = { error: "Not found" };
|
||||||
const response = jsonResponse({ data, status: 404 });
|
const response = jsonResponse({ data, status: 404 });
|
||||||
|
|
||||||
expect(response.status).toBe(404);
|
expect(response.status).toBe(404);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("correctly serializes complex objects", async () => {
|
test("correctly serializes complex objects", async () => {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const data = {
|
const data = {
|
||||||
message: "Complex object",
|
message: "Complex object",
|
||||||
date: now,
|
date: now,
|
||||||
nested: { foo: "bar" },
|
nested: { foo: "bar" },
|
||||||
array: [1, 2, 3]
|
array: [1, 2, 3]
|
||||||
};
|
};
|
||||||
|
|
||||||
const response = jsonResponse({ data });
|
const response = jsonResponse({ data });
|
||||||
const responseBody = await response.json();
|
const responseBody = await response.json();
|
||||||
|
|
||||||
expect(responseBody).toEqual({
|
expect(responseBody).toEqual({
|
||||||
message: "Complex object",
|
message: "Complex object",
|
||||||
date: now.toISOString(),
|
date: now.toISOString(),
|
||||||
@@ -43,22 +43,22 @@ describe("formatDate", () => {
|
|||||||
test("formats a date object", () => {
|
test("formats a date object", () => {
|
||||||
const date = new Date("2023-01-15T12:30:45Z");
|
const date = new Date("2023-01-15T12:30:45Z");
|
||||||
const formatted = formatDate(date);
|
const formatted = formatDate(date);
|
||||||
|
|
||||||
// The exact format might depend on the locale, so we'll check for parts
|
// The exact format might depend on the locale, so we'll check for parts
|
||||||
expect(formatted).toContain("2023");
|
expect(formatted).toContain("2023");
|
||||||
expect(formatted).toContain("January");
|
expect(formatted).toContain("January");
|
||||||
expect(formatted).toContain("15");
|
expect(formatted).toContain("15");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("formats a date string", () => {
|
test("formats a date string", () => {
|
||||||
const dateStr = "2023-01-15T12:30:45Z";
|
const dateStr = "2023-01-15T12:30:45Z";
|
||||||
const formatted = formatDate(dateStr);
|
const formatted = formatDate(dateStr);
|
||||||
|
|
||||||
expect(formatted).toContain("2023");
|
expect(formatted).toContain("2023");
|
||||||
expect(formatted).toContain("January");
|
expect(formatted).toContain("January");
|
||||||
expect(formatted).toContain("15");
|
expect(formatted).toContain("15");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("returns 'Never' for null or undefined", () => {
|
test("returns 'Never' for null or undefined", () => {
|
||||||
expect(formatDate(null)).toBe("Never");
|
expect(formatDate(null)).toBe("Never");
|
||||||
expect(formatDate(undefined)).toBe("Never");
|
expect(formatDate(undefined)).toBe("Never");
|
||||||
@@ -69,18 +69,18 @@ describe("truncate", () => {
|
|||||||
test("truncates a string that exceeds the length", () => {
|
test("truncates a string that exceeds the length", () => {
|
||||||
const str = "This is a long string that needs truncation";
|
const str = "This is a long string that needs truncation";
|
||||||
const truncated = truncate(str, 10);
|
const truncated = truncate(str, 10);
|
||||||
|
|
||||||
expect(truncated).toBe("This is a ...");
|
expect(truncated).toBe("This is a ...");
|
||||||
expect(truncated.length).toBe(13); // 10 chars + "..."
|
expect(truncated.length).toBe(13); // 10 chars + "..."
|
||||||
});
|
});
|
||||||
|
|
||||||
test("does not truncate a string that is shorter than the length", () => {
|
test("does not truncate a string that is shorter than the length", () => {
|
||||||
const str = "Short";
|
const str = "Short";
|
||||||
const truncated = truncate(str, 10);
|
const truncated = truncate(str, 10);
|
||||||
|
|
||||||
expect(truncated).toBe("Short");
|
expect(truncated).toBe("Short");
|
||||||
});
|
});
|
||||||
|
|
||||||
test("handles empty strings", () => {
|
test("handles empty strings", () => {
|
||||||
expect(truncate("", 10)).toBe("");
|
expect(truncate("", 10)).toBe("");
|
||||||
});
|
});
|
||||||
@@ -90,21 +90,71 @@ describe("safeParse", () => {
|
|||||||
test("parses valid JSON strings", () => {
|
test("parses valid JSON strings", () => {
|
||||||
const jsonStr = '{"name":"John","age":30}';
|
const jsonStr = '{"name":"John","age":30}';
|
||||||
const parsed = safeParse(jsonStr);
|
const parsed = safeParse(jsonStr);
|
||||||
|
|
||||||
expect(parsed).toEqual({ name: "John", age: 30 });
|
expect(parsed).toEqual({ name: "John", age: 30 });
|
||||||
});
|
});
|
||||||
|
|
||||||
test("returns undefined for invalid JSON strings", () => {
|
test("returns undefined for invalid JSON strings", () => {
|
||||||
const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
|
const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
|
||||||
const parsed = safeParse(invalidJson);
|
const parsed = safeParse(invalidJson);
|
||||||
|
|
||||||
expect(parsed).toBeUndefined();
|
expect(parsed).toBeUndefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
test("returns the original value for non-string inputs", () => {
|
test("returns the original value for non-string inputs", () => {
|
||||||
const obj = { name: "John", age: 30 };
|
const obj = { name: "John", age: 30 };
|
||||||
const parsed = safeParse(obj);
|
const parsed = safeParse(obj);
|
||||||
|
|
||||||
expect(parsed).toBe(obj);
|
expect(parsed).toBe(obj);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("parseErrorMessage", () => {
|
||||||
|
test("parses JSON error with error and troubleshooting fields", () => {
|
||||||
|
const errorMessage = JSON.stringify({
|
||||||
|
error: "Unexpected end of JSON input",
|
||||||
|
errorType: "SyntaxError",
|
||||||
|
timestamp: "2025-05-28T09:08:02.37Z",
|
||||||
|
troubleshooting: "JSON parsing error detected. Check Gitea server status and logs. Ensure Gitea is returning valid JSON responses."
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = parseErrorMessage(errorMessage);
|
||||||
|
|
||||||
|
expect(result.title).toBe("Unexpected end of JSON input");
|
||||||
|
expect(result.description).toBe("JSON parsing error detected. Check Gitea server status and logs. Ensure Gitea is returning valid JSON responses.");
|
||||||
|
expect(result.isStructured).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("parses JSON error with title and description fields", () => {
|
||||||
|
const errorMessage = JSON.stringify({
|
||||||
|
title: "Connection Failed",
|
||||||
|
description: "Unable to connect to the server. Please check your network connection."
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = parseErrorMessage(errorMessage);
|
||||||
|
|
||||||
|
expect(result.title).toBe("Connection Failed");
|
||||||
|
expect(result.description).toBe("Unable to connect to the server. Please check your network connection.");
|
||||||
|
expect(result.isStructured).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("handles plain string error messages", () => {
|
||||||
|
const errorMessage = "Simple error message";
|
||||||
|
|
||||||
|
const result = parseErrorMessage(errorMessage);
|
||||||
|
|
||||||
|
expect(result.title).toBe("Simple error message");
|
||||||
|
expect(result.description).toBeUndefined();
|
||||||
|
expect(result.isStructured).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("handles Error objects", () => {
|
||||||
|
const error = new Error("Something went wrong");
|
||||||
|
|
||||||
|
const result = parseErrorMessage(error);
|
||||||
|
|
||||||
|
expect(result.title).toBe("Something went wrong");
|
||||||
|
expect(result.description).toBeUndefined();
|
||||||
|
expect(result.isStructured).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|||||||
138
src/lib/utils.ts
@@ -1,7 +1,6 @@
|
|||||||
import { clsx, type ClassValue } from "clsx";
|
import { clsx, type ClassValue } from "clsx";
|
||||||
import { twMerge } from "tailwind-merge";
|
import { twMerge } from "tailwind-merge";
|
||||||
import axios from "axios";
|
import { httpRequest, HttpError } from "@/lib/http-client";
|
||||||
import type { AxiosError, AxiosRequestConfig } from "axios";
|
|
||||||
import type { RepoStatus } from "@/types/Repository";
|
import type { RepoStatus } from "@/types/Repository";
|
||||||
|
|
||||||
export const API_BASE = "/api";
|
export const API_BASE = "/api";
|
||||||
@@ -37,27 +36,148 @@ export function safeParse<T>(value: unknown): T | undefined {
|
|||||||
return value as T;
|
return value as T;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Enhanced error message parsing for toast notifications
|
||||||
|
export interface ParsedErrorMessage {
|
||||||
|
title: string;
|
||||||
|
description?: string;
|
||||||
|
isStructured: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||||
|
// Handle Error objects
|
||||||
|
if (error instanceof Error) {
|
||||||
|
return parseErrorMessage(error.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle string messages
|
||||||
|
if (typeof error === "string") {
|
||||||
|
// Try to parse as JSON first
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(error);
|
||||||
|
|
||||||
|
// Check for common structured error formats
|
||||||
|
if (typeof parsed === "object" && parsed !== null) {
|
||||||
|
// Format 1: { error: "message", errorType: "type", troubleshooting: "info" }
|
||||||
|
if (parsed.error) {
|
||||||
|
return {
|
||||||
|
title: parsed.error,
|
||||||
|
description: parsed.troubleshooting || parsed.errorType || undefined,
|
||||||
|
isStructured: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format 2: { title: "title", description: "desc" }
|
||||||
|
if (parsed.title) {
|
||||||
|
return {
|
||||||
|
title: parsed.title,
|
||||||
|
description: parsed.description || undefined,
|
||||||
|
isStructured: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format 3: { message: "msg", details: "details" }
|
||||||
|
if (parsed.message) {
|
||||||
|
return {
|
||||||
|
title: parsed.message,
|
||||||
|
description: parsed.details || undefined,
|
||||||
|
isStructured: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Not valid JSON, treat as plain string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Plain string message
|
||||||
|
return {
|
||||||
|
title: error,
|
||||||
|
description: undefined,
|
||||||
|
isStructured: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle objects directly
|
||||||
|
if (typeof error === "object" && error !== null) {
|
||||||
|
const errorObj = error as any;
|
||||||
|
|
||||||
|
if (errorObj.error) {
|
||||||
|
return {
|
||||||
|
title: errorObj.error,
|
||||||
|
description: errorObj.troubleshooting || errorObj.errorType || undefined,
|
||||||
|
isStructured: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errorObj.title) {
|
||||||
|
return {
|
||||||
|
title: errorObj.title,
|
||||||
|
description: errorObj.description || undefined,
|
||||||
|
isStructured: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errorObj.message) {
|
||||||
|
return {
|
||||||
|
title: errorObj.message,
|
||||||
|
description: errorObj.details || undefined,
|
||||||
|
isStructured: true,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback for unknown types
|
||||||
|
return {
|
||||||
|
title: String(error),
|
||||||
|
description: undefined,
|
||||||
|
isStructured: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enhanced toast helper that parses structured error messages
|
||||||
|
export function showErrorToast(error: unknown, toast: any) {
|
||||||
|
const parsed = parseErrorMessage(error);
|
||||||
|
|
||||||
|
if (parsed.description) {
|
||||||
|
// Use sonner's rich toast format with title and description
|
||||||
|
toast.error(parsed.title, {
|
||||||
|
description: parsed.description,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Simple error toast
|
||||||
|
toast.error(parsed.title);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Helper function for API requests
|
// Helper function for API requests
|
||||||
|
|
||||||
export async function apiRequest<T>(
|
export async function apiRequest<T>(
|
||||||
endpoint: string,
|
endpoint: string,
|
||||||
options: AxiosRequestConfig = {}
|
options: (RequestInit & { data?: any }) = {}
|
||||||
): Promise<T> {
|
): Promise<T> {
|
||||||
try {
|
try {
|
||||||
const response = await axios<T>(`${API_BASE}${endpoint}`, {
|
// Handle the custom 'data' property by converting it to 'body'
|
||||||
|
const { data, ...requestOptions } = options;
|
||||||
|
const finalOptions: RequestInit = {
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
...(options.headers || {}),
|
...(requestOptions.headers || {}),
|
||||||
},
|
},
|
||||||
...options,
|
...requestOptions,
|
||||||
});
|
};
|
||||||
|
|
||||||
|
// If data is provided, stringify it and set as body
|
||||||
|
if (data !== undefined) {
|
||||||
|
finalOptions.body = JSON.stringify(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await httpRequest<T>(`${API_BASE}${endpoint}`, finalOptions);
|
||||||
|
|
||||||
return response.data;
|
return response.data;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
const error = err as AxiosError<{ message?: string }>;
|
const error = err as HttpError;
|
||||||
|
|
||||||
const message =
|
const message =
|
||||||
error.response?.data?.message ||
|
error.response ||
|
||||||
error.message ||
|
error.message ||
|
||||||
"An unknown error occurred";
|
"An unknown error occurred";
|
||||||
|
|
||||||
|
|||||||
@@ -5,19 +5,19 @@ describe("processInParallel", () => {
|
|||||||
test("processes items in parallel with concurrency control", async () => {
|
test("processes items in parallel with concurrency control", async () => {
|
||||||
// Create an array of numbers to process
|
// Create an array of numbers to process
|
||||||
const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||||
|
|
||||||
// Create a mock function to track execution
|
// Create a mock function to track execution
|
||||||
const processItem = mock(async (item: number) => {
|
const processItem = mock(async (item: number) => {
|
||||||
// Simulate async work
|
// Simulate async work
|
||||||
await new Promise(resolve => setTimeout(resolve, 10));
|
await new Promise(resolve => setTimeout(resolve, 10));
|
||||||
return item * 2;
|
return item * 2;
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a mock progress callback
|
// Create a mock progress callback
|
||||||
const onProgress = mock((completed: number, total: number, result?: number) => {
|
const onProgress = mock((completed: number, total: number, result?: number) => {
|
||||||
// Progress tracking
|
// Progress tracking
|
||||||
});
|
});
|
||||||
|
|
||||||
// Process the items with a concurrency limit of 3
|
// Process the items with a concurrency limit of 3
|
||||||
const results = await processInParallel(
|
const results = await processInParallel(
|
||||||
items,
|
items,
|
||||||
@@ -25,25 +25,25 @@ describe("processInParallel", () => {
|
|||||||
3,
|
3,
|
||||||
onProgress
|
onProgress
|
||||||
);
|
);
|
||||||
|
|
||||||
// Verify results
|
// Verify results
|
||||||
expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
|
expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
|
||||||
|
|
||||||
// Verify that processItem was called for each item
|
// Verify that processItem was called for each item
|
||||||
expect(processItem).toHaveBeenCalledTimes(10);
|
expect(processItem).toHaveBeenCalledTimes(10);
|
||||||
|
|
||||||
// Verify that onProgress was called for each item
|
// Verify that onProgress was called for each item
|
||||||
expect(onProgress).toHaveBeenCalledTimes(10);
|
expect(onProgress).toHaveBeenCalledTimes(10);
|
||||||
|
|
||||||
// Verify the last call to onProgress had the correct completed/total values
|
// Verify the last call to onProgress had the correct completed/total values
|
||||||
expect(onProgress.mock.calls[9][0]).toBe(10); // completed
|
expect(onProgress.mock.calls[9][0]).toBe(10); // completed
|
||||||
expect(onProgress.mock.calls[9][1]).toBe(10); // total
|
expect(onProgress.mock.calls[9][1]).toBe(10); // total
|
||||||
});
|
});
|
||||||
|
|
||||||
test("handles errors in processing", async () => {
|
test("handles errors in processing", async () => {
|
||||||
// Create an array of numbers to process
|
// Create an array of numbers to process
|
||||||
const items = [1, 2, 3, 4, 5];
|
const items = [1, 2, 3, 4, 5];
|
||||||
|
|
||||||
// Create a mock function that throws an error for item 3
|
// Create a mock function that throws an error for item 3
|
||||||
const processItem = mock(async (item: number) => {
|
const processItem = mock(async (item: number) => {
|
||||||
if (item === 3) {
|
if (item === 3) {
|
||||||
@@ -51,24 +51,24 @@ describe("processInParallel", () => {
|
|||||||
}
|
}
|
||||||
return item * 2;
|
return item * 2;
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a spy for console.error
|
// Create a spy for console.error
|
||||||
const originalConsoleError = console.error;
|
const originalConsoleError = console.error;
|
||||||
const consoleErrorMock = mock(() => {});
|
const consoleErrorMock = mock(() => {});
|
||||||
console.error = consoleErrorMock;
|
console.error = consoleErrorMock;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Process the items
|
// Process the items
|
||||||
const results = await processInParallel(items, processItem);
|
const results = await processInParallel(items, processItem);
|
||||||
|
|
||||||
// Verify results (should have 4 items, missing the one that errored)
|
// Verify results (should have 4 items, missing the one that errored)
|
||||||
expect(results).toEqual([2, 4, 8, 10]);
|
expect(results).toEqual([2, 4, 8, 10]);
|
||||||
|
|
||||||
// Verify that processItem was called for each item
|
// Verify that processItem was called for each item
|
||||||
expect(processItem).toHaveBeenCalledTimes(5);
|
expect(processItem).toHaveBeenCalledTimes(5);
|
||||||
|
|
||||||
// Verify that console.error was called once
|
// Verify that console.error was called (enhanced logging calls it multiple times)
|
||||||
expect(consoleErrorMock).toHaveBeenCalledTimes(1);
|
expect(consoleErrorMock).toHaveBeenCalled();
|
||||||
} finally {
|
} finally {
|
||||||
// Restore console.error
|
// Restore console.error
|
||||||
console.error = originalConsoleError;
|
console.error = originalConsoleError;
|
||||||
@@ -80,51 +80,51 @@ describe("processWithRetry", () => {
|
|||||||
test("retries failed operations", async () => {
|
test("retries failed operations", async () => {
|
||||||
// Create an array of numbers to process
|
// Create an array of numbers to process
|
||||||
const items = [1, 2, 3];
|
const items = [1, 2, 3];
|
||||||
|
|
||||||
// Create a counter to track retry attempts
|
// Create a counter to track retry attempts
|
||||||
const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };
|
const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };
|
||||||
|
|
||||||
// Create a mock function that fails on first attempt for item 2
|
// Create a mock function that fails on first attempt for item 2
|
||||||
const processItem = mock(async (item: number) => {
|
const processItem = mock(async (item: number) => {
|
||||||
attemptCounts[item]++;
|
attemptCounts[item]++;
|
||||||
|
|
||||||
if (item === 2 && attemptCounts[item] === 1) {
|
if (item === 2 && attemptCounts[item] === 1) {
|
||||||
throw new Error("Temporary error");
|
throw new Error("Temporary error");
|
||||||
}
|
}
|
||||||
|
|
||||||
return item * 2;
|
return item * 2;
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a mock for the onRetry callback
|
// Create a mock for the onRetry callback
|
||||||
const onRetry = mock((item: number, error: Error, attempt: number) => {
|
const onRetry = mock((item: number, error: Error, attempt: number) => {
|
||||||
// Retry tracking
|
// Retry tracking
|
||||||
});
|
});
|
||||||
|
|
||||||
// Process the items with retry
|
// Process the items with retry
|
||||||
const results = await processWithRetry(items, processItem, {
|
const results = await processWithRetry(items, processItem, {
|
||||||
maxRetries: 2,
|
maxRetries: 2,
|
||||||
retryDelay: 10,
|
retryDelay: 10,
|
||||||
onRetry,
|
onRetry,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Verify results
|
// Verify results
|
||||||
expect(results).toEqual([2, 4, 6]);
|
expect(results).toEqual([2, 4, 6]);
|
||||||
|
|
||||||
// Verify that item 2 was retried once
|
// Verify that item 2 was retried once
|
||||||
expect(attemptCounts[1]).toBe(1); // No retries
|
expect(attemptCounts[1]).toBe(1); // No retries
|
||||||
expect(attemptCounts[2]).toBe(2); // One retry
|
expect(attemptCounts[2]).toBe(2); // One retry
|
||||||
expect(attemptCounts[3]).toBe(1); // No retries
|
expect(attemptCounts[3]).toBe(1); // No retries
|
||||||
|
|
||||||
// Verify that onRetry was called once
|
// Verify that onRetry was called once
|
||||||
expect(onRetry).toHaveBeenCalledTimes(1);
|
expect(onRetry).toHaveBeenCalledTimes(1);
|
||||||
expect(onRetry.mock.calls[0][0]).toBe(2); // item
|
expect(onRetry.mock.calls[0][0]).toBe(2); // item
|
||||||
expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
|
expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
|
||||||
});
|
});
|
||||||
|
|
||||||
test("gives up after max retries", async () => {
|
test("gives up after max retries", async () => {
|
||||||
// Create an array of numbers to process
|
// Create an array of numbers to process
|
||||||
const items = [1, 2];
|
const items = [1, 2];
|
||||||
|
|
||||||
// Create a mock function that always fails for item 2
|
// Create a mock function that always fails for item 2
|
||||||
const processItem = mock(async (item: number) => {
|
const processItem = mock(async (item: number) => {
|
||||||
if (item === 2) {
|
if (item === 2) {
|
||||||
@@ -132,17 +132,17 @@ describe("processWithRetry", () => {
|
|||||||
}
|
}
|
||||||
return item * 2;
|
return item * 2;
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a mock for the onRetry callback
|
// Create a mock for the onRetry callback
|
||||||
const onRetry = mock((item: number, error: Error, attempt: number) => {
|
const onRetry = mock((item: number, error: Error, attempt: number) => {
|
||||||
// Retry tracking
|
// Retry tracking
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a spy for console.error
|
// Create a spy for console.error
|
||||||
const originalConsoleError = console.error;
|
const originalConsoleError = console.error;
|
||||||
const consoleErrorMock = mock(() => {});
|
const consoleErrorMock = mock(() => {});
|
||||||
console.error = consoleErrorMock;
|
console.error = consoleErrorMock;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Process the items with retry
|
// Process the items with retry
|
||||||
const results = await processWithRetry(items, processItem, {
|
const results = await processWithRetry(items, processItem, {
|
||||||
@@ -150,15 +150,15 @@ describe("processWithRetry", () => {
|
|||||||
retryDelay: 10,
|
retryDelay: 10,
|
||||||
onRetry,
|
onRetry,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Verify results (should have 1 item, missing the one that errored)
|
// Verify results (should have 1 item, missing the one that errored)
|
||||||
expect(results).toEqual([2]);
|
expect(results).toEqual([2]);
|
||||||
|
|
||||||
// Verify that onRetry was called twice (for 2 retry attempts)
|
// Verify that onRetry was called twice (for 2 retry attempts)
|
||||||
expect(onRetry).toHaveBeenCalledTimes(2);
|
expect(onRetry).toHaveBeenCalledTimes(2);
|
||||||
|
|
||||||
// Verify that console.error was called once
|
// Verify that console.error was called (enhanced logging calls it multiple times)
|
||||||
expect(consoleErrorMock).toHaveBeenCalledTimes(1);
|
expect(consoleErrorMock).toHaveBeenCalled();
|
||||||
} finally {
|
} finally {
|
||||||
// Restore console.error
|
// Restore console.error
|
||||||
console.error = originalConsoleError;
|
console.error = originalConsoleError;
|
||||||
|
|||||||
@@ -46,11 +46,25 @@ export async function processInParallel<T, R>(
|
|||||||
const batchResults = await Promise.allSettled(batchPromises);
|
const batchResults = await Promise.allSettled(batchPromises);
|
||||||
|
|
||||||
// Process results and handle errors
|
// Process results and handle errors
|
||||||
for (const result of batchResults) {
|
for (let j = 0; j < batchResults.length; j++) {
|
||||||
|
const result = batchResults[j];
|
||||||
if (result.status === 'fulfilled') {
|
if (result.status === 'fulfilled') {
|
||||||
results.push(result.value);
|
results.push(result.value);
|
||||||
} else {
|
} else {
|
||||||
console.error('Error processing item:', result.reason);
|
const itemIndex = i + j;
|
||||||
|
console.error("=== BATCH ITEM PROCESSING ERROR ===");
|
||||||
|
console.error("Batch index:", Math.floor(i / concurrencyLimit));
|
||||||
|
console.error("Item index in batch:", j);
|
||||||
|
console.error("Global item index:", itemIndex);
|
||||||
|
console.error("Error type:", result.reason?.constructor?.name);
|
||||||
|
console.error("Error message:", result.reason instanceof Error ? result.reason.message : String(result.reason));
|
||||||
|
|
||||||
|
if (result.reason instanceof Error && result.reason.message.includes('JSON')) {
|
||||||
|
console.error("🚨 JSON parsing error in batch processing");
|
||||||
|
console.error("This indicates an API response issue from Gitea");
|
||||||
|
}
|
||||||
|
|
||||||
|
console.error("==================================");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -102,6 +116,16 @@ export async function processWithRetry<T, R>(
|
|||||||
|
|
||||||
for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
|
for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
|
||||||
try {
|
try {
|
||||||
|
// Check for shutdown before processing each item (only in production)
|
||||||
|
try {
|
||||||
|
const { isShuttingDown } = await import('@/lib/shutdown-manager');
|
||||||
|
if (isShuttingDown()) {
|
||||||
|
throw new Error('Processing interrupted by application shutdown');
|
||||||
|
}
|
||||||
|
} catch (importError) {
|
||||||
|
// Ignore import errors during testing
|
||||||
|
}
|
||||||
|
|
||||||
const result = await processItem(item);
|
const result = await processItem(item);
|
||||||
|
|
||||||
// Handle checkpointing if enabled
|
// Handle checkpointing if enabled
|
||||||
@@ -129,6 +153,21 @@ export async function processWithRetry<T, R>(
|
|||||||
const delay = retryDelay * Math.pow(2, attempt - 1);
|
const delay = retryDelay * Math.pow(2, attempt - 1);
|
||||||
await new Promise(resolve => setTimeout(resolve, delay));
|
await new Promise(resolve => setTimeout(resolve, delay));
|
||||||
} else {
|
} else {
|
||||||
|
// Enhanced error logging for final failure
|
||||||
|
console.error("=== ITEM PROCESSING FAILED (MAX RETRIES EXCEEDED) ===");
|
||||||
|
console.error("Item:", getItemId ? getItemId(item) : 'unknown');
|
||||||
|
console.error("Error type:", lastError.constructor.name);
|
||||||
|
console.error("Error message:", lastError.message);
|
||||||
|
console.error("Attempts made:", maxRetries + 1);
|
||||||
|
|
||||||
|
if (lastError.message.includes('JSON')) {
|
||||||
|
console.error("🚨 JSON-related error detected in item processing");
|
||||||
|
console.error("This suggests an issue with API responses from Gitea");
|
||||||
|
}
|
||||||
|
|
||||||
|
console.error("Stack trace:", lastError.stack);
|
||||||
|
console.error("================================================");
|
||||||
|
|
||||||
throw lastError;
|
throw lastError;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -185,9 +224,24 @@ export async function processWithResilience<T, R>(
|
|||||||
...otherOptions
|
...otherOptions
|
||||||
} = options;
|
} = options;
|
||||||
|
|
||||||
// Import helpers for job management
|
// Import helpers for job management and shutdown handling
|
||||||
const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');
|
const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');
|
||||||
|
|
||||||
|
// Import shutdown manager (with fallback for testing)
|
||||||
|
let registerActiveJob: (jobId: string) => void = () => {};
|
||||||
|
let unregisterActiveJob: (jobId: string) => void = () => {};
|
||||||
|
let isShuttingDown: () => boolean = () => false;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const shutdownManager = await import('@/lib/shutdown-manager');
|
||||||
|
registerActiveJob = shutdownManager.registerActiveJob;
|
||||||
|
unregisterActiveJob = shutdownManager.unregisterActiveJob;
|
||||||
|
isShuttingDown = shutdownManager.isShuttingDown;
|
||||||
|
} catch (importError) {
|
||||||
|
// Use fallback functions during testing
|
||||||
|
console.log('Using fallback shutdown manager functions (testing mode)');
|
||||||
|
}
|
||||||
|
|
||||||
// Get item IDs for all items
|
// Get item IDs for all items
|
||||||
const allItemIds = items.map(getItemId);
|
const allItemIds = items.map(getItemId);
|
||||||
|
|
||||||
@@ -240,6 +294,9 @@ export async function processWithResilience<T, R>(
|
|||||||
console.log(`Created new job ${jobId} with ${items.length} items`);
|
console.log(`Created new job ${jobId} with ${items.length} items`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Register the job with the shutdown manager
|
||||||
|
registerActiveJob(jobId);
|
||||||
|
|
||||||
// Define the checkpoint function
|
// Define the checkpoint function
|
||||||
const onCheckpoint = async (jobId: string, completedItemId: string) => {
|
const onCheckpoint = async (jobId: string, completedItemId: string) => {
|
||||||
const itemName = items.find(item => getItemId(item) === completedItemId)
|
const itemName = items.find(item => getItemId(item) === completedItemId)
|
||||||
@@ -254,6 +311,12 @@ export async function processWithResilience<T, R>(
|
|||||||
};
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// Check if shutdown is in progress before starting
|
||||||
|
if (isShuttingDown()) {
|
||||||
|
console.log(`⚠️ Shutdown in progress, aborting job ${jobId}`);
|
||||||
|
throw new Error('Job aborted due to application shutdown');
|
||||||
|
}
|
||||||
|
|
||||||
// Process the items with checkpointing
|
// Process the items with checkpointing
|
||||||
const results = await processWithRetry(
|
const results = await processWithRetry(
|
||||||
itemsToProcess,
|
itemsToProcess,
|
||||||
@@ -276,17 +339,27 @@ export async function processWithResilience<T, R>(
|
|||||||
isCompleted: true,
|
isCompleted: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Unregister the job from shutdown manager
|
||||||
|
unregisterActiveJob(jobId);
|
||||||
|
|
||||||
return results;
|
return results;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Mark the job as failed
|
// Mark the job as failed (unless it was interrupted by shutdown)
|
||||||
|
const isShutdownError = error instanceof Error && error.message.includes('shutdown');
|
||||||
|
|
||||||
await updateMirrorJobProgress({
|
await updateMirrorJobProgress({
|
||||||
jobId,
|
jobId,
|
||||||
status: "failed",
|
status: isShutdownError ? "imported" : "failed", // Keep as imported if shutdown interrupted
|
||||||
message: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
|
message: isShutdownError
|
||||||
|
? 'Job interrupted by application shutdown - will resume on restart'
|
||||||
|
: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
|
||||||
inProgress: false,
|
inProgress: false,
|
||||||
isCompleted: true,
|
isCompleted: !isShutdownError, // Don't mark as completed if shutdown interrupted
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Unregister the job from shutdown manager
|
||||||
|
unregisterActiveJob(jobId);
|
||||||
|
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,13 +1,30 @@
|
|||||||
import { defineMiddleware } from 'astro:middleware';
|
import { defineMiddleware } from 'astro:middleware';
|
||||||
import { initializeRecovery, hasJobsNeedingRecovery, getRecoveryStatus } from './lib/recovery';
|
import { initializeRecovery, hasJobsNeedingRecovery, getRecoveryStatus } from './lib/recovery';
|
||||||
import { startCleanupService } from './lib/cleanup-service';
|
import { startCleanupService, stopCleanupService } from './lib/cleanup-service';
|
||||||
|
import { initializeShutdownManager, registerShutdownCallback } from './lib/shutdown-manager';
|
||||||
|
import { setupSignalHandlers } from './lib/signal-handlers';
|
||||||
|
|
||||||
// Flag to track if recovery has been initialized
|
// Flag to track if recovery has been initialized
|
||||||
let recoveryInitialized = false;
|
let recoveryInitialized = false;
|
||||||
let recoveryAttempted = false;
|
let recoveryAttempted = false;
|
||||||
let cleanupServiceStarted = false;
|
let cleanupServiceStarted = false;
|
||||||
|
let shutdownManagerInitialized = false;
|
||||||
|
|
||||||
export const onRequest = defineMiddleware(async (context, next) => {
|
export const onRequest = defineMiddleware(async (context, next) => {
|
||||||
|
// Initialize shutdown manager and signal handlers first
|
||||||
|
if (!shutdownManagerInitialized) {
|
||||||
|
try {
|
||||||
|
console.log('🔧 Initializing shutdown manager and signal handlers...');
|
||||||
|
initializeShutdownManager();
|
||||||
|
setupSignalHandlers();
|
||||||
|
shutdownManagerInitialized = true;
|
||||||
|
console.log('✅ Shutdown manager and signal handlers initialized');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('❌ Failed to initialize shutdown manager:', error);
|
||||||
|
// Continue anyway - this shouldn't block the application
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Initialize recovery system only once when the server starts
|
// Initialize recovery system only once when the server starts
|
||||||
// This is a fallback in case the startup script didn't run
|
// This is a fallback in case the startup script didn't run
|
||||||
if (!recoveryInitialized && !recoveryAttempted) {
|
if (!recoveryInitialized && !recoveryAttempted) {
|
||||||
@@ -60,6 +77,13 @@ export const onRequest = defineMiddleware(async (context, next) => {
|
|||||||
try {
|
try {
|
||||||
console.log('Starting automatic database cleanup service...');
|
console.log('Starting automatic database cleanup service...');
|
||||||
startCleanupService();
|
startCleanupService();
|
||||||
|
|
||||||
|
// Register cleanup service shutdown callback
|
||||||
|
registerShutdownCallback(async () => {
|
||||||
|
console.log('🛑 Shutting down cleanup service...');
|
||||||
|
stopCleanupService();
|
||||||
|
});
|
||||||
|
|
||||||
cleanupServiceStarted = true;
|
cleanupServiceStarted = true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to start cleanup service:', error);
|
console.error('Failed to start cleanup service:', error);
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import type { APIRoute } from "astro";
|
|||||||
import { db, configs, users } from "@/lib/db";
|
import { db, configs, users } from "@/lib/db";
|
||||||
import { v4 as uuidv4 } from "uuid";
|
import { v4 as uuidv4 } from "uuid";
|
||||||
import { eq } from "drizzle-orm";
|
import { eq } from "drizzle-orm";
|
||||||
|
import { calculateCleanupInterval } from "@/lib/cleanup-service";
|
||||||
|
|
||||||
export const POST: APIRoute = async ({ request }) => {
|
export const POST: APIRoute = async ({ request }) => {
|
||||||
try {
|
try {
|
||||||
@@ -56,6 +57,63 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Process schedule config - set/update nextRun if enabled, clear if disabled
|
||||||
|
const processedScheduleConfig = { ...scheduleConfig };
|
||||||
|
if (scheduleConfig.enabled) {
|
||||||
|
const now = new Date();
|
||||||
|
const interval = scheduleConfig.interval || 3600; // Default to 1 hour
|
||||||
|
|
||||||
|
// Check if we need to recalculate nextRun
|
||||||
|
// Recalculate if: no nextRun exists, or interval changed from existing config
|
||||||
|
let shouldRecalculate = !scheduleConfig.nextRun;
|
||||||
|
|
||||||
|
if (existingConfig && existingConfig.scheduleConfig) {
|
||||||
|
const existingScheduleConfig = existingConfig.scheduleConfig;
|
||||||
|
const existingInterval = existingScheduleConfig.interval || 3600;
|
||||||
|
|
||||||
|
// If interval changed, recalculate nextRun
|
||||||
|
if (interval !== existingInterval) {
|
||||||
|
shouldRecalculate = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (shouldRecalculate) {
|
||||||
|
processedScheduleConfig.nextRun = new Date(now.getTime() + interval * 1000);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Clear nextRun when disabled
|
||||||
|
processedScheduleConfig.nextRun = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process cleanup config - set/update nextRun if enabled, clear if disabled
|
||||||
|
const processedCleanupConfig = { ...cleanupConfig };
|
||||||
|
if (cleanupConfig.enabled) {
|
||||||
|
const now = new Date();
|
||||||
|
const retentionSeconds = cleanupConfig.retentionDays || 604800; // Default 7 days in seconds
|
||||||
|
const cleanupIntervalHours = calculateCleanupInterval(retentionSeconds);
|
||||||
|
|
||||||
|
// Check if we need to recalculate nextRun
|
||||||
|
// Recalculate if: no nextRun exists, or retention period changed from existing config
|
||||||
|
let shouldRecalculate = !cleanupConfig.nextRun;
|
||||||
|
|
||||||
|
if (existingConfig && existingConfig.cleanupConfig) {
|
||||||
|
const existingCleanupConfig = existingConfig.cleanupConfig;
|
||||||
|
const existingRetentionSeconds = existingCleanupConfig.retentionDays || 604800;
|
||||||
|
|
||||||
|
// If retention period changed, recalculate nextRun
|
||||||
|
if (retentionSeconds !== existingRetentionSeconds) {
|
||||||
|
shouldRecalculate = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (shouldRecalculate) {
|
||||||
|
processedCleanupConfig.nextRun = new Date(now.getTime() + cleanupIntervalHours * 60 * 60 * 1000);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Clear nextRun when disabled
|
||||||
|
processedCleanupConfig.nextRun = null;
|
||||||
|
}
|
||||||
|
|
||||||
if (existingConfig) {
|
if (existingConfig) {
|
||||||
// Update path
|
// Update path
|
||||||
await db
|
await db
|
||||||
@@ -63,8 +121,8 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
.set({
|
.set({
|
||||||
githubConfig,
|
githubConfig,
|
||||||
giteaConfig,
|
giteaConfig,
|
||||||
scheduleConfig,
|
scheduleConfig: processedScheduleConfig,
|
||||||
cleanupConfig,
|
cleanupConfig: processedCleanupConfig,
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
.where(eq(configs.id, existingConfig.id));
|
.where(eq(configs.id, existingConfig.id));
|
||||||
@@ -113,8 +171,8 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
giteaConfig,
|
giteaConfig,
|
||||||
include: [],
|
include: [],
|
||||||
exclude: [],
|
exclude: [],
|
||||||
scheduleConfig,
|
scheduleConfig: processedScheduleConfig,
|
||||||
cleanupConfig,
|
cleanupConfig: processedCleanupConfig,
|
||||||
createdAt: new Date(),
|
createdAt: new Date(),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
});
|
});
|
||||||
@@ -201,7 +259,7 @@ export const GET: APIRoute = async ({ request }) => {
|
|||||||
},
|
},
|
||||||
cleanupConfig: {
|
cleanupConfig: {
|
||||||
enabled: false,
|
enabled: false,
|
||||||
retentionDays: 7,
|
retentionDays: 604800, // 7 days in seconds
|
||||||
lastRun: null,
|
lastRun: null,
|
||||||
nextRun: null,
|
nextRun: null,
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||||
import axios from "axios";
|
|
||||||
|
|
||||||
// Mock the POST function
|
// Mock the POST function
|
||||||
const mockPOST = mock(async ({ request }) => {
|
const mockPOST = mock(async ({ request }) => {
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import type { APIRoute } from 'astro';
|
import type { APIRoute } from 'astro';
|
||||||
import axios from 'axios';
|
import { httpGet, HttpError } from '@/lib/http-client';
|
||||||
|
|
||||||
export const POST: APIRoute = async ({ request }) => {
|
export const POST: APIRoute = async ({ request }) => {
|
||||||
try {
|
try {
|
||||||
@@ -25,11 +25,9 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
const baseUrl = url.endsWith('/') ? url.slice(0, -1) : url;
|
const baseUrl = url.endsWith('/') ? url.slice(0, -1) : url;
|
||||||
|
|
||||||
// Test the connection by fetching the authenticated user
|
// Test the connection by fetching the authenticated user
|
||||||
const response = await axios.get(`${baseUrl}/api/v1/user`, {
|
const response = await httpGet(`${baseUrl}/api/v1/user`, {
|
||||||
headers: {
|
'Authorization': `token ${token}`,
|
||||||
'Authorization': `token ${token}`,
|
'Accept': 'application/json',
|
||||||
'Accept': 'application/json',
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const data = response.data;
|
const data = response.data;
|
||||||
@@ -72,8 +70,8 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
console.error('Gitea connection test failed:', error);
|
console.error('Gitea connection test failed:', error);
|
||||||
|
|
||||||
// Handle specific error types
|
// Handle specific error types
|
||||||
if (axios.isAxiosError(error) && error.response) {
|
if (error instanceof HttpError) {
|
||||||
if (error.response.status === 401) {
|
if (error.status === 401) {
|
||||||
return new Response(
|
return new Response(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
success: false,
|
success: false,
|
||||||
@@ -86,7 +84,7 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
} else if (error.response.status === 404) {
|
} else if (error.status === 404) {
|
||||||
return new Response(
|
return new Response(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
success: false,
|
success: false,
|
||||||
@@ -99,25 +97,23 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
} else if (error.status === 0) {
|
||||||
|
// Network error
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
success: false,
|
||||||
|
message: 'Could not connect to Gitea server. Please check the URL.',
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 500,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle connection errors
|
|
||||||
if (axios.isAxiosError(error) && (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND')) {
|
|
||||||
return new Response(
|
|
||||||
JSON.stringify({
|
|
||||||
success: false,
|
|
||||||
message: 'Could not connect to Gitea server. Please check the URL.',
|
|
||||||
}),
|
|
||||||
{
|
|
||||||
status: 500,
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generic error response
|
// Generic error response
|
||||||
return new Response(
|
return new Response(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import type { APIRoute } from "astro";
|
import type { APIRoute } from "astro";
|
||||||
import { db } from "@/lib/db";
|
import { db } from "@/lib/db";
|
||||||
import { organizations } from "@/lib/db";
|
import { organizations, repositories, configs } from "@/lib/db";
|
||||||
import { eq, sql } from "drizzle-orm";
|
import { eq, sql, and, count } from "drizzle-orm";
|
||||||
import {
|
import {
|
||||||
membershipRoleEnum,
|
membershipRoleEnum,
|
||||||
type OrganizationsApiResponse,
|
type OrganizationsApiResponse,
|
||||||
@@ -25,24 +25,114 @@ export const GET: APIRoute = async ({ request }) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// Fetch the user's active configuration to respect filtering settings
|
||||||
|
const [config] = await db
|
||||||
|
.select()
|
||||||
|
.from(configs)
|
||||||
|
.where(and(eq(configs.userId, userId), eq(configs.isActive, true)));
|
||||||
|
|
||||||
|
if (!config) {
|
||||||
|
return jsonResponse({
|
||||||
|
data: {
|
||||||
|
success: false,
|
||||||
|
error: "No active configuration found for this user",
|
||||||
|
},
|
||||||
|
status: 404,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const githubConfig = config.githubConfig as {
|
||||||
|
mirrorStarred: boolean;
|
||||||
|
skipForks: boolean;
|
||||||
|
privateRepositories: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
const rawOrgs = await db
|
const rawOrgs = await db
|
||||||
.select()
|
.select()
|
||||||
.from(organizations)
|
.from(organizations)
|
||||||
.where(eq(organizations.userId, userId))
|
.where(eq(organizations.userId, userId))
|
||||||
.orderBy(sql`name COLLATE NOCASE`);
|
.orderBy(sql`name COLLATE NOCASE`);
|
||||||
|
|
||||||
const orgsWithIds: Organization[] = rawOrgs.map((org) => ({
|
// Calculate repository breakdowns for each organization
|
||||||
...org,
|
const orgsWithBreakdown = await Promise.all(
|
||||||
status: repoStatusEnum.parse(org.status),
|
rawOrgs.map(async (org) => {
|
||||||
membershipRole: membershipRoleEnum.parse(org.membershipRole),
|
// Build base conditions for this organization (without private/fork filters)
|
||||||
lastMirrored: org.lastMirrored ?? undefined,
|
const baseConditions = [
|
||||||
errorMessage: org.errorMessage ?? undefined,
|
eq(repositories.userId, userId),
|
||||||
}));
|
eq(repositories.organization, org.name)
|
||||||
|
];
|
||||||
|
|
||||||
|
if (!githubConfig.mirrorStarred) {
|
||||||
|
baseConditions.push(eq(repositories.isStarred, false));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get total count with all user config filters applied
|
||||||
|
const totalConditions = [...baseConditions];
|
||||||
|
if (githubConfig.skipForks) {
|
||||||
|
totalConditions.push(eq(repositories.isForked, false));
|
||||||
|
}
|
||||||
|
if (!githubConfig.privateRepositories) {
|
||||||
|
totalConditions.push(eq(repositories.isPrivate, false));
|
||||||
|
}
|
||||||
|
|
||||||
|
const [totalCount] = await db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(repositories)
|
||||||
|
.where(and(...totalConditions));
|
||||||
|
|
||||||
|
// Get public count
|
||||||
|
const publicConditions = [...baseConditions, eq(repositories.isPrivate, false)];
|
||||||
|
if (githubConfig.skipForks) {
|
||||||
|
publicConditions.push(eq(repositories.isForked, false));
|
||||||
|
}
|
||||||
|
|
||||||
|
const [publicCount] = await db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(repositories)
|
||||||
|
.where(and(...publicConditions));
|
||||||
|
|
||||||
|
// Get private count (only if private repos are enabled in config)
|
||||||
|
const [privateCount] = githubConfig.privateRepositories ? await db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(repositories)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
...baseConditions,
|
||||||
|
eq(repositories.isPrivate, true),
|
||||||
|
...(githubConfig.skipForks ? [eq(repositories.isForked, false)] : [])
|
||||||
|
)
|
||||||
|
) : [{ count: 0 }];
|
||||||
|
|
||||||
|
// Get fork count (only if forks are enabled in config)
|
||||||
|
const [forkCount] = !githubConfig.skipForks ? await db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(repositories)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
...baseConditions,
|
||||||
|
eq(repositories.isForked, true),
|
||||||
|
...(!githubConfig.privateRepositories ? [eq(repositories.isPrivate, false)] : [])
|
||||||
|
)
|
||||||
|
) : [{ count: 0 }];
|
||||||
|
|
||||||
|
return {
|
||||||
|
...org,
|
||||||
|
status: repoStatusEnum.parse(org.status),
|
||||||
|
membershipRole: membershipRoleEnum.parse(org.membershipRole),
|
||||||
|
lastMirrored: org.lastMirrored ?? undefined,
|
||||||
|
errorMessage: org.errorMessage ?? undefined,
|
||||||
|
repositoryCount: totalCount.count,
|
||||||
|
publicRepositoryCount: publicCount.count,
|
||||||
|
privateRepositoryCount: privateCount.count,
|
||||||
|
forkRepositoryCount: forkCount.count,
|
||||||
|
};
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
const resPayload: OrganizationsApiResponse = {
|
const resPayload: OrganizationsApiResponse = {
|
||||||
success: true,
|
success: true,
|
||||||
message: "Organizations fetched successfully",
|
message: "Organizations fetched successfully",
|
||||||
organizations: orgsWithIds,
|
organizations: orgsWithBreakdown,
|
||||||
};
|
};
|
||||||
|
|
||||||
return jsonResponse({ data: resPayload, status: 200 });
|
return jsonResponse({ data: resPayload, status: 200 });
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import { db } from "@/lib/db";
|
|||||||
import { ENV } from "@/lib/config";
|
import { ENV } from "@/lib/config";
|
||||||
import { getRecoveryStatus, hasJobsNeedingRecovery } from "@/lib/recovery";
|
import { getRecoveryStatus, hasJobsNeedingRecovery } from "@/lib/recovery";
|
||||||
import os from "os";
|
import os from "os";
|
||||||
import axios from "axios";
|
import { httpGet } from "@/lib/http-client";
|
||||||
|
|
||||||
// Track when the server started
|
// Track when the server started
|
||||||
const serverStartTime = new Date();
|
const serverStartTime = new Date();
|
||||||
@@ -197,9 +197,9 @@ async function checkLatestVersion(): Promise<string> {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
// Fetch the latest release from GitHub
|
// Fetch the latest release from GitHub
|
||||||
const response = await axios.get(
|
const response = await httpGet(
|
||||||
'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
|
'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
|
||||||
{ headers: { 'Accept': 'application/vnd.github.v3+json' } }
|
{ 'Accept': 'application/vnd.github.v3+json' }
|
||||||
);
|
);
|
||||||
|
|
||||||
// Extract version from tag_name (remove 'v' prefix if present)
|
// Extract version from tag_name (remove 'v' prefix if present)
|
||||||
|
|||||||
@@ -165,11 +165,43 @@ export const POST: APIRoute = async ({ request }) => {
|
|||||||
headers: { "Content-Type": "application/json" },
|
headers: { "Content-Type": "application/json" },
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error mirroring repositories:", error);
|
// Enhanced error logging for better debugging
|
||||||
|
console.error("=== ERROR MIRRORING REPOSITORIES ===");
|
||||||
|
console.error("Error type:", error?.constructor?.name);
|
||||||
|
console.error("Error message:", error instanceof Error ? error.message : String(error));
|
||||||
|
|
||||||
|
if (error instanceof Error) {
|
||||||
|
console.error("Error stack:", error.stack);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log additional context
|
||||||
|
console.error("Request details:");
|
||||||
|
console.error("- URL:", request.url);
|
||||||
|
console.error("- Method:", request.method);
|
||||||
|
console.error("- Headers:", Object.fromEntries(request.headers.entries()));
|
||||||
|
|
||||||
|
// If it's a JSON parsing error, provide more context
|
||||||
|
if (error instanceof SyntaxError && error.message.includes('JSON')) {
|
||||||
|
console.error("🚨 JSON PARSING ERROR DETECTED:");
|
||||||
|
console.error("This suggests the response from Gitea API is not valid JSON");
|
||||||
|
console.error("Common causes:");
|
||||||
|
console.error("- Gitea server returned HTML error page instead of JSON");
|
||||||
|
console.error("- Network connection interrupted");
|
||||||
|
console.error("- Gitea server is down or misconfigured");
|
||||||
|
console.error("- Authentication token is invalid");
|
||||||
|
console.error("Check your Gitea server logs and configuration");
|
||||||
|
}
|
||||||
|
|
||||||
|
console.error("=====================================");
|
||||||
|
|
||||||
return new Response(
|
return new Response(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
error:
|
error: error instanceof Error ? error.message : "An unknown error occurred",
|
||||||
error instanceof Error ? error.message : "An unknown error occurred",
|
errorType: error?.constructor?.name || "Unknown",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
troubleshooting: error instanceof SyntaxError && error.message.includes('JSON')
|
||||||
|
? "JSON parsing error detected. Check Gitea server status and logs. Ensure Gitea is returning valid JSON responses."
|
||||||
|
: "Check application logs for more details"
|
||||||
}),
|
}),
|
||||||
{ status: 500, headers: { "Content-Type": "application/json" } }
|
{ status: 500, headers: { "Content-Type": "application/json" } }
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ export interface ScheduleConfig {
|
|||||||
|
|
||||||
export interface DatabaseCleanupConfig {
|
export interface DatabaseCleanupConfig {
|
||||||
enabled: boolean;
|
enabled: boolean;
|
||||||
retentionDays: number;
|
retentionDays: number; // Actually stores seconds, but keeping the name for compatibility
|
||||||
lastRun?: Date;
|
lastRun?: Date;
|
||||||
nextRun?: Date;
|
nextRun?: Date;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -33,6 +33,9 @@ export interface GitOrg {
|
|||||||
isIncluded: boolean;
|
isIncluded: boolean;
|
||||||
status: RepoStatus;
|
status: RepoStatus;
|
||||||
repositoryCount: number;
|
repositoryCount: number;
|
||||||
|
publicRepositoryCount?: number;
|
||||||
|
privateRepositoryCount?: number;
|
||||||
|
forkRepositoryCount?: number;
|
||||||
createdAt: Date;
|
createdAt: Date;
|
||||||
updatedAt: Date;
|
updatedAt: Date;
|
||||||
}
|
}
|
||||||
|
|||||||