Mirror of https://github.com/RayLabsHQ/gitea-mirror.git (synced 2025-12-06 19:46:44 +03:00)
Compare commits
7 Commits
c5de7e616d
309f8c4341
0c596ac241
894be88a28
6ab7f0a5a0
abe3113755
f4bc28e6c2
.github/workflows/astro-build-test.yml (vendored, 4 changes)

@@ -38,10 +38,10 @@ jobs:
           bun install

       - name: Run tests
-        run: bunx vitest run
+        run: bun test --coverage

       - name: Build Astro project
-        run: bunx astro build
+        run: bunx --bun astro build

       - name: Upload build artifacts
         uses: actions/upload-artifact@v4
@@ -111,9 +111,28 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
     status TEXT NOT NULL DEFAULT 'imported',
     message TEXT NOT NULL,
     timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+    -- New fields for job resilience
+    job_type TEXT NOT NULL DEFAULT 'mirror',
+    batch_id TEXT,
+    total_items INTEGER,
+    completed_items INTEGER DEFAULT 0,
+    item_ids TEXT, -- JSON array as text
+    completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+    in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+    started_at TIMESTAMP,
+    completed_at TIMESTAMP,
+    last_checkpoint TIMESTAMP,
+
     FOREIGN KEY (user_id) REFERENCES users(id)
   );
+
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);

   CREATE TABLE IF NOT EXISTS events (
     id TEXT PRIMARY KEY,
     user_id TEXT NOT NULL,
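The resilience columns lean on SQLite's simple type system: `item_ids` and `completed_item_ids` hold JSON arrays as TEXT, and `in_progress` holds a boolean as INTEGER. A minimal sketch of the read/write convention this implies, using bun:sqlite (the job id and values here are placeholders, not from the diff):

```typescript
import { Database } from "bun:sqlite";

const db = new Database("data/gitea-mirror.db");

// Serialize on write: the array becomes JSON text, the boolean becomes 0/1.
db.query("UPDATE mirror_jobs SET item_ids = ?, in_progress = ? WHERE id = ?")
  .run(JSON.stringify(["repo-1", "repo-2"]), 1, "job-1");

// Parse on read: the application layer restores the native types.
const row = db
  .query("SELECT item_ids, in_progress FROM mirror_jobs WHERE id = ?")
  .get("job-1") as { item_ids: string | null; in_progress: number } | null;

const itemIds: string[] = row?.item_ids ? JSON.parse(row.item_ids) : [];
const inProgress = row?.in_progress === 1;
```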
@@ -138,8 +157,19 @@ else
   bun dist/scripts/manage-db.js fix
 fi

-# Since the application is not used by anyone yet, we've removed the schema updates and migrations
-echo "Database already exists, no migrations needed."
+# Run database migrations
+echo "Running database migrations..."
+
+# Update mirror_jobs table with new columns for resilience
+if [ -f "dist/scripts/update-mirror-jobs-table.js" ]; then
+  echo "Updating mirror_jobs table..."
+  bun dist/scripts/update-mirror-jobs-table.js
+elif [ -f "scripts/update-mirror-jobs-table.ts" ]; then
+  echo "Updating mirror_jobs table using TypeScript script..."
+  bun scripts/update-mirror-jobs-table.ts
+else
+  echo "Warning: Could not find mirror_jobs table update script."
+fi
 fi

 # Start the application
docs/testing.md (new file, 127 lines)

@@ -0,0 +1,127 @@

# Testing in Gitea Mirror

This document provides guidance on testing in the Gitea Mirror project.

## Current Status

The project now uses Bun's built-in test runner, which is Jest-compatible and provides a fast, reliable testing experience. We've migrated away from Vitest due to compatibility issues with Bun.

## Running Tests

To run tests, use the following commands:

```bash
# Run all tests
bun test

# Run tests in watch mode (automatically re-run when files change)
bun test --watch

# Run tests with coverage reporting
bun test --coverage
```

## Test File Naming Conventions

Bun's test runner automatically discovers test files that match the following patterns:

- `*.test.{js|jsx|ts|tsx}`
- `*_test.{js|jsx|ts|tsx}`
- `*.spec.{js|jsx|ts|tsx}`
- `*_spec.{js|jsx|ts|tsx}`

## Writing Tests

The project uses Bun's test runner with a Jest-compatible API. Here's an example test:

```typescript
// example.test.ts
import { describe, test, expect } from "bun:test";

describe("Example Test", () => {
  test("should pass", () => {
    expect(true).toBe(true);
  });
});
```

### Testing React Components

For testing React components, we use React Testing Library:

```typescript
// component.test.tsx
import { describe, test, expect } from "bun:test";
import { render, screen } from "@testing-library/react";
import MyComponent from "../components/MyComponent";

describe("MyComponent", () => {
  test("renders correctly", () => {
    render(<MyComponent />);
    expect(screen.getByText("Hello World")).toBeInTheDocument();
  });
});
```

## Test Setup

The test setup is defined in `src/tests/setup.bun.ts` and includes:

- Automatic cleanup after each test
- Setup for any global test environment needs

## Mocking

Bun's test runner provides built-in mocking capabilities:

```typescript
import { test, expect, mock } from "bun:test";

// Create a mock function
const mockFn = mock(() => "mocked value");

test("mock function", () => {
  const result = mockFn();
  expect(result).toBe("mocked value");
  expect(mockFn).toHaveBeenCalled();
});

// Mock a module
mock.module("./some-module", () => {
  return {
    someFunction: () => "mocked module function"
  };
});
```

## CI Integration

The CI workflow has been updated to use Bun's test runner. Tests are automatically run as part of the CI pipeline.

## Test Coverage

To generate test coverage reports, run:

```bash
bun test --coverage
```

This will generate a coverage report in the `coverage` directory.

## Types of Tests

The project includes several types of tests:

1. **Unit Tests**: Testing individual functions and utilities
2. **API Tests**: Testing API endpoints
3. **Component Tests**: Testing React components
4. **Integration Tests**: Testing how components work together

## Future Improvements

When expanding the test suite, consider:

1. Adding more comprehensive API endpoint tests
2. Increasing component test coverage
3. Setting up end-to-end tests with a tool like Playwright
4. Adding performance tests for critical paths
package.json (14 changes)

@@ -1,27 +1,29 @@
 {
   "name": "gitea-mirror",
   "type": "module",
-  "version": "2.1.0",
+  "version": "2.5.0",
   "engines": {
     "bun": ">=1.2.9"
   },
   "scripts": {
-    "setup": "bun install && bun run manage-db init",
+    "setup": "bun install && bun run manage-db init && bun run update-db",
     "dev": "bunx --bun astro dev",
-    "dev:clean": "bun run cleanup-db && bun run manage-db init && bunx --bun astro dev",
+    "dev:clean": "bun run cleanup-db && bun run manage-db init && bun run update-db && bunx --bun astro dev",
     "build": "bunx --bun astro build",
     "cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
     "manage-db": "bun scripts/manage-db.ts",
     "init-db": "bun scripts/manage-db.ts init",
+    "update-db": "bun scripts/update-mirror-jobs-table.ts",
     "check-db": "bun scripts/manage-db.ts check",
     "fix-db": "bun scripts/manage-db.ts fix",
     "reset-users": "bun scripts/manage-db.ts reset-users",
     "cleanup-events": "bun scripts/cleanup-events.ts",
     "preview": "bunx --bun astro preview",
     "start": "bun dist/server/entry.mjs",
-    "start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
-    "test": "bunx --bun vitest run",
-    "test:watch": "bunx --bun vitest",
+    "start:fresh": "bun run cleanup-db && bun run manage-db init && bun run update-db && bun dist/server/entry.mjs",
+    "test": "bun test",
+    "test:watch": "bun test --watch",
+    "test:coverage": "bun test --coverage",
     "astro": "bunx --bun astro"
   },
   "dependencies": {
scripts/manage-db.ts

@@ -145,9 +145,31 @@ async function ensureTablesExist() {
         status TEXT NOT NULL DEFAULT 'imported',
         message TEXT NOT NULL,
         timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+        -- New fields for job resilience
+        job_type TEXT NOT NULL DEFAULT 'mirror',
+        batch_id TEXT,
+        total_items INTEGER,
+        completed_items INTEGER DEFAULT 0,
+        item_ids TEXT, -- JSON array as text
+        completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+        in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+        started_at TIMESTAMP,
+        completed_at TIMESTAMP,
+        last_checkpoint TIMESTAMP,
+
         FOREIGN KEY (user_id) REFERENCES users(id)
       )
     `);
+
+    // Create indexes for better performance
+    db.exec(`
+      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+      CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
+    `);
     break;
   case "events":
     db.exec(`
scripts/update-mirror-jobs-table.ts (new file, 133 lines)

@@ -0,0 +1,133 @@

#!/usr/bin/env bun
/**
 * Script to update the mirror_jobs table with new columns for resilience
 */

import { Database } from "bun:sqlite";
import fs from "fs";
import path from "path";

// Define the database paths
const dataDir = path.join(process.cwd(), "data");
const dbPath = path.join(dataDir, "gitea-mirror.db");

// Ensure data directory exists
if (!fs.existsSync(dataDir)) {
  fs.mkdirSync(dataDir, { recursive: true });
  console.log(`Created data directory at ${dataDir}`);
}

// Check if database exists
if (!fs.existsSync(dbPath)) {
  console.error(`Database file not found at ${dbPath}`);
  console.error("Please run 'bun run init-db' first to create the database.");
  process.exit(1);
}

// Connect to the database
const db = new Database(dbPath);

// Enable foreign keys
db.exec("PRAGMA foreign_keys = ON;");

// Function to check if a column exists in a table
function columnExists(tableName: string, columnName: string): boolean {
  const result = db.query(
    `PRAGMA table_info(${tableName})`
  ).all() as { name: string }[];

  return result.some(column => column.name === columnName);
}

// Main function to update the mirror_jobs table
async function updateMirrorJobsTable() {
  console.log("Checking mirror_jobs table for missing columns...");

  // Start a transaction
  db.exec("BEGIN TRANSACTION;");

  try {
    // Check and add each new column if it doesn't exist
    const columnsToAdd = [
      { name: "job_type", definition: "TEXT NOT NULL DEFAULT 'mirror'" },
      { name: "batch_id", definition: "TEXT" },
      { name: "total_items", definition: "INTEGER" },
      { name: "completed_items", definition: "INTEGER DEFAULT 0" },
      { name: "item_ids", definition: "TEXT" }, // JSON array as text
      { name: "completed_item_ids", definition: "TEXT DEFAULT '[]'" }, // JSON array as text
      { name: "in_progress", definition: "INTEGER NOT NULL DEFAULT 0" }, // Boolean as integer
      { name: "started_at", definition: "TIMESTAMP" },
      { name: "completed_at", definition: "TIMESTAMP" },
      { name: "last_checkpoint", definition: "TIMESTAMP" }
    ];

    let columnsAdded = 0;

    for (const column of columnsToAdd) {
      if (!columnExists("mirror_jobs", column.name)) {
        console.log(`Adding column '${column.name}' to mirror_jobs table...`);
        db.exec(`ALTER TABLE mirror_jobs ADD COLUMN ${column.name} ${column.definition};`);
        columnsAdded++;
      }
    }

    // Commit the transaction
    db.exec("COMMIT;");

    if (columnsAdded > 0) {
      console.log(`✅ Added ${columnsAdded} new columns to mirror_jobs table.`);
    } else {
      console.log("✅ All required columns already exist in mirror_jobs table.");
    }

    // Create indexes for better performance
    console.log("Creating indexes for mirror_jobs table...");

    // Only create indexes if they don't exist
    const indexesResult = db.query(
      `SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='mirror_jobs'`
    ).all() as { name: string }[];

    const existingIndexes = indexesResult.map(idx => idx.name);

    const indexesToCreate = [
      { name: "idx_mirror_jobs_user_id", columns: "user_id" },
      { name: "idx_mirror_jobs_batch_id", columns: "batch_id" },
      { name: "idx_mirror_jobs_in_progress", columns: "in_progress" },
      { name: "idx_mirror_jobs_job_type", columns: "job_type" },
      { name: "idx_mirror_jobs_timestamp", columns: "timestamp" }
    ];

    let indexesCreated = 0;

    for (const index of indexesToCreate) {
      if (!existingIndexes.includes(index.name)) {
        console.log(`Creating index '${index.name}'...`);
        db.exec(`CREATE INDEX ${index.name} ON mirror_jobs(${index.columns});`);
        indexesCreated++;
      }
    }

    if (indexesCreated > 0) {
      console.log(`✅ Created ${indexesCreated} new indexes for mirror_jobs table.`);
    } else {
      console.log("✅ All required indexes already exist for mirror_jobs table.");
    }

    console.log("Mirror jobs table update completed successfully.");
  } catch (error) {
    // Rollback the transaction in case of error
    db.exec("ROLLBACK;");
    console.error("❌ Error updating mirror_jobs table:", error);
    process.exit(1);
  } finally {
    // Close the database connection
    db.close();
  }
}

// Run the update function
updateMirrorJobsTable().catch(error => {
  console.error("Unhandled error:", error);
  process.exit(1);
});
src/components/layout/Sidebar.tsx

@@ -2,6 +2,7 @@ import { useEffect, useState } from "react";
 import { cn } from "@/lib/utils";
 import { ExternalLink } from "lucide-react";
 import { links } from "@/data/Sidebar";
+import { VersionInfo } from "./VersionInfo";

 interface SidebarProps {
   className?: string;
@@ -19,7 +20,7 @@ export function Sidebar({ className }: SidebarProps) {

   return (
     <aside className={cn("w-64 border-r bg-background", className)}>
-      <div className="flex flex-col h-full py-4">
+      <div className="flex flex-col h-full pt-4">
         <nav className="flex flex-col gap-y-1 pl-2 pr-3">
           {links.map((link, index) => {
             const isActive = currentPath === link.href;
@@ -59,6 +60,7 @@ export function Sidebar({ className }: SidebarProps) {
             <ExternalLink className="h-3 w-3" />
           </a>
         </div>
+        <VersionInfo />
       </div>
     </div>
   </aside>
src/components/layout/VersionInfo.tsx (new file, 49 lines)

@@ -0,0 +1,49 @@

import { useEffect, useState } from "react";
import { healthApi } from "@/lib/api";

export function VersionInfo() {
  const [versionInfo, setVersionInfo] = useState<{
    current: string;
    latest: string;
    updateAvailable: boolean;
  }>({
    current: "loading...",
    latest: "",
    updateAvailable: false
  });

  useEffect(() => {
    const fetchVersion = async () => {
      try {
        const healthData = await healthApi.check();
        setVersionInfo({
          current: healthData.version || "unknown",
          latest: healthData.latestVersion || "unknown",
          updateAvailable: healthData.updateAvailable || false
        });
      } catch (error) {
        console.error("Failed to fetch version:", error);
        setVersionInfo({
          current: "unknown",
          latest: "",
          updateAvailable: false
        });
      }
    };

    fetchVersion();
  }, []);

  return (
    <div className="text-xs text-muted-foreground text-center pt-2 pb-3 border-t border-border mt-2">
      {versionInfo.updateAvailable ? (
        <div className="flex flex-col">
          <span>v{versionInfo.current}</span>
          <span className="text-primary">v{versionInfo.latest} available</span>
        </div>
      ) : (
        <span>v{versionInfo.current}</span>
      )}
    </div>
  );
}
src/lib/api.ts

@@ -94,6 +94,8 @@ export interface HealthResponse {
   status: "ok" | "error";
   timestamp: string;
   version: string;
+  latestVersion: string;
+  updateAvailable: boolean;
   database: {
     connected: boolean;
     message: string;
@@ -147,6 +149,8 @@ export const healthApi = {
       timestamp: new Date().toISOString(),
       error: error instanceof Error ? error.message : "Unknown error checking health",
       version: "unknown",
+      latestVersion: "unknown",
+      updateAvailable: false,
       database: { connected: false, message: "Failed to connect to API" },
       system: {
         uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
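The two new fields imply the health endpoint compares the running version against the latest release. The server-side logic is not part of this compare; a minimal sketch of one way `updateAvailable` could be derived (an assumption, not the project's code):

```typescript
// Sketch only: naive semver comparison for a "2.1.0" vs "2.5.0" style check.
function isUpdateAvailable(current: string, latest: string): boolean {
  const parse = (v: string) => v.replace(/^v/, "").split(".").map(Number);
  const [a, b] = [parse(current), parse(latest)];
  for (let i = 0; i < 3; i++) {
    if ((b[i] ?? 0) > (a[i] ?? 0)) return true;
    if ((b[i] ?? 0) < (a[i] ?? 0)) return false;
  }
  return false;
}

isUpdateAvailable("2.1.0", "2.5.0"); // true
```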
src/lib/db/index.test.ts (new file, 42 lines)

@@ -0,0 +1,42 @@

import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
import { drizzle } from "drizzle-orm/bun-sqlite";

// Silence console logs during tests
let originalConsoleLog: typeof console.log;

beforeAll(() => {
  // Save original console.log
  originalConsoleLog = console.log;
  // Replace with no-op function
  console.log = () => {};
});

afterAll(() => {
  // Restore original console.log
  console.log = originalConsoleLog;
});

// Mock the database module
mock.module("bun:sqlite", () => {
  return {
    Database: mock(function() {
      return {
        query: mock(() => ({
          all: mock(() => []),
          run: mock(() => ({}))
        }))
      };
    })
  };
});

// Mock the database tables
describe("Database Schema", () => {
  test("database connection can be created", async () => {
    // Import the db from the module
    const { db } = await import("./index");

    // Check that db is defined
    expect(db).toBeDefined();
  });
});
src/lib/db/schema.ts

@@ -189,6 +189,18 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
   timestamp: integer("timestamp", { mode: "timestamp" })
     .notNull()
     .default(new Date()),
+
+  // New fields for job resilience
+  jobType: text("job_type").notNull().default("mirror"),
+  batchId: text("batch_id"),
+  totalItems: integer("total_items"),
+  completedItems: integer("completed_items").default(0),
+  itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
+  completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
+  inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
+  startedAt: integer("started_at", { mode: "timestamp" }),
+  completedAt: integer("completed_at", { mode: "timestamp" }),
+  lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
 });

 export const organizations = sqliteTable("organizations", {
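On the Drizzle side these are the same raw columns as in the SQL above, but `mode: "json"`, `mode: "boolean"` and `mode: "timestamp"` let call sites work with native values. A sketch of a checkpoint update against this schema (the job id is a placeholder; the import paths follow the `@/lib` aliases used elsewhere in this compare):

```typescript
import { eq } from "drizzle-orm";
import { db } from "@/lib/db";
import { mirrorJobs } from "@/lib/db/schema";

// Drizzle serializes the array to TEXT, the boolean to INTEGER and the
// Date to an integer timestamp, matching the raw SQL schema above.
await db
  .update(mirrorJobs)
  .set({
    itemIds: ["repo-1", "repo-2"],
    inProgress: true,
    lastCheckpoint: new Date(),
  })
  .where(eq(mirrorJobs.id, "job-1"));
```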
@@ -111,6 +111,18 @@ export const mirrorJobSchema = z.object({
   status: repoStatusEnum.default("imported"),
   message: z.string(),
   timestamp: z.date().default(() => new Date()),
+
+  // New fields for job resilience
+  jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
+  batchId: z.string().uuid().optional(), // Group related jobs together
+  totalItems: z.number().optional(), // Total number of items to process
+  completedItems: z.number().optional(), // Number of items completed
+  itemIds: z.array(z.string()).optional(), // IDs of items to process
+  completedItemIds: z.array(z.string()).optional(), // IDs of completed items
+  inProgress: z.boolean().default(false), // Whether the job is currently running
+  startedAt: z.date().optional(), // When the job started
+  completedAt: z.date().optional(), // When the job completed
+  lastCheckpoint: z.date().optional(), // Last time progress was saved
 });

 export type MirrorJob = z.infer<typeof mirrorJobSchema>;
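The Zod schema mirrors the storage schema for runtime validation. A quick sketch validating just the resilience fields with `.pick()` (the schema's file path is not shown in this compare and is assumed here):

```typescript
import { mirrorJobSchema } from "@/types/mirror"; // assumed path

// Validate only the new fields; jobType is constrained to the enum
// and defaults apply when a field is omitted.
const progress = mirrorJobSchema
  .pick({ jobType: true, inProgress: true, completedItemIds: true })
  .parse({
    jobType: "sync",
    inProgress: true,
    completedItemIds: ["repo-1"],
  });
```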
src/lib/gitea.test.ts (new file, 120 lines)

@@ -0,0 +1,120 @@

import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { Octokit } from "@octokit/rest";
import { repoStatusEnum } from "@/types/Repository";

// Mock the isRepoPresentInGitea function
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      update: () => ({
        set: () => ({
          where: () => Promise.resolve()
        })
      })
    },
    repositories: {},
    organizations: {}
  };
});

// Mock the helpers module
mock.module("@/lib/helpers", () => {
  return {
    createMirrorJob: mock(() => Promise.resolve("job-id"))
  };
});

// Mock superagent
mock.module("superagent", () => {
  const mockPost = mock(() => ({
    set: () => ({
      set: () => ({
        send: () => Promise.resolve({ body: { id: 123 } })
      })
    })
  }));

  const mockGet = mock(() => ({
    set: () => Promise.resolve({ body: [] })
  }));

  return {
    post: mockPost,
    get: mockGet
  };
});

// Mock the gitea module itself
mock.module("./gitea", () => {
  return {
    isRepoPresentInGitea: mockIsRepoPresentInGitea,
    mirrorGithubRepoToGitea: mock(async () => {}),
    mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
  };
});

describe("Gitea Repository Mirroring", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
    // Import the mocked function
    const { mirrorGithubRepoToGitea } = await import("./gitea");

    // Create mock Octokit instance
    const octokit = {} as Octokit;

    // Create mock repository (private)
    const repository = {
      id: "repo-id",
      name: "test-repo",
      fullName: "testuser/test-repo",
      url: "https://github.com/testuser/test-repo",
      cloneUrl: "https://github.com/testuser/test-repo.git",
      owner: "testuser",
      isPrivate: true,
      status: repoStatusEnum.parse("imported")
    };

    // Create mock config
    const config = {
      id: "config-id",
      userId: "user-id",
      githubConfig: {
        token: "github-token",
        mirrorIssues: false
      },
      giteaConfig: {
        url: "https://gitea.example.com",
        token: "gitea-token",
        username: "giteauser"
      }
    };

    // Call the function
    await mirrorGithubRepoToGitea({
      octokit,
      repository: repository as any,
      config
    });

    // Check that the function was called
    expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
  });
});
src/lib/gitea.ts (229 changes)

@@ -601,11 +601,22 @@ export async function mirrorGitHubOrgToGitea({
     .from(repositories)
     .where(eq(repositories.organization, organization.name));

-  for (const repo of orgRepos) {
-    await mirrorGitHubRepoToGiteaOrg({
-      octokit,
-      config,
-      repository: {
+  if (orgRepos.length === 0) {
+    console.log(`No repositories found for organization ${organization.name}`);
+    return;
+  }
+
+  console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
+
+  // Import the processWithRetry function
+  const { processWithRetry } = await import("@/lib/utils/concurrency");
+
+  // Process repositories in parallel with concurrency control
+  await processWithRetry(
+    orgRepos,
+    async (repo) => {
+      // Prepare repository data
+      const repoData = {
         ...repo,
         status: repo.status as RepoStatus,
         visibility: repo.visibility as RepositoryVisibility,
@@ -614,11 +625,37 @@ export async function mirrorGitHubOrgToGitea({
         organization: repo.organization ?? undefined,
         forkedFrom: repo.forkedFrom ?? undefined,
         mirroredLocation: repo.mirroredLocation || "",
-      },
-      giteaOrgId,
-      orgName: organization.name,
-    });
-  }
+      };
+
+      // Log the start of mirroring
+      console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);
+
+      // Mirror the repository
+      await mirrorGitHubRepoToGiteaOrg({
+        octokit,
+        config,
+        repository: repoData,
+        giteaOrgId,
+        orgName: organization.name,
+      });
+
+      return repo;
+    },
+    {
+      concurrencyLimit: 3, // Process 3 repositories at a time
+      maxRetries: 2,
+      retryDelay: 2000,
+      onProgress: (completed, total, result) => {
+        const percentComplete = Math.round((completed / total) * 100);
+        if (result) {
+          console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
+        }
+      },
+      onRetry: (repo, error, attempt) => {
+        console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
+      }
+    }
+  );

   console.log(`Organization ${organization.name} mirrored successfully`);

@@ -837,7 +874,15 @@ export const mirrorGitRepoIssuesToGitea = async ({
     (res) => res.data
   );

-  console.log(`Mirroring ${issues.length} issues from ${repository.fullName}`);
+  // Filter out pull requests
+  const filteredIssues = issues.filter(issue => !(issue as any).pull_request);
+
+  console.log(`Mirroring ${filteredIssues.length} issues from ${repository.fullName}`);
+
+  if (filteredIssues.length === 0) {
+    console.log(`No issues to mirror for ${repository.fullName}`);
+    return;
+  }

   // Get existing labels from Gitea
   const giteaLabelsRes = await superagent
@@ -851,58 +896,60 @@ export const mirrorGitRepoIssuesToGitea = async ({
     giteaLabels.map((label: any) => [label.name, label.id])
   );

-  for (const issue of issues) {
-    if ((issue as any).pull_request) {
-      continue;
-    }
-
-    const githubLabelNames =
-      issue.labels
-        ?.map((l) => (typeof l === "string" ? l : l.name))
-        .filter((l): l is string => !!l) || [];
+  // Import the processWithRetry function
+  const { processWithRetry } = await import("@/lib/utils/concurrency");
+
+  // Process issues in parallel with concurrency control
+  await processWithRetry(
+    filteredIssues,
+    async (issue) => {
+      const githubLabelNames =
+        issue.labels
+          ?.map((l) => (typeof l === "string" ? l : l.name))
+          .filter((l): l is string => !!l) || [];

       const giteaLabelIds: number[] = [];

       // Resolve or create labels in Gitea
       for (const name of githubLabelNames) {
         if (labelMap.has(name)) {
           giteaLabelIds.push(labelMap.get(name)!);
         } else {
           try {
             const created = await superagent
               .post(
                 `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
               )
               .set("Authorization", `token ${config.giteaConfig.token}`)
               .send({ name, color: "#ededed" }); // Default color

             labelMap.set(name, created.body.id);
             giteaLabelIds.push(created.body.id);
           } catch (labelErr) {
             console.error(
               `Failed to create label "${name}" in Gitea: ${labelErr}`
             );
           }
         }
       }

       const originalAssignees =
         issue.assignees && issue.assignees.length > 0
           ? `\n\nOriginally assigned to: ${issue.assignees
               .map((a) => `@${a.login}`)
               .join(", ")} on GitHub.`
           : "";

       const issuePayload: any = {
         title: issue.title,
         body: `Originally created by @${
           issue.user?.login
         } on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
         closed: issue.state === "closed",
         labels: giteaLabelIds,
       };

-    try {
+      // Create the issue in Gitea
       const createdIssue = await superagent
         .post(
           `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
@@ -922,41 +969,49 @@ export const mirrorGitRepoIssuesToGitea = async ({
         (res) => res.data
       );

-      for (const comment of comments) {
-        try {
-          await superagent
-            .post(
-              `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
-            )
-            .set("Authorization", `token ${config.giteaConfig.token}`)
-            .send({
-              body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
-            });
-        } catch (commentErr) {
-          console.error(
-            `Failed to copy comment to Gitea for issue "${issue.title}": ${
-              commentErr instanceof Error
-                ? commentErr.message
-                : String(commentErr)
-            }`
-          );
-        }
-      }
-    } catch (err) {
-      if (err instanceof Error && (err as any).response) {
-        console.error(
-          `Failed to create issue "${issue.title}" in Gitea: ${err.message}`
-        );
-        console.error(
-          `Response body: ${JSON.stringify((err as any).response.body)}`
-        );
-      } else {
-        console.error(
-          `Failed to create issue "${issue.title}" in Gitea: ${
-            err instanceof Error ? err.message : String(err)
-          }`
-        );
-      }
-    }
-  }
+      // Process comments in parallel with concurrency control
+      if (comments.length > 0) {
+        await processWithRetry(
+          comments,
+          async (comment) => {
+            await superagent
+              .post(
+                `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
+              )
+              .set("Authorization", `token ${config.giteaConfig.token}`)
+              .send({
+                body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
+              });
+            return comment;
+          },
+          {
+            concurrencyLimit: 5,
+            maxRetries: 2,
+            retryDelay: 1000,
+            onRetry: (comment, error, attempt) => {
+              console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
+            }
+          }
+        );
+      }
+
+      return issue;
+    },
+    {
+      concurrencyLimit: 3, // Process 3 issues at a time
+      maxRetries: 2,
+      retryDelay: 2000,
+      onProgress: (completed, total, result) => {
+        const percentComplete = Math.round((completed / total) * 100);
+        if (result) {
+          console.log(`Mirrored issue "${result.title}" (${completed}/${total}, ${percentComplete}%)`);
+        }
+      },
+      onRetry: (issue, error, attempt) => {
+        console.log(`Retrying issue "${issue.title}" (attempt ${attempt}): ${error.message}`);
+      }
+    }
+  );
+
+  console.log(`Completed mirroring ${filteredIssues.length} issues for ${repository.fullName}`);
 };
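`processWithRetry` is imported from `@/lib/utils/concurrency`, which is not included in this compare. A minimal sketch consistent with the call sites above, a fixed-size worker pool with per-item retries and the onProgress/onRetry hooks the diff passes (an inference, not the project's implementation):

```typescript
// Sketch only: inferred from the call sites in this diff.
interface ProcessOptions<T, R> {
  concurrencyLimit: number;
  maxRetries: number;   // retries after the first attempt
  retryDelay: number;   // ms between attempts
  onProgress?: (completed: number, total: number, result?: R) => void;
  onRetry?: (item: T, error: Error, attempt: number) => void;
}

export async function processWithRetry<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  opts: ProcessOptions<T, R>
): Promise<R[]> {
  const results: R[] = []; // completion order, not input order
  let completed = 0;
  let next = 0;

  async function runOne(item: T): Promise<R> {
    for (let attempt = 1; ; attempt++) {
      try {
        return await worker(item);
      } catch (err) {
        if (attempt > opts.maxRetries) throw err;
        opts.onRetry?.(item, err as Error, attempt);
        await new Promise((r) => setTimeout(r, opts.retryDelay));
      }
    }
  }

  // Fixed-size pool: each lane pulls the next unprocessed item.
  const lanes = Array.from({ length: opts.concurrencyLimit }, async () => {
    while (next < items.length) {
      const item = items[next++];
      const result = await runOne(item);
      results.push(result);
      completed++;
      opts.onProgress?.(completed, items.length, result);
    }
  });

  await Promise.all(lanes);
  return results;
}
```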
src/lib/helpers.ts

@@ -12,6 +12,11 @@ export async function createMirrorJob({
   message,
   status,
   details,
+  jobType,
+  batchId,
+  totalItems,
+  itemIds,
+  inProgress,
 }: {
   userId: string;
   organizationId?: string;
@@ -21,6 +26,11 @@ export async function createMirrorJob({
   details?: string;
   message: string;
   status: RepoStatus;
+  jobType?: "mirror" | "sync" | "retry";
+  batchId?: string;
+  totalItems?: number;
+  itemIds?: string[];
+  inProgress?: boolean;
 }) {
   const jobId = uuidv4();
   const currentTimestamp = new Date();
@@ -32,11 +42,22 @@ export async function createMirrorJob({
     repositoryName,
     organizationId,
     organizationName,
-    configId: uuidv4(),
     details,
     message: message,
     status: status,
     timestamp: currentTimestamp,
+
+    // New resilience fields
+    jobType: jobType || "mirror",
+    batchId: batchId || undefined,
+    totalItems: totalItems || undefined,
+    completedItems: 0,
+    itemIds: itemIds || undefined,
+    completedItemIds: [],
+    inProgress: inProgress !== undefined ? inProgress : false,
+    startedAt: inProgress ? currentTimestamp : undefined,
+    completedAt: undefined,
+    lastCheckpoint: undefined,
   };

   try {
@@ -57,3 +78,186 @@ export async function createMirrorJob({
     throw new Error("Error creating mirror job");
   }
 }
+
+/**
+ * Updates the progress of a mirror job
+ */
+export async function updateMirrorJobProgress({
+  jobId,
+  completedItemId,
+  status,
+  message,
+  details,
+  inProgress,
+  isCompleted,
+}: {
+  jobId: string;
+  completedItemId?: string;
+  status?: RepoStatus;
+  message?: string;
+  details?: string;
+  inProgress?: boolean;
+  isCompleted?: boolean;
+}) {
+  try {
+    // Get the current job (eq comes from "drizzle-orm")
+    const [job] = await db
+      .select()
+      .from(mirrorJobs)
+      .where(eq(mirrorJobs.id, jobId));
+
+    if (!job) {
+      throw new Error(`Mirror job with ID ${jobId} not found`);
+    }
+
+    // Update the job with new progress
+    const updates: Record<string, any> = {
+      lastCheckpoint: new Date(),
+    };
+
+    // Add completed item if provided
+    if (completedItemId) {
+      const completedItemIds = job.completedItemIds || [];
+      if (!completedItemIds.includes(completedItemId)) {
+        updates.completedItemIds = [...completedItemIds, completedItemId];
+        updates.completedItems = (job.completedItems || 0) + 1;
+      }
+    }
+
+    // Update status if provided
+    if (status) {
+      updates.status = status;
+    }
+
+    // Update message if provided
+    if (message) {
+      updates.message = message;
+    }
+
+    // Update details if provided
+    if (details) {
+      updates.details = details;
+    }
+
+    // Update in-progress status if provided
+    if (inProgress !== undefined) {
+      updates.inProgress = inProgress;
+    }
+
+    // Mark as completed if specified
+    if (isCompleted) {
+      updates.inProgress = false;
+      updates.completedAt = new Date();
+    }
+
+    // Update the job in the database
+    await db
+      .update(mirrorJobs)
+      .set(updates)
+      .where(eq(mirrorJobs.id, jobId));
+
+    // Publish the event
+    const updatedJob = {
+      ...job,
+      ...updates,
+    };
+
+    await publishEvent({
+      userId: job.userId,
+      channel: `mirror-status:${job.userId}`,
+      payload: updatedJob,
+    });
+
+    return updatedJob;
+  } catch (error) {
+    console.error("Error updating mirror job progress:", error);
+    throw new Error("Error updating mirror job progress");
+  }
+}
+
+/**
+ * Finds interrupted jobs that need to be resumed
+ */
+export async function findInterruptedJobs() {
+  try {
+    // Find jobs that are marked as in-progress but haven't been updated recently
+    const cutoffTime = new Date();
+    cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates
+
+    // (and/or/isNull/lt are drizzle-orm operators)
+    const interruptedJobs = await db
+      .select()
+      .from(mirrorJobs)
+      .where(
+        and(
+          eq(mirrorJobs.inProgress, true),
+          or(
+            isNull(mirrorJobs.lastCheckpoint),
+            lt(mirrorJobs.lastCheckpoint, cutoffTime)
+          )
+        )
+      );
+
+    return interruptedJobs;
+  } catch (error) {
+    console.error("Error finding interrupted jobs:", error);
+    return [];
+  }
+}
+
+/**
+ * Resumes an interrupted job
+ */
+export async function resumeInterruptedJob(job: any) {
+  try {
+    console.log(`Resuming interrupted job: ${job.id}`);
+
+    // Skip if job doesn't have the necessary data to resume
+    if (!job.itemIds || !job.completedItemIds) {
+      console.log(`Cannot resume job ${job.id}: missing item data`);
+
+      // Mark the job as failed
+      await updateMirrorJobProgress({
+        jobId: job.id,
+        status: "failed",
+        message: "Job interrupted and could not be resumed",
+        details: "The job was interrupted and did not have enough information to resume",
+        inProgress: false,
+        isCompleted: true,
+      });
+
+      return null;
+    }
+
+    // Calculate remaining items
+    const remainingItemIds = job.itemIds.filter(
+      (id: string) => !job.completedItemIds.includes(id)
+    );
+
+    if (remainingItemIds.length === 0) {
+      console.log(`Job ${job.id} has no remaining items, marking as completed`);
+
+      // Mark the job as completed
+      await updateMirrorJobProgress({
+        jobId: job.id,
+        status: "mirrored",
+        message: "Job completed after resuming",
+        inProgress: false,
+        isCompleted: true,
+      });
+
+      return null;
+    }
+
+    // Update the job to show it's being resumed
+    await updateMirrorJobProgress({
+      jobId: job.id,
+      message: `Resuming job with ${remainingItemIds.length} remaining items`,
+      details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
+      inProgress: true,
+    });
+
+    return {
+      job,
+      remainingItemIds,
+    };
+  } catch (error) {
+    console.error(`Error resuming job ${job.id}:`, error);
+    return null;
+  }
+}
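Taken together, the new `createMirrorJob` fields and `updateMirrorJobProgress` give a batch job a persisted checkpoint trail. A sketch of the intended lifecycle, inferred from the signatures in this hunk (the IDs and the "mirroring" status value are illustrative assumptions; the test file in this compare suggests createMirrorJob resolves to the new job's id):

```typescript
import { v4 as uuidv4 } from "uuid";
import { createMirrorJob, updateMirrorJobProgress } from "@/lib/helpers";

const itemIds = ["repo-1", "repo-2", "repo-3"];

// 1. Record the batch up front so a restart can see what was planned.
const jobId = await createMirrorJob({
  userId: "user-id",
  message: "Mirroring 3 repositories",
  status: "mirroring", // assumes "mirroring" is a valid RepoStatus
  jobType: "mirror",
  batchId: uuidv4(),
  totalItems: itemIds.length,
  itemIds,
  inProgress: true,
});

// 2. Checkpoint after every item; completed_item_ids grows monotonically.
for (const id of itemIds) {
  // ... mirror the repository ...
  await updateMirrorJobProgress({ jobId, completedItemId: id });
}

// 3. Close the job out.
await updateMirrorJobProgress({ jobId, status: "mirrored", isCompleted: true });
```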
src/lib/recovery.ts (new file, 224 lines)

@@ -0,0 +1,224 @@

/**
 * Recovery mechanism for interrupted jobs
 * This module handles detecting and resuming jobs that were interrupted by container restarts
 */

import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
import { db, repositories, organizations } from './db';
import { eq, inArray } from 'drizzle-orm';
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
import type { Repository } from './db/schema';

/**
 * Initialize the recovery system
 * This should be called when the application starts
 */
export async function initializeRecovery() {
  console.log('Initializing recovery system...');

  try {
    // Find interrupted jobs
    const interruptedJobs = await findInterruptedJobs();

    if (interruptedJobs.length === 0) {
      console.log('No interrupted jobs found.');
      return;
    }

    console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);

    // Process each interrupted job
    for (const job of interruptedJobs) {
      const resumeData = await resumeInterruptedJob(job);

      if (!resumeData) {
        console.log(`Job ${job.id} could not be resumed.`);
        continue;
      }

      const { job: updatedJob, remainingItemIds } = resumeData;

      // Handle different job types
      switch (updatedJob.jobType) {
        case 'mirror':
          await recoverMirrorJob(updatedJob, remainingItemIds);
          break;
        case 'sync':
          await recoverSyncJob(updatedJob, remainingItemIds);
          break;
        case 'retry':
          await recoverRetryJob(updatedJob, remainingItemIds);
          break;
        default:
          console.log(`Unknown job type: ${updatedJob.jobType}`);
      }
    }

    console.log('Recovery process completed.');
  } catch (error) {
    console.error('Error during recovery process:', error);
  }
}

/**
 * Recover a mirror job
 */
async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
  console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user
    const [config] = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (!config || !config.configId) {
      throw new Error('Config not found for user');
    }

    // Get repositories to process (inArray matches against the ID list)
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      throw new Error('No repositories found for the remaining item IDs');
    }

    // Create GitHub client
    const octokit = createGitHubClient(config.githubConfig.token);

    // Process repositories with resilience
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare repository data
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse("imported"),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility),
          mirroredLocation: repo.mirroredLocation || "",
        };

        // Mirror the repository based on whether it's in an organization
        if (repo.organization && config.githubConfig.preserveOrgStructure) {
          await mirrorGitHubOrgRepoToGiteaOrg({
            config,
            octokit,
            orgName: repo.organization,
            repository: repoData,
          });
        } else {
          await mirrorGithubRepoToGitea({
            octokit,
            repository: repoData,
            config,
          });
        }

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'mirror',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 3,
        maxRetries: 2,
        retryDelay: 2000,
      }
    );
  } catch (error) {
    console.error(`Error recovering mirror job ${job.id}:`, error);
  }
}

/**
 * Recover a sync job
 */
async function recoverSyncJob(job: any, remainingItemIds: string[]) {
  // Implementation similar to recoverMirrorJob but for sync operations
  console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user
    const [config] = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (!config || !config.configId) {
      throw new Error('Config not found for user');
    }

    // Get repositories to process
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      throw new Error('No repositories found for the remaining item IDs');
    }

    // Process repositories with resilience
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare repository data
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse(repo.status),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility),
        };

        // Sync the repository
        await syncGiteaRepo({
          config,
          repository: repoData,
        });

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'sync',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 5,
        maxRetries: 2,
        retryDelay: 2000,
      }
    );
  } catch (error) {
    console.error(`Error recovering sync job ${job.id}:`, error);
  }
}

/**
 * Recover a retry job
 */
async function recoverRetryJob(job: any, remainingItemIds: string[]) {
  // Implementation similar to recoverMirrorJob but for retry operations
  console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);

  // This would be similar to recoverMirrorJob but with retry-specific logic
  console.log('Retry job recovery not yet implemented');
}
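`processWithResilience` also comes from `./utils/concurrency` and is absent from this compare. Judging by the options recovery.ts passes (userId, jobType, getItemId, getItemName, resumeFromJobId), it plausibly wraps the retry pool with the checkpoint helpers from helpers.ts; a sketch under that assumption:

```typescript
// Sketch only: inferred from the options passed in recovery.ts, not the project's code.
import { updateMirrorJobProgress } from "./helpers";
import { processWithRetry } from "./utils/concurrency"; // as sketched earlier

interface ResilienceOptions<T> {
  userId: string;
  jobType: "mirror" | "sync" | "retry";
  getItemId: (item: T) => string;
  getItemName: (item: T) => string;
  resumeFromJobId: string; // checkpoint into an existing job record
  concurrencyLimit: number;
  maxRetries: number;
  retryDelay: number;
}

export async function processWithResilience<T>(
  items: T[],
  worker: (item: T) => Promise<T>,
  opts: ResilienceOptions<T>
): Promise<void> {
  await processWithRetry(items, async (item) => {
    const result = await worker(item);
    // Persist a checkpoint so a later restart can resume from here.
    await updateMirrorJobProgress({
      jobId: opts.resumeFromJobId,
      completedItemId: opts.getItemId(item),
      message: `Processed ${opts.getItemName(item)}`,
      inProgress: true,
    });
    return result;
  }, {
    concurrencyLimit: opts.concurrencyLimit,
    maxRetries: opts.maxRetries,
    retryDelay: opts.retryDelay,
  });

  // All items done: mark the job complete.
  await updateMirrorJobProgress({
    jobId: opts.resumeFromJobId,
    status: "mirrored",
    message: "Job completed",
    isCompleted: true,
  });
}
```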
110
src/lib/utils.test.ts
Normal file
@@ -0,0 +1,110 @@
import { describe, test, expect } from "bun:test";
import { jsonResponse, formatDate, truncate, safeParse } from "./utils";

describe("jsonResponse", () => {
  test("creates a Response with JSON content", () => {
    const data = { message: "Hello, world!" };
    const response = jsonResponse({ data });

    expect(response).toBeInstanceOf(Response);
    expect(response.status).toBe(200);
    expect(response.headers.get("Content-Type")).toBe("application/json");
  });

  test("uses the provided status code", () => {
    const data = { error: "Not found" };
    const response = jsonResponse({ data, status: 404 });

    expect(response.status).toBe(404);
  });

  test("correctly serializes complex objects", async () => {
    const now = new Date();
    const data = {
      message: "Complex object",
      date: now,
      nested: { foo: "bar" },
      array: [1, 2, 3]
    };

    const response = jsonResponse({ data });
    const responseBody = await response.json();

    expect(responseBody).toEqual({
      message: "Complex object",
      date: now.toISOString(),
      nested: { foo: "bar" },
      array: [1, 2, 3]
    });
  });
});

describe("formatDate", () => {
  test("formats a date object", () => {
    const date = new Date("2023-01-15T12:30:45Z");
    const formatted = formatDate(date);

    // The exact format might depend on the locale, so we'll check for parts
    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("formats a date string", () => {
    const dateStr = "2023-01-15T12:30:45Z";
    const formatted = formatDate(dateStr);

    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("returns 'Never' for null or undefined", () => {
    expect(formatDate(null)).toBe("Never");
    expect(formatDate(undefined)).toBe("Never");
  });
});

describe("truncate", () => {
  test("truncates a string that exceeds the length", () => {
    const str = "This is a long string that needs truncation";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("This is a ...");
    expect(truncated.length).toBe(13); // 10 chars + "..."
  });

  test("does not truncate a string that is shorter than the length", () => {
    const str = "Short";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("Short");
  });

  test("handles empty strings", () => {
    expect(truncate("", 10)).toBe("");
  });
});

describe("safeParse", () => {
  test("parses valid JSON strings", () => {
    const jsonStr = '{"name":"John","age":30}';
    const parsed = safeParse(jsonStr);

    expect(parsed).toEqual({ name: "John", age: 30 });
  });

  test("returns undefined for invalid JSON strings", () => {
    const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
    const parsed = safeParse(invalidJson);

    expect(parsed).toBeUndefined();
  });

  test("returns the original value for non-string inputs", () => {
    const obj = { name: "John", age: 30 };
    const parsed = safeParse(obj);

    expect(parsed).toBe(obj);
  });
});
167
src/lib/utils/concurrency.test.ts
Normal file
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";

describe("processInParallel", () => {
  test("processes items in parallel with concurrency control", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    // Create a mock function to track execution
    const processItem = mock(async (item: number) => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return item * 2;
    });

    // Create a mock progress callback
    const onProgress = mock((completed: number, total: number, result?: number) => {
      // Progress tracking
    });

    // Process the items with a concurrency limit of 3
    const results = await processInParallel(
      items,
      processItem,
      3,
      onProgress
    );

    // Verify results
    expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

    // Verify that processItem was called for each item
    expect(processItem).toHaveBeenCalledTimes(10);

    // Verify that onProgress was called for each item
    expect(onProgress).toHaveBeenCalledTimes(10);

    // Verify the last call to onProgress had the correct completed/total values
    expect(onProgress.mock.calls[9][0]).toBe(10); // completed
    expect(onProgress.mock.calls[9][1]).toBe(10); // total
  });

  test("handles errors in processing", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5];

    // Create a mock function that throws an error for item 3
    const processItem = mock(async (item: number) => {
      if (item === 3) {
        throw new Error("Test error");
      }
      return item * 2;
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items
      const results = await processInParallel(items, processItem);

      // Verify results (should have 4 items, missing the one that errored)
      expect(results).toEqual([2, 4, 8, 10]);

      // Verify that processItem was called for each item
      expect(processItem).toHaveBeenCalledTimes(5);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});

describe("processWithRetry", () => {
  test("retries failed operations", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3];

    // Create a counter to track retry attempts
    const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };

    // Create a mock function that fails on first attempt for item 2
    const processItem = mock(async (item: number) => {
      attemptCounts[item]++;

      if (item === 2 && attemptCounts[item] === 1) {
        throw new Error("Temporary error");
      }

      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Process the items with retry
    const results = await processWithRetry(items, processItem, {
      maxRetries: 2,
      retryDelay: 10,
      onRetry,
    });

    // Verify results
    expect(results).toEqual([2, 4, 6]);

    // Verify that item 2 was retried once
    expect(attemptCounts[1]).toBe(1); // No retries
    expect(attemptCounts[2]).toBe(2); // One retry
    expect(attemptCounts[3]).toBe(1); // No retries

    // Verify that onRetry was called once
    expect(onRetry).toHaveBeenCalledTimes(1);
    expect(onRetry.mock.calls[0][0]).toBe(2); // item
    expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
  });

  test("gives up after max retries", async () => {
    // Create an array of numbers to process
    const items = [1, 2];

    // Create a mock function that always fails for item 2
    const processItem = mock(async (item: number) => {
      if (item === 2) {
        throw new Error("Persistent error");
      }
      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items with retry
      const results = await processWithRetry(items, processItem, {
        maxRetries: 2,
        retryDelay: 10,
        onRetry,
      });

      // Verify results (should have 1 item, missing the one that errored)
      expect(results).toEqual([2]);

      // Verify that onRetry was called twice (for 2 retry attempts)
      expect(onRetry).toHaveBeenCalledTimes(2);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
292
src/lib/utils/concurrency.ts
Normal file
@@ -0,0 +1,292 @@
/**
 * Utility for processing items in parallel with concurrency control
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param concurrencyLimit Maximum number of concurrent operations
 * @param onProgress Optional callback for progress updates
 * @returns Promise that resolves when all items are processed
 */
export async function processInParallel<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  concurrencyLimit: number = 5,
  onProgress?: (completed: number, total: number, result?: R) => void
): Promise<R[]> {
  const results: R[] = [];
  let completed = 0;
  const total = items.length;

  // Process items in batches to control concurrency
  for (let i = 0; i < total; i += concurrencyLimit) {
    const batch = items.slice(i, i + concurrencyLimit);

    const batchPromises = batch.map(async (item) => {
      try {
        const result = await processItem(item);
        completed++;

        if (onProgress) {
          onProgress(completed, total, result);
        }

        return result;
      } catch (error) {
        completed++;

        if (onProgress) {
          onProgress(completed, total);
        }

        throw error;
      }
    });

    // Wait for the current batch to complete before starting the next batch
    const batchResults = await Promise.allSettled(batchPromises);

    // Process results and handle errors
    for (const result of batchResults) {
      if (result.status === 'fulfilled') {
        results.push(result.value);
      } else {
        console.error('Error processing item:', result.reason);
      }
    }
  }

  return results;
}

/**
 * Utility for processing items in parallel with automatic retry for failed operations
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param options Configuration options
 * @returns Promise that resolves when all items are processed
 */
export async function processWithRetry<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    jobId?: string; // Optional job ID for checkpointing
    getItemId?: (item: T) => string; // Function to get a unique ID for each item
    onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
    checkpointInterval?: number; // How many items to process before checkpointing
  } = {}
): Promise<R[]> {
  const {
    concurrencyLimit = 5,
    maxRetries = 3,
    retryDelay = 1000,
    onProgress,
    onRetry,
    jobId,
    getItemId,
    onCheckpoint,
    checkpointInterval = 1 // Default to checkpointing after each item
  } = options;

  // Track checkpoint counter
  let itemsProcessedSinceLastCheckpoint = 0;

  // Wrap the process function with retry logic
  const processWithRetryLogic = async (item: T): Promise<R> => {
    let lastError: Error | null = null;

    for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
      try {
        const result = await processItem(item);

        // Handle checkpointing if enabled
        if (jobId && getItemId && onCheckpoint) {
          const itemId = getItemId(item);
          itemsProcessedSinceLastCheckpoint++;

          // Checkpoint based on the interval
          if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
            await onCheckpoint(jobId, itemId);
            itemsProcessedSinceLastCheckpoint = 0;
          }
        }

        return result;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));

        if (attempt <= maxRetries) {
          if (onRetry) {
            onRetry(item, lastError, attempt);
          }

          // Exponential backoff
          const delay = retryDelay * Math.pow(2, attempt - 1);
          await new Promise(resolve => setTimeout(resolve, delay));
        } else {
          throw lastError;
        }
      }
    }

    // This should never be reached due to the throw in the catch block
    throw lastError || new Error('Unknown error occurred');
  };

  const results = await processInParallel(
    items,
    processWithRetryLogic,
    concurrencyLimit,
    onProgress
  );

  // Final checkpoint if there are remaining items since the last checkpoint
  if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
    // We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
    await onCheckpoint(jobId, 'final');
  }

  return results;
}

/**
 * Process items in parallel with resilience to container restarts
 * This version supports resuming from a previous checkpoint
 */
export async function processWithResilience<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    userId: string; // Required for creating mirror jobs
    jobType: "mirror" | "sync" | "retry";
    getItemId: (item: T) => string; // Required function to get a unique ID for each item
    getItemName: (item: T) => string; // Required function to get a display name for each item
    checkpointInterval?: number;
    resumeFromJobId?: string; // Optional job ID to resume from
  }
): Promise<R[]> {
  const {
    userId,
    jobType,
    getItemId,
    getItemName,
    resumeFromJobId,
    checkpointInterval = 5,
    ...otherOptions
  } = options;

  // Import helpers for job management
  const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Define the checkpoint function
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const itemName = items.find(item => getItemId(item) === completedItemId)
      ? getItemName(items.find(item => getItemId(item) === completedItemId)!)
      : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    return results;
  } catch (error) {
    // Mark the job as failed
    await updateMirrorJobProgress({
      jobId,
      status: "failed",
      message: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: true,
    });

    throw error;
  }
}
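For reference, a minimal usage sketch of processWithRetry as added above. The option names come from the signatures in this diff; the URLs and the fetch-based work function are hypothetical stand-ins, not part of the commit:

import { processWithRetry } from "@/lib/utils/concurrency";

// Hypothetical work items; the helpers are generic over the item type.
const urls = ["https://example.com/a", "https://example.com/b"];

const bodies = await processWithRetry(
  urls,
  async (url) => {
    const res = await fetch(url);
    if (!res.ok) throw new Error(`HTTP ${res.status} for ${url}`);
    return res.text();
  },
  {
    concurrencyLimit: 2, // at most two fetches in flight at once
    maxRetries: 2,       // each failure is retried twice with exponential backoff
    retryDelay: 500,
    onRetry: (url, error, attempt) =>
      console.log(`Retry ${attempt} for ${url}: ${error.message}`),
  }
);

processWithResilience wraps the same call with job bookkeeping: it records all item IDs up front, checkpoints completed IDs through updateMirrorJobProgress, and, when given resumeFromJobId, filters out already-completed items before processing.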
22
src/middleware.ts
Normal file
@@ -0,0 +1,22 @@
import { defineMiddleware } from 'astro:middleware';
import { initializeRecovery } from './lib/recovery';

// Flag to track if recovery has been initialized
let recoveryInitialized = false;

export const onRequest = defineMiddleware(async (context, next) => {
  // Initialize recovery system only once when the server starts
  if (!recoveryInitialized) {
    console.log('Initializing recovery system from middleware...');
    try {
      await initializeRecovery();
      console.log('Recovery system initialized successfully');
    } catch (error) {
      console.error('Error initializing recovery system:', error);
    }
    recoveryInitialized = true;
  }

  // Continue with the request
  return next();
});
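One caveat with the boolean-flag pattern above: the flag is only set after initializeRecovery() resolves, so two requests arriving together can both see it unset and run initialization twice. A common alternative is to memoize the in-flight promise rather than a boolean; a sketch, not part of this diff:

import { defineMiddleware } from 'astro:middleware';
import { initializeRecovery } from './lib/recovery';

// Memoize the in-flight initialization so concurrent first requests share it.
let recoveryPromise: Promise<void> | null = null;

export const onRequest = defineMiddleware(async (_context, next) => {
  recoveryPromise ??= initializeRecovery().catch((error) => {
    console.error('Error initializing recovery system:', error);
  });
  await recoveryPromise;
  return next();
});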
187
src/pages/api/gitea/test-connection.test.ts
Normal file
@@ -0,0 +1,187 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import axios from "axios";

// Mock the POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing URL or token
  if (!body.url || !body.token) {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Gitea URL and token are required"
      }),
      { status: 400 }
    );
  }

  // Check for username mismatch
  if (body.username && body.username !== "giteauser") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Token belongs to giteauser, not " + body.username
      }),
      { status: 400 }
    );
  }

  // Handle invalid token
  if (body.token === "invalid-token") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Invalid Gitea token"
      }),
      { status: 401 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Successfully connected to Gitea as giteauser",
      user: {
        login: "giteauser",
        name: "Gitea User",
        avatar_url: "https://gitea.example.com/avatar.png"
      }
    }),
    { status: 200 }
  );
});

// Mock the module
mock.module("./test-connection", () => {
  return {
    POST: mockPOST
  };
});

// Import after mocking
import { POST } from "./test-connection";

describe("Gitea Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if url or token is missing", async () => {
    // Test missing URL
    const requestMissingUrl = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const responseMissingUrl = await POST({ request: requestMissingUrl } as any);

    expect(responseMissingUrl.status).toBe(400);

    const dataMissingUrl = await responseMissingUrl.json();
    expect(dataMissingUrl.success).toBe(false);
    expect(dataMissingUrl.message).toBe("Gitea URL and token are required");

    // Test missing token
    const requestMissingToken = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com"
      })
    });

    const responseMissingToken = await POST({ request: requestMissingToken } as any);

    expect(responseMissingToken.status).toBe(400);

    const dataMissingToken = await responseMissingToken.json();
    expect(dataMissingToken.success).toBe(false);
    expect(dataMissingToken.message).toBe("Gitea URL and token are required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to Gitea as giteauser");
    expect(data.user).toEqual({
      login: "giteauser",
      name: "Gitea User",
      avatar_url: "https://gitea.example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to giteauser, not differentuser");
  });

  test("handles authentication errors", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(401);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Invalid Gitea token");
  });
});
133
src/pages/api/github/test-connection.test.ts
Normal file
@@ -0,0 +1,133 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { POST } from "./test-connection";
import { Octokit } from "@octokit/rest";

// Mock the Octokit class
mock.module("@octokit/rest", () => {
  return {
    Octokit: mock(function() {
      return {
        users: {
          getAuthenticated: mock(() => Promise.resolve({
            data: {
              login: "testuser",
              name: "Test User",
              avatar_url: "https://example.com/avatar.png"
            }
          }))
        }
      };
    })
  };
});

describe("GitHub Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if token is missing", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({})
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("GitHub token is required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to GitHub as testuser");
    expect(data.user).toEqual({
      login: "testuser",
      name: "Test User",
      avatar_url: "https://example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to testuser, not differentuser");
  });

  test("handles authentication errors", async () => {
    // Mock Octokit to throw an error
    mock.module("@octokit/rest", () => {
      return {
        Octokit: mock(function() {
          return {
            users: {
              getAuthenticated: mock(() => Promise.reject(new Error("Bad credentials")))
            }
          };
        })
      };
    });

    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(500);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toContain("Bad credentials");
  });
});
154
src/pages/api/health.test.ts
Normal file
@@ -0,0 +1,154 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { GET } from "./health";
import * as dbModule from "@/lib/db";
import os from "os";

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      select: () => ({
        from: () => ({
          limit: () => Promise.resolve([{ test: 1 }])
        })
      })
    }
  };
});

// Mock the os functions individually
const originalPlatform = os.platform;
const originalVersion = os.version;
const originalArch = os.arch;
const originalTotalmem = os.totalmem;
const originalFreemem = os.freemem;

describe("Health API Endpoint", () => {
  beforeEach(() => {
    // Mock os functions
    os.platform = mock(() => "test-platform");
    os.version = mock(() => "test-version");
    os.arch = mock(() => "test-arch");
    os.totalmem = mock(() => 16 * 1024 * 1024 * 1024); // 16GB
    os.freemem = mock(() => 8 * 1024 * 1024 * 1024); // 8GB

    // Mock process.memoryUsage
    process.memoryUsage = mock(() => ({
      rss: 100 * 1024 * 1024, // 100MB
      heapTotal: 50 * 1024 * 1024, // 50MB
      heapUsed: 30 * 1024 * 1024, // 30MB
      external: 10 * 1024 * 1024, // 10MB
      arrayBuffers: 5 * 1024 * 1024, // 5MB
    }));

    // Mock process.env
    process.env.npm_package_version = "2.1.0";
  });

  afterEach(() => {
    // Restore original os functions
    os.platform = originalPlatform;
    os.version = originalVersion;
    os.arch = originalArch;
    os.totalmem = originalTotalmem;
    os.freemem = originalFreemem;
  });

  test("returns a successful health check response", async () => {
    const response = await GET({ request: new Request("http://localhost/api/health") } as any);

    expect(response.status).toBe(200);

    const data = await response.json();

    // Check the structure of the response
    expect(data.status).toBe("ok");
    expect(data.timestamp).toBeDefined();
    expect(data.version).toBe("2.1.0");

    // Check database status
    expect(data.database.connected).toBe(true);

    // Check system info
    expect(data.system.os.platform).toBe("test-platform");
    expect(data.system.os.version).toBe("test-version");
    expect(data.system.os.arch).toBe("test-arch");

    // Check memory info
    expect(data.system.memory.rss).toBe("100 MB");
    expect(data.system.memory.heapTotal).toBe("50 MB");
    expect(data.system.memory.heapUsed).toBe("30 MB");
    expect(data.system.memory.systemTotal).toBe("16 GB");
    expect(data.system.memory.systemFree).toBe("8 GB");

    // Check uptime
    expect(data.system.uptime.startTime).toBeDefined();
    expect(data.system.uptime.uptimeMs).toBeGreaterThanOrEqual(0);
    expect(data.system.uptime.formatted).toBeDefined();
  });

  test("handles database connection failures", async () => {
    // Mock database failure
    mock.module("@/lib/db", () => {
      return {
        db: {
          select: () => ({
            from: () => ({
              limit: () => Promise.reject(new Error("Database connection error"))
            })
          })
        }
      };
    });

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should still return 200 even with DB error, as the service itself is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should still be ok since the service is running
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
      expect(data.database.message).toBe("Database connection error");
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });

  test("handles database connection failures with status 200", async () => {
    // The health endpoint should return 200 even if the database is down,
    // as the service itself is still running

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should return 200 as the service is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should be ok
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
src/pages/api/health.ts
@@ -3,15 +3,25 @@ import { jsonResponse } from "@/lib/utils";
 import { db } from "@/lib/db";
 import { ENV } from "@/lib/config";
 import os from "os";
+import axios from "axios";

 // Track when the server started
 const serverStartTime = new Date();

+// Cache for the latest version to avoid frequent GitHub API calls
+interface VersionCache {
+  latestVersion: string;
+  timestamp: number;
+}
+
+let versionCache: VersionCache | null = null;
+const CACHE_TTL = 3600000; // 1 hour in milliseconds
+
 export const GET: APIRoute = async () => {
   try {
     // Check database connection by running a simple query
     const dbStatus = await checkDatabaseConnection();

     // Get system information
     const systemInfo = {
       uptime: getUptime(),
@@ -23,28 +33,39 @@ export const GET: APIRoute = async () => {
       },
       env: ENV.NODE_ENV,
     };

+    // Get current and latest versions
+    const currentVersion = process.env.npm_package_version || "unknown";
+    const latestVersion = await checkLatestVersion();
+
     // Build response
     const healthData = {
       status: "ok",
       timestamp: new Date().toISOString(),
-      version: process.env.npm_package_version || "unknown",
+      version: currentVersion,
+      latestVersion: latestVersion,
+      updateAvailable: latestVersion !== "unknown" &&
+        currentVersion !== "unknown" &&
+        latestVersion !== currentVersion,
       database: dbStatus,
       system: systemInfo,
     };

     return jsonResponse({
       data: healthData,
       status: 200,
     });
   } catch (error) {
     console.error("Health check failed:", error);

     return jsonResponse({
       data: {
         status: "error",
         timestamp: new Date().toISOString(),
         error: error instanceof Error ? error.message : "Unknown error",
+        version: process.env.npm_package_version || "unknown",
+        latestVersion: "unknown",
+        updateAvailable: false,
       },
       status: 503, // Service Unavailable
     });
@@ -58,14 +79,14 @@ async function checkDatabaseConnection() {
   try {
     // Run a simple query to check if the database is accessible
     const result = await db.select({ test: sql`1` }).from(sql`sqlite_master`).limit(1);

     return {
       connected: true,
       message: "Database connection successful",
     };
   } catch (error) {
     console.error("Database connection check failed:", error);

     return {
       connected: false,
       message: error instanceof Error ? error.message : "Database connection failed",
@@ -79,13 +100,13 @@ async function checkDatabaseConnection() {
 function getUptime() {
   const now = new Date();
   const uptimeMs = now.getTime() - serverStartTime.getTime();

   // Convert to human-readable format
   const seconds = Math.floor(uptimeMs / 1000);
   const minutes = Math.floor(seconds / 60);
   const hours = Math.floor(minutes / 60);
   const days = Math.floor(hours / 24);

   return {
     startTime: serverStartTime.toISOString(),
     uptimeMs,
@@ -98,7 +119,7 @@ function getUptime() {
  */
 function getMemoryUsage() {
   const memoryUsage = process.memoryUsage();

   return {
     rss: formatBytes(memoryUsage.rss),
     heapTotal: formatBytes(memoryUsage.heapTotal),
@@ -114,13 +135,45 @@ function getMemoryUsage() {
  */
 function formatBytes(bytes: number): string {
   if (bytes === 0) return '0 Bytes';

   const k = 1024;
   const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
   const i = Math.floor(Math.log(bytes) / Math.log(k));

   return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
 }
+
+/**
+ * Check for the latest version from GitHub releases
+ */
+async function checkLatestVersion(): Promise<string> {
+  // Return cached version if available and not expired
+  if (versionCache && (Date.now() - versionCache.timestamp) < CACHE_TTL) {
+    return versionCache.latestVersion;
+  }
+
+  try {
+    // Fetch the latest release from GitHub
+    const response = await axios.get(
+      'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
+      { headers: { 'Accept': 'application/vnd.github.v3+json' } }
+    );
+
+    // Extract version from tag_name (remove 'v' prefix if present)
+    const latestVersion = response.data.tag_name.replace(/^v/, '');
+
+    // Update cache
+    versionCache = {
+      latestVersion,
+      timestamp: Date.now()
+    };
+
+    return latestVersion;
+  } catch (error) {
+    console.error('Failed to check for latest version:', error);
+    return 'unknown';
+  }
+}
+
 // Import sql tag for raw SQL queries
 import { sql } from "drizzle-orm";
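A side note on the updateAvailable logic added above: plain inequality also reports an update when the running build is newer than the latest release (for example, a local pre-release build). If that matters, a segment-wise numeric comparison is a small, dependency-free alternative; a sketch assuming plain dotted numeric versions, not part of this commit:

// Returns true only if `latest` is strictly newer than `current`,
// comparing dotted numeric segments (e.g. "2.10.0" > "2.9.1").
function isNewerVersion(latest: string, current: string): boolean {
  const a = latest.split(".").map(Number);
  const b = current.split(".").map(Number);
  for (let i = 0; i < Math.max(a.length, b.length); i++) {
    const x = a[i] ?? 0;
    const y = b[i] ?? 0;
    if (x !== y) return x > y;
  }
  return false;
}

// e.g. updateAvailable: isNewerVersion(latestVersion, currentVersion)
console.log(isNewerVersion("2.10.0", "2.9.1")); // true
console.log(isNewerVersion("2.1.0", "2.1.0"));  // false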
109
src/pages/api/job/mirror-org.test.ts
Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or organizationIds
  if (!body.userId || !body.organizationIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or organizationIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Organization mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Organization Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 400 if organizationIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 200 and starts mirroring organizations", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Organization mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
src/pages/api/job/mirror-org.ts
@@ -6,6 +6,8 @@ import { createGitHubClient } from "@/lib/github";
 import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
 import { repoStatusEnum } from "@/types/Repository";
 import { type MembershipRole } from "@/types/organizations";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -61,31 +63,72 @@
       );
     }

-    // Fire async mirroring without blocking response
+    // Fire async mirroring without blocking response, using parallel processing with resilience
     setTimeout(async () => {
-      for (const org of orgs) {
-        if (!config.githubConfig.token) {
-          throw new Error("GitHub token is missing in config.");
-        }
+      if (!config.githubConfig.token) {
+        throw new Error("GitHub token is missing in config.");
+      }

-        const octokit = createGitHubClient(config.githubConfig.token);
+      // Create a single Octokit instance to be reused
+      const octokit = createGitHubClient(config.githubConfig.token);

-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      // Using a lower concurrency for organizations since each org might contain many repos
+      const CONCURRENCY_LIMIT = 2;
+
+      // Generate a batch ID to group related organizations
+      const batchId = uuidv4();
+
+      // Process organizations in parallel with resilience to container restarts
+      await processWithResilience(
+        orgs,
+        async (org) => {
+          // Prepare organization data
+          const orgData = {
+            ...org,
+            status: repoStatusEnum.parse("imported"),
+            membershipRole: org.membershipRole as MembershipRole,
+            lastMirrored: org.lastMirrored ?? undefined,
+            errorMessage: org.errorMessage ?? undefined,
+          };
+
+          // Log the start of mirroring
+          console.log(`Starting mirror for organization: ${org.name}`);
+
+          // Mirror the organization
           await mirrorGitHubOrgToGitea({
             config,
             octokit,
-            organization: {
-              ...org,
-              status: repoStatusEnum.parse("imported"),
-              membershipRole: org.membershipRole as MembershipRole,
-              lastMirrored: org.lastMirrored ?? undefined,
-              errorMessage: org.errorMessage ?? undefined,
-            },
+            organization: orgData,
           });
-        } catch (error) {
-          console.error(`Mirror failed for organization ${org.name}:`, error);
+
+          return org;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "mirror",
+          batchId,
+          getItemId: (org) => org.id,
+          getItemName: (org) => org.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 3000,
+          checkpointInterval: 1, // Checkpoint after each organization
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Organization mirroring progress: ${percentComplete}% (${completed}/${total})`);

+            if (result) {
+              console.log(`Successfully mirrored organization: ${result.name}`);
+            }
+          },
+          onRetry: (org, error, attempt) => {
+            console.log(`Retrying organization ${org.name} (attempt ${attempt}): ${error.message}`);
+          }
         }
-      }
+      );
+
+      console.log("All organization mirroring tasks completed");
     }, 0);

     const responsePayload: MirrorOrgResponse = {
109
src/pages/api/job/mirror-repo.test.ts
Normal file
@@ -0,0 +1,109 @@
+import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
+
+// Create a mock POST function
+const mockPOST = mock(async ({ request }) => {
+  const body = await request.json();
+
+  // Check for missing userId or repositoryIds
+  if (!body.userId || !body.repositoryIds) {
+    return new Response(
+      JSON.stringify({
+        error: "Missing userId or repositoryIds."
+      }),
+      { status: 400 }
+    );
+  }
+
+  // Success case
+  return new Response(
+    JSON.stringify({
+      success: true,
+      message: "Repository mirroring started",
+      batchId: "test-batch-id"
+    }),
+    { status: 200 }
+  );
+});
+
+// Create a mock module
+const mockModule = {
+  POST: mockPOST
+};
+
+describe("Repository Mirroring API", () => {
+  // Mock console.log and console.error to prevent test output noise
+  let originalConsoleLog: typeof console.log;
+  let originalConsoleError: typeof console.error;
+
+  beforeEach(() => {
+    originalConsoleLog = console.log;
+    originalConsoleError = console.error;
+    console.log = mock(() => {});
+    console.error = mock(() => {});
+  });
+
+  afterEach(() => {
+    console.log = originalConsoleLog;
+    console.error = originalConsoleError;
+  });
+
+  test("returns 400 if userId is missing", async () => {
+    const request = new Request("http://localhost/api/job/mirror-repo", {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify({
+        repositoryIds: ["repo-id-1", "repo-id-2"]
+      })
+    });
+
+    const response = await mockModule.POST({ request } as any);
+
+    expect(response.status).toBe(400);
+
+    const data = await response.json();
+    expect(data.error).toBe("Missing userId or repositoryIds.");
+  });
+
+  test("returns 400 if repositoryIds is missing", async () => {
+    const request = new Request("http://localhost/api/job/mirror-repo", {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify({
+        userId: "user-id"
+      })
+    });
+
+    const response = await mockModule.POST({ request } as any);
+
+    expect(response.status).toBe(400);
+
+    const data = await response.json();
+    expect(data.error).toBe("Missing userId or repositoryIds.");
+  });
+
+  test("returns 200 and starts mirroring repositories", async () => {
+    const request = new Request("http://localhost/api/job/mirror-repo", {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify({
+        userId: "user-id",
+        repositoryIds: ["repo-id-1", "repo-id-2"]
+      })
+    });
+
+    const response = await mockModule.POST({ request } as any);
+
+    expect(response.status).toBe(200);
+
+    const data = await response.json();
+    expect(data.success).toBe(true);
+    expect(data.message).toBe("Repository mirroring started");
+    expect(data.batchId).toBe("test-batch-id");
+  });
+});
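Note that this suite exercises a hand-rolled mockModule rather than the real route handler, so it validates the expected request/response contract, not the handler itself. A variant that stubs the heavy dependencies and imports the actual module might look like the sketch below; the stubbed module path and the stub's shape are assumptions based on the imports shown in this diff.

// Hypothetical sketch: stub dependencies first, then import the real route.
import { test, expect, mock } from "bun:test";

mock.module("@/lib/utils/concurrency", () => ({
  // Resolve immediately so the test never performs real mirroring.
  processWithResilience: mock(async () => []),
}));

// Importing after mock.module ensures the route sees the stubbed module.
const { POST } = await import("./mirror-repo");

test("real handler rejects a request without repositoryIds", async () => {
  const request = new Request("http://localhost/api/job/mirror-repo", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ userId: "user-id" }),
  });
  const response = await POST({ request } as any);
  expect(response.status).toBe(400);
});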
src/pages/api/job/mirror-repo.ts
@@ -8,6 +8,8 @@ import {
   mirrorGitHubOrgRepoToGiteaOrg,
 } from "@/lib/gitea";
 import { createGitHubClient } from "@/lib/github";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -63,52 +65,83 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Start async mirroring in background
+    // Start async mirroring in background with parallel processing and resilience
     setTimeout(async () => {
-      for (const repo of repos) {
-        if (!config.githubConfig.token) {
-          throw new Error("GitHub token is missing.");
-        }
+      if (!config.githubConfig.token) {
+        throw new Error("GitHub token is missing.");
+      }

-        const octokit = createGitHubClient(config.githubConfig.token);
+      // Create a single Octokit instance to be reused
+      const octokit = createGitHubClient(config.githubConfig.token);

-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 3;
+
+      // Generate a batch ID to group related repositories
+      const batchId = uuidv4();
+
+      // Process repositories in parallel with resilience to container restarts
+      await processWithResilience(
+        repos,
+        async (repo) => {
+          // Prepare repository data
+          const repoData = {
+            ...repo,
+            status: repoStatusEnum.parse("imported"),
+            organization: repo.organization ?? undefined,
+            lastMirrored: repo.lastMirrored ?? undefined,
+            errorMessage: repo.errorMessage ?? undefined,
+            forkedFrom: repo.forkedFrom ?? undefined,
+            visibility: repositoryVisibilityEnum.parse(repo.visibility),
+            mirroredLocation: repo.mirroredLocation || "",
+          };
+
+          // Log the start of mirroring
+          console.log(`Starting mirror for repository: ${repo.name}`);
+
+          // Mirror the repository based on whether it's in an organization
           if (repo.organization && config.githubConfig.preserveOrgStructure) {
             await mirrorGitHubOrgRepoToGiteaOrg({
               config,
               octokit,
               orgName: repo.organization,
-              repository: {
-                ...repo,
-                status: repoStatusEnum.parse("imported"),
-                organization: repo.organization ?? undefined,
-                lastMirrored: repo.lastMirrored ?? undefined,
-                errorMessage: repo.errorMessage ?? undefined,
-                forkedFrom: repo.forkedFrom ?? undefined,
-                visibility: repositoryVisibilityEnum.parse(repo.visibility),
-                mirroredLocation: repo.mirroredLocation || "",
-              },
+              repository: repoData,
             });
           } else {
             await mirrorGithubRepoToGitea({
               octokit,
-              repository: {
-                ...repo,
-                status: repoStatusEnum.parse("imported"),
-                organization: repo.organization ?? undefined,
-                lastMirrored: repo.lastMirrored ?? undefined,
-                errorMessage: repo.errorMessage ?? undefined,
-                forkedFrom: repo.forkedFrom ?? undefined,
-                visibility: repositoryVisibilityEnum.parse(repo.visibility),
-                mirroredLocation: repo.mirroredLocation || "",
-              },
+              repository: repoData,
               config,
             });
           }
-        } catch (error) {
-          console.error(`Mirror failed for repo ${repo.name}:`, error);
-        }
-      }
+          return repo;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "mirror",
+          batchId,
+          getItemId: (repo) => repo.id,
+          getItemName: (repo) => repo.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          checkpointInterval: 1, // Checkpoint after each repository
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Mirroring progress: ${percentComplete}% (${completed}/${total})`);
+            if (result) {
+              console.log(`Successfully mirrored repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository mirroring tasks completed");
     }, 0);

     const responsePayload: MirrorRepoResponse = {
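The CONCURRENCY_LIMIT above caps how many repositories are mirrored simultaneously, which keeps the route from hammering the GitHub and Gitea APIs. As an illustration of the general technique, here is a minimal, self-contained limiter; it is only a sketch, not the code behind processWithResilience.

// Minimal bounded-concurrency sketch (illustrative only).
async function mapWithLimit<T, R>(
  items: T[],
  limit: number,
  worker: (item: T) => Promise<R>,
): Promise<R[]> {
  const results: R[] = new Array(items.length);
  let next = 0;

  // Each runner claims the next unclaimed index, so at most `limit`
  // workers are in flight at any moment.
  const runners = Array.from(
    { length: Math.min(limit, items.length) },
    async () => {
      while (next < items.length) {
        const i = next++; // safe: single-threaded event loop, no await before increment
        results[i] = await worker(items[i]);
      }
    },
  );

  await Promise.all(runners);
  return results;
}

// Example: mirror at most 3 repos at a time.
// await mapWithLimit(repos, 3, (repo) => mirrorOne(repo));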
src/pages/api/job/retry-repo.ts
@@ -10,6 +10,8 @@ import {
 import { createGitHubClient } from "@/lib/github";
 import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
 import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
+import { processWithRetry } from "@/lib/utils/concurrency";
+import { createMirrorJob } from "@/lib/helpers";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -65,10 +67,21 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Start background retry
+    // Start background retry with parallel processing
     setTimeout(async () => {
-      for (const repo of repos) {
-        try {
+      // Create a single Octokit instance to be reused if needed
+      const octokit = config.githubConfig.token
+        ? createGitHubClient(config.githubConfig.token)
+        : null;
+
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 3;
+
+      // Process repositories in parallel with retry capability
+      await processWithRetry(
+        repos,
+        async (repo) => {
+          // Prepare repository data
           const visibility = repositoryVisibilityEnum.parse(repo.visibility);
           const status = repoStatusEnum.parse(repo.status);
           const repoData = {
@@ -81,6 +94,20 @@ export const POST: APIRoute = async ({ request }) => {
             forkedFrom: repo.forkedFrom ?? undefined,
           };

+          // Log the start of retry operation
+          console.log(`Starting retry for repository: ${repo.name}`);
+
+          // Create a mirror job entry to track progress
+          await createMirrorJob({
+            userId: config.userId || "",
+            repositoryId: repo.id,
+            repositoryName: repo.name,
+            message: `Started retry operation for repository: ${repo.name}`,
+            details: `Repository ${repo.name} is now in the retry queue.`,
+            status: "imported",
+          });
+
+          // Determine if the repository exists in Gitea
           let owner = getGiteaRepoOwner({
             config,
             repository: repoData,
@@ -93,16 +120,21 @@ export const POST: APIRoute = async ({ request }) => {
           });

           if (present) {
+            // If the repository exists, sync it
             await syncGiteaRepo({ config, repository: repoData });
             console.log(`Synced existing repo: ${repo.name}`);
           } else {
+            // If the repository doesn't exist, mirror it
             if (!config.githubConfig.token) {
               throw new Error("GitHub token is missing.");
             }
+
+            if (!octokit) {
+              throw new Error("Octokit client is not initialized.");
+            }
+
             console.log(`Importing repo: ${repo.name} ${owner}`);

-            const octokit = createGitHubClient(config.githubConfig.token);
             if (repo.organization && config.githubConfig.preserveOrgStructure) {
               await mirrorGitHubOrgRepoToGiteaOrg({
                 config,
@@ -124,10 +156,28 @@ export const POST: APIRoute = async ({ request }) => {
               });
             }
           }
-        } catch (err) {
-          console.error(`Failed to retry repo ${repo.name}:`, err);
-        }
-      }
+          return repo;
+        },
+        {
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Retry progress: ${percentComplete}% (${completed}/${total})`);
+            if (result) {
+              console.log(`Successfully processed repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository retry tasks completed");
     }, 0);

     const responsePayload: RetryRepoResponse = {
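maxRetries and retryDelay above give each repository a couple of extra attempts with a pause in between. A compact sketch of those semantics follows; it is illustrative only, the real logic lives in processWithRetry.

// Illustrative retry wrapper, not the code in @/lib/utils/concurrency.
async function withRetry<T>(
  fn: () => Promise<T>,
  maxRetries: number,
  retryDelay: number,
  onRetry?: (error: Error, attempt: number) => void,
): Promise<T> {
  let lastError: Error = new Error("unreachable");
  // maxRetries counts retries, so the item gets maxRetries + 1 attempts total.
  for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err instanceof Error ? err : new Error(String(err));
      if (attempt > maxRetries) break;
      onRetry?.(lastError, attempt);
      // Wait before the next attempt.
      await new Promise((resolve) => setTimeout(resolve, retryDelay));
    }
  }
  throw lastError;
}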
src/pages/api/job/sync-repo.ts
@@ -5,6 +5,8 @@ import { eq, inArray } from "drizzle-orm";
 import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
 import { syncGiteaRepo } from "@/lib/gitea";
 import type { SyncRepoResponse } from "@/types/sync";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -60,26 +62,65 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Start async mirroring in background
+    // Start async mirroring in background with parallel processing and resilience
     setTimeout(async () => {
-      for (const repo of repos) {
-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 5;
+
+      // Generate a batch ID to group related repositories
+      const batchId = uuidv4();
+
+      // Process repositories in parallel with resilience to container restarts
+      await processWithResilience(
+        repos,
+        async (repo) => {
+          // Prepare repository data
+          const repoData = {
+            ...repo,
+            status: repoStatusEnum.parse(repo.status),
+            organization: repo.organization ?? undefined,
+            lastMirrored: repo.lastMirrored ?? undefined,
+            errorMessage: repo.errorMessage ?? undefined,
+            forkedFrom: repo.forkedFrom ?? undefined,
+            visibility: repositoryVisibilityEnum.parse(repo.visibility),
+          };
+
+          // Log the start of syncing
+          console.log(`Starting sync for repository: ${repo.name}`);
+
+          // Sync the repository
           await syncGiteaRepo({
             config,
-            repository: {
-              ...repo,
-              status: repoStatusEnum.parse(repo.status),
-              organization: repo.organization ?? undefined,
-              lastMirrored: repo.lastMirrored ?? undefined,
-              errorMessage: repo.errorMessage ?? undefined,
-              forkedFrom: repo.forkedFrom ?? undefined,
-              visibility: repositoryVisibilityEnum.parse(repo.visibility),
-            },
+            repository: repoData,
           });
-        } catch (error) {
-          console.error(`Sync failed for repo ${repo.name}:`, error);
-        }
-      }
+          return repo;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "sync",
+          batchId,
+          getItemId: (repo) => repo.id,
+          getItemName: (repo) => repo.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          checkpointInterval: 1, // Checkpoint after each repository
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Syncing progress: ${percentComplete}% (${completed}/${total})`);
+            if (result) {
+              console.log(`Successfully synced repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying sync for repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository syncing tasks completed");
     }, 0);

     const responsePayload: SyncRepoResponse = {
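checkpointInterval: 1 together with batchId is what makes these background jobs survive container restarts: after every completed repository the job's progress is persisted, and a restarted process can skip the items already done. A hypothetical sketch of that bookkeeping follows; the field names are illustrative assumptions, not the actual schema.

// Hypothetical checkpoint shape implied by batchId / checkpointInterval above.
interface JobCheckpoint {
  batchId: string;
  jobType: string;             // e.g. "mirror" or "sync"
  itemIds: string[];           // every item id queued in this batch
  completedItemIds: string[];  // ids that finished before the interruption
  lastCheckpoint: Date;
}

// After a restart, a recovery pass could requeue only the unfinished items.
function remainingItems<T extends { id: string }>(
  items: T[],
  checkpoint: JobCheckpoint,
): T[] {
  const done = new Set(checkpoint.completedItemIds);
  return items.filter((item) => !done.has(item.id));
}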
20
src/tests/setup.bun.ts
Normal file
@@ -0,0 +1,20 @@
+/**
+ * Bun test setup file
+ * This file is automatically loaded before running tests
+ */
+
+import { afterEach, beforeEach } from "bun:test";
+
+// Clean up after each test
+afterEach(() => {
+  // Add any cleanup logic here
+});
+
+// Setup before each test
+beforeEach(() => {
+  // Add any setup logic here
+});
+
+// Add DOM testing support if needed
+// import { DOMParser } from "linkedom";
+// global.DOMParser = DOMParser;
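The header comment says this file is loaded automatically before tests run. With Bun, that is normally wired up through a preload entry in bunfig.toml; assuming this repository uses one, the entry would look roughly like this (the path below is taken from this diff, the rest is an assumption):

# bunfig.toml (assumed wiring, not shown in this compare view)
[test]
preload = ["./src/tests/setup.bun.ts"]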