Mirror of https://github.com/RayLabsHQ/gitea-mirror.git (synced 2025-12-06 11:36:44 +03:00)
Improved a bunch of things in mirror and sync automation
@@ -208,6 +208,24 @@ Repositories can have the following statuses:
- **deleting**: Repository being deleted
- **deleted**: Repository deleted

### Scheduling and Synchronization (Issue #72 Fixes)

#### Fixed Issues

1. **Mirror Interval Bug**: Added `mirror_interval` parameter to Gitea API calls when creating mirrors (previously defaulted to 24h); a sketch of such a call follows this list
2. **Auto-Discovery**: Scheduler now automatically discovers and imports new GitHub repositories
3. **Interval Updates**: Sync operations now update existing mirrors' intervals to match configuration
4. **Repository Cleanup**: Integrated automatic cleanup of orphaned repositories (repos removed from GitHub)
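
To make fix 1 concrete, the sketch below shows a repository-creation request with the interval set explicitly. It is illustrative only: it assumes Gitea's standard `POST /api/v1/repos/migrate` endpoint and plain `fetch`, whereas the project itself goes through its own `http-client` helpers; the payload fields match the `mirrorGithubRepoToGitea` change further down.

```typescript
// Illustrative sketch: create a pull-mirror in Gitea with an explicit mirror_interval.
// The endpoint and token handling are assumptions based on the standard Gitea API;
// the field names mirror the payload used in mirrorGithubRepoToGitea below.
async function createMirrorSketch(giteaUrl: string, token: string) {
  const response = await fetch(`${giteaUrl}/api/v1/repos/migrate`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `token ${token}`,
    },
    body: JSON.stringify({
      clone_addr: "https://github.com/example/repo.git", // hypothetical source repo
      repo_name: "repo",
      mirror: true,
      mirror_interval: "8h", // the fix: without this, Gitea falls back to its 24h default
      wiki: false,
      lfs: false,
      private: false,
    }),
  });
  if (!response.ok) {
    throw new Error(`Gitea migrate failed: ${response.status}`);
  }
  return response.json();
}
```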

#### Environment Variables for Auto-Import

- **AUTO_IMPORT_REPOS**: Set to `false` to disable automatic repository discovery (default: enabled)
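
During config initialization the flag is read directly from the environment (see the `initializeConfigFromEnv` hunk below); any value other than the literal string `false` leaves auto-import enabled:

```typescript
// Matches the check added in initializeConfigFromEnv below:
// auto-import stays on unless AUTO_IMPORT_REPOS is exactly "false".
const autoImport = process.env.AUTO_IMPORT_REPOS !== "false";
```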

#### How Scheduling Works

- **Scheduler Service**: Runs every minute to check for scheduled tasks
- **Sync Interval**: Configured via `GITEA_MIRROR_INTERVAL` or the UI (e.g., "8h", "30m", "1d"; see the sketch after this list)
- **Auto-Import**: Checks GitHub for new repositories during each scheduled sync
- **Auto-Cleanup**: Removes repositories that no longer exist in GitHub (if enabled)
- **Mirror Interval Update**: Updates Gitea's internal mirror interval during sync operations
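
As a rough illustration of the first two bullets (a minimal sketch under assumptions, not the project's actual scheduler code), a once-per-minute tick can parse the human-readable interval into milliseconds and compare the current time against the stored `nextRun`:

```typescript
// Illustrative only: parse "8h" / "30m" / "1d" into milliseconds and run a
// once-per-minute check against nextRun. The real logic lives in the
// runScheduledSync hunk below; parseInterval and the scheduling variables
// used here are assumptions made for the sketch.
function parseInterval(interval: string): number {
  const match = /^(\d+)([mhd])$/.exec(interval.trim());
  if (!match) throw new Error(`Unrecognized interval: ${interval}`);
  const units: Record<string, number> = { m: 60_000, h: 3_600_000, d: 86_400_000 };
  return Number(match[1]) * units[match[2]];
}

let nextRun = Date.now(); // first tick runs immediately in this sketch

setInterval(() => {
  const now = Date.now();
  if (now >= nextRun) {
    nextRun = now + parseInterval(process.env.GITEA_MIRROR_INTERVAL ?? "8h");
    // trigger the scheduled sync here (auto-import, mirror sync, cleanup)
  }
}, 60_000); // the scheduler service wakes up every minute
```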

### Authentication Configuration

#### SSO Provider Configuration

@@ -299,6 +299,7 @@ export async function initializeConfigFromEnv(): Promise<void> {
    updateInterval: envConfig.schedule.updateInterval ?? existingConfig?.[0]?.scheduleConfig?.updateInterval ?? 86400000,
    skipRecentlyMirrored: envConfig.schedule.skipRecentlyMirrored ?? existingConfig?.[0]?.scheduleConfig?.skipRecentlyMirrored ?? true,
    recentThreshold: envConfig.schedule.recentThreshold ?? existingConfig?.[0]?.scheduleConfig?.recentThreshold ?? 3600000,
    autoImport: process.env.AUTO_IMPORT_REPOS !== 'false', // New field for auto-importing new repositories
    lastRun: existingConfig?.[0]?.scheduleConfig?.lastRun || undefined,
    nextRun: existingConfig?.[0]?.scheduleConfig?.nextRun || undefined,
  };

@@ -10,7 +10,7 @@ import type { Config } from "@/types/config";
import type { Repository } from "./db/schema";
import { createMirrorJob } from "./helpers";
import { decryptConfigTokens } from "./utils/config-encryption";
-import { httpPost, httpGet, HttpError } from "./http-client";
+import { httpPost, httpGet, httpPatch, HttpError } from "./http-client";
import { db, repositories } from "./db";
import { eq } from "drizzle-orm";
import { repoStatusEnum } from "@/types/Repository";

@@ -299,6 +299,23 @@ export async function syncGiteaRepoEnhanced({
    throw new Error(`Repository ${repository.name} is not a mirror. Cannot sync.`);
  }

  // Update mirror interval if needed
  if (config.giteaConfig?.mirrorInterval) {
    try {
      console.log(`[Sync] Updating mirror interval for ${repository.name} to ${config.giteaConfig.mirrorInterval}`);
      const updateUrl = `${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}`;
      await httpPatch(updateUrl, {
        mirror_interval: config.giteaConfig.mirrorInterval,
      }, {
        Authorization: `token ${decryptedConfig.giteaConfig.token}`,
      });
      console.log(`[Sync] Successfully updated mirror interval for ${repository.name}`);
    } catch (updateError) {
      console.warn(`[Sync] Failed to update mirror interval for ${repository.name}:`, updateError);
      // Continue with sync even if interval update fails
    }
  }

  // Perform the sync
  const apiUrl = `${config.giteaConfig.url}/api/v1/repos/${repoOwner}/${repository.name}/mirror-sync`;

@@ -417,6 +417,7 @@ export const mirrorGithubRepoToGitea = async ({
      clone_addr: cloneAddress,
      repo_name: repository.name,
      mirror: true,
      mirror_interval: config.giteaConfig?.mirrorInterval || "8h", // Set mirror interval
      wiki: config.giteaConfig?.wiki || false, // will mirror wiki if it exists
      lfs: config.giteaConfig?.lfs || false, // Enable LFS mirroring if configured
      private: repository.isPrivate,

@@ -711,6 +712,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
      uid: giteaOrgId,
      repo_name: repository.name,
      mirror: true,
      mirror_interval: config.giteaConfig?.mirrorInterval || "8h", // Set mirror interval
      wiki: config.giteaConfig?.wiki || false, // will mirror wiki if it exists
      lfs: config.giteaConfig?.lfs || false, // Enable LFS mirroring if configured
      private: repository.isPrivate,

@@ -178,6 +178,21 @@ export async function httpPut<T = any>(
  });
}

/**
 * PATCH request
 */
export async function httpPatch<T = any>(
  url: string,
  body?: any,
  headers?: Record<string, string>
): Promise<HttpResponse<T>> {
  return httpRequest<T>(url, {
    method: 'PATCH',
    headers,
    body: body ? JSON.stringify(body) : undefined,
  });
}

/**
 * DELETE request
 */

@@ -220,6 +235,10 @@ export class GiteaHttpClient {
    return httpPut<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
  }

  async patch<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
    return httpPatch<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
  }

  async delete<T = any>(endpoint: string): Promise<HttpResponse<T>> {
    return httpDelete<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
  }
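
A short usage sketch for the new `patch` method, tying it back to the interval update in `syncGiteaRepoEnhanced`: the `GiteaHttpClient` constructor arguments and the exact repository path are assumptions here; only the `patch` call and the `mirror_interval` field come from this commit.

```typescript
// Illustrative usage of the new PATCH support. The GiteaHttpClient constructor
// signature (base URL + token) and the repository path are assumed for this
// sketch; the mirror_interval field matches the interval update done during sync.
import { GiteaHttpClient } from "./http-client";

const client = new GiteaHttpClient("https://gitea.example.com", "example-token"); // hypothetical instance

// Align an existing mirror's interval with the configured value, e.g. "8h".
await client.patch("/api/v1/repos/example-owner/example-repo", {
  mirror_interval: "8h",
});
```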

@@ -348,6 +348,9 @@ export function isRepositoryCleanupServiceRunning(): boolean {
  return cleanupInterval !== null;
}

// Export functions for use by scheduler
export { identifyOrphanedRepositories, handleOrphanedRepository };

/**
 * Manually trigger repository cleanup for a specific user
 */

@@ -68,6 +68,111 @@ async function runScheduledSync(config: any): Promise<void> {
    updatedAt: currentTime,
  }).where(eq(configs.id, config.id));

  // Auto-discovery: Check for new GitHub repositories
  if (scheduleConfig.autoImport !== false) {
    console.log(`[Scheduler] Checking for new GitHub repositories for user ${userId}...`);
    try {
      const { getGithubRepositories, getGithubStarredRepositories, getGithubOrganizations } = await import('@/lib/github');
      const { v4: uuidv4 } = await import('uuid');
      const { getDecryptedGitHubToken } = await import('@/lib/utils/config-encryption');

      // Create GitHub client
      const decryptedToken = getDecryptedGitHubToken(config);
      const { Octokit } = await import('@octokit/rest');
      const octokit = new Octokit({ auth: decryptedToken });

      // Fetch GitHub data
      const [basicAndForkedRepos, starredRepos, gitOrgs] = await Promise.all([
        getGithubRepositories({ octokit, config }),
        config.githubConfig?.includeStarred
          ? getGithubStarredRepositories({ octokit, config })
          : Promise.resolve([]),
        getGithubOrganizations({ octokit, config }),
      ]);

      const allGithubRepos = [...basicAndForkedRepos, ...starredRepos];

      // Check for new repositories
      const existingRepos = await db
        .select({ fullName: repositories.fullName })
        .from(repositories)
        .where(eq(repositories.userId, userId));

      const existingRepoNames = new Set(existingRepos.map(r => r.fullName));
      const newRepos = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName));

      if (newRepos.length > 0) {
        console.log(`[Scheduler] Found ${newRepos.length} new repositories for user ${userId}`);

        // Insert new repositories
        const reposToInsert = newRepos.map(repo => ({
          id: uuidv4(),
          userId,
          configId: config.id,
          name: repo.name,
          fullName: repo.fullName,
          url: repo.url,
          cloneUrl: repo.cloneUrl,
          owner: repo.owner,
          organization: repo.organization,
          isPrivate: repo.isPrivate,
          isForked: repo.isForked,
          forkedFrom: repo.forkedFrom,
          hasIssues: repo.hasIssues,
          isStarred: repo.isStarred,
          isArchived: repo.isArchived,
          size: repo.size,
          hasLFS: repo.hasLFS,
          hasSubmodules: repo.hasSubmodules,
          defaultBranch: repo.defaultBranch,
          visibility: repo.visibility,
          status: 'imported',
          createdAt: new Date(),
          updatedAt: new Date(),
        }));

        await db.insert(repositories).values(reposToInsert);
        console.log(`[Scheduler] Successfully imported ${newRepos.length} new repositories for user ${userId}`);
      } else {
        console.log(`[Scheduler] No new repositories found for user ${userId}`);
      }
    } catch (error) {
      console.error(`[Scheduler] Failed to auto-import repositories for user ${userId}:`, error);
    }
  }

  // Auto-cleanup: Remove orphaned repositories (repos that no longer exist in GitHub)
  if (config.cleanupConfig?.deleteIfNotInGitHub) {
    console.log(`[Scheduler] Checking for orphaned repositories to cleanup for user ${userId}...`);
    try {
      const { identifyOrphanedRepositories, handleOrphanedRepository } = await import('@/lib/repository-cleanup-service');

      const orphanedRepos = await identifyOrphanedRepositories(config);

      if (orphanedRepos.length > 0) {
        console.log(`[Scheduler] Found ${orphanedRepos.length} orphaned repositories for cleanup`);

        for (const repo of orphanedRepos) {
          try {
            await handleOrphanedRepository(
              config,
              repo,
              config.cleanupConfig.orphanedRepoAction || 'archive',
              config.cleanupConfig.dryRun ?? false
            );
            console.log(`[Scheduler] Handled orphaned repository: ${repo.fullName}`);
          } catch (error) {
            console.error(`[Scheduler] Failed to handle orphaned repository ${repo.fullName}:`, error);
          }
        }
      } else {
        console.log(`[Scheduler] No orphaned repositories found for cleanup`);
      }
    } catch (error) {
      console.error(`[Scheduler] Failed to cleanup orphaned repositories for user ${userId}:`, error);
    }
  }

  // Get repositories to sync
  let reposToSync = await db
    .select()