Merge branch 'main' into codex/issue-171-sync-reporting

This commit is contained in:
ARUNAVO RAY
2026-02-24 10:12:24 +05:30
committed by GitHub
8 changed files with 384 additions and 19 deletions

View File

@@ -282,6 +282,8 @@ CLEANUP_DRY_RUN=false # Set to true to test without changes
**Important Notes**:
- **Auto-Start**: When `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` is set, the service automatically imports all GitHub repositories and mirrors them on startup. No manual "Import" or "Mirror" button clicks required!
- The scheduler checks every minute for tasks to run. The `GITEA_MIRROR_INTERVAL` determines how often each repository is actually synced. For example, with `8h`, each repo syncs every 8 hours from its last successful sync.
- **Large repo bootstrap**: For first-time mirroring of large repositories (especially with metadata/LFS), avoid very short intervals (for example `5m`). Start with a longer interval (`1h` to `8h`) or temporarily disable scheduling during the initial import/mirror run, then enable your regular interval after the first pass completes.
- **Why this matters**: If your Gitea instance takes a long time to complete migrations/imports, aggressive schedules can cause repeated retries and duplicate-looking mirror attempts.
**🛡️ Backup Protection Features**:
- **No Accidental Deletions**: Repository cleanup is automatically skipped if GitHub is inaccessible (account deleted, banned, or API errors)
@@ -307,6 +309,20 @@ If sync logs show authentication failures (for example `terminal prompts disable
1. In Gitea/Forgejo, open repository **Settings → Mirror Settings** and update the mirror authorization password/token.
2. Or delete and re-mirror the repository from Gitea Mirror so it is recreated with current credentials.
### Re-sync Metadata After Changing Mirror Options
If you enable metadata options (issues/PRs/labels/milestones/releases) after repositories were already mirrored:
1. Go to **Repositories**, select the repositories, and click **Sync** to run a fresh sync pass.
2. For a full metadata refresh, use **Re-run Metadata** on selected repositories. This clears metadata sync state for those repos and immediately starts Sync.
3. If some repositories still miss metadata, reset the metadata sync state in SQLite (note: the command below clears this state for **all** repositories, not just the selected ones) and sync again:
```bash
sqlite3 data/gitea-mirror.db "UPDATE repositories SET metadata = NULL;"
```
This clears per-repository metadata completion flags so the next sync can re-run metadata import steps.
## Development
```bash
@@ -343,6 +359,20 @@ bun run build
- Never stored in plaintext
- Secure cookie-based session management
### Admin Password Recovery (CLI)
If email delivery is not configured, an admin with server access can reset a user password from the command line:
```bash
bun run reset-password -- --email=user@example.com --new-password='new-secure-password'
```
What this does:
- Updates the credential password hash for the matching user
- Creates a credential account if one does not already exist
- Invalidates all active sessions for that user (forces re-login)
Use this only from trusted server/admin environments.
## Authentication
Gitea Mirror supports multiple authentication methods. **Email/password authentication is the default and always enabled.**

View File

@@ -16,6 +16,7 @@
"check-db": "bun scripts/manage-db.ts check",
"fix-db": "bun scripts/manage-db.ts fix",
"reset-users": "bun scripts/manage-db.ts reset-users",
"reset-password": "bun scripts/manage-db.ts reset-password",
"db:generate": "bun drizzle-kit generate",
"db:migrate": "bun drizzle-kit migrate",
"db:push": "bun drizzle-kit push",

View File

@@ -4,9 +4,9 @@ import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import { migrate } from "drizzle-orm/bun-sqlite/migrator";
import { v4 as uuidv4 } from "uuid";
import { users, configs, repositories, organizations, mirrorJobs, events } from "../src/lib/db/schema";
import bcrypt from "bcryptjs";
import { eq } from "drizzle-orm";
import { users, configs, repositories, organizations, mirrorJobs, events, accounts, sessions } from "../src/lib/db/schema";
import { and, eq } from "drizzle-orm";
import { hashPassword } from "better-auth/crypto";
// Command line arguments
const args = process.argv.slice(2);
@@ -194,6 +194,92 @@ async function fixDatabase() {
console.log("✅ Database location fixed");
}
/**
 * Reset a single user's password (admin recovery flow).
 *
 * Reads --email=... and --new-password=... from the CLI args, then:
 * - hashes the new password with Better Auth's hasher (hashPassword),
 * - updates the user's "credential" account row, creating one if missing,
 * - deletes every active session for that user so all devices must re-login.
 *
 * Exits the process with code 1 on any validation, lookup, or DB failure.
 */
async function resetPassword() {
  // Take everything after the first "=" so values containing "=" (common in
  // generated passwords) are not truncated the way split("=")[1] would be.
  const readArgValue = (prefix: string): string | undefined =>
    args.find((arg) => arg.startsWith(prefix))?.slice(prefix.length);

  const email = readArgValue("--email=")?.trim().toLowerCase();
  const newPassword = readArgValue("--new-password=");

  if (!email || !newPassword) {
    console.log("❌ Missing required arguments");
    console.log("Usage:");
    console.log("  bun run manage-db reset-password --email=user@example.com --new-password='new-secure-password'");
    process.exit(1);
  }

  if (newPassword.length < 8) {
    console.log("❌ Password must be at least 8 characters");
    process.exit(1);
  }

  if (!fs.existsSync(dbPath)) {
    console.log("❌ Database does not exist");
    process.exit(1);
  }

  const sqlite = new Database(dbPath);
  const db = drizzle({ client: sqlite });

  try {
    // Use explicit select() builders instead of db.query.*: the relational
    // query API (db.query.users.findFirst) only exists when a schema is passed
    // to drizzle(), which this instance does not configure — db.query would be
    // undefined at runtime.
    const [user] = await db
      .select()
      .from(users)
      .where(eq(users.email, email))
      .limit(1);

    if (!user) {
      console.log(`❌ No user found for email: ${email}`);
      sqlite.close();
      process.exit(1);
    }

    const hashedPassword = await hashPassword(newPassword);
    const now = new Date();

    // A user may not have a credential (email/password) account yet, e.g. if
    // they only ever signed in via OAuth — create one in that case.
    const [credentialAccount] = await db
      .select()
      .from(accounts)
      .where(
        and(
          eq(accounts.userId, user.id),
          eq(accounts.providerId, "credential"),
        ),
      )
      .limit(1);

    if (credentialAccount) {
      await db
        .update(accounts)
        .set({
          password: hashedPassword,
          updatedAt: now,
        })
        .where(eq(accounts.id, credentialAccount.id));
    } else {
      await db.insert(accounts).values({
        id: uuidv4(),
        accountId: user.id,
        userId: user.id,
        providerId: "credential",
        password: hashedPassword,
        createdAt: now,
        updatedAt: now,
      });
    }

    // Invalidate every active session so previously issued sessions cannot
    // outlive the password reset.
    const deletedSessions = await db
      .delete(sessions)
      .where(eq(sessions.userId, user.id))
      .returning({ id: sessions.id });

    console.log(`✅ Password reset for ${email}`);
    console.log(`🔒 Cleared ${deletedSessions.length} active session(s)`);
    sqlite.close();
  } catch (error) {
    console.error("❌ Error resetting password:", error);
    sqlite.close();
    process.exit(1);
  }
}
/**
* Auto mode - check and initialize if needed
*/
@@ -224,6 +310,9 @@ switch (command) {
case "cleanup":
await cleanupDatabase();
break;
case "reset-password":
await resetPassword();
break;
case "auto":
await autoMode();
break;
@@ -233,6 +322,7 @@ switch (command) {
console.log(" check - Check database status");
console.log(" fix - Fix database location issues");
console.log(" reset-users - Remove all users and related data");
console.log(" reset-password - Reset one user's password and clear sessions");
console.log(" cleanup - Remove all database files");
console.log(" auto - Auto initialize if needed");
process.exit(1);

View File

@@ -377,14 +377,13 @@ export function GitHubMirrorSettings({
id="release-limit"
type="number"
min="1"
max="100"
value={mirrorOptions.releaseLimit || 10}
onChange={(e) => {
const value = parseInt(e.target.value) || 10;
const clampedValue = Math.min(100, Math.max(1, value));
const clampedValue = Math.max(1, value);
handleMirrorChange('releaseLimit', clampedValue);
}}
className="w-16 px-2 py-1 text-xs border border-input rounded bg-background text-foreground"
className="w-20 px-2 py-1 text-xs border border-input rounded bg-background text-foreground"
/>
<span className="text-xs text-muted-foreground">releases</span>
</div>

View File

@@ -44,6 +44,7 @@ import { toast } from "sonner";
import type { SyncRepoRequest, SyncRepoResponse } from "@/types/sync";
import { OwnerCombobox, OrganizationCombobox } from "./RepositoryComboboxes";
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
import type { ResetMetadataRequest, ResetMetadataResponse } from "@/types/reset-metadata";
import AddRepositoryDialog from "./AddRepositoryDialog";
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
@@ -378,6 +379,67 @@ export default function Repository() {
}
};
const handleBulkRerunMetadata = async () => {
if (selectedRepoIds.size === 0) return;
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
const eligibleRepos = selectedRepos.filter(
repo => ["mirrored", "synced", "archived"].includes(repo.status)
);
if (eligibleRepos.length === 0) {
toast.info("No eligible repositories to re-run metadata in selection");
return;
}
const repoIds = eligibleRepos.map(repo => repo.id as string);
setLoadingRepoIds(prev => {
const newSet = new Set(prev);
repoIds.forEach(id => newSet.add(id));
return newSet;
});
try {
const resetPayload: ResetMetadataRequest = {
userId: user?.id || "",
repositoryIds: repoIds,
};
const resetResponse = await apiRequest<ResetMetadataResponse>("/job/reset-metadata", {
method: "POST",
data: resetPayload,
});
if (!resetResponse.success) {
showErrorToast(resetResponse.error || "Failed to reset metadata state", toast);
return;
}
const syncResponse = await apiRequest<SyncRepoResponse>("/job/sync-repo", {
method: "POST",
data: { userId: user?.id, repositoryIds: repoIds },
});
if (syncResponse.success) {
toast.success(`Re-running metadata for ${repoIds.length} repositories`);
setRepositories(prevRepos =>
prevRepos.map(repo => {
const updated = syncResponse.repositories.find(r => r.id === repo.id);
return updated ? updated : repo;
})
);
setSelectedRepoIds(new Set());
} else {
showErrorToast(syncResponse.error || "Error starting metadata re-sync", toast);
}
} catch (error) {
showErrorToast(error, toast);
} finally {
setLoadingRepoIds(new Set());
}
};
const handleBulkRetry = async () => {
if (selectedRepoIds.size === 0) return;
@@ -807,6 +869,10 @@ export default function Repository() {
actions.push('sync');
}
if (selectedRepos.some(repo => ["mirrored", "synced", "archived"].includes(repo.status))) {
actions.push('rerun-metadata');
}
// Check if any selected repos are failed
if (selectedRepos.some(repo => repo.status === "failed")) {
actions.push('retry');
@@ -834,6 +900,7 @@ export default function Repository() {
return {
mirror: selectedRepos.filter(repo => repo.status === "imported" || repo.status === "failed").length,
sync: selectedRepos.filter(repo => repo.status === "mirrored" || repo.status === "synced").length,
rerunMetadata: selectedRepos.filter(repo => ["mirrored", "synced", "archived"].includes(repo.status)).length,
retry: selectedRepos.filter(repo => repo.status === "failed").length,
ignore: selectedRepos.filter(repo => repo.status !== "ignored").length,
include: selectedRepos.filter(repo => repo.status === "ignored").length,
@@ -1158,6 +1225,18 @@ export default function Repository() {
</Button>
)}
{availableActions.includes('rerun-metadata') && (
<Button
variant="outline"
size="default"
onClick={handleBulkRerunMetadata}
disabled={loadingRepoIds.size > 0}
>
<RefreshCw className="h-4 w-4 mr-2" />
Re-run Metadata ({actionCounts.rerunMetadata})
</Button>
)}
{availableActions.includes('retry') && (
<Button
variant="outline"
@@ -1241,6 +1320,18 @@ export default function Repository() {
</Button>
)}
{availableActions.includes('rerun-metadata') && (
<Button
variant="outline"
size="sm"
onClick={handleBulkRerunMetadata}
disabled={loadingRepoIds.size > 0}
>
<RefreshCw className="h-4 w-4 mr-2" />
Re-run Metadata ({actionCounts.rerunMetadata})
</Button>
)}
{availableActions.includes('retry') && (
<Button
variant="outline"

View File

@@ -2026,17 +2026,43 @@ export async function mirrorGitHubReleasesToGitea({
}
// Get release limit from config (default to 10)
const releaseLimit = config.giteaConfig?.releaseLimit || 10;
const releaseLimit = Math.max(1, Math.floor(config.giteaConfig?.releaseLimit || 10));
const releases = await octokit.rest.repos.listReleases({
// GitHub API max per page is 100; paginate until we reach the configured limit.
const releases: Awaited<
ReturnType<typeof octokit.rest.repos.listReleases>
>["data"] = [];
let page = 1;
const perPage = Math.min(100, releaseLimit);
while (releases.length < releaseLimit) {
const response = await octokit.rest.repos.listReleases({
owner: repository.owner,
repo: repository.name,
per_page: releaseLimit, // Only fetch the latest N releases
per_page: perPage,
page,
});
console.log(`[Releases] Found ${releases.data.length} releases (limited to latest ${releaseLimit}) to mirror for ${repository.fullName}`);
if (response.data.length === 0) {
break;
}
if (releases.data.length === 0) {
releases.push(...response.data);
if (response.data.length < perPage) {
break;
}
page++;
}
const limitedReleases = releases.slice(0, releaseLimit);
console.log(
`[Releases] Found ${limitedReleases.length} releases (limited to latest ${releaseLimit}) to mirror for ${repository.fullName}`
);
if (limitedReleases.length === 0) {
console.log(`[Releases] No releases to mirror for ${repository.fullName}`);
return;
}
@@ -2044,7 +2070,7 @@ export async function mirrorGitHubReleasesToGitea({
let mirroredCount = 0;
let skippedCount = 0;
const getReleaseTimestamp = (release: typeof releases.data[number]) => {
const getReleaseTimestamp = (release: (typeof limitedReleases)[number]) => {
// Use published_at first (when the release was published on GitHub)
// Fall back to created_at (when the git tag was created) only if published_at is missing
// This matches GitHub's sorting behavior and handles cases where multiple tags
@@ -2055,10 +2081,9 @@ export async function mirrorGitHubReleasesToGitea({
};
// Capture the latest releases, then process them oldest-to-newest so Gitea mirrors keep chronological order
const releasesToProcess = releases.data
const releasesToProcess = limitedReleases
.slice()
.sort((a, b) => getReleaseTimestamp(b) - getReleaseTimestamp(a))
.slice(0, releaseLimit)
.sort((a, b) => getReleaseTimestamp(a) - getReleaseTimestamp(b));
console.log(`[Releases] Processing ${releasesToProcess.length} releases in chronological order (oldest to newest by published date)`);

View File

@@ -0,0 +1,116 @@
import type { APIRoute } from "astro";
import { and, eq, inArray } from "drizzle-orm";
import { db, configs, repositories } from "@/lib/db";
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
import type { ResetMetadataRequest, ResetMetadataResponse } from "@/types/reset-metadata";
import { createSecureErrorResponse } from "@/lib/utils";
/**
 * POST /job/reset-metadata
 *
 * Clears the metadata sync state (repositories.metadata = NULL) for the given
 * repository IDs owned by the given user, so the next sync pass re-runs the
 * metadata import steps. Responds with the refreshed repository rows.
 */
export const POST: APIRoute = async ({ request }) => {
  // Keep the JSON response boilerplate in one place.
  const json = (payload: unknown, status: number) =>
    new Response(JSON.stringify(payload), {
      status,
      headers: { "Content-Type": "application/json" },
    });

  try {
    const { userId, repositoryIds }: ResetMetadataRequest = await request.json();

    if (!userId || !repositoryIds || !Array.isArray(repositoryIds)) {
      return json(
        { success: false, message: "userId and repositoryIds are required." },
        400
      );
    }

    if (repositoryIds.length === 0) {
      return json({ success: false, message: "No repository IDs provided." }, 400);
    }

    const [config] = await db
      .select()
      .from(configs)
      .where(eq(configs.userId, userId))
      .limit(1);

    if (!config || !config.githubConfig.token || !config.giteaConfig?.token) {
      return json(
        { success: false, error: "Missing GitHub or Gitea configuration." },
        400
      );
    }

    // The same ownership filter is reused for the existence check, the update,
    // and the final re-read.
    const ownedSelection = and(
      eq(repositories.userId, userId),
      inArray(repositories.id, repositoryIds)
    );

    const repos = await db.select().from(repositories).where(ownedSelection);

    if (!repos.length) {
      return json(
        { success: false, error: "No repositories found for the given IDs." },
        404
      );
    }

    await db
      .update(repositories)
      .set({ metadata: null, updatedAt: new Date() })
      .where(ownedSelection);

    // Re-read so the client receives the post-reset rows.
    const updatedRepos = await db.select().from(repositories).where(ownedSelection);

    const responsePayload: ResetMetadataResponse = {
      success: true,
      message: "Metadata state reset. Trigger sync to re-run metadata import.",
      repositories: updatedRepos.map((repo) => ({
        ...repo,
        status: repoStatusEnum.parse(repo.status),
        organization: repo.organization ?? undefined,
        lastMirrored: repo.lastMirrored ?? undefined,
        errorMessage: repo.errorMessage ?? undefined,
        forkedFrom: repo.forkedFrom ?? undefined,
        visibility: repositoryVisibilityEnum.parse(repo.visibility),
        mirroredLocation: repo.mirroredLocation || "",
      })),
    };

    return json(responsePayload, 200);
  } catch (error) {
    return createSecureErrorResponse(error, "metadata reset", 500);
  }
};

View File

@@ -0,0 +1,13 @@
import type { Repository } from "@/lib/db/schema";

/** Request body for POST /job/reset-metadata. */
export interface ResetMetadataRequest {
  /** Owner of the repositories whose metadata sync state should be cleared. */
  userId: string;
  /** IDs of the repositories to reset. */
  repositoryIds: string[];
}

/** Response payload for POST /job/reset-metadata. */
export interface ResetMetadataResponse {
  /** True when the reset was applied. */
  success: boolean;
  /** Human-readable status message (set on success). */
  message?: string;
  /** Error description (set on failure). */
  error?: string;
  /** Refreshed repository rows after the metadata state was cleared. */
  repositories: Repository[];
}