Files
gitea-mirror/src/pages/api/job/approve-sync.ts
ARUNAVO RAY 98da7065e0 feat: smart force-push protection with backup strategies (#206)
* feat: smart force-push protection with backup strategies (#187)

Replace blunt `backupBeforeSync` boolean with `backupStrategy` enum
offering four modes: disabled, always, on-force-push (default), and
block-on-force-push. This dramatically reduces backup storage for large
mirror collections by only creating snapshots when force-pushes are
actually detected.

Detection works by comparing branch SHAs between Gitea and GitHub APIs
before each sync — no git cloning required. Fail-open design ensures
detection errors never block sync.

Key changes:
- Add force-push detection module (branch SHA comparison via APIs)
- Add backup strategy resolver with backward-compat migration
- Add pending-approval repo status with approve/dismiss UI + API
- Add block-on-force-push mode requiring manual approval
- Fix checkAncestry to only treat 404 as confirmed force-push
  (transient errors skip branch instead of false-positive blocking)
- Fix approve-sync to bypass detection gate (skipForcePushDetection)
- Fix backup execution to not be hard-gated by deprecated flag
- Persist backupStrategy through config-mapper round-trip

* fix: resolve four bugs in smart force-push protection

P0: Approve flow re-blocks itself — approve-sync now calls
syncGiteaRepoEnhanced with skipForcePushDetection: true so the
detection+block gate is bypassed on approved syncs.

P1: backupStrategy not persisted — added to both directions of the
config-mapper. Don't inject a default in the mapper; let
resolveBackupStrategy handle fallback so legacy backupBeforeSync
still works for E2E tests and existing configs.

P1: Backup hard-gated by deprecated backupBeforeSync — added force
flag to createPreSyncBundleBackup; strategy-driven callers and
approve-sync pass force: true to bypass the legacy guard.

P1: checkAncestry false positives — now only returns false for
404/422 (confirmed force-push). Transient errors (rate limits, 500s)
are rethrown so detectForcePush skips that branch (fail-open).

* test(e2e): migrate backup tests from backupBeforeSync to backupStrategy

Update E2E tests to use the new backupStrategy enum ("always",
"disabled") instead of the deprecated backupBeforeSync boolean.

* docs: add backup strategy UI screenshot

* refactor(ui): move Destructive Update Protection to GitHub config tab

Relocates the backup strategy section from GiteaConfigForm to
GitHubConfigForm since it protects against GitHub-side force-pushes.
Adds ShieldAlert icon to match other section header patterns.

* docs: add force-push protection documentation and Beta badge

Add docs/FORCE_PUSH_PROTECTION.md covering detection mechanism,
backup strategies, API usage, and troubleshooting. Link it from
README features list and support section. Mark the feature as Beta
in the UI with an outline badge.

* fix(ui): match Beta badge style to Git LFS badge
2026-03-02 15:48:59 +05:30

203 lines
6.8 KiB
TypeScript

import type { APIRoute } from "astro";
import { db, configs, repositories } from "@/lib/db";
import { and, eq, inArray } from "drizzle-orm";
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
import { syncGiteaRepoEnhanced } from "@/lib/gitea-enhanced";
import { createSecureErrorResponse } from "@/lib/utils";
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
import { createPreSyncBundleBackup } from "@/lib/repo-backup";
import { decryptConfigTokens } from "@/lib/utils/config-encryption";
import type { Config } from "@/types/config";
import { createMirrorJob } from "@/lib/helpers";
/**
 * Request payload for POST /api/job/approve-sync.
 */
interface ApproveSyncRequest {
  /** IDs of repositories to act on; only repos in "pending-approval" status are matched. */
  repositoryIds: string[];
  /**
   * "approve"  — create a safety backup, then re-sync with force-push
   *              detection bypassed.
   * "dismiss"  — reset the repos to "synced" so they resume the normal
   *              sync schedule without triggering a sync.
   */
  action: "approve" | "dismiss";
}
/** Build a JSON Response with the given status. All replies from this route share this shape. */
const jsonResponse = (status: number, payload: unknown): Response =>
  new Response(JSON.stringify(payload), {
    status,
    headers: { "Content-Type": "application/json" },
  });

/**
 * POST /api/job/approve-sync
 *
 * Resolves force-push alerts for repositories in "pending-approval" status.
 *
 * - action "dismiss": reset the repos to "synced" (with a mirror-job audit
 *   entry) so they resume the normal sync schedule; no sync is triggered.
 * - action "approve": immediately mark the repos "syncing" for UI
 *   responsiveness, then in a detached background task create a pre-sync
 *   bundle backup (best-effort) and run the sync with
 *   `skipForcePushDetection: true` — otherwise the detection gate would
 *   re-block the very sync the user just approved.
 *
 * Responses: 400 on invalid input or missing config, 404 when none of the
 * given IDs are pending approval, 200 with the updated repo list otherwise.
 */
export const POST: APIRoute = async ({ request, locals }) => {
  try {
    const authResult = await requireAuthenticatedUserId({ request, locals });
    if ("response" in authResult) return authResult.response;
    const userId = authResult.userId;

    const body: ApproveSyncRequest = await request.json();
    const { repositoryIds, action } = body;

    if (!repositoryIds || !Array.isArray(repositoryIds) || repositoryIds.length === 0) {
      return jsonResponse(400, { success: false, message: "repositoryIds are required." });
    }
    if (action !== "approve" && action !== "dismiss") {
      return jsonResponse(400, { success: false, message: "action must be 'approve' or 'dismiss'." });
    }

    // Fetch config
    const configResult = await db
      .select()
      .from(configs)
      .where(eq(configs.userId, userId))
      .limit(1);
    const config = configResult[0];
    if (!config) {
      return jsonResponse(400, { success: false, message: "No configuration found." });
    }

    // Fetch repos — only those in pending-approval status. Any other IDs in
    // the request are silently ignored (they may already have been handled).
    const repos = await db
      .select()
      .from(repositories)
      .where(
        and(
          eq(repositories.userId, userId),
          eq(repositories.status, "pending-approval"),
          inArray(repositories.id, repositoryIds),
        ),
      );
    if (!repos.length) {
      return jsonResponse(404, {
        success: false,
        message: "No pending-approval repositories found for the given IDs.",
      });
    }

    if (action === "dismiss") {
      // Reset status to "synced" so repos resume normal schedule, leaving an
      // audit trail entry per repo.
      for (const repo of repos) {
        await db
          .update(repositories)
          .set({
            status: "synced",
            errorMessage: null,
            updatedAt: new Date(),
          })
          .where(eq(repositories.id, repo.id));
        await createMirrorJob({
          userId,
          repositoryId: repo.id,
          repositoryName: repo.name,
          message: `Force-push alert dismissed for ${repo.name}`,
          details: "User dismissed the force-push alert. Repository will resume normal sync schedule.",
          status: "synced",
        });
      }
      return jsonResponse(200, {
        success: true,
        message: `Dismissed ${repos.length} repository alert(s).`,
        repositories: repos.map((repo) => ({
          ...repo,
          status: "synced",
          errorMessage: null,
        })),
      });
    }

    // action === "approve": create backup first (safety), then trigger sync.
    // BUG FIX: the decrypted config must actually be used below — the raw DB
    // row still carries encrypted tokens, so passing it to the Gitea/GitHub
    // helpers would authenticate with ciphertext.
    const decryptedConfig = decryptConfigTokens(config as unknown as Config);

    // Process in background (fire-and-forget; per-repo errors are logged,
    // never surfaced to this response).
    setTimeout(async () => {
      for (const repo of repos) {
        try {
          const { getGiteaRepoOwnerAsync } = await import("@/lib/gitea");
          const repoOwner = await getGiteaRepoOwnerAsync({
            config: decryptedConfig,
            repository: repo,
          });

          // Always create a backup before approved sync for safety.
          const cloneUrl = `${decryptedConfig.giteaConfig.url.replace(/\/$/, "")}/${repoOwner}/${repo.name}.git`;
          try {
            const backupResult = await createPreSyncBundleBackup({
              config: decryptedConfig,
              owner: repoOwner,
              repoName: repo.name,
              cloneUrl,
              force: true, // Bypass legacy gate — approval implies backup
            });
            await createMirrorJob({
              userId,
              repositoryId: repo.id,
              repositoryName: repo.name,
              message: `Safety snapshot created for ${repo.name}`,
              details: `Pre-approval snapshot at ${backupResult.bundlePath}.`,
              status: "syncing",
            });
          } catch (backupError) {
            // Best-effort: a failed backup must not block the approved sync.
            console.warn(
              `[ApproveSync] Backup failed for ${repo.name}, proceeding with sync: ${
                backupError instanceof Error ? backupError.message : String(backupError)
              }`,
            );
          }

          // Trigger sync — skip detection to avoid re-blocking the repo the
          // user just approved.
          const repoData = {
            ...repo,
            status: repoStatusEnum.parse("syncing"),
            organization: repo.organization ?? undefined,
            lastMirrored: repo.lastMirrored ?? undefined,
            errorMessage: repo.errorMessage ?? undefined,
            forkedFrom: repo.forkedFrom ?? undefined,
            visibility: repositoryVisibilityEnum.parse(repo.visibility),
            mirroredLocation: repo.mirroredLocation || "",
          };
          await syncGiteaRepoEnhanced({
            config: decryptedConfig,
            repository: repoData,
            skipForcePushDetection: true,
          });
          console.log(`[ApproveSync] Sync completed for approved repository: ${repo.name}`);
        } catch (error) {
          console.error(
            `[ApproveSync] Failed to sync approved repository ${repo.name}:`,
            error,
          );
        }
      }
    }, 0);

    // Immediately update status to syncing for responsiveness
    for (const repo of repos) {
      await db
        .update(repositories)
        .set({
          status: "syncing",
          errorMessage: null,
          updatedAt: new Date(),
        })
        .where(eq(repositories.id, repo.id));
    }

    return jsonResponse(200, {
      success: true,
      message: `Approved sync for ${repos.length} repository(ies). Backup + sync started.`,
      repositories: repos.map((repo) => ({
        ...repo,
        status: "syncing",
        errorMessage: null,
      })),
    });
  } catch (error) {
    return createSecureErrorResponse(error, "approve-sync", 500);
  }
};