Compare commits

...

8 Commits

Author SHA1 Message Date
Arunavo Ray
62c97ca04c lib: harden metadata sync for existing repos 2026-03-14 17:57:54 +05:30
Arunavo Ray
dd87ddfa3d lib: sync repo topics and descriptions 2026-03-14 17:49:29 +05:30
ARUNAVO RAY
755647e29c scripts: add startup repair progress logs (#223) 2026-03-14 17:44:52 +05:30
dependabot[bot]
018c9d1a23 build(deps): bump devalue (#220) 2026-03-13 00:17:30 +05:30
Arunavo Ray
c89011819f chore: sync version to 3.12.5 2026-03-07 07:00:30 +05:30
ARUNAVO RAY
c00d48199b fix: gracefully handle SAML-protected orgs during GitHub import (#217) (#218) 2026-03-07 06:57:28 +05:30
ARUNAVO RAY
de28469210 nix: refresh bun deps and ci flake trust (#216) 2026-03-06 12:31:51 +05:30
github-actions[bot]
0e2f83fee0 chore: sync version to 3.12.4 2026-03-06 05:10:04 +00:00
10 changed files with 631 additions and 145 deletions

View File

@@ -25,6 +25,10 @@ jobs:
check:
runs-on: ubuntu-latest
timeout-minutes: 45
env:
NIX_CONFIG: |
accept-flake-config = true
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
@@ -36,11 +40,11 @@ jobs:
uses: DeterminateSystems/magic-nix-cache-action@main
- name: Check flake
run: nix flake check
run: nix flake check --accept-flake-config
- name: Show flake info
run: nix flake show
run: nix flake show --accept-flake-config
- name: Build package
if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')
run: nix build --print-build-logs
run: nix build --print-build-logs --accept-flake-config

30
bun.nix
View File

@@ -881,6 +881,10 @@
url = "https://registry.npmjs.org/@oslojs/encoding/-/encoding-1.1.0.tgz";
hash = "sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ==";
};
"@playwright/test@1.58.2" = fetchurl {
url = "https://registry.npmjs.org/@playwright/test/-/test-1.58.2.tgz";
hash = "sha512-akea+6bHYBBfA9uQqSYmlJXn61cTa+jbO87xVLCWbTqbWadRVmhxlXATaOjOgcBaWU4ePo0wB41KMFv3o35IXA==";
};
"@radix-ui/number@1.1.1" = fetchurl {
url = "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz";
hash = "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==";
@@ -1385,6 +1389,10 @@
url = "https://registry.npmjs.org/@types/node/-/node-22.15.23.tgz";
hash = "sha512-7Ec1zaFPF4RJ0eXu1YT/xgiebqwqoJz8rYPDi/O2BcZ++Wpt0Kq9cl0eg6NN6bYbPnR67ZLo7St5Q3UK0SnARw==";
};
"@types/node@25.3.2" = fetchurl {
url = "https://registry.npmjs.org/@types/node/-/node-25.3.2.tgz";
hash = "sha512-RpV6r/ij22zRRdyBPcxDeKAzH43phWVKEjL2iksqo1Vz3CuBUrgmPpPhALKiRfU7OMCmeeO9vECBMsV0hMTG8Q==";
};
"@types/react-dom@19.2.3" = fetchurl {
url = "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.3.tgz";
hash = "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==";
@@ -1565,9 +1573,9 @@
url = "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz";
hash = "sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==";
};
"astro@5.17.3" = fetchurl {
url = "https://registry.npmjs.org/astro/-/astro-5.17.3.tgz";
hash = "sha512-69dcfPe8LsHzklwj+hl+vunWUbpMB6pmg35mACjetxbJeUNNys90JaBM8ZiwsPK689SAj/4Zqb1ayaANls9/MA==";
"astro@5.18.0" = fetchurl {
url = "https://registry.npmjs.org/astro/-/astro-5.18.0.tgz";
hash = "sha512-CHiohwJIS4L0G6/IzE1Fx3dgWqXBCXus/od0eGUfxrZJD2um2pE7ehclMmgL/fXqbU7NfE1Ze2pq34h2QaA6iQ==";
};
"axobject-query@4.1.0" = fetchurl {
url = "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz";
@@ -2093,6 +2101,10 @@
url = "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz";
hash = "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==";
};
"fsevents@2.3.2" = fetchurl {
url = "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz";
hash = "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==";
};
"fsevents@2.3.3" = fetchurl {
url = "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz";
hash = "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==";
@@ -2913,6 +2925,14 @@
url = "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz";
hash = "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==";
};
"playwright-core@1.58.2" = fetchurl {
url = "https://registry.npmjs.org/playwright-core/-/playwright-core-1.58.2.tgz";
hash = "sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==";
};
"playwright@1.58.2" = fetchurl {
url = "https://registry.npmjs.org/playwright/-/playwright-1.58.2.tgz";
hash = "sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==";
};
"postcss@8.5.3" = fetchurl {
url = "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz";
hash = "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==";
@@ -3405,6 +3425,10 @@
url = "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz";
hash = "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==";
};
"undici-types@7.18.2" = fetchurl {
url = "https://registry.npmjs.org/undici-types/-/undici-types-7.18.2.tgz";
hash = "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==";
};
"undici@7.22.0" = fetchurl {
url = "https://registry.npmjs.org/undici/-/undici-7.22.0.tgz";
hash = "sha512-RqslV2Us5BrllB+JeiZnK4peryVTndy9Dnqq62S3yYRRTj0tFQCwEniUy2167skdGOy3vqRzEvl1Dm4sV2ReDg==";

View File

@@ -1,7 +1,7 @@
{
"name": "gitea-mirror",
"type": "module",
"version": "3.12.3",
"version": "3.12.5",
"engines": {
"bun": ">=1.2.9"
},

View File

@@ -15,33 +15,40 @@ import { repoStatusEnum } from "@/types/Repository";
const isDryRun = process.argv.includes("--dry-run");
const specificRepo = process.argv.find(arg => arg.startsWith("--repo-name="))?.split("=")[1];
const isStartupMode = process.argv.includes("--startup");
const requestTimeoutMs = parsePositiveInteger(process.env.GITEA_REPAIR_REQUEST_TIMEOUT_MS, 15000);
const progressInterval = parsePositiveInteger(process.env.GITEA_REPAIR_PROGRESS_INTERVAL, 100);
async function checkRepoInGitea(config: any, owner: string, repoName: string): Promise<boolean> {
try {
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
return false;
}
type GiteaLookupResult = {
exists: boolean;
details: any | null;
timedOut: boolean;
error: string | null;
};
const response = await fetch(
`${config.giteaConfig.url}/api/v1/repos/${owner}/${repoName}`,
{
headers: {
Authorization: `token ${config.giteaConfig.token}`,
},
}
);
return response.ok;
} catch (error) {
console.error(`Error checking repo ${owner}/${repoName} in Gitea:`, error);
return false;
/**
 * Parse an environment-variable style string into a positive integer.
 * Returns `fallback` when the value is missing, not a number, zero, or negative.
 */
function parsePositiveInteger(value: string | undefined, fallback: number): number {
  const candidate = Number.parseInt(value ?? "", 10);
  return Number.isFinite(candidate) && candidate > 0 ? candidate : fallback;
}
async function getRepoDetailsFromGitea(config: any, owner: string, repoName: string): Promise<any> {
/**
 * Detect whether a caught value represents a fetch timeout/abort.
 * Rejections from AbortSignal-based timeouts surface as errors named
 * "TimeoutError" or "AbortError".
 */
function isTimeoutError(error: unknown): boolean {
  return (
    error instanceof Error &&
    (error.name === "TimeoutError" || error.name === "AbortError")
  );
}
async function getRepoDetailsFromGitea(config: any, owner: string, repoName: string): Promise<GiteaLookupResult> {
try {
if (!config.giteaConfig?.url || !config.giteaConfig?.token) {
return null;
return {
exists: false,
details: null,
timedOut: false,
error: "Missing Gitea URL or token in config",
};
}
const response = await fetch(
@@ -50,16 +57,41 @@ async function getRepoDetailsFromGitea(config: any, owner: string, repoName: str
headers: {
Authorization: `token ${config.giteaConfig.token}`,
},
signal: AbortSignal.timeout(requestTimeoutMs),
}
);
if (response.ok) {
return await response.json();
return {
exists: true,
details: await response.json(),
timedOut: false,
error: null,
};
}
return null;
if (response.status === 404) {
return {
exists: false,
details: null,
timedOut: false,
error: null,
};
}
return {
exists: false,
details: null,
timedOut: false,
error: `Gitea API returned HTTP ${response.status}`,
};
} catch (error) {
console.error(`Error getting repo details for ${owner}/${repoName}:`, error);
return null;
return {
exists: false,
details: null,
timedOut: isTimeoutError(error),
error: error instanceof Error ? error.message : String(error),
};
}
}
@@ -99,6 +131,8 @@ async function repairMirroredRepositories() {
.from(repositories)
.where(whereConditions);
const totalRepos = repos.length;
if (repos.length === 0) {
if (!isStartupMode) {
console.log("✅ No repositories found that need repair");
@@ -109,13 +143,25 @@ async function repairMirroredRepositories() {
if (!isStartupMode) {
console.log(`📋 Found ${repos.length} repositories to check:`);
console.log("");
} else {
console.log(`Checking ${totalRepos} repositories for status inconsistencies...`);
console.log(`Request timeout: ${requestTimeoutMs}ms | Progress interval: every ${progressInterval} repositories`);
}
const startedAt = Date.now();
const configCache = new Map<string, any>();
let checkedCount = 0;
let repairedCount = 0;
let skippedCount = 0;
let errorCount = 0;
let timeoutCount = 0;
let giteaErrorCount = 0;
let giteaErrorSamples = 0;
let startupSkipWarningCount = 0;
for (const repo of repos) {
checkedCount++;
if (!isStartupMode) {
console.log(`🔍 Checking repository: ${repo.name}`);
console.log(` Current status: ${repo.status}`);
@@ -124,13 +170,29 @@ async function repairMirroredRepositories() {
try {
// Get user configuration
const config = await db
.select()
.from(configs)
.where(eq(configs.id, repo.configId))
.limit(1);
const configKey = String(repo.configId);
let userConfig = configCache.get(configKey);
if (config.length === 0) {
if (!userConfig) {
const config = await db
.select()
.from(configs)
.where(eq(configs.id, repo.configId))
.limit(1);
if (config.length === 0) {
if (!isStartupMode) {
console.log(` ❌ No configuration found for repository`);
}
errorCount++;
continue;
}
userConfig = config[0];
configCache.set(configKey, userConfig);
}
if (!userConfig) {
if (!isStartupMode) {
console.log(` ❌ No configuration found for repository`);
}
@@ -138,7 +200,6 @@ async function repairMirroredRepositories() {
continue;
}
const userConfig = config[0];
const giteaUsername = userConfig.giteaConfig?.defaultOwner;
if (!giteaUsername) {
@@ -153,25 +214,59 @@ async function repairMirroredRepositories() {
let existsInGitea = false;
let actualOwner = giteaUsername;
let giteaRepoDetails = null;
let repoRequestTimedOut = false;
let repoRequestErrored = false;
// First check user location
existsInGitea = await checkRepoInGitea(userConfig, giteaUsername, repo.name);
if (existsInGitea) {
giteaRepoDetails = await getRepoDetailsFromGitea(userConfig, giteaUsername, repo.name);
const userLookup = await getRepoDetailsFromGitea(userConfig, giteaUsername, repo.name);
existsInGitea = userLookup.exists;
giteaRepoDetails = userLookup.details;
if (userLookup.timedOut) {
timeoutCount++;
repoRequestTimedOut = true;
} else if (userLookup.error) {
giteaErrorCount++;
repoRequestErrored = true;
if (!isStartupMode || giteaErrorSamples < 3) {
console.log(` ⚠️ Gitea lookup issue for ${giteaUsername}/${repo.name}: ${userLookup.error}`);
giteaErrorSamples++;
}
}
// If not found in user location and repo has organization, check organization
if (!existsInGitea && repo.organization) {
existsInGitea = await checkRepoInGitea(userConfig, repo.organization, repo.name);
const orgLookup = await getRepoDetailsFromGitea(userConfig, repo.organization, repo.name);
existsInGitea = orgLookup.exists;
if (existsInGitea) {
actualOwner = repo.organization;
giteaRepoDetails = await getRepoDetailsFromGitea(userConfig, repo.organization, repo.name);
giteaRepoDetails = orgLookup.details;
}
if (orgLookup.timedOut) {
timeoutCount++;
repoRequestTimedOut = true;
} else if (orgLookup.error) {
giteaErrorCount++;
repoRequestErrored = true;
if (!isStartupMode || giteaErrorSamples < 3) {
console.log(` ⚠️ Gitea lookup issue for ${repo.organization}/${repo.name}: ${orgLookup.error}`);
giteaErrorSamples++;
}
}
}
if (!existsInGitea) {
if (!isStartupMode) {
console.log(` ⏭️ Repository not found in Gitea - skipping`);
} else if (repoRequestTimedOut || repoRequestErrored) {
if (startupSkipWarningCount < 3) {
console.log(` ⚠️ Skipping ${repo.name}; Gitea was slow/unreachable during lookup`);
startupSkipWarningCount++;
if (startupSkipWarningCount === 3) {
console.log(` Additional slow/unreachable lookup warnings suppressed; progress logs will continue`);
}
}
}
skippedCount++;
continue;
@@ -241,22 +336,43 @@ async function repairMirroredRepositories() {
if (!isStartupMode) {
console.log("");
} else if (checkedCount % progressInterval === 0 || checkedCount === totalRepos) {
const elapsedSeconds = Math.floor((Date.now() - startedAt) / 1000);
console.log(
`Repair progress: ${checkedCount}/${totalRepos} checked | repaired=${repairedCount}, skipped=${skippedCount}, errors=${errorCount}, timeouts=${timeoutCount} | elapsed=${elapsedSeconds}s`
);
}
}
if (isStartupMode) {
// In startup mode, only log if there were repairs or errors
const elapsedSeconds = Math.floor((Date.now() - startedAt) / 1000);
console.log(
`Repository repair summary: checked=${checkedCount}, repaired=${repairedCount}, skipped=${skippedCount}, errors=${errorCount}, timeouts=${timeoutCount}, elapsed=${elapsedSeconds}s`
);
if (repairedCount > 0) {
console.log(`Repaired ${repairedCount} repository status inconsistencies`);
}
if (errorCount > 0) {
console.log(`Warning: ${errorCount} repositories had errors during repair`);
}
if (timeoutCount > 0) {
console.log(
`Warning: ${timeoutCount} Gitea API requests timed out. Increase GITEA_REPAIR_REQUEST_TIMEOUT_MS if your Gitea instance is under heavy load.`
);
}
if (giteaErrorCount > 0) {
console.log(`Warning: ${giteaErrorCount} Gitea API requests failed with non-timeout errors.`);
}
} else {
console.log("📊 Repair Summary:");
console.log(` Checked: ${checkedCount}`);
console.log(` Repaired: ${repairedCount}`);
console.log(` Skipped: ${skippedCount}`);
console.log(` Errors: ${errorCount}`);
console.log(` Timeouts: ${timeoutCount}`);
if (giteaErrorCount > 0) {
console.log(` Gitea API Errors: ${giteaErrorCount}`);
}
if (isDryRun && repairedCount > 0) {
console.log("");

View File

@@ -124,19 +124,31 @@ export function ConfigTabs() {
if (!user?.id) return;
setIsSyncing(true);
try {
const result = await apiRequest<{ success: boolean; message?: string }>(
const result = await apiRequest<{ success: boolean; message?: string; failedOrgs?: string[]; recoveredOrgs?: number }>(
`/sync?userId=${user.id}`,
{ method: 'POST' },
);
result.success
? toast.success(
'GitHub data imported successfully! Head to the Repositories page to start mirroring.',
)
: toast.error(
`Failed to import GitHub data: ${
result.message || 'Unknown error'
}`,
if (result.success) {
toast.success(
'GitHub data imported successfully! Head to the Repositories page to start mirroring.',
);
if (result.failedOrgs && result.failedOrgs.length > 0) {
toast.warning(
`${result.failedOrgs.length} org${result.failedOrgs.length > 1 ? 's' : ''} failed to import (${result.failedOrgs.join(', ')}). Check the Organizations tab for details.`,
);
}
if (result.recoveredOrgs && result.recoveredOrgs > 0) {
toast.success(
`${result.recoveredOrgs} previously failed org${result.recoveredOrgs > 1 ? 's' : ''} recovered successfully.`,
);
}
} else {
toast.error(
`Failed to import GitHub data: ${
result.message || 'Unknown error'
}`,
);
}
} catch (error) {
toast.error(
`Error importing GitHub data: ${

View File

@@ -248,6 +248,11 @@ export function OrganizationList({
</div>
</div>
{/* Error message for failed orgs */}
{org.status === "failed" && org.errorMessage && (
<p className="text-xs text-destructive line-clamp-2">{org.errorMessage}</p>
)}
{/* Destination override section */}
<div>
<MirrorDestinationEditor
@@ -304,6 +309,13 @@ export function OrganizationList({
/>
</div>
{/* Error message for failed orgs */}
{org.status === "failed" && org.errorMessage && (
<div className="mb-4 p-3 rounded-md bg-destructive/10 border border-destructive/20">
<p className="text-sm text-destructive">{org.errorMessage}</p>
</div>
)}
{/* Repository statistics */}
<div className="mb-4">
<div className="flex items-center gap-4 text-sm">
@@ -313,7 +325,7 @@ export function OrganizationList({
{org.repositoryCount === 1 ? "repository" : "repositories"}
</span>
</div>
{/* Repository breakdown - only show non-zero counts */}
{(() => {
const counts = [];
@@ -326,7 +338,7 @@ export function OrganizationList({
if (org.forkRepositoryCount && org.forkRepositoryCount > 0) {
counts.push(`${org.forkRepositoryCount} ${org.forkRepositoryCount === 1 ? 'fork' : 'forks'}`);
}
return counts.length > 0 ? (
<div className="flex items-center gap-3 text-xs text-muted-foreground">
{counts.map((count, index) => (
@@ -415,7 +427,7 @@ export function OrganizationList({
)}
</>
)}
{/* Dropdown menu for additional actions */}
{org.status !== "mirroring" && (
<DropdownMenu>
@@ -426,7 +438,7 @@ export function OrganizationList({
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
{org.status !== "ignored" && (
<DropdownMenuItem
<DropdownMenuItem
onClick={() => org.id && onIgnore && onIgnore({ orgId: org.id, ignore: true })}
>
<Ban className="h-4 w-4 mr-2" />
@@ -449,7 +461,7 @@ export function OrganizationList({
</DropdownMenu>
)}
</div>
<div className="flex items-center gap-2 justify-center">
{(() => {
const giteaUrl = getGiteaOrgUrl(org);

View File

@@ -374,6 +374,161 @@ export const checkRepoLocation = async ({
return { present: false, actualOwner: expectedOwner };
};
/**
 * Normalize a single topic into Gitea's accepted format: lowercase
 * alphanumerics separated by single hyphens, with no leading or
 * trailing hyphens.
 */
const sanitizeTopicForGitea = (topic: string): string => {
  const lowered = topic.trim().toLowerCase();
  const hyphenated = lowered
    .replace(/[^a-z0-9-]+/g, "-")
    .replace(/-{2,}/g, "-");
  return hyphenated.replace(/^-+|-+$/g, "");
};
/**
 * Sanitize a topic list for Gitea. Each topic is cleaned via
 * sanitizeTopicForGitea, optionally prefixed with a sanitized
 * `topicPrefix` (joined by a hyphen), empty results are dropped,
 * and duplicates are removed while preserving first-seen order.
 */
const normalizeTopicsForGitea = (
  topics: string[],
  topicPrefix?: string
): string[] => {
  const prefix = topicPrefix ? sanitizeTopicForGitea(topicPrefix) : "";
  const seen = new Set<string>();
  for (const raw of topics) {
    const clean = sanitizeTopicForGitea(raw);
    if (clean.length === 0) {
      continue;
    }
    seen.add(prefix ? `${prefix}-${clean}` : clean);
  }
  return [...seen];
};
/**
 * Derive the upstream owner/repo pair for a repository.
 * Prefers splitting `fullName` at the first "/"; falls back to the
 * stored `owner`/`name` fields when `fullName` lacks a usable
 * separator (no "/", or a "/" at either edge).
 */
const getSourceRepositoryCoordinates = (repository: Repository) => {
  const slash = repository.fullName.indexOf("/");
  const hasBothParts = slash > 0 && slash < repository.fullName.length - 1;
  if (!hasBothParts) {
    return { owner: repository.owner, repo: repository.name };
  }
  return {
    owner: repository.fullName.slice(0, slash),
    repo: repository.fullName.slice(slash + 1),
  };
};
/**
 * Fetch the topic list for a repository from the GitHub API.
 * Returns null (after logging a warning) on request failure or an
 * unexpected response shape, so callers can skip topic sync rather
 * than fail the mirror.
 */
const fetchGitHubTopics = async ({
  octokit,
  repository,
}: {
  octokit: Octokit;
  repository: Repository;
}): Promise<string[] | null> => {
  const { owner, repo } = getSourceRepositoryCoordinates(repository);
  try {
    const response = await octokit.request("GET /repos/{owner}/{repo}/topics", {
      owner,
      repo,
      headers: { Accept: "application/vnd.github+json" },
    });
    const names = (response.data as { names?: unknown }).names;
    if (Array.isArray(names)) {
      // Defensive: keep only string entries from the payload.
      return names.filter((topic): topic is string => typeof topic === "string");
    }
    console.warn(
      `[Metadata] Unexpected topics payload for ${repository.fullName}; skipping topic sync.`
    );
    return null;
  } catch (error) {
    console.warn(
      `[Metadata] Failed to fetch topics from GitHub for ${repository.fullName}: ${
        error instanceof Error ? error.message : String(error)
      }`
    );
    return null;
  }
};
/**
 * Push a repository's description and (optionally) topics from GitHub to
 * its Gitea mirror. Each step is best-effort: failures are logged as
 * warnings and never abort the caller's mirror flow. Topic sync is
 * skipped when `config.giteaConfig.addTopics` is explicitly false or
 * when the GitHub topics could not be fetched.
 */
const syncRepositoryMetadataToGitea = async ({
  config,
  octokit,
  repository,
  giteaOwner,
  giteaRepoName,
  giteaToken,
}: {
  config: Partial<Config>;
  octokit: Octokit;
  repository: Repository;
  giteaOwner: string;
  giteaRepoName: string;
  giteaToken: string;
}): Promise<void> => {
  const giteaBaseUrl = config.giteaConfig?.url;
  if (!giteaBaseUrl) {
    // Without a Gitea base URL there is nothing to sync against.
    return;
  }

  const repoApiUrl = `${giteaBaseUrl}/api/v1/repos/${giteaOwner}/${giteaRepoName}`;
  const authHeaders = {
    Authorization: `token ${giteaToken}`,
  };
  const describeError = (error: unknown): string =>
    error instanceof Error ? error.message : String(error);

  // Description sync is independent of topic sync; a failure here does
  // not prevent the topic update below.
  const description = repository.description?.trim() || "";
  try {
    await httpPatch(repoApiUrl, { description }, authHeaders);
    console.log(
      `[Metadata] Synced description for ${repository.fullName} to ${giteaOwner}/${giteaRepoName}`
    );
  } catch (error) {
    console.warn(
      `[Metadata] Failed to sync description for ${repository.fullName} to ${giteaOwner}/${giteaRepoName}: ${describeError(error)}`
    );
  }

  if (config.giteaConfig?.addTopics === false) {
    return;
  }

  const sourceTopics = await fetchGitHubTopics({ octokit, repository });
  if (sourceTopics === null) {
    console.warn(
      `[Metadata] Skipping topic sync for ${repository.fullName} because GitHub topics could not be fetched.`
    );
    return;
  }

  const topics = normalizeTopicsForGitea(
    sourceTopics,
    config.giteaConfig?.topicPrefix
  );
  try {
    await httpPut(`${repoApiUrl}/topics`, { topics }, authHeaders);
    console.log(
      `[Metadata] Synced ${topics.length} topic(s) for ${repository.fullName} to ${giteaOwner}/${giteaRepoName}`
    );
  } catch (error) {
    console.warn(
      `[Metadata] Failed to sync topics for ${repository.fullName} to ${giteaOwner}/${giteaRepoName}: ${describeError(error)}`
    );
  }
};
export const mirrorGithubRepoToGitea = async ({
octokit,
repository,
@@ -465,36 +620,66 @@ export const mirrorGithubRepoToGitea = async ({
});
if (isExisting) {
console.log(
`Repository ${targetRepoName} already exists in Gitea under ${repoOwner}. Updating database status.`
);
// Update database to reflect that the repository is already mirrored
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("mirrored"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${repoOwner}/${targetRepoName}`,
})
.where(eq(repositories.id, repository.id!));
// Append log for "mirrored" status
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Repository ${repository.name} already exists in Gitea`,
details: `Repository ${repository.name} was found to already exist in Gitea under ${repoOwner} and database status was updated.`,
status: "mirrored",
const { getGiteaRepoInfo, handleExistingNonMirrorRepo } = await import("./gitea-enhanced");
const existingRepoInfo = await getGiteaRepoInfo({
config,
owner: repoOwner,
repoName: targetRepoName,
});
console.log(
`Repository ${repository.name} database status updated to mirrored`
);
return;
if (existingRepoInfo && !existingRepoInfo.mirror) {
console.log(`Repository ${targetRepoName} exists but is not a mirror. Handling...`);
await handleExistingNonMirrorRepo({
config,
repository,
repoInfo: existingRepoInfo,
strategy: "delete", // Can be configured: "skip", "delete", or "rename"
});
} else if (existingRepoInfo?.mirror) {
console.log(
`Repository ${targetRepoName} already exists in Gitea under ${repoOwner}. Updating database status.`
);
await syncRepositoryMetadataToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: targetRepoName,
giteaToken: decryptedConfig.giteaConfig.token,
});
// Update database to reflect that the repository is already mirrored
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("mirrored"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${repoOwner}/${targetRepoName}`,
})
.where(eq(repositories.id, repository.id!));
// Append log for "mirrored" status
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Repository ${repository.name} already exists in Gitea`,
details: `Repository ${repository.name} was found to already exist in Gitea under ${repoOwner} and database status was updated.`,
status: "mirrored",
});
console.log(
`Repository ${repository.name} database status updated to mirrored`
);
return;
} else {
console.warn(
`[Mirror] Repository ${repoOwner}/${targetRepoName} exists but mirror status could not be verified. Continuing with mirror creation flow.`
);
}
}
console.log(`Mirroring repository ${repository.name}`);
@@ -648,6 +833,15 @@ export const mirrorGithubRepoToGitea = async ({
}
);
await syncRepositoryMetadataToGitea({
config,
octokit,
repository,
giteaOwner: repoOwner,
giteaRepoName: targetRepoName,
giteaToken: decryptedConfig.giteaConfig.token,
});
const metadataState = parseRepositoryMetadataState(repository.metadata);
let metadataUpdated = false;
const skipMetadataForStarred =
@@ -1094,36 +1288,66 @@ export async function mirrorGitHubRepoToGiteaOrg({
});
if (isExisting) {
console.log(
`Repository ${targetRepoName} already exists in Gitea organization ${orgName}. Updating database status.`
);
// Update database to reflect that the repository is already mirrored
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("mirrored"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${orgName}/${targetRepoName}`,
})
.where(eq(repositories.id, repository.id!));
// Create a mirror job log entry
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Repository ${targetRepoName} already exists in Gitea organization ${orgName}`,
details: `Repository ${targetRepoName} was found to already exist in Gitea organization ${orgName} and database status was updated.`,
status: "mirrored",
const { getGiteaRepoInfo, handleExistingNonMirrorRepo } = await import("./gitea-enhanced");
const existingRepoInfo = await getGiteaRepoInfo({
config,
owner: orgName,
repoName: targetRepoName,
});
console.log(
`Repository ${targetRepoName} database status updated to mirrored in organization ${orgName}`
);
return;
if (existingRepoInfo && !existingRepoInfo.mirror) {
console.log(`Repository ${targetRepoName} exists but is not a mirror. Handling...`);
await handleExistingNonMirrorRepo({
config,
repository,
repoInfo: existingRepoInfo,
strategy: "delete", // Can be configured: "skip", "delete", or "rename"
});
} else if (existingRepoInfo?.mirror) {
console.log(
`Repository ${targetRepoName} already exists in Gitea organization ${orgName}. Updating database status.`
);
await syncRepositoryMetadataToGitea({
config,
octokit,
repository,
giteaOwner: orgName,
giteaRepoName: targetRepoName,
giteaToken: decryptedConfig.giteaConfig.token,
});
// Update database to reflect that the repository is already mirrored
await db
.update(repositories)
.set({
status: repoStatusEnum.parse("mirrored"),
updatedAt: new Date(),
lastMirrored: new Date(),
errorMessage: null,
mirroredLocation: `${orgName}/${targetRepoName}`,
})
.where(eq(repositories.id, repository.id!));
// Create a mirror job log entry
await createMirrorJob({
userId: config.userId,
repositoryId: repository.id,
repositoryName: repository.name,
message: `Repository ${targetRepoName} already exists in Gitea organization ${orgName}`,
details: `Repository ${targetRepoName} was found to already exist in Gitea organization ${orgName} and database status was updated.`,
status: "mirrored",
});
console.log(
`Repository ${targetRepoName} database status updated to mirrored in organization ${orgName}`
);
return;
} else {
console.warn(
`[Mirror] Repository ${orgName}/${targetRepoName} exists but mirror status could not be verified. Continuing with mirror creation flow.`
);
}
}
console.log(
@@ -1182,6 +1406,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
wiki: shouldMirrorWiki || false,
lfs: config.giteaConfig?.lfs || false,
private: repository.isPrivate,
description: repository.description?.trim() || "",
};
// Add authentication for private repositories
@@ -1204,6 +1429,15 @@ export async function mirrorGitHubRepoToGiteaOrg({
}
);
await syncRepositoryMetadataToGitea({
config,
octokit,
repository,
giteaOwner: orgName,
giteaRepoName: targetRepoName,
giteaToken: decryptedConfig.giteaConfig.token,
});
const metadataState = parseRepositoryMetadataState(repository.metadata);
let metadataUpdated = false;
const skipMetadataForStarred =

View File

@@ -369,7 +369,7 @@ export async function getGithubOrganizations({
}: {
octokit: Octokit;
config: Partial<Config>;
}): Promise<GitOrg[]> {
}): Promise<{ organizations: GitOrg[]; failedOrgs: { name: string; avatarUrl: string; reason: string }[] }> {
try {
const { data: orgs } = await octokit.orgs.listForAuthenticatedUser({
per_page: 100,
@@ -392,30 +392,47 @@ export async function getGithubOrganizations({
return true;
});
const organizations = await Promise.all(
const failedOrgs: { name: string; avatarUrl: string; reason: string }[] = [];
const results = await Promise.all(
filteredOrgs.map(async (org) => {
const [{ data: orgDetails }, { data: membership }] = await Promise.all([
octokit.orgs.get({ org: org.login }),
octokit.orgs.getMembershipForAuthenticatedUser({ org: org.login }),
]);
try {
const [{ data: orgDetails }, { data: membership }] = await Promise.all([
octokit.orgs.get({ org: org.login }),
octokit.orgs.getMembershipForAuthenticatedUser({ org: org.login }),
]);
const totalRepos =
orgDetails.public_repos + (orgDetails.total_private_repos ?? 0);
const totalRepos =
orgDetails.public_repos + (orgDetails.total_private_repos ?? 0);
return {
name: org.login,
avatarUrl: org.avatar_url,
membershipRole: membership.role as MembershipRole,
isIncluded: false,
status: "imported" as RepoStatus,
repositoryCount: totalRepos,
createdAt: new Date(),
updatedAt: new Date(),
};
return {
name: org.login,
avatarUrl: org.avatar_url,
membershipRole: membership.role as MembershipRole,
isIncluded: false,
status: "imported" as RepoStatus,
repositoryCount: totalRepos,
createdAt: new Date(),
updatedAt: new Date(),
};
} catch (error: any) {
// Capture organizations that return 403 (SAML enforcement, insufficient token scope, etc.)
if (error?.status === 403) {
const reason = error?.message || "access denied";
console.warn(
`Failed to import organization ${org.login} - ${reason}`,
);
failedOrgs.push({ name: org.login, avatarUrl: org.avatar_url, reason });
return null;
}
throw error;
}
}),
);
return organizations;
return {
organizations: results.filter((org): org is NonNullable<typeof org> => org !== null),
failedOrgs,
};
} catch (error) {
throw new Error(
`Error fetching organizations: ${

View File

@@ -1,6 +1,6 @@
import type { APIRoute } from "astro";
import { db, organizations, repositories, configs } from "@/lib/db";
import { eq } from "drizzle-orm";
import { eq, and } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import { createMirrorJob } from "@/lib/helpers";
import {
@@ -47,13 +47,14 @@ export const POST: APIRoute = async ({ request, locals }) => {
const octokit = createGitHubClient(decryptedToken, userId, githubUsername);
// Fetch GitHub data in parallel
const [basicAndForkedRepos, starredRepos, gitOrgs] = await Promise.all([
const [basicAndForkedRepos, starredRepos, orgResult] = await Promise.all([
getGithubRepositories({ octokit, config }),
config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
getGithubOrganizations({ octokit, config }),
]);
const { organizations: gitOrgs, failedOrgs } = orgResult;
// Merge and de-duplicate by fullName, preferring starred variant when duplicated
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
@@ -108,8 +109,27 @@ export const POST: APIRoute = async ({ request, locals }) => {
updatedAt: new Date(),
}));
// Prepare failed org records for DB insertion
const failedOrgRecords = failedOrgs.map((org) => ({
id: uuidv4(),
userId,
configId: config.id,
name: org.name,
normalizedName: org.name.toLowerCase(),
avatarUrl: org.avatarUrl,
membershipRole: "member" as const,
isIncluded: false,
status: "failed" as const,
errorMessage: org.reason,
repositoryCount: 0,
createdAt: new Date(),
updatedAt: new Date(),
}));
let insertedRepos: typeof newRepos = [];
let insertedOrgs: typeof newOrgs = [];
let insertedFailedOrgs: typeof failedOrgRecords = [];
let recoveredOrgCount = 0;
// Transaction to insert only new items
await db.transaction(async (tx) => {
@@ -119,18 +139,62 @@ export const POST: APIRoute = async ({ request, locals }) => {
.from(repositories)
.where(eq(repositories.userId, userId)),
tx
.select({ normalizedName: organizations.normalizedName })
.select({ normalizedName: organizations.normalizedName, status: organizations.status })
.from(organizations)
.where(eq(organizations.userId, userId)),
]);
const existingRepoNames = new Set(existingRepos.map((r) => r.normalizedFullName));
const existingOrgNames = new Set(existingOrgs.map((o) => o.normalizedName));
const existingOrgMap = new Map(existingOrgs.map((o) => [o.normalizedName, o.status]));
insertedRepos = newRepos.filter(
(r) => !existingRepoNames.has(r.normalizedFullName)
);
insertedOrgs = newOrgs.filter((o) => !existingOrgNames.has(o.normalizedName));
insertedOrgs = newOrgs.filter((o) => !existingOrgMap.has(o.normalizedName));
// Update previously failed orgs that now succeeded
const recoveredOrgs = newOrgs.filter(
(o) => existingOrgMap.get(o.normalizedName) === "failed"
);
for (const org of recoveredOrgs) {
await tx
.update(organizations)
.set({
status: "imported",
errorMessage: null,
repositoryCount: org.repositoryCount,
avatarUrl: org.avatarUrl,
membershipRole: org.membershipRole,
updatedAt: new Date(),
})
.where(
and(
eq(organizations.userId, userId),
eq(organizations.normalizedName, org.normalizedName),
)
);
}
recoveredOrgCount = recoveredOrgs.length;
// Insert or update failed orgs (only update orgs already in "failed" state — don't overwrite good state)
insertedFailedOrgs = failedOrgRecords.filter((o) => !existingOrgMap.has(o.normalizedName));
const stillFailedOrgs = failedOrgRecords.filter(
(o) => existingOrgMap.get(o.normalizedName) === "failed"
);
for (const org of stillFailedOrgs) {
await tx
.update(organizations)
.set({
errorMessage: org.errorMessage,
updatedAt: new Date(),
})
.where(
and(
eq(organizations.userId, userId),
eq(organizations.normalizedName, org.normalizedName),
)
);
}
// Batch insert repositories to avoid SQLite parameter limit (dynamic by column count)
const sample = newRepos[0];
@@ -148,9 +212,10 @@ export const POST: APIRoute = async ({ request, locals }) => {
// Batch insert organizations (they have fewer fields, so we can use larger batches)
const ORG_BATCH_SIZE = 100;
if (insertedOrgs.length > 0) {
for (let i = 0; i < insertedOrgs.length; i += ORG_BATCH_SIZE) {
const batch = insertedOrgs.slice(i, i + ORG_BATCH_SIZE);
const allNewOrgs = [...insertedOrgs, ...insertedFailedOrgs];
if (allNewOrgs.length > 0) {
for (let i = 0; i < allNewOrgs.length; i += ORG_BATCH_SIZE) {
const batch = allNewOrgs.slice(i, i + ORG_BATCH_SIZE);
await tx.insert(organizations).values(batch);
}
}
@@ -189,6 +254,8 @@ export const POST: APIRoute = async ({ request, locals }) => {
newRepositories: insertedRepos.length,
newOrganizations: insertedOrgs.length,
skippedDisabledRepositories: allGithubRepos.length - mirrorableGithubRepos.length,
failedOrgs: failedOrgs.map((o) => o.name),
recoveredOrgs: recoveredOrgCount,
},
});
} catch (error) {

8
www/pnpm-lock.yaml generated
View File

@@ -1216,8 +1216,8 @@ packages:
resolution: {integrity: sha512-KxektNH63SrbfUyDiwXqRb1rLwKt33AmMv+5Nhsw1kqZ13SJBRTgZHtGbE+hH3a1mVW1cz+4pqSWVPAtLVXTzQ==}
engines: {node: '>=18'}
devalue@5.6.3:
resolution: {integrity: sha512-nc7XjUU/2Lb+SvEFVGcWLiKkzfw8+qHI7zn8WYXKkLMgfGSHbgCEaR6bJpev8Cm6Rmrb19Gfd/tZvGqx9is3wg==}
devalue@5.6.4:
resolution: {integrity: sha512-Gp6rDldRsFh/7XuouDbxMH3Mx8GMCcgzIb1pDTvNyn8pZGQ22u+Wa+lGV9dQCltFQ7uVw0MhRyb8XDskNFOReA==}
devlop@1.1.0:
resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==}
@@ -3157,7 +3157,7 @@ snapshots:
cssesc: 3.0.0
debug: 4.4.3
deterministic-object-hash: 2.0.2
devalue: 5.6.3
devalue: 5.6.4
diff: 8.0.3
dlv: 1.1.3
dset: 3.1.4
@@ -3368,7 +3368,7 @@ snapshots:
dependencies:
base-64: 1.0.0
devalue@5.6.3: {}
devalue@5.6.4: {}
devlop@1.1.0:
dependencies: