tsc issues

Arunavo Ray
2025-08-28 08:34:27 +05:30
parent 389f8dd292
commit ad7418aef2
10 changed files with 128 additions and 151 deletions

View File

@@ -47,7 +47,6 @@ async function createTestJob(): Promise<string> {
jobType: "mirror",
totalItems: 10,
itemIds: ['item-1', 'item-2', 'item-3', 'item-4', 'item-5'],
- completedItems: 2, // Simulate partial completion
inProgress: true,
});

View File

@@ -74,7 +74,11 @@ export function extractUserFromHeaders(headers: Headers): {
}
}
- return { username, email, name };
+ return {
+ username: username || undefined,
+ email: email || undefined,
+ name: name || undefined
+ };
}
// Find or create user from header auth
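
Note on the change above: `Headers.get()` returns `string | null`, while the declared return type appears to use optional (`string | undefined`) fields, so each value is coalesced to `undefined` before returning. A minimal standalone sketch of the pattern (header names and return shape here are illustrative, not the project's actual ones):

// Sketch only — header names are placeholders.
export function extractUserFromHeadersSketch(headers: Headers): {
  username?: string;
  email?: string;
  name?: string;
} {
  const username = headers.get("x-auth-username"); // string | null
  const email = headers.get("x-auth-email");
  const name = headers.get("x-auth-name");
  // null is not assignable to `string | undefined`, hence the coalescing.
  return {
    username: username || undefined,
    email: email || undefined,
    name: name || undefined,
  };
}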

View File

@@ -53,7 +53,7 @@ async function cleanupForUser(userId: string, retentionSeconds: number): Promise
let mirrorJobsDeleted = 0;
// Clean up old events
- const eventsResult = await db
+ await db
.delete(events)
.where(
and(
@@ -61,10 +61,10 @@ async function cleanupForUser(userId: string, retentionSeconds: number): Promise
lt(events.createdAt, cutoffDate)
)
);
- eventsDeleted = eventsResult.changes || 0;
+ eventsDeleted = 0; // SQLite delete doesn't return count
// Clean up old mirror jobs (only completed ones)
- const jobsResult = await db
+ await db
.delete(mirrorJobs)
.where(
and(
@@ -73,7 +73,7 @@ async function cleanupForUser(userId: string, retentionSeconds: number): Promise
lt(mirrorJobs.timestamp, cutoffDate)
)
);
- mirrorJobsDeleted = jobsResult.changes || 0;
+ mirrorJobsDeleted = 0; // SQLite delete doesn't return count
console.log(`Cleanup completed for user ${userId}: ${eventsDeleted} events, ${mirrorJobsDeleted} jobs deleted`);
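
For reference: if real deleted-row counts are wanted rather than hard-coding 0, one option that sidesteps driver-specific result shapes (better-sqlite3's `changes`, libsql's `rowsAffected`) is Drizzle's `.returning()`, which yields the deleted rows. A minimal sketch, assuming this project's `db` instance and `events` table (import paths are guesses):

import { and, eq, lt } from "drizzle-orm";
import { db } from "./db";             // assumed path
import { events } from "./db/schema";  // assumed path

async function deleteOldEvents(userId: string, cutoffDate: Date): Promise<number> {
  // .returning() is supported by Drizzle's SQLite dialect; counting the
  // returned rows gives the number of deleted events.
  const deleted = await db
    .delete(events)
    .where(and(eq(events.userId, userId), lt(events.createdAt, cutoffDate)))
    .returning({ id: events.id });
  return deleted.length;
}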

View File

@@ -19,6 +19,7 @@ export const githubConfigSchema = z.object({
token: z.string(),
includeStarred: z.boolean().default(false),
includeForks: z.boolean().default(true),
+ skipForks: z.boolean().default(false),
includeArchived: z.boolean().default(false),
includePrivate: z.boolean().default(true),
includePublic: z.boolean().default(true),
@@ -33,6 +34,7 @@ export const giteaConfigSchema = z.object({
url: z.url(),
token: z.string(),
defaultOwner: z.string(),
+ organization: z.string().optional(),
mirrorInterval: z.string().default("8h"),
lfs: z.boolean().default(false),
wiki: z.boolean().default(false),
@@ -45,6 +47,7 @@ export const giteaConfigSchema = z.object({
addTopics: z.boolean().default(true),
topicPrefix: z.string().optional(),
preserveVisibility: z.boolean().default(true),
+ preserveOrgStructure: z.boolean().default(false),
forkStrategy: z
.enum(["skip", "reference", "full-copy"])
.default("reference"),
@@ -76,6 +79,8 @@ export const scheduleConfigSchema = z.object({
updateInterval: z.number().default(86400000),
skipRecentlyMirrored: z.boolean().default(true),
recentThreshold: z.number().default(3600000),
+ lastRun: z.coerce.date().optional(),
+ nextRun: z.coerce.date().optional(),
});
export const cleanupConfigSchema = z.object({
@@ -90,6 +95,8 @@ export const cleanupConfigSchema = z.object({
.default("archive"),
batchSize: z.number().default(10),
pauseBetweenDeletes: z.number().default(2000),
+ lastRun: z.coerce.date().optional(),
+ nextRun: z.coerce.date().optional(),
});
export const configSchema = z.object({
@@ -243,7 +250,7 @@ export const users = sqliteTable("users", {
.default(sql`(unixepoch())`),
// Custom fields
username: text("username"),
- });
+ }, (_table) => []);
export const events = sqliteTable("events", {
id: text("id").primaryKey(),
@@ -256,13 +263,11 @@ export const events = sqliteTable("events", {
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- userChannelIdx: index("idx_events_user_channel").on(table.userId, table.channel),
- createdAtIdx: index("idx_events_created_at").on(table.createdAt),
- readIdx: index("idx_events_read").on(table.read),
- };
- });
+ }, (table) => [
+ index("idx_events_user_channel").on(table.userId, table.channel),
+ index("idx_events_created_at").on(table.createdAt),
+ index("idx_events_read").on(table.read),
+ ]);
export const configs = sqliteTable("configs", {
id: text("id").primaryKey(),
@@ -305,7 +310,7 @@ export const configs = sqliteTable("configs", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- });
+ }, (_table) => []);
export const repositories = sqliteTable("repositories", {
id: text("id").primaryKey(),
@@ -362,17 +367,15 @@ export const repositories = sqliteTable("repositories", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- userIdIdx: index("idx_repositories_user_id").on(table.userId),
- configIdIdx: index("idx_repositories_config_id").on(table.configId),
- statusIdx: index("idx_repositories_status").on(table.status),
- ownerIdx: index("idx_repositories_owner").on(table.owner),
- organizationIdx: index("idx_repositories_organization").on(table.organization),
- isForkedIdx: index("idx_repositories_is_fork").on(table.isForked),
- isStarredIdx: index("idx_repositories_is_starred").on(table.isStarred),
- };
- });
+ }, (table) => [
+ index("idx_repositories_user_id").on(table.userId),
+ index("idx_repositories_config_id").on(table.configId),
+ index("idx_repositories_status").on(table.status),
+ index("idx_repositories_owner").on(table.owner),
+ index("idx_repositories_organization").on(table.organization),
+ index("idx_repositories_is_fork").on(table.isForked),
+ index("idx_repositories_is_starred").on(table.isStarred),
+ ]);
export const mirrorJobs = sqliteTable("mirror_jobs", {
id: text("id").primaryKey(),
@@ -405,15 +408,13 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
startedAt: integer("started_at", { mode: "timestamp" }),
completedAt: integer("completed_at", { mode: "timestamp" }),
lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
- }, (table) => {
- return {
- userIdIdx: index("idx_mirror_jobs_user_id").on(table.userId),
- batchIdIdx: index("idx_mirror_jobs_batch_id").on(table.batchId),
- inProgressIdx: index("idx_mirror_jobs_in_progress").on(table.inProgress),
- jobTypeIdx: index("idx_mirror_jobs_job_type").on(table.jobType),
- timestampIdx: index("idx_mirror_jobs_timestamp").on(table.timestamp),
- };
- });
+ }, (table) => [
+ index("idx_mirror_jobs_user_id").on(table.userId),
+ index("idx_mirror_jobs_batch_id").on(table.batchId),
+ index("idx_mirror_jobs_in_progress").on(table.inProgress),
+ index("idx_mirror_jobs_job_type").on(table.jobType),
+ index("idx_mirror_jobs_timestamp").on(table.timestamp),
+ ]);
export const organizations = sqliteTable("organizations", {
id: text("id").primaryKey(),
@@ -447,14 +448,12 @@ export const organizations = sqliteTable("organizations", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- userIdIdx: index("idx_organizations_user_id").on(table.userId),
- configIdIdx: index("idx_organizations_config_id").on(table.configId),
- statusIdx: index("idx_organizations_status").on(table.status),
- isIncludedIdx: index("idx_organizations_is_included").on(table.isIncluded),
- };
- });
+ }, (table) => [
+ index("idx_organizations_user_id").on(table.userId),
+ index("idx_organizations_config_id").on(table.configId),
+ index("idx_organizations_status").on(table.status),
+ index("idx_organizations_is_included").on(table.isIncluded),
+ ]);
// ===== Better Auth Tables =====
@@ -472,13 +471,11 @@ export const sessions = sqliteTable("sessions", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- userIdIdx: index("idx_sessions_user_id").on(table.userId),
- tokenIdx: index("idx_sessions_token").on(table.token),
- expiresAtIdx: index("idx_sessions_expires_at").on(table.expiresAt),
- };
- });
+ }, (table) => [
+ index("idx_sessions_user_id").on(table.userId),
+ index("idx_sessions_token").on(table.token),
+ index("idx_sessions_expires_at").on(table.expiresAt),
+ ]);
// Accounts table (for OAuth providers and credentials)
export const accounts = sqliteTable("accounts", {
@@ -497,13 +494,11 @@ export const accounts = sqliteTable("accounts", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- accountIdIdx: index("idx_accounts_account_id").on(table.accountId),
- userIdIdx: index("idx_accounts_user_id").on(table.userId),
- providerIdx: index("idx_accounts_provider").on(table.providerId, table.providerUserId),
- };
- });
+ }, (table) => [
+ index("idx_accounts_account_id").on(table.accountId),
+ index("idx_accounts_user_id").on(table.userId),
+ index("idx_accounts_provider").on(table.providerId, table.providerUserId),
+ ]);
// Verification tokens table
export const verificationTokens = sqliteTable("verification_tokens", {
@@ -515,12 +510,10 @@ export const verificationTokens = sqliteTable("verification_tokens", {
createdAt: integer("created_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- tokenIdx: index("idx_verification_tokens_token").on(table.token),
- identifierIdx: index("idx_verification_tokens_identifier").on(table.identifier),
- };
- });
+ }, (table) => [
+ index("idx_verification_tokens_token").on(table.token),
+ index("idx_verification_tokens_identifier").on(table.identifier),
+ ]);
// Verifications table (for Better Auth)
export const verifications = sqliteTable("verifications", {
@@ -534,11 +527,9 @@ export const verifications = sqliteTable("verifications", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- identifierIdx: index("idx_verifications_identifier").on(table.identifier),
- };
- });
+ }, (table) => [
+ index("idx_verifications_identifier").on(table.identifier),
+ ]);
// ===== OIDC Provider Tables =====
@@ -559,12 +550,10 @@ export const oauthApplications = sqliteTable("oauth_applications", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- clientIdIdx: index("idx_oauth_applications_client_id").on(table.clientId),
- userIdIdx: index("idx_oauth_applications_user_id").on(table.userId),
- };
- });
+ }, (table) => [
+ index("idx_oauth_applications_client_id").on(table.clientId),
+ index("idx_oauth_applications_user_id").on(table.userId),
+ ]);
// OAuth Access Tokens table
export const oauthAccessTokens = sqliteTable("oauth_access_tokens", {
@@ -582,13 +571,11 @@ export const oauthAccessTokens = sqliteTable("oauth_access_tokens", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- accessTokenIdx: index("idx_oauth_access_tokens_access_token").on(table.accessToken),
- userIdIdx: index("idx_oauth_access_tokens_user_id").on(table.userId),
- clientIdIdx: index("idx_oauth_access_tokens_client_id").on(table.clientId),
- };
- });
+ }, (table) => [
+ index("idx_oauth_access_tokens_access_token").on(table.accessToken),
+ index("idx_oauth_access_tokens_user_id").on(table.userId),
+ index("idx_oauth_access_tokens_client_id").on(table.clientId),
+ ]);
// OAuth Consent table
export const oauthConsent = sqliteTable("oauth_consent", {
@@ -603,13 +590,11 @@ export const oauthConsent = sqliteTable("oauth_consent", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- userIdIdx: index("idx_oauth_consent_user_id").on(table.userId),
- clientIdIdx: index("idx_oauth_consent_client_id").on(table.clientId),
- userClientIdx: index("idx_oauth_consent_user_client").on(table.userId, table.clientId),
- };
- });
+ }, (table) => [
+ index("idx_oauth_consent_user_id").on(table.userId),
+ index("idx_oauth_consent_client_id").on(table.clientId),
+ index("idx_oauth_consent_user_client").on(table.userId, table.clientId),
+ ]);
// ===== SSO Provider Tables =====
@@ -628,13 +613,11 @@ export const ssoProviders = sqliteTable("sso_providers", {
updatedAt: integer("updated_at", { mode: "timestamp" })
.notNull()
.default(sql`(unixepoch())`),
- }, (table) => {
- return {
- providerIdIdx: index("idx_sso_providers_provider_id").on(table.providerId),
- domainIdx: index("idx_sso_providers_domain").on(table.domain),
- issuerIdx: index("idx_sso_providers_issuer").on(table.issuer),
- };
- });
+ }, (table) => [
+ index("idx_sso_providers_provider_id").on(table.providerId),
+ index("idx_sso_providers_domain").on(table.domain),
+ index("idx_sso_providers_issuer").on(table.issuer),
+ ]);
// Export type definitions
export type User = z.infer<typeof userSchema>;
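
Most of the churn in this file is one mechanical pattern: the third argument to `sqliteTable` now returns an array of index builders instead of an object keyed by index name (the object form is deprecated in newer Drizzle versions, which is presumably what tsc was flagging). A small self-contained sketch of the new form, using a hypothetical table:

import { index, integer, sqliteTable, text } from "drizzle-orm/sqlite-core";

// Hypothetical table, for illustration only.
export const notes = sqliteTable(
  "notes",
  {
    id: text("id").primaryKey(),
    userId: text("user_id").notNull(),
    createdAt: integer("created_at", { mode: "timestamp" }).notNull(),
  },
  // Array form: the index name is passed to index() directly, no wrapper object.
  (table) => [
    index("idx_notes_user_id").on(table.userId),
    index("idx_notes_created_at").on(table.createdAt),
  ]
);

Tables without any indexes get an explicit empty extras callback in this commit (`(_table) => []`), which keeps the three-argument signature consistent across the schema.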

View File

@@ -240,6 +240,7 @@ export async function initializeConfigFromEnv(): Promise<void> {
token: envConfig.github.token ? encrypt(envConfig.github.token) : existingConfig?.[0]?.githubConfig?.token || '',
includeStarred: envConfig.github.mirrorStarred ?? existingConfig?.[0]?.githubConfig?.includeStarred ?? false,
includeForks: !(envConfig.github.skipForks ?? false),
+ skipForks: envConfig.github.skipForks ?? existingConfig?.[0]?.githubConfig?.skipForks ?? false,
includeArchived: envConfig.github.includeArchived ?? existingConfig?.[0]?.githubConfig?.includeArchived ?? false,
includePrivate: envConfig.github.privateRepositories ?? existingConfig?.[0]?.githubConfig?.includePrivate ?? false,
includePublic: envConfig.github.publicRepositories ?? existingConfig?.[0]?.githubConfig?.includePublic ?? true,
@@ -255,6 +256,8 @@ export async function initializeConfigFromEnv(): Promise<void> {
url: envConfig.gitea.url || existingConfig?.[0]?.giteaConfig?.url || '',
token: envConfig.gitea.token ? encrypt(envConfig.gitea.token) : existingConfig?.[0]?.giteaConfig?.token || '',
defaultOwner: envConfig.gitea.username || existingConfig?.[0]?.giteaConfig?.defaultOwner || '',
+ organization: envConfig.gitea.organization || existingConfig?.[0]?.giteaConfig?.organization || undefined,
+ preserveOrgStructure: mirrorStrategy === 'preserve' || mirrorStrategy === 'mixed',
mirrorInterval: envConfig.gitea.mirrorInterval || existingConfig?.[0]?.giteaConfig?.mirrorInterval || '8h',
lfs: envConfig.gitea.lfs ?? existingConfig?.[0]?.giteaConfig?.lfs ?? false,
wiki: envConfig.mirror.mirrorWiki ?? existingConfig?.[0]?.giteaConfig?.wiki ?? false,
@@ -296,8 +299,8 @@ export async function initializeConfigFromEnv(): Promise<void> {
updateInterval: envConfig.schedule.updateInterval ?? existingConfig?.[0]?.scheduleConfig?.updateInterval ?? 86400000,
skipRecentlyMirrored: envConfig.schedule.skipRecentlyMirrored ?? existingConfig?.[0]?.scheduleConfig?.skipRecentlyMirrored ?? true,
recentThreshold: envConfig.schedule.recentThreshold ?? existingConfig?.[0]?.scheduleConfig?.recentThreshold ?? 3600000,
- lastRun: existingConfig?.[0]?.scheduleConfig?.lastRun || null,
- nextRun: existingConfig?.[0]?.scheduleConfig?.nextRun || null,
+ lastRun: existingConfig?.[0]?.scheduleConfig?.lastRun || undefined,
+ nextRun: existingConfig?.[0]?.scheduleConfig?.nextRun || undefined,
};
// Build cleanup config
@@ -311,8 +314,8 @@ export async function initializeConfigFromEnv(): Promise<void> {
orphanedRepoAction: envConfig.cleanup.orphanedRepoAction || existingConfig?.[0]?.cleanupConfig?.orphanedRepoAction || 'archive',
batchSize: envConfig.cleanup.batchSize ?? existingConfig?.[0]?.cleanupConfig?.batchSize ?? 10,
pauseBetweenDeletes: envConfig.cleanup.pauseBetweenDeletes ?? existingConfig?.[0]?.cleanupConfig?.pauseBetweenDeletes ?? 2000,
- lastRun: existingConfig?.[0]?.cleanupConfig?.lastRun || null,
- nextRun: existingConfig?.[0]?.cleanupConfig?.nextRun || null,
+ lastRun: existingConfig?.[0]?.cleanupConfig?.lastRun || undefined,
+ nextRun: existingConfig?.[0]?.cleanupConfig?.nextRun || undefined,
};
if (existingConfig.length > 0) {
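
The `lastRun`/`nextRun` switch from `|| null` to `|| undefined` matches the new schema fields `z.coerce.date().optional()`, whose inferred type is `Date | undefined`: under TypeScript, `null` is not assignable there. A minimal sketch of the distinction:

// Shape inferred from z.coerce.date().optional()
type ScheduleTimestamps = {
  lastRun?: Date; // Date | undefined
  nextRun?: Date;
};

declare const previous: { lastRun: Date | null; nextRun: Date | null } | undefined;

const schedule: ScheduleTimestamps = {
  // `previous?.lastRun || null` would not type-check; coalescing to undefined
  // keeps the value optional rather than nullable.
  lastRun: previous?.lastRun || undefined,
  nextRun: previous?.nextRun || undefined,
};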

View File

@@ -1618,8 +1618,8 @@ export async function mirrorGitRepoPullRequestsToGitea({
}
},
{
- maxConcurrency: 5,
- retryAttempts: 3,
+ concurrencyLimit: 5,
+ maxRetries: 3,
retryDelay: 1000,
}
);
@@ -1840,8 +1840,8 @@ export async function deleteGiteaRepo(
}
);
- if (!response.success) {
- throw new Error(`Failed to delete repository ${owner}/${repo}: ${response.statusCode}`);
+ if (response.status >= 400) {
+ throw new Error(`Failed to delete repository ${owner}/${repo}: ${response.status} ${response.statusText}`);
}
console.log(`Successfully deleted repository ${owner}/${repo} from Gitea`);
@@ -1871,8 +1871,8 @@ export async function archiveGiteaRepo(
}
);
- if (!response.success) {
- throw new Error(`Failed to archive repository ${owner}/${repo}: ${response.statusCode}`);
+ if (response.status >= 400) {
+ throw new Error(`Failed to archive repository ${owner}/${repo}: ${response.status} ${response.statusText}`);
}
console.log(`Successfully archived repository ${owner}/${repo} in Gitea`);
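
The delete/archive paths now check `response.status >= 400` and report `status`/`statusText`, which implies the HTTP helper returns a fetch-style response rather than an object with `success`/`statusCode` fields. A minimal standalone sketch of the same check using plain `fetch` against the Gitea API (the project's own HTTP wrapper and error handling are not shown in this diff):

// Illustrative only; auth and base-URL handling are simplified.
export async function deleteGiteaRepoSketch(
  giteaUrl: string,
  token: string,
  owner: string,
  repo: string
): Promise<void> {
  const response = await fetch(`${giteaUrl}/api/v1/repos/${owner}/${repo}`, {
    method: "DELETE",
    headers: { Authorization: `token ${token}` },
  });

  // fetch does not reject on HTTP error codes, so 4xx/5xx must be checked explicitly.
  if (response.status >= 400) {
    throw new Error(
      `Failed to delete repository ${owner}/${repo}: ${response.status} ${response.statusText}`
    );
  }
  console.log(`Successfully deleted repository ${owner}/${repo} from Gitea`);
}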

View File

@@ -4,8 +4,8 @@
*/
import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
- import { db, repositories, organizations, mirrorJobs } from './db';
- import { eq, and, lt } from 'drizzle-orm';
+ import { db, repositories, organizations, mirrorJobs, configs } from './db';
+ import { eq, and, lt, inArray } from 'drizzle-orm';
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
@@ -217,26 +217,26 @@ async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
try {
// Get the config for this user with better error handling
- const configs = await db
+ const userConfigs = await db
.select()
- .from(repositories)
- .where(eq(repositories.userId, job.userId))
+ .from(configs)
+ .where(eq(configs.userId, job.userId))
.limit(1);
- if (configs.length === 0) {
+ if (userConfigs.length === 0) {
throw new Error(`No configuration found for user ${job.userId}`);
}
- const config = configs[0];
- if (!config.configId) {
- throw new Error(`Configuration missing configId for user ${job.userId}`);
+ const config = userConfigs[0];
+ if (!config.id) {
+ throw new Error(`Configuration missing id for user ${job.userId}`);
}
// Get repositories to process with validation
const repos = await db
.select()
.from(repositories)
- .where(eq(repositories.id, remainingItemIds));
+ .where(inArray(repositories.id, remainingItemIds));
if (repos.length === 0) {
console.warn(`No repositories found for remaining item IDs: ${remainingItemIds.join(', ')}`);
@@ -286,7 +286,7 @@ async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
};
// Mirror the repository based on whether it's in an organization
- if (repo.organization && config.githubConfig.preserveOrgStructure) {
+ if (repo.organization && config.giteaConfig.preserveOrgStructure) {
await mirrorGitHubOrgRepoToGiteaOrg({
config,
octokit,
@@ -346,26 +346,26 @@ async function recoverSyncJob(job: any, remainingItemIds: string[]) {
try {
// Get the config for this user with better error handling
- const configs = await db
+ const userConfigs = await db
.select()
- .from(repositories)
- .where(eq(repositories.userId, job.userId))
+ .from(configs)
+ .where(eq(configs.userId, job.userId))
.limit(1);
- if (configs.length === 0) {
+ if (userConfigs.length === 0) {
throw new Error(`No configuration found for user ${job.userId}`);
}
- const config = configs[0];
- if (!config.configId) {
- throw new Error(`Configuration missing configId for user ${job.userId}`);
+ const config = userConfigs[0];
+ if (!config.id) {
+ throw new Error(`Configuration missing id for user ${job.userId}`);
}
// Get repositories to process with validation
const repos = await db
.select()
.from(repositories)
- .where(eq(repositories.id, remainingItemIds));
+ .where(inArray(repositories.id, remainingItemIds));
if (repos.length === 0) {
console.warn(`No repositories found for remaining item IDs: ${remainingItemIds.join(', ')}`);
@@ -397,6 +397,7 @@ async function recoverSyncJob(job: any, remainingItemIds: string[]) {
errorMessage: repo.errorMessage ?? undefined,
forkedFrom: repo.forkedFrom ?? undefined,
visibility: repositoryVisibilityEnum.parse(repo.visibility || "public"),
+ mirroredLocation: repo.mirroredLocation || "",
};
// Sync the repository
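
Two fixes above are worth calling out: the config lookup now reads from `configs` (the old query selected from `repositories`, so `config.configId` never type-checked), and the id-list filter uses `inArray` instead of `eq`, since `eq` compares a column against a single value. A minimal sketch of the `inArray` form, reusing this schema's `repositories` table (import paths are guesses):

import { inArray } from "drizzle-orm";
import { db } from "./db";                  // assumed path
import { repositories } from "./db/schema"; // assumed path

async function loadRepositoriesByIds(remainingItemIds: string[]) {
  if (remainingItemIds.length === 0) return []; // guard: an empty IN () list is not valid SQL
  // Generates WHERE "repositories"."id" IN (...)
  return db
    .select()
    .from(repositories)
    .where(inArray(repositories.id, remainingItemIds));
}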

View File

@@ -10,7 +10,7 @@ import { syncGiteaRepo } from '@/lib/gitea';
import { createGitHubClient } from '@/lib/github';
import { getDecryptedGitHubToken } from '@/lib/utils/config-encryption';
import { parseInterval, formatDuration } from '@/lib/utils/duration-parser';
- import type { Repository } from '@/types';
+ import type { Repository } from '@/lib/db/schema';
import { repoStatusEnum, repositoryVisibilityEnum } from '@/types/Repository';
let schedulerInterval: NodeJS.Timeout | null = null;
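
Importing `Repository` from the schema module keeps the type aligned with what Drizzle actually returns from `db.select()`. If it is not already exported there, the usual way to derive such a type is `$inferSelect`; a sketch with a hypothetical, abbreviated table:

import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";

export const repositories = sqliteTable("repositories", {
  id: text("id").primaryKey(),
  userId: text("user_id").notNull(),
  name: text("name").notNull(),
  lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
});

// Row shape returned by db.select().from(repositories):
export type Repository = typeof repositories.$inferSelect;
// Shape accepted by db.insert(repositories).values(...):
export type NewRepository = typeof repositories.$inferInsert;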

View File

@@ -35,6 +35,16 @@ export const GET: APIRoute = async ({ url }) => {
details: job.details ?? undefined,
message: job.message,
timestamp: job.timestamp,
+ jobType: job.jobType,
+ batchId: job.batchId ?? undefined,
+ totalItems: job.totalItems ?? undefined,
+ completedItems: job.completedItems,
+ itemIds: job.itemIds ?? undefined,
+ completedItemIds: job.completedItemIds,
+ inProgress: job.inProgress,
+ startedAt: job.startedAt ?? undefined,
+ completedAt: job.completedAt ?? undefined,
+ lastCheckpoint: job.lastCheckpoint ?? undefined,
}));
return new Response(

View File

@@ -77,32 +77,9 @@ export const GET: APIRoute = async ({ request }) => {
repoCount: repoCount ?? 0,
orgCount: orgCount ?? 0,
mirroredCount: mirroredCount ?? 0,
- repositories: userRepos.map((repo) => ({
- ...repo,
- organization: repo.organization ?? undefined,
- lastMirrored: repo.lastMirrored ?? undefined,
- errorMessage: repo.errorMessage ?? undefined,
- forkedFrom: repo.forkedFrom ?? undefined,
- status: repoStatusEnum.parse(repo.status),
- visibility: repositoryVisibilityEnum.parse(repo.visibility),
- })),
- organizations: userOrgs.map((org) => ({
- ...org,
- status: repoStatusEnum.parse(org.status),
- membershipRole: membershipRoleEnum.parse(org.membershipRole),
- lastMirrored: org.lastMirrored ?? undefined,
- errorMessage: org.errorMessage ?? undefined,
- })),
- activities: userLogs.map((job) => ({
- id: job.id,
- userId: job.userId,
- repositoryName: job.repositoryName ?? undefined,
- organizationName: job.organizationName ?? undefined,
- status: repoStatusEnum.parse(job.status),
- details: job.details ?? undefined,
- message: job.message,
- timestamp: job.timestamp,
- })),
+ repositories: userRepos,
+ organizations: userOrgs,
+ activities: userLogs,
lastSync: userConfig?.scheduleConfig.lastRun ?? null,
};