-
-
-
-
-
-
-
-
+
+ {isLoadingMethods ? (
+
+
+
-
-
-
-
-
+
+ ) : (
+ <>
+ {/* Show tabs only if multiple auth methods are available */}
+ {authMethods.sso.enabled && authMethods.emailPassword ? (
+
+
+
+
+ Email
+
+
+
+ SSO
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {authMethods.sso.providers.length > 0 && (
+ <>
+
+
+ Sign in with your organization account
+
+ {authMethods.sso.providers.map(provider => (
+
+ ))}
+
+
+
+ >
+ )}
+
+
+
+
setSsoEmail(e.target.value)}
+ className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
+ placeholder="Enter your work email"
+ disabled={isLoading}
+ />
+
+ We'll redirect you to your organization's SSO provider
+
+
+
+
+
+
+
+
+
+ ) : (
+ // Single auth method - show email/password only
+ <>
+
+
+
+
+
+
+ >
+ )}
+ >
+ )}
+
Don't have an account? Contact your administrator.
diff --git a/src/components/auth/LoginPage.tsx b/src/components/auth/LoginPage.tsx
new file mode 100644
index 0000000..abd8937
--- /dev/null
+++ b/src/components/auth/LoginPage.tsx
@@ -0,0 +1,10 @@
+import { LoginForm } from './LoginForm';
+import Providers from '@/components/layout/Providers';
+
+export function LoginPage() {
+ return (
+
+
+
+ );
+}
\ No newline at end of file
diff --git a/src/components/auth/SignupForm.tsx b/src/components/auth/SignupForm.tsx
index 53f52f1..77ee885 100644
--- a/src/components/auth/SignupForm.tsx
+++ b/src/components/auth/SignupForm.tsx
@@ -5,21 +5,22 @@ import { Button } from '@/components/ui/button';
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
import { toast, Toaster } from 'sonner';
import { showErrorToast } from '@/lib/utils';
+import { useAuth } from '@/hooks/useAuth';
export function SignupForm() {
const [isLoading, setIsLoading] = useState(false);
+ const { register } = useAuth();
async function handleSignup(e: React.FormEvent) {
e.preventDefault();
setIsLoading(true);
const form = e.currentTarget;
const formData = new FormData(form);
- const username = formData.get('username') as string | null;
const email = formData.get('email') as string | null;
const password = formData.get('password') as string | null;
const confirmPassword = formData.get('confirmPassword') as string | null;
- if (!username || !email || !password || !confirmPassword) {
+ if (!email || !password || !confirmPassword) {
toast.error('Please fill in all fields');
setIsLoading(false);
return;
@@ -31,28 +32,15 @@ export function SignupForm() {
return;
}
- const signupData = { username, email, password };
-
try {
- const response = await fetch('/api/auth/register', {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(signupData),
- });
-
- const data = await response.json();
-
- if (response.ok) {
- toast.success('Account created successfully! Redirecting to dashboard...');
- // Small delay before redirecting to see the success message
- setTimeout(() => {
- window.location.href = '/';
- }, 1500);
- } else {
- showErrorToast(data.error || 'Failed to create account. Please try again.', toast);
- }
+ // Derive username from email (part before @)
+ const username = email.split('@')[0];
+ await register(username, email, password);
+ toast.success('Account created successfully! Redirecting to dashboard...');
+ // Small delay before redirecting to see the success message
+ setTimeout(() => {
+ window.location.href = '/';
+ }, 1500);
} catch (error) {
showErrorToast(error, toast);
} finally {
@@ -84,20 +72,6 @@ export function SignupForm() {
);
}
diff --git a/src/components/config/SSOSettings.tsx b/src/components/config/SSOSettings.tsx
new file mode 100644
index 0000000..0d3b072
--- /dev/null
+++ b/src/components/config/SSOSettings.tsx
@@ -0,0 +1,426 @@
+import { useState, useEffect } from 'react';
+import { Button } from '@/components/ui/button';
+import { Input } from '@/components/ui/input';
+import { Label } from '@/components/ui/label';
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
+import { Switch } from '@/components/ui/switch';
+import { Alert, AlertDescription } from '@/components/ui/alert';
+import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog';
+import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
+import { apiRequest, showErrorToast } from '@/lib/utils';
+import { toast } from 'sonner';
+import { Plus, Trash2, ExternalLink, Loader2, AlertCircle, Copy, Shield, Info } from 'lucide-react';
+import { Separator } from '@/components/ui/separator';
+import { Skeleton } from '../ui/skeleton';
+import { Badge } from '../ui/badge';
+
+interface SSOProvider {
+ id: string;
+ issuer: string;
+ domain: string;
+ providerId: string;
+ organizationId?: string;
+ oidcConfig: {
+ clientId: string;
+ clientSecret: string;
+ authorizationEndpoint: string;
+ tokenEndpoint: string;
+ jwksEndpoint: string;
+ userInfoEndpoint: string;
+ mapping: {
+ id: string;
+ email: string;
+ emailVerified: string;
+ name: string;
+ image: string;
+ };
+ };
+ createdAt: string;
+ updatedAt: string;
+}
+
+export function SSOSettings() {
+ const [providers, setProviders] = useState<SSOProvider[]>([]);
+ const [isLoading, setIsLoading] = useState(true);
+ const [showProviderDialog, setShowProviderDialog] = useState(false);
+ const [isDiscovering, setIsDiscovering] = useState(false);
+ const [headerAuthEnabled, setHeaderAuthEnabled] = useState(false);
+
+ // Form states for new provider
+ const [providerForm, setProviderForm] = useState({
+ issuer: '',
+ domain: '',
+ providerId: '',
+ clientId: '',
+ clientSecret: '',
+ authorizationEndpoint: '',
+ tokenEndpoint: '',
+ jwksEndpoint: '',
+ userInfoEndpoint: '',
+ });
+
+
+
+ useEffect(() => {
+ loadData();
+ }, []);
+
+ const loadData = async () => {
+ setIsLoading(true);
+ try {
+ const [providersRes, headerAuthStatus] = await Promise.all([
+ apiRequest('/sso/providers'),
+ apiRequest<{ enabled: boolean }>('/auth/header-status').catch(() => ({ enabled: false }))
+ ]);
+
+ setProviders(providersRes);
+ setHeaderAuthEnabled(headerAuthStatus.enabled);
+ } catch (error) {
+ showErrorToast(error, toast);
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const discoverOIDC = async () => {
+ if (!providerForm.issuer) {
+ toast.error('Please enter an issuer URL');
+ return;
+ }
+
+ setIsDiscovering(true);
+ try {
+ const discovered = await apiRequest('/sso/discover', {
+ method: 'POST',
+ data: { issuer: providerForm.issuer },
+ });
+
+ setProviderForm(prev => ({
+ ...prev,
+ authorizationEndpoint: discovered.authorizationEndpoint || '',
+ tokenEndpoint: discovered.tokenEndpoint || '',
+ jwksEndpoint: discovered.jwksEndpoint || '',
+ userInfoEndpoint: discovered.userInfoEndpoint || '',
+ domain: discovered.suggestedDomain || prev.domain,
+ }));
+
+ toast.success('OIDC configuration discovered successfully');
+ } catch (error) {
+ showErrorToast(error, toast);
+ } finally {
+ setIsDiscovering(false);
+ }
+ };
+
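+ // Illustrative sketch (assumption, not part of this change): the /sso/discover
+ // endpoint is expected to fetch the standard OIDC discovery document and map
+ // its snake_case fields to the camelCase names used above, roughly:
+ //
+ //   const res = await fetch(`${issuer.replace(/\/$/, '')}/.well-known/openid-configuration`);
+ //   const meta = await res.json();
+ //   return {
+ //     authorizationEndpoint: meta.authorization_endpoint,
+ //     tokenEndpoint: meta.token_endpoint,
+ //     jwksEndpoint: meta.jwks_uri,
+ //     userInfoEndpoint: meta.userinfo_endpoint,
+ //   };
+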
+ const createProvider = async () => {
+ try {
+ const newProvider = await apiRequest('/sso/providers', {
+ method: 'POST',
+ data: {
+ ...providerForm,
+ mapping: {
+ id: 'sub',
+ email: 'email',
+ emailVerified: 'email_verified',
+ name: 'name',
+ image: 'picture',
+ },
+ },
+ });
+
+ setProviders([...providers, newProvider]);
+ setShowProviderDialog(false);
+ setProviderForm({
+ issuer: '',
+ domain: '',
+ providerId: '',
+ clientId: '',
+ clientSecret: '',
+ authorizationEndpoint: '',
+ tokenEndpoint: '',
+ jwksEndpoint: '',
+ userInfoEndpoint: '',
+ });
+ toast.success('SSO provider created successfully');
+ } catch (error) {
+ showErrorToast(error, toast);
+ }
+ };
+
+ const deleteProvider = async (id: string) => {
+ try {
+ await apiRequest(`/sso/providers?id=${id}`, { method: 'DELETE' });
+ setProviders(providers.filter(p => p.id !== id));
+ toast.success('Provider deleted successfully');
+ } catch (error) {
+ showErrorToast(error, toast);
+ }
+ };
+
+
+ const copyToClipboard = (text: string) => {
+ navigator.clipboard.writeText(text);
+ toast.success('Copied to clipboard');
+ };
+
+ if (isLoading) {
+ return (
+
+
+
+
+ );
+ }
+
+ return (
+
+ {/* Header with status indicators */}
+
+
+
Authentication & SSO
+
+ Configure how users authenticate with your application
+
+
+
+
0 ? 'bg-green-500' : 'bg-muted'}`} />
+
+ {providers.length} Provider{providers.length !== 1 ? 's' : ''} configured
+
+
+
+
+ {/* Authentication Methods Overview */}
+
+
+ Active Authentication Methods
+
+
+
+ {/* Email & Password - Always enabled */}
+
+
+
+
Email & Password
+
Default
+
+
Always enabled
+
+
+ {/* Header Authentication Status */}
+ {headerAuthEnabled && (
+
+
+
+
Header Authentication
+
Auto-login
+
+
Via reverse proxy
+
+ )}
+
+ {/* SSO Providers Status */}
+
+
+
0 ? 'bg-green-500' : 'bg-muted'}`} />
+ SSO/OIDC Providers
+
+
+ {providers.length > 0 ? `${providers.length} provider${providers.length !== 1 ? 's' : ''} configured` : 'Not configured'}
+
+
+
+
+ {/* Header Auth Info */}
+ {headerAuthEnabled && (
+
+
+
+ Header authentication is enabled. Users authenticated by your reverse proxy will be automatically logged in.
+
+
+ )}
+
+
+
+ {/* SSO Providers */}
+
+
+
+
+ External Identity Providers
+
+ Connect external OIDC/OAuth providers (Google, Azure AD, etc.) to allow users to sign in with their existing accounts
+
+
+
+
+
+
+ {providers.length === 0 ? (
+
+
+
No SSO providers configured
+
+ Enable Single Sign-On by adding an external identity provider like Google, Azure AD, or any OIDC-compliant service.
+
+
+
+
+
+ ) : (
+
+ {providers.map(provider => (
+
+
+
+
+
{provider.providerId}
+
{provider.domain}
+
+
+
+
+
+
+
+
Issuer
+
{provider.issuer}
+
+
+
Client ID
+
{provider.oidcConfig.clientId}
+
+
+
+
+ ))}
+
+ )}
+
+
+
+ );
+}
\ No newline at end of file
diff --git a/src/components/layout/Header.tsx b/src/components/layout/Header.tsx
index 96e2ef2..53d1162 100644
--- a/src/components/layout/Header.tsx
+++ b/src/components/layout/Header.tsx
@@ -129,9 +129,9 @@ export function Header({ currentPage, onNavigate, onMenuClick }: HeaderProps) {
diff --git a/src/components/layout/SponsorCard.tsx b/src/components/layout/SponsorCard.tsx
new file mode 100644
index 0000000..f738c49
--- /dev/null
+++ b/src/components/layout/SponsorCard.tsx
@@ -0,0 +1,72 @@
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { Button } from "@/components/ui/button";
+import { Heart, Coffee, Sparkles } from "lucide-react";
+import { isSelfHostedMode } from "@/lib/deployment-mode";
+
+export function SponsorCard() {
+ // Only show in self-hosted mode
+ if (!isSelfHostedMode()) {
+ return null;
+ }
+
+ return (
+
+
+
+
+
+ Support Development
+
+
+ Help us improve Gitea Mirror
+
+
+
+
+ Gitea Mirror is open source and free. Your sponsorship helps us maintain and improve it.
+
+
+
+
+
+
+
+ Pro features available in hosted version
+
+
+
+
+
+ );
+}
\ No newline at end of file
diff --git a/src/components/oauth/ConsentPage.tsx b/src/components/oauth/ConsentPage.tsx
new file mode 100644
index 0000000..f41a7e8
--- /dev/null
+++ b/src/components/oauth/ConsentPage.tsx
@@ -0,0 +1,307 @@
+'use client';
+
+import { useEffect, useState } from 'react';
+import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from '@/components/ui/card';
+import { Button } from '@/components/ui/button';
+import { Alert, AlertDescription } from '@/components/ui/alert';
+import { Checkbox } from '@/components/ui/checkbox';
+import { Label } from '@/components/ui/label';
+import { Separator } from '@/components/ui/separator';
+import { authClient } from '@/lib/auth-client';
+import { apiRequest, showErrorToast } from '@/lib/utils';
+import { toast, Toaster } from 'sonner';
+import { Shield, User, Mail, ChevronRight, AlertTriangle, Loader2 } from 'lucide-react';
+import { isValidRedirectUri, parseRedirectUris } from '@/lib/utils/oauth-validation';
+
+interface OAuthApplication {
+ id: string;
+ clientId: string;
+ name: string;
+ redirectURLs: string;
+ type: string;
+}
+
+interface ConsentRequest {
+ clientId: string;
+ scope: string;
+ state?: string;
+ redirectUri?: string;
+}
+
+export default function ConsentPage() {
+ const [isLoading, setIsLoading] = useState(true);
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ const [application, setApplication] = useState<OAuthApplication | null>(null);
+ const [scopes, setScopes] = useState<string[]>([]);
+ const [selectedScopes, setSelectedScopes] = useState<Set<string>>(new Set());
+ const [error, setError] = useState<string | null>(null);
+
+ useEffect(() => {
+ loadConsentDetails();
+ }, []);
+
+ const loadConsentDetails = async () => {
+ try {
+ const params = new URLSearchParams(window.location.search);
+ const clientId = params.get('client_id');
+ const scope = params.get('scope');
+ const redirectUri = params.get('redirect_uri');
+
+ if (!clientId) {
+ setError('Invalid authorization request: missing client ID');
+ return;
+ }
+
+ // Fetch application details
+ const apps = await apiRequest<OAuthApplication[]>('/sso/applications');
+ const app = apps.find(a => a.clientId === clientId);
+
+ if (!app) {
+ setError('Invalid authorization request: unknown application');
+ return;
+ }
+
+ // Validate redirect URI if provided
+ if (redirectUri) {
+ const authorizedUris = parseRedirectUris(app.redirectURLs);
+
+ if (!isValidRedirectUri(redirectUri, authorizedUris)) {
+ setError('Invalid authorization request: unauthorized redirect URI');
+ return;
+ }
+ }
+
+ setApplication(app);
+
+ // Parse requested scopes
+ const requestedScopes = scope ? scope.split(' ').filter(s => s) : ['openid'];
+ setScopes(requestedScopes);
+
+ // By default, select all requested scopes
+ setSelectedScopes(new Set(requestedScopes));
+ } catch (error) {
+ console.error('Failed to load consent details:', error);
+ setError('Failed to load authorization details');
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
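+ // Illustrative sketch (assumed semantics; the real helpers live in
+ // "@/lib/utils/oauth-validation"): registered redirect URIs are typically
+ // stored as a comma-separated list and compared by exact match, e.g.
+ //
+ //   const parseRedirectUris = (raw: string): string[] =>
+ //     raw.split(',').map(u => u.trim()).filter(Boolean);
+ //   const isValidRedirectUri = (uri: string, allowed: string[]): boolean =>
+ //     allowed.includes(uri);
+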
+ const handleConsent = async (accept: boolean) => {
+ setIsSubmitting(true);
+ try {
+ const result = await authClient.oauth2.consent({
+ accept,
+ });
+
+ if (result.error) {
+ throw new Error(result.error.message || 'Consent failed');
+ }
+
+ // The consent method should handle the redirect
+ if (!accept) {
+ // If denied, redirect back to the application with error
+ const params = new URLSearchParams(window.location.search);
+ const redirectUri = params.get('redirect_uri');
+
+ if (redirectUri && application) {
+ // Validate redirect URI against authorized URIs
+ const authorizedUris = parseRedirectUris(application.redirectURLs);
+
+ if (isValidRedirectUri(redirectUri, authorizedUris)) {
+ try {
+ // Parse and reconstruct the URL to ensure it's safe
+ const url = new URL(redirectUri);
+ url.searchParams.set('error', 'access_denied');
+
+ // Safe to redirect - URI has been validated and sanitized
+ window.location.href = url.toString();
+ } catch (e) {
+ console.error('Failed to parse redirect URI:', e);
+ setError('Invalid redirect URI');
+ }
+ } else {
+ console.error('Unauthorized redirect URI:', redirectUri);
+ setError('Invalid redirect URI');
+ }
+ }
+ }
+ } catch (error) {
+ showErrorToast(error, toast);
+ } finally {
+ setIsSubmitting(false);
+ }
+ };
+
+ const toggleScope = (scope: string) => {
+ // openid scope is always required
+ if (scope === 'openid') return;
+
+ const newSelected = new Set(selectedScopes);
+ if (newSelected.has(scope)) {
+ newSelected.delete(scope);
+ } else {
+ newSelected.add(scope);
+ }
+ setSelectedScopes(newSelected);
+ };
+
+ const getScopeDescription = (scope: string): { name: string; description: string; icon: any } => {
+ const scopeDescriptions: Record<string, { name: string; description: string; icon: any }> = {
+ openid: {
+ name: 'Basic Information',
+ description: 'Your user ID (required)',
+ icon: User,
+ },
+ profile: {
+ name: 'Profile Information',
+ description: 'Your name, username, and profile picture',
+ icon: User,
+ },
+ email: {
+ name: 'Email Address',
+ description: 'Your email address and verification status',
+ icon: Mail,
+ },
+ };
+
+ return scopeDescriptions[scope] || {
+ name: scope,
+ description: `Access to ${scope} information`,
+ icon: Shield,
+ };
+ };
+
+ if (isLoading) {
+ return (
+
+
+
+ );
+ }
+
+ if (error) {
+ return (
+
+
+
+
+ Authorization Error
+
+
+
+ {error}
+
+
+
+
+
+
+
+ );
+ }
+
+ return (
+ <>
+
+
+
+
+
+
+ Authorize {application?.name}
+
+ This application is requesting access to your account
+
+
+
+
+
+
Requested permissions:
+
+ {scopes.map(scope => {
+ const scopeInfo = getScopeDescription(scope);
+ const Icon = scopeInfo.icon;
+ const isRequired = scope === 'openid';
+
+ return (
+
+
toggleScope(scope)}
+ disabled={isRequired || isSubmitting}
+ />
+
+
+
+ {scopeInfo.description}
+
+
+
+ );
+ })}
+
+
+
+
+
+
+
+
+ You'll be redirected to {application?.type === 'web' ? 'the website' : 'the application'}
+
+
+
+ You can revoke access at any time in your account settings
+
+
+
+
+
+
+
+
+
+
+
+ >
+ );
+}
\ No newline at end of file
diff --git a/src/components/sponsors/GitHubSponsors.tsx b/src/components/sponsors/GitHubSponsors.tsx
new file mode 100644
index 0000000..973fede
--- /dev/null
+++ b/src/components/sponsors/GitHubSponsors.tsx
@@ -0,0 +1,105 @@
+import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
+import { Button } from "@/components/ui/button";
+import { Heart, Coffee, Zap } from "lucide-react";
+import { isSelfHostedMode } from "@/lib/deployment-mode";
+
+export function GitHubSponsors() {
+ // Only show in self-hosted mode
+ if (!isSelfHostedMode()) {
+ return null;
+ }
+
+ return (
+
+
+
+
+ Support Gitea Mirror
+
+
+
+
+ Gitea Mirror is open source and free to use. If you find it helpful,
+ consider supporting the project!
+
+
+
+
+
+
+
+ Your support helps maintain and improve the project
+
+
+
+
+ );
+}
+
+// Smaller inline sponsor button for headers/navbars
+export function SponsorButton() {
+ if (!isSelfHostedMode()) {
+ return null;
+ }
+
+ return (
+
+ );
+}
\ No newline at end of file
diff --git a/src/hooks/useAuth-legacy.ts b/src/hooks/useAuth-legacy.ts
new file mode 100644
index 0000000..01b9432
--- /dev/null
+++ b/src/hooks/useAuth-legacy.ts
@@ -0,0 +1,147 @@
+import * as React from "react";
+import {
+ useState,
+ useEffect,
+ createContext,
+ useContext,
+ type Context,
+} from "react";
+import { authApi } from "@/lib/api";
+import type { ExtendedUser } from "@/types/user";
+
+interface AuthContextType {
+ user: ExtendedUser | null;
+ isLoading: boolean;
+ error: string | null;
+ login: (username: string, password: string) => Promise<void>;
+ register: (
+ username: string,
+ email: string,
+ password: string
+ ) => Promise<void>;
+ logout: () => Promise<void>;
+ refreshUser: () => Promise<void>; // Added refreshUser function
+}
+
+const AuthContext: Context<AuthContextType | undefined> = createContext<
+ AuthContextType | undefined
+>(undefined);
+
+export function AuthProvider({ children }: { children: React.ReactNode }) {
+ const [user, setUser] = useState<ExtendedUser | null>(null);
+ const [isLoading, setIsLoading] = useState(true);
+ const [error, setError] = useState<string | null>(null);
+
+ // Function to refetch the user data
+ const refreshUser = async () => {
+ // not using loading state to keep the ui seamless and refresh the data in bg
+ // setIsLoading(true);
+ try {
+ const user = await authApi.getCurrentUser();
+ setUser(user);
+ } catch (err: any) {
+ setUser(null);
+ console.error("Failed to refresh user data", err);
+ } finally {
+ // setIsLoading(false);
+ }
+ };
+
+ // Automatically check the user status when the app loads
+ useEffect(() => {
+ const checkAuth = async () => {
+ try {
+ const user = await authApi.getCurrentUser();
+
+ console.log("User data fetched:", user);
+
+ setUser(user);
+ } catch (err: any) {
+ setUser(null);
+
+ // Redirect user based on error
+ if (err?.message === "No users found") {
+ window.location.href = "/signup";
+ } else {
+ window.location.href = "/login";
+ }
+ console.error("Auth check failed", err);
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ checkAuth();
+ }, []);
+
+ const login = async (username: string, password: string) => {
+ setIsLoading(true);
+ setError(null);
+ try {
+ const user = await authApi.login(username, password);
+ setUser(user);
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Login failed");
+ throw err;
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const register = async (
+ username: string,
+ email: string,
+ password: string
+ ) => {
+ setIsLoading(true);
+ setError(null);
+ try {
+ const user = await authApi.register(username, email, password);
+ setUser(user);
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Registration failed");
+ throw err;
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const logout = async () => {
+ setIsLoading(true);
+ try {
+ await authApi.logout();
+ setUser(null);
+ window.location.href = "/login";
+ } catch (err) {
+ console.error("Logout error:", err);
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ // Create the context value with the added refreshUser function
+ const contextValue = {
+ user,
+ isLoading,
+ error,
+ login,
+ register,
+ logout,
+ refreshUser,
+ };
+
+ // Return the provider with the context value
+ return React.createElement(
+ AuthContext.Provider,
+ { value: contextValue },
+ children
+ );
+}
+
+export function useAuth() {
+ const context = useContext(AuthContext);
+ if (context === undefined) {
+ throw new Error("useAuth must be used within an AuthProvider");
+ }
+ return context;
+}
diff --git a/src/hooks/useAuth.ts b/src/hooks/useAuth.ts
index 01b9432..7bab564 100644
--- a/src/hooks/useAuth.ts
+++ b/src/hooks/useAuth.ts
@@ -6,21 +6,22 @@ import {
useContext,
type Context,
} from "react";
-import { authApi } from "@/lib/api";
-import type { ExtendedUser } from "@/types/user";
+import { authClient, useSession as useBetterAuthSession } from "@/lib/auth-client";
+import type { Session, AuthUser } from "@/lib/auth-client";
interface AuthContextType {
- user: ExtendedUser | null;
+ user: AuthUser | null;
+ session: Session | null;
isLoading: boolean;
error: string | null;
- login: (username: string, password: string) => Promise<void>;
+ login: (email: string, password: string, username?: string) => Promise<void>;
register: (
username: string,
email: string,
password: string
) => Promise<void>;
logout: () => Promise<void>;
- refreshUser: () => Promise<void>; // Added refreshUser function
+ refreshUser: () => Promise<void>;
}
const AuthContext: Context<AuthContextType | undefined> = createContext<
@@ -28,60 +29,32 @@ const AuthContext: Context = createContext<
>(undefined);
export function AuthProvider({ children }: { children: React.ReactNode }) {
- const [user, setUser] = useState<ExtendedUser | null>(null);
- const [isLoading, setIsLoading] = useState(true);
+ const betterAuthSession = useBetterAuthSession();
const [error, setError] = useState<string | null>(null);
+ const [isLoading, setIsLoading] = useState(false);
- // Function to refetch the user data
- const refreshUser = async () => {
- // not using loading state to keep the ui seamless and refresh the data in bg
- // setIsLoading(true);
- try {
- const user = await authApi.getCurrentUser();
- setUser(user);
- } catch (err: any) {
- setUser(null);
- console.error("Failed to refresh user data", err);
- } finally {
- // setIsLoading(false);
- }
- };
+ // Derive user and session from Better Auth hook
+ const user = betterAuthSession.data?.user || null;
+ const session = betterAuthSession.data || null;
- // Automatically check the user status when the app loads
- useEffect(() => {
- const checkAuth = async () => {
- try {
- const user = await authApi.getCurrentUser();
+ // Don't do any redirects here - let the pages handle their own redirect logic
- console.log("User data fetched:", user);
-
- setUser(user);
- } catch (err: any) {
- setUser(null);
-
- // Redirect user based on error
- if (err?.message === "No users found") {
- window.location.href = "/signup";
- } else {
- window.location.href = "/login";
- }
- console.error("Auth check failed", err);
- } finally {
- setIsLoading(false);
- }
- };
-
- checkAuth();
- }, []);
-
- const login = async (username: string, password: string) => {
+ const login = async (email: string, password: string) => {
setIsLoading(true);
setError(null);
try {
- const user = await authApi.login(username, password);
- setUser(user);
+ const result = await authClient.signIn.email({
+ email,
+ password,
+ callbackURL: "/",
+ });
+
+ if (result.error) {
+ throw new Error(result.error.message || "Login failed");
+ }
} catch (err) {
- setError(err instanceof Error ? err.message : "Login failed");
+ const message = err instanceof Error ? err.message : "Login failed";
+ setError(message);
throw err;
} finally {
setIsLoading(false);
@@ -96,10 +69,19 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
setIsLoading(true);
setError(null);
try {
- const user = await authApi.register(username, email, password);
- setUser(user);
+ const result = await authClient.signUp.email({
+ email,
+ password,
+ name: username, // Better Auth uses 'name' field for display name
+ callbackURL: "/",
+ });
+
+ if (result.error) {
+ throw new Error(result.error.message || "Registration failed");
+ }
} catch (err) {
- setError(err instanceof Error ? err.message : "Registration failed");
+ const message = err instanceof Error ? err.message : "Registration failed";
+ setError(message);
throw err;
} finally {
setIsLoading(false);
@@ -109,9 +91,13 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
const logout = async () => {
setIsLoading(true);
try {
- await authApi.logout();
- setUser(null);
- window.location.href = "/login";
+ await authClient.signOut({
+ fetchOptions: {
+ onSuccess: () => {
+ window.location.href = "/login";
+ },
+ },
+ });
} catch (err) {
console.error("Logout error:", err);
} finally {
@@ -119,10 +105,17 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
}
};
- // Create the context value with the added refreshUser function
+ const refreshUser = async () => {
+ // Better Auth automatically handles session refresh
+ // We can force a refetch if needed
+ await betterAuthSession.refetch();
+ };
+
+ // Create the context value
const contextValue = {
- user,
- isLoading,
+ user: user as AuthUser | null,
+ session,
+ isLoading: isLoading || betterAuthSession.isPending,
error,
login,
register,
@@ -145,3 +138,6 @@ export function useAuth() {
}
return context;
}
+
+// Export the Better Auth session hook for direct use when needed
+export { useBetterAuthSession };
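+
+// Illustrative usage (assumed consumer component):
+//
+//   const { user, isLoading, login, logout } = useAuth();
+//   if (!isLoading && !user) await login("user@example.com", "password");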
\ No newline at end of file
diff --git a/src/hooks/useAuthMethods.ts b/src/hooks/useAuthMethods.ts
new file mode 100644
index 0000000..9f77a69
--- /dev/null
+++ b/src/hooks/useAuthMethods.ts
@@ -0,0 +1,65 @@
+import { useState, useEffect } from 'react';
+import { apiRequest } from '@/lib/utils';
+
+interface AuthMethods {
+ emailPassword: boolean;
+ sso: {
+ enabled: boolean;
+ providers: Array<{
+ id: string;
+ providerId: string;
+ domain: string;
+ }>;
+ };
+ oidc: {
+ enabled: boolean;
+ };
+}
+
+export function useAuthMethods() {
+ const [authMethods, setAuthMethods] = useState<AuthMethods>({
+ emailPassword: true,
+ sso: {
+ enabled: false,
+ providers: [],
+ },
+ oidc: {
+ enabled: false,
+ },
+ });
+ const [isLoading, setIsLoading] = useState(true);
+
+ useEffect(() => {
+ loadAuthMethods();
+ }, []);
+
+ const loadAuthMethods = async () => {
+ try {
+ // Check SSO providers
+ const providers = await apiRequest('/sso/providers').catch(() => []);
+ const applications = await apiRequest('/sso/applications').catch(() => []);
+
+ setAuthMethods({
+ emailPassword: true, // Always enabled
+ sso: {
+ enabled: providers.length > 0,
+ providers: providers.map(p => ({
+ id: p.id,
+ providerId: p.providerId,
+ domain: p.domain,
+ })),
+ },
+ oidc: {
+ enabled: applications.length > 0,
+ },
+ });
+ } catch (error) {
+ // If we can't load auth methods, default to email/password only
+ console.error('Failed to load auth methods:', error);
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ return { authMethods, isLoading };
+}
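+
+// Illustrative usage (assumed consumer, e.g. the login form):
+//
+//   const { authMethods, isLoading } = useAuthMethods();
+//   const showTabs = !isLoading && authMethods.sso.enabled && authMethods.emailPassword;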
\ No newline at end of file
diff --git a/src/lib/auth-client.ts b/src/lib/auth-client.ts
new file mode 100644
index 0000000..07424cc
--- /dev/null
+++ b/src/lib/auth-client.ts
@@ -0,0 +1,28 @@
+import { createAuthClient } from "better-auth/react";
+import { oidcClient } from "better-auth/client/plugins";
+import { ssoClient } from "better-auth/client/plugins";
+
+export const authClient = createAuthClient({
+ // The base URL is optional when running on the same domain
+ // Better Auth will use the current domain by default
+ plugins: [
+ oidcClient(),
+ ssoClient(),
+ ],
+});
+
+// Export commonly used methods for convenience
+export const {
+ signIn,
+ signUp,
+ signOut,
+ useSession,
+ sendVerificationEmail,
+ resetPassword,
+ requestPasswordReset,
+ getSession
+} = authClient;
+
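+// Illustrative usage (call names assumed from the Better Auth client and its SSO
+// plugin; verify against the installed version):
+//
+//   await authClient.signIn.email({ email, password, callbackURL: "/" });
+//   await authClient.signIn.sso({ email, callbackURL: "/" }); // provider resolved by email domain
+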
+// Export types
+export type Session = Awaited<ReturnType<typeof authClient.getSession>>["data"];
+export type AuthUser = Session extends { user: infer U } ? U : never;
\ No newline at end of file
diff --git a/src/lib/auth-config.ts b/src/lib/auth-config.ts
new file mode 100644
index 0000000..76b13af
--- /dev/null
+++ b/src/lib/auth-config.ts
@@ -0,0 +1,70 @@
+import { betterAuth } from "better-auth";
+import { drizzleAdapter } from "better-auth/adapters/drizzle";
+import { oidcProvider } from "better-auth/plugins";
+import { sso } from "better-auth/plugins/sso";
+import type { BunSQLiteDatabase } from "drizzle-orm/bun-sqlite";
+
+// This function will be called with the actual database instance
+export function createAuth(db: BunSQLiteDatabase) {
+ return betterAuth({
+ // Database configuration
+ database: drizzleAdapter(db, {
+ provider: "sqlite",
+ usePlural: true, // Our tables use plural names (users, not user)
+ }),
+
+ // Base URL configuration
+ baseURL: process.env.BETTER_AUTH_URL || "http://localhost:3000",
+
+ // Authentication methods
+ emailAndPassword: {
+ enabled: true,
+ requireEmailVerification: false, // We'll enable this later
+ sendResetPassword: async ({ user, url, token }, request) => {
+ // TODO: Implement email sending for password reset
+ console.log("Password reset requested for:", user.email);
+ console.log("Reset URL:", url);
+ },
+ },
+
+ // Session configuration
+ session: {
+ cookieName: "better-auth-session",
+ updateSessionCookieAge: true,
+ expiresIn: 60 * 60 * 24 * 30, // 30 days
+ },
+
+ // User configuration
+ user: {
+ additionalFields: {
+ // We can add custom fields here if needed
+ },
+ },
+
+ // Plugins for OIDC/SSO support
+ plugins: [
+ // SSO plugin for OIDC client support
+ sso({
+ provisionUser: async (data) => {
+ // Custom user provisioning logic for SSO users
+ console.log("Provisioning SSO user:", data);
+ return data;
+ },
+ }),
+
+ // OIDC Provider plugin (for future use when we want to be an OIDC provider)
+ oidcProvider({
+ loginPage: "/signin",
+ consentPage: "/oauth/consent",
+ metadata: {
+ issuer: process.env.BETTER_AUTH_URL || "http://localhost:3000",
+ },
+ }),
+ ],
+
+ // Trusted origins for CORS
+ trustedOrigins: [
+ process.env.BETTER_AUTH_URL || "http://localhost:3000",
+ ],
+ });
+}
\ No newline at end of file
diff --git a/src/lib/auth-header.ts b/src/lib/auth-header.ts
new file mode 100644
index 0000000..cf51926
--- /dev/null
+++ b/src/lib/auth-header.ts
@@ -0,0 +1,135 @@
+import { db, users } from "./db";
+import { eq } from "drizzle-orm";
+import { nanoid } from "nanoid";
+
+export interface HeaderAuthConfig {
+ enabled: boolean;
+ userHeader: string;
+ emailHeader?: string;
+ nameHeader?: string;
+ autoProvision: boolean;
+ allowedDomains?: string[];
+}
+
+// Default configuration - DISABLED by default
+export const defaultHeaderAuthConfig: HeaderAuthConfig = {
+ enabled: false,
+ userHeader: "X-Authentik-Username", // Common header name
+ emailHeader: "X-Authentik-Email",
+ nameHeader: "X-Authentik-Name",
+ autoProvision: false,
+ allowedDomains: [],
+};
+
+// Get header auth config from environment or database
+export function getHeaderAuthConfig(): HeaderAuthConfig {
+ // Check environment variables for header auth config
+ const envConfig: Partial<HeaderAuthConfig> = {
+ enabled: process.env.HEADER_AUTH_ENABLED === "true",
+ userHeader: process.env.HEADER_AUTH_USER_HEADER || defaultHeaderAuthConfig.userHeader,
+ emailHeader: process.env.HEADER_AUTH_EMAIL_HEADER || defaultHeaderAuthConfig.emailHeader,
+ nameHeader: process.env.HEADER_AUTH_NAME_HEADER || defaultHeaderAuthConfig.nameHeader,
+ autoProvision: process.env.HEADER_AUTH_AUTO_PROVISION === "true",
+ allowedDomains: process.env.HEADER_AUTH_ALLOWED_DOMAINS?.split(",").map(d => d.trim()),
+ };
+
+ return {
+ ...defaultHeaderAuthConfig,
+ ...envConfig,
+ };
+}
+
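+// Example environment configuration (illustrative values only):
+//
+//   HEADER_AUTH_ENABLED=true
+//   HEADER_AUTH_USER_HEADER=X-Authentik-Username
+//   HEADER_AUTH_EMAIL_HEADER=X-Authentik-Email
+//   HEADER_AUTH_NAME_HEADER=X-Authentik-Name
+//   HEADER_AUTH_AUTO_PROVISION=true
+//   HEADER_AUTH_ALLOWED_DOMAINS=example.com,example.org
+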
+// Check if header authentication is enabled
+export function isHeaderAuthEnabled(): boolean {
+ const config = getHeaderAuthConfig();
+ return config.enabled === true;
+}
+
+// Extract user info from headers
+export function extractUserFromHeaders(headers: Headers): {
+ username?: string;
+ email?: string;
+ name?: string;
+} | null {
+ const config = getHeaderAuthConfig();
+
+ if (!config.enabled) {
+ return null;
+ }
+
+ const username = headers.get(config.userHeader);
+ const email = config.emailHeader ? headers.get(config.emailHeader) : undefined;
+ const name = config.nameHeader ? headers.get(config.nameHeader) : undefined;
+
+ if (!username) {
+ return null;
+ }
+
+ // If allowed domains are configured, check email domain
+ if (config.allowedDomains && config.allowedDomains.length > 0 && email) {
+ const domain = email.split("@")[1];
+ if (!config.allowedDomains.includes(domain)) {
+ console.warn(`Header auth rejected: email domain ${domain} not in allowed list`);
+ return null;
+ }
+ }
+
+ return { username, email, name };
+}
+
+// Find or create user from header auth
+export async function authenticateWithHeaders(headers: Headers) {
+ const userInfo = extractUserFromHeaders(headers);
+
+ if (!userInfo || !userInfo.username) {
+ return null;
+ }
+
+ const config = getHeaderAuthConfig();
+
+ // Try to find existing user by username or email
+ let existingUser = await db
+ .select()
+ .from(users)
+ .where(eq(users.username, userInfo.username))
+ .limit(1);
+
+ if (existingUser.length === 0 && userInfo.email) {
+ existingUser = await db
+ .select()
+ .from(users)
+ .where(eq(users.email, userInfo.email))
+ .limit(1);
+ }
+
+ if (existingUser.length > 0) {
+ return existingUser[0];
+ }
+
+ // If auto-provisioning is disabled, don't create new users
+ if (!config.autoProvision) {
+ console.warn(`Header auth: User ${userInfo.username} not found and auto-provisioning is disabled`);
+ return null;
+ }
+
+ // Create new user if auto-provisioning is enabled
+ try {
+ const newUser = {
+ id: nanoid(),
+ username: userInfo.username,
+ email: userInfo.email || `${userInfo.username}@header-auth.local`,
+ emailVerified: true, // Trust the auth provider
+ name: userInfo.name || userInfo.username,
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ };
+
+ await db.insert(users).values(newUser);
+ console.log(`Header auth: Auto-provisioned new user ${userInfo.username}`);
+
+ return newUser;
+ } catch (error) {
+ console.error("Failed to auto-provision user from header auth:", error);
+ return null;
+ }
+}
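+
+// Illustrative usage (assumed middleware wiring): for a request already
+// authenticated by the reverse proxy,
+//
+//   const user = await authenticateWithHeaders(new Headers({
+//     "X-Authentik-Username": "jdoe",
+//     "X-Authentik-Email": "jdoe@example.com",
+//   }));
+//
+// `user` is null when header auth is disabled, the email domain is not allowed,
+// or the user does not exist and auto-provisioning is off.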
\ No newline at end of file
diff --git a/src/lib/auth-oidc-config.example.ts b/src/lib/auth-oidc-config.example.ts
new file mode 100644
index 0000000..ab8cb97
--- /dev/null
+++ b/src/lib/auth-oidc-config.example.ts
@@ -0,0 +1,179 @@
+/**
+ * Example OIDC/SSO Configuration for Better Auth
+ *
+ * This file demonstrates how to enable OIDC and SSO features in Gitea Mirror.
+ * To use: Copy this file to auth-oidc-config.ts and update the auth.ts import.
+ */
+
+import { betterAuth } from "better-auth";
+import { drizzleAdapter } from "better-auth/adapters/drizzle";
+import { sso } from "better-auth/plugins/sso";
+import { oidcProvider } from "better-auth/plugins/oidc";
+import type { BunSQLiteDatabase } from "drizzle-orm/bun-sqlite";
+
+export function createAuthWithOIDC(db: BunSQLiteDatabase) {
+ return betterAuth({
+ // Database configuration
+ database: drizzleAdapter(db, {
+ provider: "sqlite",
+ usePlural: true,
+ }),
+
+ // Base configuration
+ baseURL: process.env.BETTER_AUTH_URL || "http://localhost:3000",
+ basePath: "/api/auth",
+
+ // Email/Password authentication
+ emailAndPassword: {
+ enabled: true,
+ requireEmailVerification: false,
+ },
+
+ // Session configuration
+ session: {
+ cookieName: "better-auth-session",
+ updateSessionCookieAge: true,
+ expiresIn: 60 * 60 * 24 * 30, // 30 days
+ },
+
+ // User configuration with additional fields
+ user: {
+ additionalFields: {
+ username: {
+ type: "string",
+ required: true,
+ defaultValue: "user",
+ input: true,
+ }
+ },
+ },
+
+ // OAuth2 providers (examples)
+ socialProviders: {
+ github: {
+ enabled: !!process.env.GITHUB_OAUTH_CLIENT_ID,
+ clientId: process.env.GITHUB_OAUTH_CLIENT_ID!,
+ clientSecret: process.env.GITHUB_OAUTH_CLIENT_SECRET!,
+ },
+ google: {
+ enabled: !!process.env.GOOGLE_OAUTH_CLIENT_ID,
+ clientId: process.env.GOOGLE_OAUTH_CLIENT_ID!,
+ clientSecret: process.env.GOOGLE_OAUTH_CLIENT_SECRET!,
+ },
+ },
+
+ // Plugins
+ plugins: [
+ // SSO Plugin - For OIDC/SAML client functionality
+ sso({
+ // Auto-provision users from SSO providers
+ provisionUser: async (data) => {
+ console.log("Provisioning SSO user:", data.email);
+
+ // Custom logic to set username from email
+ const username = data.email.split('@')[0];
+
+ return {
+ ...data,
+ username,
+ };
+ },
+
+ // Organization provisioning for enterprise SSO
+ organizationProvisioning: {
+ disabled: false,
+ defaultRole: "member",
+ getRole: async (user) => {
+ // Custom logic to determine user role
+ // For admin emails, grant admin role
+ if (user.email?.endsWith('@admin.example.com')) {
+ return 'admin';
+ }
+ return 'member';
+ },
+ },
+ }),
+
+ // OIDC Provider Plugin - Makes Gitea Mirror an OIDC provider
+ oidcProvider({
+ // Login page for OIDC authentication flow
+ loginPage: "/login",
+
+ // Consent page for OAuth2 authorization
+ consentPage: "/oauth/consent",
+
+ // Allow dynamic client registration
+ allowDynamicClientRegistration: false,
+
+ // OIDC metadata configuration
+ metadata: {
+ issuer: process.env.BETTER_AUTH_URL || "http://localhost:3000",
+ authorization_endpoint: "/api/auth/oauth2/authorize",
+ token_endpoint: "/api/auth/oauth2/token",
+ userinfo_endpoint: "/api/auth/oauth2/userinfo",
+ jwks_uri: "/api/auth/jwks",
+ },
+
+ // Additional user info claims
+ getAdditionalUserInfoClaim: (user, scopes) => {
+ const claims: Record<string, any> = {};
+
+ // Add custom claims based on scopes
+ if (scopes.includes('profile')) {
+ claims.username = user.username;
+ claims.preferred_username = user.username;
+ }
+
+ if (scopes.includes('gitea')) {
+ // Add Gitea-specific claims
+ claims.gitea_admin = false; // Customize based on your logic
+ claims.gitea_repos = []; // Could fetch user's repositories
+ }
+
+ return claims;
+ },
+ }),
+ ],
+
+ // Trusted origins for CORS
+ trustedOrigins: [
+ process.env.BETTER_AUTH_URL || "http://localhost:3000",
+ // Add your OIDC client domains here
+ ],
+ });
+}
+
+// Environment variables needed:
+/*
+# OAuth2 Providers (optional)
+GITHUB_OAUTH_CLIENT_ID=your-github-client-id
+GITHUB_OAUTH_CLIENT_SECRET=your-github-client-secret
+GOOGLE_OAUTH_CLIENT_ID=your-google-client-id
+GOOGLE_OAUTH_CLIENT_SECRET=your-google-client-secret
+
+# SSO Configuration (when registering providers)
+SSO_PROVIDER_ISSUER=https://idp.example.com
+SSO_PROVIDER_CLIENT_ID=your-client-id
+SSO_PROVIDER_CLIENT_SECRET=your-client-secret
+*/
+
+// Example: Registering an SSO provider programmatically
+/*
+import { authClient } from "./auth-client";
+
+// Register corporate SSO
+await authClient.sso.register({
+ issuer: "https://login.microsoftonline.com/tenant-id/v2.0",
+ domain: "company.com",
+ clientId: process.env.AZURE_CLIENT_ID!,
+ clientSecret: process.env.AZURE_CLIENT_SECRET!,
+ providerId: "azure-ad",
+ mapping: {
+ id: "sub",
+ email: "email",
+ emailVerified: "email_verified",
+ name: "name",
+ image: "picture",
+ },
+});
+*/
\ No newline at end of file
diff --git a/src/lib/auth.ts b/src/lib/auth.ts
new file mode 100644
index 0000000..a1d38ac
--- /dev/null
+++ b/src/lib/auth.ts
@@ -0,0 +1,99 @@
+import { betterAuth } from "better-auth";
+import { drizzleAdapter } from "better-auth/adapters/drizzle";
+import { oidcProvider } from "better-auth/plugins";
+import { sso } from "better-auth/plugins/sso";
+import { db, users } from "./db";
+import * as schema from "./db/schema";
+import { eq } from "drizzle-orm";
+
+export const auth = betterAuth({
+ // Database configuration
+ database: drizzleAdapter(db, {
+ provider: "sqlite",
+ usePlural: true, // Our tables use plural names (users, not user)
+ schema, // Pass the schema explicitly
+ }),
+
+ // Secret for signing tokens
+ secret: process.env.BETTER_AUTH_SECRET,
+
+ // Base URL configuration
+ baseURL: process.env.BETTER_AUTH_URL || "http://localhost:4321",
+ basePath: "/api/auth", // Specify the base path for auth endpoints
+
+ // Authentication methods
+ emailAndPassword: {
+ enabled: true,
+ requireEmailVerification: false, // We'll enable this later
+ sendResetPassword: async ({ user, url, token }, request) => {
+ // TODO: Implement email sending for password reset
+ console.log("Password reset requested for:", user.email);
+ console.log("Reset URL:", url);
+ },
+ },
+
+
+ // Session configuration
+ session: {
+ cookieName: "better-auth-session",
+ updateSessionCookieAge: true,
+ expiresIn: 60 * 60 * 24 * 30, // 30 days
+ },
+
+ // User configuration
+ user: {
+ additionalFields: {
+ // Keep the username field from our existing schema
+ username: {
+ type: "string",
+ required: false,
+ input: false, // Don't show in signup form - we'll derive from email
+ }
+ },
+ },
+
+ // Plugins configuration
+ plugins: [
+ // OIDC Provider plugin - allows this app to act as an OIDC provider
+ oidcProvider({
+ loginPage: "/login",
+ consentPage: "/oauth/consent",
+ // Allow dynamic client registration for flexibility
+ allowDynamicClientRegistration: true,
+ // Customize user info claims based on scopes
+ getAdditionalUserInfoClaim: (user, scopes) => {
+ const claims: Record<string, any> = {};
+ if (scopes.includes("profile")) {
+ claims.username = user.username;
+ }
+ return claims;
+ },
+ }),
+
+ // SSO plugin - allows users to authenticate with external OIDC providers
+ sso({
+ // Provision new users when they sign in with SSO
+ provisionUser: async (user) => {
+ // Derive username from email if not provided
+ const username = user.name || user.email?.split('@')[0] || 'user';
+ return {
+ ...user,
+ username,
+ };
+ },
+ // Organization provisioning settings
+ organizationProvisioning: {
+ disabled: false,
+ defaultRole: "member",
+ },
+ }),
+ ],
+
+ // Trusted origins for CORS
+ trustedOrigins: [
+ process.env.BETTER_AUTH_URL || "http://localhost:4321",
+ ],
+});
+
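+// Illustrative wiring (assumption, not shown in this diff): the Better Auth handler
+// is typically mounted on a catch-all API route, e.g. in an Astro endpoint:
+//
+//   // src/pages/api/auth/[...all].ts
+//   import type { APIRoute } from "astro";
+//   import { auth } from "@/lib/auth";
+//   export const ALL: APIRoute = async ({ request }) => auth.handler(request);
+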
+// Export type for use in other parts of the app
+export type Auth = typeof auth;
\ No newline at end of file
diff --git a/src/lib/config.ts b/src/lib/config.ts
index 8968830..3929b64 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -18,9 +18,9 @@ export const ENV = {
return "sqlite://data/gitea-mirror.db";
},
- // JWT secret for authentication
- JWT_SECRET:
- process.env.JWT_SECRET || "your-secret-key-change-this-in-production",
+ // Better Auth secret for authentication
+ BETTER_AUTH_SECRET:
+ process.env.BETTER_AUTH_SECRET || "your-secret-key-change-this-in-production",
// Server host and port
HOST: process.env.HOST || "localhost",
diff --git a/src/lib/db/adapter.ts b/src/lib/db/adapter.ts
new file mode 100644
index 0000000..20b2538
--- /dev/null
+++ b/src/lib/db/adapter.ts
@@ -0,0 +1,102 @@
+/**
+ * Database adapter for SQLite
+ * For the self-hosted version of Gitea Mirror
+ */
+
+import { drizzle as drizzleSqlite } from 'drizzle-orm/bun-sqlite';
+import { Database } from 'bun:sqlite';
+import * as schema from './schema';
+
+export type DatabaseClient = ReturnType<typeof createDatabase>;
+
+/**
+ * Create SQLite database connection
+ */
+export function createDatabase() {
+ const dbPath = process.env.DATABASE_PATH || './data/gitea-mirror.db';
+
+ // Ensure directory exists
+ const fs = require('fs');
+ const path = require('path');
+ const dir = path.dirname(dbPath);
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir, { recursive: true });
+ }
+
+ // Create SQLite connection
+ const sqlite = new Database(dbPath);
+
+ // Enable foreign keys and WAL mode for better performance
+ sqlite.exec('PRAGMA foreign_keys = ON');
+ sqlite.exec('PRAGMA journal_mode = WAL');
+ sqlite.exec('PRAGMA synchronous = NORMAL');
+ sqlite.exec('PRAGMA cache_size = -2000'); // 2MB cache
+ sqlite.exec('PRAGMA temp_store = MEMORY');
+
+ // Create Drizzle instance with SQLite
+ const db = drizzleSqlite(sqlite, {
+ schema,
+ logger: process.env.NODE_ENV === 'development',
+ });
+
+ return {
+ db,
+ client: sqlite,
+ type: 'sqlite' as const,
+
+ // Helper methods
+ async close() {
+ sqlite.close();
+ },
+
+ async healthCheck() {
+ try {
+ sqlite.query('SELECT 1').get();
+ return true;
+ } catch {
+ return false;
+ }
+ },
+
+ async transaction(fn: (tx: any) => Promise<any>) {
+ return db.transaction(fn);
+ },
+ };
+}
+
+// Create singleton instance
+let dbInstance: DatabaseClient | null = null;
+
+/**
+ * Get database instance (singleton)
+ */
+export function getDatabase(): DatabaseClient {
+ if (!dbInstance) {
+ dbInstance = createDatabase();
+ }
+ return dbInstance;
+}
+
+/**
+ * Close database connection
+ */
+export async function closeDatabase() {
+ if (dbInstance) {
+ await dbInstance.close();
+ dbInstance = null;
+ }
+}
+
+// Export convenience references
+export const { db, client, type: dbType } = getDatabase();
+
+// Re-export schema for convenience
+export * from './schema';
+
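+// Illustrative usage (assumed call sites):
+//
+//   import { db, getDatabase, closeDatabase } from '@/lib/db/adapter';
+//   const healthy = await getDatabase().healthCheck(); // true when `SELECT 1` succeeds
+//   await closeDatabase();                             // e.g. during graceful shutdown
+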
+/**
+ * Database migration utilities
+ */
+export async function runMigrations() {
+ const { migrate } = await import('drizzle-orm/bun-sqlite/migrator');
+ await migrate(db, { migrationsFolder: './drizzle' });
+}
\ No newline at end of file
diff --git a/src/lib/db/index.ts b/src/lib/db/index.ts
index 66414b3..fa2fff7 100644
--- a/src/lib/db/index.ts
+++ b/src/lib/db/index.ts
@@ -1,489 +1,85 @@
-import { z } from "zod";
-import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import fs from "fs";
import path from "path";
-import { configSchema } from "./schema";
+import { migrate } from "drizzle-orm/bun-sqlite/migrator";
-// Define the database URL - for development we'll use a local SQLite file
-const dataDir = path.join(process.cwd(), "data");
-// Ensure data directory exists
-if (!fs.existsSync(dataDir)) {
- fs.mkdirSync(dataDir, { recursive: true });
-}
+// Skip database initialization in test environment
+let db: ReturnType<typeof drizzle>;
-const dbPath = path.join(dataDir, "gitea-mirror.db");
+if (process.env.NODE_ENV !== "test") {
+ // Define the database URL - for development we'll use a local SQLite file
+ const dataDir = path.join(process.cwd(), "data");
+ // Ensure data directory exists
+ if (!fs.existsSync(dataDir)) {
+ fs.mkdirSync(dataDir, { recursive: true });
+ }
-// Create an empty database file if it doesn't exist
-if (!fs.existsSync(dbPath)) {
- fs.writeFileSync(dbPath, "");
-}
+ const dbPath = path.join(dataDir, "gitea-mirror.db");
-// Create SQLite database instance using Bun's native driver
-let sqlite: Database;
-try {
- sqlite = new Database(dbPath);
- console.log("Successfully connected to SQLite database using Bun's native driver");
+ // Create an empty database file if it doesn't exist
+ if (!fs.existsSync(dbPath)) {
+ fs.writeFileSync(dbPath, "");
+ }
- // Ensure all required tables exist
- ensureTablesExist(sqlite);
-
- // Run migrations
- runMigrations(sqlite);
-} catch (error) {
- console.error("Error opening database:", error);
- throw error;
-}
-
-/**
- * Run database migrations
- */
-function runMigrations(db: Database) {
+ // Create SQLite database instance using Bun's native driver
+ let sqlite: Database;
try {
- // Migration 1: Add destination_org column to organizations table
- const orgTableInfo = db.query("PRAGMA table_info(organizations)").all() as Array<{name: string}>;
- const hasDestinationOrg = orgTableInfo.some(col => col.name === 'destination_org');
-
- if (!hasDestinationOrg) {
- console.log("🔄 Running migration: Adding destination_org column to organizations table");
- db.exec("ALTER TABLE organizations ADD COLUMN destination_org TEXT");
- console.log("✅ Migration completed: destination_org column added");
- }
-
- // Migration 2: Add destination_org column to repositories table
- const repoTableInfo = db.query("PRAGMA table_info(repositories)").all() as Array<{name: string}>;
- const hasRepoDestinationOrg = repoTableInfo.some(col => col.name === 'destination_org');
-
- if (!hasRepoDestinationOrg) {
- console.log("🔄 Running migration: Adding destination_org column to repositories table");
- db.exec("ALTER TABLE repositories ADD COLUMN destination_org TEXT");
- console.log("✅ Migration completed: destination_org column added to repositories");
- }
+ sqlite = new Database(dbPath);
+ console.log("Successfully connected to SQLite database using Bun's native driver");
} catch (error) {
- console.error("❌ Error running migrations:", error);
- // Don't throw - migrations should be non-breaking
+ console.error("Error opening database:", error);
+ throw error;
}
-}
-/**
- * Ensure all required tables exist in the database
- */
-function ensureTablesExist(db: Database) {
- const requiredTables = [
- "users",
- "configs",
- "repositories",
- "organizations",
- "mirror_jobs",
- "events",
- ];
+ // Create drizzle instance with the SQLite client
+ db = drizzle({ client: sqlite });
- for (const table of requiredTables) {
+ /**
+ * Run Drizzle migrations
+ */
+ function runDrizzleMigrations() {
try {
- // Check if table exists
- const result = db.query(`SELECT name FROM sqlite_master WHERE type='table' AND name='${table}'`).get();
+ console.log("🔄 Checking for pending migrations...");
+
+ // Check if migrations table exists
+ const migrationsTableExists = sqlite
+ .query("SELECT name FROM sqlite_master WHERE type='table' AND name='__drizzle_migrations'")
+ .get();
- if (!result) {
- console.warn(`⚠️ Table '${table}' is missing. Creating it now...`);
- createTable(db, table);
- console.log(`✅ Table '${table}' created successfully`);
+ if (!migrationsTableExists) {
+ console.log("📦 First time setup - running initial migrations...");
}
+
+ // Run migrations using Drizzle migrate function
+ migrate(db, { migrationsFolder: "./drizzle" });
+
+ console.log("✅ Database migrations completed successfully");
} catch (error) {
- console.error(`❌ Error checking/creating table '${table}':`, error);
+ console.error("❌ Error running migrations:", error);
throw error;
}
}
+
+ // Run Drizzle migrations after db is initialized
+ runDrizzleMigrations();
}
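+
+// Note: the SQL files in ./drizzle are assumed to be generated ahead of time with
+// drizzle-kit (e.g. `bunx drizzle-kit generate`); migrate() only applies pending ones.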
-/**
- * Create a specific table with its schema
- */
-function createTable(db: Database, tableName: string) {
- switch (tableName) {
- case "users":
- db.exec(`
- CREATE TABLE users (
- id TEXT PRIMARY KEY,
- username TEXT NOT NULL,
- password TEXT NOT NULL,
- email TEXT NOT NULL,
- created_at INTEGER NOT NULL,
- updated_at INTEGER NOT NULL
- )
- `);
- break;
+export { db };
- case "configs":
- db.exec(`
- CREATE TABLE configs (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- name TEXT NOT NULL,
- is_active INTEGER NOT NULL DEFAULT 1,
- github_config TEXT NOT NULL,
- gitea_config TEXT NOT NULL,
- include TEXT NOT NULL DEFAULT '["*"]',
- exclude TEXT NOT NULL DEFAULT '[]',
- schedule_config TEXT NOT NULL,
- cleanup_config TEXT NOT NULL,
- created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- FOREIGN KEY (user_id) REFERENCES users(id)
- )
- `);
- break;
-
- case "repositories":
- db.exec(`
- CREATE TABLE repositories (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- config_id TEXT NOT NULL,
- name TEXT NOT NULL,
- full_name TEXT NOT NULL,
- url TEXT NOT NULL,
- clone_url TEXT NOT NULL,
- owner TEXT NOT NULL,
- organization TEXT,
- mirrored_location TEXT DEFAULT '',
- is_private INTEGER NOT NULL DEFAULT 0,
- is_fork INTEGER NOT NULL DEFAULT 0,
- forked_from TEXT,
- has_issues INTEGER NOT NULL DEFAULT 0,
- is_starred INTEGER NOT NULL DEFAULT 0,
- language TEXT,
- description TEXT,
- default_branch TEXT NOT NULL,
- visibility TEXT NOT NULL DEFAULT 'public',
- status TEXT NOT NULL DEFAULT 'imported',
- last_mirrored INTEGER,
- error_message TEXT,
- created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- FOREIGN KEY (user_id) REFERENCES users(id),
- FOREIGN KEY (config_id) REFERENCES configs(id)
- )
- `);
-
- // Create indexes for repositories
- db.exec(`
- CREATE INDEX IF NOT EXISTS idx_repositories_user_id ON repositories(user_id);
- CREATE INDEX IF NOT EXISTS idx_repositories_config_id ON repositories(config_id);
- CREATE INDEX IF NOT EXISTS idx_repositories_status ON repositories(status);
- CREATE INDEX IF NOT EXISTS idx_repositories_owner ON repositories(owner);
- CREATE INDEX IF NOT EXISTS idx_repositories_organization ON repositories(organization);
- CREATE INDEX IF NOT EXISTS idx_repositories_is_fork ON repositories(is_fork);
- CREATE INDEX IF NOT EXISTS idx_repositories_is_starred ON repositories(is_starred);
- `);
- break;
-
- case "organizations":
- db.exec(`
- CREATE TABLE organizations (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- config_id TEXT NOT NULL,
- name TEXT NOT NULL,
- avatar_url TEXT NOT NULL,
- membership_role TEXT NOT NULL DEFAULT 'member',
- is_included INTEGER NOT NULL DEFAULT 1,
- status TEXT NOT NULL DEFAULT 'imported',
- last_mirrored INTEGER,
- error_message TEXT,
- repository_count INTEGER NOT NULL DEFAULT 0,
- destination_org TEXT,
- created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- FOREIGN KEY (user_id) REFERENCES users(id),
- FOREIGN KEY (config_id) REFERENCES configs(id)
- )
- `);
-
- // Create indexes for organizations
- db.exec(`
- CREATE INDEX IF NOT EXISTS idx_organizations_user_id ON organizations(user_id);
- CREATE INDEX IF NOT EXISTS idx_organizations_config_id ON organizations(config_id);
- CREATE INDEX IF NOT EXISTS idx_organizations_status ON organizations(status);
- CREATE INDEX IF NOT EXISTS idx_organizations_is_included ON organizations(is_included);
- `);
- break;
-
- case "mirror_jobs":
- db.exec(`
- CREATE TABLE mirror_jobs (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- repository_id TEXT,
- repository_name TEXT,
- organization_id TEXT,
- organization_name TEXT,
- details TEXT,
- status TEXT NOT NULL DEFAULT 'imported',
- message TEXT NOT NULL,
- timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-
- -- New fields for job resilience
- job_type TEXT NOT NULL DEFAULT 'mirror',
- batch_id TEXT,
- total_items INTEGER,
- completed_items INTEGER DEFAULT 0,
- item_ids TEXT, -- JSON array as text
- completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
- in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
- started_at TIMESTAMP,
- completed_at TIMESTAMP,
- last_checkpoint TIMESTAMP,
-
- FOREIGN KEY (user_id) REFERENCES users(id)
- )
- `);
-
- // Create indexes for mirror_jobs
- db.exec(`
- CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
- CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
- CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
- CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
- CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
- `);
- break;
-
- case "events":
- db.exec(`
- CREATE TABLE events (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- channel TEXT NOT NULL,
- payload TEXT NOT NULL,
- read INTEGER NOT NULL DEFAULT 0,
- created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
- FOREIGN KEY (user_id) REFERENCES users(id)
- )
- `);
-
- // Create indexes for events
- db.exec(`
- CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
- CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
- CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
- `);
- break;
-
- default:
- throw new Error(`Unknown table: ${tableName}`);
- }
-}
-
-// Create drizzle instance with the SQLite client
-export const db = drizzle({ client: sqlite });
-
-// Simple async wrapper around SQLite API for compatibility
-// This maintains backward compatibility with existing code
-export const client = {
- async execute(sql: string, params?: any[]) {
- try {
- const stmt = sqlite.query(sql);
- if (/^\s*select/i.test(sql)) {
- const rows = stmt.all(params ?? []);
- return { rows } as { rows: any[] };
- }
- stmt.run(params ?? []);
- return { rows: [] } as { rows: any[] };
- } catch (error) {
- console.error(`Error executing SQL: ${sql}`, error);
- throw error;
- }
- },
-};
-
-// Define the tables
-export const users = sqliteTable("users", {
- id: text("id").primaryKey(),
- username: text("username").notNull(),
- password: text("password").notNull(),
- email: text("email").notNull(),
- createdAt: integer("created_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
- updatedAt: integer("updated_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-});
-
-// New table for event notifications (replacing Redis pub/sub)
-export const events = sqliteTable("events", {
- id: text("id").primaryKey(),
- userId: text("user_id").notNull().references(() => users.id),
- channel: text("channel").notNull(),
- payload: text("payload", { mode: "json" }).notNull(),
- read: integer("read", { mode: "boolean" }).notNull().default(false),
- createdAt: integer("created_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-});
-
-const githubSchema = configSchema.shape.githubConfig;
-const giteaSchema = configSchema.shape.giteaConfig;
-const scheduleSchema = configSchema.shape.scheduleConfig;
-const cleanupSchema = configSchema.shape.cleanupConfig;
-
-export const configs = sqliteTable("configs", {
- id: text("id").primaryKey(),
- userId: text("user_id")
- .notNull()
- .references(() => users.id),
- name: text("name").notNull(),
- isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
-
- githubConfig: text("github_config", { mode: "json" })
- .$type<z.infer<typeof githubSchema>>()
- .notNull(),
-
- giteaConfig: text("gitea_config", { mode: "json" })
- .$type<z.infer<typeof giteaSchema>>()
- .notNull(),
-
- include: text("include", { mode: "json" })
- .$type<string[]>()
- .notNull()
- .default(["*"]),
-
- exclude: text("exclude", { mode: "json" })
- .$type<string[]>()
- .notNull()
- .default([]),
-
- scheduleConfig: text("schedule_config", { mode: "json" })
- .$type<z.infer<typeof scheduleSchema>>()
- .notNull(),
-
- cleanupConfig: text("cleanup_config", { mode: "json" })
- .$type<z.infer<typeof cleanupSchema>>()
- .notNull(),
-
- createdAt: integer("created_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-
- updatedAt: integer("updated_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-});
-
-export const repositories = sqliteTable("repositories", {
- id: text("id").primaryKey(),
- userId: text("user_id")
- .notNull()
- .references(() => users.id),
- configId: text("config_id")
- .notNull()
- .references(() => configs.id),
- name: text("name").notNull(),
- fullName: text("full_name").notNull(),
- url: text("url").notNull(),
- cloneUrl: text("clone_url").notNull(),
- owner: text("owner").notNull(),
- organization: text("organization"),
- mirroredLocation: text("mirrored_location").default(""),
-
- isPrivate: integer("is_private", { mode: "boolean" })
- .notNull()
- .default(false),
- isForked: integer("is_fork", { mode: "boolean" }).notNull().default(false),
- forkedFrom: text("forked_from"),
-
- hasIssues: integer("has_issues", { mode: "boolean" })
- .notNull()
- .default(false),
- isStarred: integer("is_starred", { mode: "boolean" })
- .notNull()
- .default(false),
- isArchived: integer("is_archived", { mode: "boolean" })
- .notNull()
- .default(false),
-
- size: integer("size").notNull().default(0),
- hasLFS: integer("has_lfs", { mode: "boolean" }).notNull().default(false),
- hasSubmodules: integer("has_submodules", { mode: "boolean" })
- .notNull()
- .default(false),
-
- defaultBranch: text("default_branch").notNull(),
- visibility: text("visibility").notNull().default("public"),
-
- status: text("status").notNull().default("imported"),
- lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
- errorMessage: text("error_message"),
-
- createdAt: integer("created_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
- updatedAt: integer("updated_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-});
-
-export const mirrorJobs = sqliteTable("mirror_jobs", {
- id: text("id").primaryKey(),
- userId: text("user_id")
- .notNull()
- .references(() => users.id),
- repositoryId: text("repository_id"),
- repositoryName: text("repository_name"),
- organizationId: text("organization_id"),
- organizationName: text("organization_name"),
- details: text("details"),
- status: text("status").notNull().default("imported"),
- message: text("message").notNull(),
- timestamp: integer("timestamp", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-
- // New fields for job resilience
- jobType: text("job_type").notNull().default("mirror"),
- batchId: text("batch_id"),
- totalItems: integer("total_items"),
- completedItems: integer("completed_items").default(0),
- itemIds: text("item_ids", { mode: "json" }).$type(),
- completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
- inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
- startedAt: integer("started_at", { mode: "timestamp" }),
- completedAt: integer("completed_at", { mode: "timestamp" }),
- lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
-});
-
-export const organizations = sqliteTable("organizations", {
- id: text("id").primaryKey(),
- userId: text("user_id")
- .notNull()
- .references(() => users.id),
- configId: text("config_id")
- .notNull()
- .references(() => configs.id),
- name: text("name").notNull(),
-
- avatarUrl: text("avatar_url").notNull(),
-
- membershipRole: text("membership_role").notNull().default("member"),
-
- isIncluded: integer("is_included", { mode: "boolean" })
- .notNull()
- .default(true),
-
- // Override destination organization for this GitHub org's repos
- destinationOrg: text("destination_org"),
-
- status: text("status").notNull().default("imported"),
- lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
- errorMessage: text("error_message"),
-
- repositoryCount: integer("repository_count").notNull().default(0),
-
- createdAt: integer("created_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
- updatedAt: integer("updated_at", { mode: "timestamp" })
- .notNull()
- .default(new Date()),
-});
+// Export all table definitions from schema
+export {
+ users,
+ events,
+ configs,
+ repositories,
+ mirrorJobs,
+ organizations,
+ sessions,
+ accounts,
+ verificationTokens,
+ oauthApplications,
+ oauthAccessTokens,
+ oauthConsent,
+ ssoProviders
+} from "./schema";
diff --git a/src/lib/db/schema.sql b/src/lib/db/schema.sql
deleted file mode 100644
index 264645b..0000000
--- a/src/lib/db/schema.sql
+++ /dev/null
@@ -1,75 +0,0 @@
--- Users table
-CREATE TABLE IF NOT EXISTS users (
- id TEXT PRIMARY KEY,
- username TEXT NOT NULL UNIQUE,
- password TEXT NOT NULL,
- email TEXT NOT NULL,
- created_at DATETIME NOT NULL,
- updated_at DATETIME NOT NULL
-);
-
--- Configurations table
-CREATE TABLE IF NOT EXISTS configs (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- name TEXT NOT NULL,
- is_active BOOLEAN NOT NULL DEFAULT 1,
- github_config TEXT NOT NULL,
- gitea_config TEXT NOT NULL,
- schedule_config TEXT NOT NULL,
- include TEXT NOT NULL,
- exclude TEXT NOT NULL,
- created_at DATETIME NOT NULL,
- updated_at DATETIME NOT NULL,
- FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
-);
-
--- Repositories table
-CREATE TABLE IF NOT EXISTS repositories (
- id TEXT PRIMARY KEY,
- config_id TEXT NOT NULL,
- name TEXT NOT NULL,
- full_name TEXT NOT NULL,
- url TEXT NOT NULL,
- is_private BOOLEAN NOT NULL,
- is_fork BOOLEAN NOT NULL,
- owner TEXT NOT NULL,
- organization TEXT,
- mirrored_location TEXT DEFAULT '',
- has_issues BOOLEAN NOT NULL,
- is_starred BOOLEAN NOT NULL,
- status TEXT NOT NULL,
- error_message TEXT,
- last_mirrored DATETIME,
- created_at DATETIME NOT NULL,
- updated_at DATETIME NOT NULL,
- FOREIGN KEY (config_id) REFERENCES configs (id) ON DELETE CASCADE
-);
-
--- Organizations table
-CREATE TABLE IF NOT EXISTS organizations (
- id TEXT PRIMARY KEY,
- config_id TEXT NOT NULL,
- name TEXT NOT NULL,
- type TEXT NOT NULL,
- is_included BOOLEAN NOT NULL,
- repository_count INTEGER NOT NULL,
- created_at DATETIME NOT NULL,
- updated_at DATETIME NOT NULL,
- FOREIGN KEY (config_id) REFERENCES configs (id) ON DELETE CASCADE
-);
-
--- Mirror jobs table
-CREATE TABLE IF NOT EXISTS mirror_jobs (
- id TEXT PRIMARY KEY,
- config_id TEXT NOT NULL,
- repository_id TEXT,
- status TEXT NOT NULL,
- started_at DATETIME NOT NULL,
- completed_at DATETIME,
- log TEXT NOT NULL,
- created_at DATETIME NOT NULL,
- updated_at DATETIME NOT NULL,
- FOREIGN KEY (config_id) REFERENCES configs (id) ON DELETE CASCADE,
- FOREIGN KEY (repository_id) REFERENCES repositories (id) ON DELETE SET NULL
-);
diff --git a/src/lib/db/schema.ts b/src/lib/db/schema.ts
index b9decb5..35dc62d 100644
--- a/src/lib/db/schema.ts
+++ b/src/lib/db/schema.ts
@@ -1,182 +1,615 @@
import { z } from "zod";
-import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
-import { membershipRoleEnum } from "@/types/organizations";
+import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core";
+import { sql } from "drizzle-orm";
-// User schema
+// ===== Zod Validation Schemas =====
export const userSchema = z.object({
- id: z.string().uuid().optional(),
- username: z.string().min(3),
- password: z.string().min(8).optional(), // Hashed password
+ id: z.string(),
+ username: z.string(),
+ password: z.string(),
email: z.string().email(),
- createdAt: z.date().default(() => new Date()),
- updatedAt: z.date().default(() => new Date()),
+ emailVerified: z.boolean().default(false),
+ createdAt: z.coerce.date(),
+ updatedAt: z.coerce.date(),
});
-export type User = z.infer<typeof userSchema>;
+export const githubConfigSchema = z.object({
+ owner: z.string(),
+ type: z.enum(["personal", "organization"]),
+ token: z.string(),
+ includeStarred: z.boolean().default(false),
+ includeForks: z.boolean().default(true),
+ includeArchived: z.boolean().default(false),
+ includePrivate: z.boolean().default(true),
+ includePublic: z.boolean().default(true),
+ includeOrganizations: z.array(z.string()).default([]),
+ starredReposOrg: z.string().optional(),
+ mirrorStrategy: z.enum(["preserve", "single-org", "flat-user"]).default("preserve"),
+ defaultOrg: z.string().optional(),
+});
+
+export const giteaConfigSchema = z.object({
+ url: z.string().url(),
+ token: z.string(),
+ defaultOwner: z.string(),
+ mirrorInterval: z.string().default("8h"),
+ lfs: z.boolean().default(false),
+ wiki: z.boolean().default(false),
+ visibility: z
+ .enum(["public", "private", "limited", "default"])
+ .default("default"),
+ createOrg: z.boolean().default(true),
+ templateOwner: z.string().optional(),
+ templateRepo: z.string().optional(),
+ addTopics: z.boolean().default(true),
+ topicPrefix: z.string().optional(),
+ preserveVisibility: z.boolean().default(true),
+ forkStrategy: z
+ .enum(["skip", "reference", "full-copy"])
+ .default("reference"),
+});
+
+export const scheduleConfigSchema = z.object({
+ enabled: z.boolean().default(false),
+ interval: z.string().default("0 2 * * *"),
+ concurrent: z.boolean().default(false),
+ batchSize: z.number().default(10),
+ pauseBetweenBatches: z.number().default(5000),
+ retryAttempts: z.number().default(3),
+ retryDelay: z.number().default(60000),
+ timeout: z.number().default(3600000),
+ autoRetry: z.boolean().default(true),
+ cleanupBeforeMirror: z.boolean().default(false),
+ notifyOnFailure: z.boolean().default(true),
+ notifyOnSuccess: z.boolean().default(false),
+ logLevel: z.enum(["error", "warn", "info", "debug"]).default("info"),
+ timezone: z.string().default("UTC"),
+ onlyMirrorUpdated: z.boolean().default(false),
+ updateInterval: z.number().default(86400000),
+ skipRecentlyMirrored: z.boolean().default(true),
+ recentThreshold: z.number().default(3600000),
+});
+
+export const cleanupConfigSchema = z.object({
+ enabled: z.boolean().default(false),
+ deleteFromGitea: z.boolean().default(false),
+ deleteIfNotInGitHub: z.boolean().default(true),
+ protectedRepos: z.array(z.string()).default([]),
+ dryRun: z.boolean().default(true),
+ orphanedRepoAction: z
+ .enum(["skip", "archive", "delete"])
+ .default("archive"),
+ batchSize: z.number().default(10),
+ pauseBetweenDeletes: z.number().default(2000),
+});
-// Configuration schema
export const configSchema = z.object({
- id: z.string().uuid().optional(),
- userId: z.string().uuid(),
- name: z.string().min(1),
+ id: z.string(),
+ userId: z.string(),
+ name: z.string(),
isActive: z.boolean().default(true),
- githubConfig: z.object({
- username: z.string().min(1),
- token: z.string().optional(),
- skipForks: z.boolean().default(false),
- privateRepositories: z.boolean().default(false),
- mirrorIssues: z.boolean().default(false),
- mirrorWiki: z.boolean().default(false),
- mirrorStarred: z.boolean().default(false),
- useSpecificUser: z.boolean().default(false),
- singleRepo: z.string().optional(),
- includeOrgs: z.array(z.string()).default([]),
- excludeOrgs: z.array(z.string()).default([]),
- mirrorPublicOrgs: z.boolean().default(false),
- publicOrgs: z.array(z.string()).default([]),
- skipStarredIssues: z.boolean().default(false),
- }),
- giteaConfig: z.object({
- username: z.string().min(1),
- url: z.string().url(),
- token: z.string().min(1),
- organization: z.string().optional(),
- visibility: z.enum(["public", "private", "limited"]).default("public"),
- starredReposOrg: z.string().default("github"),
- preserveOrgStructure: z.boolean().default(false),
- mirrorStrategy: z.enum(["preserve", "single-org", "flat-user", "mixed"]).optional(),
- personalReposOrg: z.string().optional(), // Override destination for personal repos
- }),
+ githubConfig: githubConfigSchema,
+ giteaConfig: giteaConfigSchema,
include: z.array(z.string()).default(["*"]),
exclude: z.array(z.string()).default([]),
- scheduleConfig: z.object({
- enabled: z.boolean().default(false),
- interval: z.number().min(1).default(3600), // in seconds
- lastRun: z.date().optional(),
- nextRun: z.date().optional(),
- }),
- cleanupConfig: z.object({
- enabled: z.boolean().default(false),
- retentionDays: z.number().min(1).default(604800), // in seconds (default: 7 days)
- lastRun: z.date().optional(),
- nextRun: z.date().optional(),
- }),
- createdAt: z.date().default(() => new Date()),
- updatedAt: z.date().default(() => new Date()),
+ scheduleConfig: scheduleConfigSchema,
+ cleanupConfig: cleanupConfigSchema,
+ createdAt: z.coerce.date(),
+ updatedAt: z.coerce.date(),
});
-export type Config = z.infer<typeof configSchema>;
-
-// Repository schema
export const repositorySchema = z.object({
- id: z.string().uuid().optional(),
- userId: z.string().uuid().optional(),
- configId: z.string().uuid(),
-
- name: z.string().min(1),
- fullName: z.string().min(1),
+ id: z.string(),
+ userId: z.string(),
+ configId: z.string(),
+ name: z.string(),
+ fullName: z.string(),
url: z.string().url(),
cloneUrl: z.string().url(),
-
- owner: z.string().min(1),
- organization: z.string().optional(),
-
+ owner: z.string(),
+ organization: z.string().optional().nullable(),
+ mirroredLocation: z.string().default(""),
isPrivate: z.boolean().default(false),
isForked: z.boolean().default(false),
- forkedFrom: z.string().optional(),
-
+ forkedFrom: z.string().optional().nullable(),
hasIssues: z.boolean().default(false),
isStarred: z.boolean().default(false),
isArchived: z.boolean().default(false),
-
- size: z.number(),
+ size: z.number().default(0),
hasLFS: z.boolean().default(false),
hasSubmodules: z.boolean().default(false),
-
+ language: z.string().optional().nullable(),
+ description: z.string().optional().nullable(),
defaultBranch: z.string(),
- visibility: repositoryVisibilityEnum.default("public"),
-
- status: repoStatusEnum.default("imported"),
- lastMirrored: z.date().optional(),
- errorMessage: z.string().optional(),
-
- mirroredLocation: z.string().default(""), // Store the full Gitea path where repo was mirrored
- destinationOrg: z.string().optional(), // Custom destination organization override
-
- createdAt: z.date().default(() => new Date()),
- updatedAt: z.date().default(() => new Date()),
+ visibility: z.enum(["public", "private", "internal"]).default("public"),
+ status: z
+ .enum([
+ "imported",
+ "mirroring",
+ "mirrored",
+ "failed",
+ "skipped",
+ "deleting",
+ "deleted",
+ "syncing",
+ "synced",
+ ])
+ .default("imported"),
+ lastMirrored: z.coerce.date().optional().nullable(),
+ errorMessage: z.string().optional().nullable(),
+ destinationOrg: z.string().optional().nullable(),
+ createdAt: z.coerce.date(),
+ updatedAt: z.coerce.date(),
});
-export type Repository = z.infer<typeof repositorySchema>;
-
-// Mirror job schema
export const mirrorJobSchema = z.object({
- id: z.string().uuid().optional(),
- userId: z.string().uuid().optional(),
- repositoryId: z.string().uuid().optional(),
- repositoryName: z.string().optional(),
- organizationId: z.string().uuid().optional(),
- organizationName: z.string().optional(),
- details: z.string().optional(),
- status: repoStatusEnum.default("imported"),
+ id: z.string(),
+ userId: z.string(),
+ repositoryId: z.string().optional().nullable(),
+ repositoryName: z.string().optional().nullable(),
+ organizationId: z.string().optional().nullable(),
+ organizationName: z.string().optional().nullable(),
+ details: z.string().optional().nullable(),
+ status: z
+ .enum([
+ "imported",
+ "mirroring",
+ "mirrored",
+ "failed",
+ "skipped",
+ "deleting",
+ "deleted",
+ "syncing",
+ "synced",
+ ])
+ .default("imported"),
message: z.string(),
- timestamp: z.date().default(() => new Date()),
-
- // New fields for job resilience
- jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
- batchId: z.string().uuid().optional(), // Group related jobs together
- totalItems: z.number().optional(), // Total number of items to process
- completedItems: z.number().optional(), // Number of items completed
- itemIds: z.array(z.string()).optional(), // IDs of items to process
- completedItemIds: z.array(z.string()).optional(), // IDs of completed items
- inProgress: z.boolean().default(false), // Whether the job is currently running
- startedAt: z.date().optional(), // When the job started
- completedAt: z.date().optional(), // When the job completed
- lastCheckpoint: z.date().optional(), // Last time progress was saved
+ timestamp: z.coerce.date(),
+ jobType: z.enum(["mirror", "cleanup", "import"]).default("mirror"),
+ batchId: z.string().optional().nullable(),
+ totalItems: z.number().optional().nullable(),
+ completedItems: z.number().default(0),
+ itemIds: z.array(z.string()).optional().nullable(),
+ completedItemIds: z.array(z.string()).default([]),
+ inProgress: z.boolean().default(false),
+ startedAt: z.coerce.date().optional().nullable(),
+ completedAt: z.coerce.date().optional().nullable(),
+ lastCheckpoint: z.coerce.date().optional().nullable(),
});
-export type MirrorJob = z.infer<typeof mirrorJobSchema>;
-
-// Organization schema
export const organizationSchema = z.object({
- id: z.string().uuid().optional(),
- userId: z.string().uuid().optional(),
- configId: z.string().uuid(),
-
- avatarUrl: z.string().url(),
-
- name: z.string().min(1),
-
- membershipRole: membershipRoleEnum.default("member"),
-
- isIncluded: z.boolean().default(false),
-
- status: repoStatusEnum.default("imported"),
- lastMirrored: z.date().optional(),
- errorMessage: z.string().optional(),
-
+ id: z.string(),
+ userId: z.string(),
+ configId: z.string(),
+ name: z.string(),
+ avatarUrl: z.string(),
+ membershipRole: z.enum(["admin", "member", "owner"]).default("member"),
+ isIncluded: z.boolean().default(true),
+ destinationOrg: z.string().optional().nullable(),
+ status: z
+ .enum([
+ "imported",
+ "mirroring",
+ "mirrored",
+ "failed",
+ "skipped",
+ "deleting",
+ "deleted",
+ "syncing",
+ "synced",
+ ])
+ .default("imported"),
+ lastMirrored: z.coerce.date().optional().nullable(),
+ errorMessage: z.string().optional().nullable(),
repositoryCount: z.number().default(0),
- publicRepositoryCount: z.number().optional(),
- privateRepositoryCount: z.number().optional(),
- forkRepositoryCount: z.number().optional(),
-
- // Override destination organization for this GitHub org's repos
- destinationOrg: z.string().optional(),
-
- createdAt: z.date().default(() => new Date()),
- updatedAt: z.date().default(() => new Date()),
+ createdAt: z.coerce.date(),
+ updatedAt: z.coerce.date(),
});
-export type Organization = z.infer<typeof organizationSchema>;
-
-// Event schema (for SQLite-based pub/sub)
export const eventSchema = z.object({
- id: z.string().uuid().optional(),
- userId: z.string().uuid(),
- channel: z.string().min(1),
+ id: z.string(),
+ userId: z.string(),
+ channel: z.string(),
payload: z.any(),
read: z.boolean().default(false),
- createdAt: z.date().default(() => new Date()),
+ createdAt: z.coerce.date(),
});
-export type Event = z.infer<typeof eventSchema>;
+// ===== Drizzle Table Definitions =====
+
+export const users = sqliteTable("users", {
+ id: text("id").primaryKey(),
+ name: text("name"),
+ email: text("email").notNull().unique(),
+ emailVerified: integer("email_verified", { mode: "boolean" }).notNull().default(false),
+ image: text("image"),
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ // Custom fields
+ username: text("username"),
+});
+
+export const events = sqliteTable("events", {
+ id: text("id").primaryKey(),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id),
+ channel: text("channel").notNull(),
+ payload: text("payload", { mode: "json" }).notNull(),
+ read: integer("read", { mode: "boolean" }).notNull().default(false),
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ userChannelIdx: index("idx_events_user_channel").on(table.userId, table.channel),
+ createdAtIdx: index("idx_events_created_at").on(table.createdAt),
+ readIdx: index("idx_events_read").on(table.read),
+ };
+});
+
+export const configs = sqliteTable("configs", {
+ id: text("id").primaryKey(),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id),
+ name: text("name").notNull(),
+ isActive: integer("is_active", { mode: "boolean" }).notNull().default(true),
+
+ githubConfig: text("github_config", { mode: "json" })
+ .$type<z.infer<typeof githubConfigSchema>>()
+ .notNull(),
+
+ giteaConfig: text("gitea_config", { mode: "json" })
+ .$type<z.infer<typeof giteaConfigSchema>>()
+ .notNull(),
+
+ include: text("include", { mode: "json" })
+ .$type<string[]>()
+ .notNull()
+ .default(sql`'["*"]'`),
+
+ exclude: text("exclude", { mode: "json" })
+ .$type<string[]>()
+ .notNull()
+ .default(sql`'[]'`),
+
+ scheduleConfig: text("schedule_config", { mode: "json" })
+ .$type<z.infer<typeof scheduleConfigSchema>>()
+ .notNull(),
+
+ cleanupConfig: text("cleanup_config", { mode: "json" })
+ .$type<z.infer<typeof cleanupConfigSchema>>()
+ .notNull(),
+
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+});
+
+export const repositories = sqliteTable("repositories", {
+ id: text("id").primaryKey(),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id),
+ configId: text("config_id")
+ .notNull()
+ .references(() => configs.id),
+ name: text("name").notNull(),
+ fullName: text("full_name").notNull(),
+ url: text("url").notNull(),
+ cloneUrl: text("clone_url").notNull(),
+ owner: text("owner").notNull(),
+ organization: text("organization"),
+ mirroredLocation: text("mirrored_location").default(""),
+
+ isPrivate: integer("is_private", { mode: "boolean" })
+ .notNull()
+ .default(false),
+ isForked: integer("is_fork", { mode: "boolean" }).notNull().default(false),
+ forkedFrom: text("forked_from"),
+
+ hasIssues: integer("has_issues", { mode: "boolean" })
+ .notNull()
+ .default(false),
+ isStarred: integer("is_starred", { mode: "boolean" })
+ .notNull()
+ .default(false),
+ isArchived: integer("is_archived", { mode: "boolean" })
+ .notNull()
+ .default(false),
+
+ size: integer("size").notNull().default(0),
+ hasLFS: integer("has_lfs", { mode: "boolean" }).notNull().default(false),
+ hasSubmodules: integer("has_submodules", { mode: "boolean" })
+ .notNull()
+ .default(false),
+
+ language: text("language"),
+ description: text("description"),
+ defaultBranch: text("default_branch").notNull(),
+ visibility: text("visibility").notNull().default("public"),
+
+ status: text("status").notNull().default("imported"),
+ lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
+ errorMessage: text("error_message"),
+
+ destinationOrg: text("destination_org"),
+
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ userIdIdx: index("idx_repositories_user_id").on(table.userId),
+ configIdIdx: index("idx_repositories_config_id").on(table.configId),
+ statusIdx: index("idx_repositories_status").on(table.status),
+ ownerIdx: index("idx_repositories_owner").on(table.owner),
+ organizationIdx: index("idx_repositories_organization").on(table.organization),
+ isForkedIdx: index("idx_repositories_is_fork").on(table.isForked),
+ isStarredIdx: index("idx_repositories_is_starred").on(table.isStarred),
+ };
+});
+
+export const mirrorJobs = sqliteTable("mirror_jobs", {
+ id: text("id").primaryKey(),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id),
+ repositoryId: text("repository_id"),
+ repositoryName: text("repository_name"),
+ organizationId: text("organization_id"),
+ organizationName: text("organization_name"),
+ details: text("details"),
+ status: text("status").notNull().default("imported"),
+ message: text("message").notNull(),
+ timestamp: integer("timestamp", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+
+ // Job resilience fields
+ jobType: text("job_type").notNull().default("mirror"),
+ batchId: text("batch_id"),
+ totalItems: integer("total_items"),
+ completedItems: integer("completed_items").default(0),
+ itemIds: text("item_ids", { mode: "json" }).$type(),
+ completedItemIds: text("completed_item_ids", { mode: "json" })
+ .$type<string[]>()
+ .default(sql`'[]'`),
+ inProgress: integer("in_progress", { mode: "boolean" })
+ .notNull()
+ .default(false),
+ startedAt: integer("started_at", { mode: "timestamp" }),
+ completedAt: integer("completed_at", { mode: "timestamp" }),
+ lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
+}, (table) => {
+ return {
+ userIdIdx: index("idx_mirror_jobs_user_id").on(table.userId),
+ batchIdIdx: index("idx_mirror_jobs_batch_id").on(table.batchId),
+ inProgressIdx: index("idx_mirror_jobs_in_progress").on(table.inProgress),
+ jobTypeIdx: index("idx_mirror_jobs_job_type").on(table.jobType),
+ timestampIdx: index("idx_mirror_jobs_timestamp").on(table.timestamp),
+ };
+});
+
+export const organizations = sqliteTable("organizations", {
+ id: text("id").primaryKey(),
+ userId: text("user_id")
+ .notNull()
+ .references(() => users.id),
+ configId: text("config_id")
+ .notNull()
+ .references(() => configs.id),
+ name: text("name").notNull(),
+
+ avatarUrl: text("avatar_url").notNull(),
+
+ membershipRole: text("membership_role").notNull().default("member"),
+
+ isIncluded: integer("is_included", { mode: "boolean" })
+ .notNull()
+ .default(true),
+
+ destinationOrg: text("destination_org"),
+
+ status: text("status").notNull().default("imported"),
+ lastMirrored: integer("last_mirrored", { mode: "timestamp" }),
+ errorMessage: text("error_message"),
+
+ repositoryCount: integer("repository_count").notNull().default(0),
+
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ userIdIdx: index("idx_organizations_user_id").on(table.userId),
+ configIdIdx: index("idx_organizations_config_id").on(table.configId),
+ statusIdx: index("idx_organizations_status").on(table.status),
+ isIncludedIdx: index("idx_organizations_is_included").on(table.isIncluded),
+ };
+});
+
+// ===== Better Auth Tables =====
+
+// Sessions table
+export const sessions = sqliteTable("sessions", {
+ id: text("id").primaryKey(),
+ token: text("token").notNull().unique(),
+ userId: text("user_id").notNull().references(() => users.id),
+ expiresAt: integer("expires_at", { mode: "timestamp" }).notNull(),
+ ipAddress: text("ip_address"),
+ userAgent: text("user_agent"),
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ userIdIdx: index("idx_sessions_user_id").on(table.userId),
+ tokenIdx: index("idx_sessions_token").on(table.token),
+ expiresAtIdx: index("idx_sessions_expires_at").on(table.expiresAt),
+ };
+});
+
+// Accounts table (for OAuth providers and credentials)
+export const accounts = sqliteTable("accounts", {
+ id: text("id").primaryKey(),
+ accountId: text("account_id").notNull(),
+ userId: text("user_id").notNull().references(() => users.id),
+ providerId: text("provider_id").notNull(),
+ providerUserId: text("provider_user_id"), // Make nullable for email/password auth
+ accessToken: text("access_token"),
+ refreshToken: text("refresh_token"),
+ expiresAt: integer("expires_at", { mode: "timestamp" }),
+ password: text("password"), // For credential provider
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ accountIdIdx: index("idx_accounts_account_id").on(table.accountId),
+ userIdIdx: index("idx_accounts_user_id").on(table.userId),
+ providerIdx: index("idx_accounts_provider").on(table.providerId, table.providerUserId),
+ };
+});
+
+// Verification tokens table
+export const verificationTokens = sqliteTable("verification_tokens", {
+ id: text("id").primaryKey(),
+ token: text("token").notNull().unique(),
+ identifier: text("identifier").notNull(),
+ type: text("type").notNull(), // email, password-reset, etc
+ expiresAt: integer("expires_at", { mode: "timestamp" }).notNull(),
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ tokenIdx: index("idx_verification_tokens_token").on(table.token),
+ identifierIdx: index("idx_verification_tokens_identifier").on(table.identifier),
+ };
+});
+
+// ===== OIDC Provider Tables =====
+
+// OAuth Applications table
+export const oauthApplications = sqliteTable("oauth_applications", {
+ id: text("id").primaryKey(),
+ clientId: text("client_id").notNull().unique(),
+ clientSecret: text("client_secret").notNull(),
+ name: text("name").notNull(),
+ redirectURLs: text("redirect_urls").notNull(), // Comma-separated list
+ metadata: text("metadata"), // JSON string
+ type: text("type").notNull(), // web, mobile, etc
+ disabled: integer("disabled", { mode: "boolean" }).notNull().default(false),
+ userId: text("user_id"), // Optional - owner of the application
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ clientIdIdx: index("idx_oauth_applications_client_id").on(table.clientId),
+ userIdIdx: index("idx_oauth_applications_user_id").on(table.userId),
+ };
+});
+
+// OAuth Access Tokens table
+export const oauthAccessTokens = sqliteTable("oauth_access_tokens", {
+ id: text("id").primaryKey(),
+ accessToken: text("access_token").notNull(),
+ refreshToken: text("refresh_token"),
+ accessTokenExpiresAt: integer("access_token_expires_at", { mode: "timestamp" }).notNull(),
+ refreshTokenExpiresAt: integer("refresh_token_expires_at", { mode: "timestamp" }),
+ clientId: text("client_id").notNull(),
+ userId: text("user_id").notNull().references(() => users.id),
+ scopes: text("scopes").notNull(), // Comma-separated list
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ accessTokenIdx: index("idx_oauth_access_tokens_access_token").on(table.accessToken),
+ userIdIdx: index("idx_oauth_access_tokens_user_id").on(table.userId),
+ clientIdIdx: index("idx_oauth_access_tokens_client_id").on(table.clientId),
+ };
+});
+
+// OAuth Consent table
+export const oauthConsent = sqliteTable("oauth_consent", {
+ id: text("id").primaryKey(),
+ userId: text("user_id").notNull().references(() => users.id),
+ clientId: text("client_id").notNull(),
+ scopes: text("scopes").notNull(), // Comma-separated list
+ consentGiven: integer("consent_given", { mode: "boolean" }).notNull(),
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ userIdIdx: index("idx_oauth_consent_user_id").on(table.userId),
+ clientIdIdx: index("idx_oauth_consent_client_id").on(table.clientId),
+ userClientIdx: index("idx_oauth_consent_user_client").on(table.userId, table.clientId),
+ };
+});
+
+// ===== SSO Provider Tables =====
+
+// SSO Providers table
+export const ssoProviders = sqliteTable("sso_providers", {
+ id: text("id").primaryKey(),
+ issuer: text("issuer").notNull(),
+ domain: text("domain").notNull(),
+ oidcConfig: text("oidc_config").notNull(), // JSON string with OIDC configuration
+ userId: text("user_id").notNull(), // Admin who created this provider
+ providerId: text("provider_id").notNull().unique(), // Unique identifier for the provider
+ organizationId: text("organization_id"), // Optional - if provider is linked to an organization
+ createdAt: integer("created_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+ updatedAt: integer("updated_at", { mode: "timestamp" })
+ .notNull()
+ .default(sql`(unixepoch())`),
+}, (table) => {
+ return {
+ providerIdIdx: index("idx_sso_providers_provider_id").on(table.providerId),
+ domainIdx: index("idx_sso_providers_domain").on(table.domain),
+ issuerIdx: index("idx_sso_providers_issuer").on(table.issuer),
+ };
+});
+
+// Export type definitions
+export type User = z.infer<typeof userSchema>;
+export type Config = z.infer<typeof configSchema>;
+export type Repository = z.infer<typeof repositorySchema>;
+export type MirrorJob = z.infer<typeof mirrorJobSchema>;
+export type Organization = z.infer<typeof organizationSchema>;
+export type Event = z.infer<typeof eventSchema>;
\ No newline at end of file
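
Because the JSON columns are typed against the Zod schemas above, values read from SQLite or from user input can be validated before use. A small sketch using githubConfigSchema; the input object is made up, and parse() fills in the declared defaults:

  import { githubConfigSchema } from "@/lib/db/schema";

  const githubConfig = githubConfigSchema.parse({
    owner: "octocat",      // illustrative values
    type: "personal",
    token: "ghp_example",
  });
  // Defaults from the schema are applied, e.g. githubConfig.mirrorStrategy === "preserve"
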
diff --git a/src/lib/deployment-mode.ts b/src/lib/deployment-mode.ts
new file mode 100644
index 0000000..4f5db45
--- /dev/null
+++ b/src/lib/deployment-mode.ts
@@ -0,0 +1,22 @@
+/**
+ * Deployment mode utilities
+ * Supports both self-hosted and hosted versions
+ */
+
+export const DEPLOYMENT_MODE = process.env.DEPLOYMENT_MODE || 'selfhosted';
+
+export const isSelfHostedMode = () => DEPLOYMENT_MODE === 'selfhosted';
+export const isHostedMode = () => DEPLOYMENT_MODE === 'hosted';
+
+/**
+ * Feature flags for self-hosted version
+ */
+export const features = {
+ // Core features available
+ githubSync: true,
+ giteaMirroring: true,
+ scheduling: true,
+ multiUser: true,
+ githubSponsors: true,
+ unlimitedRepos: true,
+};
\ No newline at end of file
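
A short sketch of how the deployment-mode helpers and feature flags might be consumed elsewhere; the scheduler call site is only implied here, not part of this patch:

  import { isSelfHostedMode, features } from "@/lib/deployment-mode";

  if (isSelfHostedMode() && features.scheduling) {
    // safe to start the local mirroring scheduler (call site not shown)
  }
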
diff --git a/src/lib/events/realtime.ts b/src/lib/events/realtime.ts
new file mode 100644
index 0000000..cf322bc
--- /dev/null
+++ b/src/lib/events/realtime.ts
@@ -0,0 +1,256 @@
+/**
+ * Real-time event system using EventEmitter
+ * For the self-hosted version
+ */
+
+import { EventEmitter } from 'events';
+
+export interface RealtimeEvent {
+ type: string;
+ userId?: string;
+ data: any;
+ timestamp: number;
+}
+
+/**
+ * Real-time event bus for local instance
+ */
+export class RealtimeEventBus extends EventEmitter {
+ private channels = new Map<string, Set<(event: RealtimeEvent) => void>>();
+ private userChannels = new Map<string, string[]>();
+
+ constructor() {
+ super();
+ }
+
+ /**
+ * Handle incoming events
+ */
+ private handleIncomingEvent(channel: string, event: RealtimeEvent) {
+ // Emit to local listeners
+ this.emit(channel, event);
+
+ // Call channel-specific handlers
+ const handlers = this.channels.get(channel);
+ if (handlers) {
+ handlers.forEach(handler => {
+ try {
+ handler(event);
+ } catch (error) {
+ console.error('Error in event handler:', error);
+ }
+ });
+ }
+ }
+
+ /**
+ * Subscribe to a channel
+ */
+ async subscribe(channel: string, handler?: (event: RealtimeEvent) => void) {
+ // Add handler if provided
+ if (handler) {
+ if (!this.channels.has(channel)) {
+ this.channels.set(channel, new Set());
+ }
+ this.channels.get(channel)!.add(handler);
+ }
+
+ // Note: no listener is registered on the emitter here. publish() dispatches
+ // through handleIncomingEvent, which already emits on this EventEmitter and
+ // invokes the handlers above; re-registering handleIncomingEvent as its own
+ // listener would make every emit re-enter itself.
+ }
+
+ /**
+ * Subscribe to user-specific channels
+ */
+ async subscribeUser(userId: string) {
+ const channels = [
+ `user:${userId}`,
+ `user:${userId}:notifications`,
+ `user:${userId}:updates`,
+ ];
+
+ this.userChannels.set(userId, channels);
+
+ for (const channel of channels) {
+ await this.subscribe(channel);
+ }
+ }
+
+ /**
+ * Unsubscribe from a channel
+ */
+ async unsubscribe(channel: string, handler?: (event: RealtimeEvent) => void) {
+ // Remove handler if provided
+ if (handler) {
+ this.channels.get(channel)?.delete(handler);
+
+ // Remove channel if no handlers left
+ if (this.channels.get(channel)?.size === 0) {
+ this.channels.delete(channel);
+ }
+ }
+
+ // Remove local listener if no handlers
+ if (!this.channels.has(channel)) {
+ this.removeAllListeners(channel);
+ }
+ }
+
+ /**
+ * Unsubscribe from user channels
+ */
+ async unsubscribeUser(userId: string) {
+ const channels = this.userChannels.get(userId) || [];
+
+ for (const channel of channels) {
+ await this.unsubscribe(channel);
+ }
+
+ this.userChannels.delete(userId);
+ }
+
+ /**
+ * Publish an event
+ */
+ async publish(channel: string, event: Omit<RealtimeEvent, 'timestamp'>) {
+ const fullEvent: RealtimeEvent = {
+ ...event,
+ timestamp: Date.now(),
+ };
+
+ // Emit locally
+ this.handleIncomingEvent(channel, fullEvent);
+ }
+
+ /**
+ * Broadcast to all users
+ */
+ async broadcast(event: Omit<RealtimeEvent, 'timestamp'>) {
+ await this.publish('broadcast', event);
+ }
+
+ /**
+ * Send event to specific user
+ */
+ async sendToUser(userId: string, event: Omit<RealtimeEvent, 'timestamp'>) {
+ await this.publish(`user:${userId}`, {
+ ...event,
+ userId,
+ });
+ }
+
+ /**
+ * Send activity update
+ */
+ async sendActivity(activity: {
+ userId: string;
+ action: string;
+ resource: string;
+ resourceId: string;
+ details?: any;
+ }) {
+ const event = {
+ type: 'activity',
+ data: activity,
+ };
+
+ // Send to user
+ await this.sendToUser(activity.userId, event);
+
+ // Also publish to activity channel
+ await this.publish('activity', {
+ ...event,
+ userId: activity.userId,
+ });
+ }
+
+ /**
+ * Get event statistics
+ */
+ getStats() {
+ return {
+ channels: this.channels.size,
+ listeners: Array.from(this.channels.values()).reduce(
+ (sum, handlers) => sum + handlers.size,
+ 0
+ ),
+ userChannels: this.userChannels.size,
+ };
+ }
+}
+
+// Global event bus instance
+export const eventBus = new RealtimeEventBus();
+
+/**
+ * React hook for subscribing to events
+ */
+export function useRealtimeEvents(
+ channel: string,
+ handler: (event: RealtimeEvent) => void,
+ deps: any[] = []
+) {
+ if (typeof window !== 'undefined') {
+ const { useEffect } = require('react');
+
+ useEffect(() => {
+ eventBus.subscribe(channel, handler);
+
+ return () => {
+ eventBus.unsubscribe(channel, handler);
+ };
+ }, deps);
+ }
+}
+
+/**
+ * Server-sent events endpoint handler
+ */
+export async function createSSEHandler(userId: string) {
+ const encoder = new TextEncoder();
+
+ // Create a readable stream for SSE; teardown is wired to cancel() below,
+ // since a ReadableStream ignores the return value of start()
+ let cleanup: (() => void) | undefined;
+ const stream = new ReadableStream({
+ async start(controller) {
+ // Send initial connection event
+ controller.enqueue(
+ encoder.encode(`data: ${JSON.stringify({ type: 'connected' })}\n\n`)
+ );
+
+ // Subscribe to user channels
+ await eventBus.subscribeUser(userId);
+
+ // Create event handler
+ const handleEvent = (event: RealtimeEvent) => {
+ controller.enqueue(
+ encoder.encode(`data: ${JSON.stringify(event)}\n\n`)
+ );
+ };
+
+ // Subscribe to channels
+ eventBus.on(`user:${userId}`, handleEvent);
+
+ // Keep connection alive with heartbeat
+ const heartbeat = setInterval(() => {
+ controller.enqueue(encoder.encode(': heartbeat\n\n'));
+ }, 30000);
+
+ // Cleanup on close (invoked from cancel() when the client disconnects)
+ cleanup = () => {
+ clearInterval(heartbeat);
+ eventBus.off(`user:${userId}`, handleEvent);
+ eventBus.unsubscribeUser(userId);
+ };
+ },
+ cancel() {
+ cleanup?.();
+ },
+ });
+
+ return new Response(stream, {
+ headers: {
+ 'Content-Type': 'text/event-stream',
+ 'Cache-Control': 'no-cache',
+ 'Connection': 'keep-alive',
+ },
+ });
+}
\ No newline at end of file
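
A minimal usage sketch of the event bus above; the user id and payload are illustrative:

  import { eventBus } from "@/lib/events/realtime";

  // React to events addressed to one user (channel naming follows subscribeUser)
  await eventBus.subscribe("user:user-123", (event) => {
    console.log(event.type, event.data);
  });

  // Push a notification to that user; publish() stamps the timestamp
  await eventBus.sendToUser("user-123", {
    type: "notification",
    data: { message: "Mirror job finished" },
  });
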
diff --git a/src/lib/gitea.ts b/src/lib/gitea.ts
index e8a115c..d10a195 100644
--- a/src/lib/gitea.ts
+++ b/src/lib/gitea.ts
@@ -11,6 +11,7 @@ import { httpPost, httpGet } from "./http-client";
import { createMirrorJob } from "./helpers";
import { db, organizations, repositories } from "./db";
import { eq, and } from "drizzle-orm";
+import { decryptConfigTokens } from "./utils/config-encryption";
/**
* Helper function to get organization configuration including destination override
@@ -183,12 +184,15 @@ export const isRepoPresentInGitea = async ({
throw new Error("Gitea config is required.");
}
+ // Decrypt config tokens for API usage
+ const decryptedConfig = decryptConfigTokens(config as Config);
+
// Check if the repository exists at the specified owner location
const response = await fetch(
`${config.giteaConfig.url}/api/v1/repos/${owner}/${repoName}`,
{
headers: {
- Authorization: `token ${config.giteaConfig.token}`,
+ Authorization: `token ${decryptedConfig.giteaConfig.token}`,
},
}
);
@@ -371,7 +375,7 @@ export const mirrorGithubRepoToGitea = async ({
service: "git",
},
{
- Authorization: `token ${config.giteaConfig.token}`,
+ Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
@@ -392,7 +396,7 @@ export const mirrorGithubRepoToGitea = async ({
config,
octokit,
repository,
- isRepoInOrg: false,
+ giteaOwner: repoOwner,
});
}
@@ -476,11 +480,14 @@ export async function getOrCreateGiteaOrg({
try {
console.log(`Attempting to get or create Gitea organization: ${orgName}`);
+ // Decrypt config tokens for API usage
+ const decryptedConfig = decryptConfigTokens(config as Config);
+
const orgRes = await fetch(
`${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
{
headers: {
- Authorization: `token ${config.giteaConfig.token}`,
+ Authorization: `token ${decryptedConfig.giteaConfig.token}`,
"Content-Type": "application/json",
},
}
@@ -533,7 +540,7 @@ export async function getOrCreateGiteaOrg({
const createRes = await fetch(`${config.giteaConfig.url}/api/v1/orgs`, {
method: "POST",
headers: {
- Authorization: `token ${config.giteaConfig.token}`,
+ Authorization: `token ${decryptedConfig.giteaConfig.token}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
@@ -720,7 +727,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
private: repository.isPrivate,
},
{
- Authorization: `token ${config.giteaConfig.token}`,
+ Authorization: `token ${decryptedConfig.giteaConfig.token}`,
}
);
@@ -741,7 +748,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
config,
octokit,
repository,
- isRepoInOrg: true,
+ giteaOwner: orgName,
});
}
@@ -1074,6 +1081,9 @@ export const syncGiteaRepo = async ({
throw new Error("Gitea config is required.");
}
+ // Decrypt config tokens for API usage
+ const decryptedConfig = decryptConfigTokens(config as Config);
+
console.log(`Syncing repository ${repository.name}`);
// Mark repo as "syncing" in DB
@@ -1183,12 +1193,12 @@ export const mirrorGitRepoIssuesToGitea = async ({
config,
octokit,
repository,
- isRepoInOrg,
+ giteaOwner,
}: {
config: Partial<Config>;
octokit: Octokit;
repository: Repository;
- isRepoInOrg: boolean;
+ giteaOwner: string;
}) => {
// Covered here: issue title, body, labels, comments, and assignees
if (
@@ -1200,9 +1210,8 @@ export const mirrorGitRepoIssuesToGitea = async ({
throw new Error("Missing GitHub or Gitea configuration.");
}
- const repoOrigin = isRepoInOrg
- ? repository.organization
- : config.githubConfig.username;
+ // Decrypt config tokens for API usage
+ const decryptedConfig = decryptConfigTokens(config as Config);
const [owner, repo] = repository.fullName.split("/");
@@ -1232,7 +1241,7 @@ export const mirrorGitRepoIssuesToGitea = async ({
// Get existing labels from Gitea
const giteaLabelsRes = await httpGet(
- `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`,
+ `${config.giteaConfig.url}/api/v1/repos/${giteaOwner}/${repository.name}/labels`,
{
Authorization: `token ${config.giteaConfig.token}`,
}
@@ -1264,7 +1273,7 @@ export const mirrorGitRepoIssuesToGitea = async ({
} else {
try {
const created = await httpPost(
- `${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${
+ `${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${
repository.name
}/labels`,
{ name, color: "#ededed" }, // Default color
@@ -1301,7 +1310,7 @@ export const mirrorGitRepoIssuesToGitea = async ({
// Create the issue in Gitea
const createdIssue = await httpPost(
- `${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${
+ `${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${
repository.name
}/issues`,
issuePayload,
@@ -1328,7 +1337,7 @@ export const mirrorGitRepoIssuesToGitea = async ({
comments,
async (comment) => {
await httpPost(
- `${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${
+ `${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${
repository.name
}/issues/${createdIssue.data.number}/comments`,
{
diff --git a/src/lib/modules/registry.ts b/src/lib/modules/registry.ts
new file mode 100644
index 0000000..0299e0d
--- /dev/null
+++ b/src/lib/modules/registry.ts
@@ -0,0 +1,184 @@
+/**
+ * Module registry implementation
+ * Manages loading and access to modular features
+ */
+
+import type {
+ Module,
+ ModuleRegistry,
+ AppContext,
+ RouteHandler,
+ Middleware,
+ DatabaseAdapter,
+ EventEmitter
+} from './types';
+// Module registry for extensibility
+
+/**
+ * Simple event emitter implementation
+ */
+class SimpleEventEmitter implements EventEmitter {
+ private events: Map<string, Set<(...args: any[]) => void>> = new Map();
+
+ on(event: string, handler: (...args: any[]) => void): void {
+ if (!this.events.has(event)) {
+ this.events.set(event, new Set());
+ }
+ this.events.get(event)!.add(handler);
+ }
+
+ off(event: string, handler: (...args: any[]) => void): void {
+ this.events.get(event)?.delete(handler);
+ }
+
+ emit(event: string, ...args: any[]): void {
+ this.events.get(event)?.forEach(handler => {
+ try {
+ handler(...args);
+ } catch (error) {
+ console.error(`Error in event handler for ${event}:`, error);
+ }
+ });
+ }
+}
+
+/**
+ * Module manager class
+ */
+export class ModuleManager {
+ private modules: Map<string, Module> = new Map();
+ private routes: Map<string, RouteHandler> = new Map();
+ private middlewares: Middleware[] = [];
+ private events = new SimpleEventEmitter();
+ private initialized = false;
+
+ /**
+ * Get app context for modules
+ */
+ private getAppContext(): AppContext {
+ return {
+ addRoute: (path, handler) => this.addRoute(path, handler),
+ addMiddleware: (middleware) => this.middlewares.push(middleware),
+ db: this.getDatabaseAdapter(),
+ events: this.events,
+ modules: this.getRegistry(),
+ };
+ }
+
+ /**
+ * Get database adapter based on deployment mode
+ */
+ private getDatabaseAdapter(): DatabaseAdapter {
+ // This would be implemented to use SQLite or PostgreSQL
+ // based on deployment mode
+ return {
+ query: async (sql, params) => [],
+ execute: async (sql, params) => {},
+ transaction: async (fn) => fn(),
+ };
+ }
+
+ /**
+ * Register a module
+ */
+ async register(module: Module): Promise<void> {
+ if (this.modules.has(module.name)) {
+ console.warn(`Module ${module.name} is already registered`);
+ return;
+ }
+
+ try {
+ await module.init(this.getAppContext());
+ this.modules.set(module.name, module);
+ console.log(`Module ${module.name} registered successfully`);
+ } catch (error) {
+ console.error(`Failed to register module ${module.name}:`, error);
+ throw error;
+ }
+ }
+
+ /**
+ * Unregister a module
+ */
+ async unregister(moduleName: string): Promise<void> {
+ const module = this.modules.get(moduleName);
+ if (!module) return;
+
+ if (module.cleanup) {
+ await module.cleanup();
+ }
+
+ this.modules.delete(moduleName);
+ // Remove routes registered by this module
+ // This would need to track which module registered which routes
+ }
+
+ /**
+ * Add a route handler
+ */
+ private addRoute(path: string, handler: RouteHandler): void {
+ this.routes.set(path, handler);
+ }
+
+ /**
+ * Get route handler for a path
+ */
+ getRouteHandler(path: string): RouteHandler | null {
+ return this.routes.get(path) || null;
+ }
+
+ /**
+ * Get all middleware
+ */
+ getMiddleware(): Middleware[] {
+ return [...this.middlewares];
+ }
+
+ /**
+ * Get module registry
+ */
+ getRegistry(): ModuleRegistry {
+ const registry: ModuleRegistry = {};
+
+ // Copy all modules to registry
+ for (const [name, module] of this.modules) {
+ registry[name] = module;
+ }
+
+ return registry;
+ }
+
+
+ /**
+ * Get a specific module
+ */
+ get<K extends keyof ModuleRegistry>(name: K): ModuleRegistry[K] | null {
+ return this.getRegistry()[name] || null;
+ }
+
+ /**
+ * Check if a module is loaded
+ */
+ has(name: string): boolean {
+ return this.modules.has(name);
+ }
+
+ /**
+ * Emit an event to all modules
+ */
+ emit(event: string, ...args: any[]): void {
+ this.events.emit(event, ...args);
+ }
+}
+
+// Global module manager instance
+export const modules = new ModuleManager();
+
+
+// Initialize modules on app start
+export async function initializeModules() {
+ // Load core modules here if any
+
+ // Emit initialization complete event
+ modules.emit('modules:initialized');
+}
\ No newline at end of file
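
A sketch of registering a module with the manager above; "audit-log" is a made-up module used only to show the shape:

  import { modules } from "@/lib/modules/registry";
  import type { Module, AppContext } from "@/lib/modules/types";

  const auditModule: Module = {
    name: "audit-log",
    version: "0.1.0",
    async init(app: AppContext) {
      // Listen for the event emitted by initializeModules()
      app.events.on("modules:initialized", () => console.log("audit-log ready"));
    },
  };

  await modules.register(auditModule);
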
diff --git a/src/lib/modules/types.d.ts b/src/lib/modules/types.d.ts
new file mode 100644
index 0000000..001c833
--- /dev/null
+++ b/src/lib/modules/types.d.ts
@@ -0,0 +1,86 @@
+/**
+ * Module system type definitions
+ * These interfaces allow for extensibility and plugins
+ */
+import type { APIContext } from 'astro';
+import type { ComponentType, LazyExoticComponent } from 'react';
+/**
+ * Base module interface that all modules must implement
+ */
+export interface Module {
+ /** Unique module identifier */
+ name: string;
+ /** Module version */
+ version: string;
+ /** Initialize the module with app context */
+ init(app: AppContext): Promise<void>;
+ /** Cleanup when module is unloaded */
+ cleanup?(): Promise<void>;
+}
+/**
+ * Application context passed to modules
+ */
+export interface AppContext {
+ /** Register API routes */
+ addRoute(path: string, handler: RouteHandler): void;
+ /** Register middleware */
+ addMiddleware(middleware: Middleware): void;
+ /** Access to database (abstracted) */
+ db: DatabaseAdapter;
+ /** Event emitter for cross-module communication */
+ events: EventEmitter;
+ /** Access to other modules */
+ modules: ModuleRegistry;
+}
+/**
+ * Route handler type
+ */
+export type RouteHandler = (context: APIContext) => Promise<Response> | Response;
+/**
+ * Middleware type
+ */
+export type Middleware = (context: APIContext, next: () => Promise<Response>) => Promise<Response>;
+/**
+ * Database adapter interface (abstract away implementation)
+ */
+export interface DatabaseAdapter {
+ query(sql: string, params?: any[]): Promise<any[]>;
+ execute(sql: string, params?: any[]): Promise<void>;
+ transaction<T>(fn: () => Promise<T>): Promise<T>;
+}
+/**
+ * Event emitter for cross-module communication
+ */
+export interface EventEmitter {
+ on(event: string, handler: (...args: any[]) => void): void;
+ off(event: string, handler: (...args: any[]) => void): void;
+ emit(event: string, ...args: any[]): void;
+}
+/**
+ * Example module interfaces
+ * These are examples of how modules can be structured
+ */
+export interface FeatureModule extends Module {
+ /** React components provided by the module */
+ components?: Record<string, LazyExoticComponent<ComponentType<any>>>;
+ /** API methods provided by the module */
+ api?: Record<string, (...args: any[]) => Promise<any>>;
+ /** Lifecycle hooks */
+ hooks?: {
+ onInit?: () => Promise<void>;
+ onUserAction?: (action: string, data: any) => Promise<void>;
+ };
+}
+/**
+ * Module registry interface
+ */
+export interface ModuleRegistry {
+ [key: string]: Module | undefined;
+}
+export interface User {
+ id: string;
+ email: string;
+ name?: string;
+ username?: string;
+}
+//# sourceMappingURL=types.d.ts.map
\ No newline at end of file
diff --git a/src/lib/modules/types.d.ts.map b/src/lib/modules/types.d.ts.map
new file mode 100644
index 0000000..9cf8369
--- /dev/null
+++ b/src/lib/modules/types.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,KAAK,EAAE,aAAa,EAAE,mBAAmB,EAAE,MAAM,OAAO,CAAC;AAEhE;;GAEG;AACH,MAAM,WAAW,MAAM;IACrB,+BAA+B;IAC/B,IAAI,EAAE,MAAM,CAAC;IAEb,qBAAqB;IACrB,OAAO,EAAE,MAAM,CAAC;IAEhB,6CAA6C;IAC7C,IAAI,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAErC,sCAAsC;IACtC,OAAO,CAAC,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB,0BAA0B;IAC1B,QAAQ,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,YAAY,GAAG,IAAI,CAAC;IAEpD,0BAA0B;IAC1B,aAAa,CAAC,UAAU,EAAE,UAAU,GAAG,IAAI,CAAC;IAE5C,sCAAsC;IACtC,EAAE,EAAE,eAAe,CAAC;IAEpB,mDAAmD;IACnD,MAAM,EAAE,YAAY,CAAC;IAErB,8BAA8B;IAC9B,OAAO,EAAE,cAAc,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,MAAM,YAAY,GAAG,CAAC,OAAO,EAAE,UAAU,KAAK,OAAO,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;AAEjF;;GAEG;AACH,MAAM,MAAM,UAAU,GAAG,CACvB,OAAO,EAAE,UAAU,EACnB,IAAI,EAAE,MAAM,OAAO,CAAC,QAAQ,CAAC,KAC1B,OAAO,CAAC,QAAQ,CAAC,CAAC;AAEvB;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,KAAK,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC;IACpD,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACpD,WAAW,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;CAClD;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,GAAG,IAAI,CAAC;IAC3D,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,IAAI,GAAG,IAAI,CAAC;IAC5D,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;CAC3C;AAED;;;GAGG;AAGH,MAAM,WAAW,aAAc,SAAQ,MAAM;IAC3C,8CAA8C;IAC9C,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,mBAAmB,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAErE,yCAAyC;IACzC,GAAG,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;IAEvD,sBAAsB;IACtB,KAAK,CAAC,EAAE;QACN,MAAM,CAAC,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;QAC7B,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;KAC7D,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;CACnC;AAGD,MAAM,WAAW,IAAI;IACnB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
\ No newline at end of file
diff --git a/src/lib/modules/types.js b/src/lib/modules/types.js
new file mode 100644
index 0000000..4ceb2c0
--- /dev/null
+++ b/src/lib/modules/types.js
@@ -0,0 +1,5 @@
+/**
+ * Module system type definitions
+ * These interfaces allow for extensibility and plugins
+ */
+export {};
diff --git a/src/lib/modules/types.ts b/src/lib/modules/types.ts
new file mode 100644
index 0000000..d8469e6
--- /dev/null
+++ b/src/lib/modules/types.ts
@@ -0,0 +1,110 @@
+/**
+ * Module system type definitions
+ * These interfaces allow for extensibility and plugins
+ */
+
+import type { APIContext } from 'astro';
+import type { ComponentType, LazyExoticComponent } from 'react';
+
+/**
+ * Base module interface that all modules must implement
+ */
+export interface Module {
+ /** Unique module identifier */
+ name: string;
+
+ /** Module version */
+ version: string;
+
+ /** Initialize the module with app context */
+ init(app: AppContext): Promise<void>;
+
+ /** Cleanup when module is unloaded */
+ cleanup?(): Promise<void>;
+}
+
+/**
+ * Application context passed to modules
+ */
+export interface AppContext {
+ /** Register API routes */
+ addRoute(path: string, handler: RouteHandler): void;
+
+ /** Register middleware */
+ addMiddleware(middleware: Middleware): void;
+
+ /** Access to database (abstracted) */
+ db: DatabaseAdapter;
+
+ /** Event emitter for cross-module communication */
+ events: EventEmitter;
+
+ /** Access to other modules */
+ modules: ModuleRegistry;
+}
+
+/**
+ * Route handler type
+ */
+export type RouteHandler = (context: APIContext) => Promise<Response> | Response;
+
+/**
+ * Middleware type
+ */
+export type Middleware = (
+ context: APIContext,
+ next: () => Promise<Response>
+) => Promise<Response>;
+
+/**
+ * Database adapter interface (abstract away implementation)
+ */
+export interface DatabaseAdapter {
+ query<T = any>(sql: string, params?: any[]): Promise<T[]>;
+ execute(sql: string, params?: any[]): Promise<void>;
+ transaction<T>(fn: () => Promise<T>): Promise<T>;
+}
+
+/**
+ * Event emitter for cross-module communication
+ */
+export interface EventEmitter {
+ on(event: string, handler: (...args: any[]) => void): void;
+ off(event: string, handler: (...args: any[]) => void): void;
+ emit(event: string, ...args: any[]): void;
+}
+
+/**
+ * Example module interfaces
+ * These are examples of how modules can be structured
+ */
+
+// Example: Feature module with components
+export interface FeatureModule extends Module {
+ /** React components provided by the module */
+ components?: Record<string, LazyExoticComponent<ComponentType<any>>>;
+
+ /** API methods provided by the module */
+ api?: Record<string, (...args: any[]) => Promise<any>>;
+
+ /** Lifecycle hooks */
+ hooks?: {
+ onInit?: () => Promise<void>;
+ onUserAction?: (action: string, data: any) => Promise<void>;
+ };
+}
+
+/**
+ * Module registry interface
+ */
+export interface ModuleRegistry {
+ [key: string]: Module | undefined;
+}
+
+// Generic types that modules might use
+export interface User {
+ id: string;
+ email: string;
+ name?: string;
+ username?: string;
+}
\ No newline at end of file
diff --git a/src/lib/recovery.ts b/src/lib/recovery.ts
index 5129ec1..0d00aa8 100644
--- a/src/lib/recovery.ts
+++ b/src/lib/recovery.ts
@@ -11,6 +11,7 @@ import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
import type { Repository } from './db/schema';
+import { getDecryptedGitHubToken } from './utils/config-encryption';
// Recovery state tracking
let recoveryInProgress = false;
@@ -262,7 +263,8 @@ async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
// Create GitHub client with error handling
let octokit;
try {
- octokit = createGitHubClient(config.githubConfig.token);
+ const decryptedToken = getDecryptedGitHubToken(config);
+ octokit = createGitHubClient(decryptedToken);
} catch (error) {
throw new Error(`Failed to create GitHub client: ${error instanceof Error ? error.message : String(error)}`);
}
diff --git a/src/lib/utils.ts b/src/lib/utils.ts
index 4f37651..eae92a1 100644
--- a/src/lib/utils.ts
+++ b/src/lib/utils.ts
@@ -9,6 +9,15 @@ export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs));
}
+export function generateRandomString(length: number): string {
+ const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+ let result = '';
+ for (let i = 0; i < length; i++) {
+ result += chars.charAt(Math.floor(Math.random() * chars.length));
+ }
+ return result;
+}
+
export function formatDate(date?: Date | string | null): string {
if (!date) return "Never";
return new Intl.DateTimeFormat("en-US", {
@@ -185,7 +194,7 @@ export async function apiRequest(
}
}
-export const getStatusColor = (status: RepoStatus): string => {
+export const getStatusColor = (status: string): string => {
switch (status) {
case "imported":
return "bg-blue-500"; // Info/primary-like
@@ -199,6 +208,12 @@ export const getStatusColor = (status: RepoStatus): string => {
return "bg-indigo-500"; // Sync in progress
case "synced":
return "bg-teal-500"; // Sync complete
+ case "skipped":
+ return "bg-gray-500"; // Skipped
+ case "deleting":
+ return "bg-orange-500"; // Deleting
+ case "deleted":
+ return "bg-gray-600"; // Deleted
default:
return "bg-gray-400"; // Unknown/neutral
}
diff --git a/src/lib/utils/auth-helpers.ts b/src/lib/utils/auth-helpers.ts
new file mode 100644
index 0000000..10e2336
--- /dev/null
+++ b/src/lib/utils/auth-helpers.ts
@@ -0,0 +1,58 @@
+import type { APIContext } from "astro";
+import { auth } from "@/lib/auth";
+
+/**
+ * Get authenticated user from request
+ * @param request - The request object from Astro API route
+ * @returns The authenticated user or null if not authenticated
+ */
+export async function getAuthenticatedUser(request: Request) {
+ try {
+ const session = await auth.api.getSession({
+ headers: request.headers,
+ });
+
+ return session ? session.user : null;
+ } catch (error) {
+ console.error("Error getting session:", error);
+ return null;
+ }
+}
+
+/**
+ * Require authentication for API routes
+ * Returns an error response if user is not authenticated
+ * @param context - The API context from Astro
+ * @returns Object with user if authenticated, or error response if not
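+ * @example
+ * // Typical usage inside an Astro API route handler:
+ * const { user, response } = await requireAuth(context);
+ * if (response) return response;
+ * const userId = user!.id;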
+ */
+export async function requireAuth(context: APIContext) {
+ const user = await getAuthenticatedUser(context.request);
+
+ if (!user) {
+ return {
+ user: null,
+ response: new Response(
+ JSON.stringify({
+ success: false,
+ error: "Unauthorized - Please log in",
+ }),
+ {
+ status: 401,
+ headers: { "Content-Type": "application/json" },
+ }
+ ),
+ };
+ }
+
+ return { user, response: null };
+}
+
+/**
+ * Get user ID from authenticated session
+ * @param request - The request object from Astro API route
+ * @returns The user ID or null if not authenticated
+ */
+export async function getAuthenticatedUserId(request: Request): Promise<string | null> {
+ const user = await getAuthenticatedUser(request);
+ return user?.id || null;
+}
\ No newline at end of file
diff --git a/src/lib/utils/config-encryption.ts b/src/lib/utils/config-encryption.ts
new file mode 100644
index 0000000..a223c18
--- /dev/null
+++ b/src/lib/utils/config-encryption.ts
@@ -0,0 +1,52 @@
+import { decrypt } from "./encryption";
+import type { Config } from "@/types/config";
+
+/**
+ * Decrypts tokens in a config object for use in API calls
+ * @param config The config object with potentially encrypted tokens
+ * @returns Config object with decrypted tokens
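+ * @example
+ * // Illustrative sketch: `storedConfig` stands in for a config row loaded from the database
+ * const usable = decryptConfigTokens(storedConfig);
+ * // usable.githubConfig.token and usable.giteaConfig.token are now plaintext (if present)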
+ */
+export function decryptConfigTokens(config: Config): Config {
+ const decryptedConfig = { ...config };
+
+ // Deep clone the config objects
+ if (config.githubConfig) {
+ decryptedConfig.githubConfig = { ...config.githubConfig };
+ if (config.githubConfig.token) {
+ decryptedConfig.githubConfig.token = decrypt(config.githubConfig.token);
+ }
+ }
+
+ if (config.giteaConfig) {
+ decryptedConfig.giteaConfig = { ...config.giteaConfig };
+ if (config.giteaConfig.token) {
+ decryptedConfig.giteaConfig.token = decrypt(config.giteaConfig.token);
+ }
+ }
+
+ return decryptedConfig;
+}
+
+/**
+ * Gets a decrypted GitHub token from config
+ * @param config The config object
+ * @returns Decrypted GitHub token
+ */
+export function getDecryptedGitHubToken(config: Config): string {
+ if (!config.githubConfig?.token) {
+ throw new Error("GitHub token not found in config");
+ }
+ return decrypt(config.githubConfig.token);
+}
+
+/**
+ * Gets a decrypted Gitea token from config
+ * @param config The config object
+ * @returns Decrypted Gitea token
+ */
+export function getDecryptedGiteaToken(config: Config): string {
+ if (!config.giteaConfig?.token) {
+ throw new Error("Gitea token not found in config");
+ }
+ return decrypt(config.giteaConfig.token);
+}
\ No newline at end of file
diff --git a/src/lib/utils/encryption.ts b/src/lib/utils/encryption.ts
new file mode 100644
index 0000000..0c72f83
--- /dev/null
+++ b/src/lib/utils/encryption.ts
@@ -0,0 +1,169 @@
+import * as crypto from "crypto";
+
+// Encryption configuration
+const ALGORITHM = "aes-256-gcm";
+const IV_LENGTH = 16; // 128 bits
+const SALT_LENGTH = 32; // 256 bits
+const TAG_LENGTH = 16; // 128 bits
+const KEY_LENGTH = 32; // 256 bits
+const ITERATIONS = 100000; // PBKDF2 iterations
+
+// Get or generate encryption key
+function getEncryptionKey(): Buffer {
+ const secret = process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET || process.env.BETTER_AUTH_SECRET;
+
+ if (!secret) {
+ throw new Error("No encryption secret found. Please set ENCRYPTION_SECRET environment variable.");
+ }
+
+ // Use a static salt derived from the secret for consistent key generation
+ // This ensures the same key is generated across application restarts
+ const salt = crypto.createHash('sha256').update('gitea-mirror-salt' + secret).digest();
+
+ return crypto.pbkdf2Sync(secret, salt, ITERATIONS, KEY_LENGTH, 'sha256');
+}
+
+export interface EncryptedData {
+ encrypted: string;
+ iv: string;
+ salt: string;
+ tag: string;
+ version: number;
+}
+
+/**
+ * Encrypts sensitive data like API tokens
+ * @param plaintext The data to encrypt
+ * @returns Encrypted data with metadata
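+ * @example
+ * // Round-trip sketch (requires ENCRYPTION_SECRET or an equivalent secret to be set):
+ * const stored = encrypt("example-token"); // base64-encoded JSON envelope
+ * const plain = decrypt(stored);           // "example-token"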
+ */
+export function encrypt(plaintext: string): string {
+ if (!plaintext) {
+ return '';
+ }
+
+ try {
+ const key = getEncryptionKey();
+ const iv = crypto.randomBytes(IV_LENGTH);
+ const salt = crypto.randomBytes(SALT_LENGTH);
+
+ const cipher = crypto.createCipheriv(ALGORITHM, key, iv);
+
+ const encrypted = Buffer.concat([
+ cipher.update(plaintext, 'utf8'),
+ cipher.final()
+ ]);
+
+ const tag = cipher.getAuthTag();
+
+ const encryptedData: EncryptedData = {
+ encrypted: encrypted.toString('base64'),
+ iv: iv.toString('base64'),
+ salt: salt.toString('base64'),
+ tag: tag.toString('base64'),
+ version: 1
+ };
+
+ // Return as base64 encoded JSON for easy storage
+ return Buffer.from(JSON.stringify(encryptedData)).toString('base64');
+ } catch (error) {
+ console.error('Encryption error:', error);
+ throw new Error('Failed to encrypt data');
+ }
+}
+
+/**
+ * Decrypts encrypted data
+ * @param encryptedString The encrypted data string
+ * @returns Decrypted plaintext
+ */
+export function decrypt(encryptedString: string): string {
+ if (!encryptedString) {
+ return '';
+ }
+
+ try {
+ // Check if it's already plaintext (for backward compatibility during migration)
+ if (!isEncrypted(encryptedString)) {
+ return encryptedString;
+ }
+
+ const encryptedData: EncryptedData = JSON.parse(
+ Buffer.from(encryptedString, 'base64').toString('utf8')
+ );
+
+ const key = getEncryptionKey();
+ const iv = Buffer.from(encryptedData.iv, 'base64');
+ const tag = Buffer.from(encryptedData.tag, 'base64');
+ const encrypted = Buffer.from(encryptedData.encrypted, 'base64');
+
+ const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);
+ decipher.setAuthTag(tag);
+
+ const decrypted = Buffer.concat([
+ decipher.update(encrypted),
+ decipher.final()
+ ]);
+
+ return decrypted.toString('utf8');
+ } catch (error) {
+ // If decryption fails, check if it's plaintext (backward compatibility)
+ try {
+ JSON.parse(Buffer.from(encryptedString, 'base64').toString('utf8'));
+ throw error; // It was encrypted but failed to decrypt
+ } catch {
+ // Not encrypted, return as-is for backward compatibility
+ console.warn('Token appears to be unencrypted, returning as-is for backward compatibility');
+ return encryptedString;
+ }
+ }
+}
+
+/**
+ * Checks if a string is encrypted
+ * @param value The string to check
+ * @returns true if encrypted, false otherwise
+ */
+export function isEncrypted(value: string): boolean {
+ if (!value) {
+ return false;
+ }
+
+ try {
+ const decoded = Buffer.from(value, 'base64').toString('utf8');
+ const data = JSON.parse(decoded);
+ return data.version === 1 && data.encrypted && data.iv && data.tag;
+ } catch {
+ return false;
+ }
+}
+
+/**
+ * Migrates unencrypted tokens to encrypted format
+ * @param token The token to migrate
+ * @returns Encrypted token if it wasn't already encrypted
+ */
+export function migrateToken(token: string): string {
+ if (!token || isEncrypted(token)) {
+ return token;
+ }
+
+ return encrypt(token);
+}
+
+/**
+ * Generates a secure random token
+ * @param length Token length in bytes (default: 32)
+ * @returns Hex encoded random token
+ */
+export function generateSecureToken(length: number = 32): string {
+ return crypto.randomBytes(length).toString('hex');
+}
+
+/**
+ * Hashes a value using SHA-256 (for non-reversible values like API keys for comparison)
+ * @param value The value to hash
+ * @returns Hex encoded hash
+ */
+export function hashValue(value: string): string {
+ return crypto.createHash('sha256').update(value).digest('hex');
+}
\ No newline at end of file
diff --git a/src/lib/utils/oauth-validation.test.ts b/src/lib/utils/oauth-validation.test.ts
new file mode 100644
index 0000000..1580ef7
--- /dev/null
+++ b/src/lib/utils/oauth-validation.test.ts
@@ -0,0 +1,85 @@
+import { describe, test, expect } from "bun:test";
+import { isValidRedirectUri, parseRedirectUris } from "./oauth-validation";
+
+describe("OAuth Validation", () => {
+ describe("parseRedirectUris", () => {
+ test("parses comma-separated URIs", () => {
+ const result = parseRedirectUris("https://app1.com,https://app2.com, https://app3.com ");
+ expect(result).toEqual([
+ "https://app1.com",
+ "https://app2.com",
+ "https://app3.com"
+ ]);
+ });
+
+ test("handles empty string", () => {
+ expect(parseRedirectUris("")).toEqual([]);
+ });
+
+ test("filters out empty values", () => {
+ const result = parseRedirectUris("https://app1.com,,https://app2.com,");
+ expect(result).toEqual(["https://app1.com", "https://app2.com"]);
+ });
+ });
+
+ describe("isValidRedirectUri", () => {
+ test("validates exact match", () => {
+ const authorizedUris = ["https://app.example.com/callback"];
+
+ expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(true);
+ expect(isValidRedirectUri("https://app.example.com/other", authorizedUris)).toBe(false);
+ });
+
+ test("validates wildcard paths", () => {
+ const authorizedUris = ["https://app.example.com/*"];
+
+ expect(isValidRedirectUri("https://app.example.com/", authorizedUris)).toBe(true);
+ expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(true);
+ expect(isValidRedirectUri("https://app.example.com/deep/path", authorizedUris)).toBe(true);
+
+ // Different domain should fail
+ expect(isValidRedirectUri("https://evil.com/callback", authorizedUris)).toBe(false);
+ });
+
+ test("validates protocol", () => {
+ const authorizedUris = ["https://app.example.com/callback"];
+
+ // HTTP instead of HTTPS should fail
+ expect(isValidRedirectUri("http://app.example.com/callback", authorizedUris)).toBe(false);
+ });
+
+ test("validates host and port", () => {
+ const authorizedUris = ["https://app.example.com:3000/callback"];
+
+ // Different port should fail
+ expect(isValidRedirectUri("https://app.example.com/callback", authorizedUris)).toBe(false);
+ expect(isValidRedirectUri("https://app.example.com:3000/callback", authorizedUris)).toBe(true);
+ expect(isValidRedirectUri("https://app.example.com:4000/callback", authorizedUris)).toBe(false);
+ });
+
+ test("handles invalid URIs", () => {
+ const authorizedUris = ["not-a-valid-uri", "https://valid.com"];
+
+ // Invalid redirect URI
+ expect(isValidRedirectUri("not-a-valid-uri", authorizedUris)).toBe(false);
+
+ // Valid redirect URI with invalid authorized URI should still work if it matches valid one
+ expect(isValidRedirectUri("https://valid.com", authorizedUris)).toBe(true);
+ });
+
+ test("handles empty inputs", () => {
+ expect(isValidRedirectUri("", ["https://app.com"])).toBe(false);
+ expect(isValidRedirectUri("https://app.com", [])).toBe(false);
+ });
+
+ test("prevents open redirect attacks", () => {
+ const authorizedUris = ["https://app.example.com/callback"];
+
+ // Various attack vectors
+ expect(isValidRedirectUri("https://app.example.com.evil.com/callback", authorizedUris)).toBe(false);
+ expect(isValidRedirectUri("https://app.example.com@evil.com/callback", authorizedUris)).toBe(false);
+ expect(isValidRedirectUri("//evil.com/callback", authorizedUris)).toBe(false);
+ expect(isValidRedirectUri("https:evil.com/callback", authorizedUris)).toBe(false);
+ });
+ });
+});
\ No newline at end of file
diff --git a/src/lib/utils/oauth-validation.ts b/src/lib/utils/oauth-validation.ts
new file mode 100644
index 0000000..3a64b55
--- /dev/null
+++ b/src/lib/utils/oauth-validation.ts
@@ -0,0 +1,59 @@
+/**
+ * Validates a redirect URI against a list of authorized URIs
+ * @param redirectUri The redirect URI to validate
+ * @param authorizedUris List of authorized redirect URIs
+ * @returns true if the redirect URI is authorized, false otherwise
+ */
+export function isValidRedirectUri(redirectUri: string, authorizedUris: string[]): boolean {
+ if (!redirectUri || authorizedUris.length === 0) {
+ return false;
+ }
+
+ try {
+ // Parse the redirect URI to ensure it's valid
+ const redirectUrl = new URL(redirectUri);
+
+ return authorizedUris.some(authorizedUri => {
+ try {
+ // Handle wildcard paths (e.g., https://example.com/*)
+ if (authorizedUri.endsWith('/*')) {
+ const baseUri = authorizedUri.slice(0, -2);
+ const baseUrl = new URL(baseUri);
+
+ // Check protocol, host, and port match
+ return redirectUrl.protocol === baseUrl.protocol &&
+ redirectUrl.host === baseUrl.host &&
+ redirectUrl.pathname.startsWith(baseUrl.pathname);
+ }
+
+ // Handle exact match
+ const authorizedUrl = new URL(authorizedUri);
+
+ // For exact match, everything must match including path and query params
+ return redirectUrl.href === authorizedUrl.href;
+ } catch {
+ // If authorized URI is not a valid URL, treat as invalid
+ return false;
+ }
+ });
+ } catch {
+ // If redirect URI is not a valid URL, it's invalid
+ return false;
+ }
+}
+
+/**
+ * Parses a comma-separated list of redirect URIs and trims whitespace
+ * @param redirectUrls Comma-separated list of redirect URIs
+ * @returns Array of trimmed redirect URIs
+ */
+export function parseRedirectUris(redirectUrls: string): string[] {
+ if (!redirectUrls) {
+ return [];
+ }
+
+ return redirectUrls
+ .split(',')
+ .map(uri => uri.trim())
+ .filter(uri => uri.length > 0);
+}
\ No newline at end of file
diff --git a/src/middleware.ts b/src/middleware.ts
index 7fa984c..d02dbca 100644
--- a/src/middleware.ts
+++ b/src/middleware.ts
@@ -3,6 +3,8 @@ import { initializeRecovery, hasJobsNeedingRecovery, getRecoveryStatus } from '.
import { startCleanupService, stopCleanupService } from './lib/cleanup-service';
import { initializeShutdownManager, registerShutdownCallback } from './lib/shutdown-manager';
import { setupSignalHandlers } from './lib/signal-handlers';
+import { auth } from './lib/auth';
+import { isHeaderAuthEnabled, authenticateWithHeaders } from './lib/auth-header';
// Flag to track if recovery has been initialized
let recoveryInitialized = false;
@@ -11,6 +13,52 @@ let cleanupServiceStarted = false;
let shutdownManagerInitialized = false;
export const onRequest = defineMiddleware(async (context, next) => {
+ // First, try Better Auth session (cookie-based)
+ try {
+ const session = await auth.api.getSession({
+ headers: context.request.headers,
+ });
+
+ if (session) {
+ context.locals.user = session.user;
+ context.locals.session = session.session;
+ } else {
+ // No cookie session, check for header authentication
+ if (isHeaderAuthEnabled()) {
+ const headerUser = await authenticateWithHeaders(context.request.headers);
+ if (headerUser) {
+ // Create a session-like object for header auth
+ context.locals.user = {
+ id: headerUser.id,
+ email: headerUser.email,
+ emailVerified: headerUser.emailVerified,
+ name: headerUser.name || headerUser.username,
+ username: headerUser.username,
+ createdAt: headerUser.createdAt,
+ updatedAt: headerUser.updatedAt,
+ };
+ context.locals.session = {
+ id: `header-${headerUser.id}`,
+ userId: headerUser.id,
+ expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 1 day
+ ipAddress: context.request.headers.get('x-forwarded-for') || context.clientAddress,
+ userAgent: context.request.headers.get('user-agent'),
+ };
+ } else {
+ context.locals.user = null;
+ context.locals.session = null;
+ }
+ } else {
+ context.locals.user = null;
+ context.locals.session = null;
+ }
+ }
+ } catch (error) {
+ // If there's an error getting the session, set to null
+ context.locals.user = null;
+ context.locals.session = null;
+ }
+
// Initialize shutdown manager and signal handlers first
if (!shutdownManagerInitialized) {
try {
diff --git a/src/pages/api/auth/[...all].ts b/src/pages/api/auth/[...all].ts
new file mode 100644
index 0000000..d4077f4
--- /dev/null
+++ b/src/pages/api/auth/[...all].ts
@@ -0,0 +1,10 @@
+import { auth } from "@/lib/auth";
+import type { APIRoute } from "astro";
+
+export const ALL: APIRoute = async (ctx) => {
+ // If you want to use rate limiting, make sure to set the 'x-forwarded-for' header
+ // on the request headers using the client address from the context, e.g.:
+ // ctx.request.headers.set("x-forwarded-for", ctx.clientAddress);
+
+ return auth.handler(ctx.request);
+};
\ No newline at end of file
diff --git a/src/pages/api/auth/check-users.ts b/src/pages/api/auth/check-users.ts
new file mode 100644
index 0000000..f726cdb
--- /dev/null
+++ b/src/pages/api/auth/check-users.ts
@@ -0,0 +1,30 @@
+import type { APIRoute } from "astro";
+import { db, users } from "@/lib/db";
+import { sql } from "drizzle-orm";
+
+export const GET: APIRoute = async () => {
+ try {
+ const userCountResult = await db
+ .select({ count: sql`count(*)` })
+ .from(users);
+
+ const userCount = userCountResult[0].count;
+
+ if (userCount === 0) {
+ return new Response(JSON.stringify({ error: "No users found" }), {
+ status: 404,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+
+ return new Response(JSON.stringify({ userCount }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return new Response(JSON.stringify({ error: "Internal server error" }), {
+ status: 500,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+};
\ No newline at end of file
diff --git a/src/pages/api/auth/debug.ts b/src/pages/api/auth/debug.ts
new file mode 100644
index 0000000..3267bf2
--- /dev/null
+++ b/src/pages/api/auth/debug.ts
@@ -0,0 +1,79 @@
+import type { APIRoute } from "astro";
+import { auth } from "@/lib/auth";
+import { db } from "@/lib/db";
+import { users } from "@/lib/db/schema";
+import { nanoid } from "nanoid";
+
+export const GET: APIRoute = async ({ request }) => {
+ try {
+ // Get Better Auth configuration info
+ const info = {
+ baseURL: auth.options.baseURL,
+ basePath: auth.options.basePath,
+ trustedOrigins: auth.options.trustedOrigins,
+ emailPasswordEnabled: auth.options.emailAndPassword?.enabled,
+ userFields: auth.options.user?.additionalFields,
+ databaseConfig: {
+ usePlural: true,
+ provider: "sqlite"
+ }
+ };
+
+ return new Response(JSON.stringify({
+ success: true,
+ config: info
+ }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ // Log full error details server-side for debugging
+ console.error("Debug endpoint error:", error);
+
+ // Only return safe error information to the client
+ return new Response(JSON.stringify({
+ success: false,
+ error: error instanceof Error ? error.message : "An unexpected error occurred"
+ }), {
+ status: 500,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+};
+
+export const POST: APIRoute = async ({ request }) => {
+ try {
+ // Test creating a user directly
+ const userId = nanoid();
+ const now = new Date();
+
+ await db.insert(users).values({
+ id: userId,
+ email: "test2@example.com",
+ emailVerified: false,
+ username: "test2",
+ // Let the database handle timestamps with defaults
+ });
+
+ return new Response(JSON.stringify({
+ success: true,
+ userId,
+ message: "User created successfully"
+ }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ // Log full error details server-side for debugging
+ console.error("Debug endpoint error:", error);
+
+ // Only return safe error information to the client
+ return new Response(JSON.stringify({
+ success: false,
+ error: error instanceof Error ? error.message : "An unexpected error occurred"
+ }), {
+ status: 500,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+};
\ No newline at end of file
diff --git a/src/pages/api/auth/header-status.ts b/src/pages/api/auth/header-status.ts
new file mode 100644
index 0000000..661eb96
--- /dev/null
+++ b/src/pages/api/auth/header-status.ts
@@ -0,0 +1,16 @@
+import type { APIRoute } from "astro";
+import { getHeaderAuthConfig } from "@/lib/auth-header";
+
+export const GET: APIRoute = async () => {
+ const config = getHeaderAuthConfig();
+
+ return new Response(JSON.stringify({
+ enabled: config.enabled,
+ userHeader: config.userHeader,
+ autoProvision: config.autoProvision,
+ hasAllowedDomains: config.allowedDomains && config.allowedDomains.length > 0,
+ }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+};
\ No newline at end of file
diff --git a/src/pages/api/auth/legacy-backup/README.md b/src/pages/api/auth/legacy-backup/README.md
new file mode 100644
index 0000000..f0bdf3a
--- /dev/null
+++ b/src/pages/api/auth/legacy-backup/README.md
@@ -0,0 +1,13 @@
+# Legacy Auth Routes Backup
+
+These files contain the original authentication routes used before the migration to Better Auth.
+They are kept here as a reference during the migration process.
+
+## Migration Notes
+
+- `index.ts` - Handled user session validation and getting current user
+- `login.ts` - Handled user login with email/password
+- `logout.ts` - Handled user logout and session cleanup
+- `register.ts` - Handled new user registration
+
+All these endpoints are now handled by Better Auth through the catch-all route `[...all].ts`.
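+
+## Example: calling the replacement endpoints
+
+As a rough sketch (the exact paths depend on the Better Auth configuration), clients now talk to Better Auth's routes under `/api/auth` instead of the legacy routes, e.g. email/password sign-in:
+
+```ts
+// Illustrative only; Better Auth's default email sign-in route under the
+// /api/auth base path. Adjust if the basePath or plugins differ.
+const res = await fetch("/api/auth/sign-in/email", {
+  method: "POST",
+  headers: { "Content-Type": "application/json" },
+  body: JSON.stringify({ email: "user@example.com", password: "secret" }),
+});
+```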
\ No newline at end of file
diff --git a/src/pages/api/auth/index.ts b/src/pages/api/auth/legacy-backup/index.ts
similarity index 89%
rename from src/pages/api/auth/index.ts
rename to src/pages/api/auth/legacy-backup/index.ts
index 1c2f936..8eeb62b 100644
--- a/src/pages/api/auth/index.ts
+++ b/src/pages/api/auth/legacy-backup/index.ts
@@ -1,6 +1,6 @@
import type { APIRoute } from "astro";
-import { db, users, configs, client } from "@/lib/db";
-import { eq, and } from "drizzle-orm";
+import { db, users, configs } from "@/lib/db";
+import { eq, and, sql } from "drizzle-orm";
import jwt from "jsonwebtoken";
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
@@ -10,10 +10,10 @@ export const GET: APIRoute = async ({ request, cookies }) => {
const token = authHeader?.split(" ")[1] || cookies.get("token")?.value;
if (!token) {
- const userCountResult = await client.execute(
- `SELECT COUNT(*) as count FROM users`
- );
- const userCount = userCountResult.rows[0].count;
+ const userCountResult = await db
+ .select({ count: sql`count(*)` })
+ .from(users);
+ const userCount = userCountResult[0].count;
if (userCount === 0) {
return new Response(JSON.stringify({ error: "No users found" }), {
diff --git a/src/pages/api/auth/login.ts b/src/pages/api/auth/legacy-backup/login.ts
similarity index 100%
rename from src/pages/api/auth/login.ts
rename to src/pages/api/auth/legacy-backup/login.ts
diff --git a/src/pages/api/auth/logout.ts b/src/pages/api/auth/legacy-backup/logout.ts
similarity index 100%
rename from src/pages/api/auth/logout.ts
rename to src/pages/api/auth/legacy-backup/logout.ts
diff --git a/src/pages/api/auth/register.ts b/src/pages/api/auth/legacy-backup/register.ts
similarity index 100%
rename from src/pages/api/auth/register.ts
rename to src/pages/api/auth/legacy-backup/register.ts
diff --git a/src/pages/api/config/index.ts b/src/pages/api/config/index.ts
index a7e7590..0465f43 100644
--- a/src/pages/api/config/index.ts
+++ b/src/pages/api/config/index.ts
@@ -5,6 +5,7 @@ import { eq } from "drizzle-orm";
import { calculateCleanupInterval } from "@/lib/cleanup-service";
import { createSecureErrorResponse } from "@/lib/utils";
import { mapUiToDbConfig, mapDbToUiConfig } from "@/lib/utils/config-mapper";
+import { encrypt, decrypt, migrateToken } from "@/lib/utils/encryption";
export const POST: APIRoute = async ({ request }) => {
try {
@@ -55,17 +56,27 @@ export const POST: APIRoute = async ({ request }) => {
? JSON.parse(existingConfig.giteaConfig)
: existingConfig.giteaConfig;
+ // Decrypt existing tokens before preserving
if (!mappedGithubConfig.token && existingGithub.token) {
- mappedGithubConfig.token = existingGithub.token;
+ mappedGithubConfig.token = decrypt(existingGithub.token);
}
if (!mappedGiteaConfig.token && existingGitea.token) {
- mappedGiteaConfig.token = existingGitea.token;
+ mappedGiteaConfig.token = decrypt(existingGitea.token);
}
} catch (tokenError) {
console.error("Failed to preserve tokens:", tokenError);
}
}
+
+ // Encrypt tokens before saving
+ if (mappedGithubConfig.token) {
+ mappedGithubConfig.token = encrypt(mappedGithubConfig.token);
+ }
+
+ if (mappedGiteaConfig.token) {
+ mappedGiteaConfig.token = encrypt(mappedGiteaConfig.token);
+ }
// Process schedule config - set/update nextRun if enabled, clear if disabled
const processedScheduleConfig = { ...scheduleConfig };
@@ -279,15 +290,54 @@ export const GET: APIRoute = async ({ request }) => {
// Map database structure to UI structure
const dbConfig = config[0];
- const uiConfig = mapDbToUiConfig(dbConfig);
- return new Response(JSON.stringify({
- ...dbConfig,
- ...uiConfig,
- }), {
- status: 200,
- headers: { "Content-Type": "application/json" },
- });
+ // Decrypt tokens before sending to UI
+ try {
+ const githubConfig = typeof dbConfig.githubConfig === "string"
+ ? JSON.parse(dbConfig.githubConfig)
+ : dbConfig.githubConfig;
+
+ const giteaConfig = typeof dbConfig.giteaConfig === "string"
+ ? JSON.parse(dbConfig.giteaConfig)
+ : dbConfig.giteaConfig;
+
+ // Decrypt tokens
+ if (githubConfig.token) {
+ githubConfig.token = decrypt(githubConfig.token);
+ }
+
+ if (giteaConfig.token) {
+ giteaConfig.token = decrypt(giteaConfig.token);
+ }
+
+ // Create modified config with decrypted tokens
+ const decryptedConfig = {
+ ...dbConfig,
+ githubConfig,
+ giteaConfig
+ };
+
+ const uiConfig = mapDbToUiConfig(decryptedConfig);
+
+ return new Response(JSON.stringify({
+ ...dbConfig,
+ ...uiConfig,
+ }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ console.error("Failed to decrypt tokens:", error);
+ // Return config without decrypting tokens if there's an error
+ const uiConfig = mapDbToUiConfig(dbConfig);
+ return new Response(JSON.stringify({
+ ...dbConfig,
+ ...uiConfig,
+ }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
} catch (error) {
return createSecureErrorResponse(error, "config fetch", 500);
}
diff --git a/src/pages/api/job/mirror-org.ts b/src/pages/api/job/mirror-org.ts
index dc40095..d9328aa 100644
--- a/src/pages/api/job/mirror-org.ts
+++ b/src/pages/api/job/mirror-org.ts
@@ -9,6 +9,7 @@ import { type MembershipRole } from "@/types/organizations";
import { createSecureErrorResponse } from "@/lib/utils";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";
+import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
export const POST: APIRoute = async ({ request }) => {
try {
@@ -71,7 +72,8 @@ export const POST: APIRoute = async ({ request }) => {
}
// Create a single Octokit instance to be reused
- const octokit = createGitHubClient(config.githubConfig.token);
+ const decryptedToken = getDecryptedGitHubToken(config);
+ const octokit = createGitHubClient(decryptedToken);
// Define the concurrency limit - adjust based on API rate limits
// Using a lower concurrency for organizations since each org might contain many repos
diff --git a/src/pages/api/job/mirror-repo.ts b/src/pages/api/job/mirror-repo.ts
index 4e6acea..60bd3af 100644
--- a/src/pages/api/job/mirror-repo.ts
+++ b/src/pages/api/job/mirror-repo.ts
@@ -9,6 +9,7 @@ import {
getGiteaRepoOwnerAsync,
} from "@/lib/gitea";
import { createGitHubClient } from "@/lib/github";
+import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
import { processWithResilience } from "@/lib/utils/concurrency";
import { createSecureErrorResponse } from "@/lib/utils";
@@ -73,7 +74,8 @@ export const POST: APIRoute = async ({ request }) => {
}
// Create a single Octokit instance to be reused
- const octokit = createGitHubClient(config.githubConfig.token);
+ const decryptedToken = getDecryptedGitHubToken(config);
+ const octokit = createGitHubClient(decryptedToken);
// Define the concurrency limit - adjust based on API rate limits
const CONCURRENCY_LIMIT = 3;
diff --git a/src/pages/api/job/retry-repo.ts b/src/pages/api/job/retry-repo.ts
index f283629..560295c 100644
--- a/src/pages/api/job/retry-repo.ts
+++ b/src/pages/api/job/retry-repo.ts
@@ -13,6 +13,7 @@ import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
import { processWithRetry } from "@/lib/utils/concurrency";
import { createMirrorJob } from "@/lib/helpers";
import { createSecureErrorResponse } from "@/lib/utils";
+import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
export const POST: APIRoute = async ({ request }) => {
try {
@@ -71,8 +72,11 @@ export const POST: APIRoute = async ({ request }) => {
// Start background retry with parallel processing
setTimeout(async () => {
// Create a single Octokit instance to be reused if needed
- const octokit = config.githubConfig.token
- ? createGitHubClient(config.githubConfig.token)
+ const decryptedToken = config.githubConfig.token
+ ? getDecryptedGitHubToken(config)
+ : null;
+ const octokit = decryptedToken
+ ? createGitHubClient(decryptedToken)
: null;
// Define the concurrency limit - adjust based on API rate limits
diff --git a/src/pages/api/organizations/[id].ts b/src/pages/api/organizations/[id].ts
index 9a3c888..152ccac 100644
--- a/src/pages/api/organizations/[id].ts
+++ b/src/pages/api/organizations/[id].ts
@@ -2,36 +2,17 @@ import type { APIRoute } from "astro";
import { db, organizations } from "@/lib/db";
import { eq, and } from "drizzle-orm";
import { createSecureErrorResponse } from "@/lib/utils";
-import jwt from "jsonwebtoken";
+import { requireAuth } from "@/lib/utils/auth-helpers";
-const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
-
-export const PATCH: APIRoute = async ({ request, params, cookies }) => {
+export const PATCH: APIRoute = async (context) => {
try {
- // Get token from Authorization header or cookies
- const authHeader = request.headers.get("Authorization");
- const token = authHeader?.split(" ")[1] || cookies.get("token")?.value;
+ // Check authentication
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
- if (!token) {
- return new Response(JSON.stringify({ error: "Unauthorized" }), {
- status: 401,
- headers: { "Content-Type": "application/json" },
- });
- }
+ const userId = user!.id;
- // Verify token and get user ID
- let userId: string;
- try {
- const decoded = jwt.verify(token, JWT_SECRET) as { id: string };
- userId = decoded.id;
- } catch (error) {
- return new Response(JSON.stringify({ error: "Invalid token" }), {
- status: 401,
- headers: { "Content-Type": "application/json" },
- });
- }
-
- const orgId = params.id;
+ const orgId = context.params.id;
if (!orgId) {
return new Response(JSON.stringify({ error: "Organization ID is required" }), {
status: 400,
@@ -39,7 +20,7 @@ export const PATCH: APIRoute = async ({ request, params, cookies }) => {
});
}
- const body = await request.json();
+ const body = await context.request.json();
const { destinationOrg } = body;
// Validate that the organization belongs to the user
diff --git a/src/pages/api/repositories/[id].ts b/src/pages/api/repositories/[id].ts
index b79bcce..debbc07 100644
--- a/src/pages/api/repositories/[id].ts
+++ b/src/pages/api/repositories/[id].ts
@@ -2,36 +2,17 @@ import type { APIRoute } from "astro";
import { db, repositories } from "@/lib/db";
import { eq, and } from "drizzle-orm";
import { createSecureErrorResponse } from "@/lib/utils";
-import jwt from "jsonwebtoken";
+import { requireAuth } from "@/lib/utils/auth-helpers";
-const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
-
-export const PATCH: APIRoute = async ({ request, params, cookies }) => {
+export const PATCH: APIRoute = async (context) => {
try {
- // Get token from Authorization header or cookies
- const authHeader = request.headers.get("Authorization");
- const token = authHeader?.split(" ")[1] || cookies.get("token")?.value;
+ // Check authentication
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
- if (!token) {
- return new Response(JSON.stringify({ error: "Unauthorized" }), {
- status: 401,
- headers: { "Content-Type": "application/json" },
- });
- }
+ const userId = user!.id;
- // Verify token and get user ID
- let userId: string;
- try {
- const decoded = jwt.verify(token, JWT_SECRET) as { id: string };
- userId = decoded.id;
- } catch (error) {
- return new Response(JSON.stringify({ error: "Invalid token" }), {
- status: 401,
- headers: { "Content-Type": "application/json" },
- });
- }
-
- const repoId = params.id;
+ const repoId = context.params.id;
if (!repoId) {
return new Response(JSON.stringify({ error: "Repository ID is required" }), {
status: 400,
@@ -39,7 +20,7 @@ export const PATCH: APIRoute = async ({ request, params, cookies }) => {
});
}
- const body = await request.json();
+ const body = await context.request.json();
const { destinationOrg } = body;
// Validate that the repository belongs to the user
diff --git a/src/pages/api/sso/applications.ts b/src/pages/api/sso/applications.ts
new file mode 100644
index 0000000..ef4ee93
--- /dev/null
+++ b/src/pages/api/sso/applications.ts
@@ -0,0 +1,176 @@
+import type { APIContext } from "astro";
+import { createSecureErrorResponse } from "@/lib/utils";
+import { requireAuth } from "@/lib/utils/auth-helpers";
+import { db, oauthApplications } from "@/lib/db";
+import { nanoid } from "nanoid";
+import { eq } from "drizzle-orm";
+import { generateRandomString } from "@/lib/utils";
+
+// GET /api/sso/applications - List all OAuth applications
+export async function GET(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const applications = await db.select().from(oauthApplications);
+
+ // Don't send client secrets in list response
+ const sanitizedApps = applications.map(app => ({
+ ...app,
+ clientSecret: undefined,
+ }));
+
+ return new Response(JSON.stringify(sanitizedApps), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO applications API");
+ }
+}
+
+// POST /api/sso/applications - Create a new OAuth application
+export async function POST(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const body = await context.request.json();
+ const { name, redirectURLs, type = "web", metadata } = body;
+
+ // Validate required fields
+ if (!name || !redirectURLs || redirectURLs.length === 0) {
+ return new Response(
+ JSON.stringify({ error: "Name and at least one redirect URL are required" }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+
+ // Generate client credentials
+ const clientId = `client_${generateRandomString(32)}`;
+ const clientSecret = `secret_${generateRandomString(48)}`;
+
+ // Insert new application
+ const [newApp] = await db
+ .insert(oauthApplications)
+ .values({
+ id: nanoid(),
+ clientId,
+ clientSecret,
+ name,
+ redirectURLs: Array.isArray(redirectURLs) ? redirectURLs.join(",") : redirectURLs,
+ type,
+ metadata: metadata ? JSON.stringify(metadata) : null,
+ userId: user.id,
+ disabled: false,
+ })
+ .returning();
+
+ return new Response(JSON.stringify(newApp), {
+ status: 201,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO applications API");
+ }
+}
+
+// PUT /api/sso/applications/:id - Update an OAuth application
+export async function PUT(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const url = new URL(context.request.url);
+ const appId = url.pathname.split("/").pop();
+
+ if (!appId) {
+ return new Response(
+ JSON.stringify({ error: "Application ID is required" }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+
+ const body = await context.request.json();
+ const { name, redirectURLs, disabled, metadata } = body;
+
+ const updateData: any = {};
+ if (name !== undefined) updateData.name = name;
+ if (redirectURLs !== undefined) {
+ updateData.redirectURLs = Array.isArray(redirectURLs)
+ ? redirectURLs.join(",")
+ : redirectURLs;
+ }
+ if (disabled !== undefined) updateData.disabled = disabled;
+ if (metadata !== undefined) updateData.metadata = JSON.stringify(metadata);
+
+ const [updated] = await db
+ .update(oauthApplications)
+ .set({
+ ...updateData,
+ updatedAt: new Date(),
+ })
+ .where(eq(oauthApplications.id, appId))
+ .returning();
+
+ if (!updated) {
+ return new Response(JSON.stringify({ error: "Application not found" }), {
+ status: 404,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+
+ return new Response(JSON.stringify({ ...updated, clientSecret: undefined }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO applications API");
+ }
+}
+
+// DELETE /api/sso/applications/:id - Delete an OAuth application
+export async function DELETE(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const url = new URL(context.request.url);
+ const appId = url.searchParams.get("id");
+
+ if (!appId) {
+ return new Response(
+ JSON.stringify({ error: "Application ID is required" }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+
+ const deleted = await db
+ .delete(oauthApplications)
+ .where(eq(oauthApplications.id, appId))
+ .returning();
+
+ if (deleted.length === 0) {
+ return new Response(JSON.stringify({ error: "Application not found" }), {
+ status: 404,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+
+ return new Response(JSON.stringify({ success: true }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO applications API");
+ }
+}
\ No newline at end of file
diff --git a/src/pages/api/sso/discover.ts b/src/pages/api/sso/discover.ts
new file mode 100644
index 0000000..acbf94d
--- /dev/null
+++ b/src/pages/api/sso/discover.ts
@@ -0,0 +1,69 @@
+import type { APIContext } from "astro";
+import { createSecureErrorResponse } from "@/lib/utils";
+import { requireAuth } from "@/lib/utils/auth-helpers";
+
+// POST /api/sso/discover - Discover OIDC configuration from issuer URL
+export async function POST(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const { issuer } = await context.request.json();
+
+ if (!issuer) {
+ return new Response(JSON.stringify({ error: "Issuer URL is required" }), {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+
+ // Ensure issuer URL ends without trailing slash for well-known discovery
+ const cleanIssuer = issuer.replace(/\/$/, "");
+ const discoveryUrl = `${cleanIssuer}/.well-known/openid-configuration`;
+
+ try {
+ // Fetch OIDC discovery document
+ const response = await fetch(discoveryUrl);
+
+ if (!response.ok) {
+ throw new Error(`Failed to fetch discovery document: ${response.status}`);
+ }
+
+ const config = await response.json();
+
+ // Extract the essential endpoints
+ const discoveredConfig = {
+ issuer: config.issuer || cleanIssuer,
+ authorizationEndpoint: config.authorization_endpoint,
+ tokenEndpoint: config.token_endpoint,
+ userInfoEndpoint: config.userinfo_endpoint,
+ jwksEndpoint: config.jwks_uri,
+ // Additional useful fields
+ scopes: config.scopes_supported || ["openid", "profile", "email"],
+ responseTypes: config.response_types_supported || ["code"],
+ grantTypes: config.grant_types_supported || ["authorization_code"],
+ // Suggested domain from issuer
+ suggestedDomain: new URL(cleanIssuer).hostname.replace("www.", ""),
+ };
+
+ return new Response(JSON.stringify(discoveredConfig), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ console.error("OIDC discovery error:", error);
+ return new Response(
+ JSON.stringify({
+ error: "Failed to discover OIDC configuration",
+ details: error instanceof Error ? error.message : "Unknown error"
+ }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO discover API");
+ }
+}
\ No newline at end of file
diff --git a/src/pages/api/sso/providers.ts b/src/pages/api/sso/providers.ts
new file mode 100644
index 0000000..9c5d523
--- /dev/null
+++ b/src/pages/api/sso/providers.ts
@@ -0,0 +1,152 @@
+import type { APIContext } from "astro";
+import { createSecureErrorResponse } from "@/lib/utils";
+import { requireAuth } from "@/lib/utils/auth-helpers";
+import { db, ssoProviders } from "@/lib/db";
+import { nanoid } from "nanoid";
+import { eq } from "drizzle-orm";
+
+// GET /api/sso/providers - List all SSO providers
+export async function GET(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const providers = await db.select().from(ssoProviders);
+
+ return new Response(JSON.stringify(providers), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO providers API");
+ }
+}
+
+// POST /api/sso/providers - Create a new SSO provider
+export async function POST(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const body = await context.request.json();
+ const {
+ issuer,
+ domain,
+ clientId,
+ clientSecret,
+ authorizationEndpoint,
+ tokenEndpoint,
+ jwksEndpoint,
+ userInfoEndpoint,
+ mapping,
+ providerId,
+ organizationId,
+ } = body;
+
+ // Validate required fields
+ if (!issuer || !domain || !providerId) {
+ return new Response(
+ JSON.stringify({ error: "Missing required fields" }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+
+ // Check if provider ID already exists
+ const existing = await db
+ .select()
+ .from(ssoProviders)
+ .where(eq(ssoProviders.providerId, providerId))
+ .limit(1);
+
+ if (existing.length > 0) {
+ return new Response(
+ JSON.stringify({ error: "Provider ID already exists" }),
+ {
+ status: 409,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+
+ // Create OIDC config object
+ const oidcConfig = {
+ clientId,
+ clientSecret,
+ authorizationEndpoint,
+ tokenEndpoint,
+ jwksEndpoint,
+ userInfoEndpoint,
+ mapping: mapping || {
+ id: "sub",
+ email: "email",
+ emailVerified: "email_verified",
+ name: "name",
+ image: "picture",
+ },
+ };
+
+ // Insert new provider
+ const [newProvider] = await db
+ .insert(ssoProviders)
+ .values({
+ id: nanoid(),
+ issuer,
+ domain,
+ oidcConfig: JSON.stringify(oidcConfig),
+ userId: user.id,
+ providerId,
+ organizationId,
+ })
+ .returning();
+
+ return new Response(JSON.stringify(newProvider), {
+ status: 201,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO providers API");
+ }
+}
+
+// DELETE /api/sso/providers - Delete a provider by ID
+export async function DELETE(context: APIContext) {
+ try {
+ const { user, response } = await requireAuth(context);
+ if (response) return response;
+
+ const url = new URL(context.request.url);
+ const providerId = url.searchParams.get("id");
+
+ if (!providerId) {
+ return new Response(
+ JSON.stringify({ error: "Provider ID is required" }),
+ {
+ status: 400,
+ headers: { "Content-Type": "application/json" },
+ }
+ );
+ }
+
+ const deleted = await db
+ .delete(ssoProviders)
+ .where(eq(ssoProviders.id, providerId))
+ .returning();
+
+ if (deleted.length === 0) {
+ return new Response(JSON.stringify({ error: "Provider not found" }), {
+ status: 404,
+ headers: { "Content-Type": "application/json" },
+ });
+ }
+
+ return new Response(JSON.stringify({ success: true }), {
+ status: 200,
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error) {
+ return createSecureErrorResponse(error, "SSO providers API");
+ }
+}
\ No newline at end of file
diff --git a/src/pages/api/sync/index.ts b/src/pages/api/sync/index.ts
index dd74473..ba6e00f 100644
--- a/src/pages/api/sync/index.ts
+++ b/src/pages/api/sync/index.ts
@@ -10,6 +10,7 @@ import {
getGithubStarredRepositories,
} from "@/lib/github";
import { jsonResponse, createSecureErrorResponse } from "@/lib/utils";
+import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
export const POST: APIRoute = async ({ request }) => {
const url = new URL(request.url);
@@ -33,21 +34,21 @@ export const POST: APIRoute = async ({ request }) => {
});
}
- const token = config.githubConfig?.token;
-
- if (!token) {
+ if (!config.githubConfig?.token) {
return jsonResponse({
data: { error: "GitHub token is missing in config" },
status: 400,
});
}
- const octokit = createGitHubClient(token);
+ // Decrypt the GitHub token before using it
+ const decryptedToken = getDecryptedGitHubToken(config);
+ const octokit = createGitHubClient(decryptedToken);
// Fetch GitHub data in parallel
const [basicAndForkedRepos, starredRepos, gitOrgs] = await Promise.all([
getGithubRepositories({ octokit, config }),
- config.githubConfig?.mirrorStarred
+ config.githubConfig?.includeStarred
? getGithubStarredRepositories({ octokit, config })
: Promise.resolve([]),
getGithubOrganizations({ octokit, config }),
diff --git a/src/pages/docs/advanced.astro b/src/pages/docs/advanced.astro
new file mode 100644
index 0000000..9212221
--- /dev/null
+++ b/src/pages/docs/advanced.astro
@@ -0,0 +1,467 @@
+---
+import MainLayout from '../../layouts/main.astro';
+---
+
+
+
+
+
+
+
+
+
+
Advanced Topics
+
+ Advanced configuration options, deployment strategies, troubleshooting, and performance optimization for Gitea Mirror.
+
+
+
+
+
+ Environment Variables
+
+
+ Gitea Mirror can be configured using environment variables. These are particularly useful for containerized deployments.
+
+
+
+
+
+
+ | Variable |
+ Description |
+ Default |
+
+
+
+ {[
+ { var: 'NODE_ENV', desc: 'Application environment', default: 'production' },
+ { var: 'PORT', desc: 'Server port', default: '4321' },
+ { var: 'HOST', desc: 'Server host', default: '0.0.0.0' },
+ { var: 'BETTER_AUTH_SECRET', desc: 'Authentication secret key', default: 'Auto-generated' },
+ { var: 'BETTER_AUTH_URL', desc: 'Authentication base URL', default: 'http://localhost:4321' },
+ { var: 'NODE_EXTRA_CA_CERTS', desc: 'Path to CA certificate file', default: 'None' },
+ { var: 'DATABASE_URL', desc: 'SQLite database path', default: './data/gitea-mirror.db' },
+ ].map((item, i) => (
+
+ | {item.var} |
+ {item.desc} |
+ {item.default} |
+
+ ))}
+
+
+
+
+
+
+
+
+
+ Database Management
+
+
+ Gitea Mirror uses SQLite for data storage. The database is automatically created on first run.
+
+
+ Database Commands
+
+
+
+
Initialize Database
+
+ bun run init-db
+
+
Creates or recreates the database schema
+
+
+
+
Check Database
+
+ bun run check-db
+
+
Verifies database integrity and displays statistics
+
+
+
+
Fix Database
+
+ bun run fix-db
+
+
Attempts to repair common database issues
+
+
+
+
Backup Database
+
+ cp data/gitea-mirror.db data/gitea-mirror.db.backup
+
+
Always back up before major changes
+
+
+
+ Database Schema Management
+
+
+
+
+
+
Drizzle Kit
+
Database schema is managed with Drizzle ORM. Use these commands for schema changes:
+
+ bun run drizzle-kit generate - Generate migration files
+ bun run drizzle-kit push - Apply schema changes directly
+ bun run drizzle-kit studio - Open database browser
+
+
+
+
+
+
+
+
+
+
+ Performance Optimization
+
+ Mirroring Performance
+
+
+ {[
+ {
+ title: 'Batch Operations',
+ tips: [
+ 'Mirror multiple repositories at once',
+ 'Use organization-level mirroring',
+ 'Schedule mirroring during off-peak hours'
+ ]
+ },
+ {
+ title: 'Network Optimization',
+ tips: [
+ 'Use SSH URLs when possible',
+ 'Enable Git LFS only when needed',
+ 'Consider repository size limits'
+ ]
+ }
+ ].map(section => (
+
+
{section.title}
+
+ {section.tips.map(tip => (
+ -
+ •
+ {tip}
+
+ ))}
+
+
+ ))}
+
+
+ Database Performance
+
+
+
Regular Maintenance
+
+ -
+ •
+ Enable automatic cleanup in Configuration → Automation
+
+ -
+ •
+ Periodically vacuum the SQLite database:
sqlite3 data/gitea-mirror.db "VACUUM;"
+
+ -
+ •
+ Monitor database size and clean old events regularly
+
+
+
+
+
+
+
+
+
+ Reverse Proxy Configuration
+
+
+ For production deployments, it's recommended to use a reverse proxy like Nginx or Caddy.
+
+
+ Nginx Example
+
+
+
{`server {
+ listen 80;
+ server_name gitea-mirror.example.com;
+ return 301 https://$server_name$request_uri;
+}
+
+server {
+ listen 443 ssl http2;
+ server_name gitea-mirror.example.com;
+
+ ssl_certificate /path/to/cert.pem;
+ ssl_certificate_key /path/to/key.pem;
+
+ location / {
+ proxy_pass http://localhost:4321;
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ }
+
+ # SSE endpoint needs special handling
+ location /api/sse {
+ proxy_pass http://localhost:4321;
+ proxy_http_version 1.1;
+ proxy_set_header Connection '';
+ proxy_set_header Cache-Control 'no-cache';
+ proxy_set_header X-Accel-Buffering 'no';
+ proxy_read_timeout 86400;
+ }
+}`}
+
+
+ Caddy Example
+
+
+
{`gitea-mirror.example.com {
+ reverse_proxy localhost:4321
+}`}
+
+
+
+
+
+
+
+ Monitoring and Health Checks
+
+ Health Check Endpoint
+
+
+
Monitor application health using the built-in endpoint:
+
+
+ GET /api/health
+
+
+
Response:
+
+
{`{
+ "status": "ok",
+ "timestamp": "2024-01-15T10:30:00Z",
+ "database": "connected",
+ "version": "1.0.0"
+}`}
+
+
+
+ Monitoring with Prometheus
+
+
+ While Gitea Mirror doesn't have built-in Prometheus metrics, you can monitor it using:
+
+
+
+ -
+ •
+ Blackbox exporter for endpoint monitoring
+
+ -
+ •
+ Node exporter for system metrics
+
+ -
+ •
+ Custom scripts to check the health endpoint and database metrics (see the sketch below)
+
+
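+
+ A minimal sketch of such a script (the URL and exit-code handling are illustrative; adjust to your deployment), runnable with Bun:
+
+ {`// healthcheck.ts - polls the built-in health endpoint and exits non-zero on failure
+const res = await fetch("http://localhost:4321/api/health");
+if (!res.ok) {
+  console.error("Health check failed with status " + res.status);
+  process.exit(1);
+}
+const body = await res.json();
+console.log(body.status, body.database);`}
+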
+
+
+
+
+
+
+ Backup and Recovery
+
+ What to Backup
+
+
+
+
Essential Files
+
+ - • data/gitea-mirror.db
+ - • .env (if using)
+ - • Custom CA certificates
+
+
+
+
+
Optional Files
+
+ - • Docker volumes
+ - • Custom configurations
+ - • Logs for auditing
+
+
+
+
+ Backup Script Example
+
+
+
{`#!/bin/bash
+BACKUP_DIR="/backups/gitea-mirror"
+DATE=$(date +%Y%m%d_%H%M%S)
+
+# Create backup directory
+mkdir -p "$BACKUP_DIR/$DATE"
+
+# Backup database
+cp data/gitea-mirror.db "$BACKUP_DIR/$DATE/"
+
+# Backup environment
+cp .env "$BACKUP_DIR/$DATE/" 2>/dev/null || true
+
+# Create tarball
+tar -czf "$BACKUP_DIR/backup_$DATE.tar.gz" -C "$BACKUP_DIR" "$DATE"
+
+# Clean up
+rm -rf "$BACKUP_DIR/$DATE"
+
+# Keep only last 7 backups
+ls -t "$BACKUP_DIR"/backup_*.tar.gz | tail -n +8 | xargs rm -f`}
+
+
+
+
+
+
+
+ Troubleshooting Guide
+
+
+ {[
+ {
+ issue: 'Application won\'t start',
+ solutions: [
+ 'Check port availability: `lsof -i :4321`',
+ 'Verify environment variables are set correctly',
+ 'Check database file permissions',
+ 'Review logs for startup errors'
+ ]
+ },
+ {
+ issue: 'Authentication failures',
+ solutions: [
+ 'Ensure BETTER_AUTH_SECRET is set and consistent',
+ 'Check BETTER_AUTH_URL matches your deployment',
+ 'Clear browser cookies and try again',
+ 'Verify database contains user records'
+ ]
+ },
+ {
+ issue: 'Mirroring failures',
+ solutions: [
+ 'Test GitHub/Gitea connections individually',
+ 'Verify access tokens have correct permissions',
+ 'Check network connectivity and firewall rules',
+ 'Review Activity Log for detailed error messages'
+ ]
+ },
+ {
+ issue: 'Performance issues',
+ solutions: [
+ 'Check database size and run cleanup',
+ 'Monitor system resources (CPU, memory, disk)',
+ 'Reduce concurrent mirroring operations',
+ 'Consider upgrading deployment resources'
+ ]
+ }
+ ].map(item => (
+
+
{item.issue}
+
+ {item.solutions.map(solution => (
+ -
+ ✓
+ {solution}
+
+ ))}
+
+
+ ))}
+
+
+
+
+
+
+
+ Migration Guide
+
+ Migrating from JWT to Better Auth
+
+
+
If you're upgrading from an older version using JWT authentication:
+
+
+ -
+ 1
+
+
Backup your database
+
Always create a backup before migration
+
+
+ -
+ 2
+
+
Update environment variables
+
Replace JWT_SECRET with BETTER_AUTH_SECRET
+
+
+ -
+ 3
+
+
Run database migrations
+
New auth tables will be created automatically
+
+
+ -
+ 4
+
+
Users will need to log in again
+
Previous sessions will be invalidated
+
+
+
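+
+ The environment change from step 2 amounts to swapping one variable and setting the deployment URL if it is not already present (values below are placeholders):
+
+
+ {`# Before (JWT-based releases)
+# JWT_SECRET=your-old-secret
+
+# After (Better Auth)
+BETTER_AUTH_SECRET=generate-a-long-random-value
+BETTER_AUTH_URL=https://gitea-mirror.example.com`}
+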
+
+
+
+
+
\ No newline at end of file
diff --git a/src/pages/docs/architecture.astro b/src/pages/docs/architecture.astro
index be28c46..27c010b 100644
--- a/src/pages/docs/architecture.astro
+++ b/src/pages/docs/architecture.astro
@@ -47,7 +47,8 @@ import MainLayout from '../../layouts/main.astro';
{ name: 'Shadcn UI', desc: 'UI component library built on Tailwind CSS' },
{ name: 'SQLite', desc: 'Database for storing configuration, state, and events' },
{ name: 'Bun', desc: 'JavaScript runtime and package manager' },
- { name: 'Drizzle ORM', desc: 'Type-safe ORM for database interactions' }
+ { name: 'Drizzle ORM', desc: 'Type-safe ORM for database interactions' },
+ { name: 'Better Auth', desc: 'Modern authentication library with SSO/OIDC support' }
].map(tech => (
@@ -184,7 +185,8 @@ import MainLayout from '../../layouts/main.astro';
{[
- 'Authentication and user management',
+ 'Authentication with Better Auth (email/password, SSO, OIDC)',
+ 'OAuth2/OIDC provider functionality',
'GitHub API integration',
'Gitea API integration',
'Mirroring operations and job queue',
@@ -213,11 +215,13 @@ import MainLayout from '../../layouts/main.astro';
{[
- 'User accounts and authentication data',
+ 'User accounts and authentication data (Better Auth)',
+ 'OAuth applications and SSO provider configurations',
'GitHub and Gitea configuration',
'Repository and organization information',
'Mirroring job history and status',
- 'Event notifications and their read status'
+ 'Event notifications and their read status',
+ 'OAuth tokens and consent records'
].map(item => (
▸
@@ -238,7 +242,7 @@ import MainLayout from '../../layouts/main.astro';
{[
- { title: 'User Authentication', desc: 'Users authenticate through the frontend, which communicates with the backend to validate credentials.' },
+ { title: 'User Authentication', desc: 'Users authenticate via Better Auth using email/password, SSO providers, or as OIDC clients.' },
{ title: 'Configuration', desc: 'Users configure GitHub and Gitea settings through the UI, which are stored in the SQLite database.' },
{ title: 'Repository Discovery', desc: 'The backend queries the GitHub API to discover repositories based on user configuration.' },
{ title: 'Mirroring Process', desc: 'When triggered, the backend fetches repository data from GitHub and pushes it to Gitea.' },
diff --git a/src/pages/docs/authentication.astro b/src/pages/docs/authentication.astro
new file mode 100644
index 0000000..d87ef96
--- /dev/null
+++ b/src/pages/docs/authentication.astro
@@ -0,0 +1,535 @@
+---
+import MainLayout from '../../layouts/main.astro';
+---
+
+
+
+
+
+
+
+
+
+
Authentication & SSO Configuration
+
+ Configure authentication methods including email/password, Single Sign-On (SSO), and OIDC provider functionality for Gitea Mirror.
+
+
+
+
+
+ Authentication Overview
+
+
+
+ Gitea Mirror uses Better Auth, a modern authentication library that supports multiple authentication methods.
+ All authentication settings can be configured through the web UI without editing configuration files.
+
+
+
+ Supported Authentication Methods
+
+
+ {[
+ {
+ icon: '✉️',
+ title: 'Email & Password',
+ desc: 'Traditional authentication with email and password. Always enabled by default.',
+ status: 'Always Enabled'
+ },
+ {
+ icon: '๐',
+ title: 'Single Sign-On (SSO)',
+ desc: 'Allow users to sign in using external OIDC providers like Google, Okta, or Azure AD.',
+ status: 'Optional'
+ },
+ {
+ icon: '๐',
+ title: 'OIDC Provider',
+ desc: 'Act as an OIDC provider, allowing other applications to authenticate through Gitea Mirror.',
+ status: 'Optional'
+ }
+ ].map(method => (
+
+
{method.icon}
+
{method.title}
+
{method.desc}
+
+ {method.status}
+
+
+ ))}
+
+
+
+
+
+
+
+ Accessing Authentication Settings
+
+
+ -
+ 1
+ Navigate to the Configuration page
+
+ -
+ 2
+ Click on the Authentication tab
+
+ -
+ 3
+ Configure SSO providers or OAuth applications as needed
+
+
+
+
+
+
+
+
+ Single Sign-On (SSO) Configuration
+
+
+ SSO allows your users to authenticate using external identity providers. This is useful for organizations that already have centralized authentication systems.
+
+
+ Adding an SSO Provider
+
+
+
Required Information
+
+
+ {[
+ { name: 'Issuer URL', desc: 'The OIDC issuer URL of your provider', example: 'https://accounts.google.com' },
+ { name: 'Domain', desc: 'The email domain for this provider', example: 'example.com' },
+ { name: 'Provider ID', desc: 'A unique identifier for this provider', example: 'google-sso' },
+ { name: 'Client ID', desc: 'OAuth client ID from your provider', example: '123456789.apps.googleusercontent.com' },
+ { name: 'Client Secret', desc: 'OAuth client secret from your provider', example: 'GOCSPX-...' }
+ ].map(field => (
+
+
+ {field.name}
+ Required
+
+
{field.desc}
+
{field.example}
+
+ ))}
+
+
+
+
+
+
+
+
Auto-Discovery
+
Most OIDC providers support auto-discovery. Simply enter the Issuer URL and click "Discover" to automatically populate the endpoint URLs.
+
+
+
+
+ Redirect URL Configuration
+
+
+
When configuring your SSO provider, use this redirect URL:
+
https://your-domain.com/api/auth/sso/callback/{`{provider-id}`}
+
Replace {`{provider-id}`} with your chosen Provider ID (e.g., google-sso)
+
+
+
+
+
+
+
+ Example SSO Configurations
+
+
+
+
+
+ Google SSO
+
+
+
+
+ -
+ 1. Create OAuth Client in Google Cloud Console
+
+ - • Go to Google Cloud Console
+ - • Create a new OAuth 2.0 Client ID
+ - • Add authorized redirect URI:
https://your-domain.com/api/auth/sso/callback/google-sso
+
+
+ -
+ 2. Configure in Gitea Mirror
+
+
+
Issuer URL: https://accounts.google.com
+
Domain: your-company.com
+
Provider ID: google-sso
+
Client ID: [Your Google Client ID]
+
Client Secret: [Your Google Client Secret]
+
+
+
+ -
+ 3. Use Auto-Discovery
+
Click "Discover" to automatically populate the endpoint URLs
+
+
+
+
+
+
+
+
+ O
+ Okta SSO
+
+
+
+
+ -
+ 1. Create OIDC Application in Okta
+
+ - • In Okta Admin Console, create a new OIDC Web Application
+ - • Set Sign-in redirect URI:
https://your-domain.com/api/auth/sso/callback/okta-sso
+ - • Note the Client ID and Client Secret
+
+
+ -
+ 2. Configure in Gitea Mirror
+
+
+
Issuer URL: https://your-okta-domain.okta.com
+
Domain: your-company.com
+
Provider ID: okta-sso
+
Client ID: [Your Okta Client ID]
+
Client Secret: [Your Okta Client Secret]
+
+
+
+
+
+
+
+
+
+
+ M
+ Azure AD / Microsoft Entra ID
+
+
+
+
+ -
+ 1. Register Application in Azure Portal
+
+ - • Go to Azure Portal → Azure Active Directory → App registrations
+ - • Create a new registration
+ - • Add redirect URI:
https://your-domain.com/api/auth/sso/callback/azure-sso
+
+
+ -
+ 2. Configure in Gitea Mirror
+
+
+
Issuer URL: https://login.microsoftonline.com/{`{tenant-id}`}/v2.0
+
Domain: your-company.com
+
Provider ID: azure-sso
+
Client ID: [Your Application ID]
+
Client Secret: [Your Client Secret]
+
+
+
+
+
+
+
+
+
+
+
+
+ OIDC Provider Configuration
+
+
+ The OIDC Provider feature allows Gitea Mirror to act as an authentication provider for other applications.
+ This is useful when you want to centralize authentication through Gitea Mirror.
+
+
+ Creating OAuth Applications
+
+
+
+ -
+ 1
+
+
Navigate to OAuth Applications
+
Go to Configuration → Authentication → OAuth Applications
+
+
+ -
+ 2
+
+
Create New Application
+
Click "Create Application" and provide:
+
+ - • Application Name
+ - • Application Type (Web, Mobile, or Desktop)
+ - • Redirect URLs (one per line)
+
+
+
+ -
+ 3
+
+
Save Credentials
+
You'll receive a Client ID and Client Secret. Store these securely!
+
+
+
+
+
+ OIDC Endpoints
+
+
+
Applications can use these standard OIDC endpoints (example requests follow the list):
+
+
+ Discovery:
+ https://your-domain.com/.well-known/openid-configuration
+
+
+ Authorization:
+ https://your-domain.com/api/auth/oauth2/authorize
+
+
+ Token:
+ https://your-domain.com/api/auth/oauth2/token
+
+
+ UserInfo:
+ https://your-domain.com/api/auth/oauth2/userinfo
+
+
+ JWKS:
+ https://your-domain.com/api/auth/jwks
+
+
+
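+
+ For example, a client can fetch the discovery document and exchange an authorization code for tokens (a sketch; the domain, client credentials, code, and redirect URI are placeholders, and your client library may authenticate differently):
+
+
+ {`# Fetch the discovery document
+curl -s https://your-domain.com/.well-known/openid-configuration
+
+# Exchange an authorization code for tokens
+curl -s -X POST https://your-domain.com/api/auth/oauth2/token \\
+  -d grant_type=authorization_code \\
+  -d code=AUTH_CODE_FROM_CALLBACK \\
+  -d client_id=YOUR_CLIENT_ID \\
+  -d client_secret=YOUR_CLIENT_SECRET \\
+  -d redirect_uri=https://app.example.com/callback`}
+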
+
+ Supported Scopes
+
+
+ {[
+ { scope: 'openid', desc: 'Required - provides user ID', claims: 'sub' },
+ { scope: 'profile', desc: 'User profile information', claims: 'name, username, picture' },
+ { scope: 'email', desc: 'Email address', claims: 'email, email_verified' }
+ ].map(item => (
+
+
{item.scope}
+
{item.desc}
+
Claims: {item.claims}
+
+ ))}
+
+
+
+
+
+
+
+ User Experience
+
+ Login Flow with SSO
+
+
+
When SSO is configured, users will see authentication options on the login page:
+
+ - 1. Email & Password tab for traditional login
+ - 2. SSO tab with provider buttons or email input
+ - 3. Automatic redirect to the appropriate provider
+ - 4. Return to Gitea Mirror after successful authentication
+
+
+
+ OAuth Consent Flow
+
+
+
When an application requests authentication through Gitea Mirror:
+
+ - 1. User is redirected to Gitea Mirror
+ - 2. Login prompt if not already authenticated
+ - 3. Consent screen showing requested permissions
+ - 4. User approves or denies the request
+ - 5. Redirect back to the application with auth code
+
+
+
+
+
+
+
+
+ Security Considerations
+
+
+ {[
+ {
+ icon: '๐',
+ title: 'Client Secrets',
+ items: [
+ 'Store OAuth client secrets securely',
+ 'Never commit secrets to version control',
+ 'Rotate secrets regularly'
+ ]
+ },
+ {
+ icon: '๐',
+ title: 'Redirect URLs',
+ items: [
+ 'Only add trusted redirect URLs',
+ 'Use HTTPS in production',
+ 'Validate exact URL matches'
+ ]
+ },
+ {
+ icon: '🛡️',
+ title: 'Scopes & Permissions',
+ items: [
+ 'Grant minimum required scopes',
+ 'Review requested permissions',
+ 'Users can revoke access anytime'
+ ]
+ },
+ {
+ icon: '⏱️',
+ title: 'Token Security',
+ items: [
+ 'Access tokens have expiration',
+ 'Refresh tokens for long-lived access',
+ 'Tokens can be revoked'
+ ]
+ }
+ ].map(section => (
+
+
+ {section.icon}
+
{section.title}
+
+
+ {section.items.map(item => (
+ -
+ •
+ {item}
+
+ ))}
+
+
+ ))}
+
+
+
+
+
+
+
+ Troubleshooting
+
+
+
+
SSO Login Issues
+
+ -
+ •
+
+ "Invalid origin" error: Check that your Gitea Mirror URL matches the configured redirect URI
+
+
+ -
+ •
+
+ "Provider not found" error: Ensure the provider is properly configured and saved
+
+
+ -
+ •
+
+ Redirect loop: Verify the redirect URI in both Gitea Mirror and the SSO provider match exactly
+
+
+
+
+
+
+
OIDC Provider Issues
+
+ -
+ •
+
+ Application not found: Ensure the client ID is correct and the app is not disabled
+
+
+ -
+ •
+
+ Invalid redirect URI: The redirect URI must match exactly what's configured
+
+
+ -
+ •
+
+ Consent not working: Check browser cookies are enabled and not blocked
+
+
+
+
+
+
+
+
+
+
+
+ Migration from JWT Authentication
+
+
+
+
+
+
For Existing Users
+
+ - • Email/password authentication continues to work
+ - • No action required from existing users
+ - • SSO can be added as an additional option
+ - • JWT_SECRET is no longer required in environment variables
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/pages/docs/ca-certificates.astro b/src/pages/docs/ca-certificates.astro
new file mode 100644
index 0000000..3553867
--- /dev/null
+++ b/src/pages/docs/ca-certificates.astro
@@ -0,0 +1,475 @@
+---
+import MainLayout from '../../layouts/main.astro';
+---
+
+
+
+
+
+
+
+
+
+
CA Certificates Configuration
+
+ Configure custom Certificate Authority (CA) certificates for connecting to self-signed or privately signed Gitea instances.
+
+
+
+
+
+ Overview
+
+
+
+ When your Gitea instance uses a self-signed certificate or a certificate signed by a private Certificate Authority (CA),
+ you need to configure Gitea Mirror to trust these certificates. This guide explains how to add custom CA certificates
+ for different deployment methods.
+
+
+
+
+
+
+
+
Important
+
Without proper CA certificate configuration, you'll encounter SSL/TLS errors when connecting to Gitea instances with custom certificates.
+
+
+
+
+
+
+
+
+
+ Common SSL/TLS Errors
+
+ If you see any of these errors, you likely need to configure CA certificates:
+
+
+ {[
+ 'UNABLE_TO_VERIFY_LEAF_SIGNATURE',
+ 'SELF_SIGNED_CERT_IN_CHAIN',
+ 'UNABLE_TO_GET_ISSUER_CERT_LOCALLY',
+ 'CERT_UNTRUSTED',
+ 'unable to verify the first certificate'
+ ].map(error => (
+
+ {error}
+
+ ))}
+
+
+
+
+
+
+
+ Docker Configuration
+
+ For Docker deployments, you have several options to add custom CA certificates:
+
+ Method 1: Volume Mount (Recommended)
+
+
+
+ -
+ 1. Create a certificates directory
+
+
mkdir certs
+ -
+ 2. Copy your CA certificate(s)
+
+
cp /path/to/your-ca-cert.crt ./certs/
+
+
+ -
+ 3. Update docker-compose.yml
+
+
{`version: '3.8'
+services:
+ gitea-mirror:
+ image: raylabs/gitea-mirror:latest
+ volumes:
+ - ./data:/app/data
+ - ./certs:/usr/local/share/ca-certificates:ro
+ environment:
+ - NODE_EXTRA_CA_CERTS=/usr/local/share/ca-certificates/your-ca-cert.crt`}
+
+
+ -
+ 4. Restart the container
+
+
docker-compose down && docker-compose up -d
+
+
+
+
+
+ Method 2: Custom Docker Image
+
+
+
For permanent certificate inclusion, create a custom Docker image:
+
+
+
{`FROM raylabs/gitea-mirror:latest
+
+# Copy CA certificates
+COPY ./certs/*.crt /usr/local/share/ca-certificates/
+
+# Update CA certificates
+RUN update-ca-certificates
+
+# Set environment variable
+ENV NODE_EXTRA_CA_CERTS=/usr/local/share/ca-certificates/your-ca-cert.crt`}
+
+
+
Build and use your custom image:
+
+
docker build -t my-gitea-mirror .
+
+
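+
+ You can then run the custom image in place of the published one (a sketch; the port and data path mirror the compose example above):
+
+
+ {`docker run -d --name gitea-mirror \\
+  -p 4321:4321 \\
+  -v $(pwd)/data:/app/data \\
+  my-gitea-mirror`}
+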
+
+
+
+
+
+
+ Native/Bun Configuration
+
+ For native Bun deployments, configure CA certificates using environment variables:
+
+ Method 1: Environment Variable
+
+
+
+ -
+ 1. Export the certificate path
+
+
export NODE_EXTRA_CA_CERTS=/path/to/your-ca-cert.crt
+
+
+ -
+ 2. Run Gitea Mirror
+
+
+
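+
+ Putting both steps together (assuming the application is started with bun run start from its project directory):
+
+
+ {`export NODE_EXTRA_CA_CERTS=/path/to/your-ca-cert.crt
+bun run start`}
+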
+
+
+ Method 2: .env File
+
+
+
Add to your .env file:
+
+
NODE_EXTRA_CA_CERTS=/path/to/your-ca-cert.crt
+
+
+
+ Method 3: System-wide CA Store
+
+
+
Add certificates to your system's CA store:
+
+
+
+
Ubuntu/Debian:
+
+
{`sudo cp your-ca-cert.crt /usr/local/share/ca-certificates/
+sudo update-ca-certificates`}
+
+
+
+
+
RHEL/CentOS/Fedora:
+
+
{`sudo cp your-ca-cert.crt /etc/pki/ca-trust/source/anchors/
+sudo update-ca-trust`}
+
+
+
+
+
macOS:
+
+
{`sudo security add-trusted-cert -d -r trustRoot \\
+ -k /Library/Keychains/System.keychain your-ca-cert.crt`}
+
+
+
+
+
+
+
+
+
+
+ LXC Container Configuration
+
+ For LXC deployments on Proxmox VE:
+
+
+
+ -
+ 1. Enter the container
+
+
pct enter <container-id>
+
+
+ -
+ 2. Create certificates directory
+
+
mkdir -p /usr/local/share/ca-certificates
+
+
+ -
+ 3. Copy your CA certificate
+
+
cat > /usr/local/share/ca-certificates/your-ca.crt
+
+ Paste your certificate content and press Ctrl+D
+
+ -
+ 4. Update the systemd service
+
+
{`# Use a drop-in so the variable lands in the [Service] section
+mkdir -p /etc/systemd/system/gitea-mirror.service.d
+cat > /etc/systemd/system/gitea-mirror.service.d/ca-cert.conf << EOF
+[Service]
+Environment="NODE_EXTRA_CA_CERTS=/usr/local/share/ca-certificates/your-ca.crt"
+EOF`}
+
+
+ -
+ 5. Reload and restart
+
+
{`systemctl daemon-reload
+systemctl restart gitea-mirror`}
+
+
+
+
+
+
+
+
+
+
+ Multiple CA Certificates
+
+ If you need to trust multiple CA certificates:
+
+ Option 1: Bundle Certificates
+
+
+
Combine multiple certificates into one file:
+
+
{`cat ca-cert1.crt ca-cert2.crt ca-cert3.crt > ca-bundle.crt
+export NODE_EXTRA_CA_CERTS=/path/to/ca-bundle.crt`}
+
+
+
+ Option 2: System CA Store
+
+
+
Add all certificates to the system CA store (recommended for production):
+
+
{`# Copy all certificates
+cp *.crt /usr/local/share/ca-certificates/
+update-ca-certificates`}
+
+
+
+
+
+
+
+
+ Verifying Certificate Configuration
+
+ Test your certificate configuration:
+
+
+
1. Test Gitea Connection
+
Use the "Test Connection" button in the Gitea configuration section
+
+
2. Check Logs
+
Look for SSL/TLS errors in the application logs:
+
+
+
+
Docker:
+
+ docker logs gitea-mirror
+
+
+
+
Native:
+
+ Check terminal output
+
+
+
+
LXC:
+
+ journalctl -u gitea-mirror -f
+
+
+
+
+
3. Manual Certificate Test
+
Test SSL connection directly:
+
+
openssl s_client -connect your-gitea-domain.com:443 -CAfile /path/to/ca-cert.crt
+
+
+
+
+
+
+
+
+ Best Practices
+
+
+ {[
+ {
+ icon: '๐',
+ title: 'Certificate Security',
+ items: [
+ 'Keep CA certificates secure',
+ 'Use read-only mounts in Docker',
+ 'Limit certificate file permissions',
+ 'Regularly update certificates'
+ ]
+ },
+ {
+ icon: '๐',
+ title: 'Certificate Management',
+ items: [
+ 'Use descriptive certificate filenames',
+ 'Document certificate purposes',
+ 'Track certificate expiration dates',
+ 'Maintain certificate backups'
+ ]
+ },
+ {
+ icon: '🏢',
+ title: 'Production Deployment',
+ items: [
+ 'Use proper SSL certificates when possible',
+ 'Consider Let\'s Encrypt for public instances',
+ 'Implement certificate rotation procedures',
+ 'Monitor certificate expiration'
+ ]
+ },
+ {
+ icon: '๐',
+ title: 'Troubleshooting',
+ items: [
+ 'Verify certificate format (PEM)',
+ 'Check certificate chain completeness',
+ 'Ensure proper file permissions',
+ 'Test with openssl commands'
+ ]
+ }
+ ].map(section => (
+
+
+ {section.icon}
+
{section.title}
+
+
+ {section.items.map(item => (
+ -
+ •
+ {item}
+
+ ))}
+
+
+ ))}
+
+
+
+
+
+
+
+ Common Issues and Solutions
+
+
+
+
Certificate not being recognized
+
+ -
+ •
+ Ensure the certificate is in PEM format
+
+ -
+ •
+ Check that NODE_EXTRA_CA_CERTS points to the correct file
+
+ -
+ •
+ Restart the application after adding certificates
+
+
+
+
+
+
Still getting SSL errors
+
+ -
+ •
+ Verify the complete certificate chain is included
+
+ -
+ •
+ Check if intermediate certificates are needed
+
+ -
+ •
+ Ensure the certificate matches the server hostname
+
+
+
+
+
+
Certificate expired
+
+ -
+ •
+ Check certificate validity:
openssl x509 -in cert.crt -noout -dates
+
+ -
+ •
+ Update with new certificate from your CA
+
+ -
+ •
+ Restart Gitea Mirror after updating
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/pages/docs/index.astro b/src/pages/docs/index.astro
index 8dbf4a9..e20edbc 100644
--- a/src/pages/docs/index.astro
+++ b/src/pages/docs/index.astro
@@ -1,16 +1,16 @@
---
import MainLayout from '../../layouts/main.astro';
-import { LuSettings, LuRocket, LuBookOpen } from 'react-icons/lu';
+import { LuSettings, LuRocket, LuBookOpen, LuShield, LuKey, LuNetwork } from 'react-icons/lu';
// Define our documentation pages directly
const docs = [
{
- slug: 'architecture',
- title: 'Architecture',
- description: 'Comprehensive overview of the Gitea Mirror application architecture.',
+ slug: 'quickstart',
+ title: 'Quick Start Guide',
+ description: 'Get started with Gitea Mirror quickly.',
order: 1,
- icon: LuBookOpen,
- href: '/docs/architecture'
+ icon: LuRocket,
+ href: '/docs/quickstart'
},
{
slug: 'configuration',
@@ -21,12 +21,36 @@ const docs = [
href: '/docs/configuration'
},
{
- slug: 'quickstart',
- title: 'Quick Start Guide',
- description: 'Get started with Gitea Mirror quickly.',
+ slug: 'authentication',
+ title: 'Authentication & SSO',
+ description: 'Configure authentication methods, SSO providers, and OIDC.',
order: 3,
- icon: LuRocket,
- href: '/docs/quickstart'
+ icon: LuKey,
+ href: '/docs/authentication'
+ },
+ {
+ slug: 'architecture',
+ title: 'Architecture',
+ description: 'Comprehensive overview of the Gitea Mirror application architecture.',
+ order: 4,
+ icon: LuBookOpen,
+ href: '/docs/architecture'
+ },
+ {
+ slug: 'ca-certificates',
+ title: 'CA Certificates',
+ description: 'Configure custom CA certificates for self-signed Gitea instances.',
+ order: 5,
+ icon: LuShield,
+ href: '/docs/ca-certificates'
+ },
+ {
+ slug: 'advanced',
+ title: 'Advanced Topics',
+ description: 'Advanced configuration, troubleshooting, and deployment options.',
+ order: 6,
+ icon: LuNetwork,
+ href: '/docs/advanced'
}
];
diff --git a/src/pages/docs/quickstart.astro b/src/pages/docs/quickstart.astro
index 83675bd..2193f7d 100644
--- a/src/pages/docs/quickstart.astro
+++ b/src/pages/docs/quickstart.astro
@@ -244,7 +244,7 @@ bun run start
title: 'Create Admin Account',
items: [
"You'll be prompted on first access",
- 'Choose a secure username and password',
+ 'Enter your email address and password',
'This will be your administrator account'
]
},
diff --git a/src/pages/index.astro b/src/pages/index.astro
index 854fb9f..dfd1253 100644
--- a/src/pages/index.astro
+++ b/src/pages/index.astro
@@ -1,12 +1,13 @@
---
import '../styles/global.css';
import App from '@/components/layout/MainLayout';
-import { db, repositories, mirrorJobs, client } from '@/lib/db';
+import { db, repositories, mirrorJobs, users } from '@/lib/db';
+import { sql } from 'drizzle-orm';
import ThemeScript from '@/components/theme/ThemeScript.astro';
// Check if any users exist in the database
-const userCountResult = await client.execute(`SELECT COUNT(*) as count FROM users`);
-const userCount = userCountResult.rows[0].count;
+const userCountResult = await db.select({ count: sql`count(*)` }).from(users);
+const userCount = userCountResult[0]?.count || 0;
// Redirect to signup if no users exist
if (userCount === 0) {
diff --git a/src/pages/login.astro b/src/pages/login.astro
index 3cf82a6..e27a6db 100644
--- a/src/pages/login.astro
+++ b/src/pages/login.astro
@@ -1,12 +1,15 @@
---
import '../styles/global.css';
import ThemeScript from '@/components/theme/ThemeScript.astro';
-import { LoginForm } from '@/components/auth/LoginForm';
-import { client } from '../lib/db';
+import { LoginPage } from '@/components/auth/LoginPage';
+import { db, users } from '@/lib/db';
+import { sql } from 'drizzle-orm';
// Check if any users exist in the database
-const userCountResult = await client.execute(`SELECT COUNT(*) as count FROM users`);
-const userCount = userCountResult.rows[0].count;
+const userCountResult = await db
+ .select({ count: sql`count(*)` })
+ .from(users);
+const userCount = userCountResult[0].count;
// Redirect to signup if no users exist
if (userCount === 0) {
@@ -27,7 +30,7 @@ const generator = Astro.generator;
-
+