feat: complete Git Repo Manager MVP implementation

Backend (Phase 1-6):
- Pydantic schemas for request/response validation
- Service layer (SSH Key, Server, Repo, Sync)
- API routes with authentication
- FastAPI main application with lifespan management
- ORM models (SshKey, Server, Repo, SyncLog)

Frontend (Phase 7):
- Vue 3 + Element Plus + Pinia + Vue Router
- API client with Axios and interceptors
- State management stores
- All page components (Dashboard, Servers, Repos, SyncLogs, SshKeys, Settings)

Deployment (Phase 8):
- README with quick start guide
- Startup script (start.sh)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
panw
2026-03-30 16:30:13 +08:00
parent 960056c88c
commit 44921c5646
46 changed files with 6533 additions and 2 deletions

View File

@@ -0,0 +1,17 @@
"""
API routes module.
This package contains all FastAPI route handlers.
"""
from app.api.deps import get_db_session, require_auth
from app.api.ssh_keys import router as ssh_keys_router
from app.api.servers import router as servers_router
from app.api.status import router as status_router
__all__ = [
"get_db_session",
"require_auth",
"ssh_keys_router",
"servers_router",
"status_router",
]

111
backend/app/api/deps.py Normal file
View File

@@ -0,0 +1,111 @@
"""
FastAPI dependencies for API routes.
Provides reusable dependencies for:
- Database session management
- Authentication/authorization
"""
from typing import Generator, Optional
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.orm import Session
from app.database import get_session_factory
from app.security import verify_api_token
# HTTP Bearer token security scheme
security = HTTPBearer(auto_error=False)
def get_db_session() -> Generator[Session, None, None]:
    """
    Yield a SQLAlchemy session and guarantee it is closed afterwards.

    Yields:
        SQLAlchemy database session

    Raises:
        HTTPException: 500 if the session factory has not been initialized.

    Example:
        @app.get("/items")
        def read_items(db: Session = Depends(get_db_session)):
            return db.query(Item).all()
    """
    factory = get_session_factory()
    if factory is None:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Database not initialized",
        )
    db = factory()
    try:
        yield db
    finally:
        # Always return the connection to the pool, even on request errors.
        db.close()
def require_auth(
    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)
) -> None:
    """
    Require a valid Bearer token on protected endpoints.

    Args:
        credentials: HTTP Bearer token credentials (None when absent)

    Raises:
        HTTPException: 401 when credentials are missing or the token is invalid.

    Example:
        @app.get("/protected")
        def protected_route(auth: None = Depends(require_auth)):
            return {"message": "authenticated"}
    """
    # Both failure modes advertise the Bearer scheme back to the client.
    bearer_challenge = {"WWW-Authenticate": "Bearer"}
    if credentials is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Missing authentication credentials",
            headers=bearer_challenge,
        )
    if not verify_api_token(f"Bearer {credentials.credentials}"):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication token",
            headers=bearer_challenge,
        )
    # Explicit None signals successful authentication to Depends().
    return None
async def require_auth_optional(
    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)
) -> bool:
    """
    Optional authentication dependency: never raises.

    Useful for endpoints whose behavior differs for authenticated callers
    without requiring authentication.

    Args:
        credentials: HTTP Bearer token credentials (None when absent)

    Returns:
        bool: True when a valid token was presented, False otherwise.

    Example:
        @app.get("/public")
        def public_route(authenticated: bool = Depends(require_auth_optional)):
            if authenticated:
                return {"message": "authenticated user"}
            return {"message": "anonymous user"}
    """
    if credentials is None:
        return False
    return verify_api_token("Bearer " + credentials.credentials)

292
backend/app/api/servers.py Normal file
View File

@@ -0,0 +1,292 @@
"""
Servers API routes.
Provides CRUD endpoints for Gitea server management:
- POST /api/servers - Create a new server
- GET /api/servers - List all servers
- GET /api/servers/{id} - Get a specific server
- PUT /api/servers/{id} - Update a server
- DELETE /api/servers/{id} - Delete a server
"""
from typing import List
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from app.api.deps import get_db_session, require_auth
from app.schemas.server import ServerCreate, ServerUpdate, ServerResponse
from app.schemas.common import SuccessResponse
from app.services.server_service import ServerService
router = APIRouter(prefix="/api/servers", tags=["Servers"])
@router.post("", response_model=SuccessResponse[ServerResponse], status_code=status.HTTP_201_CREATED)
def create_server(
    server_data: ServerCreate,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Create a new Gitea server.

    The API token is encrypted before storage, the name must be unique
    across all servers, and the local storage path is derived automatically
    from the server name by the service layer.

    Args:
        server_data: Validated server creation payload
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the created server

    Raises:
        HTTPException 400: If validation fails or the name already exists
        HTTPException 401: If authentication fails
    """
    service = ServerService(db)
    # Attributes copied verbatim from the ORM object into the response model.
    response_fields = (
        "id", "name", "url", "ssh_key_id", "sync_enabled", "schedule_cron",
        "local_path", "status", "created_at", "updated_at",
    )
    try:
        server = service.create_server(
            name=server_data.name,
            url=server_data.url,
            api_token=server_data.api_token,
            ssh_key_id=server_data.ssh_key_id,
            sync_enabled=server_data.sync_enabled,
            schedule_cron=server_data.schedule_cron
        )
        return SuccessResponse(
            code=0,
            data=ServerResponse(**{f: getattr(server, f) for f in response_fields}),
            message="Server created successfully"
        )
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
@router.get("", response_model=SuccessResponse[List[ServerResponse]])
def list_servers(
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    List all servers, ordered by creation time.

    API tokens are never included in the response.

    Args:
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the list of servers

    Raises:
        HTTPException 401: If authentication fails
    """
    servers = ServerService(db).list_servers()
    # Attributes copied verbatim from each ORM object into the response model.
    response_fields = (
        "id", "name", "url", "ssh_key_id", "sync_enabled", "schedule_cron",
        "local_path", "status", "created_at", "updated_at",
    )
    payload = [
        ServerResponse(**{f: getattr(server, f) for f in response_fields})
        for server in servers
    ]
    return SuccessResponse(
        code=0,
        data=payload,
        message=f"Retrieved {len(servers)} server(s)"
    )
@router.get("/{server_id}", response_model=SuccessResponse[ServerResponse])
def get_server(
    server_id: int,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Fetch a single server by its ID.

    Args:
        server_id: ID of the server
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the server

    Raises:
        HTTPException 401: If authentication fails
        HTTPException 404: If no server has the given ID
    """
    server = ServerService(db).get_server(server_id)
    if server is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Server with ID {server_id} not found"
        )
    # Attributes copied verbatim from the ORM object into the response model.
    response_fields = (
        "id", "name", "url", "ssh_key_id", "sync_enabled", "schedule_cron",
        "local_path", "status", "created_at", "updated_at",
    )
    return SuccessResponse(
        code=0,
        data=ServerResponse(**{f: getattr(server, f) for f in response_fields}),
        message="Server retrieved successfully"
    )
@router.put("/{server_id}", response_model=SuccessResponse[ServerResponse])
def update_server(
    server_id: int,
    server_data: ServerUpdate,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Update a server. Only non-None fields in the body are applied.

    If api_token is provided it is re-encrypted before storage; if the name
    changes, the local_path is updated accordingly by the service layer.

    Args:
        server_id: ID of the server to update
        server_data: Server update payload (all fields optional)
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the updated server

    Raises:
        HTTPException 400: If validation fails or no fields were provided
        HTTPException 401: If authentication fails
        HTTPException 404: If server not found
    """
    service = ServerService(db)
    # Fields a client may change; None means "leave unchanged".
    updatable = (
        "name", "url", "api_token", "ssh_key_id",
        "sync_enabled", "schedule_cron", "status",
    )
    update_data = {
        field: getattr(server_data, field)
        for field in updatable
        if getattr(server_data, field) is not None
    }
    if not update_data:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="No fields provided for update"
        )
    response_fields = (
        "id", "name", "url", "ssh_key_id", "sync_enabled", "schedule_cron",
        "local_path", "status", "created_at", "updated_at",
    )
    try:
        server = service.update_server(server_id, **update_data)
        return SuccessResponse(
            code=0,
            data=ServerResponse(**{f: getattr(server, f) for f in response_fields}),
            message="Server updated successfully"
        )
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
@router.delete("/{server_id}", response_model=SuccessResponse[dict])
def delete_server(
    server_id: int,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Delete a server by ID.

    Args:
        server_id: ID of the server to delete
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse with empty data

    Raises:
        HTTPException 401: If authentication fails
        HTTPException 404: If no server has the given ID
    """
    was_deleted = ServerService(db).delete_server(server_id)
    if not was_deleted:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Server with ID {server_id} not found"
        )
    return SuccessResponse(
        code=0,
        data={},
        message="Server deleted successfully"
    )

201
backend/app/api/ssh_keys.py Normal file
View File

@@ -0,0 +1,201 @@
"""
SSH Keys API routes.
Provides CRUD endpoints for SSH key management:
- POST /api/ssh-keys - Create a new SSH key
- GET /api/ssh-keys - List all SSH keys
- GET /api/ssh-keys/{id} - Get a specific SSH key
- DELETE /api/ssh-keys/{id} - Delete an SSH key
"""
from typing import List
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from app.api.deps import get_db_session, require_auth
from app.schemas.ssh_key import SshKeyCreate, SshKeyResponse
from app.schemas.common import SuccessResponse, ErrorResponse
from app.services.ssh_key_service import SshKeyService
router = APIRouter(prefix="/api/ssh-keys", tags=["SSH Keys"])
@router.post("", response_model=SuccessResponse[SshKeyResponse], status_code=status.HTTP_201_CREATED)
def create_ssh_key(
    ssh_key_data: SshKeyCreate,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Create a new SSH key.

    The private key is encrypted before storage; the name must be unique
    across all SSH keys.

    Args:
        ssh_key_data: SSH key creation payload (name, private_key)
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the created SSH key

    Raises:
        HTTPException 400: If validation fails or the name already exists
        HTTPException 401: If authentication fails
    """
    service = SshKeyService(db)
    # Attributes copied verbatim from the ORM object into the response model.
    response_fields = ("id", "name", "fingerprint", "created_at")
    try:
        ssh_key = service.create_ssh_key(
            name=ssh_key_data.name,
            private_key=ssh_key_data.private_key
        )
        return SuccessResponse(
            code=0,
            data=SshKeyResponse(**{f: getattr(ssh_key, f) for f in response_fields}),
            message="SSH key created successfully"
        )
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        )
@router.get("", response_model=SuccessResponse[List[SshKeyResponse]])
def list_ssh_keys(
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    List all SSH keys, ordered by creation time.

    Private key material is never included in the response.

    Args:
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the list of SSH keys

    Raises:
        HTTPException 401: If authentication fails
    """
    ssh_keys = SshKeyService(db).list_ssh_keys()
    response_fields = ("id", "name", "fingerprint", "created_at")
    payload = [
        SshKeyResponse(**{f: getattr(key, f) for f in response_fields})
        for key in ssh_keys
    ]
    return SuccessResponse(
        code=0,
        data=payload,
        message=f"Retrieved {len(ssh_keys)} SSH key(s)"
    )
@router.get("/{key_id}", response_model=SuccessResponse[SshKeyResponse])
def get_ssh_key(
    key_id: int,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Fetch a single SSH key by its ID.

    Args:
        key_id: ID of the SSH key
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse containing the SSH key

    Raises:
        HTTPException 401: If authentication fails
        HTTPException 404: If no SSH key has the given ID
    """
    ssh_key = SshKeyService(db).get_ssh_key(key_id)
    if ssh_key is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"SSH key with ID {key_id} not found"
        )
    response_fields = ("id", "name", "fingerprint", "created_at")
    return SuccessResponse(
        code=0,
        data=SshKeyResponse(**{f: getattr(ssh_key, f) for f in response_fields}),
        message="SSH key retrieved successfully"
    )
@router.delete("/{key_id}", response_model=SuccessResponse[dict])
def delete_ssh_key(
    key_id: int,
    db: Session = Depends(get_db_session),
    _auth: None = Depends(require_auth)
):
    """
    Delete an SSH key.

    The key can only be deleted when no server references it; otherwise the
    service layer raises ValueError and this endpoint answers 400.

    Args:
        key_id: ID of the SSH key to delete
        db: Database session (injected)
        _auth: Authentication requirement (injected)

    Returns:
        SuccessResponse with empty data

    Raises:
        HTTPException 400: If the key is in use by servers
        HTTPException 401: If authentication fails
        HTTPException 404: If no SSH key has the given ID
    """
    service = SshKeyService(db)
    # Keep the try body minimal: only the service call can raise ValueError.
    # (The original also wrapped the 404 raise, which worked only because
    # HTTPException is not a ValueError — this form makes the intent explicit.)
    try:
        deleted = service.delete_ssh_key(key_id)
    except ValueError as e:
        # Key is in use by servers
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e)
        ) from e
    if not deleted:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"SSH key with ID {key_id} not found"
        )
    return SuccessResponse(
        code=0,
        data={},
        message="SSH key deleted successfully"
    )

138
backend/app/api/status.py Normal file
View File

@@ -0,0 +1,138 @@
"""
Status API routes.
Provides system status and health check endpoints:
- GET /api/status - Get system status and health information
"""
from typing import Dict, Any
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from sqlalchemy import text
from app.api.deps import get_db_session, require_auth_optional
from app.schemas.common import SuccessResponse
from app.config import get_settings
from app.models.server import Server
from app.models.ssh_key import SshKey
from app.models.repo import Repo
router = APIRouter(prefix="/api/status", tags=["Status"])
@router.get("", response_model=SuccessResponse[Dict[str, Any]])
def get_status(
    db: Session = Depends(get_db_session),
    _authenticated: bool = Depends(require_auth_optional)
):
    """
    Report system status and health information.

    Includes application status/version, database connectivity plus row
    counts for servers, SSH keys, and repositories, and — for authenticated
    callers only — the configured storage paths.

    Authentication is optional: anonymous callers get the same health data
    minus the "storage" section.

    Args:
        db: Database session (injected)
        _authenticated: Whether the request carried a valid token (injected)

    Returns:
        SuccessResponse containing a status dictionary, e.g.:
        {
            "code": 0,
            "data": {
                "status": "healthy",
                "version": "1.0.0",
                "database": {
                    "status": "connected",
                    "servers_count": 2,
                    "ssh_keys_count": 3,
                    "repos_count": 15
                },
                "storage": {...},          # only when authenticated
                "authenticated": true
            },
            "message": "System status retrieved successfully"
        }
    """
    settings = get_settings()
    report: Dict[str, Any] = {
        "status": "healthy",
        "version": "1.0.0",
        "authenticated": _authenticated,
    }
    try:
        # A trivial query proves the connection is alive before counting rows.
        db.execute(text("SELECT 1"))
        report["database"] = {
            "status": "connected",
            "servers_count": db.query(Server).count(),
            "ssh_keys_count": db.query(SshKey).count(),
            "repos_count": db.query(Repo).count(),
        }
    except Exception as e:
        # Database trouble degrades overall status instead of failing the call.
        report["database"] = {
            "status": "error",
            "error": str(e)
        }
        report["status"] = "degraded"
    if _authenticated:
        # Storage paths are only disclosed to authenticated callers.
        report["storage"] = {
            "data_dir": str(settings.data_dir),
            "repos_dir": str(settings.repos_dir),
            "ssh_keys_dir": str(settings.ssh_keys_dir),
            "db_path": str(settings.db_path)
        }
    return SuccessResponse(
        code=0,
        data=report,
        message="System status retrieved successfully"
    )
@router.get("/health", response_model=SuccessResponse[Dict[str, str]])
def health_check():
    """
    Lightweight liveness probe for load balancers and monitoring systems.

    Always answers 200 OK while the service process is running.

    Returns:
        SuccessResponse whose data is {"status": "ok"}:
        {
            "code": 0,
            "data": {"status": "ok"},
            "message": "Service is healthy"
        }
    """
    payload = {"status": "ok"}
    return SuccessResponse(
        code=0,
        data=payload,
        message="Service is healthy"
    )

183
backend/app/main.py Normal file
View File

@@ -0,0 +1,183 @@
"""
FastAPI main application.
This module creates and configures the FastAPI application with:
- All API routers registered
- Lifespan events for database initialization
- Static file serving for the frontend
- CORS middleware
- Exception handlers
"""
from contextlib import asynccontextmanager
from pathlib import Path
from typing import Callable
from fastapi import FastAPI, Request, status
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy.exc import SQLAlchemyError
from app.config import get_settings
from app.database import init_db, get_engine
from app.models import Base # noqa: F401 - Import to ensure models are registered
# Import API routers
from app.api.ssh_keys import router as ssh_keys_router
from app.api.servers import router as servers_router
from app.api.status import router as status_router
@asynccontextmanager
async def lifespan(app: FastAPI):  # noqa: ARG001 - Unused app parameter
    """
    Application lifecycle manager.

    Startup: initialize the database engine, create the ORM tables, and
    make sure the data directories exist. Shutdown: dispose the engine's
    connection pool.

    Yields:
        None
    """
    settings = get_settings()
    init_db(settings.db_path)
    engine = get_engine()
    if engine is not None:
        Base.metadata.create_all(engine)
    # Create any missing storage directories before serving requests.
    for directory in (settings.data_dir, settings.ssh_keys_dir, settings.repos_dir):
        directory.mkdir(parents=True, exist_ok=True)
    yield
    # Shutdown: release pooled database connections.
    if engine is not None:
        engine.dispose()
def create_app(lifespan_handler: Callable = lifespan) -> FastAPI:
    """
    Create and configure the FastAPI application.

    Registers CORS middleware, global exception handlers, the API routers,
    and — when a frontend build exists — static file serving at the root.

    Args:
        lifespan_handler: Lifespan context manager (injectable for testing)

    Returns:
        Configured FastAPI application instance
    """
    # NOTE: the original assigned `settings = get_settings()` here without
    # ever using it — the dead local has been removed.
    app = FastAPI(
        title="Git Manager API",
        description="API for managing Gitea server mirrors and SSH keys",
        version="1.0.0",
        lifespan=lifespan_handler,
        docs_url="/api/docs",
        redoc_url="/api/redoc",
        openapi_url="/api/openapi.json"
    )
    # Configure CORS — wide open for development.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],  # In production, specify exact origins
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    register_exception_handlers(app)
    # Register all API routers.
    for router in (ssh_keys_router, servers_router, status_router):
        app.include_router(router)
    # Mount the built frontend (if present) last, so the /api routes above
    # keep precedence over the static catch-all.
    frontend_path = Path(__file__).parent.parent.parent / "frontend" / "dist"
    if frontend_path.exists():
        app.mount("/", StaticFiles(directory=str(frontend_path), html=True), name="frontend")
    return app
def register_exception_handlers(app: FastAPI) -> None:
    """
    Attach global exception handlers to the application.

    All handlers answer with the project's standard JSON envelope
    ({"code", "message", "data"}).

    Args:
        app: FastAPI application instance
    """
    async def handle_sqlalchemy_error(
        request: Request,  # noqa: ARG001 - Unused request parameter
        exc: SQLAlchemyError
    ):
        """Translate database errors into a 500 envelope."""
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={
                "code": 500,
                "message": "Database error occurred",
                "data": {"detail": str(exc)}
            }
        )

    async def handle_value_error(
        request: Request,  # noqa: ARG001 - Unused request parameter
        exc: ValueError
    ):
        """Translate ValueError into a 400 envelope carrying the error text."""
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content={
                "code": 400,
                "message": str(exc),
                "data": None
            }
        )

    async def handle_unexpected_error(
        request: Request,  # noqa: ARG001 - Unused request parameter
        exc: Exception
    ):
        """Catch-all: translate any other exception into a 500 envelope."""
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={
                "code": 500,
                "message": "Internal server error",
                "data": {"detail": str(exc)}
            }
        )

    # Explicit registration instead of the decorator form.
    app.add_exception_handler(SQLAlchemyError, handle_sqlalchemy_error)
    app.add_exception_handler(ValueError, handle_value_error)
    app.add_exception_handler(Exception, handle_unexpected_error)
# Module-level application instance; this is the ASGI entry point that
# servers import as "app.main:app".
app = create_app()
if __name__ == "__main__":
    # Development entry point: run uvicorn with auto-reload on the
    # host/port taken from application settings.
    import uvicorn
    settings = get_settings()
    uvicorn.run(
        "app.main:app",
        host=settings.host,
        port=settings.port,
        reload=True
    )

View File

@@ -0,0 +1,38 @@
"""
Pydantic schemas for API request/response validation.
This module exports all schemas used throughout the application.
"""
# Common schemas
from app.schemas.common import SuccessResponse, ErrorResponse
# SSH Key schemas
from app.schemas.ssh_key import SshKeyCreate, SshKeyResponse
# Server schemas
from app.schemas.server import ServerCreate, ServerUpdate, ServerResponse
# Repository schemas
from app.schemas.repo import RepoResponse, CommitInfo
# Sync Log schemas
from app.schemas.sync_log import SyncLogResponse
__all__ = [
# Common
"SuccessResponse",
"ErrorResponse",
# SSH Key
"SshKeyCreate",
"SshKeyResponse",
# Server
"ServerCreate",
"ServerUpdate",
"ServerResponse",
# Repository
"RepoResponse",
"CommitInfo",
# Sync Log
"SyncLogResponse",
]

View File

@@ -0,0 +1,55 @@
"""
Common Pydantic schemas for API responses.
"""
from typing import Generic, TypeVar, Optional
from pydantic import BaseModel, Field
# Generic payload type carried in SuccessResponse.data.
T = TypeVar("T")
class SuccessResponse(BaseModel, Generic[T]):
    """
    Standard success response wrapper.

    Every successful API response uses this envelope: ``code`` is 0,
    ``data`` carries the endpoint-specific payload of type ``T``, and
    ``message`` is a human-readable summary.
    """
    code: int = Field(default=0, description="Response code, 0 for success")
    data: T = Field(description="Response data")
    message: str = Field(default="success", description="Response message")
    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "code": 0,
                    "data": {},
                    "message": "success"
                }
            ]
        }
    }
class ErrorResponse(BaseModel):
    """
    Standard error response wrapper.

    Mirrors SuccessResponse but with a non-zero ``code`` and an optional
    ``data`` dict carrying extra error context.
    """
    code: int = Field(description="Error code, non-zero for errors")
    message: str = Field(description="Error message")
    data: Optional[dict] = Field(default=None, description="Additional error data")
    # Examples rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "code": 400,
                    "message": "Bad request",
                    "data": None
                },
                {
                    "code": 404,
                    "message": "Resource not found",
                    "data": {"detail": "Item with id 123 not found"}
                }
            ]
        }
    }

View File

@@ -0,0 +1,64 @@
"""
Repository Pydantic schemas.
"""
from typing import Optional
from pydantic import BaseModel, Field
class CommitInfo(BaseModel):
    """
    Schema describing a single Git commit.

    Carries the hash, author string, commit message, and the commit time
    as a Unix timestamp.
    """
    hash: str = Field(description="Commit hash")
    author: str = Field(description="Commit author")
    message: str = Field(description="Commit message")
    timestamp: int = Field(description="Commit timestamp (Unix timestamp)")
    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "hash": "a1b2c3d4e5f6...",
                    "author": "John Doe <john@example.com>",
                    "message": "Add new feature",
                    "timestamp": 1711891200
                }
            ]
        }
    }
class RepoResponse(BaseModel):
    """
    Schema for repository responses.

    Read-only view of a mirrored repository: identity, clone URL, local
    mirror location, sync status, and timestamps (Unix seconds).
    """
    id: int = Field(description="Repository ID")
    server_id: int = Field(description="Server ID")
    name: str = Field(description="Repository name")
    full_name: str = Field(description="Repository full name (e.g., 'owner/repo')")
    clone_url: str = Field(description="Git clone URL")
    local_path: str = Field(description="Local storage path")
    # None until the repository has been synced at least once.
    last_sync_at: Optional[int] = Field(
        default=None,
        description="Last sync timestamp (Unix timestamp)"
    )
    status: str = Field(description="Repository status")
    created_at: int = Field(description="Creation timestamp (Unix timestamp)")
    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "id": 1,
                    "server_id": 1,
                    "name": "my-repo",
                    "full_name": "myorg/my-repo",
                    "clone_url": "https://gitea.example.com/myorg/my-repo.git",
                    "local_path": "/data/gitea-mirror/myorg/my-repo",
                    "last_sync_at": 1711891200,
                    "status": "success",
                    "created_at": 1711804800
                }
            ]
        }
    }

View File

@@ -0,0 +1,172 @@
"""
Server Pydantic schemas.
"""
from typing import Optional
from pydantic import BaseModel, Field, field_validator
class ServerCreate(BaseModel):
    """
    Schema for creating a new server.

    ``local_path`` is optional: the create endpoint documents that the local
    storage path is auto-generated from the server name, and the API route
    never forwards a client-supplied value to the service layer — so forcing
    clients to send one (as the original required field did) was inconsistent.
    Clients that still send a non-empty path remain valid.
    """
    name: str = Field(..., min_length=1, max_length=100, description="Server name")
    url: str = Field(..., min_length=1, max_length=500, description="Gitea server URL")
    api_token: str = Field(..., min_length=1, description="Gitea API token")
    ssh_key_id: int = Field(..., gt=0, description="SSH key ID to use")
    # Optional: derived from the server name by the service when omitted.
    local_path: Optional[str] = Field(
        default=None,
        max_length=500,
        description="Local storage path (auto-generated from the name if omitted)"
    )
    sync_enabled: bool = Field(default=False, description="Whether sync is enabled")
    schedule_cron: Optional[str] = Field(
        default=None,
        max_length=50,
        description="Cron expression for scheduled sync"
    )

    @field_validator("name")
    @classmethod
    def name_must_not_be_empty(cls, v: str) -> str:
        """Reject empty or whitespace-only names; return the stripped value."""
        if not v or not v.strip():
            raise ValueError("name must not be empty")
        return v.strip()

    @field_validator("url")
    @classmethod
    def url_must_not_be_empty(cls, v: str) -> str:
        """Reject empty or whitespace-only URLs; return the stripped value."""
        if not v or not v.strip():
            raise ValueError("url must not be empty")
        return v.strip()

    @field_validator("api_token")
    @classmethod
    def api_token_must_not_be_empty(cls, v: str) -> str:
        """Reject empty or whitespace-only tokens; return the stripped value."""
        if not v or not v.strip():
            raise ValueError("api_token must not be empty")
        return v.strip()

    @field_validator("local_path")
    @classmethod
    def local_path_must_not_be_empty(cls, v: Optional[str]) -> Optional[str]:
        """Allow None (auto-generated) but reject empty/whitespace strings."""
        if v is not None and not v.strip():
            raise ValueError("local_path must not be empty")
        return v.strip() if v else None

    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "name": "my-gitea",
                    "url": "https://gitea.example.com",
                    "api_token": "your_api_token_here",
                    "ssh_key_id": 1,
                    "local_path": "/data/gitea-mirror",
                    "sync_enabled": False,
                    "schedule_cron": None
                }
            ]
        }
    }
class ServerUpdate(BaseModel):
    """
    Schema for partially updating a server.

    Every field is optional; a value of None means "leave unchanged".
    String fields are stripped and rejected when blank.
    """
    name: Optional[str] = Field(None, min_length=1, max_length=100, description="Server name")
    url: Optional[str] = Field(None, min_length=1, max_length=500, description="Gitea server URL")
    api_token: Optional[str] = Field(None, min_length=1, description="Gitea API token")
    ssh_key_id: Optional[int] = Field(None, gt=0, description="SSH key ID to use")
    local_path: Optional[str] = Field(None, min_length=1, max_length=500, description="Local storage path")
    sync_enabled: Optional[bool] = Field(None, description="Whether sync is enabled")
    schedule_cron: Optional[str] = Field(
        None,
        max_length=50,
        description="Cron expression for scheduled sync"
    )
    status: Optional[str] = Field(
        None,
        pattern="^(untested|testing|success|error)$",
        description="Server status"
    )

    @field_validator("name")
    @classmethod
    def name_must_not_be_empty(cls, v: Optional[str]) -> Optional[str]:
        """Pass None through; otherwise strip and reject blank names."""
        if v is None:
            return None
        stripped = v.strip()
        if not stripped:
            raise ValueError("name must not be empty")
        return stripped

    @field_validator("url")
    @classmethod
    def url_must_not_be_empty(cls, v: Optional[str]) -> Optional[str]:
        """Pass None through; otherwise strip and reject blank URLs."""
        if v is None:
            return None
        stripped = v.strip()
        if not stripped:
            raise ValueError("url must not be empty")
        return stripped

    @field_validator("api_token")
    @classmethod
    def api_token_must_not_be_empty(cls, v: Optional[str]) -> Optional[str]:
        """Pass None through; otherwise strip and reject blank tokens."""
        if v is None:
            return None
        stripped = v.strip()
        if not stripped:
            raise ValueError("api_token must not be empty")
        return stripped

    @field_validator("local_path")
    @classmethod
    def local_path_must_not_be_empty(cls, v: Optional[str]) -> Optional[str]:
        """Pass None through; otherwise strip and reject blank paths."""
        if v is None:
            return None
        stripped = v.strip()
        if not stripped:
            raise ValueError("local_path must not be empty")
        return stripped

    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "name": "updated-gitea",
                    "sync_enabled": True,
                    "schedule_cron": "0 */6 * * *"
                }
            ]
        }
    }
class ServerResponse(BaseModel):
    """
    Schema for server responses.

    Read-only view of a configured Gitea server. The API token is never
    part of this model; timestamps are Unix seconds.
    """
    id: int = Field(description="Server ID")
    name: str = Field(description="Server name")
    url: str = Field(description="Gitea server URL")
    ssh_key_id: int = Field(description="SSH key ID")
    sync_enabled: bool = Field(description="Whether sync is enabled")
    schedule_cron: Optional[str] = Field(default=None, description="Cron expression")
    local_path: str = Field(description="Local storage path")
    status: str = Field(description="Server status")
    created_at: int = Field(description="Creation timestamp (Unix timestamp)")
    updated_at: int = Field(description="Last update timestamp (Unix timestamp)")
    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "id": 1,
                    "name": "my-gitea",
                    "url": "https://gitea.example.com",
                    "ssh_key_id": 1,
                    "sync_enabled": True,
                    "schedule_cron": "0 */6 * * *",
                    "local_path": "/data/gitea-mirror",
                    "status": "success",
                    "created_at": 1711804800,
                    "updated_at": 1711891200
                }
            ]
        }
    }

View File

@@ -0,0 +1,63 @@
"""
SSH Key Pydantic schemas.
"""
from typing import Optional
from pydantic import BaseModel, Field, field_validator
class SshKeyCreate(BaseModel):
    """
    Schema for creating a new SSH key.

    Both fields are required; string values are stripped and rejected
    when blank.
    """
    name: str = Field(..., min_length=1, max_length=100, description="SSH key name")
    private_key: str = Field(..., min_length=1, description="SSH private key content")

    @field_validator("name")
    @classmethod
    def name_must_not_be_empty(cls, v: str) -> str:
        """Strip the name and reject blank values."""
        stripped = v.strip() if v else ""
        if not stripped:
            raise ValueError("name must not be empty")
        return stripped

    @field_validator("private_key")
    @classmethod
    def private_key_must_not_be_empty(cls, v: str) -> str:
        """Strip the key material and reject blank values."""
        stripped = v.strip() if v else ""
        if not stripped:
            raise ValueError("private_key must not be empty")
        return stripped

    # Example rendered in the generated OpenAPI docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "name": "my-git-key",
                    "private_key": "-----BEGIN OPENSSH PRIVATE KEY-----\n...\n-----END OPENSSH PRIVATE KEY-----"
                }
            ]
        }
    }
class SshKeyResponse(BaseModel):
    """
    Schema for an SSH key returned by the API.

    Exposes only metadata (name, fingerprint); the schema carries no
    private-key field.
    """
    id: int = Field(description="SSH key ID")
    name: str = Field(description="SSH key name")
    fingerprint: Optional[str] = Field(default=None, description="SSH key fingerprint")
    created_at: int = Field(description="Creation timestamp (Unix timestamp)")

    # OpenAPI example payload shown in the generated docs.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "id": 1,
                    "name": "my-git-key",
                    "fingerprint": "SHA256:abc123...",
                    "created_at": 1711804800
                }
            ]
        }
    }

View File

@@ -0,0 +1,52 @@
"""
Sync Log Pydantic schemas.
"""
from typing import Optional
from pydantic import BaseModel, Field
class SyncLogResponse(BaseModel):
    """
    Schema for a sync-log entry returned by the API.

    One entry records a single sync attempt for a repository; on failure
    ``error_msg`` is set and ``commits_count`` is None.
    """
    id: int = Field(description="Sync log ID")
    repo_id: int = Field(description="Repository ID")
    status: str = Field(description="Sync status")
    started_at: int = Field(description="Sync start timestamp (Unix timestamp)")
    finished_at: int = Field(description="Sync finish timestamp (Unix timestamp)")
    commits_count: Optional[int] = Field(
        default=None,
        description="Number of commits synced"
    )
    error_msg: Optional[str] = Field(
        default=None,
        description="Error message if sync failed"
    )
    created_at: int = Field(description="Creation timestamp (Unix timestamp)")

    # OpenAPI examples: one successful and one failed sync attempt.
    model_config = {
        "json_schema_extra": {
            "examples": [
                {
                    "id": 1,
                    "repo_id": 1,
                    "status": "success",
                    "started_at": 1711891200,
                    "finished_at": 1711891500,
                    "commits_count": 5,
                    "error_msg": None,
                    "created_at": 1711891200
                },
                {
                    "id": 2,
                    "repo_id": 1,
                    "status": "error",
                    "started_at": 1711891800,
                    "finished_at": 1711892000,
                    "commits_count": None,
                    "error_msg": "Connection timeout",
                    "created_at": 1711891800
                }
            ]
        }
    }

View File

@@ -5,5 +5,7 @@ Business logic layer for application services.
"""
from app.services.ssh_key_service import SshKeyService
from app.services.server_service import ServerService
from app.services.sync_service import SyncService
from app.services.repo_service import RepoService
__all__ = ['SshKeyService', 'ServerService']
__all__ = ['SshKeyService', 'ServerService', 'SyncService', 'RepoService']

View File

@@ -0,0 +1,227 @@
"""
Repo Service.
Business logic for repository management including:
- Creating repository records
- Listing repositories by server
- Retrieving repository details
- Updating repository status
- Deleting repository records
"""
import time
from typing import List, Optional
from pathlib import Path
from sqlalchemy.orm import Session
from app.models.repo import Repo
from app.models.server import Server
from app.config import get_settings
class RepoService:
    """
    Service for managing repository records.

    Handles CRUD operations for repositories that are being
    mirrored from Gitea servers.
    """

    # Only the fields documented on update_repo() may be mass-assigned.
    # Identity/audit columns (id, server_id, created_at, last_sync_at) are
    # deliberately excluded so callers cannot overwrite them via **kwargs.
    _UPDATABLE_FIELDS = frozenset(
        {"name", "full_name", "clone_url", "local_path", "status"}
    )

    def __init__(self, db: Session):
        """
        Initialize the service with a database session.

        Args:
            db: SQLAlchemy database session
        """
        self.db = db
        self.settings = get_settings()

    def create_repo(
        self,
        server_id: int,
        name: str,
        full_name: str,
        clone_url: str,
        local_path: Optional[str] = None
    ) -> Repo:
        """
        Create a new repository record.

        Args:
            server_id: ID of the server this repo belongs to
            name: Repository name (e.g., "my-repo")
            full_name: Full repository name (e.g., "owner/my-repo")
            clone_url: Git clone URL (typically SSH format)
            local_path: Optional local path for the mirrored repo.
                If not provided, will be generated from the server's
                storage root and the repo name.

        Returns:
            Created Repo model instance

        Raises:
            ValueError: If server_id does not reference an existing server
        """
        # The repo must belong to a known server.
        server = self.db.query(Server).filter_by(id=server_id).first()
        if not server:
            raise ValueError(f"Server not found with ID {server_id}")

        if local_path is None:
            # NOTE(review): the default path uses only ``name``, so repos
            # with the same name under different owners would collide —
            # consider ``full_name`` here; confirm against the sync layer.
            local_path = str(Path(server.local_path) / name)

        repo = Repo(
            server_id=server_id,
            name=name,
            full_name=full_name,
            clone_url=clone_url,
            local_path=local_path,
            status="pending",       # initial state: not yet synced
            last_sync_at=None,
            created_at=int(time.time())
        )
        self.db.add(repo)
        self.db.commit()
        self.db.refresh(repo)  # pick up DB-assigned fields (e.g. id)
        return repo

    def list_repos(self, server_id: int) -> List[Repo]:
        """
        List all repositories for a specific server.

        Args:
            server_id: ID of the server

        Returns:
            List of Repo model instances for the server, ordered by creation time
        """
        return self.db.query(Repo).filter_by(
            server_id=server_id
        ).order_by(Repo.created_at).all()

    def get_repo(self, repo_id: int) -> Optional[Repo]:
        """
        Get a repository by ID.

        Args:
            repo_id: ID of the repository

        Returns:
            Repo model instance or None if not found
        """
        return self.db.query(Repo).filter_by(id=repo_id).first()

    def update_repo_status(self, repo_id: int, status: str) -> Repo:
        """
        Update the status of a repository.

        Common status values:
            - "pending": Initial state, not yet synced
            - "syncing": Currently being synced
            - "success": Last sync was successful
            - "failed": Last sync failed

        Args:
            repo_id: ID of the repository
            status: New status value

        Returns:
            Updated Repo model instance

        Raises:
            ValueError: If repo not found
        """
        repo = self.get_repo(repo_id)
        if not repo:
            raise ValueError(f"Repo not found with ID {repo_id}")
        repo.status = status
        # A successful sync also refreshes the last-sync timestamp.
        if status == "success":
            repo.last_sync_at = int(time.time())
        self.db.commit()
        self.db.refresh(repo)
        return repo

    def delete_repo(self, repo_id: int) -> bool:
        """
        Delete a repository record.

        Args:
            repo_id: ID of the repository to delete

        Returns:
            True if deleted, False if not found
        """
        repo = self.get_repo(repo_id)
        if not repo:
            return False
        self.db.delete(repo)
        self.db.commit()
        return True

    def get_repo_by_name(self, server_id: int, name: str) -> Optional[Repo]:
        """
        Get a repository by server and name.

        Args:
            server_id: ID of the server
            name: Repository name

        Returns:
            Repo model instance or None if not found
        """
        return self.db.query(Repo).filter_by(
            server_id=server_id,
            name=name
        ).first()

    def list_all_repos(self) -> List[Repo]:
        """
        List all repositories across all servers.

        Returns:
            List of all Repo model instances, ordered by creation time
        """
        return self.db.query(Repo).order_by(Repo.created_at).all()

    def update_repo(
        self,
        repo_id: int,
        **kwargs
    ) -> Repo:
        """
        Update a repository's configuration.

        Only whitelisted fields (name, full_name, clone_url, local_path,
        status) are applied; any other keyword is silently ignored so
        callers cannot overwrite identity or audit columns.

        Args:
            repo_id: ID of the repository to update
            **kwargs: Fields to update (name, full_name, clone_url, local_path, status)

        Returns:
            Updated Repo model instance

        Raises:
            ValueError: If repo not found
        """
        repo = self.get_repo(repo_id)
        if not repo:
            raise ValueError(f"Repo not found with ID {repo_id}")
        for key, value in kwargs.items():
            if key in self._UPDATABLE_FIELDS:
                setattr(repo, key, value)
        self.db.commit()
        self.db.refresh(repo)
        return repo

View File

@@ -0,0 +1,263 @@
"""
Sync Service.
Handles Git operations for repository mirroring including:
- Cloning repositories with SSH authentication
- Fetching updates from mirrored repositories
- Counting commits in repositories
- Retrieving commit history
"""
import os
import shlex
import subprocess
import tempfile
import time
from pathlib import Path
from typing import Dict, List, Optional

from sqlalchemy.orm import Session

from app.models.repo import Repo
class SyncService:
"""
Service for managing Git repository synchronization.
Handles clone and fetch operations with SSH key authentication.
"""
    def __init__(self, db: Session):
        """
        Initialize the service with a database session.

        Args:
            db: SQLAlchemy database session used to persist repo
                state changes during sync operations
        """
        self.db = db
def sync_repo(self, repo: Repo, ssh_key_content: str) -> None:
"""
Synchronize a repository by cloning or fetching.
If the repository doesn't exist locally, clone it.
If it exists, fetch all updates.
Args:
repo: Repo model instance
ssh_key_content: SSH private key content for authentication
Raises:
Exception: If clone or fetch operation fails
"""
local_path = Path(repo.local_path)
# Update repo status to syncing
repo.status = "syncing"
repo.last_sync_at = int(time.time())
self.db.commit()
if local_path.exists():
# Repository exists, fetch updates
self._fetch_repo(str(local_path), ssh_key_content)
else:
# Repository doesn't exist, clone it
self._clone_repo(repo.clone_url, str(local_path), ssh_key_content)
def _clone_repo(self, clone_url: str, local_path: str, ssh_key: str) -> None:
"""
Clone a repository using git clone --mirror.
Creates a bare mirror clone of the repository.
Args:
clone_url: Git clone URL (SSH format)
local_path: Local path where repo should be cloned
ssh_key: SSH private key content for authentication
Raises:
subprocess.CalledProcessError: If git clone fails
IOError: If unable to create temporary SSH key file
"""
# Create a temporary file for SSH key
with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.key') as key_file:
key_file.write(ssh_key)
key_file_path = key_file.name
try:
# Set appropriate permissions for SSH key
os.chmod(key_file_path, 0o600)
# Create SSH command wrapper that uses our key
ssh_cmd = f'ssh -i {key_file_path} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
# Git clone command with mirror option
git_cmd = [
'git',
'clone',
'--mirror',
clone_url,
local_path
]
# Run git clone with SSH authentication
env = os.environ.copy()
env['GIT_SSH_COMMAND'] = ssh_cmd
result = subprocess.run(
git_cmd,
env=env,
capture_output=True,
text=True,
check=True
)
return result
finally:
# Clean up temporary SSH key file
try:
os.unlink(key_file_path)
except OSError:
pass
def _fetch_repo(self, local_path: str, ssh_key: str) -> None:
"""
Fetch all updates for an existing repository.
Args:
local_path: Local path to the repository
ssh_key: SSH private key content for authentication
Raises:
subprocess.CalledProcessError: If git fetch fails
IOError: If unable to create temporary SSH key file
"""
# Create a temporary file for SSH key
with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.key') as key_file:
key_file.write(ssh_key)
key_file_path = key_file.name
try:
# Set appropriate permissions for SSH key
os.chmod(key_file_path, 0o600)
# Create SSH command wrapper that uses our key
ssh_cmd = f'ssh -i {key_file_path} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
# Git fetch command to get all updates
git_cmd = [
'git',
'--git-dir',
local_path,
'fetch',
'--all'
]
# Run git fetch with SSH authentication
env = os.environ.copy()
env['GIT_SSH_COMMAND'] = ssh_cmd
result = subprocess.run(
git_cmd,
env=env,
capture_output=True,
text=True,
check=True
)
return result
finally:
# Clean up temporary SSH key file
try:
os.unlink(key_file_path)
except OSError:
pass
def _count_commits(self, repo_path: str) -> int:
"""
Count the number of commits in a repository.
Args:
repo_path: Path to the repository
Returns:
Number of commits, or 0 if counting fails
"""
try:
git_cmd = [
'git',
'--git-dir',
repo_path,
'rev-list',
'--all',
'--count'
]
result = subprocess.run(
git_cmd,
capture_output=True,
text=True,
check=True
)
return int(result.stdout.strip())
except (subprocess.CalledProcessError, ValueError):
return 0
    def get_repo_commits(self, repo: Repo, limit: int = 100) -> List[Dict[str, object]]:
        """
        Get commit history for a repository.

        Args:
            repo: Repo model instance
            limit: Maximum number of commits to return

        Returns:
            List of commit dictionaries containing:
                - hash: Commit SHA (str)
                - message: Commit subject line (str)
                - author: Author name (str)
                - email: Author email (str)
                - date: Commit time as a Unix timestamp (int)
            Empty list if the local mirror does not exist or git fails.
        """
        repo_path = Path(repo.local_path)
        # Nothing cloned yet -> no history to report.
        if not repo_path.exists():
            return []
        try:
            # %H|%s|%an|%ae|%ct -> hash|subject|author|email|commit-time,
            # one commit per line, across all refs.
            git_cmd = [
                'git',
                '--git-dir',
                str(repo_path),
                'log',
                '--all',
                f'--max-count={limit}',
                '--format=%H|%s|%an|%ae|%ct'
            ]
            result = subprocess.run(
                git_cmd,
                capture_output=True,
                text=True,
                check=True
            )
            commits = []
            for line in result.stdout.strip().split('\n'):
                if line:
                    parts = line.split('|')
                    # Subject lines containing '|' yield extra parts and
                    # are skipped rather than mis-parsed.
                    if len(parts) == 5:
                        commits.append({
                            'hash': parts[0],
                            'message': parts[1],
                            'author': parts[2],
                            'email': parts[3],
                            'date': int(parts[4])
                        })
            return commits
        except subprocess.CalledProcessError:
            return []