This commit is contained in:
xds
2026-03-16 12:12:56 +03:00
commit 9d886076d6
63 changed files with 4482 additions and 0 deletions

36
backend/alembic.ini Normal file
View File

@@ -0,0 +1,36 @@
[alembic]
script_location = alembic
sqlalchemy.url = postgresql+asyncpg://velobrain:velobrain@localhost:5432/velobrain
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

58
backend/alembic/env.py Normal file
View File

@@ -0,0 +1,58 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import create_async_engine
from backend.app.core.config import settings
from backend.app.core.database import Base
# Import all models so they register with Base.metadata
from backend.app.models import * # noqa: F401, F403
# Alembic Config object; gives access to values in alembic.ini.
config = context.config
# Override the ini URL with the application's configured database URL.
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)
# Configure Python logging from the ini file, when one is in use.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# Metadata of all imported models; used for autogenerate support.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a live connection."""
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection):
    # Synchronous migration body; invoked via AsyncConnection.run_sync().
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Create a throwaway async engine, run migrations, then dispose of it."""
    engine = create_async_engine(
        settings.DATABASE_URL,
        poolclass=pool.NullPool,
    )
    async with engine.connect() as conn:
        await conn.run_sync(do_run_migrations)
    await engine.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode using the async engine."""
    asyncio.run(run_async_migrations())


# Alembic executes this module directly; the CLI flags decide the mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

0
backend/app/__init__.py Normal file
View File

View File

View File

@@ -0,0 +1,102 @@
import uuid
from pathlib import Path
from fastapi import APIRouter, Depends, UploadFile, File, HTTPException
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from backend.app.core.config import settings
from backend.app.core.database import get_session
from backend.app.models.activity import Activity, DataPoint
from backend.app.schemas.activity import (
ActivityResponse,
ActivityListResponse,
DataPointResponse,
)
from backend.app.services.fit_parser import parse_fit_file
from backend.app.services.metrics import calculate_metrics
router = APIRouter()
@router.post("/upload", response_model=ActivityResponse)
async def upload_activity(
rider_id: uuid.UUID,
file: UploadFile = File(...),
session: AsyncSession = Depends(get_session),
):
if not file.filename or not file.filename.lower().endswith(".fit"):
raise HTTPException(status_code=400, detail="Only .FIT files are accepted")
upload_dir = Path(settings.UPLOAD_DIR)
upload_dir.mkdir(parents=True, exist_ok=True)
file_id = uuid.uuid4()
file_path = upload_dir / f"{file_id}.fit"
content = await file.read()
file_path.write_bytes(content)
activity, data_points = parse_fit_file(content, rider_id, str(file_path))
session.add(activity)
await session.flush()
for dp in data_points:
dp.activity_id = activity.id
session.add_all(data_points)
metrics = calculate_metrics(data_points, activity, rider_id, session)
if metrics:
session.add(metrics)
await session.commit()
await session.refresh(activity)
return activity
@router.get("", response_model=ActivityListResponse)
async def list_activities(
rider_id: uuid.UUID,
limit: int = 20,
offset: int = 0,
session: AsyncSession = Depends(get_session),
):
count_query = select(func.count(Activity.id)).where(Activity.rider_id == rider_id)
total = (await session.execute(count_query)).scalar() or 0
query = (
select(Activity)
.where(Activity.rider_id == rider_id)
.order_by(Activity.date.desc())
.limit(limit)
.offset(offset)
)
result = await session.execute(query)
activities = result.scalars().all()
return ActivityListResponse(items=activities, total=total)
@router.get("/{activity_id}", response_model=ActivityResponse)
async def get_activity(
activity_id: uuid.UUID,
session: AsyncSession = Depends(get_session),
):
activity = await session.get(Activity, activity_id)
if not activity:
raise HTTPException(status_code=404, detail="Activity not found")
return activity
@router.get("/{activity_id}/stream", response_model=list[DataPointResponse])
async def get_activity_stream(
activity_id: uuid.UUID,
session: AsyncSession = Depends(get_session),
):
query = (
select(DataPoint)
.where(DataPoint.activity_id == activity_id)
.order_by(DataPoint.timestamp)
)
result = await session.execute(query)
return result.scalars().all()

117
backend/app/api/auth.py Normal file
View File

@@ -0,0 +1,117 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from backend.app.core.config import settings
from backend.app.core.database import get_session
from backend.app.core.security import (
create_access_token,
verify_telegram_login,
verify_telegram_webapp,
)
from backend.app.models.rider import Rider
from backend.app.schemas.auth import (
AuthResponse,
TelegramLoginRequest,
TelegramWebAppRequest,
)
router = APIRouter()
async def _upsert_rider(
    session: AsyncSession,
    telegram_id: int,
    first_name: str,
    last_name: str | None,
    username: str | None,
    photo_url: str | None,
) -> Rider:
    """Create or refresh the Rider row matching this Telegram account."""
    lookup = await session.execute(
        select(Rider).where(Rider.telegram_id == telegram_id)
    )
    rider = lookup.scalar_one_or_none()
    display_name = f"{first_name} {last_name}" if last_name else first_name
    if rider is None:
        rider = Rider(
            telegram_id=telegram_id,
            name=display_name,
            telegram_username=username,
            avatar_url=photo_url,
        )
        session.add(rider)
    else:
        # Keep the stored profile in sync with the latest Telegram data.
        rider.name = display_name
        rider.telegram_username = username
        rider.avatar_url = photo_url
    await session.commit()
    await session.refresh(rider)
    return rider
def _build_auth_response(rider: Rider) -> AuthResponse:
    """Issue a JWT for the rider and wrap it with the profile payload."""
    token = create_access_token(
        rider_id=str(rider.id),
        telegram_id=rider.telegram_id,
        secret=settings.JWT_SECRET_KEY,
        algorithm=settings.JWT_ALGORITHM,
        expires_minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES,
    )
    return AuthResponse(access_token=token, rider=rider)
@router.post("/telegram-login", response_model=AuthResponse)
async def telegram_login(
data: TelegramLoginRequest,
session: AsyncSession = Depends(get_session),
):
if not settings.TELEGRAM_BOT_TOKEN:
raise HTTPException(status_code=500, detail="Telegram bot token not configured")
login_data = data.model_dump()
if not verify_telegram_login(login_data, settings.TELEGRAM_BOT_TOKEN):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid Telegram authorization",
)
rider = await _upsert_rider(
session,
telegram_id=data.id,
first_name=data.first_name,
last_name=data.last_name,
username=data.username,
photo_url=data.photo_url,
)
return _build_auth_response(rider)
@router.post("/telegram-webapp", response_model=AuthResponse)
async def telegram_webapp(
data: TelegramWebAppRequest,
session: AsyncSession = Depends(get_session),
):
if not settings.TELEGRAM_BOT_TOKEN:
raise HTTPException(status_code=500, detail="Telegram bot token not configured")
user = verify_telegram_webapp(data.init_data, settings.TELEGRAM_BOT_TOKEN)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid Telegram WebApp data",
)
rider = await _upsert_rider(
session,
telegram_id=user["id"],
first_name=user.get("first_name", ""),
last_name=user.get("last_name"),
username=user.get("username"),
photo_url=user.get("photo_url"),
)
return _build_auth_response(rider)

52
backend/app/api/rider.py Normal file
View File

@@ -0,0 +1,52 @@
import uuid
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from backend.app.core.database import get_session
from backend.app.models.rider import Rider
from backend.app.schemas.rider import RiderCreate, RiderUpdate, RiderResponse
router = APIRouter()
@router.post("/profile", response_model=RiderResponse)
async def create_rider(
data: RiderCreate,
session: AsyncSession = Depends(get_session),
):
rider = Rider(**data.model_dump())
session.add(rider)
await session.commit()
await session.refresh(rider)
return rider
@router.get("/profile/{rider_id}", response_model=RiderResponse)
async def get_rider(
rider_id: uuid.UUID,
session: AsyncSession = Depends(get_session),
):
rider = await session.get(Rider, rider_id)
if not rider:
raise HTTPException(status_code=404, detail="Rider not found")
return rider
@router.put("/profile/{rider_id}", response_model=RiderResponse)
async def update_rider(
rider_id: uuid.UUID,
data: RiderUpdate,
session: AsyncSession = Depends(get_session),
):
rider = await session.get(Rider, rider_id)
if not rider:
raise HTTPException(status_code=404, detail="Rider not found")
update_data = data.model_dump(exclude_unset=True)
for key, value in update_data.items():
setattr(rider, key, value)
await session.commit()
await session.refresh(rider)
return rider

11
backend/app/api/router.py Normal file
View File

@@ -0,0 +1,11 @@
from fastapi import APIRouter
from backend.app.api.auth import router as auth_router
from backend.app.api.activities import router as activities_router
from backend.app.api.rider import router as rider_router
api_router = APIRouter(prefix="/api")
api_router.include_router(auth_router, prefix="/auth", tags=["auth"])
api_router.include_router(activities_router, prefix="/activities", tags=["activities"])
api_router.include_router(rider_router, prefix="/rider", tags=["rider"])

View File

36
backend/app/core/auth.py Normal file
View File

@@ -0,0 +1,36 @@
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession
from backend.app.core.config import settings
from backend.app.core.database import get_session
from backend.app.core.security import decode_access_token
from backend.app.models.rider import Rider
bearer_scheme = HTTPBearer()
async def get_current_rider(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
    session: AsyncSession = Depends(get_session),
) -> Rider:
    """FastAPI dependency: resolve the Bearer JWT into a Rider.

    Raises:
        HTTPException 401: invalid/expired token, malformed "sub" claim,
            or rider no longer present in the database.
    """
    import uuid  # local import keeps this block self-contained

    try:
        payload = decode_access_token(
            credentials.credentials,
            settings.JWT_SECRET_KEY,
            settings.JWT_ALGORITHM,
        )
        # "sub" is stored as a string by create_access_token; the riders
        # primary key is a UUID. Converting inside the try means a missing
        # or non-UUID claim yields 401 instead of an unhandled 500.
        rider_id = uuid.UUID(payload["sub"])
    except Exception:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        )
    rider = await session.get(Rider, rider_id)
    if not rider:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Rider not found",
        )
    return rider

View File

@@ -0,0 +1,34 @@
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """Application settings, loaded from the environment / .env file."""

    model_config = {"env_file": ".env", "env_file_encoding": "utf-8"}
    # Database
    DATABASE_URL: str = "postgresql+asyncpg://velobrain:velobrain@localhost:5432/velobrain"
    # Anthropic
    ANTHROPIC_API_KEY: str = ""
    # Gemini
    GEMINI_API_KEY: str = ""
    GEMINI_MODEL: str = "gemini-2.5-pro"
    # App
    APP_SECRET_KEY: str = "change-me-in-production"  # override in production
    DEBUG: bool = True
    # Auth / JWT
    JWT_SECRET_KEY: str = "change-me-jwt-secret"  # override in production
    JWT_ALGORITHM: str = "HS256"
    JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = 1440  # 24 hours
    # Telegram
    TELEGRAM_BOT_TOKEN: str = ""
    TELEGRAM_BOT_USERNAME: str = ""
    # Upload
    UPLOAD_DIR: str = "./uploads"


# Shared singleton settings instance.
settings = Settings()

View File

@@ -0,0 +1,17 @@
from collections.abc import AsyncGenerator

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine, AsyncSession
from sqlalchemy.orm import DeclarativeBase

from backend.app.core.config import settings
# Async engine and session factory shared by the whole application.
engine = create_async_engine(settings.DATABASE_URL, echo=settings.DEBUG)
async_session = async_sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)


class Base(DeclarativeBase):
    """Declarative base that all ORM models inherit from."""


async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency yielding one AsyncSession per request.

    The original annotation claimed `-> AsyncSession`, but this is an
    async generator; the corrected return type reflects that.
    """
    async with async_session() as session:
        yield session

View File

@@ -0,0 +1,71 @@
import hashlib
import hmac
import json
import time
from datetime import datetime, timedelta, timezone
from urllib.parse import parse_qs, unquote
import jwt
def verify_telegram_login(data: dict, bot_token: str) -> bool:
    """Verify data from the Telegram Login Widget.

    The data-check string must contain exactly the fields Telegram sent,
    so None-valued optional fields (as produced by pydantic's
    model_dump() for absent last_name/username/photo_url) are dropped
    before hashing -- otherwise "last_name=None" corrupts the string and
    every such login fails verification. Payloads older than 24h are
    rejected; digests are compared in constant time.
    """
    data = {k: v for k, v in data.items() if v is not None}
    check_hash = data.pop("hash", "")
    if not check_hash:
        return False
    data_check_string = "\n".join(
        f"{k}={v}" for k, v in sorted(data.items())
    )
    secret_key = hashlib.sha256(bot_token.encode()).digest()
    computed = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
    if int(data.get("auth_date", 0)) < time.time() - 86400:
        return False
    return hmac.compare_digest(computed, check_hash)
def verify_telegram_webapp(init_data: str, bot_token: str) -> dict | None:
"""Verify Telegram WebApp initData and return parsed user dict."""
parsed = parse_qs(init_data)
data = {k: v[0] for k, v in parsed.items()}
check_hash = data.pop("hash", "")
if not check_hash:
return None
data_check_string = "\n".join(
f"{k}={v}" for k, v in sorted(data.items())
)
secret_key = hmac.new(b"WebAppData", bot_token.encode(), hashlib.sha256).digest()
computed = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
if not hmac.compare_digest(computed, check_hash):
return None
user_raw = data.get("user")
if not user_raw:
return None
return json.loads(unquote(user_raw))
def create_access_token(
    rider_id: str,
    telegram_id: int,
    secret: str,
    algorithm: str,
    expires_minutes: int,
) -> str:
    """Build a signed JWT carrying the rider id ("sub") and telegram id."""
    expires_at = datetime.now(timezone.utc) + timedelta(minutes=expires_minutes)
    claims = {
        "sub": rider_id,
        "tg_id": telegram_id,
        "exp": expires_at,
    }
    return jwt.encode(claims, secret, algorithm=algorithm)
def decode_access_token(token: str, secret: str, algorithm: str) -> dict:
    """Decode and validate a JWT, returning its claims.

    Propagates PyJWT's exceptions for invalid or expired tokens.
    """
    return jwt.decode(token, secret, algorithms=[algorithm])

37
backend/app/main.py Normal file
View File

@@ -0,0 +1,37 @@
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from backend.app.api.router import api_router
from backend.app.core.config import settings
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook; no startup/shutdown work is needed yet."""
    # Startup
    yield
    # Shutdown
app = FastAPI(
    title="VeloBrain",
    description="AI-Powered Cycling Training Platform",
    version="0.1.0",
    lifespan=lifespan,
)
# Allow the local Vite dev server (port 5173) to call the API from the browser.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:5173"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.include_router(api_router)
@app.get("/health")
async def health():
return {"status": "ok"}

View File

@@ -0,0 +1,16 @@
from backend.app.models.rider import Rider
from backend.app.models.activity import Activity, ActivityMetrics, DataPoint, Interval
from backend.app.models.fitness import FitnessHistory, PowerCurve, DiaryEntry
from backend.app.models.training import TrainingPlan
__all__ = [
"Rider",
"Activity",
"ActivityMetrics",
"DataPoint",
"Interval",
"FitnessHistory",
"PowerCurve",
"DiaryEntry",
"TrainingPlan",
]

View File

@@ -0,0 +1,84 @@
import uuid
from datetime import datetime
from sqlalchemy import String, Float, Integer, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship
from backend.app.core.database import Base
class Activity(Base):
    """A single recorded ride, parsed from an uploaded .FIT file."""

    __tablename__ = "activities"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    rider_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("riders.id"))
    name: Mapped[str | None] = mapped_column(String(200), nullable=True)
    activity_type: Mapped[str] = mapped_column(String(50), default="road")
    date: Mapped[datetime] = mapped_column(DateTime(timezone=True))
    duration: Mapped[int] = mapped_column(Integer)  # seconds
    distance: Mapped[float | None] = mapped_column(Float, nullable=True)  # meters
    elevation_gain: Mapped[float | None] = mapped_column(Float, nullable=True)  # meters
    file_path: Mapped[str | None] = mapped_column(String(500), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    # Metrics load eagerly with the activity; raw data points only on demand.
    rider = relationship("Rider", back_populates="activities")
    metrics = relationship("ActivityMetrics", back_populates="activity", uselist=False, lazy="joined")
    intervals = relationship("Interval", back_populates="activity", lazy="selectin")
    data_points = relationship("DataPoint", back_populates="activity", lazy="noload")
class ActivityMetrics(Base):
    """Summary metrics for one activity (one row per activity).

    tss and intensity_factor depend on the rider's FTP and may stay NULL
    until that is known.
    """

    __tablename__ = "activity_metrics"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    activity_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("activities.id"), unique=True)
    tss: Mapped[float | None] = mapped_column(Float, nullable=True)
    normalized_power: Mapped[float | None] = mapped_column(Float, nullable=True)
    intensity_factor: Mapped[float | None] = mapped_column(Float, nullable=True)
    variability_index: Mapped[float | None] = mapped_column(Float, nullable=True)
    avg_power: Mapped[float | None] = mapped_column(Float, nullable=True)
    max_power: Mapped[int | None] = mapped_column(Integer, nullable=True)
    avg_hr: Mapped[int | None] = mapped_column(Integer, nullable=True)
    max_hr: Mapped[int | None] = mapped_column(Integer, nullable=True)
    avg_cadence: Mapped[int | None] = mapped_column(Integer, nullable=True)
    avg_speed: Mapped[float | None] = mapped_column(Float, nullable=True)  # m/s
    calories: Mapped[int | None] = mapped_column(Integer, nullable=True)
    activity = relationship("Activity", back_populates="metrics")
class DataPoint(Base):
    """One raw time-series sample of an activity; all sensor fields optional."""

    __tablename__ = "data_points"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    activity_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("activities.id"))
    timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), index=True)
    power: Mapped[int | None] = mapped_column(Integer, nullable=True)
    heart_rate: Mapped[int | None] = mapped_column(Integer, nullable=True)
    cadence: Mapped[int | None] = mapped_column(Integer, nullable=True)
    speed: Mapped[float | None] = mapped_column(Float, nullable=True)
    latitude: Mapped[float | None] = mapped_column(Float, nullable=True)
    longitude: Mapped[float | None] = mapped_column(Float, nullable=True)
    altitude: Mapped[float | None] = mapped_column(Float, nullable=True)
    temperature: Mapped[int | None] = mapped_column(Integer, nullable=True)
    activity = relationship("Activity", back_populates="data_points")
class Interval(Base):
    """A labeled segment of an activity (work / rest / climb)."""

    __tablename__ = "intervals"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    activity_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("activities.id"))
    start_ts: Mapped[datetime] = mapped_column(DateTime(timezone=True))
    end_ts: Mapped[datetime] = mapped_column(DateTime(timezone=True))
    interval_type: Mapped[str] = mapped_column(String(50))  # work / rest / climb
    avg_power: Mapped[float | None] = mapped_column(Float, nullable=True)
    avg_hr: Mapped[int | None] = mapped_column(Integer, nullable=True)
    duration: Mapped[int | None] = mapped_column(Integer, nullable=True)  # seconds
    activity = relationship("Activity", back_populates="intervals")

View File

@@ -0,0 +1,42 @@
import uuid
from datetime import date, datetime
from sqlalchemy import String, Float, Integer, Date, DateTime, ForeignKey, Text, func
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import Mapped, mapped_column
from backend.app.core.database import Base
class FitnessHistory(Base):
    """Daily training-load snapshot (CTL / ATL / TSB) for a rider."""

    __tablename__ = "fitness_history"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    rider_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("riders.id"))
    date: Mapped[date] = mapped_column(Date, index=True)
    ctl: Mapped[float] = mapped_column(Float, default=0)
    atl: Mapped[float] = mapped_column(Float, default=0)
    tsb: Mapped[float] = mapped_column(Float, default=0)
    ramp_rate: Mapped[float | None] = mapped_column(Float, nullable=True)
class PowerCurve(Base):
    """Best-power-by-duration curve for one activity, stored as JSONB."""

    __tablename__ = "power_curves"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    activity_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("activities.id"))
    curve_data: Mapped[dict] = mapped_column(JSONB)  # {duration_seconds: max_power}
class DiaryEntry(Base):
    """Post-ride diary: AI summary plus the rider's subjective notes."""

    __tablename__ = "diary_entries"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    activity_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("activities.id"), unique=True)
    ai_summary: Mapped[str | None] = mapped_column(Text, nullable=True)
    rider_notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    mood: Mapped[str | None] = mapped_column(String(50), nullable=True)
    rpe: Mapped[int | None] = mapped_column(Integer, nullable=True)  # rating of perceived exertion
    sleep_hours: Mapped[float | None] = mapped_column(Float, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

View File

@@ -0,0 +1,29 @@
import uuid
from datetime import datetime
from sqlalchemy import String, Float, BigInteger, DateTime, func
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship
from backend.app.core.database import Base
class Rider(Base):
    """An athlete profile; created via Telegram auth or the rider API."""

    __tablename__ = "riders"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # BigInteger: Telegram ids exceed the 32-bit integer range.
    telegram_id: Mapped[int | None] = mapped_column(BigInteger, unique=True, index=True, nullable=True)
    telegram_username: Mapped[str | None] = mapped_column(String(100), nullable=True)
    avatar_url: Mapped[str | None] = mapped_column(String(500), nullable=True)
    name: Mapped[str] = mapped_column(String(100))
    ftp: Mapped[float | None] = mapped_column(Float, nullable=True)
    lthr: Mapped[int | None] = mapped_column(nullable=True)
    weight: Mapped[float | None] = mapped_column(Float, nullable=True)
    zones_config: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    goals: Mapped[str | None] = mapped_column(String(500), nullable=True)
    experience_level: Mapped[str | None] = mapped_column(String(50), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    activities = relationship("Activity", back_populates="rider", lazy="selectin")

View File

@@ -0,0 +1,23 @@
import uuid
from datetime import date, datetime
from sqlalchemy import String, Date, DateTime, ForeignKey, Text, func
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import Mapped, mapped_column
from backend.app.core.database import Base
class TrainingPlan(Base):
    """A rider's training plan over a date range; weekly structure in weeks_json."""

    __tablename__ = "training_plans"
    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    rider_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("riders.id"))
    goal: Mapped[str] = mapped_column(String(200))
    start_date: Mapped[date] = mapped_column(Date)
    end_date: Mapped[date] = mapped_column(Date)
    phase: Mapped[str | None] = mapped_column(String(50), nullable=True)
    weeks_json: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

View File

View File

@@ -0,0 +1,52 @@
from datetime import datetime
from uuid import UUID
from pydantic import BaseModel
class ActivityMetricsResponse(BaseModel):
    """Serialized power/HR summary metrics of an activity."""

    model_config = {"from_attributes": True}
    tss: float | None = None
    normalized_power: float | None = None
    intensity_factor: float | None = None
    variability_index: float | None = None
    avg_power: float | None = None
    max_power: int | None = None
    avg_hr: int | None = None
    max_hr: int | None = None
    avg_cadence: int | None = None
    avg_speed: float | None = None
class ActivityResponse(BaseModel):
    """API representation of one activity, with nested metrics when present."""

    model_config = {"from_attributes": True}
    id: UUID
    rider_id: UUID
    name: str | None = None
    activity_type: str
    date: datetime
    duration: int
    distance: float | None = None
    elevation_gain: float | None = None
    metrics: ActivityMetricsResponse | None = None
class ActivityListResponse(BaseModel):
    """One page of activities plus the rider's total activity count."""

    items: list[ActivityResponse]
    total: int
class DataPointResponse(BaseModel):
    """A single time-series sample of a ride; all sensor fields optional."""

    model_config = {"from_attributes": True}
    timestamp: datetime
    power: int | None = None
    heart_rate: int | None = None
    cadence: int | None = None
    speed: float | None = None
    latitude: float | None = None
    longitude: float | None = None
    altitude: float | None = None
    temperature: int | None = None

View File

@@ -0,0 +1,23 @@
from pydantic import BaseModel
from backend.app.schemas.rider import RiderResponse
class TelegramLoginRequest(BaseModel):
    """Payload posted by the Telegram Login Widget; `hash` signs the rest."""

    id: int
    first_name: str
    last_name: str | None = None
    username: str | None = None
    photo_url: str | None = None
    auth_date: int
    hash: str
class TelegramWebAppRequest(BaseModel):
    """Raw initData query string from a Telegram WebApp session."""

    init_data: str
class AuthResponse(BaseModel):
    """Issued JWT plus the authenticated rider's profile."""

    access_token: str
    token_type: str = "bearer"
    rider: RiderResponse

View File

@@ -0,0 +1,38 @@
from uuid import UUID
from pydantic import BaseModel
class RiderCreate(BaseModel):
    """Fields accepted when creating a rider profile."""

    name: str
    ftp: float | None = None
    lthr: int | None = None
    weight: float | None = None
    goals: str | None = None
    experience_level: str | None = None
class RiderUpdate(BaseModel):
    """Partial update payload; only fields explicitly set are applied."""

    name: str | None = None
    ftp: float | None = None
    lthr: int | None = None
    weight: float | None = None
    zones_config: dict | None = None
    goals: str | None = None
    experience_level: str | None = None
class RiderResponse(BaseModel):
    """API representation of a rider profile."""

    model_config = {"from_attributes": True}
    id: UUID
    telegram_id: int | None = None
    telegram_username: str | None = None
    avatar_url: str | None = None
    name: str
    ftp: float | None = None
    lthr: int | None = None
    weight: float | None = None
    zones_config: dict | None = None
    goals: str | None = None
    experience_level: str | None = None

View File

View File

@@ -0,0 +1,102 @@
import uuid
from datetime import datetime, timezone
from io import BytesIO
import fitdecode
from backend.app.models.activity import Activity, DataPoint
def parse_fit_file(
    file_content: bytes,
    rider_id: uuid.UUID,
    file_path: str,
) -> tuple[Activity, list[DataPoint]]:
    """Parse a .FIT file and return an Activity with its DataPoints.

    Uses the device-reported total_elapsed_time for duration when the
    session message provides it, falling back to the timestamp span of
    the records; applies "Ride"/"road" defaults when sport fields are
    absent or empty.
    """
    data_points: list[DataPoint] = []
    session_data: dict = {}
    with fitdecode.FitReader(BytesIO(file_content)) as fit:
        for frame in fit:
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue
            if frame.name == "record":
                dp = _parse_record(frame)
                if dp:
                    data_points.append(dp)
            elif frame.name == "session":
                session_data = _parse_session(frame)
    start_time = data_points[0].timestamp if data_points else datetime.now(timezone.utc)
    end_time = data_points[-1].timestamp if data_points else start_time
    elapsed = session_data.get("total_elapsed_time")
    if elapsed:
        duration = int(elapsed)
    else:
        duration = int((end_time - start_time).total_seconds()) if data_points else 0
    activity = Activity(
        rider_id=rider_id,
        # _parse_session stores None for missing fields, so dict.get's
        # second-argument default never fires -- use `or` for fallbacks.
        name=session_data.get("sport") or "Ride",
        activity_type=session_data.get("sub_sport") or "road",
        date=start_time,
        duration=duration,
        distance=session_data.get("total_distance"),
        elevation_gain=session_data.get("total_ascent"),
        file_path=file_path,
    )
    return activity, data_points
def _parse_record(frame: fitdecode.FitDataMessage) -> DataPoint | None:
    """Turn one FIT `record` message into a DataPoint; None if untimestamped."""
    ts = _get_field(frame, "timestamp")
    if not ts:
        return None
    # Normalize naive timestamps to UTC so comparisons stay consistent.
    if isinstance(ts, datetime) and ts.tzinfo is None:
        ts = ts.replace(tzinfo=timezone.utc)
    lat = _semicircles_to_degrees(_get_field(frame, "position_lat"))
    lon = _semicircles_to_degrees(_get_field(frame, "position_long"))
    return DataPoint(
        timestamp=ts,
        power=_get_field(frame, "power"),
        heart_rate=_get_field(frame, "heart_rate"),
        cadence=_get_field(frame, "cadence"),
        speed=_get_field(frame, "speed"),
        latitude=lat,
        longitude=lon,
        altitude=_get_field(frame, "altitude"),
        temperature=_get_field(frame, "temperature"),
    )
def _parse_session(frame: fitdecode.FitDataMessage) -> dict:
    """Extract session-level summary values (None for anything missing)."""
    summary = {
        "sport": _get_field_str(frame, "sport"),
        "sub_sport": _get_field_str(frame, "sub_sport"),
    }
    for key in ("total_distance", "total_ascent", "total_elapsed_time"):
        summary[key] = _get_field(frame, key)
    return summary
def _get_field(frame: fitdecode.FitDataMessage, name: str):
"""Safely get a field value from a FIT frame."""
try:
field = frame.get_field(name)
return field.value if field else None
except KeyError:
return None
def _get_field_str(frame: fitdecode.FitDataMessage, name: str) -> str | None:
    """Stringify a field value, preserving None for missing fields."""
    value = _get_field(frame, name)
    if value is None:
        return None
    return str(value)
def _semicircles_to_degrees(semicircles: int | None) -> float | None:
"""Convert Garmin semicircles to decimal degrees."""
if semicircles is None:
return None
return semicircles * (180.0 / 2**31)

View File

@@ -0,0 +1,126 @@
from google import genai
from google.genai import types
from backend.app.core.config import settings
# Lazily-created, process-wide Gemini client.
_client: genai.Client | None = None


def get_client() -> genai.Client:
    """Return the shared Gemini client, creating it on first use."""
    global _client
    if _client is None:
        _client = genai.Client(api_key=settings.GEMINI_API_KEY)
    return _client


def _build_request(
    messages: list[dict[str, str]],
    system_instruction: str | None,
    temperature: float,
    max_tokens: int,
) -> tuple[list[types.Content], types.GenerateContentConfig]:
    """Translate chat messages + options into genai contents/config.

    Shared by all three chat entry points; previously this construction
    was duplicated verbatim in each of them.
    messages: list of {"role": "user"|"model", "text": "..."}
    """
    contents = [
        types.Content(
            role=m["role"],
            parts=[types.Part.from_text(text=m["text"])],
        )
        for m in messages
    ]
    config = types.GenerateContentConfig(
        temperature=temperature,
        max_output_tokens=max_tokens,
    )
    if system_instruction:
        config.system_instruction = system_instruction
    return contents, config


def chat_sync(
    messages: list[dict[str, str]],
    system_instruction: str | None = None,
    temperature: float = 0.7,
    max_tokens: int = 8192,
) -> str:
    """Synchronous chat with Gemini; returns the model's text response."""
    contents, config = _build_request(messages, system_instruction, temperature, max_tokens)
    response = get_client().models.generate_content(
        model=settings.GEMINI_MODEL,
        contents=contents,
        config=config,
    )
    return response.text or ""


async def chat_async(
    messages: list[dict[str, str]],
    system_instruction: str | None = None,
    temperature: float = 0.7,
    max_tokens: int = 8192,
) -> str:
    """Async chat with Gemini; returns the model's text response."""
    contents, config = _build_request(messages, system_instruction, temperature, max_tokens)
    response = await get_client().aio.models.generate_content(
        model=settings.GEMINI_MODEL,
        contents=contents,
        config=config,
    )
    return response.text or ""


async def chat_stream(
    messages: list[dict[str, str]],
    system_instruction: str | None = None,
    temperature: float = 0.7,
    max_tokens: int = 8192,
):
    """Async streaming chat with Gemini; yields non-empty text chunks."""
    contents, config = _build_request(messages, system_instruction, temperature, max_tokens)
    async for chunk in get_client().aio.models.generate_content_stream(
        model=settings.GEMINI_MODEL,
        contents=contents,
        config=config,
    ):
        if chunk.text:
            yield chunk.text

View File

@@ -0,0 +1,83 @@
import uuid
import numpy as np
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from backend.app.models.activity import Activity, ActivityMetrics, DataPoint
from backend.app.models.rider import Rider
def calculate_metrics(
    data_points: list[DataPoint],
    activity: Activity,
    rider_id: uuid.UUID,
    session: AsyncSession,
) -> ActivityMetrics | None:
    """Calculate power/HR summary metrics for an activity.

    rider_id and session are currently unused but kept for interface
    stability. Returns None only when there are no data points at all.
    IF and TSS require the rider's FTP and are left None here (see
    calculate_metrics_with_ftp).
    """
    if not data_points:
        return None
    powers = np.array([dp.power for dp in data_points if dp.power is not None], dtype=float)
    hrs = np.array([dp.heart_rate for dp in data_points if dp.heart_rate is not None], dtype=float)
    cadences = np.array([dp.cadence for dp in data_points if dp.cadence is not None], dtype=float)
    speeds = np.array([dp.speed for dp in data_points if dp.speed is not None], dtype=float)
    avg_power = float(np.mean(powers)) if powers.size else None
    max_power = int(np.max(powers)) if powers.size else None
    # NP needs a 30-sample rolling window; fall back to the plain mean.
    np_value = _normalized_power(powers) if powers.size >= 30 else avg_power
    avg_hr = int(np.mean(hrs)) if hrs.size else None
    max_hr = int(np.max(hrs)) if hrs.size else None
    avg_cadence = int(np.mean(cadences)) if cadences.size else None
    avg_speed = float(np.mean(speeds)) if speeds.size else None
    variability_index = None
    if np_value is not None and avg_power:  # avg_power > 0 guards the division
        variability_index = np_value / avg_power
    # `is not None` throughout: an all-zero power/speed stream previously
    # collapsed legitimate 0.0 averages to None because 0.0 is falsy.
    return ActivityMetrics(
        activity_id=activity.id,
        tss=None,
        normalized_power=round(np_value, 1) if np_value is not None else None,
        intensity_factor=None,
        variability_index=round(variability_index, 2) if variability_index is not None else None,
        avg_power=round(avg_power, 1) if avg_power is not None else None,
        max_power=max_power,
        avg_hr=avg_hr,
        max_hr=max_hr,
        avg_cadence=avg_cadence,
        avg_speed=round(avg_speed, 2) if avg_speed is not None else None,
    )
def calculate_metrics_with_ftp(
    metrics: ActivityMetrics,
    ftp: float,
    duration_seconds: int,
) -> ActivityMetrics:
    """Enrich metrics in place with FTP-dependent values (IF, TSS).

    TSS is computed from the unrounded intensity factor so the stored
    2-decimal IF does not skew the score. Returns the same metrics
    object; a missing normalized_power or non-positive FTP is a no-op.
    """
    if metrics.normalized_power and ftp > 0:
        raw_if = metrics.normalized_power / ftp
        metrics.intensity_factor = round(raw_if, 2)
        metrics.tss = round(
            (duration_seconds * metrics.normalized_power * raw_if)
            / (ftp * 3600)
            * 100,
            1,
        )
    return metrics
def _normalized_power(powers: np.ndarray) -> float:
"""
NP = 4th root of mean of 4th powers of 30s rolling average.
"""
if len(powers) < 30:
return float(np.mean(powers))
rolling = np.convolve(powers, np.ones(30) / 30, mode="valid")
return float(np.power(np.mean(np.power(rolling, 4)), 0.25))

34
backend/requirements.txt Normal file
View File

@@ -0,0 +1,34 @@
# Web framework
fastapi==0.115.12
uvicorn[standard]==0.34.2
python-multipart==0.0.20
# Database
sqlalchemy[asyncio]==2.0.41
asyncpg==0.30.0
alembic==1.15.2
# FIT parsing
fitdecode==0.10.0
# Analytics
numpy==2.2.4
pandas==2.2.3
# AI
anthropic==0.52.0
google-genai==1.67.0
# Config
pydantic-settings==2.9.1
# Telegram bot
aiogram==3.20.0
# Auth
PyJWT==2.10.1
# Testing
pytest==8.3.5
pytest-asyncio==0.25.3
httpx==0.28.1

View File