This commit is contained in:
xds
2026-03-16 15:43:20 +03:00
parent 002e4cca31
commit 1d76f29244
14 changed files with 546 additions and 89 deletions

View File

@@ -0,0 +1,30 @@
"""add exercise_sets to activities
Revision ID: 79d3444f9d7d
Revises: 4c6a3c01542f
Create Date: 2026-03-16 14:57:26.988571
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '79d3444f9d7d'
down_revision: Union[str, None] = '4c6a3c01542f'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add the nullable JSONB ``exercise_sets`` column to ``activities``."""
    # ### commands auto generated by Alembic - please adjust! ###
    exercise_sets_column = sa.Column(
        'exercise_sets',
        postgresql.JSONB(astext_type=sa.Text()),
        nullable=True,
    )
    op.add_column('activities', exercise_sets_column)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert upgrade(): drop the ``exercise_sets`` column from ``activities``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('activities', 'exercise_sets')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,36 @@
"""add training plan link fields to activities
Revision ID: ab0f6e2939d3
Revises: 79d3444f9d7d
Create Date: 2026-03-16 15:08:33.484799
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'ab0f6e2939d3'
down_revision: Union[str, None] = '79d3444f9d7d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add training-plan link columns and FK to ``activities``.

    Columns: ``training_plan_id`` (FK to training_plans.id), ``plan_week``,
    ``plan_day`` — all nullable so existing rows are unaffected.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('activities', sa.Column('training_plan_id', sa.UUID(), nullable=True))
    op.add_column('activities', sa.Column('plan_week', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('plan_day', sa.String(length=20), nullable=True))
    # Name the constraint explicitly (this matches PostgreSQL's auto-generated
    # name) so downgrade() can drop it by name — drop_constraint(None, ...)
    # fails with "Constraint must have a name".
    op.create_foreign_key(
        'activities_training_plan_id_fkey',
        'activities',
        'training_plans',
        ['training_plan_id'],
        ['id'],
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert upgrade(): drop the FK and the plan-link columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop by PostgreSQL's default auto-generated FK name: passing None here
    # raises "Constraint must have a name" in Alembic, so the autogenerated
    # drop_constraint(None, ...) could never succeed.
    op.drop_constraint('activities_training_plan_id_fkey', 'activities', type_='foreignkey')
    op.drop_column('activities', 'plan_day')
    op.drop_column('activities', 'plan_week')
    op.drop_column('activities', 'training_plan_id')
    # ### end Alembic commands ###

View File

@@ -25,6 +25,7 @@ from backend.app.services.zones import calculate_power_zones, calculate_hr_zones
from backend.app.services.power_curve import calculate_power_curve
from backend.app.services.intervals import detect_intervals
from backend.app.services.ai_summary import generate_summary
from backend.app.services.coaching import link_activity_to_plan
router = APIRouter()
@@ -48,31 +49,38 @@ async def upload_activity(
file_path.write_bytes(content)
# 1. Parse FIT
activity, data_points = parse_fit_file(content, rider.id, str(file_path))
activity, data_points, exercise_sets = parse_fit_file(content, rider.id, str(file_path))
if exercise_sets:
activity.exercise_sets = exercise_sets
# Auto-link to training plan
await link_activity_to_plan(activity, rider.id, session)
session.add(activity)
await session.flush()
# 2. Save data points
for dp in data_points:
dp.activity_id = activity.id
session.add_all(data_points)
# 2. Save data points (if any — strength workouts may have none)
if data_points:
for dp in data_points:
dp.activity_id = activity.id
session.add_all(data_points)
# 3. Calculate & save metrics (with FTP if available)
metrics = calculate_metrics(data_points, activity, ftp=rider.ftp)
if metrics:
session.add(metrics)
# 3. Calculate & save metrics (with FTP if available)
metrics = calculate_metrics(data_points, activity, ftp=rider.ftp)
if metrics:
session.add(metrics)
# 4. Detect & save intervals
intervals = detect_intervals(data_points, ftp=rider.ftp)
for interval in intervals:
interval.activity_id = activity.id
session.add_all(intervals)
# 4. Detect & save intervals
intervals = detect_intervals(data_points, ftp=rider.ftp)
for interval in intervals:
interval.activity_id = activity.id
session.add_all(intervals)
# 5. Calculate & save power curve
curve_data = calculate_power_curve(data_points)
if curve_data:
pc = PowerCurve(activity_id=activity.id, curve_data=curve_data)
session.add(pc)
# 5. Calculate & save power curve
curve_data = calculate_power_curve(data_points)
if curve_data:
pc = PowerCurve(activity_id=activity.id, curve_data=curve_data)
session.add(pc)
await session.commit()
await session.refresh(activity)

View File

@@ -9,6 +9,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from backend.app.core.auth import get_current_rider
from backend.app.core.database import get_session
from backend.app.models.activity import Activity
from backend.app.models.coaching import CoachingChat
from backend.app.models.rider import Rider
from backend.app.models.training import TrainingPlan
@@ -243,11 +244,74 @@ async def get_today(
rider: Rider = Depends(get_current_rider),
session: AsyncSession = Depends(get_session),
):
"""Get today's planned workout."""
"""Get today's planned workout with linked activity if any."""
workout = await get_today_workout(rider, session)
if not workout:
return None
# Check if there's already a linked activity for today
linked_query = (
select(Activity)
.where(Activity.training_plan_id == uuid.UUID(workout["plan_id"]))
.where(Activity.plan_week == workout["week_number"])
.where(Activity.plan_day == workout["day"])
)
linked_result = await session.execute(linked_query)
linked = linked_result.scalar_one_or_none()
workout["linked_activity_id"] = str(linked.id) if linked else None
workout["completed"] = linked is not None
return workout
# --- Activity-Plan linking ---
class LinkRequest(BaseModel):
    """Request body for POST /link: attach an activity to a planned workout day."""
    activity_id: str  # UUID of the activity, string form (parsed with uuid.UUID)
    plan_id: str  # UUID of the training plan, string form
    week: int  # 1-based week number within the plan
    day: str  # weekday name, lowercase — e.g. "monday"
@router.post("/link")
async def link_activity(
    body: LinkRequest,
    rider: Rider = Depends(get_current_rider),
    session: AsyncSession = Depends(get_session),
):
    """Manually link an activity to a planned workout day.

    Raises:
        HTTPException 400: activity_id/plan_id is not a valid UUID string
            (previously an unhandled ValueError -> HTTP 500).
        HTTPException 404: activity or plan missing, or owned by another rider.
    """
    # Validate UUID strings up front; uuid.UUID raises ValueError on bad input.
    try:
        activity_uuid = uuid.UUID(body.activity_id)
        plan_uuid = uuid.UUID(body.plan_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid id format") from None
    activity = await session.get(Activity, activity_uuid)
    if not activity or activity.rider_id != rider.id:
        raise HTTPException(status_code=404, detail="Activity not found")
    plan = await session.get(TrainingPlan, plan_uuid)
    if not plan or plan.rider_id != rider.id:
        raise HTTPException(status_code=404, detail="Plan not found")
    activity.training_plan_id = plan.id
    activity.plan_week = body.week
    activity.plan_day = body.day
    await session.commit()
    return {"ok": True}
@router.post("/unlink/{activity_id}")
async def unlink_activity(
    activity_id: uuid.UUID,
    rider: Rider = Depends(get_current_rider),
    session: AsyncSession = Depends(get_session),
):
    """Detach an activity from its planned workout (clears all plan-link fields)."""
    activity = await session.get(Activity, activity_id)
    # Ownership check doubles as existence check — 404 either way.
    if not activity or activity.rider_id != rider.id:
        raise HTTPException(status_code=404, detail="Activity not found")
    for link_field in ("training_plan_id", "plan_week", "plan_day"):
        setattr(activity, link_field, None)
    await session.commit()
    return {"ok": True}
# --- Adjustment chat ---
@router.post("/plan/adjust")

View File

@@ -18,6 +18,7 @@ from backend.app.services.fit_parser import parse_fit_file
from backend.app.services.metrics import calculate_metrics
from backend.app.services.intervals import detect_intervals
from backend.app.services.power_curve import calculate_power_curve
from backend.app.services.coaching import link_activity_to_plan
logger = logging.getLogger(__name__)
@@ -57,27 +58,34 @@ async def process_fit_upload(content: bytes, rider: Rider) -> Activity:
# Re-attach rider to this session
rider = await session.get(Rider, rider.id)
activity, data_points = parse_fit_file(content, rider.id, str(file_path))
activity, data_points, exercise_sets = parse_fit_file(content, rider.id, str(file_path))
if exercise_sets:
activity.exercise_sets = exercise_sets
# Auto-link to training plan
await link_activity_to_plan(activity, rider.id, session)
session.add(activity)
await session.flush()
for dp in data_points:
dp.activity_id = activity.id
session.add_all(data_points)
if data_points:
for dp in data_points:
dp.activity_id = activity.id
session.add_all(data_points)
metrics = calculate_metrics(data_points, activity, ftp=rider.ftp)
if metrics:
session.add(metrics)
metrics = calculate_metrics(data_points, activity, ftp=rider.ftp)
if metrics:
session.add(metrics)
intervals = detect_intervals(data_points, ftp=rider.ftp)
for interval in intervals:
interval.activity_id = activity.id
session.add_all(intervals)
intervals = detect_intervals(data_points, ftp=rider.ftp)
for interval in intervals:
interval.activity_id = activity.id
session.add_all(intervals)
curve_data = calculate_power_curve(data_points)
if curve_data:
pc = PowerCurve(activity_id=activity.id, curve_data=curve_data)
session.add(pc)
curve_data = calculate_power_curve(data_points)
if curve_data:
pc = PowerCurve(activity_id=activity.id, curve_data=curve_data)
session.add(pc)
await session.commit()
await session.refresh(activity)
@@ -134,7 +142,7 @@ async def handle_document(message: Message, bot: Bot):
m = activity.metrics
lines = [
f"*{activity.name or 'Ride'}*",
f"*{activity.name or 'Workout'}*",
f"Duration: {format_duration(activity.duration)}",
]
@@ -156,6 +164,22 @@ async def handle_document(message: Message, bot: Bot):
lines.append(f"Avg HR: {m.avg_hr} bpm")
if m.avg_cadence:
lines.append(f"Avg Cadence: {m.avg_cadence} rpm")
if m.calories:
lines.append(f"Calories: {m.calories} kcal")
# Exercise sets for strength workouts
if activity.exercise_sets:
exercises: dict[str, list] = {}
for s in activity.exercise_sets:
name = s.get("exercise_name", "Unknown")
exercises.setdefault(name, []).append(s)
lines.append("")
for name, sets in exercises.items():
reps_str = " / ".join(
f"{s.get('repetitions', '?')}x{s.get('weight', 0):.0f}kg" if s.get('weight') else f"{s.get('repetitions', '?')} reps"
for s in sets
)
lines.append(f" {name}: {reps_str}")
intervals_count = len(activity.intervals or [])
if intervals_count > 0:

View File

@@ -2,7 +2,7 @@ import uuid
from datetime import datetime
from sqlalchemy import String, Float, Integer, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import Mapped, mapped_column, relationship
from backend.app.core.database import Base
@@ -20,6 +20,12 @@ class Activity(Base):
distance: Mapped[float | None] = mapped_column(Float, nullable=True) # meters
elevation_gain: Mapped[float | None] = mapped_column(Float, nullable=True) # meters
file_path: Mapped[str | None] = mapped_column(String(500), nullable=True)
exercise_sets: Mapped[list | None] = mapped_column(JSONB, nullable=True) # [{exercise_name, reps, weight, duration}]
# Link to training plan workout
training_plan_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("training_plans.id"), nullable=True)
plan_week: Mapped[int | None] = mapped_column(Integer, nullable=True)
plan_day: Mapped[str | None] = mapped_column(String(20), nullable=True) # monday, tuesday, ...
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

View File

@@ -1,7 +1,7 @@
from datetime import datetime
from uuid import UUID
from pydantic import BaseModel
from pydantic import BaseModel, Field
class ActivityMetricsResponse(BaseModel):
@@ -45,6 +45,10 @@ class ActivityResponse(BaseModel):
elevation_gain: float | None = None
metrics: ActivityMetricsResponse | None = None
intervals: list[IntervalResponse] = []
exercise_sets: list[dict] | None = None
training_plan_id: UUID | None = None
plan_week: int | None = None
plan_day: str | None = None
class ActivityListResponse(BaseModel):

View File

@@ -373,7 +373,7 @@ async def get_today_workout(rider: Rider, session: AsyncSession) -> dict | None:
async def calculate_compliance(plan: TrainingPlan, session: AsyncSession) -> list[dict]:
"""Compare planned vs actual per week."""
"""Compare planned vs actual per week, matching linked activities to specific days."""
if not plan.weeks_json:
return []
@@ -385,19 +385,33 @@ async def calculate_compliance(plan: TrainingPlan, session: AsyncSession) -> lis
week_start = plan.start_date + timedelta(weeks=week_num - 1)
week_end = week_start + timedelta(days=7)
planned_days = [d for d in week.get("days", []) if d.get("workout_type") != "rest"]
planned_rides = len(planned_days)
planned_tss = week.get("target_tss", 0)
# Skip future weeks
if week_start > date.today():
results.append({
"week_number": week_num,
"focus": week.get("focus", ""),
"planned_tss": week.get("target_tss", 0),
"planned_tss": planned_tss,
"actual_tss": 0,
"planned_hours": week.get("target_hours", 0),
"actual_hours": 0,
"planned_rides": sum(1 for d in week.get("days", []) if d.get("workout_type") != "rest"),
"planned_rides": planned_rides,
"actual_rides": 0,
"adherence_pct": 0,
"status": "upcoming",
"days": [
{
"day": d.get("day"),
"planned": d.get("title", d.get("workout_type")),
"workout_type": d.get("workout_type"),
"activity_id": None,
"completed": False,
}
for d in week.get("days", [])
],
})
continue
@@ -415,12 +429,35 @@ async def calculate_compliance(plan: TrainingPlan, session: AsyncSession) -> lis
actual_tss = sum(float(r.tss or 0) for r in acts)
actual_hours = sum(r[0].duration for r in acts) / 3600
actual_rides = len(acts)
planned_rides = sum(1 for d in week.get("days", []) if d.get("workout_type") != "rest")
planned_tss = week.get("target_tss", 0)
# Build per-day status: match linked activities to planned days
linked_activities = {
a[0].plan_day: a[0]
for a in acts
if a[0].training_plan_id == plan.id and a[0].plan_week == week_num and a[0].plan_day
}
day_statuses = []
completed_workouts = 0
for d in week.get("days", []):
day_name = d.get("day")
is_rest = d.get("workout_type") == "rest"
linked = linked_activities.get(day_name)
completed = linked is not None and not is_rest
if completed:
completed_workouts += 1
day_statuses.append({
"day": day_name,
"planned": d.get("title", d.get("workout_type")),
"workout_type": d.get("workout_type"),
"activity_id": str(linked.id) if linked else None,
"completed": completed,
})
adherence = 0
if planned_rides > 0:
adherence = min(100, round(actual_rides / planned_rides * 100))
adherence = min(100, round(completed_workouts / planned_rides * 100))
is_current = week_start <= date.today() < week_end
@@ -435,11 +472,52 @@ async def calculate_compliance(plan: TrainingPlan, session: AsyncSession) -> lis
"actual_rides": actual_rides,
"adherence_pct": adherence,
"status": "current" if is_current else "completed",
"days": day_statuses,
})
return results
async def link_activity_to_plan(
    activity,
    rider_id,
    session: AsyncSession,
) -> None:
    """Auto-link an activity to the rider's active training plan by date.

    Loads the most recent active plan, finds the planned (non-rest) workout
    on the activity's calendar day, and stamps training_plan_id, plan_week
    and plan_day onto the activity. Silently does nothing when there is no
    active plan, the date falls outside the plan, or the day is a rest day.
    """
    stmt = (
        select(TrainingPlan)
        .where(TrainingPlan.rider_id == rider_id)
        .where(TrainingPlan.status == "active")
        .order_by(TrainingPlan.created_at.desc())
        .limit(1)
    )
    active_plan = (await session.execute(stmt)).scalar_one_or_none()
    if active_plan is None or not active_plan.weeks_json:
        return

    # activity.date may be a datetime or a plain date — normalize to date.
    when = activity.date
    activity_day = when.date() if hasattr(when, "date") else when
    if not (active_plan.start_date <= activity_day <= active_plan.end_date):
        return

    target_week = (activity_day - active_plan.start_date).days // 7 + 1
    weekday_name = activity_day.strftime("%A").lower()

    for week in active_plan.weeks_json.get("weeks", []):
        if week.get("week_number") != target_week:
            continue
        for day in week.get("days", []):
            if day.get("day") == weekday_name and day.get("workout_type") != "rest":
                activity.training_plan_id = active_plan.id
                activity.plan_week = target_week
                activity.plan_day = weekday_name
                return
def _extract_json(text: str) -> dict | None:
"""Extract JSON from AI response text."""
# Try to find JSON in code blocks

View File

@@ -7,14 +7,30 @@ import fitdecode
from backend.app.models.activity import Activity, DataPoint
# Map FIT sport enum values to human-readable names
SPORT_NAMES = {
"cycling": "Cycling",
"running": "Running",
"swimming": "Swimming",
"training": "Strength",
"strength_training": "Strength",
"cardio_training": "Cardio",
"walking": "Walking",
"hiking": "Hiking",
"generic": "Workout",
}
def parse_fit_file(
file_content: bytes,
rider_id: uuid.UUID,
file_path: str,
) -> tuple[Activity, list[DataPoint]]:
"""Parse a .FIT file and return an Activity with its DataPoints."""
) -> tuple[Activity, list[DataPoint], list[dict]]:
"""Parse a .FIT file and return an Activity with its DataPoints and exercise sets."""
data_points: list[DataPoint] = []
session_data: dict = {}
exercise_sets: list[dict] = []
current_exercise: str | None = None
with fitdecode.FitReader(BytesIO(file_content)) as fit:
for frame in fit:
@@ -29,14 +45,43 @@ def parse_fit_file(
elif frame.name == "session":
session_data = _parse_session(frame)
start_time = data_points[0].timestamp if data_points else datetime.now(timezone.utc)
end_time = data_points[-1].timestamp if data_points else start_time
duration = int((end_time - start_time).total_seconds()) if data_points else 0
elif frame.name == "workout":
# Workout-level info (name etc.)
wkt_name = _get_field_str(frame, "wkt_name")
if wkt_name:
session_data.setdefault("workout_name", wkt_name)
elif frame.name == "exercise_title":
title = _get_field_str(frame, "exercise_name")
if title:
current_exercise = title
elif frame.name == "set":
ex_set = _parse_set(frame, current_exercise)
if ex_set:
exercise_sets.append(ex_set)
# Determine timing
sport = session_data.get("sport", "")
sport_name = SPORT_NAMES.get(sport.lower() if sport else "", sport or "Workout")
if data_points:
start_time = data_points[0].timestamp
end_time = data_points[-1].timestamp
duration = int((end_time - start_time).total_seconds())
else:
# No record data (strength, etc.) — use session timestamps
start_time = session_data.get("start_time") or datetime.now(timezone.utc)
elapsed = session_data.get("total_elapsed_time")
duration = int(elapsed) if elapsed else 0
end_time = start_time
name = session_data.get("workout_name") or sport_name
activity = Activity(
rider_id=rider_id,
name=session_data.get("sport", "Ride"),
activity_type=session_data.get("sub_sport", "road"),
name=name,
activity_type=session_data.get("sub_sport") or sport or "generic",
date=start_time,
duration=duration,
distance=session_data.get("total_distance"),
@@ -44,7 +89,7 @@ def parse_fit_file(
file_path=file_path,
)
return activity, data_points
return activity, data_points, exercise_sets
def _parse_record(frame: fitdecode.FitDataMessage) -> DataPoint | None:
@@ -71,12 +116,44 @@ def _parse_record(frame: fitdecode.FitDataMessage) -> DataPoint | None:
def _parse_session(frame: fitdecode.FitDataMessage) -> dict:
    """Extract session-level fields from a FIT 'session' message."""
    started = _get_field(frame, "start_time")
    # FIT start_time may come back naive; normalize to UTC so downstream
    # datetime arithmetic never mixes naive and aware values.
    if isinstance(started, datetime) and started.tzinfo is None:
        started = started.replace(tzinfo=timezone.utc)

    session: dict = {
        "sport": _get_field_str(frame, "sport"),
        "sub_sport": _get_field_str(frame, "sub_sport"),
    }
    for field_name in (
        "total_distance",
        "total_ascent",
        "total_elapsed_time",
        "total_calories",
        "avg_heart_rate",
        "max_heart_rate",
    ):
        session[field_name] = _get_field(frame, field_name)
    session["start_time"] = started
    return session
def _parse_set(frame: fitdecode.FitDataMessage, exercise_name: str | None) -> dict | None:
    """Parse one 'set' message from a strength/cardio workout.

    Returns None for rest sets, otherwise a JSON-serializable dict with the
    exercise name (falling back to the current exercise title, then the
    category, then "Unknown"), reps, weight, duration and ISO start time.
    """
    kind = _get_field_str(frame, "set_type")
    # set_type: 0=active, 1=rest — rest sets carry no training data.
    if kind is not None and str(kind) in ("1", "rest"):
        return None

    reps = _get_field(frame, "repetitions")
    load = _get_field(frame, "weight")
    elapsed = _get_field(frame, "duration")
    begun = _get_field(frame, "start_time") or _get_field(frame, "timestamp")
    category = _get_field_str(frame, "exercise_category")
    title = _get_field_str(frame, "exercise_name")

    return {
        "exercise_name": title or exercise_name or category or "Unknown",
        "exercise_category": category,
        "repetitions": None if reps is None else int(reps),
        "weight": None if load is None else round(float(load), 1),
        "duration": None if elapsed is None else round(float(elapsed), 1),
        "start_time": begun.isoformat() if isinstance(begun, datetime) else None,
    }