init
This commit is contained in:
0
backend/app/services/__init__.py
Normal file
0
backend/app/services/__init__.py
Normal file
102
backend/app/services/fit_parser.py
Normal file
102
backend/app/services/fit_parser.py
Normal file
@@ -0,0 +1,102 @@
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from io import BytesIO
|
||||
|
||||
import fitdecode
|
||||
|
||||
from backend.app.models.activity import Activity, DataPoint
|
||||
|
||||
|
||||
def parse_fit_file(
    file_content: bytes,
    rider_id: uuid.UUID,
    file_path: str,
) -> tuple[Activity, list[DataPoint]]:
    """Parse a .FIT file and return an Activity with its DataPoints.

    Args:
        file_content: Raw bytes of the uploaded .FIT file.
        rider_id: Owner of the resulting activity.
        file_path: Storage path recorded on the Activity.

    Returns:
        A tuple of the (unsaved) Activity and its parsed DataPoints.
    """
    data_points: list[DataPoint] = []
    session_data: dict = {}

    with fitdecode.FitReader(BytesIO(file_content)) as fit:
        for frame in fit:
            if not isinstance(frame, fitdecode.FitDataMessage):
                continue

            if frame.name == "record":
                dp = _parse_record(frame)
                if dp:
                    data_points.append(dp)
            elif frame.name == "session":
                session_data = _parse_session(frame)

    # Derive timing from the record stream; fall back to "now" for files
    # with no usable records so the Activity is still constructible.
    start_time = data_points[0].timestamp if data_points else datetime.now(timezone.utc)
    end_time = data_points[-1].timestamp if data_points else start_time
    duration = int((end_time - start_time).total_seconds())
    if duration == 0:
        # Fix: a file with a session summary but no (or a single) record
        # previously reported duration 0 — use the session's elapsed time.
        elapsed = session_data.get("total_elapsed_time")
        if elapsed:
            duration = int(elapsed)

    activity = Activity(
        rider_id=rider_id,
        name=session_data.get("sport", "Ride"),
        activity_type=session_data.get("sub_sport", "road"),
        date=start_time,
        duration=duration,
        distance=session_data.get("total_distance"),
        elevation_gain=session_data.get("total_ascent"),
        file_path=file_path,
    )

    return activity, data_points
|
||||
|
||||
|
||||
def _parse_record(frame: fitdecode.FitDataMessage) -> DataPoint | None:
    """Build a DataPoint from a FIT "record" message.

    Returns None when the record carries no timestamp, since a sample
    without a time axis cannot be placed in the activity stream.
    """
    ts = _get_field(frame, "timestamp")
    if not ts:
        return None

    # FIT timestamps are UTC; attach the zone when the decoder left it naive.
    if isinstance(ts, datetime) and ts.tzinfo is None:
        ts = ts.replace(tzinfo=timezone.utc)

    lat = _semicircles_to_degrees(_get_field(frame, "position_lat"))
    lon = _semicircles_to_degrees(_get_field(frame, "position_long"))

    return DataPoint(
        timestamp=ts,
        power=_get_field(frame, "power"),
        heart_rate=_get_field(frame, "heart_rate"),
        cadence=_get_field(frame, "cadence"),
        speed=_get_field(frame, "speed"),
        latitude=lat,
        longitude=lon,
        altitude=_get_field(frame, "altitude"),
        temperature=_get_field(frame, "temperature"),
    )
|
||||
|
||||
|
||||
def _parse_session(frame: fitdecode.FitDataMessage) -> dict:
    """Extract the summary fields this app uses from a FIT "session" message."""
    numeric_fields = ("total_distance", "total_ascent", "total_elapsed_time")
    summary = {name: _get_field(frame, name) for name in numeric_fields}
    # Sport fields may decode as enums; store their string form.
    summary["sport"] = _get_field_str(frame, "sport")
    summary["sub_sport"] = _get_field_str(frame, "sub_sport")
    return summary
|
||||
|
||||
|
||||
def _get_field(frame: fitdecode.FitDataMessage, name: str):
|
||||
"""Safely get a field value from a FIT frame."""
|
||||
try:
|
||||
field = frame.get_field(name)
|
||||
return field.value if field else None
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
|
||||
def _get_field_str(frame: fitdecode.FitDataMessage, name: str) -> str | None:
    """Return the field's value coerced to str, or None when missing."""
    value = _get_field(frame, name)
    if value is None:
        return None
    return str(value)
|
||||
|
||||
|
||||
def _semicircles_to_degrees(semicircles: int | None) -> float | None:
|
||||
"""Convert Garmin semicircles to decimal degrees."""
|
||||
if semicircles is None:
|
||||
return None
|
||||
return semicircles * (180.0 / 2**31)
|
||||
126
backend/app/services/gemini_client.py
Normal file
126
backend/app/services/gemini_client.py
Normal file
@@ -0,0 +1,126 @@
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
|
||||
from backend.app.core.config import settings
|
||||
|
||||
# Lazily-created Gemini client, shared process-wide.
_client: genai.Client | None = None


def get_client() -> genai.Client:
    """Return the shared Gemini client, constructing it on first use."""
    global _client
    if _client is not None:
        return _client
    _client = genai.Client(api_key=settings.GEMINI_API_KEY)
    return _client
|
||||
|
||||
|
||||
def chat_sync(
    messages: list[dict[str, str]],
    system_instruction: str | None = None,
    temperature: float = 0.7,
    max_tokens: int = 8192,
) -> str:
    """
    Synchronous chat with Gemini.

    messages: list of {"role": "user"|"model", "text": "..."}
    Returns the model's text response (empty string when the model
    returned no text).
    """
    contents = []
    for message in messages:
        part = types.Part.from_text(text=message["text"])
        contents.append(types.Content(role=message["role"], parts=[part]))

    config = types.GenerateContentConfig(
        temperature=temperature,
        max_output_tokens=max_tokens,
    )
    if system_instruction:
        config.system_instruction = system_instruction

    response = get_client().models.generate_content(
        model=settings.GEMINI_MODEL,
        contents=contents,
        config=config,
    )
    return response.text or ""
|
||||
|
||||
|
||||
async def chat_async(
    messages: list[dict[str, str]],
    system_instruction: str | None = None,
    temperature: float = 0.7,
    max_tokens: int = 8192,
) -> str:
    """
    Async chat with Gemini.

    messages: list of {"role": "user"|"model", "text": "..."}
    Returns the model's text response (empty string when the model
    returned no text).
    """
    contents = []
    for message in messages:
        part = types.Part.from_text(text=message["text"])
        contents.append(types.Content(role=message["role"], parts=[part]))

    config = types.GenerateContentConfig(
        temperature=temperature,
        max_output_tokens=max_tokens,
    )
    if system_instruction:
        config.system_instruction = system_instruction

    response = await get_client().aio.models.generate_content(
        model=settings.GEMINI_MODEL,
        contents=contents,
        config=config,
    )
    return response.text or ""
|
||||
|
||||
|
||||
async def chat_stream(
    messages: list[dict[str, str]],
    system_instruction: str | None = None,
    temperature: float = 0.7,
    max_tokens: int = 8192,
):
    """
    Async streaming chat with Gemini. Yields text chunks.

    messages: list of {"role": "user"|"model", "text": "..."}
    """
    client = get_client()

    contents = [
        types.Content(
            role=m["role"],
            parts=[types.Part.from_text(text=m["text"])],
        )
        for m in messages
    ]

    config = types.GenerateContentConfig(
        temperature=temperature,
        max_output_tokens=max_tokens,
    )
    if system_instruction:
        config.system_instruction = system_instruction

    # Fix: the async streaming call returns an awaitable that resolves to
    # the async iterator — it must be awaited before `async for`, otherwise
    # iteration raises TypeError on the coroutine object.
    stream = await client.aio.models.generate_content_stream(
        model=settings.GEMINI_MODEL,
        contents=contents,
        config=config,
    )
    async for chunk in stream:
        if chunk.text:
            yield chunk.text
|
||||
83
backend/app/services/metrics.py
Normal file
83
backend/app/services/metrics.py
Normal file
@@ -0,0 +1,83 @@
|
||||
import uuid
|
||||
|
||||
import numpy as np
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from backend.app.models.activity import Activity, ActivityMetrics, DataPoint
|
||||
from backend.app.models.rider import Rider
|
||||
|
||||
|
||||
def calculate_metrics(
    data_points: list[DataPoint],
    activity: Activity,
    rider_id: uuid.UUID,
    session: AsyncSession,
) -> ActivityMetrics | None:
    """Calculate power-based metrics for an activity.

    Args:
        data_points: Parsed samples; any channel (power, HR, ...) may be absent.
        activity: The activity the metrics belong to (supplies activity_id).
        rider_id: Kept for interface compatibility; not read here.
        session: Kept for interface compatibility; not read here.

    Returns:
        An unsaved ActivityMetrics row, or None when there are no samples.
        IF and TSS are left as None — they require the rider's FTP and are
        filled in later by calculate_metrics_with_ftp.
    """
    if not data_points:
        return None

    powers = np.array([dp.power for dp in data_points if dp.power is not None], dtype=float)
    hrs = np.array([dp.heart_rate for dp in data_points if dp.heart_rate is not None], dtype=float)
    cadences = np.array([dp.cadence for dp in data_points if dp.cadence is not None], dtype=float)
    speeds = np.array([dp.speed for dp in data_points if dp.speed is not None], dtype=float)

    avg_power = float(np.mean(powers)) if len(powers) > 0 else None
    max_power = int(np.max(powers)) if len(powers) > 0 else None
    # NP needs a full 30-sample rolling window; fall back to the mean.
    np_value = _normalized_power(powers) if len(powers) >= 30 else avg_power

    avg_hr = int(np.mean(hrs)) if len(hrs) > 0 else None
    max_hr = int(np.max(hrs)) if len(hrs) > 0 else None
    avg_cadence = int(np.mean(cadences)) if len(cadences) > 0 else None
    avg_speed = float(np.mean(speeds)) if len(speeds) > 0 else None

    # IF and TSS require FTP — left None until an FTP is known.
    intensity_factor = None
    tss = None

    variability_index = None
    if np_value is not None and avg_power:  # avg_power truthiness guards div-by-zero
        variability_index = np_value / avg_power

    # Fix: use explicit `is not None` checks below so legitimate zero values
    # (e.g. an all-zero power trace) are stored as 0 instead of being
    # silently dropped to None by truthiness tests.
    return ActivityMetrics(
        activity_id=activity.id,
        tss=tss,
        normalized_power=round(np_value, 1) if np_value is not None else None,
        intensity_factor=intensity_factor,
        variability_index=round(variability_index, 2) if variability_index is not None else None,
        avg_power=round(avg_power, 1) if avg_power is not None else None,
        max_power=max_power,
        avg_hr=avg_hr,
        max_hr=max_hr,
        avg_cadence=avg_cadence,
        avg_speed=round(avg_speed, 2) if avg_speed is not None else None,
    )
|
||||
|
||||
|
||||
def calculate_metrics_with_ftp(
    metrics: ActivityMetrics,
    ftp: float,
    duration_seconds: int,
) -> ActivityMetrics:
    """Fill in the FTP-dependent values (IF, TSS) on *metrics* in place.

    Leaves the object untouched when normalized power is missing/zero or
    FTP is not positive. Returns the same metrics object.
    """
    np_watts = metrics.normalized_power
    if not np_watts or ftp <= 0:
        return metrics

    metrics.intensity_factor = round(np_watts / ftp, 2)
    # TSS = (seconds * NP * IF) / (FTP * 3600) * 100
    raw_tss = (duration_seconds * np_watts * metrics.intensity_factor) / (ftp * 3600) * 100
    metrics.tss = round(raw_tss, 1)
    return metrics
|
||||
|
||||
|
||||
def _normalized_power(powers: np.ndarray) -> float:
|
||||
"""
|
||||
NP = 4th root of mean of 4th powers of 30s rolling average.
|
||||
"""
|
||||
if len(powers) < 30:
|
||||
return float(np.mean(powers))
|
||||
|
||||
rolling = np.convolve(powers, np.ones(30) / 30, mode="valid")
|
||||
return float(np.power(np.mean(np.power(rolling, 4)), 0.25))
|
||||
Reference in New Issue
Block a user