diff --git a/adapters/s3_adapter.py b/adapters/s3_adapter.py
index 28ba113..be99ce5 100644
--- a/adapters/s3_adapter.py
+++ b/adapters/s3_adapter.py
@@ -63,10 +63,12 @@ class S3Adapter:
response = await client.get_object(Bucket=self.bucket_name, Key=object_name)
# aioboto3 Body is an aiohttp StreamReader wrapper
body = response['Body']
- data = await body.read()
- # Yield in chunks to avoid holding entire response in StreamingResponse buffer
- for i in range(0, len(data), chunk_size):
- yield data[i:i + chunk_size]
+
+ while True:
+ chunk = await body.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
except ClientError as e:
print(f"Error streaming from S3: {e}")
return
diff --git a/aiws.py b/aiws.py
index 2cc7414..812b7bb 100644
--- a/aiws.py
+++ b/aiws.py
@@ -79,7 +79,7 @@ char_repo = CharacterRepo(mongo_client)
# S3 Adapter
s3_adapter = S3Adapter(
- endpoint_url=os.getenv("MINIO_ENDPOINT", "http://31.59.58.220:9000"),
+ endpoint_url=os.getenv("MINIO_ENDPOINT", "http://localhost:9000"),
aws_access_key_id=os.getenv("MINIO_ACCESS_KEY", "minioadmin"),
aws_secret_access_key=os.getenv("MINIO_SECRET_KEY", "minioadmin"),
bucket_name=os.getenv("MINIO_BUCKET", "ai-char")
diff --git a/api/endpoints/admin.py b/api/endpoints/admin.py
index 262712b..59f89d5 100644
--- a/api/endpoints/admin.py
+++ b/api/endpoints/admin.py
@@ -5,6 +5,8 @@ from fastapi.security import OAuth2PasswordBearer
from pydantic import BaseModel
from repos.user_repo import UsersRepo, UserStatus
+from api.dependency import get_dao
+from repos.dao import DAO
from utils.security import verify_password, create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES, ALGORITHM, SECRET_KEY
from jose import JWTError, jwt
from starlette.requests import Request
diff --git a/api/endpoints/assets_router.py b/api/endpoints/assets_router.py
index b858d3b..c6fa6ca 100644
--- a/api/endpoints/assets_router.py
+++ b/api/endpoints/assets_router.py
@@ -278,8 +278,7 @@ async def upload_asset(
type=asset.type.value if hasattr(asset.type, "value") else asset.type,
content_type=asset.content_type.value if hasattr(asset.content_type, "value") else asset.content_type,
linked_char_id=asset.linked_char_id,
- created_at=asset.created_at,
- url=asset.url
+ created_at=asset.created_at
)
diff --git a/api/endpoints/character_router.py b/api/endpoints/character_router.py
index c9e4394..766fe11 100644
--- a/api/endpoints/character_router.py
+++ b/api/endpoints/character_router.py
@@ -24,8 +24,15 @@ router = APIRouter(prefix="/api/characters", tags=["Characters"], dependencies=[
@router.get("/", response_model=List[Character])
-async def get_characters(request: Request, dao: DAO = Depends(get_dao), current_user: dict = Depends(get_current_user), project_id: Optional[str] = Depends(get_project_id)) -> List[Character]:
- logger.info("get_characters called")
+async def get_characters(
+ request: Request,
+ dao: DAO = Depends(get_dao),
+ current_user: dict = Depends(get_current_user),
+ project_id: Optional[str] = Depends(get_project_id),
+ limit: int = 100,
+ offset: int = 0
+) -> List[Character]:
+ logger.info(f"get_characters called. Limit: {limit}, Offset: {offset}")
user_id_filter = str(current_user["_id"])
if project_id:
@@ -34,7 +41,12 @@ async def get_characters(request: Request, dao: DAO = Depends(get_dao), current_
raise HTTPException(status_code=403, detail="Project access denied")
user_id_filter = None
- characters = await dao.chars.get_all_characters(created_by=user_id_filter, project_id=project_id)
+ characters = await dao.chars.get_all_characters(
+ created_by=user_id_filter,
+ project_id=project_id,
+ limit=limit,
+ offset=offset
+ )
return characters
@@ -178,10 +190,3 @@ async def delete_character(
raise HTTPException(status_code=500, detail="Failed to delete character")
return
-
-
-@router.post("/{character_id}/_run", response_model=GenerationResponse)
-async def post_character_generation(character_id: str, generation: GenerationRequest,
- request: Request) -> GenerationResponse:
- logger.info(f"post_character_generation called. CharacterID: {character_id}")
- generation_service = request.app.state.generation_service
diff --git a/api/endpoints/generation_router.py b/api/endpoints/generation_router.py
index 79f68bd..f035001 100644
--- a/api/endpoints/generation_router.py
+++ b/api/endpoints/generation_router.py
@@ -9,6 +9,7 @@ from api.dependency import get_generation_service, get_project_id, get_dao
from repos.dao import DAO
from api.models.GenerationRequest import GenerationResponse, GenerationRequest, GenerationsResponse, PromptResponse, PromptRequest, GenerationGroupResponse
+from api.models.FinancialUsageDTO import FinancialReport
from api.service.generation_service import GenerationService
from models.Generation import Generation
@@ -68,6 +69,47 @@ async def get_generations(character_id: Optional[str] = None, limit: int = 10, o
return await generation_service.get_generations(character_id, limit=limit, offset=offset, user_id=user_id_filter, project_id=project_id)
+@router.get("/usage", response_model=FinancialReport)
+async def get_usage_report(
+ breakdown: Optional[str] = None, # "user" or "project"
+ generation_service: GenerationService = Depends(get_generation_service),
+ current_user: dict = Depends(get_current_user),
+ project_id: Optional[str] = Depends(get_project_id),
+ dao: DAO = Depends(get_dao)
+) -> FinancialReport:
+ """
+ Returns usage statistics (runs, tokens, cost) for the current user or project.
+ If project_id is provided, returns stats for that project.
+ Otherwise, returns stats for the current user.
+ """
+ user_id_filter = str(current_user["_id"])
+ breakdown_by = None
+
+ if project_id:
+ # Permission check
+ project = await dao.projects.get_project(project_id)
+ if not project or str(current_user["_id"]) not in project.members:
+ raise HTTPException(status_code=403, detail="Project access denied")
+ user_id_filter = None # If we are in project, we see stats for the WHOLE project by default
+ if breakdown == "user":
+ breakdown_by = "created_by"
+ elif breakdown == "project":
+ breakdown_by = "project_id"
+ else:
+ # Default: Stats for current user
+ if breakdown == "project":
+ breakdown_by = "project_id"
+ elif breakdown == "user":
+ # This would breakdown personal usage by user (yourself), but could be useful if it included collaborators?
+ # No, if project_id is None, it's personal.
+ breakdown_by = "created_by"
+
+ return await generation_service.get_financial_report(
+ user_id=user_id_filter,
+ project_id=project_id,
+ breakdown_by=breakdown_by
+ )
+
@router.post("/_run", response_model=GenerationGroupResponse)
async def post_generation(generation: GenerationRequest, request: Request,
generation_service: GenerationService = Depends(get_generation_service),
diff --git a/api/endpoints/project_router.py b/api/endpoints/project_router.py
index 93c2d0f..96375f9 100644
--- a/api/endpoints/project_router.py
+++ b/api/endpoints/project_router.py
@@ -1,4 +1,6 @@
from typing import List, Optional
+
+from bson import ObjectId
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from api.dependency import get_dao
@@ -12,14 +14,46 @@ class ProjectCreate(BaseModel):
name: str
description: Optional[str] = None
+class ProjectMemberResponse(BaseModel):
+ id: str
+ username: str
+
class ProjectResponse(BaseModel):
id: str
name: str
description: Optional[str] = None
owner_id: str
- members: List[str]
+ members: List[ProjectMemberResponse]
is_owner: bool = False
+async def _get_project_response(project: Project, current_user_id: str, dao: DAO) -> ProjectResponse:
+ member_responses = []
+ for member_id in project.members:
+        # project.members holds user ids as strings: Mongo ObjectId hex for web
+        # users, or digit strings for legacy Telegram ids. ObjectId() raises
+        # bson.errors.InvalidId on non-hex input, so validate first — otherwise
+        # the Telegram fallback below would never be reached for digit ids.
+        user_doc = None
+        if ObjectId.is_valid(member_id):
+            user_doc = await dao.users.collection.find_one({"_id": ObjectId(member_id)})
+        if not user_doc and member_id.isdigit():
+            user_doc = await dao.users.get_user(int(member_id))
+
+ username = "unknown"
+ if user_doc:
+ username = user_doc.get("username", "unknown")
+
+ member_responses.append(ProjectMemberResponse(id=member_id, username=username))
+
+ return ProjectResponse(
+ id=project.id,
+ name=project.name,
+ description=project.description,
+ owner_id=project.owner_id,
+ members=member_responses,
+ is_owner=(project.owner_id == current_user_id)
+ )
+
@router.post("", response_model=ProjectResponse)
async def create_project(
project_data: ProjectCreate,
@@ -34,27 +68,15 @@ async def create_project(
members=[user_id]
)
project_id = await dao.projects.create_project(new_project)
+ new_project.id = project_id
# Add project to user's project list
- # Assuming user_repo has a method to add project or we do it directly?
- # UserRepo doesn't have add_project method yet.
- # But since UserRepo is just a wrapper around collection, lets add it here or update UserRepo later?
- # Better to update UserRepo. For now, let's just return success.
- # But user needs to see it in list.
- # Update user in DB
await dao.users.collection.update_one(
{"_id": current_user["_id"]},
{"$addToSet": {"project_ids": project_id}}
)
- return ProjectResponse(
- id=project_id,
- name=new_project.name,
- description=new_project.description,
- owner_id=new_project.owner_id,
- members=new_project.members,
- is_owner=True
- )
+ return await _get_project_response(new_project, user_id, dao)
@router.get("", response_model=List[ProjectResponse])
async def get_my_projects(
@@ -66,14 +88,7 @@ async def get_my_projects(
responses = []
for p in projects:
- responses.append(ProjectResponse(
- id=p.id,
- name=p.name,
- description=p.description,
- owner_id=p.owner_id,
- members=p.members,
- is_owner=(p.owner_id == user_id)
- ))
+ responses.append(await _get_project_response(p, user_id, dao))
return responses
class MemberAdd(BaseModel):
diff --git a/api/service/generation_service.py b/api/service/generation_service.py
index 3d1824e..8fc36aa 100644
--- a/api/service/generation_service.py
+++ b/api/service/generation_service.py
@@ -13,6 +13,7 @@ from aiogram.types import BufferedInputFile
from adapters.Exception import GoogleGenerationException
from adapters.google_adapter import GoogleAdapter
from api.models.GenerationRequest import GenerationRequest, GenerationResponse, GenerationsResponse, GenerationGroupResponse
+from api.models.FinancialUsageDTO import FinancialReport, UsageStats, UsageByEntity
# Импортируйте ваши модели DAO, Asset, Generation корректно
from models.Asset import Asset, AssetType, AssetContentType
from models.Generation import Generation, GenerationStatus
@@ -506,4 +507,29 @@ class GenerationService:
logger.info(f"Purged {purged} assets (soft-deleted + S3 files removed).")
except Exception as e:
- logger.error(f"Error during old data cleanup: {e}")
\ No newline at end of file
+ logger.error(f"Error during old data cleanup: {e}")
+
+ async def get_financial_report(self, user_id: Optional[str] = None, project_id: Optional[str] = None, breakdown_by: Optional[str] = None) -> FinancialReport:
+ """
+ Generates a financial usage report for a specific user or project.
+ 'breakdown_by' can be 'created_by' or 'project_id'.
+ """
+ summary_data = await self.dao.generations.get_usage_stats(created_by=user_id, project_id=project_id)
+ summary = UsageStats(**summary_data)
+
+ by_user = None
+ by_project = None
+
+ if breakdown_by == "created_by":
+ res = await self.dao.generations.get_usage_breakdown(group_by="created_by", project_id=project_id, created_by=user_id)
+ by_user = [UsageByEntity(**item) for item in res]
+
+ if breakdown_by == "project_id":
+ res = await self.dao.generations.get_usage_breakdown(group_by="project_id", project_id=project_id, created_by=user_id)
+ by_project = [UsageByEntity(**item) for item in res]
+
+ return FinancialReport(
+ summary=summary,
+ by_user=by_user,
+ by_project=by_project
+        )
diff --git a/middlewares/album.py b/middlewares/album.py
index fbc12a6..270a4ee 100644
--- a/middlewares/album.py
+++ b/middlewares/album.py
@@ -27,19 +27,19 @@ class AlbumMiddleware(BaseMiddleware):
# Ждем сбора остальных частей
await asyncio.sleep(self.latency)
- # Проверяем, что ключ все еще существует (на всякий случай)
+ # Проверяем, что ключ все еще существует
if group_id in self.album_data:
# Передаем собранный альбом в хендлер
# Сортируем по message_id, чтобы порядок был верным
- self.album_data[group_id].sort(key=lambda x: x.message_id)
- data["album"] = self.album_data[group_id]
+ current_album = self.album_data[group_id]
+ current_album.sort(key=lambda x: x.message_id)
+ data["album"] = current_album
return await handler(event, data)
finally:
- # ЧИСТКА: Удаляем всегда, если это "головной" поток, который создал запись
- # Проверяем, что мы удаляем именно то, что создали, и ключ существует
- if group_id in self.album_data and self.album_data[group_id][0] == event:
- del self.album_data[group_id]
+ # ЧИСТКА: Удаляем запись после обработки или таймаута
+ # Используем pop() с дефолтом, чтобы избежать KeyError
+ self.album_data.pop(group_id, None)
else:
# Если группа уже собирается - просто добавляем и выходим
diff --git a/models/Asset.py b/models/Asset.py
index 6c4d3dc..af00ef4 100644
--- a/models/Asset.py
+++ b/models/Asset.py
@@ -63,6 +63,7 @@ class Asset(BaseModel):
# --- CALCULATED FIELD ---
@computed_field
+ @property
def url(self) -> str:
"""
Это поле автоматически вычислится и попадет в model_dump() / .json()
diff --git a/models/Character.py b/models/Character.py
index 2af8076..f174659 100644
--- a/models/Character.py
+++ b/models/Character.py
@@ -9,7 +9,6 @@ class Character(BaseModel):
name: str
avatar_asset_id: Optional[str] = None
avatar_image: Optional[str] = None
- character_image_data: Optional[bytes] = None
character_image_doc_tg_id: Optional[str] = None
character_image_tg_id: Optional[str] = None
character_bio: Optional[str] = None
diff --git a/repos/assets_repo.py b/repos/assets_repo.py
index 93e7005..07ae690 100644
--- a/repos/assets_repo.py
+++ b/repos/assets_repo.py
@@ -175,6 +175,8 @@ class AssetsRepo:
filter["linked_char_id"] = character_id
if created_by:
filter["created_by"] = created_by
+        if project_id is None:  # personal scope: explicitly match only docs with no project
+ filter["project_id"] = None
if project_id:
filter["project_id"] = project_id
return await self.collection.count_documents(filter)
diff --git a/repos/char_repo.py b/repos/char_repo.py
index e28e2a5..70df197 100644
--- a/repos/char_repo.py
+++ b/repos/char_repo.py
@@ -15,26 +15,24 @@ class CharacterRepo:
character.id = str(op.inserted_id)
return character
- async def get_character(self, character_id: str, with_image_data: bool = False) -> Character | None:
- args = {}
- if not with_image_data:
- args["character_image_data"] = 0
- res = await self.collection.find_one({"_id": ObjectId(character_id)}, args)
+ async def get_character(self, character_id: str) -> Character | None:
+ res = await self.collection.find_one({"_id": ObjectId(character_id)})
if res is None:
return None
else:
res["id"] = str(res.pop("_id"))
return Character(**res)
- async def get_all_characters(self, created_by: Optional[str] = None, project_id: Optional[str] = None) -> List[Character]:
+ async def get_all_characters(self, created_by: Optional[str] = None, project_id: Optional[str] = None, limit: int = 100, offset: int = 0) -> List[Character]:
filter = {}
if created_by:
filter["created_by"] = created_by
+        if project_id is None:  # personal scope: explicitly match only docs with no project
+ filter["project_id"] = None
if project_id:
filter["project_id"] = project_id
- args = {"character_image_data": 0} # don't return image data for list
- res = await self.collection.find(filter, args).to_list(None)
+ res = await self.collection.find(filter).skip(offset).limit(limit).to_list(None)
chars = []
for doc in res:
doc["id"] = str(doc.pop("_id"))
diff --git a/repos/generation_repo.py b/repos/generation_repo.py
index 5132eaa..c3165e6 100644
--- a/repos/generation_repo.py
+++ b/repos/generation_repo.py
@@ -65,6 +65,8 @@ class GenerationRepo:
args["status"] = status
if created_by is not None:
args["created_by"] = created_by
+        if project_id is None:  # personal scope: explicitly match only docs with no project
+ args["project_id"] = None
if project_id is not None:
args["project_id"] = project_id
if idea_id is not None:
@@ -92,6 +94,121 @@ class GenerationRepo:
async def update_generation(self, generation: Generation, ):
res = await self.collection.update_one({"_id": ObjectId(generation.id)}, {"$set": generation.model_dump()})
+ async def get_usage_stats(self, created_by: Optional[str] = None, project_id: Optional[str] = None) -> dict:
+ """
+ Calculates usage statistics (runs, tokens, cost) using MongoDB aggregation.
+ """
+ pipeline = []
+
+ # 1. Match active done generations
+        match_stage = {"is_deleted": False, "status": GenerationStatus.DONE}  # assumes GenerationStatus is a str-Enum matching the stored "done" (idea_repo uses the raw string) — TODO confirm
+ if created_by:
+ match_stage["created_by"] = created_by
+ if project_id:
+ match_stage["project_id"] = project_id
+
+ pipeline.append({"$match": match_stage})
+
+ # 2. Group by null (total)
+ pipeline.append({
+ "$group": {
+ "_id": None,
+ "total_runs": {"$sum": 1},
+ "total_tokens": {
+ "$sum": {
+ "$cond": [
+ {"$and": [{"$gt": ["$input_token_usage", 0]}, {"$gt": ["$output_token_usage", 0]}]},
+ {"$add": ["$input_token_usage", "$output_token_usage"]},
+ {"$ifNull": ["$token_usage", 0]}
+ ]
+ }
+ },
+ "total_input_tokens": {"$sum": {"$ifNull": ["$input_token_usage", 0]}},
+ "total_output_tokens": {"$sum": {"$ifNull": ["$output_token_usage", 0]}},
+ "total_cost": {
+ "$sum": {
+ "$add": [
+ {"$multiply": [{"$ifNull": ["$input_token_usage", 0]}, 0.000002]},
+ {"$multiply": [{"$ifNull": ["$output_token_usage", 0]}, 0.00012]}
+ ]
+ }
+ }
+ }
+ })
+
+ cursor = self.collection.aggregate(pipeline)
+ res = await cursor.to_list(1)
+
+ if not res:
+ return {
+ "total_runs": 0,
+ "total_tokens": 0,
+ "total_input_tokens": 0,
+ "total_output_tokens": 0,
+ "total_cost": 0.0
+ }
+
+ result = res[0]
+ result.pop("_id")
+ result["total_cost"] = round(result["total_cost"], 4)
+ return result
+
+ async def get_usage_breakdown(self, group_by: str = "created_by", project_id: Optional[str] = None, created_by: Optional[str] = None) -> List[dict]:
+ """
+ Returns usage statistics grouped by user or project.
+ """
+ pipeline = []
+
+        match_stage = {"is_deleted": False, "status": GenerationStatus.DONE}  # assumes GenerationStatus is a str-Enum matching the stored "done" (idea_repo uses the raw string) — TODO confirm
+ if project_id:
+ match_stage["project_id"] = project_id
+ if created_by:
+ match_stage["created_by"] = created_by
+
+ pipeline.append({"$match": match_stage})
+
+ pipeline.append({
+ "$group": {
+ "_id": f"${group_by}",
+ "total_runs": {"$sum": 1},
+ "total_tokens": {
+ "$sum": {
+ "$cond": [
+ {"$and": [{"$gt": ["$input_token_usage", 0]}, {"$gt": ["$output_token_usage", 0]}]},
+ {"$add": ["$input_token_usage", "$output_token_usage"]},
+ {"$ifNull": ["$token_usage", 0]}
+ ]
+ }
+ },
+ "total_input_tokens": {"$sum": {"$ifNull": ["$input_token_usage", 0]}},
+ "total_output_tokens": {"$sum": {"$ifNull": ["$output_token_usage", 0]}},
+ "total_cost": {
+ "$sum": {
+ "$add": [
+ {"$multiply": [{"$ifNull": ["$input_token_usage", 0]}, 0.000002]},
+ {"$multiply": [{"$ifNull": ["$output_token_usage", 0]}, 0.00012]}
+ ]
+ }
+ }
+ }
+ })
+
+ pipeline.append({"$sort": {"total_cost": -1}})
+
+ cursor = self.collection.aggregate(pipeline)
+ res = await cursor.to_list(None)
+
+ results = []
+ for item in res:
+ entity_id = item.pop("_id")
+ item["total_cost"] = round(item["total_cost"], 4)
+ results.append({
+ "entity_id": str(entity_id) if entity_id else "unknown",
+ "stats": item
+ })
+
+ return results
+
async def get_generations_by_group(self, group_id: str) -> List[Generation]:
res = await self.collection.find({"generation_group_id": group_id, "is_deleted": False}).sort("created_at", 1).to_list(None)
generations: List[Generation] = []
diff --git a/repos/idea_repo.py b/repos/idea_repo.py
index 4271018..5698261 100644
--- a/repos/idea_repo.py
+++ b/repos/idea_repo.py
@@ -39,8 +39,17 @@ class IdeaRepo:
"from": "generations",
"let": {"idea_id": "$str_id"},
"pipeline": [
- {"$match": {"$expr": {"$eq": ["$idea_id", "$$idea_id"]}}},
- {"$sort": {"created_at": -1}}, # Ensure we get the latest
+ {
+ "$match": {
+ "$and": [
+ {"$expr": {"$eq": ["$idea_id", "$$idea_id"]}},
+ {"status": "done"},
+ {"result_list": {"$exists": True, "$not": {"$size": 0}}},
+ {"is_deleted": False}
+ ]
+ }
+ },
+ {"$sort": {"created_at": -1}}, # Ensure we get the latest successful
{"$limit": 1}
],
"as": "generations"
diff --git a/routers/char_router.py b/routers/char_router.py
index a408e33..0e1b8fc 100644
--- a/routers/char_router.py
+++ b/routers/char_router.py
@@ -51,57 +51,66 @@ async def new_char_bio(message: Message, state: FSMContext, dao: DAO, bot: Bot):
wait_msg = await message.answer("💾 Сохраняю персонажа...")
try:
- # ВОТ ТУТ скачиваем файл (прямо перед сохранением)
+ # 1. Скачиваем файл (один раз)
+ # TODO: Для больших файлов лучше использовать streaming или сохранять во временный файл
file_io = await bot.download(file_id)
- # photo_bytes = file_io.getvalue() # Получаем байты
-
-
- # Создаем модель
+ file_bytes = file_io.read()
+
+ # 2. Создаем Character (сначала без ассета, чтобы получить ID)
char = Character(
id=None,
name=name,
- character_image_data=file_io.read(),
character_image_tg_id=None,
character_image_doc_tg_id=file_id,
character_bio=bio,
created_by=str(message.from_user.id)
)
- file_io.close()
-
- # Сохраняем через DAO
-
+
+ # Сохраняем, чтобы получить ID
await dao.chars.add_character(char)
- file_info = await bot.get_file(char.character_image_doc_tg_id)
- file_bytes = await bot.download_file(file_info.file_path)
- file_io = file_bytes.read()
- avatar_asset = await dao.assets.create_asset(
- Asset(name="avatar.png", type=AssetType.UPLOADED, content_type=AssetContentType.IMAGE, linked_char_id=str(char.id), data=file_io,
- tg_doc_file_id=file_id))
- char.avatar_image = avatar_asset.link
+
+ # 3. Создаем Asset (связанный с персонажем)
+        avatar_asset_id = await dao.assets.create_asset(  # NOTE(review): assumes create_asset returns the new asset id; the removed code treated its return as an Asset object — verify
+ Asset(
+ name="avatar.png",
+ type=AssetType.UPLOADED,
+ content_type=AssetContentType.IMAGE,
+ linked_char_id=str(char.id),
+ data=file_bytes,
+ tg_doc_file_id=file_id
+ )
+ )
+
+ # 4. Обновляем персонажа ссылками на ассет
+ char.avatar_asset_id = avatar_asset_id
+ char.avatar_image = f"/api/assets/{avatar_asset_id}" # Формируем ссылку вручную или используем метод, если появится
+
# Отправляем подтверждение
- # Используем байты для отправки обратно
photo_msg = await message.answer_photo(
- photo=BufferedInputFile(file_io,
- filename="char.jpg") if not char.character_image_tg_id else char.character_image_tg_id,
+ photo=BufferedInputFile(file_bytes, filename="char.jpg"),
caption=(
"🎉 Персонаж создан!\n\n"
f"👤 Имя: {char.name}\n"
f"📝 Био: {char.character_bio}"
)
)
- file_bytes.close()
- char.character_image_tg_id = photo_msg.photo[0].file_id
+
+ # Сохраняем TG ID фото (которое отправили как фото, а не документ)
+ char.character_image_tg_id = photo_msg.photo[-1].file_id
+ # Финальное обновление персонажа
await dao.chars.update_char(char.id, char)
+
await wait_msg.delete()
+        file_io.close()  # NOTE(review): skipped on the exception path — prefer try/finally or a context manager
# Сбрасываем состояние
await state.clear()
except Exception as e:
- logging.error(e)
+        # logger.exception() records the message plus the full traceback in one call
+        logger.exception("Error creating character")
await wait_msg.edit_text(f"❌ Ошибка при сохранении: {e}")
- # Не сбрасываем стейт, даем возможность попробовать ввести био снова или начать заново
@router.message(Command("chars"))