This commit is contained in:
xds
2026-02-18 16:35:04 +03:00
parent 198ac44960
commit 4586daac38
16 changed files with 309 additions and 83 deletions

View File

@@ -63,10 +63,12 @@ class S3Adapter:
response = await client.get_object(Bucket=self.bucket_name, Key=object_name) response = await client.get_object(Bucket=self.bucket_name, Key=object_name)
# aioboto3 Body is an aiohttp StreamReader wrapper # aioboto3 Body is an aiohttp StreamReader wrapper
body = response['Body'] body = response['Body']
data = await body.read()
# Yield in chunks to avoid holding entire response in StreamingResponse buffer while True:
for i in range(0, len(data), chunk_size): chunk = await body.read(chunk_size)
yield data[i:i + chunk_size] if not chunk:
break
yield chunk
except ClientError as e: except ClientError as e:
print(f"Error streaming from S3: {e}") print(f"Error streaming from S3: {e}")
return return

View File

@@ -79,7 +79,7 @@ char_repo = CharacterRepo(mongo_client)
# S3 Adapter # S3 Adapter
s3_adapter = S3Adapter( s3_adapter = S3Adapter(
endpoint_url=os.getenv("MINIO_ENDPOINT", "http://31.59.58.220:9000"), endpoint_url=os.getenv("MINIO_ENDPOINT", "http://localhost:9000"),
aws_access_key_id=os.getenv("MINIO_ACCESS_KEY", "minioadmin"), aws_access_key_id=os.getenv("MINIO_ACCESS_KEY", "minioadmin"),
aws_secret_access_key=os.getenv("MINIO_SECRET_KEY", "minioadmin"), aws_secret_access_key=os.getenv("MINIO_SECRET_KEY", "minioadmin"),
bucket_name=os.getenv("MINIO_BUCKET", "ai-char") bucket_name=os.getenv("MINIO_BUCKET", "ai-char")

View File

@@ -5,6 +5,8 @@ from fastapi.security import OAuth2PasswordBearer
from pydantic import BaseModel from pydantic import BaseModel
from repos.user_repo import UsersRepo, UserStatus from repos.user_repo import UsersRepo, UserStatus
from api.dependency import get_dao
from repos.dao import DAO
from utils.security import verify_password, create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES, ALGORITHM, SECRET_KEY from utils.security import verify_password, create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES, ALGORITHM, SECRET_KEY
from jose import JWTError, jwt from jose import JWTError, jwt
from starlette.requests import Request from starlette.requests import Request

View File

@@ -278,8 +278,7 @@ async def upload_asset(
type=asset.type.value if hasattr(asset.type, "value") else asset.type, type=asset.type.value if hasattr(asset.type, "value") else asset.type,
content_type=asset.content_type.value if hasattr(asset.content_type, "value") else asset.content_type, content_type=asset.content_type.value if hasattr(asset.content_type, "value") else asset.content_type,
linked_char_id=asset.linked_char_id, linked_char_id=asset.linked_char_id,
created_at=asset.created_at, created_at=asset.created_at
url=asset.url
) )

View File

@@ -24,8 +24,15 @@ router = APIRouter(prefix="/api/characters", tags=["Characters"], dependencies=[
@router.get("/", response_model=List[Character]) @router.get("/", response_model=List[Character])
async def get_characters(request: Request, dao: DAO = Depends(get_dao), current_user: dict = Depends(get_current_user), project_id: Optional[str] = Depends(get_project_id)) -> List[Character]: async def get_characters(
logger.info("get_characters called") request: Request,
dao: DAO = Depends(get_dao),
current_user: dict = Depends(get_current_user),
project_id: Optional[str] = Depends(get_project_id),
limit: int = 100,
offset: int = 0
) -> List[Character]:
logger.info(f"get_characters called. Limit: {limit}, Offset: {offset}")
user_id_filter = str(current_user["_id"]) user_id_filter = str(current_user["_id"])
if project_id: if project_id:
@@ -34,7 +41,12 @@ async def get_characters(request: Request, dao: DAO = Depends(get_dao), current_
raise HTTPException(status_code=403, detail="Project access denied") raise HTTPException(status_code=403, detail="Project access denied")
user_id_filter = None user_id_filter = None
characters = await dao.chars.get_all_characters(created_by=user_id_filter, project_id=project_id) characters = await dao.chars.get_all_characters(
created_by=user_id_filter,
project_id=project_id,
limit=limit,
offset=offset
)
return characters return characters
@@ -178,10 +190,3 @@ async def delete_character(
raise HTTPException(status_code=500, detail="Failed to delete character") raise HTTPException(status_code=500, detail="Failed to delete character")
return return
@router.post("/{character_id}/_run", response_model=GenerationResponse)
async def post_character_generation(character_id: str, generation: GenerationRequest,
request: Request) -> GenerationResponse:
logger.info(f"post_character_generation called. CharacterID: {character_id}")
generation_service = request.app.state.generation_service

View File

@@ -9,6 +9,7 @@ from api.dependency import get_generation_service, get_project_id, get_dao
from repos.dao import DAO from repos.dao import DAO
from api.models.GenerationRequest import GenerationResponse, GenerationRequest, GenerationsResponse, PromptResponse, PromptRequest, GenerationGroupResponse from api.models.GenerationRequest import GenerationResponse, GenerationRequest, GenerationsResponse, PromptResponse, PromptRequest, GenerationGroupResponse
from api.models.FinancialUsageDTO import FinancialReport
from api.service.generation_service import GenerationService from api.service.generation_service import GenerationService
from models.Generation import Generation from models.Generation import Generation
@@ -68,6 +69,47 @@ async def get_generations(character_id: Optional[str] = None, limit: int = 10, o
return await generation_service.get_generations(character_id, limit=limit, offset=offset, user_id=user_id_filter, project_id=project_id) return await generation_service.get_generations(character_id, limit=limit, offset=offset, user_id=user_id_filter, project_id=project_id)
@router.get("/usage", response_model=FinancialReport)
async def get_usage_report(
    breakdown: Optional[str] = None,  # "user" or "project"
    generation_service: GenerationService = Depends(get_generation_service),
    current_user: dict = Depends(get_current_user),
    project_id: Optional[str] = Depends(get_project_id),
    dao: DAO = Depends(get_dao)
) -> FinancialReport:
    """
    Return usage statistics (runs, tokens, cost) for the current user or project.

    If a project_id is resolved, the report covers the WHOLE project (after a
    membership check); otherwise it covers only the calling user. The optional
    ``breakdown`` query parameter ("user" or "project") adds a per-entity
    breakdown to the summary; any other value yields a summary-only report.
    """
    user_id_filter = str(current_user["_id"])
    if project_id:
        # Permission check: only members of the project may read its stats.
        project = await dao.projects.get_project(project_id)
        if not project or str(current_user["_id"]) not in project.members:
            raise HTTPException(status_code=403, detail="Project access denied")
        # Scoped to a project: drop the per-user filter so the whole
        # project's usage is aggregated.
        user_id_filter = None
    # Map the public query value to the Mongo field we group by. The mapping
    # is identical whether or not we are scoped to a project, so a single
    # lookup replaces the previously duplicated if/else branches.
    breakdown_by = {"user": "created_by", "project": "project_id"}.get(breakdown)
    return await generation_service.get_financial_report(
        user_id=user_id_filter,
        project_id=project_id,
        breakdown_by=breakdown_by
    )
@router.post("/_run", response_model=GenerationGroupResponse) @router.post("/_run", response_model=GenerationGroupResponse)
async def post_generation(generation: GenerationRequest, request: Request, async def post_generation(generation: GenerationRequest, request: Request,
generation_service: GenerationService = Depends(get_generation_service), generation_service: GenerationService = Depends(get_generation_service),

View File

@@ -1,4 +1,6 @@
from typing import List, Optional from typing import List, Optional
from bson import ObjectId
from fastapi import APIRouter, Depends, HTTPException, status from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel from pydantic import BaseModel
from api.dependency import get_dao from api.dependency import get_dao
@@ -12,14 +14,46 @@ class ProjectCreate(BaseModel):
name: str name: str
description: Optional[str] = None description: Optional[str] = None
class ProjectMemberResponse(BaseModel):
    # Lightweight view of one project member, embedded in ProjectResponse.
    # id is the member's user id as a string (Mongo ObjectId or Telegram id);
    # username is the resolved display name ("unknown" when lookup fails).
    id: str
    username: str
class ProjectResponse(BaseModel): class ProjectResponse(BaseModel):
id: str id: str
name: str name: str
description: Optional[str] = None description: Optional[str] = None
owner_id: str owner_id: str
members: List[str] members: List[ProjectMemberResponse]
is_owner: bool = False is_owner: bool = False
async def _get_project_response(project: Project, current_user_id: str, dao: DAO) -> ProjectResponse:
    """
    Build a ProjectResponse for *project*, resolving member ids to usernames.

    project.members holds user ids as strings: Mongo ObjectIds for web users,
    or all-digit Telegram ids. Members that cannot be resolved are kept in the
    list with username "unknown" rather than dropped.
    """
    member_responses = []
    for member_id in project.members:
        user_doc = None
        # Web users are stored under their Mongo _id. Guard with is_valid:
        # previously ObjectId(member_id) raised InvalidId for digit-string
        # Telegram ids, making the fallback below unreachable.
        if ObjectId.is_valid(member_id):
            user_doc = await dao.users.collection.find_one({"_id": ObjectId(member_id)})
        if not user_doc and member_id.isdigit():
            # Fallback for Telegram ids stored as strings of digits.
            # NOTE(review): assumes get_user returns a dict-like with .get —
            # confirm against UsersRepo.
            user_doc = await dao.users.get_user(int(member_id))
        username = user_doc.get("username", "unknown") if user_doc else "unknown"
        member_responses.append(ProjectMemberResponse(id=member_id, username=username))
    return ProjectResponse(
        id=project.id,
        name=project.name,
        description=project.description,
        owner_id=project.owner_id,
        members=member_responses,
        is_owner=(project.owner_id == current_user_id)
    )
@router.post("", response_model=ProjectResponse) @router.post("", response_model=ProjectResponse)
async def create_project( async def create_project(
project_data: ProjectCreate, project_data: ProjectCreate,
@@ -34,27 +68,15 @@ async def create_project(
members=[user_id] members=[user_id]
) )
project_id = await dao.projects.create_project(new_project) project_id = await dao.projects.create_project(new_project)
new_project.id = project_id
# Add project to user's project list # Add project to user's project list
# Assuming user_repo has a method to add project or we do it directly?
# UserRepo doesn't have add_project method yet.
# But since UserRepo is just a wrapper around collection, lets add it here or update UserRepo later?
# Better to update UserRepo. For now, let's just return success.
# But user needs to see it in list.
# Update user in DB
await dao.users.collection.update_one( await dao.users.collection.update_one(
{"_id": current_user["_id"]}, {"_id": current_user["_id"]},
{"$addToSet": {"project_ids": project_id}} {"$addToSet": {"project_ids": project_id}}
) )
return ProjectResponse( return await _get_project_response(new_project, user_id, dao)
id=project_id,
name=new_project.name,
description=new_project.description,
owner_id=new_project.owner_id,
members=new_project.members,
is_owner=True
)
@router.get("", response_model=List[ProjectResponse]) @router.get("", response_model=List[ProjectResponse])
async def get_my_projects( async def get_my_projects(
@@ -66,14 +88,7 @@ async def get_my_projects(
responses = [] responses = []
for p in projects: for p in projects:
responses.append(ProjectResponse( responses.append(await _get_project_response(p, user_id, dao))
id=p.id,
name=p.name,
description=p.description,
owner_id=p.owner_id,
members=p.members,
is_owner=(p.owner_id == user_id)
))
return responses return responses
class MemberAdd(BaseModel): class MemberAdd(BaseModel):

View File

@@ -13,6 +13,7 @@ from aiogram.types import BufferedInputFile
from adapters.Exception import GoogleGenerationException from adapters.Exception import GoogleGenerationException
from adapters.google_adapter import GoogleAdapter from adapters.google_adapter import GoogleAdapter
from api.models.GenerationRequest import GenerationRequest, GenerationResponse, GenerationsResponse, GenerationGroupResponse from api.models.GenerationRequest import GenerationRequest, GenerationResponse, GenerationsResponse, GenerationGroupResponse
from api.models.FinancialUsageDTO import FinancialReport, UsageStats, UsageByEntity
# Импортируйте ваши модели DAO, Asset, Generation корректно # Импортируйте ваши модели DAO, Asset, Generation корректно
from models.Asset import Asset, AssetType, AssetContentType from models.Asset import Asset, AssetType, AssetContentType
from models.Generation import Generation, GenerationStatus from models.Generation import Generation, GenerationStatus
@@ -507,3 +508,28 @@ class GenerationService:
except Exception as e: except Exception as e:
logger.error(f"Error during old data cleanup: {e}") logger.error(f"Error during old data cleanup: {e}")
async def get_financial_report(self, user_id: Optional[str] = None, project_id: Optional[str] = None, breakdown_by: Optional[str] = None) -> FinancialReport:
    """
    Build a financial usage report scoped to a user and/or project.

    'breakdown_by' selects an optional per-entity breakdown and must be
    "created_by" (per user) or "project_id" (per project); any other value
    produces a summary-only report.
    """
    repo = self.dao.generations
    totals = await repo.get_usage_stats(created_by=user_id, project_id=project_id)
    # Fetch the per-entity breakdown only when a recognized grouping is asked for.
    breakdown_rows = None
    if breakdown_by in ("created_by", "project_id"):
        raw = await repo.get_usage_breakdown(group_by=breakdown_by, project_id=project_id, created_by=user_id)
        breakdown_rows = [UsageByEntity(**row) for row in raw]
    return FinancialReport(
        summary=UsageStats(**totals),
        by_user=breakdown_rows if breakdown_by == "created_by" else None,
        by_project=breakdown_rows if breakdown_by == "project_id" else None,
    )

View File

@@ -27,19 +27,19 @@ class AlbumMiddleware(BaseMiddleware):
# Ждем сбора остальных частей # Ждем сбора остальных частей
await asyncio.sleep(self.latency) await asyncio.sleep(self.latency)
# Проверяем, что ключ все еще существует (на всякий случай) # Проверяем, что ключ все еще существует
if group_id in self.album_data: if group_id in self.album_data:
# Передаем собранный альбом в хендлер # Передаем собранный альбом в хендлер
# Сортируем по message_id, чтобы порядок был верным # Сортируем по message_id, чтобы порядок был верным
self.album_data[group_id].sort(key=lambda x: x.message_id) current_album = self.album_data[group_id]
data["album"] = self.album_data[group_id] current_album.sort(key=lambda x: x.message_id)
data["album"] = current_album
return await handler(event, data) return await handler(event, data)
finally: finally:
# ЧИСТКА: Удаляем всегда, если это "головной" поток, который создал запись # ЧИСТКА: Удаляем запись после обработки или таймаута
# Проверяем, что мы удаляем именно то, что создали, и ключ существует # Используем pop() с дефолтом, чтобы избежать KeyError
if group_id in self.album_data and self.album_data[group_id][0] == event: self.album_data.pop(group_id, None)
del self.album_data[group_id]
else: else:
# Если группа уже собирается - просто добавляем и выходим # Если группа уже собирается - просто добавляем и выходим

View File

@@ -63,6 +63,7 @@ class Asset(BaseModel):
# --- CALCULATED FIELD --- # --- CALCULATED FIELD ---
@computed_field @computed_field
@property
def url(self) -> str: def url(self) -> str:
""" """
Это поле автоматически вычислится и попадет в model_dump() / .json() Это поле автоматически вычислится и попадет в model_dump() / .json()

View File

@@ -9,7 +9,6 @@ class Character(BaseModel):
name: str name: str
avatar_asset_id: Optional[str] = None avatar_asset_id: Optional[str] = None
avatar_image: Optional[str] = None avatar_image: Optional[str] = None
character_image_data: Optional[bytes] = None
character_image_doc_tg_id: Optional[str] = None character_image_doc_tg_id: Optional[str] = None
character_image_tg_id: Optional[str] = None character_image_tg_id: Optional[str] = None
character_bio: Optional[str] = None character_bio: Optional[str] = None

View File

@@ -175,6 +175,8 @@ class AssetsRepo:
filter["linked_char_id"] = character_id filter["linked_char_id"] = character_id
if created_by: if created_by:
filter["created_by"] = created_by filter["created_by"] = created_by
if project_id is None:
filter["project_id"] = None
if project_id: if project_id:
filter["project_id"] = project_id filter["project_id"] = project_id
return await self.collection.count_documents(filter) return await self.collection.count_documents(filter)

View File

@@ -15,26 +15,24 @@ class CharacterRepo:
character.id = str(op.inserted_id) character.id = str(op.inserted_id)
return character return character
async def get_character(self, character_id: str, with_image_data: bool = False) -> Character | None: async def get_character(self, character_id: str) -> Character | None:
args = {} res = await self.collection.find_one({"_id": ObjectId(character_id)})
if not with_image_data:
args["character_image_data"] = 0
res = await self.collection.find_one({"_id": ObjectId(character_id)}, args)
if res is None: if res is None:
return None return None
else: else:
res["id"] = str(res.pop("_id")) res["id"] = str(res.pop("_id"))
return Character(**res) return Character(**res)
async def get_all_characters(self, created_by: Optional[str] = None, project_id: Optional[str] = None) -> List[Character]: async def get_all_characters(self, created_by: Optional[str] = None, project_id: Optional[str] = None, limit: int = 100, offset: int = 0) -> List[Character]:
filter = {} filter = {}
if created_by: if created_by:
filter["created_by"] = created_by filter["created_by"] = created_by
if project_id is None:
filter["project_id"] = None
if project_id: if project_id:
filter["project_id"] = project_id filter["project_id"] = project_id
args = {"character_image_data": 0} # don't return image data for list res = await self.collection.find(filter).skip(offset).limit(limit).to_list(None)
res = await self.collection.find(filter, args).to_list(None)
chars = [] chars = []
for doc in res: for doc in res:
doc["id"] = str(doc.pop("_id")) doc["id"] = str(doc.pop("_id"))

View File

@@ -65,6 +65,8 @@ class GenerationRepo:
args["status"] = status args["status"] = status
if created_by is not None: if created_by is not None:
args["created_by"] = created_by args["created_by"] = created_by
if project_id is None:
args["project_id"] = None
if project_id is not None: if project_id is not None:
args["project_id"] = project_id args["project_id"] = project_id
if idea_id is not None: if idea_id is not None:
@@ -92,6 +94,121 @@ class GenerationRepo:
async def update_generation(self, generation: Generation, ): async def update_generation(self, generation: Generation, ):
res = await self.collection.update_one({"_id": ObjectId(generation.id)}, {"$set": generation.model_dump()}) res = await self.collection.update_one({"_id": ObjectId(generation.id)}, {"$set": generation.model_dump()})
async def get_usage_stats(self, created_by: Optional[str] = None, project_id: Optional[str] = None,
                          input_token_price: float = 0.000002,
                          output_token_price: float = 0.00012) -> dict:
    """
    Calculate total usage statistics (runs, tokens, cost) via one MongoDB
    aggregation.

    Only non-deleted generations with status DONE are counted. Cost is
    computed per token; the price defaults match the previously hard-coded
    values and can now be overridden per call (backward compatible).

    Returns a dict with total_runs, total_tokens, total_input_tokens,
    total_output_tokens and total_cost (rounded to 4 decimal places).
    """
    # 1. Match active, finished generations, optionally filtered by owner/project.
    match_stage = {"is_deleted": False, "status": GenerationStatus.DONE}
    if created_by:
        match_stage["created_by"] = created_by
    if project_id:
        match_stage["project_id"] = project_id
    pipeline = [
        {"$match": match_stage},
        # 2. Single group over everything (_id: None) to produce grand totals.
        {"$group": {
            "_id": None,
            "total_runs": {"$sum": 1},
            # Prefer the split input/output counters when both are present;
            # fall back to the legacy combined `token_usage` field otherwise.
            "total_tokens": {"$sum": {
                "$cond": [
                    {"$and": [{"$gt": ["$input_token_usage", 0]},
                              {"$gt": ["$output_token_usage", 0]}]},
                    {"$add": ["$input_token_usage", "$output_token_usage"]},
                    {"$ifNull": ["$token_usage", 0]}
                ]
            }},
            "total_input_tokens": {"$sum": {"$ifNull": ["$input_token_usage", 0]}},
            "total_output_tokens": {"$sum": {"$ifNull": ["$output_token_usage", 0]}},
            "total_cost": {"$sum": {"$add": [
                {"$multiply": [{"$ifNull": ["$input_token_usage", 0]}, input_token_price]},
                {"$multiply": [{"$ifNull": ["$output_token_usage", 0]}, output_token_price]}
            ]}}
        }}
    ]
    cursor = self.collection.aggregate(pipeline)
    res = await cursor.to_list(1)
    if not res:
        # No matching documents: $group emits nothing, so return explicit zeros.
        return {
            "total_runs": 0,
            "total_tokens": 0,
            "total_input_tokens": 0,
            "total_output_tokens": 0,
            "total_cost": 0.0
        }
    result = res[0]
    result.pop("_id")
    result["total_cost"] = round(result["total_cost"], 4)
    return result
async def get_usage_breakdown(self, group_by: str = "created_by", project_id: Optional[str] = None,
                              created_by: Optional[str] = None,
                              input_token_price: float = 0.000002,
                              output_token_price: float = 0.00012) -> List[dict]:
    """
    Return usage statistics grouped by user ("created_by") or project
    ("project_id"), sorted by total cost descending.

    Only non-deleted generations with status DONE are counted. The price
    defaults match the previously hard-coded values and can be overridden
    per call (backward compatible).

    Returns a list of {"entity_id": str, "stats": {...}} dicts.
    """
    match_stage = {"is_deleted": False, "status": GenerationStatus.DONE}
    if project_id:
        match_stage["project_id"] = project_id
    if created_by:
        match_stage["created_by"] = created_by
    pipeline = [
        {"$match": match_stage},
        {"$group": {
            "_id": f"${group_by}",
            "total_runs": {"$sum": 1},
            # Prefer split input/output counters; fall back to legacy
            # combined `token_usage` when either side is missing.
            "total_tokens": {"$sum": {
                "$cond": [
                    {"$and": [{"$gt": ["$input_token_usage", 0]},
                              {"$gt": ["$output_token_usage", 0]}]},
                    {"$add": ["$input_token_usage", "$output_token_usage"]},
                    {"$ifNull": ["$token_usage", 0]}
                ]
            }},
            "total_input_tokens": {"$sum": {"$ifNull": ["$input_token_usage", 0]}},
            "total_output_tokens": {"$sum": {"$ifNull": ["$output_token_usage", 0]}},
            "total_cost": {"$sum": {"$add": [
                {"$multiply": [{"$ifNull": ["$input_token_usage", 0]}, input_token_price]},
                {"$multiply": [{"$ifNull": ["$output_token_usage", 0]}, output_token_price]}
            ]}}
        }},
        {"$sort": {"total_cost": -1}}
    ]
    cursor = self.collection.aggregate(pipeline)
    res = await cursor.to_list(None)
    results = []
    for item in res:
        entity_id = item.pop("_id")
        item["total_cost"] = round(item["total_cost"], 4)
        results.append({
            # `is not None` (not plain truthiness) so a falsy-but-present id
            # is not mislabeled as "unknown".
            "entity_id": str(entity_id) if entity_id is not None else "unknown",
            "stats": item
        })
    return results
async def get_generations_by_group(self, group_id: str) -> List[Generation]: async def get_generations_by_group(self, group_id: str) -> List[Generation]:
res = await self.collection.find({"generation_group_id": group_id, "is_deleted": False}).sort("created_at", 1).to_list(None) res = await self.collection.find({"generation_group_id": group_id, "is_deleted": False}).sort("created_at", 1).to_list(None)
generations: List[Generation] = [] generations: List[Generation] = []

View File

@@ -39,8 +39,17 @@ class IdeaRepo:
"from": "generations", "from": "generations",
"let": {"idea_id": "$str_id"}, "let": {"idea_id": "$str_id"},
"pipeline": [ "pipeline": [
{"$match": {"$expr": {"$eq": ["$idea_id", "$$idea_id"]}}}, {
{"$sort": {"created_at": -1}}, # Ensure we get the latest "$match": {
"$and": [
{"$expr": {"$eq": ["$idea_id", "$$idea_id"]}},
{"status": "done"},
{"result_list": {"$exists": True, "$not": {"$size": 0}}},
{"is_deleted": False}
]
}
},
{"$sort": {"created_at": -1}}, # Ensure we get the latest successful
{"$limit": 1} {"$limit": 1}
], ],
"as": "generations" "as": "generations"

View File

@@ -51,57 +51,66 @@ async def new_char_bio(message: Message, state: FSMContext, dao: DAO, bot: Bot):
wait_msg = await message.answer("💾 Сохраняю персонажа...") wait_msg = await message.answer("💾 Сохраняю персонажа...")
try: try:
# ВОТ ТУТ скачиваем файл (прямо перед сохранением) # 1. Скачиваем файл (один раз)
# TODO: Для больших файлов лучше использовать streaming или сохранять во временный файл
file_io = await bot.download(file_id) file_io = await bot.download(file_id)
# photo_bytes = file_io.getvalue() # Получаем байты file_bytes = file_io.read()
# 2. Создаем Character (сначала без ассета, чтобы получить ID)
# Создаем модель
char = Character( char = Character(
id=None, id=None,
name=name, name=name,
character_image_data=file_io.read(),
character_image_tg_id=None, character_image_tg_id=None,
character_image_doc_tg_id=file_id, character_image_doc_tg_id=file_id,
character_bio=bio, character_bio=bio,
created_by=str(message.from_user.id) created_by=str(message.from_user.id)
) )
file_io.close()
# Сохраняем через DAO
# Сохраняем, чтобы получить ID
await dao.chars.add_character(char) await dao.chars.add_character(char)
file_info = await bot.get_file(char.character_image_doc_tg_id)
file_bytes = await bot.download_file(file_info.file_path) # 3. Создаем Asset (связанный с персонажем)
file_io = file_bytes.read() avatar_asset_id = await dao.assets.create_asset(
avatar_asset = await dao.assets.create_asset( Asset(
Asset(name="avatar.png", type=AssetType.UPLOADED, content_type=AssetContentType.IMAGE, linked_char_id=str(char.id), data=file_io, name="avatar.png",
tg_doc_file_id=file_id)) type=AssetType.UPLOADED,
char.avatar_image = avatar_asset.link content_type=AssetContentType.IMAGE,
linked_char_id=str(char.id),
data=file_bytes,
tg_doc_file_id=file_id
)
)
# 4. Обновляем персонажа ссылками на ассет
char.avatar_asset_id = avatar_asset_id
char.avatar_image = f"/api/assets/{avatar_asset_id}" # Формируем ссылку вручную или используем метод, если появится
# Отправляем подтверждение # Отправляем подтверждение
# Используем байты для отправки обратно
photo_msg = await message.answer_photo( photo_msg = await message.answer_photo(
photo=BufferedInputFile(file_io, photo=BufferedInputFile(file_bytes, filename="char.jpg"),
filename="char.jpg") if not char.character_image_tg_id else char.character_image_tg_id,
caption=( caption=(
"🎉 <b>Персонаж создан!</b>\n\n" "🎉 <b>Персонаж создан!</b>\n\n"
f"👤 <b>Имя:</b> {char.name}\n" f"👤 <b>Имя:</b> {char.name}\n"
f"📝 <b>Био:</b> {char.character_bio}" f"📝 <b>Био:</b> {char.character_bio}"
) )
) )
file_bytes.close()
char.character_image_tg_id = photo_msg.photo[0].file_id
# Сохраняем TG ID фото (которое отправили как фото, а не документ)
char.character_image_tg_id = photo_msg.photo[-1].file_id
# Финальное обновление персонажа
await dao.chars.update_char(char.id, char) await dao.chars.update_char(char.id, char)
await wait_msg.delete() await wait_msg.delete()
file_io.close()
# Сбрасываем состояние # Сбрасываем состояние
await state.clear() await state.clear()
except Exception as e: except Exception as e:
logging.error(e) logger.error(f"Error creating character: {e}")
traceback.print_exc()
await wait_msg.edit_text(f"❌ Ошибка при сохранении: {e}") await wait_msg.edit_text(f"❌ Ошибка при сохранении: {e}")
# Не сбрасываем стейт, даем возможность попробовать ввести био снова или начать заново
@router.message(Command("chars")) @router.message(Command("chars"))