Compare commits
116 Commits
28c8fb103e
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| e011805186 | |||
| d9caececd7 | |||
| c1300b7a2d | |||
| f6001f5994 | |||
| e4a39e90c3 | |||
| e976fe1c58 | |||
| ecc8d69039 | |||
| bc9230a49b | |||
| f07105b0e5 | |||
| 9a5d54a373 | |||
| 1868864f76 | |||
| 9e0c522b5f | |||
| e1d941a2cd | |||
| c7c27197c9 | |||
| 5aa6391dc8 | |||
| ffb0463fe0 | |||
| dd0f8a1cb6 | |||
| 4af5134726 | |||
| 7488665d04 | |||
| ecc88aca62 | |||
| 70f50170fc | |||
| f4207fc4c1 | |||
| c50d2c8ad9 | |||
| 4586daac38 | |||
| 198ac44960 | |||
| d820d9145b | |||
| c93e577bcf | |||
| c5d4849bff | |||
| 9abfbef871 | |||
| 68a3f529cb | |||
| e2c050515d | |||
| 5e7dc19bf3 | |||
| 97483b7030 | |||
| 2d3da59de9 | |||
| 279cb5c6f6 | |||
| 30138bab38 | |||
| 977cab92f8 | |||
| dcab238d3e | |||
| 9d2e4e47de | |||
| c6142715d9 | |||
| 456562ec1d | |||
| 0d0fbdf7d6 | |||
| f63bcedb13 | |||
| be92c766ac | |||
| 482bc1d9b7 | |||
| a2321cf070 | |||
| 29ccd5743e | |||
| d9de2f48d2 | |||
| 1ddeb0af46 | |||
| a7c2319f13 | |||
| 00e83b8561 | |||
| a9d24c725e | |||
| 458b6ebfc3 | |||
| 668aadcdc9 | |||
| 4461964791 | |||
| fa3e1bb05f | |||
| 8a89b27624 | |||
| c17c47ccc1 | |||
| c25b029006 | |||
| a449f65de9 | |||
| 3cf7db5cdf | |||
| 288515fa04 | |||
| f1033210cc | |||
| 1832d07caa | |||
| b704707abc | |||
| 31893414eb | |||
| aa50b1cc03 | |||
| 305ad24576 | |||
| ce87ac7edb | |||
| 2f8de7a298 | |||
| b8e96a2dca | |||
| 137279bcc5 | |||
| 553335940f | |||
| fd1b023e7d | |||
| eeea0f5b8f | |||
| ac5cc53006 | |||
| c3b13360e0 | |||
| 63292a1699 | |||
| 59c40524e0 | |||
| cdb09e84fc | |||
| 37e69088a1 | |||
| 7e2f79aab1 | |||
| c0debab0cb | |||
| 002c949f08 | |||
| d4682b1418 | |||
| 463e73fa1e | |||
| 76dd976854 | |||
| 736e5a8c12 | |||
| 9ae6e8e08e | |||
| bf8396a790 | |||
| 53b2bce1b2 | |||
| fba18728d6 | |||
| c86dfa917d | |||
| f36a368051 | |||
| c8984dc472 | |||
| b4f4ead3b3 | |||
| 35de8efc56 | |||
| 11c1f4f7dc | |||
| 43e9c263d5 | |||
| 30daa1340a | |||
| e43cd575b0 | |||
| cba813337e | |||
| b8b708c659 | |||
| a1dc734cdb | |||
| 7050999ed8 | |||
| 739f027742 | |||
| f69e8f3c35 | |||
| e8b91af804 | |||
| befd1a66f7 | |||
| 2c310cae09 | |||
| 447107834c | |||
| 21f86afa38 | |||
| 2693675e85 | |||
| ea3f50db50 | |||
| e79b191463 | |||
| 8fd9f9b8c8 |
33
.context.md
Normal file
33
.context.md
Normal file
@@ -0,0 +1,33 @@
|
||||
# Project Context: AI Char Bot
|
||||
|
||||
## Overview
|
||||
Python backend project using FastAPI and MongoDB (Motor).
|
||||
Root: `/Users/xds/develop/py projects/ai-char-bot`
|
||||
|
||||
## Architecture
|
||||
- **API Layer**: `api/endpoints` (FastAPI routers).
|
||||
- **Service Layer**: `api/service` (Business logic).
|
||||
- **Data Layer**: `repos` (DAOs and Repositories).
|
||||
- **Models**: `models` (Domain models) and `api/models` (Request/Response DTOs).
|
||||
- **Adapters**: `adapters` (External services like S3, Google Gemini).
|
||||
|
||||
## Coding Standards & Preferences
|
||||
- **Type Hinting**: Use `Type | None` instead of `Optional[Type]` (Python 3.10+ style).
|
||||
- **Async/Await**: Extensive use of `asyncio` and asynchronous DB drivers.
|
||||
- **Error Handling**:
|
||||
- Repositories should return `None` if an entity is not found (e.g., `toggle_like`).
|
||||
- Services/Routers handle `HTTPException`.
|
||||
|
||||
## Key Features & Implementation Details
|
||||
- **Generations**:
|
||||
- Managed by `GenerationService` and `GenerationRepo`.
|
||||
- `toggle_like` returns `bool | None` (True=Liked, False=Unliked, None=Not Found).
|
||||
- `get_generations` requires `current_user_id` to correctly calculate `is_liked`.
|
||||
- **Ideas**:
|
||||
- Managed by `IdeaService` and `IdeaRepo`.
|
||||
- Can have linked generations.
|
||||
- When fetching generations for an idea, ensure `current_user_id` is passed to `GenerationService`.
|
||||
|
||||
## Recent Changes
|
||||
- Refactored `toggle_like` to handle non-existent generations and return `bool | None`.
|
||||
- Updated `IdeaRouter` to pass `current_user_id` when fetching generations to ensure `is_liked` flag is correct.
|
||||
19
.dockerignore
Normal file
19
.dockerignore
Normal file
@@ -0,0 +1,19 @@
|
||||
.git
|
||||
.gitignore
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.Python
|
||||
env/
|
||||
venv/
|
||||
.venv/
|
||||
node_modules/
|
||||
tmp/
|
||||
logs/
|
||||
*.log
|
||||
dist/
|
||||
build/
|
||||
.cache/
|
||||
.idea/
|
||||
.vscode/
|
||||
9
.env
9
.env
@@ -1,4 +1,11 @@
|
||||
BOT_TOKEN=8495170789:AAHyjjhHwwVtd9_ROnjHqPHRdnmyVr1aeaY
|
||||
# BOT_TOKEN=8011562605:AAF3kyzrZJgii0Jx-H8Sum5Njbo0BdbsiAo
|
||||
GEMINI_API_KEY=AIzaSyAHzDYhgjOqZZnvOnOFRGaSkKu4OAN3kZE
|
||||
MONGO_HOST=mongodb://admin:super_secure_password@31.59.58.220:27017/
|
||||
ADMIN_ID=567047
|
||||
ADMIN_ID=567047
|
||||
MINIO_ENDPOINT=http://31.59.58.220:9000
|
||||
MINIO_ACCESS_KEY=admin
|
||||
MINIO_SECRET_KEY=SuperSecretPassword123!
|
||||
MINIO_BUCKET=ai-char
|
||||
MODE=production
|
||||
EXTERNAL_API_SECRET=Gt9TyQ8OAYhcELh2YCbKjdHLflZGufKHJZcG338MQDW
|
||||
33
.gemini/AGENTS.md
Normal file
33
.gemini/AGENTS.md
Normal file
@@ -0,0 +1,33 @@
|
||||
# Project Context: AI Char Bot
|
||||
|
||||
## Overview
|
||||
Python backend project using FastAPI and MongoDB (Motor).
|
||||
Root: `/Users/xds/develop/py projects/ai-char-bot`
|
||||
|
||||
## Architecture
|
||||
- **API Layer**: `api/endpoints` (FastAPI routers).
|
||||
- **Service Layer**: `api/service` (Business logic).
|
||||
- **Data Layer**: `repos` (DAOs and Repositories).
|
||||
- **Models**: `models` (Domain models) and `api/models` (Request/Response DTOs).
|
||||
- **Adapters**: `adapters` (External services like S3, Google Gemini).
|
||||
|
||||
## Coding Standards & Preferences
|
||||
- **Type Hinting**: Use `Type | None` instead of `Optional[Type]` (Python 3.10+ style).
|
||||
- **Async/Await**: Extensive use of `asyncio` and asynchronous DB drivers.
|
||||
- **Error Handling**:
|
||||
- Repositories should return `None` if an entity is not found (e.g., `toggle_like`).
|
||||
- Services/Routers handle `HTTPException`.
|
||||
|
||||
## Key Features & Implementation Details
|
||||
- **Generations**:
|
||||
- Managed by `GenerationService` and `GenerationRepo`.
|
||||
- `toggle_like` returns `bool | None` (True=Liked, False=Unliked, None=Not Found).
|
||||
- `get_generations` requires `current_user_id` to correctly calculate `is_liked`.
|
||||
- **Ideas**:
|
||||
- Managed by `IdeaService` and `IdeaRepo`.
|
||||
- Can have linked generations.
|
||||
- When fetching generations for an idea, ensure `current_user_id` is passed to `GenerationService`.
|
||||
|
||||
## Recent Changes
|
||||
- Refactored `toggle_like` to handle non-existent generations and return `bool | None`.
|
||||
- Updated `IdeaRouter` to pass `current_user_id` when fetching generations to ensure `is_liked` flag is correct.
|
||||
26
.gitignore
vendored
Normal file
26
.gitignore
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
minio_backup.tar.gz
|
||||
.DS_Store
|
||||
**/__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.cpython-*.pyc
|
||||
**/.DS_Store
|
||||
.idea/ai-char-bot.iml
|
||||
.idea
|
||||
.venv
|
||||
.vscode
|
||||
.vscode/launch.json
|
||||
middlewares/__pycache__/
|
||||
middlewares/*.pyc
|
||||
api/__pycache__/
|
||||
api/*.pyc
|
||||
repos/__pycache__/
|
||||
repos/*.pyc
|
||||
adapters/__pycache__/
|
||||
adapters/*.pyc
|
||||
services/__pycache__/
|
||||
services/*.pyc
|
||||
utils/__pycache__/
|
||||
utils/*.pyc
|
||||
.vscode/launch.json
|
||||
repos/__pycache__/assets_repo.cpython-313.pyc
|
||||
8
.idea/.gitignore
generated
vendored
Normal file
8
.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# Editor-based HTTP Client requests
|
||||
/httpRequests/
|
||||
# Datasource local storage ignored files
|
||||
/dataSources/
|
||||
/dataSources.local.xml
|
||||
10
.idea/ai-char-bot.iml
generated
Normal file
10
.idea/ai-char-bot.iml
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/.venv" />
|
||||
</content>
|
||||
<orderEntry type="jdk" jdkName="Python 3.13 (ai-char-bot)" jdkType="Python SDK" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
||||
16
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
16
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
@@ -0,0 +1,16 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Project Default" />
|
||||
<inspection_tool class="PyAssertTypeInspection" enabled="true" level="ERROR" enabled_by_default="true" editorAttributes="ERRORS_ATTRIBUTES" />
|
||||
<inspection_tool class="PyAsyncCallInspection" enabled="true" level="ERROR" enabled_by_default="true" editorAttributes="ERRORS_ATTRIBUTES" />
|
||||
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
||||
<option name="ignoredErrors">
|
||||
<list>
|
||||
<option value="N802" />
|
||||
</list>
|
||||
</option>
|
||||
</inspection_tool>
|
||||
<inspection_tool class="PyTypeCheckerInspection" enabled="true" level="ERROR" enabled_by_default="true" editorAttributes="ERRORS_ATTRIBUTES" />
|
||||
<inspection_tool class="PyUnreachableCodeInspection" enabled="true" level="ERROR" enabled_by_default="true" editorAttributes="ERRORS_ATTRIBUTES" />
|
||||
</profile>
|
||||
</component>
|
||||
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<settings>
|
||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||
<version value="1.0" />
|
||||
</settings>
|
||||
</component>
|
||||
7
.idea/misc.xml
generated
Normal file
7
.idea/misc.xml
generated
Normal file
@@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Black">
|
||||
<option name="sdkName" value="Python 3.13 (ai-char-bot)" />
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (ai-char-bot)" project-jdk-type="Python SDK" />
|
||||
</project>
|
||||
8
.idea/modules.xml
generated
Normal file
8
.idea/modules.xml
generated
Normal file
@@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/ai-char-bot.iml" filepath="$PROJECT_DIR$/.idea/ai-char-bot.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
||||
6
.idea/vcs.xml
generated
Normal file
6
.idea/vcs.xml
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$" vcs="Git" />
|
||||
</component>
|
||||
</project>
|
||||
21
.vscode/launch.json
vendored
Normal file
21
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python Debugger: FastAPI",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"args": [
|
||||
"aiws:app",
|
||||
"--reload",
|
||||
"--port",
|
||||
"8090",
|
||||
"--host",
|
||||
"0.0.0.0"
|
||||
],
|
||||
"jinja": true,
|
||||
"justMyCode": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -11,4 +11,4 @@ RUN pip install --no-cache-dir -r requirements.txt
|
||||
COPY . .
|
||||
|
||||
# Запуск приложения (замени app.py на свой файл)
|
||||
CMD ["python", "main.py"]
|
||||
CMD ["uvicorn", "aiws:app", "--host", "0.0.0.0", "--port", "8090"]
|
||||
|
||||
BIN
__pycache__/config.cpython-313.pyc
Normal file
BIN
__pycache__/config.cpython-313.pyc
Normal file
Binary file not shown.
BIN
__pycache__/keyboards.cpython-313.pyc
Normal file
BIN
__pycache__/keyboards.cpython-313.pyc
Normal file
Binary file not shown.
BIN
__pycache__/main.cpython-313.pyc
Normal file
BIN
__pycache__/main.cpython-313.pyc
Normal file
Binary file not shown.
4
adapters/Exception.py
Normal file
4
adapters/Exception.py
Normal file
@@ -0,0 +1,4 @@
|
||||
class GoogleGenerationException(Exception):
|
||||
def __init__(self, message: str):
|
||||
self.message = message
|
||||
super().__init__(message)
|
||||
BIN
adapters/__pycache__/Exception.cpython-313.pyc
Normal file
BIN
adapters/__pycache__/Exception.cpython-313.pyc
Normal file
Binary file not shown.
BIN
adapters/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
adapters/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
adapters/__pycache__/google_adapter.cpython-313.pyc
Normal file
BIN
adapters/__pycache__/google_adapter.cpython-313.pyc
Normal file
Binary file not shown.
BIN
adapters/__pycache__/kling_adapter.cpython-313.pyc
Normal file
BIN
adapters/__pycache__/kling_adapter.cpython-313.pyc
Normal file
Binary file not shown.
BIN
adapters/__pycache__/s3_adapter.cpython-313.pyc
Normal file
BIN
adapters/__pycache__/s3_adapter.cpython-313.pyc
Normal file
Binary file not shown.
@@ -1,13 +1,14 @@
|
||||
import io
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import List, Union
|
||||
from typing import List, Union, Tuple, Dict, Any
|
||||
|
||||
from PIL import Image
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
|
||||
from models.enums import AspectRatios, Quality
|
||||
from adapters.Exception import GoogleGenerationException
|
||||
from models.enums import AspectRatios, Quality, TextModel, ImageModel
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -18,33 +19,37 @@ class GoogleAdapter:
|
||||
raise ValueError("API Key for Gemini is missing")
|
||||
self.client = genai.Client(api_key=api_key)
|
||||
|
||||
# Константы моделей
|
||||
self.TEXT_MODEL = "gemini-3-pro-preview"
|
||||
self.IMAGE_MODEL = "gemini-3-pro-image-preview"
|
||||
|
||||
def _prepare_contents(self, prompt: str, images_list: List[bytes] = None) -> list:
|
||||
"""Вспомогательный метод для подготовки контента (текст + картинки)"""
|
||||
contents = [prompt]
|
||||
def _prepare_contents(self, prompt: str, images_list: List[bytes] | None = None) -> tuple:
|
||||
"""Вспомогательный метод для подготовки контента (текст + картинки).
|
||||
Returns (contents, opened_images) — caller MUST close opened_images after use."""
|
||||
contents : list [Any]= [prompt]
|
||||
opened_images = []
|
||||
if images_list:
|
||||
logger.info(f"Preparing content with {len(images_list)} images")
|
||||
for img_bytes in images_list:
|
||||
try:
|
||||
# Gemini API требует PIL Image на входе
|
||||
image = Image.open(io.BytesIO(img_bytes))
|
||||
contents.append(image)
|
||||
opened_images.append(image)
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing input image: {e}")
|
||||
return contents
|
||||
else:
|
||||
logger.info("Preparing content with no images")
|
||||
return contents, opened_images
|
||||
|
||||
def generate_text(self, prompt: str, images_list: List[bytes] = None) -> str:
|
||||
def generate_text(self, prompt: str, model: str = "gemini-3.1-pro-preview", images_list: List[bytes] | None = None) -> str:
|
||||
"""
|
||||
Генерация текста (Чат или Vision).
|
||||
Возвращает строку с ответом.
|
||||
"""
|
||||
contents = self._prepare_contents(prompt, images_list)
|
||||
if model not in [m.value for m in TextModel]:
|
||||
raise ValueError(f"Invalid model for text generation: {model}. Expected one of: {[m.value for m in TextModel]}")
|
||||
|
||||
contents, opened_images = self._prepare_contents(prompt, images_list)
|
||||
logger.info(f"Generating text: {prompt} with model: {model}")
|
||||
try:
|
||||
response = self.client.models.generate_content(
|
||||
model=self.TEXT_MODEL,
|
||||
model=model,
|
||||
contents=contents,
|
||||
config=types.GenerateContentConfig(
|
||||
response_modalities=['TEXT'],
|
||||
@@ -58,33 +63,64 @@ class GoogleAdapter:
|
||||
for part in response.parts:
|
||||
if part.text:
|
||||
result_text += part.text
|
||||
|
||||
logger.info(f"Generated text length: {len(result_text)}")
|
||||
return result_text
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Gemini Text API Error: {e}")
|
||||
return f"Ошибка генерации текста: {e}"
|
||||
raise GoogleGenerationException(f"Gemini Text API Error: {e}")
|
||||
finally:
|
||||
for img in opened_images:
|
||||
img.close()
|
||||
|
||||
def generate_image(self, prompt: str, aspect_ratio: AspectRatios, quality: Quality, images_list: List[bytes] = None, ) -> List[io.BytesIO]:
|
||||
def generate_image(self, prompt: str, aspect_ratio: AspectRatios, quality: Quality, model: str = "gemini-3-pro-image-preview", images_list: List[bytes] | None = None, ) -> Tuple[List[io.BytesIO], Dict[str, Any]]:
|
||||
"""
|
||||
Генерация изображений (Text-to-Image или Image-to-Image).
|
||||
Возвращает список байтовых потоков (готовых к отправке).
|
||||
"""
|
||||
contents = self._prepare_contents(prompt, images_list)
|
||||
if model not in [m.value for m in ImageModel]:
|
||||
raise ValueError(f"Invalid model for image generation: {model}. Expected one of: {[m.value for m in ImageModel]}")
|
||||
|
||||
contents, opened_images = self._prepare_contents(prompt, images_list)
|
||||
logger.info(f"Generating image. Prompt length: {len(prompt)}, Ratio: {aspect_ratio}, Quality: {quality}, Model: {model}")
|
||||
|
||||
start_time = datetime.now()
|
||||
token_usage = 0
|
||||
|
||||
try:
|
||||
response = self.client.models.generate_content(
|
||||
model=self.IMAGE_MODEL,
|
||||
model=model,
|
||||
contents=contents,
|
||||
config=types.GenerateContentConfig(
|
||||
response_modalities=['IMAGE'],
|
||||
temperature=1.0,
|
||||
image_config=types.ImageConfig(
|
||||
aspect_ratio=aspect_ratio.value,
|
||||
image_size=quality.value
|
||||
aspect_ratio=aspect_ratio.value_ratio,
|
||||
image_size=quality.value_quality
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
end_time = datetime.now()
|
||||
api_duration = (end_time - start_time).total_seconds()
|
||||
|
||||
if response.usage_metadata:
|
||||
token_usage = response.usage_metadata.total_token_count
|
||||
|
||||
# Check prompt-level block (e.g. PROHIBITED_CONTENT) — no candidates in this case
|
||||
if response.prompt_feedback and response.prompt_feedback.block_reason:
|
||||
raise GoogleGenerationException(
|
||||
f"Generation blocked at prompt level: {response.prompt_feedback.block_reason.value}"
|
||||
)
|
||||
|
||||
# Check candidate-level block
|
||||
if response.parts is None:
|
||||
response_reason = (
|
||||
response.candidates[0].finish_reason
|
||||
if response.candidates and len(response.candidates) > 0
|
||||
else "Unknown"
|
||||
)
|
||||
raise GoogleGenerationException(f"Generation blocked: {response_reason}")
|
||||
|
||||
generated_images = []
|
||||
|
||||
@@ -95,7 +131,9 @@ class GoogleAdapter:
|
||||
try:
|
||||
# 1. Берем сырые байты
|
||||
raw_data = part.inline_data.data
|
||||
byte_arr = io.BytesIO(raw_data)
|
||||
if raw_data is None:
|
||||
raise GoogleGenerationException("Generation returned no data")
|
||||
byte_arr : io.BytesIO = io.BytesIO(raw_data)
|
||||
|
||||
# 2. Нейминг (формально, для TG)
|
||||
timestamp = datetime.now().timestamp()
|
||||
@@ -108,9 +146,29 @@ class GoogleAdapter:
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing output image: {e}")
|
||||
|
||||
return generated_images
|
||||
if generated_images:
|
||||
logger.info(f"Successfully generated {len(generated_images)} images in {api_duration:.2f}s. Tokens: {token_usage}")
|
||||
else:
|
||||
logger.warning("No images text generated from parts")
|
||||
|
||||
input_tokens = 0
|
||||
output_tokens = 0
|
||||
if response.usage_metadata:
|
||||
input_tokens = response.usage_metadata.prompt_token_count
|
||||
output_tokens = response.usage_metadata.candidates_token_count
|
||||
|
||||
metrics = {
|
||||
"api_execution_time_seconds": api_duration,
|
||||
"token_usage": token_usage,
|
||||
"input_token_usage": input_tokens,
|
||||
"output_token_usage": output_tokens
|
||||
}
|
||||
return generated_images, metrics
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Gemini Image API Error: {e}")
|
||||
# В случае ошибки возвращаем пустой список (или можно рейзить исключение)
|
||||
return []
|
||||
raise GoogleGenerationException(f"Gemini Image API Error: {e}")
|
||||
finally:
|
||||
for img in opened_images:
|
||||
img.close()
|
||||
del contents
|
||||
112
adapters/s3_adapter.py
Normal file
112
adapters/s3_adapter.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Optional, BinaryIO, AsyncGenerator
|
||||
import aioboto3
|
||||
from botocore.exceptions import ClientError
|
||||
import os
|
||||
|
||||
class S3Adapter:
|
||||
def __init__(self,
|
||||
endpoint_url: str,
|
||||
aws_access_key_id: str,
|
||||
aws_secret_access_key: str,
|
||||
bucket_name: str):
|
||||
self.endpoint_url = endpoint_url
|
||||
self.aws_access_key_id = aws_access_key_id
|
||||
self.aws_secret_access_key = aws_secret_access_key
|
||||
self.bucket_name = bucket_name
|
||||
self.session = aioboto3.Session()
|
||||
|
||||
@asynccontextmanager
|
||||
async def _get_client(self):
|
||||
async with self.session.client( # type: ignore[reportGeneralTypeIssues]
|
||||
"s3",
|
||||
endpoint_url=self.endpoint_url,
|
||||
aws_access_key_id=self.aws_access_key_id,
|
||||
aws_secret_access_key=self.aws_secret_access_key,
|
||||
) as client:
|
||||
yield client
|
||||
|
||||
async def upload_file(self, object_name: str, data: bytes, content_type: Optional[str] = None):
|
||||
"""Uploads bytes data to S3."""
|
||||
try:
|
||||
extra_args = {}
|
||||
if content_type:
|
||||
extra_args["ContentType"] = content_type
|
||||
|
||||
async with self._get_client() as client:
|
||||
await client.put_object(
|
||||
Bucket=self.bucket_name,
|
||||
Key=object_name,
|
||||
Body=data,
|
||||
**extra_args
|
||||
)
|
||||
return True
|
||||
except ClientError as e:
|
||||
# logging.error(e)
|
||||
print(f"Error uploading to S3: {e}")
|
||||
return False
|
||||
|
||||
async def get_file(self, object_name: str) -> Optional[bytes]:
|
||||
"""Downloads a file from S3 and returns bytes."""
|
||||
try:
|
||||
async with self._get_client() as client:
|
||||
response = await client.get_object(Bucket=self.bucket_name, Key=object_name)
|
||||
return await response['Body'].read()
|
||||
except ClientError as e:
|
||||
print(f"Error downloading from S3: {e}")
|
||||
return None
|
||||
|
||||
async def get_file_size(self, object_name: str) -> Optional[int]:
|
||||
"""Returns the size of the file in bytes."""
|
||||
try:
|
||||
async with self._get_client() as client:
|
||||
response = await client.head_object(Bucket=self.bucket_name, Key=object_name)
|
||||
return response['ContentLength']
|
||||
except ClientError as e:
|
||||
print(f"Error getting file size from S3: {e}")
|
||||
return None
|
||||
|
||||
async def stream_file(self, object_name: str, range_header: Optional[str] = None, chunk_size: int = 65536) -> AsyncGenerator[bytes, None]:
|
||||
"""Streams a file from S3 yielding chunks. Memory-efficient for large files."""
|
||||
try:
|
||||
async with self._get_client() as client:
|
||||
args = {'Bucket': self.bucket_name, 'Key': object_name}
|
||||
if range_header:
|
||||
args['Range'] = range_header
|
||||
|
||||
response = await client.get_object(**args)
|
||||
# aioboto3 Body is an aiohttp StreamReader wrapper
|
||||
body = response['Body']
|
||||
|
||||
while True:
|
||||
chunk = await body.read(chunk_size)
|
||||
if not chunk:
|
||||
break
|
||||
yield chunk
|
||||
except ClientError as e:
|
||||
print(f"Error streaming from S3: {e}")
|
||||
return
|
||||
|
||||
async def delete_file(self, object_name: str):
|
||||
"""Deletes a file from S3."""
|
||||
try:
|
||||
async with self._get_client() as client:
|
||||
await client.delete_object(Bucket=self.bucket_name, Key=object_name)
|
||||
return True
|
||||
except ClientError as e:
|
||||
print(f"Error deleting from S3: {e}")
|
||||
return False
|
||||
|
||||
async def get_presigned_url(self, object_name: str, expiration: int = 3600) -> Optional[str]:
|
||||
"""Generate a presigned URL to share an S3 object."""
|
||||
try:
|
||||
async with self._get_client() as client:
|
||||
response = await client.generate_presigned_url(
|
||||
'get_object',
|
||||
Params={'Bucket': self.bucket_name, 'Key': object_name},
|
||||
ExpiresIn=expiration
|
||||
)
|
||||
return response
|
||||
except ClientError as e:
|
||||
print(f"Error generating presigned URL: {e}")
|
||||
return None
|
||||
277
aiws.py
Normal file
277
aiws.py
Normal file
@@ -0,0 +1,277 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from aiogram import Bot, Dispatcher, Router, F
|
||||
from aiogram.client.default import DefaultBotProperties
|
||||
from aiogram.enums import ParseMode
|
||||
from aiogram.filters import CommandStart, Command
|
||||
from aiogram.types import Message
|
||||
from aiogram.fsm.storage.mongo import MongoStorage
|
||||
from fastapi import FastAPI
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
from prometheus_client import Info
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
from prometheus_fastapi_instrumentator import Instrumentator
|
||||
|
||||
# --- ИМПОРТЫ ПРОЕКТА ---
|
||||
from config import settings
|
||||
from adapters.google_adapter import GoogleAdapter
|
||||
from adapters.s3_adapter import S3Adapter
|
||||
from api.service.generation_service import GenerationService
|
||||
from api.service.album_service import AlbumService
|
||||
from middlewares.album import AlbumMiddleware
|
||||
from middlewares.auth import AuthMiddleware
|
||||
from middlewares.dao import DaoMiddleware
|
||||
|
||||
# Репозитории и DAO
|
||||
from repos.char_repo import CharacterRepo
|
||||
from repos.user_repo import UsersRepo
|
||||
from repos.dao import DAO
|
||||
|
||||
|
||||
# Роутеры
|
||||
from routers.auth_router import router as auth_router
|
||||
from routers.gen_router import router as gen_router
|
||||
from routers.char_router import router as char_router
|
||||
from routers.assets_router import router as assets_router # Роутер бота для ассетов
|
||||
from api.endpoints.assets_router import router as api_assets_router # Роутер FastAPI
|
||||
from api.endpoints.character_router import router as api_char_router # Роутер FastAPI
|
||||
from api.endpoints.generation_router import router as api_gen_router
|
||||
from api.endpoints.auth import router as api_auth_router
|
||||
from api.endpoints.admin import router as api_admin_router
|
||||
from api.endpoints.album_router import router as api_album_router
|
||||
from api.endpoints.project_router import router as project_api_router
|
||||
from api.endpoints.idea_router import router as idea_api_router
|
||||
from api.endpoints.post_router import router as post_api_router
|
||||
from api.endpoints.environment_router import router as environment_api_router
|
||||
from api.endpoints.inspiration_router import router as inspiration_api_router
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# --- КОНФИГУРАЦИЯ ---
|
||||
# Настройки теперь берутся из config.py
|
||||
BOT_TOKEN = settings.BOT_TOKEN
|
||||
GEMINI_API_KEY = settings.GEMINI_API_KEY
|
||||
|
||||
MONGO_HOST = settings.MONGO_HOST
|
||||
DB_NAME = settings.DB_NAME
|
||||
ADMIN_ID = settings.ADMIN_ID
|
||||
|
||||
|
||||
def setup_logging():
|
||||
logging.basicConfig(level=logging.INFO,
|
||||
format="%(asctime)s [%(levelname)s] %(name)s (%(filename)s:%(lineno)d): %(message)s",
|
||||
force=True)
|
||||
|
||||
|
||||
# --- ИНИЦИАЛИЗАЦИЯ ЗАВИСИМОСТЕЙ ---
|
||||
if BOT_TOKEN is None:
|
||||
raise ValueError("BOT_TOKEN is not set")
|
||||
bot = Bot(token=BOT_TOKEN, default=DefaultBotProperties(parse_mode=ParseMode.HTML))
|
||||
|
||||
# Клиент БД создаем глобально, чтобы он был доступен и боту (Storage), и API
|
||||
mongo_client = AsyncIOMotorClient(MONGO_HOST)
|
||||
|
||||
# Репозитории
|
||||
# Репозитории
|
||||
users_repo = UsersRepo(mongo_client)
|
||||
char_repo = CharacterRepo(mongo_client)
|
||||
|
||||
# S3 Adapter
|
||||
s3_adapter = S3Adapter(
|
||||
endpoint_url=settings.MINIO_ENDPOINT,
|
||||
aws_access_key_id=settings.MINIO_ACCESS_KEY,
|
||||
aws_secret_access_key=settings.MINIO_SECRET_KEY,
|
||||
bucket_name=settings.MINIO_BUCKET
|
||||
)
|
||||
|
||||
dao = DAO(mongo_client, s3_adapter) # Главный DAO для бота
|
||||
if GEMINI_API_KEY is None:
|
||||
raise ValueError("GEMINI_API_KEY is not set")
|
||||
gemini = GoogleAdapter(api_key=GEMINI_API_KEY)
|
||||
if bot is None:
|
||||
raise ValueError("bot is not set")
|
||||
generation_service = GenerationService(dao=dao, gemini=gemini, s3_adapter=s3_adapter, bot=bot)
|
||||
album_service = AlbumService(dao)
|
||||
|
||||
# Dispatcher
|
||||
dp = Dispatcher(storage=MongoStorage(mongo_client, db_name=DB_NAME))
|
||||
|
||||
# Внедрение зависимостей (глобально для бота)
|
||||
dp["repo"] = users_repo
|
||||
dp["admin_id"] = ADMIN_ID
|
||||
dp["gemini"] = gemini
|
||||
|
||||
# --- НАСТРОЙКА РОУТЕРОВ БОТА ---
|
||||
|
||||
# 1. Роутеры без мидлварей (например, auth)
|
||||
dp.include_router(auth_router)
|
||||
|
||||
# 2. Основные роутеры
|
||||
main_router = Router()
|
||||
dp.include_router(main_router)
|
||||
dp.include_router(assets_router)
|
||||
dp.include_router(char_router)
|
||||
dp.include_router(gen_router)
|
||||
|
||||
# --- НАСТРОЙКА MIDDLEWARES БОТА ---
|
||||
|
||||
# DaoMiddleware прокидывает объект 'dao' во все хендлеры
|
||||
dp.update.middleware(DaoMiddleware(dao=dao))
|
||||
|
||||
# AuthMiddleware проверяет права доступа
|
||||
main_router.message.middleware(AuthMiddleware(repo=users_repo, admin_id=ADMIN_ID))
|
||||
gen_router.message.middleware(AuthMiddleware(repo=users_repo, admin_id=ADMIN_ID))
|
||||
assets_router.message.middleware(AuthMiddleware(repo=users_repo, admin_id=ADMIN_ID))
|
||||
|
||||
# AlbumMiddleware для обработки групп фото
|
||||
gen_router.message.middleware(AlbumMiddleware(latency=0.8))
|
||||
|
||||
|
||||
async def start_scheduler(service: GenerationService):
|
||||
while True:
|
||||
try:
|
||||
logger.info("Running scheduler for stacked generation killing")
|
||||
await service.cleanup_stale_generations()
|
||||
await service.cleanup_old_data(days=14)
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
except Exception as e:
|
||||
logger.error(f"Scheduler error: {e}")
|
||||
await asyncio.sleep(60) # Check every 60 seconds
|
||||
|
||||
# --- LIFESPAN (Запуск FastAPI + Bot) ---
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan: publish shared clients on startup, stop background tasks on shutdown."""
    # --- STARTUP ---
    setup_logging()
    print("🚀 Starting up...")

    # 1. DAO setup for FastAPI — reuse the module-level mongo_client.
    # NOTE(review): `db` is assigned but never used below — candidate for removal.
    db = mongo_client[DB_NAME]

    # Publish shared clients/services on app.state so endpoints (and the
    # providers in api/dependency.py) can reach them via request.app.state.*
    app.state.mongo_client = mongo_client
    app.state.gemini_client = gemini
    app.state.bot = bot
    app.state.s3_adapter = s3_adapter
    app.state.album_service = album_service
    app.state.users_repo = users_repo  # users repository, used by auth endpoints

    print("✅ DB & DAO initialized")

    # 2. BOT STARTUP (background) — currently disabled.
    # Important: handle_signals=False so the bot does not steal shutdown
    # signals from uvicorn. dao is NOT passed here because it is already
    # injected via the middleware configured above.
    # polling_task = asyncio.create_task(
    #     dp.start_polling(bot, handle_signals=False)
    # )
    # print("🤖 Bot polling started")

    # 3. SCHEDULER STARTUP
    scheduler_task = asyncio.create_task(start_scheduler(generation_service))
    print("⏰ Scheduler started")

    yield

    # --- SHUTDOWN ---
    print("🛑 Shutting down...")

    # 4. Stop the scheduler: cancel, then await so cancellation completes.
    scheduler_task.cancel()
    try:
        await scheduler_task
    except asyncio.CancelledError:
        print("⏰ Scheduler stopped")

    # 3. Stop the bot (disabled together with polling startup above)
    # polling_task.cancel()
    # try:
    #     await polling_task
    # except asyncio.CancelledError:
    #     print("🤖 Bot polling stopped")

    # 4. DB disconnect.
    # Motor does not strictly require closing on process exit, but it is
    # considered good practice. NOTE(review): the close call is commented
    # out, so the message below is aspirational.
    # mongo_client.close()
    print("🛑 DB Connection closed")
||||
|
||||
# --- НАСТРОЙКА FASTAPI ---
|
||||
app = FastAPI(title="Assets API", lifespan=lifespan)

# CORS
# NOTE(review): allow_origins=["*"] accepts requests from any origin —
# confirm this is intended before exposing the service publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register API routers
app.include_router(api_auth_router)
app.include_router(api_admin_router)
app.include_router(api_assets_router)
app.include_router(api_char_router)
app.include_router(api_gen_router)
app.include_router(api_album_router)
app.include_router(project_api_router)
app.include_router(idea_api_router)
app.include_router(post_api_router)
app.include_router(environment_api_router)
app.include_router(inspiration_api_router)

# Prometheus Metrics (instrument only after all routers are added, so every
# route is templated correctly in the metrics)
Instrumentator(
    should_group_status_codes=False,  # report 200/201/204 separately (optional)
    should_ignore_untemplated=False,  # do NOT ignore "raw" (untemplated) paths
    # should_group_untemplated=False,  # (optional) don't collapse untemplated paths into "none"
).instrument(
    app,
    metric_namespace="ai_bot",
).expose(app, endpoint="/metrics", include_in_schema=False)
app_info = Info("fastapi_app_info", "FastAPI application info")
app_info.info({"app_name": "ai-bot"})
|
||||
|
||||
|
||||
# --- ХЕНДЛЕРЫ БОТА (Main Router) ---
|
||||
@main_router.message(Command("help"))
async def show_help(message: Message) -> None:
    """Reply to /help with a short usage reference (HTML formatted)."""
    help_text = (
        "ℹ️ <b>Справка:</b>\n\n"
        "📝 <b>Текст:</b> Просто отправь промпт.\n"
        "🎨 <b>Фото:</b> /image {промпт} (или прикрепи фото с подписью).\n\n"
        "⚠️ Диалоги не сохраняются (каждое сообщение — новый запрос)."
    )
    await message.answer(help_text)
|
||||
|
||||
|
||||
@main_router.message(CommandStart())
async def cmd_start(message: Message):
    """Greet the user on /start and point them at /help."""
    greeting = (
        "👋 Привет! Я готов к работе.\n\n"
        "Напиши мне, что нужно сгенерировать, или используй /help."
    )
    await message.answer(greeting)
|
||||
|
||||
|
||||
# --- ЗАПУСК ---
|
||||
if __name__ == "__main__":
    import uvicorn

    setup_logging()

    async def main():
        """Run uvicorn on the stdlib asyncio loop; the lifespan starts inside serve()."""
        # Build the uvicorn configuration manually; loop="asyncio" forces
        # the standard event-loop implementation.
        server_config = uvicorn.Config(
            app, host="0.0.0.0", port=8090, loop="asyncio", timeout_keep_alive=120
        )
        server = uvicorn.Server(server_config)
        # Start the server (the lifespan context runs inside it).
        await server.serve()

    try:
        # Drive the event loop ourselves so we control its arguments.
        asyncio.run(main())
    except KeyboardInterrupt:
        # Graceful exit on Ctrl+C.
        pass
|
||||
0
api/__init__.py
Normal file
0
api/__init__.py
Normal file
BIN
api/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
api/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/__pycache__/dependency.cpython-313.pyc
Normal file
BIN
api/__pycache__/dependency.cpython-313.pyc
Normal file
Binary file not shown.
70
api/dependency.py
Normal file
70
api/dependency.py
Normal file
@@ -0,0 +1,70 @@
|
||||
# dependency.py
|
||||
from fastapi import Request, Depends
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
|
||||
from adapters.google_adapter import GoogleAdapter
|
||||
from api.service.generation_service import GenerationService
|
||||
from repos.dao import DAO
|
||||
from api.service.album_service import AlbumService
|
||||
|
||||
|
||||
# ... ваши импорты ...
|
||||
|
||||
from aiogram import Bot
|
||||
|
||||
from adapters.s3_adapter import S3Adapter
|
||||
from typing import Optional
|
||||
|
||||
# Провайдеры "сырых" клиентов из состояния приложения
|
||||
def get_mongo_client(request: Request) -> AsyncIOMotorClient:
    """Provide the shared Mongo client stored on application state."""
    state = request.app.state
    return state.mongo_client
|
||||
|
||||
def get_gemini_client(request: Request) -> GoogleAdapter:
    """Provide the shared Gemini (GoogleAdapter) client from application state."""
    state = request.app.state
    return state.gemini_client
|
||||
|
||||
def get_bot_client(request: Request) -> Bot:
    """Provide the shared aiogram Bot instance from application state."""
    state = request.app.state
    return state.bot
|
||||
|
||||
def get_s3_adapter(request: Request) -> Optional[S3Adapter]:
    """Provide the shared S3 adapter from application state (may be None)."""
    state = request.app.state
    return state.s3_adapter
|
||||
|
||||
# Провайдер DAO (собирается из mongo_client)
|
||||
def get_dao(
    mongo_client: AsyncIOMotorClient = Depends(get_mongo_client),
    s3_adapter: Optional[S3Adapter] = Depends(get_s3_adapter)
) -> DAO:
    """Assemble a request-scoped DAO from the raw clients.

    FastAPI caches each Depends result within a single request, so the DAO
    is constructed exactly once per request.
    """
    dao = DAO(mongo_client, s3_adapter)
    return dao
|
||||
|
||||
# Провайдер сервиса (собирается из DAO и Gemini)
|
||||
def get_generation_service(
    dao: DAO = Depends(get_dao),
    gemini: GoogleAdapter = Depends(get_gemini_client),
    s3_adapter: S3Adapter = Depends(get_s3_adapter),
    bot: Bot = Depends(get_bot_client),
) -> GenerationService:
    """Assemble a request-scoped GenerationService from its dependencies.

    Args:
        dao: request-scoped data-access object.
        gemini: shared Gemini adapter.
        s3_adapter: shared S3/MinIO adapter.
        bot: shared aiogram Bot instance.
    """
    # Keyword arguments: consistent with how the service is constructed at
    # application startup, and robust against parameter reordering.
    return GenerationService(dao=dao, gemini=gemini, s3_adapter=s3_adapter, bot=bot)
|
||||
|
||||
from api.service.idea_service import IdeaService
|
||||
|
||||
def get_idea_service(dao: DAO = Depends(get_dao)) -> IdeaService:
    """Provide an IdeaService built on the request-scoped DAO."""
    service = IdeaService(dao)
    return service
|
||||
|
||||
from fastapi import Header
|
||||
|
||||
async def get_project_id(x_project_id: Optional[str] = Header(None, alias="X-Project-ID")) -> Optional[str]:
    """Read the optional X-Project-ID header used for project scoping."""
    return x_project_id
|
||||
|
||||
async def get_album_service(dao: DAO = Depends(get_dao)) -> AlbumService:
    """Provide an AlbumService built on the request-scoped DAO."""
    service = AlbumService(dao)
    return service
|
||||
|
||||
from api.service.post_service import PostService
|
||||
|
||||
def get_post_service(dao: DAO = Depends(get_dao)) -> PostService:
    """Provide a PostService built on the request-scoped DAO."""
    service = PostService(dao)
    return service
|
||||
|
||||
from api.service.inspiration_service import InspirationService
|
||||
|
||||
def get_inspiration_service(dao: DAO = Depends(get_dao), s3_adapter: S3Adapter = Depends(get_s3_adapter)) -> InspirationService:
    """Provide an InspirationService built on the request-scoped DAO and S3 adapter."""
    service = InspirationService(dao, s3_adapter)
    return service
|
||||
0
api/endpoints/__init__.py
Normal file
0
api/endpoints/__init__.py
Normal file
BIN
api/endpoints/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
api/endpoints/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/endpoints/__pycache__/admin.cpython-313.pyc
Normal file
BIN
api/endpoints/__pycache__/admin.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/endpoints/__pycache__/assets_router.cpython-313.pyc
Normal file
BIN
api/endpoints/__pycache__/assets_router.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/endpoints/__pycache__/auth.cpython-313.pyc
Normal file
BIN
api/endpoints/__pycache__/auth.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/endpoints/__pycache__/character_router.cpython-313.pyc
Normal file
BIN
api/endpoints/__pycache__/character_router.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/endpoints/__pycache__/generation_router.cpython-313.pyc
Normal file
BIN
api/endpoints/__pycache__/generation_router.cpython-313.pyc
Normal file
Binary file not shown.
98
api/endpoints/admin.py
Normal file
98
api/endpoints/admin.py
Normal file
@@ -0,0 +1,98 @@
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from pydantic import BaseModel
|
||||
|
||||
from repos.user_repo import UsersRepo, UserStatus
|
||||
from api.dependency import get_dao
|
||||
from repos.dao import DAO
|
||||
from utils.security import verify_password, create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES, ALGORITHM, SECRET_KEY
|
||||
from jose import JWTError, jwt
|
||||
from starlette.requests import Request
|
||||
|
||||
router = APIRouter(prefix="/api/admin", tags=["admin"])

# tokenUrl must point at the real token endpoint; the auth router lives
# under the "/api/auth" prefix (see api/endpoints/auth.py), so the
# previous "/auth/token" was a dead path in the OpenAPI docs.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/token")

from api.endpoints.auth import get_users_repo
|
||||
|
||||
async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)], repo: Annotated[UsersRepo, Depends(get_users_repo)]):
    """Resolve the Bearer token to a user record, or raise 401.

    NOTE: duplicated from api.endpoints.auth — consider importing instead.
    """
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        claims = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError:
        raise unauthorized

    username: str | None = claims.get("sub")
    if username is None:
        raise unauthorized

    user = await repo.get_user_by_username(username)
    if user is None:
        raise unauthorized
    return user
|
||||
|
||||
async def get_current_admin(user: Annotated[dict, Depends(get_current_user)]):
    """Allow only users flagged as admins; respond 403 otherwise."""
    if user.get("is_admin"):
        return user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Not enough permissions",
    )
|
||||
|
||||
class UserResponse(BaseModel):
    """Public view of a user record returned by the admin endpoints."""
    username: str
    full_name: str | None = None
    status: str  # presumably a UserStatus value (pending/approved/...) — see repos.user_repo
    created_at: str | None = None  # stringified timestamp (see list_pending_users)
    is_admin: bool

    class Config:
        # Allow construction from objects with attributes (pydantic "ORM mode").
        from_attributes = True
|
||||
|
||||
@router.get("/approvals", response_model=list[UserResponse])
async def list_pending_users(
    admin: Annotated[dict, Depends(get_current_admin)],
    repo: Annotated[UsersRepo, Depends(get_users_repo)]
):
    """Return all users awaiting approval (admin only)."""
    pending = await repo.get_pending_users()
    # Map the raw repo dicts onto the response model explicitly.
    responses = []
    for record in pending:
        responses.append(
            UserResponse(
                username=record["username"],
                full_name=record.get("full_name"),
                status=record["status"],
                created_at=str(record.get("created_at")),
                is_admin=record.get("is_admin", False),
            )
        )
    return responses
|
||||
|
||||
@router.post("/approve/{username}")
async def approve_user(
    username: str,
    admin: Annotated[dict, Depends(get_current_admin)],
    repo: Annotated[UsersRepo, Depends(get_users_repo)]
):
    """Approve a pending user by username (admin only); 404 if unknown."""
    existing = await repo.get_user_by_username(username)
    if not existing:
        raise HTTPException(status_code=404, detail="User not found")
    await repo.approve_user(username)
    return {"message": f"User {username} approved"}
|
||||
|
||||
@router.post("/deny/{username}")
async def deny_user(
    username: str,
    admin: Annotated[dict, Depends(get_current_admin)],
    repo: Annotated[UsersRepo, Depends(get_users_repo)]
):
    """Deny a pending user by username (admin only); 404 if unknown."""
    existing = await repo.get_user_by_username(username)
    if not existing:
        raise HTTPException(status_code=404, detail="User not found")
    await repo.deny_user(username)
    return {"message": f"User {username} denied"}
|
||||
83
api/endpoints/album_router.py
Normal file
83
api/endpoints/album_router.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from fastapi import APIRouter, HTTPException, status, Request
|
||||
from pydantic import BaseModel
|
||||
|
||||
from api.models.GenerationRequest import GenerationResponse
|
||||
from models.Album import Album
|
||||
from repos.dao import DAO
|
||||
from api.dependency import get_album_service
|
||||
from api.service.album_service import AlbumService
|
||||
|
||||
|
||||
# Album CRUD endpoints; each handler pulls AlbumService from app.state.
router = APIRouter(prefix="/api/albums", tags=["Albums"])
|
||||
|
||||
class AlbumCreateRequest(BaseModel):
    """Payload for creating an album."""
    name: str
    description: str | None = None
|
||||
|
||||
class AlbumUpdateRequest(BaseModel):
    """Partial-update payload for an album; None fields are left unchanged by the service."""
    name: str | None = None
    description: str | None = None
|
||||
|
||||
class AlbumResponse(BaseModel):
    """API representation of an album."""
    id: str
    name: str
    description: str | None = None
    generation_ids: list[str] = []  # ids of generations attached to this album
    cover_asset_id: str | None = None  # Not implemented yet
|
||||
|
||||
@router.post("", response_model=AlbumResponse)
async def create_album(request: Request, album_in: AlbumCreateRequest):
    """Create a new album and return its API representation."""
    service: AlbumService = request.app.state.album_service
    created = await service.create_album(name=album_in.name, description=album_in.description)
    return AlbumResponse(**created.model_dump())
|
||||
|
||||
@router.get("", response_model=list[AlbumResponse])
async def get_albums(request: Request, limit: int = 10, offset: int = 0):
    """List albums with simple limit/offset pagination."""
    service: AlbumService = request.app.state.album_service
    found = await service.get_albums(limit=limit, offset=offset)
    responses = []
    for item in found:
        responses.append(AlbumResponse(**item.model_dump()))
    return responses
|
||||
|
||||
@router.get("/{album_id}", response_model=AlbumResponse)
async def get_album(request: Request, album_id: str):
    """Fetch a single album by id; 404 when missing."""
    service: AlbumService = request.app.state.album_service
    found = await service.get_album(album_id)
    if found:
        return AlbumResponse(**found.model_dump())
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Album not found")
|
||||
|
||||
@router.put("/{album_id}", response_model=AlbumResponse)
async def update_album(request: Request, album_id: str, album_in: AlbumUpdateRequest):
    """Update an album's name and/or description; 404 when missing."""
    service: AlbumService = request.app.state.album_service
    updated = await service.update_album(album_id, name=album_in.name, description=album_in.description)
    if updated:
        return AlbumResponse(**updated.model_dump())
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Album not found")
|
||||
|
||||
@router.delete("/{album_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_album(request: Request, album_id: str):
    """Delete an album; 404 when it does not exist, 204 on success."""
    service: AlbumService = request.app.state.album_service
    was_deleted = await service.delete_album(album_id)
    if not was_deleted:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Album not found")
|
||||
|
||||
@router.post("/{album_id}/generations/{generation_id}")
async def add_generation_to_album(request: Request, album_id: str, generation_id: str):
    """Attach a generation to an album; 404 when either is missing."""
    service: AlbumService = request.app.state.album_service
    added = await service.add_generation_to_album(album_id, generation_id)
    if added:
        return {"status": "success"}
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Album or Generation not found")
|
||||
|
||||
@router.delete("/{album_id}/generations/{generation_id}")
async def remove_generation_from_album(request: Request, album_id: str, generation_id: str):
    """Detach a generation from an album; 404 when either is missing."""
    service: AlbumService = request.app.state.album_service
    removed = await service.remove_generation_from_album(album_id, generation_id)
    if removed:
        return {"status": "success"}
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Album or Generation not found")
|
||||
|
||||
@router.get("/{album_id}/generations", response_model=list[GenerationResponse])
async def get_album_generations(request: Request, album_id: str, limit: int = 10, offset: int = 0):
    """List generations attached to an album, with limit/offset pagination."""
    service: AlbumService = request.app.state.album_service
    found = await service.get_generations_by_album(album_id, limit=limit, offset=offset)
    return [GenerationResponse(**item.model_dump()) for item in found]
|
||||
382
api/endpoints/assets_router.py
Normal file
382
api/endpoints/assets_router.py
Normal file
@@ -0,0 +1,382 @@
|
||||
from typing import Any
|
||||
|
||||
from aiogram.types import BufferedInputFile
|
||||
from bson import ObjectId
|
||||
from fastapi import APIRouter, UploadFile, File, Form, Depends
|
||||
from fastapi.openapi.models import MediaType
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
from pymongo import MongoClient
|
||||
from starlette import status
|
||||
from starlette.exceptions import HTTPException
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response, JSONResponse, StreamingResponse
|
||||
|
||||
from adapters.s3_adapter import S3Adapter
|
||||
from api.models import AssetsResponse, AssetResponse
|
||||
from models.Asset import Asset, AssetType, AssetContentType
|
||||
from repos.dao import DAO
|
||||
from api.dependency import get_dao, get_mongo_client, get_s3_adapter
|
||||
import asyncio
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)

from api.endpoints.auth import get_current_user
from api.dependency import get_project_id

# Asset endpoints: serving/streaming, deletion, listing, upload,
# plus maintenance tasks (thumbnail regeneration, MinIO migration).
router = APIRouter(prefix="/api/assets", tags=["Assets"])
|
||||
|
||||
|
||||
@router.get("/{asset_id}")
async def get_asset(
    asset_id: str,
    request: Request,
    thumbnail: bool = False,
    dao: DAO = Depends(get_dao),
    s3_adapter: S3Adapter = Depends(get_s3_adapter),
) -> Response:
    """Serve an asset's bytes.

    Order of preference: S3 thumbnail / in-DB thumbnail (when thumbnail=True),
    then S3 main object (streamed, with HTTP Range support for video),
    then legacy in-DB data. 404 when the asset or its data is missing.
    """
    logger.debug(f"get_asset called for ID: {asset_id}, thumbnail={thumbnail}")
    # Load metadata only (no data/thumbnail bytes).
    asset = await dao.assets.get_asset(asset_id, with_data=False)
    if not asset:
        raise HTTPException(status_code=404, detail="Asset not found")

    # Aggressive immutable caching — asset content never changes for a given id.
    base_headers = {
        "Cache-Control": "public, max-age=31536000, immutable",
        "Accept-Ranges": "bytes"
    }

    # Thumbnail: small, safe to load fully into RAM.
    if thumbnail:
        if asset.minio_thumbnail_object_name and s3_adapter:
            thumb_bytes = await s3_adapter.get_file(asset.minio_thumbnail_object_name)
            if thumb_bytes:
                return Response(content=thumb_bytes, media_type="image/jpeg", headers=base_headers)
        # Fallback: thumbnail stored in DB
        if asset.thumbnail:
            return Response(content=asset.thumbnail, media_type="image/jpeg", headers=base_headers)
        # No thumbnail available — fall through to main content

    # Main content: stream from S3 without buffering it in RAM.
    if asset.minio_object_name and s3_adapter:
        content_type = "image/png"
        if asset.content_type == AssetContentType.VIDEO:
            content_type = "video/mp4"  # Or detect from extension if stored
        elif asset.content_type == AssetContentType.IMAGE:
            content_type = "image/png"  # Default for images

        # Extension-based refinement overrides the defaults above.
        # Better content type detection based on extension if possible, but for now this is okay
        if asset.minio_object_name.endswith(".mp4"):
            content_type = "video/mp4"
        elif asset.minio_object_name.endswith(".mov"):
            content_type = "video/quicktime"
        elif asset.minio_object_name.endswith(".jpg") or asset.minio_object_name.endswith(".jpeg"):
            content_type = "image/jpeg"

        # Handle Range requests for video streaming (browsers seek with these).
        range_header = request.headers.get("range")
        file_size = await s3_adapter.get_file_size(asset.minio_object_name)

        if range_header and file_size:
            try:
                # Parse Range header: bytes=start-end (end may be empty).
                byte_range = range_header.replace("bytes=", "")
                start_str, end_str = byte_range.split("-")
                start = int(start_str)
                end = int(end_str) if end_str else file_size - 1

                # Validate range
                if start >= file_size:
                    # 416 Range Not Satisfiable
                    return Response(status_code=416, headers={"Content-Range": f"bytes */{file_size}"})

                chunk_size = end - start + 1

                headers = base_headers.copy()
                headers.update({
                    "Content-Range": f"bytes {start}-{end}/{file_size}",
                    "Content-Length": str(chunk_size),
                })

                # Pass the exact range string through to S3.
                s3_range = f"bytes={start}-{end}"

                return StreamingResponse(
                    s3_adapter.stream_file(asset.minio_object_name, range_header=s3_range),
                    status_code=206,
                    headers=headers,
                    media_type=content_type
                )
            except ValueError:
                pass  # Fallback to full content if range parsing fails

        # Full-content response (no/invalid Range header).
        headers = base_headers.copy()
        if file_size:
            headers["Content-Length"] = str(file_size)

        return StreamingResponse(
            s3_adapter.stream_file(asset.minio_object_name),
            media_type=content_type,
            headers=headers,
        )

    # Fallback: data stored in DB (legacy).
    # NOTE(review): legacy data is always served as image/png even when the
    # asset is not a PNG — confirm whether legacy data can be other types.
    if asset.data:
        return Response(content=asset.data, media_type="image/png", headers=base_headers)

    raise HTTPException(status_code=404, detail="Asset data not found")
|
||||
|
||||
@router.delete("/orphans", dependencies=[Depends(get_current_user)])
async def delete_orphan_assets_from_minio(
    mongo: AsyncIOMotorClient = Depends(get_mongo_client),
    minio_client: S3Adapter = Depends(get_s3_adapter),
    *,
    assets_collection: str = "assets",
    generations_collection: str = "generations",
    asset_type: str | None = "generated",
    project_id: str | None = None,
    dry_run: bool = True,
    mark_assets_deleted: bool = False,
    batch_size: int = 500,
) -> dict[str, Any]:
    """Find assets referenced by no live generation and optionally delete their MinIO objects.

    An asset counts as an orphan when no generation with is_deleted != True
    lists the asset's id in its result_list. With dry_run=True (the default)
    nothing is deleted — orphans are only collected and logged. With
    mark_assets_deleted=True the orphaned asset documents are additionally
    soft-deleted (is_deleted=True) in Mongo.

    Returns:
        Summary dict with the filters used, orphan count, deletion counters
        and any per-asset errors.
    """
    # NOTE(review): DB name is hard-coded here while the rest of the app
    # uses DB_NAME — confirm and unify.
    db = mongo['bot_db']  # the client itself comes from get_mongo_client
    assets = db[assets_collection]

    match_assets: dict[str, Any] = {}
    if asset_type is not None:
        match_assets["type"] = asset_type
    if project_id is not None:
        match_assets["project_id"] = project_id

    pipeline: list[dict[str, Any]] = [
        {"$match": match_assets} if match_assets else {"$match": {}},
        {
            "$lookup": {
                "from": generations_collection,
                "let": {"assetIdStr": {"$toString": "$_id"}},
                "pipeline": [
                    # "alive" generations: is_deleted != True (false or field absent)
                    {"$match": {"is_deleted": {"$ne": True}}},
                    {
                        "$match": {
                            "$expr": {
                                "$in": [
                                    "$$assetIdStr",
                                    {"$ifNull": ["$result_list", []]},
                                ]
                            }
                        }
                    },
                    # One hit is enough to prove the asset is referenced.
                    {"$limit": 1},
                ],
                "as": "alive_generations",
            }
        },
        {
            "$match": {
                "$expr": {"$eq": [{"$size": "$alive_generations"}, 0]}
            }
        },
        {
            "$project": {
                "_id": 1,
                "minio_object_name": 1,
                "minio_thumbnail_object_name": 1,
            }
        },
    ]
    # Was a bare print(); route diagnostics through the module logger instead.
    logger.debug("Orphan-asset aggregation pipeline: %s", pipeline)
    cursor = assets.aggregate(pipeline, allowDiskUse=True, batchSize=batch_size)

    deleted_objects = 0
    deleted_assets = 0
    errors: list[dict[str, Any]] = []
    orphan_asset_ids: list[ObjectId] = []

    async for asset in cursor:
        aid = asset["_id"]
        obj = asset.get("minio_object_name")
        thumb = asset.get("minio_thumbnail_object_name")

        orphan_asset_ids.append(aid)

        if dry_run:
            # Was print(); keep the same message but use the logger.
            logger.info(f"[DRY RUN] orphan asset={aid} obj={obj} thumb={thumb}")
            continue

        try:
            if obj:
                await minio_client.delete_file(obj)
                deleted_objects += 1

            if thumb:
                await minio_client.delete_file(thumb)
                deleted_objects += 1

            deleted_assets += 1

        except Exception as e:
            # Collect per-asset failures and keep going; report them at the end.
            errors.append({"asset_id": str(aid), "error": str(e)})

    if (not dry_run) and mark_assets_deleted and orphan_asset_ids:
        res = await assets.update_many(
            {"_id": {"$in": orphan_asset_ids}},
            {"$set": {"is_deleted": True}},
        )
        marked = res.modified_count
    else:
        marked = 0

    return {
        "dry_run": dry_run,
        "filter": {
            "asset_type": asset_type,
            "project_id": project_id,
        },
        "orphans_found": len(orphan_asset_ids),
        "deleted_assets": deleted_assets,
        "deleted_objects": deleted_objects,
        "marked_assets_deleted": marked,
        "errors": errors,
    }
|
||||
|
||||
@router.delete("/{asset_id}", status_code=status.HTTP_204_NO_CONTENT, dependencies=[Depends(get_current_user)])
async def delete_asset(
    asset_id: str,
    dao: DAO = Depends(get_dao)
):
    """Delete an asset by id; respond 404 when it does not exist."""
    logger.info(f"delete_asset called for ID: {asset_id}")
    # The delete call itself doubles as the existence check: it reports
    # whether anything was actually removed.
    was_deleted = await dao.assets.delete_asset(asset_id)
    if not was_deleted:
        raise HTTPException(status_code=404, detail="Asset not found")

    logger.info(f"Asset {asset_id} deleted successfully")
    return None
|
||||
|
||||
|
||||
@router.get("", dependencies=[Depends(get_current_user)])
async def get_assets(request: Request, dao: DAO = Depends(get_dao), type: str | None = None, limit: int = 10, offset: int = 0, current_user: dict = Depends(get_current_user), project_id: str | None = Depends(get_project_id)) -> AssetsResponse:
    """List assets visible to the current user, optionally scoped to a project.

    Without a project id, only the caller's own assets are returned. With a
    valid project (caller must be a member) the creator filter is dropped so
    all project assets are visible.
    """
    logger.info(f"get_assets called. Limit: {limit}, Offset: {offset}")

    # NOTE(review): this reads current_user["id"] while upload_asset reads
    # current_user["_id"] — confirm which key the users repo actually returns;
    # one of the two is likely a KeyError waiting to happen.
    user_id_filter = current_user["id"]
    if project_id:
        project = await dao.projects.get_project(project_id)
        if not project or str(current_user["id"]) not in project.members:
            raise HTTPException(status_code=403, detail="Project access denied")
        # Project members see all project assets, not just their own.
        user_id_filter = None

    assets = await dao.assets.get_assets(type, limit, offset, created_by=user_id_filter, project_id=project_id)
    # assets = await dao.assets.get_assets() # This line seemed redundant/conflicting in original code
    total_count = await dao.assets.get_asset_count(created_by=user_id_filter, project_id=project_id)

    # Manually map to DTO to trigger computed fields validation if necessary,
    # but primarily to ensure valid Pydantic models for the response list.
    # Asset.model_dump() generally includes computed fields (url) if configured.
    # Let's ensure strict conversion.
    asset_responses = [AssetResponse.model_validate(a.model_dump()) for a in assets]

    return AssetsResponse(assets=asset_responses, total_count=total_count)
|
||||
|
||||
|
||||
|
||||
@router.post("/upload", response_model=AssetResponse, status_code=status.HTTP_201_CREATED)
async def upload_asset(
    file: UploadFile = File(...),
    linked_char_id: str | None = Form(None),
    dao: DAO = Depends(get_dao),
    current_user: dict = Depends(get_current_user),
    project_id: str | None = Depends(get_project_id)
):
    """Upload an image file as a new asset.

    Optionally links the asset to a character and/or a project (the caller
    must be a project member). Rejects non-image and empty uploads.
    """
    logger.info(f"upload_asset called. Filename: {file.filename}, ContentType: {file.content_type}, LinkedCharId: {linked_char_id}")
    if not file.content_type:
        raise HTTPException(status_code=400, detail="Unknown file type")

    # Only image uploads are accepted on this endpoint.
    if not file.content_type.startswith("image/"):
        raise HTTPException(status_code=400, detail=f"Unsupported content type: {file.content_type}")

    if project_id:
        project = await dao.projects.get_project(project_id)
        # NOTE(review): reads current_user["_id"] here but get_assets reads
        # current_user["id"] — confirm the repo's key naming and unify.
        if not project or str(current_user["_id"]) not in project.members:
            raise HTTPException(status_code=403, detail="Project access denied")

    data = await file.read()
    if not data:
        raise HTTPException(status_code=400, detail="Empty file")

    # Generate thumbnail in a worker thread so the event loop is not blocked.
    from utils.image_utils import create_thumbnail
    thumbnail_bytes = await asyncio.to_thread(create_thumbnail, data)

    asset = Asset(
        name=file.filename or "upload",
        type=AssetType.UPLOADED,
        content_type=AssetContentType.IMAGE,
        linked_char_id=linked_char_id,
        data=data,
        thumbnail=thumbnail_bytes,
        created_by=str(current_user["_id"]),
        project_id=project_id,
    )

    asset_id = await dao.assets.create_asset(asset)
    asset.id = str(asset_id)
    logger.info(f"Asset created successfully. ID: {asset_id}")

    return AssetResponse(
        id=asset.id,
        name=asset.name,
        type=asset.type.value if hasattr(asset.type, "value") else asset.type,
        content_type=asset.content_type.value if hasattr(asset.content_type, "value") else asset.content_type,
        linked_char_id=asset.linked_char_id,
        created_at=asset.created_at
    )
|
||||
|
||||
|
||||
@router.post("/regenerate_thumbnails", dependencies=[Depends(get_current_user)])
async def regenerate_thumbnails(dao: DAO = Depends(get_dao)):
    """Regenerate thumbnails for image assets that carry inline data.

    Processes up to the first 1000 assets; each image asset with inline data
    gets a freshly generated thumbnail (existing thumbnails are replaced).

    Returns:
        dict with "processed" (image assets examined) and "updated" counts.
    """
    logger.info("Starting thumbnail regeneration task")
    from utils.image_utils import create_thumbnail

    # Get all assets (a pagination loop would be needed for huge datasets);
    # for now fetch the first 1000 with their binary data. Ideally the repo
    # would expose an iteration-friendly method.
    assets = await dao.assets.get_assets(limit=1000, offset=0, with_data=True)
    logger.info(f"Found {len(assets)} assets")
    count = 0
    updated = 0

    for asset in assets:
        if asset.content_type == AssetContentType.IMAGE and asset.data:
            try:
                # Thumbnail generation runs in a worker thread to keep the
                # event loop responsive.
                thumb = await asyncio.to_thread(create_thumbnail, asset.data)
                if thumb:
                    asset.thumbnail = thumb
                    await dao.assets.update_asset(asset.id, asset)
                    updated += 1
            except Exception:
                # logger.exception preserves the traceback (logger.error lost it).
                logger.exception(f"Failed to regenerate thumbnail for asset {asset.id}")
            count += 1

    return {"status": "completed", "processed": count, "updated": updated}
|
||||
|
||||
@router.post("/migrate_to_minio", dependencies=[Depends(get_current_user)])
async def migrate_to_minio(dao: DAO = Depends(get_dao)):
    """
    Migrates assets from MongoDB to MinIO.

    Delegates entirely to the assets repo; returns its result summary.
    """
    logger.info("Starting migration to MinIO")
    result = await dao.assets.migrate_to_minio()
    logger.info(f"Migration result: {result}")
    return result
|
||||
|
||||
123
api/endpoints/auth.py
Normal file
123
api/endpoints/auth.py
Normal file
@@ -0,0 +1,123 @@
|
||||
from datetime import timedelta
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
||||
from pydantic import BaseModel
|
||||
from jose import JWTError, jwt
|
||||
|
||||
from repos.user_repo import UsersRepo, UserStatus
|
||||
from utils.security import verify_password, create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES, ALGORITHM, SECRET_KEY
|
||||
from starlette.requests import Request
|
||||
|
||||
router = APIRouter(prefix="/api/auth", tags=["auth"])
|
||||
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/token")
|
||||
|
||||
async def get_users_repo(request: Request) -> UsersRepo:
    """Resolve the application-wide users repository from app state.

    Raises HTTP 500 when the repository was never attached at startup.
    """
    app_state = request.app.state
    if hasattr(app_state, "users_repo"):
        return app_state.users_repo
    raise HTTPException(status_code=500, detail="Users repo not initialized")
|
||||
|
||||
async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)], repo: Annotated[UsersRepo, Depends(get_users_repo)]):
    """Decode the bearer token and return the matching user record.

    Raises HTTP 401 when the token is invalid, carries no subject claim,
    or the subject does not correspond to a known user.
    """
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        claims = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError:
        raise unauthorized

    username = claims.get("sub")
    if username is None:
        raise unauthorized

    user = await repo.get_user_by_username(username)
    if user is None:
        raise unauthorized
    return user
|
||||
|
||||
async def get_current_admin(user: Annotated[dict, Depends(get_current_user)]):
    """Pass through the authenticated user, rejecting non-admins with HTTP 403."""
    if user.get("is_admin"):
        return user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Not enough permissions",
    )
|
||||
|
||||
|
||||
class UserRegister(BaseModel):
    """Request payload for POST /register."""
    username: str
    password: str
    full_name: str | None = None


class Token(BaseModel):
    """OAuth2 bearer-token response returned by POST /token."""
    access_token: str
    token_type: str


class UserResponse(BaseModel):
    """Public view of a user record (no credential fields)."""
    id: str
    username: str
    full_name: str | None = None
    status: str
    is_admin: bool = False
|
||||
|
||||
|
||||
@router.get("/me", response_model=UserResponse)
async def read_users_me(current_user: Annotated[dict, Depends(get_current_user)]):
    """Return the authenticated caller's own profile."""
    return current_user
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@router.post("/register")
async def register(user_data: UserRegister, repo: Annotated[UsersRepo, Depends(get_users_repo)]):
    """Create a pending account; access is granted later by an administrator.

    Raises HTTP 400 when the repository rejects the data (e.g. duplicate username).
    """
    try:
        await repo.create_user(
            username=user_data.username,
            password=user_data.password,
            full_name=user_data.full_name,
        )
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))

    return {"message": "Registration successful. Please wait for administrator approval."}
|
||||
|
||||
|
||||
@router.post("/token", response_model=Token)
async def login_for_access_token(
        form_data: Annotated[OAuth2PasswordRequestForm, Depends()],
        repo: Annotated[UsersRepo, Depends(get_users_repo)]
):
    """Authenticate with username/password and issue a JWT access token.

    Returns 401 for unknown users or wrong passwords (same message for both,
    to avoid leaking which usernames exist) and 403 for unapproved accounts.
    """
    bad_credentials = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Incorrect username or password",
        headers={"WWW-Authenticate": "Bearer"},
    )

    user = await repo.get_user_by_username(form_data.username)
    if not user:
        raise bad_credentials

    # Verify the password against the stored hash.
    if not verify_password(form_data.password, user["hashed_password"]):
        raise bad_credentials

    # Only admin-approved accounts may log in.
    if user.get("status") != UserStatus.ALLOWED:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Account is not approved yet. Please contact administrator.",
        )

    token = create_access_token(
        data={"sub": user["username"]},
        expires_delta=timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES),
    )
    return {"access_token": token, "token_type": "bearer"}
|
||||
192
api/endpoints/character_router.py
Normal file
192
api/endpoints/character_router.py
Normal file
@@ -0,0 +1,192 @@
|
||||
from typing import Any, Coroutine
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from pydantic import BaseModel
|
||||
from starlette.exceptions import HTTPException
|
||||
from starlette.requests import Request
|
||||
|
||||
from api.models import AssetsResponse, AssetResponse
|
||||
from api.models import GenerationRequest, GenerationResponse
|
||||
from models.Asset import Asset
|
||||
from models.Character import Character
|
||||
from api.models import CharacterCreateRequest, CharacterUpdateRequest
|
||||
from repos.dao import DAO
|
||||
from api.dependency import get_dao
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from api.endpoints.auth import get_current_user
|
||||
from api.dependency import get_project_id
|
||||
|
||||
router = APIRouter(prefix="/api/characters", tags=["Characters"], dependencies=[Depends(get_current_user)])
|
||||
|
||||
|
||||
@router.get("/", response_model=list[Character])
async def get_characters(
        request: Request,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user),
        project_id: str | None = Depends(get_project_id),
        limit: int = 100,
        offset: int = 0
) -> list[Character]:
    """List characters visible to the caller.

    Without a project only the caller's own characters are returned; with a
    project (membership required) all of that project's characters are returned.
    """
    logger.info(f"get_characters called. Limit: {limit}, Offset: {offset}")

    uid = str(current_user["_id"])
    created_by = uid
    if project_id:
        project = await dao.projects.get_project(project_id)
        if not project or uid not in project.members:
            raise HTTPException(status_code=403, detail="Project access denied")
        # Project scope: do not restrict by creator.
        created_by = None

    return await dao.chars.get_all_characters(
        created_by=created_by,
        project_id=project_id,
        limit=limit,
        offset=offset,
    )
|
||||
|
||||
|
||||
@router.get("/{character_id}/assets", response_model=AssetsResponse)
async def get_character_assets(character_id: str, dao: DAO = Depends(get_dao), limit: int = 10,
                               offset: int = 0, current_user: dict = Depends(get_current_user)) -> AssetsResponse:
    """Return a page of assets for a character the caller may access.

    Owning (or sharing a project with) the character grants visibility of all
    its assets regardless of who created each individual asset.
    """
    logger.info(f"get_character_assets called. CharacterID: {character_id}, Limit: {limit}, Offset: {offset}")
    character = await dao.chars.get_character(character_id)
    if character is None:
        raise HTTPException(status_code=404, detail="Character not found")

    # Caller must be the creator or a member of the character's project.
    uid = str(current_user["_id"])
    in_project = bool(
        character.project_id and character.project_id in current_user.get("project_ids", [])
    )
    if character.created_by != uid and not in_project:
        raise HTTPException(status_code=403, detail="Access denied")

    assets = await dao.assets.get_assets_by_char_id(character_id, limit, offset)
    total_count = await dao.assets.get_asset_count(character_id)

    return AssetsResponse(
        assets=[AssetResponse.model_validate(a.model_dump()) for a in assets],
        total_count=total_count,
    )
|
||||
|
||||
|
||||
@router.get("/{character_id}", response_model=Character)
async def get_character_by_id(character_id: str, request: Request, dao: DAO = Depends(get_dao), current_user: dict = Depends(get_current_user)) -> Character:
    """Fetch a single character, enforcing creator/project-member access.

    Raises 404 when the id is unknown and 403 when the caller is neither the
    creator nor a member of the character's project.
    """
    logger.debug(f"get_character_by_id called. ID: {character_id}")
    character = await dao.chars.get_character(character_id)

    if not character:
        raise HTTPException(status_code=404, detail="Character not found")

    # NOTE: the previous `if character:` wrapper here was dead code — it was
    # always true after the 404 guard above, so it has been removed.
    is_creator = character.created_by == str(current_user["_id"])
    is_project_member = bool(
        character.project_id and character.project_id in current_user.get("project_ids", [])
    )
    if not is_creator and not is_project_member:
        raise HTTPException(status_code=403, detail="Access denied")

    return character
|
||||
|
||||
|
||||
@router.post("/", response_model=Character)
async def create_character(
        char_req: CharacterCreateRequest,
        project_id: str | None = Depends(get_project_id),
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
) -> Character:
    """Create a character owned by the caller, optionally inside a project.

    Raises 403 when a project id is supplied but the caller is not a member.
    """
    logger.info("create_character called")
    char_req.project_id = project_id
    char_data = char_req.model_dump()
    char_data["created_by"] = str(current_user["_id"])
    if "id" not in char_data:
        char_data["id"] = None

    if project_id:
        project = await dao.projects.get_project(project_id)
        if not project or str(current_user["_id"]) not in project.members:
            raise HTTPException(status_code=403, detail="Project access denied")

    new_char = Character(**char_data)
    # Derive the avatar asset id from the avatar URL's last path segment.
    # Guarded: when avatar_image is empty/None, `.split` would raise
    # AttributeError and turn the request into an HTTP 500.
    if new_char.avatar_image:
        new_char.avatar_asset_id = new_char.avatar_image.split("/")[-1]
    created_char = await dao.chars.add_character(new_char)
    return created_char
|
||||
|
||||
|
||||
@router.put("/{character_id}", response_model=Character)
async def update_character(
        character_id: str,
        char_update: CharacterUpdateRequest,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
) -> Character:
    """Apply a partial update to a character the caller may modify.

    Raises 404 for unknown ids, 403 when the caller lacks access to the
    character or to a target project, and 500 when persistence fails.
    """
    logger.info(f"update_character called. ID: {character_id}")

    existing_char = await dao.chars.get_character(character_id)
    if not existing_char:
        raise HTTPException(status_code=404, detail="Character not found")

    uid = str(current_user["_id"])
    member_of_project = bool(
        existing_char.project_id
        and existing_char.project_id in current_user.get("project_ids", [])
    )
    if existing_char.created_by != uid and not member_of_project:
        raise HTTPException(status_code=403, detail="Access denied")

    update_data = char_update.model_dump(exclude_unset=True)

    # Moving the character into another project requires membership there too.
    if update_data.get("project_id"):
        target = await dao.projects.get_project(update_data["project_id"])
        if not target or uid not in target.members:
            raise HTTPException(status_code=403, detail="Target project access denied")

    merged = existing_char.model_dump()
    merged.update(update_data)
    updated_char = Character(**merged)

    if not await dao.chars.update_char(character_id, updated_char):
        raise HTTPException(status_code=500, detail="Failed to update character")

    return updated_char
|
||||
|
||||
|
||||
@router.delete("/{character_id}", status_code=204)
async def delete_character(
        character_id: str,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Delete a character the caller created or shares a project with."""
    logger.info(f"delete_character called. ID: {character_id}")

    existing_char = await dao.chars.get_character(character_id)
    if not existing_char:
        raise HTTPException(status_code=404, detail="Character not found")

    uid = str(current_user["_id"])
    member_of_project = bool(
        existing_char.project_id
        and existing_char.project_id in current_user.get("project_ids", [])
    )
    if existing_char.created_by != uid and not member_of_project:
        raise HTTPException(status_code=403, detail="Access denied")

    if not await dao.chars.delete_character(character_id):
        raise HTTPException(status_code=500, detail="Failed to delete character")

    return
|
||||
191
api/endpoints/environment_router.py
Normal file
191
api/endpoints/environment_router.py
Normal file
@@ -0,0 +1,191 @@
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from starlette import status
|
||||
|
||||
from api.dependency import get_dao
|
||||
from api.endpoints.auth import get_current_user
|
||||
from api.models.EnvironmentRequest import EnvironmentCreate, EnvironmentUpdate, AssetToEnvironment, AssetsToEnvironment
|
||||
from models.Environment import Environment
|
||||
from repos.dao import DAO
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/environments", tags=["Environments"], dependencies=[Depends(get_current_user)])
|
||||
|
||||
|
||||
async def check_character_access(character_id: str, current_user: dict, dao: DAO):
    """Load a character and verify the caller may act on it.

    Returns the character; raises 404 when it does not exist and 403 when the
    caller is neither its creator nor a member of its project.
    """
    character = await dao.chars.get_character(character_id)
    if not character:
        raise HTTPException(status_code=404, detail="Character not found")

    uid = str(current_user["_id"])
    in_project = bool(
        character.project_id and character.project_id in current_user.get("project_ids", [])
    )
    if character.created_by != uid and not in_project:
        raise HTTPException(status_code=403, detail="Access denied to character")
    return character
|
||||
|
||||
|
||||
@router.post("/", response_model=Environment)
async def create_environment(
        env_req: EnvironmentCreate,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Create an environment for a character the caller may access.

    Raises 400 when any referenced asset does not exist.
    """
    logger.info(f"Creating environment '{env_req.name}' for character {env_req.character_id}")
    await check_character_access(env_req.character_id, current_user, dao)

    # Verify assets exist if provided. Previously this issued one query per
    # asset id (N+1); use the same single batch lookup that update_environment
    # and the /assets/batch endpoint already use.
    if env_req.asset_ids:
        assets = await dao.assets.get_assets_by_ids(env_req.asset_ids)
        if len(assets) != len(env_req.asset_ids):
            found_ids = {a.id for a in assets}
            missing = [aid for aid in env_req.asset_ids if aid not in found_ids]
            # Report the first missing id, matching the original per-asset message.
            raise HTTPException(status_code=400, detail=f"Asset {missing[0]} not found")

    new_env = Environment(**env_req.model_dump())
    created_env = await dao.environments.create_env(new_env)
    return created_env
|
||||
|
||||
|
||||
@router.get("/character/{character_id}", response_model=list[Environment])
async def get_character_environments(
        character_id: str,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """List every environment attached to a character the caller may access."""
    logger.info(f"Getting environments for character {character_id}")
    await check_character_access(character_id, current_user, dao)
    envs = await dao.environments.get_character_envs(character_id)
    return envs
|
||||
|
||||
|
||||
@router.get("/{env_id}", response_model=Environment)
async def get_environment(
        env_id: str,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Fetch one environment, authorizing via its owning character."""
    env = await dao.environments.get_env(env_id)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")

    # Access to an environment is governed by access to its character.
    await check_character_access(env.character_id, current_user, dao)
    return env
|
||||
|
||||
|
||||
@router.put("/{env_id}", response_model=Environment)
async def update_environment(
        env_id: str,
        env_update: EnvironmentUpdate,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Partially update an environment; asset ids, when given, must all exist.

    Returns the freshly re-read environment after a successful update.
    """
    env = await dao.environments.get_env(env_id)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")

    await check_character_access(env.character_id, current_user, dao)

    update_data = env_update.model_dump(exclude_unset=True)
    if not update_data:
        # Nothing to change — return the current state untouched.
        return env

    if "asset_ids" in update_data:
        asset_ids = update_data["asset_ids"]
        if asset_ids is None:
            # Explicit null means "leave the asset list alone".
            update_data.pop("asset_ids")
        elif asset_ids:
            # Batch-verify that every referenced asset exists.
            assets = await dao.assets.get_assets_by_ids(asset_ids)
            if len(assets) != len(asset_ids):
                found_ids = {a.id for a in assets}
                missing_ids = [aid for aid in asset_ids if aid not in found_ids]
                raise HTTPException(status_code=400, detail=f"Some assets not found: {missing_ids}")

    if not await dao.environments.update_env(env_id, update_data):
        raise HTTPException(status_code=500, detail="Failed to update environment")

    return await dao.environments.get_env(env_id)
|
||||
|
||||
|
||||
@router.delete("/{env_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_environment(
        env_id: str,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Remove an environment after authorizing via its owning character."""
    env = await dao.environments.get_env(env_id)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")

    await check_character_access(env.character_id, current_user, dao)

    if not await dao.environments.delete_env(env_id):
        raise HTTPException(status_code=500, detail="Failed to delete environment")
    return None
|
||||
|
||||
|
||||
@router.post("/{env_id}/assets", status_code=status.HTTP_200_OK)
async def add_asset_to_environment(
        env_id: str,
        req: AssetToEnvironment,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Attach one existing asset to an environment the caller may modify."""
    env = await dao.environments.get_env(env_id)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")

    await check_character_access(env.character_id, current_user, dao)

    # The asset must exist before it can be linked.
    if not await dao.assets.get_asset(req.asset_id):
        raise HTTPException(status_code=404, detail="Asset not found")

    linked = await dao.environments.add_asset(env_id, req.asset_id)
    return {"success": linked}
|
||||
|
||||
|
||||
@router.post("/{env_id}/assets/batch", status_code=status.HTTP_200_OK)
async def add_assets_batch_to_environment(
        env_id: str,
        req: AssetsToEnvironment,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Attach several assets at once; every id must refer to an existing asset."""
    env = await dao.environments.get_env(env_id)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")

    await check_character_access(env.character_id, current_user, dao)

    # Batch-verify existence and report exactly which ids are unknown.
    assets = await dao.assets.get_assets_by_ids(req.asset_ids)
    if len(assets) != len(req.asset_ids):
        found_ids = {a.id for a in assets}
        missing_ids = [aid for aid in req.asset_ids if aid not in found_ids]
        raise HTTPException(status_code=404, detail=f"Some assets not found: {missing_ids}")

    linked = await dao.environments.add_assets(env_id, req.asset_ids)
    return {"success": linked}
|
||||
|
||||
|
||||
@router.delete("/{env_id}/assets/{asset_id}", status_code=status.HTTP_200_OK)
async def remove_asset_from_environment(
        env_id: str,
        asset_id: str,
        dao: DAO = Depends(get_dao),
        current_user: dict = Depends(get_current_user)
):
    """Detach an asset from an environment the caller may modify."""
    env = await dao.environments.get_env(env_id)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")

    await check_character_access(env.character_id, current_user, dao)

    removed = await dao.environments.remove_asset(env_id, asset_id)
    return {"success": removed}
|
||||
258
api/endpoints/generation_router.py
Normal file
258
api/endpoints/generation_router.py
Normal file
@@ -0,0 +1,258 @@
|
||||
import logging
|
||||
import json
|
||||
|
||||
from fastapi import APIRouter, UploadFile, File, Form, Header, HTTPException
|
||||
from fastapi.params import Depends
|
||||
from starlette import status
|
||||
from starlette.requests import Request
|
||||
|
||||
from config import settings
|
||||
from api.dependency import get_generation_service, get_project_id, get_dao
|
||||
from api.endpoints.auth import get_current_user
|
||||
from api.models import (
|
||||
GenerationResponse,
|
||||
GenerationRequest,
|
||||
GenerationsResponse,
|
||||
PromptResponse,
|
||||
PromptRequest,
|
||||
GenerationGroupResponse,
|
||||
FinancialReport,
|
||||
ExternalGenerationRequest,
|
||||
NsfwRequest
|
||||
)
|
||||
from api.service.generation_service import GenerationService
|
||||
from repos.dao import DAO
|
||||
from utils.external_auth import verify_signature
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix='/api/generations', tags=["Generation"])
|
||||
|
||||
|
||||
async def check_project_access(project_id: str | None, current_user: dict, dao: DAO):
    """Raise HTTP 403 unless the caller is a member of the given project.

    A missing/empty project id is a no-op (no project scope requested).
    """
    if not project_id:
        return
    project = await dao.projects.get_project(project_id)
    if project and str(current_user["_id"]) in project.members:
        return
    raise HTTPException(status_code=403, detail="Project access denied")
|
||||
|
||||
|
||||
@router.post("/prompt-assistant", response_model=PromptResponse)
async def ask_prompt_assistant(
        prompt_request: PromptRequest,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
) -> PromptResponse:
    """Run the user's draft prompt through the prompt-assistant model."""
    logger.info(f"ask_prompt_assistant: {len(prompt_request.prompt)} chars")
    improved = await generation_service.ask_prompt_assistant(
        prompt_request.prompt,
        prompt_request.linked_assets,
        prompt_request.model,
    )
    return PromptResponse(prompt=improved)
|
||||
|
||||
|
||||
@router.post("/prompt-from-image", response_model=PromptResponse)
async def prompt_from_image(
        prompt: str | None = Form(None),
        model: str = Form("gemini-3.1-pro-preview"),
        images: list[UploadFile] = File(...),
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
) -> PromptResponse:
    """Derive a text prompt from one or more uploaded images."""
    payloads = []
    for upload in images:
        payloads.append(await upload.read())
    result = await generation_service.generate_prompt_from_images(payloads, prompt, model)
    return PromptResponse(prompt=result)
|
||||
|
||||
|
||||
@router.get("", response_model=GenerationsResponse)
async def get_generations(
        character_id: str | None = None,
        limit: int = 10,
        offset: int = 0,
        only_liked: bool = False,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user),
        project_id: str | None = Depends(get_project_id),
        dao: DAO = Depends(get_dao)
):
    """Page through generations; project scope shows every member's work."""
    await check_project_access(project_id, current_user, dao)

    uid = str(current_user["_id"])
    # Inside a project, show everyone's generations; otherwise only the caller's.
    created_by = None if project_id else uid
    liked_by = uid if only_liked else None

    return await generation_service.get_generations(
        character_id=character_id,
        limit=limit,
        offset=offset,
        created_by=created_by,
        project_id=project_id,
        only_liked_by=liked_by,
        current_user_id=uid,
    )
|
||||
|
||||
|
||||
@router.get("/usage", response_model=FinancialReport)
async def get_usage_report(
        breakdown: str | None = None,  # "user" or "project"
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user),
        project_id: str | None = Depends(get_project_id),
        dao: DAO = Depends(get_dao)
) -> FinancialReport:
    """Build a spend report, optionally broken down per user or per project."""
    await check_project_access(project_id, current_user, dao)

    user_id_filter = None if project_id else str(current_user["_id"])
    # Map the query value onto the underlying grouping field (None otherwise).
    breakdown_by = {"user": "created_by", "project": "project_id"}.get(breakdown)

    return await generation_service.get_financial_report(
        user_id=user_id_filter,
        project_id=project_id,
        breakdown_by=breakdown_by,
    )
|
||||
|
||||
|
||||
@router.post("/_run", response_model=GenerationGroupResponse)
async def post_generation(
        generation: GenerationRequest,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user),
        project_id: str | None = Depends(get_project_id),
        dao: DAO = Depends(get_dao)
) -> GenerationGroupResponse:
    """Queue a new generation task for the caller, tagged with the project scope."""
    await check_project_access(project_id, current_user, dao)
    if project_id:
        # The header-supplied project overrides whatever the body carried.
        generation.project_id = project_id

    task = await generation_service.create_generation_task(
        generation,
        user_id=str(current_user.get("_id")),
    )
    return task
|
||||
|
||||
|
||||
@router.get("/running")
async def get_running_generations(
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user),
        project_id: str | None = Depends(get_project_id),
        dao: DAO = Depends(get_dao)
):
    """List in-flight generations, project-wide when a project is selected."""
    await check_project_access(project_id, current_user, dao)
    user_filter = None if project_id else str(current_user["_id"])
    running = await generation_service.get_running_generations(
        user_id=user_filter,
        project_id=project_id,
    )
    return running
|
||||
|
||||
|
||||
@router.get("/group/{group_id}", response_model=GenerationGroupResponse)
async def get_generation_group(
        group_id: str,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
):
    """Return all generations in a group, with like state for the caller."""
    uid = str(current_user["_id"])
    return await generation_service.get_generations_by_group(group_id, current_user_id=uid)
|
||||
|
||||
|
||||
@router.get("/{generation_id}", response_model=GenerationResponse)
async def get_generation(
        generation_id: str,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
) -> GenerationResponse:
    """Fetch one generation, visible to its creator or project members."""
    uid = str(current_user["_id"])
    gen = await generation_service.get_generation(generation_id, current_user_id=uid)
    if not gen:
        raise HTTPException(status_code=404, detail="Generation not found")

    if gen.created_by != uid:
        # Not the creator — fall back to project membership.
        project = None
        if gen.project_id:
            project = await generation_service.dao.projects.get_project(gen.project_id)
        if not (project and uid in project.members):
            raise HTTPException(status_code=403, detail="Access denied")
    return gen
|
||||
|
||||
|
||||
@router.post("/{generation_id}/like", response_model=dict)
async def toggle_like(
        generation_id: str,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
):
    """Flip the caller's like on a generation and report the resulting state."""
    liked = await generation_service.toggle_like(generation_id, str(current_user["_id"]))
    if liked is None:
        # None (as opposed to False) signals the generation id is unknown.
        raise HTTPException(status_code=404, detail="Generation not found")
    return {"is_liked": liked}
|
||||
|
||||
|
||||
@router.post("/{generation_id}/nsfw", status_code=status.HTTP_204_NO_CONTENT)
async def mark_generation_nsfw(
        generation_id: str,
        request: NsfwRequest,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
):
    """Set or clear the NSFW flag on a generation the caller may access."""
    uid = str(current_user["_id"])
    gen = await generation_service.get_generation(generation_id, current_user_id=uid)
    if not gen:
        raise HTTPException(status_code=404, detail="Generation not found")

    if gen.created_by != uid:
        # Not the creator — allow only members of the generation's project.
        project = None
        if gen.project_id:
            project = await generation_service.dao.projects.get_project(gen.project_id)
        if not (project and uid in project.members):
            raise HTTPException(status_code=403, detail="Access denied")

    await generation_service.dao.generations.mark_nsfw(generation_id, request.is_nsfw)
    return None
|
||||
|
||||
|
||||
@router.post("/import", response_model=GenerationResponse)
async def import_external_generation(
        request: Request,
        generation_service: GenerationService = Depends(get_generation_service),
        x_signature: str = Header(..., alias="X-Signature")
) -> GenerationResponse:
    """Ingest a generation pushed by a trusted external service.

    The raw request body is signature-verified against X-Signature before
    any parsing happens.
    """
    body = await request.body()

    secret = settings.EXTERNAL_API_SECRET
    if not secret:
        # Without a shared secret we cannot authenticate the sender.
        raise HTTPException(status_code=500, detail="Server configuration error")

    if not verify_signature(body, x_signature, secret):
        raise HTTPException(status_code=401, detail="Invalid signature")

    try:
        payload = json.loads(body.decode('utf-8'))
        external_gen = ExternalGenerationRequest(**payload)
        stored = await generation_service.import_external_generation(external_gen)
        return GenerationResponse(**stored.model_dump())
    except Exception as e:
        logger.error(f"Failed to import external generation: {e}")
        raise HTTPException(status_code=500, detail=f"Import failed: {str(e)}")
|
||||
|
||||
|
||||
@router.delete("/{generation_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_generation(
        generation_id: str,
        generation_service: GenerationService = Depends(get_generation_service),
        current_user: dict = Depends(get_current_user)
):
    """Delete a generation the caller created or shares a project with.

    Previously this deleted without any ownership check, so any authenticated
    user could remove anyone's generation. Apply the same creator-or-project-
    member rule the sibling GET/like/nsfw endpoints enforce.
    """
    uid = str(current_user["_id"])
    gen = await generation_service.get_generation(generation_id, current_user_id=uid)
    if not gen:
        raise HTTPException(status_code=404, detail="Generation not found")

    if gen.created_by != uid:
        is_member = False
        if gen.project_id:
            project = await generation_service.dao.projects.get_project(gen.project_id)
            if project and uid in project.members:
                is_member = True
        if not is_member:
            raise HTTPException(status_code=403, detail="Access denied")

    if not await generation_service.delete_generation(generation_id):
        raise HTTPException(status_code=404, detail="Generation not found")
    return None
|
||||
106
api/endpoints/idea_router.py
Normal file
106
api/endpoints/idea_router.py
Normal file
@@ -0,0 +1,106 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Body
|
||||
from api.dependency import get_idea_service, get_project_id, get_generation_service
|
||||
from api.endpoints.auth import get_current_user
|
||||
from api.service.idea_service import IdeaService
|
||||
from api.service.generation_service import GenerationService
|
||||
from models.Idea import Idea
|
||||
from api.models import GenerationResponse, GenerationsResponse
|
||||
from api.models import IdeaRequest, PostRequest # Adjusting for general model usage
|
||||
from api.models.IdeaRequest import IdeaCreateRequest, IdeaUpdateRequest, IdeaResponse
|
||||
|
||||
router = APIRouter(prefix="/api/ideas", tags=["ideas"])
|
||||
|
||||
@router.post("", response_model=Idea)
async def create_idea(
        request: IdeaCreateRequest,
        project_id: str | None = Depends(get_project_id),
        current_user: dict = Depends(get_current_user),
        idea_service: IdeaService = Depends(get_idea_service)
):
    """Create an idea; the header-supplied project takes precedence over the body's."""
    effective_project = project_id if project_id else request.project_id

    created = await idea_service.create_idea(
        name=request.name,
        description=request.description,
        project_id=effective_project,
        user_id=str(current_user["_id"]),
        inspiration_id=request.inspiration_id,
    )
    return created
|
||||
|
||||
@router.get("", response_model=list[IdeaResponse])
async def get_ideas(
    project_id: str | None = Depends(get_project_id),
    limit: int = 20,
    offset: int = 0,
    current_user: dict = Depends(get_current_user),
    idea_service: IdeaService = Depends(get_idea_service)
):
    """Return a page of ideas for the given project and current user."""
    return await idea_service.get_ideas(project_id, str(current_user["_id"]), limit, offset)
|
||||
|
||||
@router.get("/{idea_id}", response_model=Idea)
async def get_idea(
    idea_id: str,
    idea_service: IdeaService = Depends(get_idea_service)
):
    """Fetch a single idea by id; 404 when it does not exist."""
    if found := await idea_service.get_idea(idea_id):
        return found
    raise HTTPException(status_code=404, detail="Idea not found")
|
||||
|
||||
@router.put("/{idea_id}", response_model=Idea)
async def update_idea(
    idea_id: str,
    request: IdeaUpdateRequest,
    idea_service: IdeaService = Depends(get_idea_service)
):
    """Update an idea's name/description/inspiration; 404 when missing."""
    idea = await idea_service.update_idea(
        idea_id=idea_id,
        name=request.name,
        description=request.description,
        inspiration_id=request.inspiration_id
    )
    if not idea:
        raise HTTPException(status_code=404, detail="Idea not found")
    return idea
|
||||
|
||||
@router.delete("/{idea_id}")
async def delete_idea(
    idea_id: str,
    idea_service: IdeaService = Depends(get_idea_service)
):
    """Delete an idea; 404 when nothing was removed."""
    if not await idea_service.delete_idea(idea_id):
        raise HTTPException(status_code=404, detail="Idea not found or could not be deleted")
    return {"status": "success"}
|
||||
|
||||
@router.get("/{idea_id}/generations", response_model=GenerationsResponse)
async def get_idea_generations(
    idea_id: str,
    limit: int = 50,
    offset: int = 0,
    generation_service: GenerationService = Depends(get_generation_service),
    current_user: dict = Depends(get_current_user)
):
    """Return a page of generations linked to the given idea."""
    return await generation_service.get_generations(idea_id=idea_id, limit=limit, offset=offset, current_user_id=str(current_user["_id"]))
|
||||
|
||||
@router.post("/{idea_id}/generations/{generation_id}")
async def add_generation_to_idea(
    idea_id: str,
    generation_id: str,
    idea_service: IdeaService = Depends(get_idea_service)
):
    """Link an existing generation to an idea."""
    if not await idea_service.add_generation_to_idea(idea_id, generation_id):
        raise HTTPException(status_code=404, detail="Idea or Generation not found")
    return {"status": "success"}
|
||||
|
||||
@router.delete("/{idea_id}/generations/{generation_id}")
async def remove_generation_from_idea(
    idea_id: str,
    generation_id: str,
    idea_service: IdeaService = Depends(get_idea_service)
):
    """Unlink a generation from an idea."""
    if not await idea_service.remove_generation_from_idea(idea_id, generation_id):
        raise HTTPException(status_code=404, detail="Idea or Generation not found")
    return {"status": "success"}
|
||||
94
api/endpoints/inspiration_router.py
Normal file
94
api/endpoints/inspiration_router.py
Normal file
@@ -0,0 +1,94 @@
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from api.dependency import get_inspiration_service, get_project_id
|
||||
from api.endpoints.auth import get_current_user
|
||||
from api.models.InspirationRequest import InspirationCreateRequest, InspirationResponse, InspirationListResponse
|
||||
from api.service.inspiration_service import InspirationService
|
||||
from models.Inspiration import Inspiration
|
||||
|
||||
router = APIRouter(prefix="/api/inspirations", tags=["Inspirations"])
|
||||
|
||||
|
||||
@router.post("", response_model=InspirationResponse, status_code=status.HTTP_201_CREATED)
async def create_inspiration(
    request: InspirationCreateRequest,
    project_id: str | None = Depends(get_project_id),
    current_user: dict = Depends(get_current_user),
    service: InspirationService = Depends(get_inspiration_service)
):
    """Create an inspiration from a source URL for the current user.

    The project id supplied via the dependency takes precedence over
    the one in the request body.
    """
    pid = project_id or request.project_id

    inspiration = await service.create_inspiration(
        source_url=request.source_url,
        created_by=str(current_user["_id"]),
        project_id=pid,
        caption=request.caption
    )
    return inspiration
|
||||
|
||||
|
||||
@router.get("", response_model=InspirationListResponse)
async def get_inspirations(
    project_id: str | None = Depends(get_project_id),
    limit: int = 20,
    offset: int = 0,
    current_user: dict = Depends(get_current_user),
    service: InspirationService = Depends(get_inspiration_service)
):
    """List inspirations with a total count.

    Scope rule: when a project id is provided, return that project's
    inspirations (shared among members); otherwise return the current
    user's personal inspirations.
    """
    pid = project_id
    # Only restrict by creator when no project scope is active.
    creator = None if pid else str(current_user["_id"])

    items = await service.get_inspirations(project_id=pid, created_by=creator, limit=limit, offset=offset)
    total_count = await service.dao.inspirations.count_inspirations(project_id=pid, created_by=creator)

    return InspirationListResponse(
        inspirations=[InspirationResponse(**item.model_dump()) for item in items],
        total_count=total_count
    )
|
||||
|
||||
|
||||
@router.get("/{inspiration_id}", response_model=InspirationResponse)
async def get_inspiration(
    inspiration_id: str,
    service: InspirationService = Depends(get_inspiration_service),
    current_user: dict = Depends(get_current_user)
):
    """Fetch a single inspiration by id; 404 when missing."""
    inspiration = await service.get_inspiration(inspiration_id)
    if not inspiration:
        raise HTTPException(status_code=404, detail="Inspiration not found")
    return inspiration
|
||||
|
||||
|
||||
@router.patch("/{inspiration_id}/complete", response_model=InspirationResponse)
async def mark_inspiration_complete(
    inspiration_id: str,
    is_completed: bool = True,
    service: InspirationService = Depends(get_inspiration_service),
    current_user: dict = Depends(get_current_user)
):
    """Set (or clear, via ?is_completed=false) the completed flag."""
    updated = await service.mark_as_completed(inspiration_id, is_completed)
    if not updated:
        raise HTTPException(status_code=404, detail="Inspiration not found")
    return updated
|
||||
|
||||
|
||||
@router.delete("/{inspiration_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_inspiration(
    inspiration_id: str,
    service: InspirationService = Depends(get_inspiration_service),
    current_user: dict = Depends(get_current_user)
):
    """Delete an inspiration; 204 on success, 404 when missing."""
    if not await service.delete_inspiration(inspiration_id):
        raise HTTPException(status_code=404, detail="Inspiration not found")
    # 204 No Content: body must be empty.
    return None
|
||||
98
api/endpoints/post_router.py
Normal file
98
api/endpoints/post_router.py
Normal file
@@ -0,0 +1,98 @@
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from api.dependency import get_post_service, get_project_id
|
||||
from api.endpoints.auth import get_current_user
|
||||
from api.service.post_service import PostService
|
||||
from api.models import PostRequest, PostCreateRequest, PostUpdateRequest, AddGenerationsRequest
|
||||
from models.Post import Post
|
||||
|
||||
router = APIRouter(prefix="/api/posts", tags=["posts"])
|
||||
|
||||
|
||||
@router.post("", response_model=Post)
async def create_post(
    request: PostCreateRequest,
    project_id: str | None = Depends(get_project_id),
    current_user: dict = Depends(get_current_user),
    post_service: PostService = Depends(get_post_service),
):
    """Create a scheduled post, optionally pre-linked to generations.

    The project id supplied via the dependency takes precedence over
    the one in the request body.
    """
    pid = project_id or request.project_id
    return await post_service.create_post(
        date=request.date,
        topic=request.topic,
        generation_ids=request.generation_ids,
        project_id=pid,
        user_id=str(current_user["_id"]),
    )
|
||||
|
||||
|
||||
@router.get("", response_model=list[Post])
async def get_posts(
    project_id: str | None = Depends(get_project_id),
    limit: int = 200,
    offset: int = 0,
    date_from: datetime | None = None,
    date_to: datetime | None = None,
    current_user: dict = Depends(get_current_user),
    post_service: PostService = Depends(get_post_service),
):
    """Return a page of posts, optionally filtered by a date window."""
    return await post_service.get_posts(project_id, str(current_user["_id"]), limit, offset, date_from, date_to)
|
||||
|
||||
|
||||
@router.get("/{post_id}", response_model=Post)
async def get_post(
    post_id: str,
    post_service: PostService = Depends(get_post_service),
):
    """Fetch a single post by id; 404 when it does not exist."""
    if found := await post_service.get_post(post_id):
        return found
    raise HTTPException(status_code=404, detail="Post not found")
|
||||
|
||||
|
||||
@router.put("/{post_id}", response_model=Post)
async def update_post(
    post_id: str,
    request: PostUpdateRequest,
    post_service: PostService = Depends(get_post_service),
):
    """Update a post's date and/or topic; 404 when missing."""
    updated = await post_service.update_post(post_id, date=request.date, topic=request.topic)
    if not updated:
        raise HTTPException(status_code=404, detail="Post not found")
    return updated
|
||||
|
||||
|
||||
@router.delete("/{post_id}")
async def delete_post(
    post_id: str,
    post_service: PostService = Depends(get_post_service),
):
    """Delete a post; 404 when nothing was removed."""
    if not await post_service.delete_post(post_id):
        raise HTTPException(status_code=404, detail="Post not found or could not be deleted")
    return {"status": "success"}
|
||||
|
||||
|
||||
@router.post("/{post_id}/generations")
async def add_generations(
    post_id: str,
    request: AddGenerationsRequest,
    post_service: PostService = Depends(get_post_service),
):
    """Attach a batch of generations to a post; 404 when the post is missing."""
    success = await post_service.add_generations(post_id, request.generation_ids)
    if not success:
        raise HTTPException(status_code=404, detail="Post not found")
    return {"status": "success"}
|
||||
|
||||
|
||||
@router.delete("/{post_id}/generations/{generation_id}")
async def remove_generation(
    post_id: str,
    generation_id: str,
    post_service: PostService = Depends(get_post_service),
):
    """Detach one generation from a post."""
    if not await post_service.remove_generation(post_id, generation_id):
        raise HTTPException(status_code=404, detail="Post not found or generation not linked")
    return {"status": "success"}
|
||||
181
api/endpoints/project_router.py
Normal file
181
api/endpoints/project_router.py
Normal file
@@ -0,0 +1,181 @@
|
||||
|
||||
from bson import ObjectId
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel
|
||||
from api.dependency import get_dao
|
||||
from api.endpoints.auth import get_current_user
|
||||
from models.Project import Project
|
||||
from repos.dao import DAO
|
||||
|
||||
router = APIRouter(prefix="/api/projects", tags=["Projects"])
|
||||
|
||||
class ProjectCreate(BaseModel):
    """Request body for creating a project."""

    name: str
    description: str | None = None
|
||||
|
||||
class ProjectMemberResponse(BaseModel):
    """One project member as exposed by the API (id + display username)."""

    id: str
    username: str
|
||||
|
||||
class ProjectResponse(BaseModel):
    """API representation of a project, with resolved member usernames."""

    id: str
    name: str
    description: str | None = None
    owner_id: str
    members: list[ProjectMemberResponse]
    # True when the requesting user is the project owner.
    is_owner: bool = False
|
||||
|
||||
async def _get_project_response(project: Project, current_user_id: str, dao: DAO) -> ProjectResponse:
    """Build a ProjectResponse, resolving each member id to a username.

    Member ids are normally stringified Mongo ObjectIds (web users), but
    legacy Telegram users may be stored as digit strings; both are handled.
    Unresolvable members are kept with username "unknown" rather than dropped.
    """
    member_responses = []
    for member_id in project.members:
        # BUG FIX: ObjectId(member_id) raises InvalidId for non-hex strings
        # (e.g. Telegram numeric ids), which made the digit fallback below
        # unreachable. Validate before constructing the ObjectId.
        user_doc = None
        if ObjectId.is_valid(member_id):
            user_doc = await dao.users.collection.find_one({"_id": ObjectId(member_id)})
        if not user_doc and member_id.isdigit():
            # Fallback for Telegram ids stored as strings of digits.
            user_doc = await dao.users.get_user(int(member_id))

        username = user_doc.get("username", "unknown") if user_doc else "unknown"
        member_responses.append(ProjectMemberResponse(id=member_id, username=username))

    return ProjectResponse(
        id=project.id,
        name=project.name,
        description=project.description,
        owner_id=project.owner_id,
        members=member_responses,
        is_owner=(project.owner_id == current_user_id)
    )
|
||||
|
||||
@router.post("", response_model=ProjectResponse)
async def create_project(
    project_data: ProjectCreate,
    dao: DAO = Depends(get_dao),
    current_user: dict = Depends(get_current_user)
):
    """Create a project owned by the current user, who becomes its first member."""
    user_id = str(current_user["_id"])
    new_project = Project(
        name=project_data.name,
        description=project_data.description,
        owner_id=user_id,
        members=[user_id]
    )
    project_id = await dao.projects.create_project(new_project)
    new_project.id = project_id

    # Keep the user's own document in sync: add the new project to their list.
    await dao.users.collection.update_one(
        {"_id": current_user["_id"]},
        {"$addToSet": {"project_ids": project_id}}
    )

    return await _get_project_response(new_project, user_id, dao)
|
||||
|
||||
@router.get("", response_model=list[ProjectResponse])
async def get_my_projects(
    dao: DAO = Depends(get_dao),
    current_user: dict = Depends(get_current_user)
):
    """List every project the current user belongs to."""
    uid = str(current_user["_id"])
    user_projects = await dao.projects.get_projects_by_user(uid)
    # Resolve member usernames for each project before returning.
    return [await _get_project_response(p, uid, dao) for p in user_projects]
|
||||
|
||||
class MemberAdd(BaseModel):
    """Request body for adding a member to a project by username."""

    username: str
|
||||
|
||||
# NOTE(review): the route-level `dependencies=[Depends(get_current_user)]` is
# redundant — `current_user` below already enforces authentication.
@router.post("/{project_id}/members", dependencies=[Depends(get_current_user)])
async def add_member(
    project_id: str,
    member_data: MemberAdd,
    dao: DAO = Depends(get_dao),
    current_user: dict = Depends(get_current_user)
):
    """Add a user (looked up by username) to a project. Owner-only.

    Raises 404 when the project or target user is missing and 403 when the
    caller is not the project owner. Idempotent for existing members.
    """
    user_id = str(current_user["_id"])
    project = await dao.projects.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    if project.owner_id != user_id:
        raise HTTPException(status_code=403, detail="Only owner can add members")

    target_user = await dao.users.get_user_by_username(member_data.username)
    if not target_user:
        raise HTTPException(status_code=404, detail="User not found")

    target_user_id = str(target_user["_id"])

    if target_user_id in project.members:
        return {"message": "User already in project"}

    await dao.projects.add_member(project_id, target_user_id)

    # Keep the target user's document in sync with project membership.
    await dao.users.collection.update_one(
        {"_id": target_user["_id"]},
        {"$addToSet": {"project_ids": project_id}}
    )

    return {"message": "Member added"}
|
||||
|
||||
@router.post("/{project_id}/join", dependencies=[Depends(get_current_user)])
async def join_project(
    project_id: str,
    dao: DAO = Depends(get_dao),
    current_user: dict = Depends(get_current_user)
):
    """Add the current user to an existing project (idempotent)."""
    target = await dao.projects.get_project(project_id)
    if not target:
        raise HTTPException(status_code=404, detail="Project not found")

    uid = str(current_user["_id"])
    # Already a member: nothing to do.
    if uid in target.members:
        return {"message": "Already a member"}

    await dao.projects.add_member(project_id, uid)

    # Keep the user's own document in sync with project membership.
    await dao.users.collection.update_one(
        {"_id": current_user["_id"]},
        {"$addToSet": {"project_ids": project_id}}
    )

    return {"message": "Joined project"}
|
||||
|
||||
|
||||
@router.delete("/{project_id}", dependencies=[Depends(get_current_user)] )
async def delete_project(
    project_id: str,
    dao: DAO = Depends(get_dao),
    current_user: dict = Depends(get_current_user)
):
    """Delete a project. Owner-only; 404 when missing, 403 for non-owners."""
    user_id = str(current_user["_id"])
    project = await dao.projects.get_project(project_id)
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    if project.owner_id != user_id:
        raise HTTPException(status_code=403, detail="Only owner can delete project")

    await dao.projects.delete_project(project_id)

    # Remove project from the OWNER's project list only.
    # NOTE(review): other members keep a stale id in their `project_ids`;
    # consider an update_many pulling the id from every user — verify intent.
    await dao.users.collection.update_one(
        {"_id": current_user["_id"]},
        {"$pull": {"project_ids": project_id}}
    )

    return {"message": "Project deleted"}
|
||||
19
api/models/AssetDTO.py
Normal file
19
api/models/AssetDTO.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from models.Asset import Asset
|
||||
|
||||
|
||||
class AssetResponse(BaseModel):
    """API representation of a single asset."""

    id: str
    name: str
    type: str  # uploaded / generated
    content_type: str  # image / prompt
    # Character this asset is linked to, if any.
    linked_char_id: str | None = None
    created_at: datetime
    # Download URL; presumably filled in by the service layer when available.
    url: str | None = None
|
||||
|
||||
class AssetsResponse(BaseModel):
    """A page of assets plus the total number matching the query."""

    assets: list[AssetResponse]
    total_count: int
|
||||
17
api/models/CharacterDTO.py
Normal file
17
api/models/CharacterDTO.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
class CharacterCreateRequest(BaseModel):
    """Request body for creating a character."""

    name: str
    character_bio: str
    # Telegram file ids for the character images, when sourced from Telegram.
    character_image_doc_tg_id: str | None = None
    avatar_image: str | None = None
    character_image_tg_id: str | None = None
    project_id: str | None = None
|
||||
|
||||
class CharacterUpdateRequest(BaseModel):
    """Partial-update request for a character; None fields are left unchanged."""

    name: str | None = None
    character_bio: str | None = None
    character_image_doc_tg_id: str | None = None
    avatar_image: str | None = None
    character_image_tg_id: str | None = None
    project_id: str | None = None
|
||||
22
api/models/EnvironmentRequest.py
Normal file
22
api/models/EnvironmentRequest.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class EnvironmentCreate(BaseModel):
    """Request body for creating an environment bound to a character."""

    character_id: str
    name: str = Field(..., min_length=1)
    description: str | None = None
    # Idiom fix: use default_factory instead of a shared literal `[]` default
    # (pydantic copies literal defaults, but default_factory is the
    # conventional, unambiguous form).
    asset_ids: list[str] | None = Field(default_factory=list)
|
||||
|
||||
|
||||
class EnvironmentUpdate(BaseModel):
    """Partial-update request for an environment; None fields are left unchanged."""

    name: str | None = Field(None, min_length=1)
    description: str | None = None
    asset_ids: list[str] | None = None
|
||||
|
||||
|
||||
class AssetToEnvironment(BaseModel):
    """Request body for attaching a single asset to an environment."""

    asset_id: str
|
||||
|
||||
|
||||
class AssetsToEnvironment(BaseModel):
    """Request body for attaching a batch of assets to an environment."""

    asset_ids: list[str]
|
||||
40
api/models/ExternalGenerationDTO.py
Normal file
40
api/models/ExternalGenerationDTO.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from pydantic import BaseModel, Field
|
||||
from models.enums import AspectRatios, Quality
|
||||
|
||||
|
||||
class ExternalGenerationRequest(BaseModel):
    """Request model for importing external generations.

    Exactly one of `image_data` / `image_url` must be supplied; callers are
    expected to invoke `validate_image_source()` explicitly — it is NOT a
    pydantic validator and does not run automatically on construction.
    """

    prompt: str
    tech_prompt: str | None = None

    # Image can be provided as base64 string OR URL (one must be provided)
    image_data: str | None = Field(None, description="Base64-encoded image data")
    image_url: str | None = Field(None, description="URL to download image from")

    nsfw: bool = False

    # Generation metadata
    aspect_ratio: AspectRatios = AspectRatios.NINESIXTEEN # "1:1","2:3","3:2","3:4","4:3","4:5","5:4","9:16","16:9","21:9"
    quality: Quality = Quality.ONEK
    model: str | None = None
    seed: int | None = None

    # Optional linking
    linked_character_id: str | None = None
    created_by: str = Field(..., description="User ID from external system")
    project_id: str | None = None

    # Performance metrics
    execution_time_seconds: float | None = None
    api_execution_time_seconds: float | None = None
    token_usage: int | None = None
    input_token_usage: int | None = None
    output_token_usage: int | None = None

    def validate_image_source(self):
        """Ensure exactly one image source is provided; raise ValueError otherwise."""
        if not self.image_data and not self.image_url:
            raise ValueError("Either image_data or image_url must be provided")
        if self.image_data and self.image_url:
            raise ValueError("Only one of image_data or image_url should be provided")
|
||||
17
api/models/FinancialUsageDTO.py
Normal file
17
api/models/FinancialUsageDTO.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
class UsageStats(BaseModel):
    """Aggregated generation usage: run/token counters and total cost."""

    total_runs: int
    total_tokens: int
    total_input_tokens: int
    total_output_tokens: int
    total_cost: float
|
||||
|
||||
class UsageByEntity(BaseModel):
    """Usage stats attributed to one entity (a user or a project)."""

    # None represents usage not attributable to any entity.
    entity_id: str | None = None
    stats: UsageStats
|
||||
|
||||
class FinancialReport(BaseModel):
    """Overall usage summary with optional per-user / per-project breakdowns."""

    summary: UsageStats
    by_user: list[UsageByEntity] | None = None
    by_project: list[UsageByEntity] | None = None
|
||||
79
api/models/GenerationRequest.py
Normal file
79
api/models/GenerationRequest.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from datetime import datetime, UTC
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from models.Asset import Asset
|
||||
from models.Generation import GenerationStatus
|
||||
from models.enums import AspectRatios, Quality, GenType, ImageModel, TextModel
|
||||
|
||||
|
||||
class GenerationRequest(BaseModel):
    """Request body for starting one or more image generations."""

    linked_character_id: str | None = None
    aspect_ratio: AspectRatios = AspectRatios.NINESIXTEEN # "1:1","2:3","3:2","3:4","4:3","4:5","5:4","9:16","16:9","21:9"
    quality: Quality = Quality.ONEK
    prompt: str
    model: ImageModel = Field(default=ImageModel.GEMINI_3_PRO_IMAGE_PREVIEW)
    telegram_id: int | None = None
    # Whether to include the character's profile image among reference images.
    use_profile_image: bool = True
    assets_list: list[str]
    environment_id: str | None = None
    project_id: str | None = None
    idea_id: str | None = None
    nsfw: bool = False
    # Number of images to generate in this batch (1-10).
    count: int = Field(default=1, ge=1, le=10)
|
||||
|
||||
|
||||
class NsfwRequest(BaseModel):
    """Request body for toggling a generation's NSFW flag."""

    is_nsfw: bool
|
||||
|
||||
|
||||
class GenerationsResponse(BaseModel):
    """A page of generations plus the total number matching the query."""

    # Forward reference: GenerationResponse is defined below in this module.
    generations: list["GenerationResponse"]
    total_count: int
|
||||
|
||||
|
||||
class GenerationResponse(BaseModel):
    """API-facing view of a generation, including status, results and metrics."""

    id: str
    status: GenerationStatus
    failed_reason: str | None = None
    project_id: str | None = None
    linked_character_id: str | None = None
    aspect_ratio: AspectRatios
    quality: Quality
    prompt: str
    model: ImageModel | None = None
    seed: int | None = None
    tech_prompt: str | None = None
    assets_list: list[str]
    result_list: list[str] = Field(default_factory=list)
    result: str | None = None
    execution_time_seconds: float | None = None
    api_execution_time_seconds: float | None = None
    token_usage: int | None = None
    input_token_usage: int | None = None
    output_token_usage: int | None = None
    # Progress percentage (0-100) while the generation is running.
    progress: int = 0
    cost: float | None = None
    created_by: str | None = None
    generation_group_id: str | None = None
    idea_id: str | None = None
    likes_count: int = 0
    is_liked: bool = False
    nsfw: bool = False
    # BUG FIX: a plain `= datetime.now(UTC)` default is evaluated once at
    # import time, so every instance would share the same timestamp.
    # default_factory re-evaluates per instance.
    created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
    updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
|
||||
|
||||
class GenerationGroupResponse(BaseModel):
    """A batch of generations created together, keyed by their group id."""

    generation_group_id: str
    generations: list[GenerationResponse]
|
||||
|
||||
|
||||
class PromptRequest(BaseModel):
    """Request body for prompt enhancement via a text model."""

    prompt: str
    model: TextModel = Field(default=TextModel.GEMINI_3_1_PRO_PREVIEW)
    # Asset ids to provide as context alongside the prompt.
    linked_assets: list[str] = []
|
||||
|
||||
|
||||
class PromptResponse(BaseModel):
    """Response carrying the (enhanced) prompt text."""

    prompt: str
|
||||
17
api/models/IdeaRequest.py
Normal file
17
api/models/IdeaRequest.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from pydantic import BaseModel
|
||||
from models.Idea import Idea
|
||||
from api.models.GenerationRequest import GenerationResponse
|
||||
|
||||
class IdeaCreateRequest(BaseModel):
    """Request body for creating an idea."""

    name: str
    description: str | None = None
    project_id: str | None = None # Optional in body if passed via header/dependency
    inspiration_id: str | None = None
|
||||
|
||||
class IdeaUpdateRequest(BaseModel):
    """Partial-update request for an idea; None fields are left unchanged."""

    name: str | None = None
    description: str | None = None
    inspiration_id: str | None = None
|
||||
|
||||
class IdeaResponse(Idea):
    """Idea enriched with its most recent generation, when one exists."""

    last_generation: GenerationResponse | None = None
|
||||
28
api/models/InspirationRequest.py
Normal file
28
api/models/InspirationRequest.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from models.Inspiration import Inspiration
|
||||
|
||||
|
||||
class InspirationCreateRequest(BaseModel):
    """Request body for creating an inspiration from a source URL."""

    source_url: str
    caption: str | None = None
    project_id: str | None = None
|
||||
|
||||
|
||||
class InspirationResponse(BaseModel):
    """API representation of a single inspiration."""

    id: str
    source_url: str
    caption: str | None = None
    # Asset created from the source media.
    asset_id: str
    # Whether the inspiration has been worked through.
    is_completed: bool
    created_by: str
    project_id: str | None = None
    created_at: datetime
    updated_at: datetime
|
||||
|
||||
|
||||
class InspirationListResponse(BaseModel):
    """A page of inspirations plus the total number matching the query."""

    inspirations: list[InspirationResponse]
    total_count: int
|
||||
18
api/models/PostRequest.py
Normal file
18
api/models/PostRequest.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from datetime import datetime
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class PostCreateRequest(BaseModel):
    """Request body for creating a scheduled post."""

    date: datetime
    topic: str
    generation_ids: list[str] = []
    project_id: str | None = None
|
||||
|
||||
|
||||
class PostUpdateRequest(BaseModel):
    """Partial-update request for a post; None fields are left unchanged."""

    date: datetime | None = None
    topic: str | None = None
|
||||
|
||||
|
||||
class AddGenerationsRequest(BaseModel):
    """Request body for attaching a batch of generations to a post."""

    generation_ids: list[str]
|
||||
7
api/models/__init__.py
Normal file
7
api/models/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from .AssetDTO import AssetResponse, AssetsResponse
|
||||
from .CharacterDTO import CharacterCreateRequest, CharacterUpdateRequest
|
||||
from .ExternalGenerationDTO import ExternalGenerationRequest
|
||||
from .FinancialUsageDTO import FinancialReport, UsageStats, UsageByEntity
|
||||
from .GenerationRequest import GenerationRequest, GenerationResponse, GenerationsResponse, GenerationGroupResponse, PromptRequest, PromptResponse, NsfwRequest
|
||||
from .IdeaRequest import IdeaCreateRequest, IdeaUpdateRequest, IdeaResponse
|
||||
from .PostRequest import PostCreateRequest, PostUpdateRequest, AddGenerationsRequest
|
||||
BIN
api/models/__pycache__/AssetDTO.cpython-313.pyc
Normal file
BIN
api/models/__pycache__/AssetDTO.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/models/__pycache__/GenerationRequest.cpython-313.pyc
Normal file
BIN
api/models/__pycache__/GenerationRequest.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/models/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
api/models/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
0
api/service/__init__.py
Normal file
0
api/service/__init__.py
Normal file
BIN
api/service/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
api/service/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
api/service/__pycache__/generation_service.cpython-313.pyc
Normal file
BIN
api/service/__pycache__/generation_service.cpython-313.pyc
Normal file
Binary file not shown.
85
api/service/album_service.py
Normal file
85
api/service/album_service.py
Normal file
@@ -0,0 +1,85 @@
|
||||
from typing import List, Optional
|
||||
from models.Album import Album
|
||||
from models.Generation import Generation
|
||||
from repos.dao import DAO
|
||||
|
||||
class AlbumService:
    """CRUD and membership operations for albums of generations."""

    def __init__(self, dao: DAO):
        self.dao = dao

    async def create_album(self, name: str, description: Optional[str] = None) -> Album:
        """Create an album and return it with its new id filled in."""
        album = Album(name=name, description=description)
        album_id = await self.dao.albums.create_album(album)
        album.id = album_id
        return album

    async def get_albums(self, limit: int = 10, offset: int = 0) -> List[Album]:
        """Return a page of albums."""
        return await self.dao.albums.get_albums(limit=limit, offset=offset)

    async def get_album(self, album_id: str) -> Optional[Album]:
        """Fetch one album by id, or None when missing."""
        return await self.dao.albums.get_album(album_id)

    async def update_album(self, album_id: str, name: Optional[str] = None, description: Optional[str] = None) -> Optional[Album]:
        """Update an album's name and/or description.

        Returns the updated album, or None when it does not exist.
        Note: an empty-string name is treated as "no change"; an empty-string
        description IS applied (only None means "leave unchanged").
        """
        album = await self.dao.albums.get_album(album_id)
        if not album:
            return None

        if name:
            album.name = name
        if description is not None:
            album.description = description

        await self.dao.albums.update_album(album_id, album)
        return album

    async def delete_album(self, album_id: str) -> bool:
        """Delete an album; True when something was removed."""
        return await self.dao.albums.delete_album(album_id)

    async def add_generation_to_album(self, album_id: str, generation_id: str) -> bool:
        """Link a generation into an album.

        The first finished generation added becomes the album cover.
        Returns False when either the album or the generation is missing.
        """
        album = await self.dao.albums.get_album(album_id)
        if not album:
            return False

        gen = await self.dao.generations.get_generation(generation_id)
        if not gen:
            return False
        # BUG FIX: guard result_list — a 'done' generation with an empty
        # result list previously raised IndexError here.
        if album.cover_asset_id is None and gen.status == 'done' and gen.result_list:
            album.cover_asset_id = gen.result_list[0]
        return await self.dao.albums.add_generation(album_id, generation_id, album.cover_asset_id)

    async def remove_generation_from_album(self, album_id: str, generation_id: str) -> bool:
        """Unlink a generation from an album; True when the link existed."""
        return await self.dao.albums.remove_generation(album_id, generation_id)

    async def get_generations_by_album(self, album_id: str, limit: int = 10, offset: int = 0) -> List[Generation]:
        """Return a page of an album's generations, in stored list order.

        Pagination slices the album's id list first, then fetches only that
        slice in one repo call.
        """
        album = await self.dao.albums.get_album(album_id)
        if not album or not album.generation_ids:
            return []

        sliced_ids = album.generation_ids[offset : offset + limit]
        if not sliced_ids:
            return []

        return await self.dao.generations.get_generations_by_ids(sliced_ids)
|
||||
446
api/service/generation_service.py
Normal file
446
api/service/generation_service.py
Normal file
@@ -0,0 +1,446 @@
|
||||
import asyncio
|
||||
import base64
|
||||
import logging
|
||||
import random
|
||||
from datetime import datetime, UTC
|
||||
from typing import List, Optional, Tuple, Any, Dict
|
||||
from uuid import uuid4
|
||||
|
||||
import httpx
|
||||
from aiogram import Bot
|
||||
from aiogram.types import BufferedInputFile
|
||||
|
||||
from adapters.Exception import GoogleGenerationException
|
||||
from adapters.google_adapter import GoogleAdapter
|
||||
from adapters.s3_adapter import S3Adapter
|
||||
from api.models import (
|
||||
FinancialReport, UsageStats, UsageByEntity,
|
||||
GenerationRequest, GenerationResponse, GenerationsResponse, GenerationGroupResponse
|
||||
)
|
||||
from models.Asset import Asset, AssetType, AssetContentType
|
||||
from models.Generation import Generation, GenerationStatus
|
||||
from models.enums import AspectRatios, Quality
|
||||
from repos.dao import DAO
|
||||
from utils.image_utils import create_thumbnail
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Limit concurrent generations to 4
|
||||
generation_semaphore = asyncio.Semaphore(4)
|
||||
|
||||
|
||||
async def generate_image_task(
    prompt: str,
    media_group_bytes: List[bytes],
    aspect_ratio: AspectRatios,
    quality: Quality,
    model: str,
    gemini: GoogleAdapter,
) -> Tuple[List[bytes], Dict[str, Any]]:
    """Run the blocking Gemini image generation off the event loop.

    Args:
        prompt: Full text prompt for the model.
        media_group_bytes: Reference images forwarded to the model.
        aspect_ratio: Requested output aspect ratio.
        quality: Requested output quality tier.
        model: Model identifier to use.
        gemini: Adapter wrapping the synchronous Google client.

    Returns:
        Tuple of (generated image bytes, metrics dict as reported by the
        adapter — e.g. token usage and API timing).

    Raises:
        GoogleGenerationException: propagated unchanged from the adapter.
    """
    try:
        logger.info(f"Starting generate_image_task with prompt length: {len(prompt)}")
        # The adapter call is synchronous; run it in a worker thread so the
        # event loop stays responsive. (The previous
        # `except GoogleGenerationException: raise` was a no-op and is gone —
        # the exception still propagates to the caller.)
        generated_images_io, metrics = await asyncio.to_thread(
            gemini.generate_image,
            prompt=prompt,
            images_list=media_group_bytes,
            aspect_ratio=aspect_ratio,
            quality=quality,
            model=model,
        )
        logger.info(f"generate_image_task completed, received {len(generated_images_io) if generated_images_io else 0} images")
    finally:
        # Drop our reference to the (potentially large) input images as soon
        # as the call finishes, successfully or not.
        del media_group_bytes

    # Drain each BytesIO into plain bytes and release the buffers eagerly.
    images_bytes = []
    if generated_images_io:
        for img_io in generated_images_io:
            img_io.seek(0)
            images_bytes.append(img_io.read())
            img_io.close()
        del generated_images_io

    return images_bytes, metrics
|
||||
|
||||
|
||||
class GenerationService:
    """Orchestrates image generation end-to-end: input preparation, Gemini
    calls (throttled by the module-level semaphore), asset storage in S3,
    progress simulation, Telegram notification, and usage reporting.
    """

    def __init__(self, dao: DAO, gemini: GoogleAdapter, s3_adapter: S3Adapter, bot: Optional[Bot] = None):
        # bot is optional: Telegram notifications are skipped when it is None.
        self.dao = dao
        self.gemini = gemini
        self.s3_adapter = s3_adapter
        self.bot = bot

    # --- Public API ---

    async def ask_prompt_assistant(self, prompt: str, assets: list[str] | None = None, model: str = "gemini-3.1-pro-preview") -> str:
        """Rewrite a user prompt via the text model, optionally grounded on
        reference assets, and return the improved prompt string."""
        future_prompt = (
            "You are an prompt-assistant. You improving user-entered prompts for image generation. "
            "User may upload reference image too. I will provide sources prompt entered by user. "
            "Understand user needs and generate best variation of prompt. ANSWER ONLY PROMPT STRING!!! "
            f"USER_ENTERED_PROMPT: {prompt}"
        )
        assets_data = []
        if assets:
            assets_db = await self.dao.assets.get_assets_by_ids(assets)
            # Only assets with inline data are forwarded; S3-only assets are
            # skipped here — NOTE(review): confirm that is intended.
            assets_data.extend(asset.data for asset in assets_db if asset.data)

        generated_prompt = await asyncio.to_thread(self.gemini.generate_text, future_prompt, model, assets_data)
        logger.info(f"Prompt Assistant: {generated_prompt}")
        return generated_prompt

    async def generate_prompt_from_images(self, images: List[bytes], user_prompt: Optional[str] = None, model: str = "gemini-3.1-pro-preview") -> str:
        """Ask the text model to describe the given image(s) as a detailed
        generation prompt, optionally seeded with user context."""
        technical_prompt = "You are a prompt engineer. Describe this image in detail to create a stable diffusion using this image as reference. "
        if user_prompt:
            technical_prompt += f"User also provided this context: {user_prompt}. "
        technical_prompt += "Provide ONLY the detailed prompt."

        return await asyncio.to_thread(self.gemini.generate_text, prompt=technical_prompt, model=model, images_list=images)

    async def get_generations(self, **kwargs) -> GenerationsResponse:
        """List generations matching the given repo filters plus a total count.

        ``current_user_id`` is popped from kwargs and used only to compute
        per-user like state in the response mapping.
        """
        current_user_id = kwargs.pop('current_user_id', None)
        generations = await self.dao.generations.get_generations(**kwargs)
        # NOTE(review): count_generations receives only a subset of the
        # possible filters — extra kwargs affect the page but not the total.
        total_count = await self.dao.generations.count_generations(
            character_id=kwargs.get('character_id'),
            created_by=kwargs.get('created_by'),
            project_id=kwargs.get('project_id'),
            idea_id=kwargs.get('idea_id'),
            only_liked_by=kwargs.get('only_liked_by')
        )
        return GenerationsResponse(
            generations=[self._map_to_response(gen, current_user_id) for gen in generations],
            total_count=total_count
        )

    async def get_generation(self, generation_id: str, current_user_id: Optional[str] = None) -> Optional[GenerationResponse]:
        """Fetch a single generation as an API response, or None if missing."""
        gen = await self.dao.generations.get_generation(generation_id)
        return self._map_to_response(gen, current_user_id) if gen else None

    async def toggle_like(self, generation_id: str, user_id: str) -> bool | None:
        """Toggle the user's like on a generation; repo decides the semantics
        of the returned value (None presumably means "not found")."""
        return await self.dao.generations.toggle_like(generation_id, user_id)

    async def get_generations_by_group(self, group_id: str, current_user_id: Optional[str] = None) -> GenerationGroupResponse:
        """Fetch all generations created under one generation group."""
        generations = await self.dao.generations.get_generations_by_group(group_id)
        return GenerationGroupResponse(
            generation_group_id=group_id,
            generations=[self._map_to_response(gen, current_user_id) for gen in generations]
        )

    def _map_to_response(self, gen: Generation, current_user_id: Optional[str] = None) -> GenerationResponse:
        """Convert a Generation model into the API response, filling the
        derived likes_count / is_liked fields."""
        res = GenerationResponse(**gen.model_dump())
        res.likes_count = len(gen.liked_by) if gen.liked_by else 0
        res.is_liked = current_user_id in gen.liked_by if current_user_id and gen.liked_by else False
        return res

    async def get_running_generations(self, user_id: Optional[str] = None, project_id: Optional[str] = None) -> List[Generation]:
        """List generations currently in RUNNING state, optionally scoped."""
        return await self.dao.generations.get_generations(status=GenerationStatus.RUNNING, created_by=user_id, project_id=project_id)

    async def create_generation_task(self, generation_request: GenerationRequest, user_id: Optional[str] = None, generation_group_id: Optional[str] = None) -> GenerationGroupResponse:
        """Create ``count`` generation records under one group and schedule a
        background task for each; returns immediately with pending records."""
        if generation_group_id is None:
            generation_group_id = str(uuid4())

        results = []
        for _ in range(generation_request.count):
            gen_response = await self._create_single_generation(generation_request, user_id, generation_group_id)
            results.append(gen_response)
        return GenerationGroupResponse(generation_group_id=generation_group_id, generations=results)

    async def create_generation(self, generation: Generation):
        """Run one generation to completion: prepare inputs, call the model,
        store result assets, finalize the record, and optionally notify.

        Exceptions propagate to the caller (the queued runner), which records
        the failure on the generation.
        """
        start_time = datetime.now()
        logger.info(f"Processing generation {generation.id}. Character ID: {generation.linked_character_id}")

        # 1. Prepare input
        media_group_bytes, generation_prompt = await self._prepare_generation_input(generation)

        # 2. Run generation with progress simulation
        progress_task = asyncio.create_task(self._simulate_progress(generation))
        try:
            generated_bytes_list, metrics = await generate_image_task(
                prompt=generation_prompt,
                media_group_bytes=media_group_bytes,
                aspect_ratio=generation.aspect_ratio,
                quality=generation.quality,
                model=generation.model or "gemini-3-pro-image-preview",
                gemini=self.gemini
            )
            self._update_generation_metrics(generation, metrics)

            # 3. Process results
            created_assets = await self._process_generated_images(generation, generated_bytes_list)

            # 4. Finalize generation record
            await self._finalize_generation(generation, created_assets, generation_prompt, start_time)

            # 5. Notify
            if generation.telegram_id and self.bot:
                await self._notify_telegram(generation, created_assets)
        finally:
            # Always stop the progress simulator, on success or failure.
            if not progress_task.done():
                progress_task.cancel()
                try:
                    await progress_task
                except asyncio.CancelledError:
                    pass

    async def import_external_generation(self, external_gen) -> Generation:
        """Import an externally produced image (URL or base64) as a completed
        generation, storing the image as an asset first."""
        external_gen.validate_image_source()
        logger.info(f"Importing external generation for user: {external_gen.created_by}")

        image_bytes = await self._fetch_external_image(external_gen)

        # Reuse internal processing logic
        new_asset = await self._save_asset(
            image_bytes=image_bytes,
            name=f"External_Generated_{external_gen.linked_character_id or 'no_char'}",
            created_by=external_gen.created_by,
            project_id=external_gen.project_id,
            linked_char_id=external_gen.linked_character_id,
            folder="external"
        )

        # The record is created directly in DONE state with all metrics copied
        # from the external payload.
        generation = Generation(
            status=GenerationStatus.DONE,
            linked_character_id=external_gen.linked_character_id,
            aspect_ratio=external_gen.aspect_ratio,
            quality=external_gen.quality,
            prompt=external_gen.prompt,
            model=external_gen.model,
            tech_prompt=external_gen.tech_prompt,
            seed=external_gen.seed,
            result_list=[new_asset.id],
            result=new_asset.id,
            progress=100,
            nsfw=external_gen.nsfw,
            execution_time_seconds=external_gen.execution_time_seconds,
            api_execution_time_seconds=external_gen.api_execution_time_seconds,
            token_usage=external_gen.token_usage,
            input_token_usage=external_gen.input_token_usage,
            output_token_usage=external_gen.output_token_usage,
            created_by=external_gen.created_by,
            project_id=external_gen.project_id
        )

        gen_id = await self.dao.generations.create_generation(generation)
        generation.id = gen_id
        return generation

    async def delete_generation(self, generation_id: str) -> bool:
        """Soft-delete a generation; returns False when missing or on error."""
        try:
            generation = await self.dao.generations.get_generation(generation_id)
            if not generation:
                return False
            generation.is_deleted = True
            generation.updated_at = datetime.now(UTC)
            await self.dao.generations.update_generation(generation)
            return True
        except Exception as e:
            # Best-effort: log and report failure instead of raising.
            logger.error(f"Error deleting generation {generation_id}: {e}")
            return False

    async def cleanup_stale_generations(self):
        """Cancel generations stuck in a running state for over 5 minutes."""
        try:
            count = await self.dao.generations.cancel_stale_generations(timeout_minutes=5)
            if count > 0:
                logger.info(f"Cleaned up {count} stale generations")
        except Exception as e:
            logger.error(f"Error cleaning up stale generations: {e}")

    async def cleanup_old_data(self, days: int = 30):
        """Soft-delete generations older than ``days`` and purge their assets."""
        try:
            gen_count, asset_ids = await self.dao.generations.soft_delete_old_generations(days=days)
            if gen_count > 0:
                logger.info(f"Soft-deleted {gen_count} generations. Purging {len(asset_ids)} assets.")
                if asset_ids:
                    await self.dao.assets.soft_delete_and_purge_assets(asset_ids)
        except Exception as e:
            logger.error(f"Error during old data cleanup: {e}")

    async def get_financial_report(self, user_id: Optional[str] = None, project_id: Optional[str] = None, breakdown_by: Optional[str] = None) -> FinancialReport:
        """Aggregate token/usage stats, optionally broken down per user or
        per project (``breakdown_by`` of 'created_by' or 'project_id')."""
        summary_data = await self.dao.generations.get_usage_stats(created_by=user_id, project_id=project_id)
        summary = UsageStats(**summary_data)

        by_user, by_project = None, None
        if breakdown_by == "created_by":
            res = await self.dao.generations.get_usage_breakdown(group_by="created_by", project_id=project_id, created_by=user_id)
            by_user = [UsageByEntity(**item) for item in res]
        if breakdown_by == "project_id":
            res = await self.dao.generations.get_usage_breakdown(group_by="project_id", project_id=project_id, created_by=user_id)
            by_project = [UsageByEntity(**item) for item in res]

        return FinancialReport(summary=summary, by_user=by_user, by_project=by_project)

    # --- Private Helpers ---

    async def _create_single_generation(self, generation_request: GenerationRequest, user_id: Optional[str], generation_group_id: str) -> GenerationResponse:
        """Persist one pending generation record and fire its background
        runner; returns the pending record as an API response."""
        try:
            gen_model = Generation(**generation_request.model_dump(exclude={'count'}))
            gen_model.created_by = user_id
            gen_model.generation_group_id = generation_group_id

            gen_id = await self.dao.generations.create_generation(gen_model)
            gen_model.id = gen_id

            # Fire-and-forget: the runner waits on the semaphore, then runs.
            asyncio.create_task(self._queued_generation_runner(gen_model))
            return GenerationResponse(**gen_model.model_dump())
        except Exception:
            logger.exception("Failed to initiate single generation")
            raise

    async def _queued_generation_runner(self, gen: Generation):
        """Run a generation under the global concurrency semaphore, recording
        any failure on the generation record."""
        logger.info(f"Generation {gen.id} waiting for slot...")
        try:
            async with generation_semaphore:
                await self.create_generation(gen)
        except Exception as e:
            await self._handle_generation_failure(gen, e)
            logger.exception(f"Background generation task failed for ID: {gen.id}")

    async def _prepare_generation_input(self, generation: Generation) -> Tuple[List[bytes], str]:
        """Collect reference image bytes (avatar, explicit assets, environment
        assets) and build the final prompt.

        Raises:
            ValueError: when the linked character does not exist.
        """
        media_group_bytes: List[bytes] = []
        prompt = generation.prompt

        # 1. Character Avatar
        if generation.linked_character_id:
            char_info = await self.dao.chars.get_character(generation.linked_character_id)
            if not char_info:
                raise ValueError(f"Character {generation.linked_character_id} not found")

            if generation.use_profile_image and char_info.avatar_asset_id:
                avatar_asset = await self.dao.assets.get_asset(char_info.avatar_asset_id)
                if avatar_asset:
                    data = await self._get_asset_data_bytes(avatar_asset)
                    if data: media_group_bytes.append(data)

        # 2. Reference Assets
        if generation.assets_list:
            assets = await self.dao.assets.get_assets_by_ids(generation.assets_list)
            for asset in assets:
                data = await self._get_asset_data_bytes(asset)
                if data: media_group_bytes.append(data)

        # 3. Environment Assets
        if generation.environment_id:
            env = await self.dao.environments.get_env(generation.environment_id)
            if env and env.asset_ids:
                env_assets = await self.dao.assets.get_assets_by_ids(env.asset_ids)
                for asset in env_assets:
                    data = await self._get_asset_data_bytes(asset)
                    if data: media_group_bytes.append(data)

        # Steer the model towards the references only when any were found.
        if media_group_bytes:
            prompt += (
                " \n\n[Reference Image Guidance]: Use the provided image(s) as the STRICT reference for the main "
                "character's facial features and hair, environment or clothes. Maintain high fidelity to the reference identity."
            )

        return media_group_bytes, prompt

    async def _get_asset_data_bytes(self, asset: Asset) -> Optional[bytes]:
        """Return an image asset's raw bytes, preferring S3 over inline data;
        None for non-image assets."""
        if asset.content_type != AssetContentType.IMAGE:
            return None
        if asset.minio_object_name:
            return await self.s3_adapter.get_file(asset.minio_object_name)
        return asset.data

    def _update_generation_metrics(self, generation: Generation, metrics: Dict[str, Any]):
        """Copy adapter-reported timing/token metrics onto the record."""
        generation.api_execution_time_seconds = metrics.get("api_execution_time_seconds")
        generation.token_usage = metrics.get("token_usage")
        generation.input_token_usage = metrics.get("input_token_usage")
        generation.output_token_usage = metrics.get("output_token_usage")

    async def _handle_generation_failure(self, generation: Generation, error: Optional[Exception]):
        """Mark a generation FAILED with a reason and persist the change."""
        logger.error(f"Generation {generation.id} failed: {error}")
        generation.status = GenerationStatus.FAILED
        # Don't overwrite if reason is already set, unless a new error is provided
        if error:
            generation.failed_reason = str(error)
        elif not generation.failed_reason:
            generation.failed_reason = "Unknown error"

        generation.updated_at = datetime.now(UTC)
        await self.dao.generations.update_generation(generation)

    async def _process_generated_images(self, generation: Generation, bytes_list: List[bytes]) -> List[Asset]:
        """Store each generated image as a new Asset; returns the assets."""
        created_assets = []
        for img_bytes in bytes_list:
            asset = await self._save_asset(
                image_bytes=img_bytes,
                name=f"Generated_{generation.linked_character_id}",
                created_by=generation.created_by,
                project_id=generation.project_id,
                linked_char_id=generation.linked_character_id,
                folder="generated"
            )
            created_assets.append(asset)
        return created_assets

    async def _save_asset(self, image_bytes: bytes, name: str, created_by: str, project_id: str, linked_char_id: str, folder: str) -> Asset:
        """Upload image bytes to S3 (with a generated unique key), create the
        Asset record with a thumbnail, and return it with its new id."""
        thumbnail_bytes = await asyncio.to_thread(create_thumbnail, image_bytes)
        # Timestamp + random suffix makes collisions unlikely within a second.
        filename = f"{folder}/{linked_char_id}/{datetime.now().strftime('%Y%m%d_%H%M%S')}_{random.randint(1000, 9999)}.png"

        await self.s3_adapter.upload_file(filename, image_bytes, content_type="image/png")

        new_asset = Asset(
            name=name,
            type=AssetType.GENERATED,
            content_type=AssetContentType.IMAGE,
            linked_char_id=linked_char_id,
            data=None,
            minio_object_name=filename,
            minio_bucket=self.s3_adapter.bucket_name,
            thumbnail=thumbnail_bytes,
            created_by=created_by,
            project_id=project_id
        )
        asset_id = await self.dao.assets.create_asset(new_asset)
        new_asset.id = str(asset_id)
        return new_asset

    async def _finalize_generation(self, generation: Generation, assets: List[Asset], tech_prompt: str, start_time: datetime):
        """Mark the generation DONE, attach result asset ids, record timing,
        and persist."""
        generation.result_list = [a.id for a in assets]
        generation.status = GenerationStatus.DONE
        generation.progress = 100
        generation.updated_at = datetime.now(UTC)
        generation.tech_prompt = tech_prompt
        generation.execution_time_seconds = (datetime.now() - start_time).total_seconds()
        await self.dao.generations.update_generation(generation)
        logger.info(f"Generation {generation.id} finalized. Time: {generation.execution_time_seconds:.2f}s")

    async def _notify_telegram(self, generation: Generation, assets: List[Asset]):
        """Best-effort: send each result image to the requesting Telegram
        chat; failures are logged, never raised."""
        try:
            for asset in assets:
                # Need to get data for telegram if it's not in Asset object
                img_data = await self.s3_adapter.get_file(asset.minio_object_name) if asset.minio_object_name else asset.data
                if img_data:
                    await self.bot.send_photo(
                        chat_id=generation.telegram_id,
                        photo=BufferedInputFile(img_data, filename=f"{asset.name}.png"),
                        caption=f"Generated from: {generation.prompt[:100]}..."
                    )
        except Exception as e:
            logger.error(f"Failed to send to Telegram: {e}")

    async def _simulate_progress(self, generation: Generation):
        """Fake incremental progress (capped at 90%) every ~4s until the real
        work finishes and this task is cancelled."""
        current_progress = 0
        try:
            while current_progress < 90:
                await asyncio.sleep(4)
                current_progress = min(current_progress + random.randint(5, 15), 90)
                generation.progress = current_progress
                await self.dao.generations.update_generation(generation)
        except asyncio.CancelledError:
            # Expected: cancelled by create_generation's finally block.
            pass

    async def _fetch_external_image(self, external_gen) -> bytes:
        """Resolve the external image payload: download from URL or decode
        inline base64 data.

        Raises:
            ValueError: when neither source is present.
        """
        if external_gen.image_url:
            async with httpx.AsyncClient() as client:
                response = await client.get(external_gen.image_url, timeout=30.0)
                response.raise_for_status()
                return response.content
        elif external_gen.image_data:
            return base64.b64decode(external_gen.image_data)
        raise ValueError("No image source provided")
|
||||
82
api/service/idea_service.py
Normal file
82
api/service/idea_service.py
Normal file
@@ -0,0 +1,82 @@
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
from repos.dao import DAO
|
||||
from models.Idea import Idea
|
||||
|
||||
class IdeaService:
    """CRUD operations for ideas, plus linking generations to ideas."""

    def __init__(self, dao: DAO):
        self.dao = dao

    async def create_idea(self, name: str, description: Optional[str], project_id: Optional[str], user_id: str, inspiration_id: Optional[str] = None) -> Idea:
        """Persist a new idea and return it with its assigned id."""
        new_idea = Idea(
            name=name,
            description=description,
            project_id=project_id,
            created_by=user_id,
            inspiration_id=inspiration_id,
        )
        new_idea.id = await self.dao.ideas.create_idea(new_idea)
        return new_idea

    async def get_ideas(self, project_id: Optional[str], user_id: str, limit: int = 20, offset: int = 0) -> List[dict]:
        """Fetch one page of ideas for the given project/user."""
        return await self.dao.ideas.get_ideas(project_id, user_id, limit, offset)

    async def get_idea(self, idea_id: str) -> Optional[Idea]:
        """Fetch a single idea by id, or None when missing."""
        return await self.dao.ideas.get_idea(idea_id)

    async def update_idea(self, idea_id: str, name: Optional[str] = None, description: Optional[str] = None, inspiration_id: Optional[str] = None) -> Optional[Idea]:
        """Apply the supplied non-None fields to an idea and persist it.

        Returns the updated idea, or None when it does not exist.
        """
        idea = await self.dao.ideas.get_idea(idea_id)
        if not idea:
            return None

        # Only overwrite fields the caller actually provided.
        for attr, value in (("name", name), ("description", description), ("inspiration_id", inspiration_id)):
            if value is not None:
                setattr(idea, attr, value)

        # NOTE(review): naive timestamp — other services use datetime.now(UTC);
        # confirm which convention the DB expects.
        idea.updated_at = datetime.now()
        await self.dao.ideas.update_idea(idea)
        return idea

    async def delete_idea(self, idea_id: str) -> bool:
        """Delete an idea; True on success."""
        return await self.dao.ideas.delete_idea(idea_id)

    async def add_generation_to_idea(self, idea_id: str, generation_id: str) -> bool:
        """Link a generation to an idea.

        Returns False when either record is missing.
        """
        if not await self.dao.ideas.get_idea(idea_id):
            return False

        gen = await self.dao.generations.get_generation(generation_id)
        if not gen:
            return False

        gen.idea_id = idea_id
        gen.updated_at = datetime.now()
        await self.dao.generations.update_generation(gen)
        return True

    async def remove_generation_from_idea(self, idea_id: str, generation_id: str) -> bool:
        """Unlink a generation from an idea.

        Succeeds only when the generation is currently linked to that idea;
        returns False otherwise (or when either record is missing).
        """
        if not await self.dao.ideas.get_idea(idea_id):
            return False

        gen = await self.dao.generations.get_generation(generation_id)
        if not gen:
            return False

        if gen.idea_id != idea_id:
            return False

        gen.idea_id = None
        gen.updated_at = datetime.now()
        await self.dao.generations.update_generation(gen)
        return True
|
||||
146
api/service/inspiration_service.py
Normal file
146
api/service/inspiration_service.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import httpx
|
||||
from fastapi import HTTPException
|
||||
|
||||
from models.Asset import Asset, AssetType, AssetContentType
|
||||
from models.Inspiration import Inspiration
|
||||
from repos.dao import DAO
|
||||
from adapters.s3_adapter import S3Adapter
|
||||
|
||||
# Try to import yt_dlp, but don't crash if it's missing (though we added it to requirements)
|
||||
try:
|
||||
import yt_dlp
|
||||
except ImportError:
|
||||
yt_dlp = None
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class InspirationService:
    """Imports external media (via yt-dlp) as inspirations backed by assets."""

    def __init__(self, dao: DAO, s3_adapter: S3Adapter):
        self.dao = dao
        self.s3_adapter = s3_adapter

    async def create_inspiration(self, source_url: str, created_by: str, project_id: Optional[str] = None, caption: Optional[str] = None) -> Inspiration:
        """Download media from ``source_url``, store it as an Asset in S3, and
        create the Inspiration record referencing it.

        Raises:
            HTTPException(400): when the download fails for any reason.
        """
        # 1. Download content from Instagram
        try:
            content_bytes, content_type, ext = await self._download_content(source_url)
        except Exception as e:
            logger.error(f"Failed to download content from {source_url}: {e}")
            raise HTTPException(status_code=400, detail=f"Failed to download content: {str(e)}")

        # 2. Save as Asset
        filename = f"inspirations/{datetime.now().strftime('%Y%m%d_%H%M%S')}_insta.{ext}"

        await self.s3_adapter.upload_file(filename, content_bytes, content_type=content_type)

        asset = Asset(
            name=f"Inspiration from {source_url}",
            type=AssetType.INSPIRATION,
            content_type=AssetContentType.VIDEO if content_type.startswith("video") else AssetContentType.IMAGE,
            minio_object_name=filename,
            minio_bucket=self.s3_adapter.bucket_name,
            created_by=created_by,
            project_id=project_id
        )
        asset_id = await self.dao.assets.create_asset(asset)

        # 3. Create Inspiration object
        inspiration = Inspiration(
            source_url=source_url,
            caption=caption,
            asset_id=str(asset_id),
            created_by=created_by,
            project_id=project_id
        )
        insp_id = await self.dao.inspirations.create_inspiration(inspiration)
        inspiration.id = insp_id

        return inspiration

    async def get_inspirations(self, project_id: Optional[str], created_by: str, limit: int = 20, offset: int = 0) -> List[Inspiration]:
        """Fetch one page of inspirations for the given project/user."""
        return await self.dao.inspirations.get_inspirations(project_id, created_by, limit, offset)

    async def get_inspiration(self, inspiration_id: str) -> Optional[Inspiration]:
        """Fetch a single inspiration by id, or None when missing."""
        return await self.dao.inspirations.get_inspiration(inspiration_id)

    async def mark_as_completed(self, inspiration_id: str, is_completed: bool = True) -> Optional[Inspiration]:
        """Set the completed flag on an inspiration; None when missing."""
        inspiration = await self.dao.inspirations.get_inspiration(inspiration_id)
        if not inspiration:
            return None

        inspiration.is_completed = is_completed
        # NOTE(review): naive timestamp — other services use datetime.now(UTC).
        inspiration.updated_at = datetime.now()
        await self.dao.inspirations.update_inspiration(inspiration)
        return inspiration

    async def delete_inspiration(self, inspiration_id: str) -> bool:
        """Delete an inspiration and its backing asset; False when missing."""
        inspiration = await self.dao.inspirations.get_inspiration(inspiration_id)
        if not inspiration:
            return False

        # Delete associated asset
        if inspiration.asset_id:
            await self.dao.assets.delete_asset(inspiration.asset_id)

        return await self.dao.inspirations.delete_inspiration(inspiration_id)

    async def _download_content(self, url: str) -> Tuple[bytes, str, str]:
        """
        Downloads content using yt-dlp.
        Returns (content_bytes, content_type, extension)

        The blocking download runs in a worker thread; the temp directory is
        cleaned up before this method returns, so the bytes are fully read
        into memory first.

        Raises:
            RuntimeError: when yt-dlp is not installed.
        """
        if not yt_dlp:
            raise RuntimeError("yt-dlp is not installed")

        logger.info(f"Downloading from {url} using yt-dlp...")

        def run_yt_dlp():
            # Synchronous worker: downloads into a throwaway temp dir and
            # returns the file contents plus a guessed content type.
            with tempfile.TemporaryDirectory() as tmpdirname:
                ydl_opts = {
                    'outtmpl': f'{tmpdirname}/%(id)s.%(ext)s',
                    'quiet': True,
                    'no_warnings': True,
                    'format': 'best',  # Best quality single file
                    'noplaylist': True,  # Only single video if it's a playlist/profile
                    'writethumbnail': False,
                    'writesubtitles': False,
                }

                with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                    ydl.download([url])

                # Find the downloaded file
                files = os.listdir(tmpdirname)
                if not files:
                    raise Exception("No files downloaded")

                # Pick the largest file if multiple (e.g. if yt-dlp downloaded parts)
                # But with 'format': 'best', it should be one.
                # If carousel, it might be multiple. Let's pick the first one.
                filename = files[0]
                filepath = os.path.join(tmpdirname, filename)

                with open(filepath, 'rb') as f:
                    data = f.read()

                ext = filename.split('.')[-1].lower()

                # Determine content type
                if ext in ['mp4', 'mov', 'avi', 'mkv', 'webm']:
                    content_type = f"video/{ext}"
                    if ext == 'mov': content_type = "video/quicktime"
                elif ext in ['jpg', 'jpeg', 'png', 'webp']:
                    content_type = f"image/{ext}"
                    if ext == 'jpg': content_type = "image/jpeg"
                else:
                    content_type = "application/octet-stream"

                return data, content_type, ext

        return await asyncio.to_thread(run_yt_dlp)
|
||||
79
api/service/post_service.py
Normal file
79
api/service/post_service.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from typing import List, Optional
|
||||
from datetime import datetime, UTC
|
||||
|
||||
from repos.dao import DAO
|
||||
from models.Post import Post
|
||||
|
||||
|
||||
class PostService:
|
||||
def __init__(self, dao: DAO):
|
||||
self.dao = dao
|
||||
|
||||
async def create_post(
|
||||
self,
|
||||
date: datetime,
|
||||
topic: str,
|
||||
generation_ids: List[str],
|
||||
project_id: Optional[str],
|
||||
user_id: str,
|
||||
) -> Post:
|
||||
post = Post(
|
||||
date=date,
|
||||
topic=topic,
|
||||
generation_ids=generation_ids,
|
||||
project_id=project_id,
|
||||
created_by=user_id,
|
||||
)
|
||||
post_id = await self.dao.posts.create_post(post)
|
||||
post.id = post_id
|
||||
return post
|
||||
|
||||
async def get_post(self, post_id: str) -> Optional[Post]:
|
||||
return await self.dao.posts.get_post(post_id)
|
||||
|
||||
async def get_posts(
|
||||
self,
|
||||
project_id: Optional[str],
|
||||
user_id: str,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
date_from: Optional[datetime] = None,
|
||||
date_to: Optional[datetime] = None,
|
||||
) -> List[Post]:
|
||||
return await self.dao.posts.get_posts(project_id, user_id, limit, offset, date_from, date_to)
|
||||
|
||||
async def update_post(
|
||||
self,
|
||||
post_id: str,
|
||||
date: Optional[datetime] = None,
|
||||
topic: Optional[str] = None,
|
||||
) -> Optional[Post]:
|
||||
post = await self.dao.posts.get_post(post_id)
|
||||
if not post:
|
||||
return None
|
||||
|
||||
updates: dict = {"updated_at": datetime.now(UTC)}
|
||||
if date is not None:
|
||||
updates["date"] = date
|
||||
if topic is not None:
|
||||
updates["topic"] = topic
|
||||
|
||||
await self.dao.posts.update_post(post_id, updates)
|
||||
|
||||
# Return refreshed post
|
||||
return await self.dao.posts.get_post(post_id)
|
||||
|
||||
async def delete_post(self, post_id: str) -> bool:
|
||||
return await self.dao.posts.delete_post(post_id)
|
||||
|
||||
async def add_generations(self, post_id: str, generation_ids: List[str]) -> bool:
|
||||
post = await self.dao.posts.get_post(post_id)
|
||||
if not post:
|
||||
return False
|
||||
return await self.dao.posts.add_generations(post_id, generation_ids)
|
||||
|
||||
async def remove_generation(self, post_id: str, generation_id: str) -> bool:
|
||||
post = await self.dao.posts.get_post(post_id)
|
||||
if not post:
|
||||
return False
|
||||
return await self.dao.posts.remove_generation(post_id, generation_id)
|
||||
39
config.py
Normal file
39
config.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import os
|
||||
from typing import Optional
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
# Telegram Bot
|
||||
BOT_TOKEN: str
|
||||
ADMIN_ID: int = 0
|
||||
|
||||
# AI Service
|
||||
GEMINI_API_KEY: str
|
||||
|
||||
# Database
|
||||
MONGO_HOST: str = "mongodb://localhost:27017"
|
||||
DB_NAME: str = "my_bot_db"
|
||||
|
||||
# S3 Storage (Minio)
|
||||
MINIO_ENDPOINT: str = "http://localhost:9000"
|
||||
MINIO_ACCESS_KEY: str = "minioadmin"
|
||||
MINIO_SECRET_KEY: str = "minioadmin"
|
||||
MINIO_BUCKET: str = "ai-char"
|
||||
|
||||
# External API
|
||||
EXTERNAL_API_SECRET: Optional[str] = None
|
||||
|
||||
# JWT Security
|
||||
SECRET_KEY: str = "CHANGE_ME_TO_A_SUPER_SECRET_KEY"
|
||||
ALGORITHM: str = "HS256"
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 * 24 * 60 # 30 days
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=os.getenv("ENV_FILE", ".env"),
|
||||
env_file_encoding="utf-8",
|
||||
extra="ignore"
|
||||
)
|
||||
|
||||
|
||||
settings = Settings()
|
||||
6
deploy.sh
Executable file
6
deploy.sh
Executable file
@@ -0,0 +1,6 @@
|
||||
|
||||
ssh root@31.59.58.220 "
|
||||
cd /root/bots/ai-char-bot &&
|
||||
git pull &&
|
||||
docker compose up -d --build
|
||||
"
|
||||
@@ -4,6 +4,26 @@ services:
|
||||
container_name: ai-bot
|
||||
build:
|
||||
context: .
|
||||
network: host
|
||||
network_mode: host
|
||||
# УБРАЛИ network_mode: host
|
||||
ports:
|
||||
- "8090:8090" # Вернули проброс порта
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- minio
|
||||
environment:
|
||||
# Важно: внутри докера к другим контейнерам обращаемся по имени сервиса!
|
||||
MINIO_ENDPOINT: "http://minio:9000"
|
||||
|
||||
minio:
|
||||
image: minio/minio:latest
|
||||
container_name: minio
|
||||
restart: unless-stopped
|
||||
command: server /data --console-address ":9001"
|
||||
environment:
|
||||
MINIO_ROOT_USER: admin
|
||||
MINIO_ROOT_PASSWORD: SuperSecretPassword123!
|
||||
ports:
|
||||
- "9000:9000"
|
||||
- "9001:9001"
|
||||
volumes:
|
||||
- ./minio_data:/data
|
||||
104
main.py
104
main.py
@@ -1,104 +0,0 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
|
||||
from aiogram import Bot, Dispatcher, Router, F
|
||||
from aiogram.client.default import DefaultBotProperties
|
||||
from aiogram.enums import ParseMode
|
||||
from aiogram.filters import CommandStart, Command, CommandObject
|
||||
from aiogram.types import Message, BufferedInputFile
|
||||
from aiogram.fsm.storage.mongo import MongoStorage
|
||||
from dotenv import load_dotenv
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
|
||||
# Импорты
|
||||
from adapters.google_adapter import GoogleAdapter
|
||||
from middlewares.album import AlbumMiddleware
|
||||
from middlewares.auth import AuthMiddleware
|
||||
from middlewares.dao import DaoMiddleware
|
||||
from repos.char_repo import CharacterRepo
|
||||
from repos.dao import DAO
|
||||
from repos.user_repo import UsersRepo
|
||||
from routers import char_router
|
||||
# ВАЖНО: Импортируем роутер с логикой кнопок, а не создаем пустой
|
||||
from routers.auth_router import router as auth_router
|
||||
from routers.gen_router import router as gen_router
|
||||
from routers.char_router import router as char_router
|
||||
|
||||
|
||||
load_dotenv()
|
||||
|
||||
# Настройки
|
||||
BOT_TOKEN = os.getenv("BOT_TOKEN")
|
||||
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
|
||||
MONGO_HOST = os.getenv("MONGO_HOST")
|
||||
ADMIN_ID = int(os.getenv("ADMIN_ID")) # Сразу преобразуем в int
|
||||
|
||||
# Инициализация
|
||||
bot = Bot(token=BOT_TOKEN, default=DefaultBotProperties(parse_mode=ParseMode.HTML))
|
||||
|
||||
# БД
|
||||
mongo_client = AsyncIOMotorClient(MONGO_HOST)
|
||||
users_repo = UsersRepo(mongo_client)
|
||||
char_repo = CharacterRepo(mongo_client)
|
||||
|
||||
# Dispatcher
|
||||
# Если MongoStorage пока не настроен на authSource=admin, можно временно убрать storage=...
|
||||
dp = Dispatcher(storage=MongoStorage(mongo_client))
|
||||
|
||||
# ВНЕДРЕНИЕ ЗАВИСИМОСТЕЙ (чтобы они были доступны в хендлерах)
|
||||
dp["repo"] = users_repo
|
||||
dp["admin_id"] = ADMIN_ID
|
||||
dp["gemini"] = GoogleAdapter(api_key=GEMINI_API_KEY) # Инициализируем тут
|
||||
|
||||
# РОУТИНГ
|
||||
|
||||
# 1. Роутер авторизации (кнопки) - ПОДКЛЮЧАЕМ ПЕРВЫМ И БЕЗ МИДЛВАРИ
|
||||
dp.include_router(auth_router)
|
||||
main_router = Router()
|
||||
dp.include_router(main_router)
|
||||
dp.include_router(char_router)
|
||||
dp.include_router(gen_router)
|
||||
|
||||
# 2. Основной роутер (чат с ботом)
|
||||
|
||||
# Вешаем защиту ТОЛЬКО на основной роутер
|
||||
main_router.message.middleware(AuthMiddleware(repo=users_repo, admin_id=ADMIN_ID))
|
||||
gen_router.message.middleware(AuthMiddleware(repo=users_repo, admin_id=ADMIN_ID))
|
||||
gen_router.message.middleware(AlbumMiddleware(latency=0.8))
|
||||
dp.update.middleware(DaoMiddleware(dao=DAO(client=mongo_client)))
|
||||
|
||||
|
||||
|
||||
def setup_logging() -> None:
|
||||
logging.basicConfig(level=logging.INFO,
|
||||
format="%(asctime)s [%(levelname)s] %(name)s: %(message)s")
|
||||
|
||||
|
||||
# --- ХЕНДЛЕРЫ ОСНОВНОГО РОУТЕРА ---
|
||||
# Переносим их прямо сюда или в отдельный файл routers/chat_router.py
|
||||
@main_router.message(Command("help"))
|
||||
async def show_help(message: Message) -> None:
|
||||
await message.answer("Для того, чтобы обратиться для текстовой генерации - просто отправь промпт.\n\n"
|
||||
"Для генерации фото - /image {prompt}\n\n"
|
||||
"Можно отправить фото и команду /image {prompt}\n\n"
|
||||
"Диалоги не поддерживаются!!!! <b>Каждое новое сообщение - новый диалог</b>")
|
||||
|
||||
|
||||
@main_router.message(CommandStart())
|
||||
async def cmd_start(message: Message):
|
||||
await message.answer("👋 Привет! Я готов к работе.\n\n"
|
||||
"Для того, чтобы обратиться для текстовой генерации - просто отправь промпт.\n\n"
|
||||
"Для генерации фото - /image {prompt}\n\n"
|
||||
"Можно отправить фото и команду /image {prompt}\n\n"
|
||||
"Диалоги не поддерживаются!!!! <b>Каждое новое сообщение - новый диалог</b>"
|
||||
)
|
||||
|
||||
|
||||
# --- ЗАПУСК ---
|
||||
if __name__ == "__main__":
|
||||
setup_logging()
|
||||
try:
|
||||
asyncio.run(dp.start_polling(bot))
|
||||
except KeyboardInterrupt:
|
||||
print("Bot stopped")
|
||||
BIN
middlewares/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
middlewares/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
middlewares/__pycache__/album.cpython-313.pyc
Normal file
BIN
middlewares/__pycache__/album.cpython-313.pyc
Normal file
Binary file not shown.
BIN
middlewares/__pycache__/auth.cpython-313.pyc
Normal file
BIN
middlewares/__pycache__/auth.cpython-313.pyc
Normal file
Binary file not shown.
BIN
middlewares/__pycache__/dao.cpython-313.pyc
Normal file
BIN
middlewares/__pycache__/dao.cpython-313.pyc
Normal file
Binary file not shown.
@@ -27,19 +27,19 @@ class AlbumMiddleware(BaseMiddleware):
|
||||
# Ждем сбора остальных частей
|
||||
await asyncio.sleep(self.latency)
|
||||
|
||||
# Проверяем, что ключ все еще существует (на всякий случай)
|
||||
# Проверяем, что ключ все еще существует
|
||||
if group_id in self.album_data:
|
||||
# Передаем собранный альбом в хендлер
|
||||
# Сортируем по message_id, чтобы порядок был верным
|
||||
self.album_data[group_id].sort(key=lambda x: x.message_id)
|
||||
data["album"] = self.album_data[group_id]
|
||||
current_album = self.album_data[group_id]
|
||||
current_album.sort(key=lambda x: x.message_id)
|
||||
data["album"] = current_album
|
||||
return await handler(event, data)
|
||||
|
||||
finally:
|
||||
# ЧИСТКА: Удаляем всегда, если это "головной" поток, который создал запись
|
||||
# Проверяем, что мы удаляем именно то, что создали, и ключ существует
|
||||
if group_id in self.album_data and self.album_data[group_id][0] == event:
|
||||
del self.album_data[group_id]
|
||||
# ЧИСТКА: Удаляем запись после обработки или таймаута
|
||||
# Используем pop() с дефолтом, чтобы избежать KeyError
|
||||
self.album_data.pop(group_id, None)
|
||||
|
||||
else:
|
||||
# Если группа уже собирается - просто добавляем и выходим
|
||||
|
||||
11
models/Album.py
Normal file
11
models/Album.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from datetime import datetime, UTC
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class Album(BaseModel):
|
||||
id: str | None = None
|
||||
name: str
|
||||
description: str | None = None
|
||||
cover_asset_id: str | None = None
|
||||
generation_ids: list[str] = []
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
75
models/Asset.py
Normal file
75
models/Asset.py
Normal file
@@ -0,0 +1,75 @@
|
||||
from datetime import datetime, UTC
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, computed_field, Field, model_validator
|
||||
|
||||
|
||||
class AssetContentType(str, Enum):
|
||||
IMAGE = 'image'
|
||||
PROMPT = 'prompt'
|
||||
VIDEO = 'video'
|
||||
|
||||
class AssetType(str, Enum):
|
||||
UPLOADED = 'uploaded'
|
||||
GENERATED = 'generated'
|
||||
INSPIRATION = 'inspiration'
|
||||
|
||||
|
||||
class Asset(BaseModel):
|
||||
id: str | None = None
|
||||
name: str
|
||||
type: AssetType = AssetType.GENERATED
|
||||
content_type: AssetContentType = AssetContentType.IMAGE
|
||||
linked_char_id: str | None = None
|
||||
data: bytes | None = None
|
||||
tg_doc_file_id: str | None = None
|
||||
tg_photo_file_id: str | None = None
|
||||
minio_object_name: str | None = None
|
||||
minio_bucket: str | None = None
|
||||
minio_thumbnail_object_name: str | None = None
|
||||
thumbnail: bytes | None = None
|
||||
tags: list[str] = []
|
||||
created_by: str | None = None
|
||||
project_id: str | None = None
|
||||
is_deleted: bool = False
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
|
||||
@model_validator(mode='before')
|
||||
@classmethod
|
||||
def check_legacy_type(cls, data: Any) -> Any:
|
||||
if isinstance(data, dict):
|
||||
# Если поле type содержит старые значения ("image", "prompt"),
|
||||
# переносим их в content_type, а type ставим по умолчанию (GENERATED)
|
||||
# или пытаемся угадать.
|
||||
# Но по задаче мы дефолтим в GENERATED, и script'ом поправим.
|
||||
|
||||
raw_type = data.get('type')
|
||||
if raw_type in ['image', 'prompt']:
|
||||
data['content_type'] = raw_type
|
||||
# Если в базе нет нового поля type, оно встанет в default=GENERATED
|
||||
# Чтобы не вызывало ошибку валидации AssetType, меняем его или удаляем,
|
||||
# полагаясь на default.
|
||||
# Но если мы просто удалим, поле type примет дефолтное значение.
|
||||
# Однако, если мы хотим явно отличить, можно ничего не делать,
|
||||
# но тогда валидация поля `type` упадет, т.к. "image" != "generated".
|
||||
# Поэтому удаляем старое значение из type, чтобы сработал дефолт.
|
||||
if 'type' in data:
|
||||
del data['type']
|
||||
|
||||
# Если content_type нет в данных (легаси), пытаемся его восстановить из удалённого type
|
||||
# (выше мы его переложили).
|
||||
|
||||
return data
|
||||
|
||||
# --- CALCULATED FIELD ---
|
||||
@computed_field
|
||||
@property
|
||||
def url(self) -> str:
|
||||
"""
|
||||
Это поле автоматически вычислится и попадет в model_dump() / .json()
|
||||
"""
|
||||
if self.id:
|
||||
return f"/assets/{self.id}"
|
||||
return ""
|
||||
@@ -1,10 +1,15 @@
|
||||
|
||||
from pydantic import BaseModel
|
||||
from pydantic_core.core_schema import computed_field
|
||||
|
||||
|
||||
class Character(BaseModel):
|
||||
id: str | None
|
||||
id: str | None = None
|
||||
name: str
|
||||
character_image_doc_tg_id: str
|
||||
character_image_tg_id: str | None
|
||||
character_bio: str
|
||||
|
||||
avatar_asset_id: str | None = None
|
||||
avatar_image: str | None = None
|
||||
character_image_doc_tg_id: str | None = None
|
||||
character_image_tg_id: str | None = None
|
||||
character_bio: str | None = None
|
||||
created_by: str | None = None
|
||||
project_id: str | None = None
|
||||
|
||||
19
models/Environment.py
Normal file
19
models/Environment.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
from datetime import datetime
|
||||
from bson import ObjectId
|
||||
|
||||
|
||||
class Environment(BaseModel):
|
||||
id: str | None = Field(None, alias="_id")
|
||||
character_id: str
|
||||
name: str = Field(..., min_length=1)
|
||||
description: str | None = None
|
||||
asset_ids: list[str] = Field(default_factory=list)
|
||||
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
updated_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
|
||||
model_config = ConfigDict(
|
||||
populate_by_name=True,
|
||||
json_encoders={ObjectId: str},
|
||||
arbitrary_types_allowed=True
|
||||
)
|
||||
54
models/Generation.py
Normal file
54
models/Generation.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from datetime import datetime, UTC
|
||||
from enum import Enum
|
||||
|
||||
from pydantic import BaseModel, Field, computed_field
|
||||
|
||||
from models.enums import AspectRatios, Quality
|
||||
|
||||
|
||||
class GenerationStatus(str, Enum):
|
||||
RUNNING = "running"
|
||||
DONE = "done"
|
||||
FAILED = "failed"
|
||||
|
||||
class Generation(BaseModel):
|
||||
id: str | None = None
|
||||
status: GenerationStatus = GenerationStatus.RUNNING
|
||||
failed_reason: str | None = None
|
||||
linked_character_id: str | None = None
|
||||
telegram_id: int | None = None
|
||||
use_profile_image: bool = True
|
||||
aspect_ratio: AspectRatios
|
||||
quality: Quality
|
||||
prompt: str
|
||||
model: str | None = None
|
||||
seed: int | None = None
|
||||
tech_prompt: str | None = None
|
||||
assets_list: list[str] = Field(default_factory=list)
|
||||
result_list: list[str] = Field(default_factory=list)
|
||||
result: str | None = None
|
||||
progress: int = 0
|
||||
execution_time_seconds: float | None = None
|
||||
api_execution_time_seconds: float | None = None
|
||||
token_usage: int | None = None
|
||||
input_token_usage: int | None = None
|
||||
output_token_usage: int | None = None
|
||||
is_deleted: bool = False
|
||||
album_id: str | None = None
|
||||
environment_id: str | None = None
|
||||
generation_group_id: str | None = None
|
||||
created_by: str | None = None # Stores User ID (Telegram ID or Web User ObjectId)
|
||||
project_id: str | None = None
|
||||
idea_id: str | None = None
|
||||
liked_by: list[str] = Field(default_factory=list)
|
||||
nsfw: bool = False
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
|
||||
@computed_field
|
||||
def cost(self) -> float:
|
||||
if self.status == GenerationStatus.DONE and self.input_token_usage and self.output_token_usage:
|
||||
cost_input = self.input_token_usage * 0.000002
|
||||
cost_output = self.output_token_usage * 0.00012
|
||||
return round(cost_input + cost_output, 3)
|
||||
return 0.0
|
||||
13
models/Idea.py
Normal file
13
models/Idea.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from datetime import datetime
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class Idea(BaseModel):
|
||||
id: str | None = None
|
||||
name: str = "New Idea"
|
||||
description: str | None = None
|
||||
project_id: str | None = None
|
||||
inspiration_id: str | None = None # Link to Inspiration
|
||||
created_by: str # User ID
|
||||
is_deleted: bool = False
|
||||
created_at: datetime = Field(default_factory=datetime.now)
|
||||
updated_at: datetime = Field(default_factory=datetime.now)
|
||||
15
models/Inspiration.py
Normal file
15
models/Inspiration.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from datetime import datetime, UTC
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class Inspiration(BaseModel):
|
||||
id: str | None = None
|
||||
source_url: str
|
||||
caption: str | None = None
|
||||
asset_id: str
|
||||
is_completed: bool = False
|
||||
created_by: str
|
||||
project_id: str | None = None
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
22
models/Post.py
Normal file
22
models/Post.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from datetime import datetime, timezone, UTC
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
|
||||
|
||||
class Post(BaseModel):
|
||||
id: str | None = None
|
||||
date: datetime
|
||||
topic: str
|
||||
generation_ids: list[str] = Field(default_factory=list)
|
||||
project_id: str | None = None
|
||||
created_by: str
|
||||
is_deleted: bool = False
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
|
||||
|
||||
@model_validator(mode="after")
|
||||
def ensure_tz_aware(self):
|
||||
for field in ("date", "created_at", "updated_at"):
|
||||
val = getattr(self, field)
|
||||
if val is not None and val.tzinfo is None:
|
||||
setattr(self, field, val.replace(tzinfo=timezone.utc))
|
||||
return self
|
||||
11
models/Project.py
Normal file
11
models/Project.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from datetime import datetime
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class Project(BaseModel):
|
||||
id: str | None = None
|
||||
name: str
|
||||
description: str | None = None
|
||||
owner_id: str
|
||||
members: list[str] = [] # List of User IDs
|
||||
is_deleted: bool = False
|
||||
created_at: datetime = Field(default_factory=datetime.now)
|
||||
BIN
models/__pycache__/Asset.cpython-313.pyc
Normal file
BIN
models/__pycache__/Asset.cpython-313.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/Character.cpython-313.pyc
Normal file
BIN
models/__pycache__/Character.cpython-313.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/Generation.cpython-313.pyc
Normal file
BIN
models/__pycache__/Generation.cpython-313.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
models/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/enums.cpython-313.pyc
Normal file
BIN
models/__pycache__/enums.cpython-313.pyc
Normal file
Binary file not shown.
@@ -1,19 +1,71 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class AspectRatios(Enum):
|
||||
NINESIXTEEN = '9:16'
|
||||
SIXTEENNINE = '16:9'
|
||||
THREEFOUR = '3:4'
|
||||
FOURTHREE = '4:3'
|
||||
class AspectRatios(str, Enum):
|
||||
ONEONE = "1:1"
|
||||
TWOTHREE = "2:3"
|
||||
THREETWO = "3:2"
|
||||
THREEFOUR = "3:4"
|
||||
FOURTHREE = "4:3"
|
||||
FOURFIVE = "4:5"
|
||||
FIVEFOUR = "5:4"
|
||||
NINESIXTEEN = "9:16"
|
||||
SIXTEENNINE = "16:9"
|
||||
TWENTYONENINE = "21:9"
|
||||
|
||||
@classmethod
|
||||
def _missing_(cls, value):
|
||||
mapping = {
|
||||
"NINESIXTEEN": cls.NINESIXTEEN,
|
||||
"SIXTEENNINE": cls.SIXTEENNINE,
|
||||
"THREEFOUR": cls.THREEFOUR,
|
||||
"FOURTHREE": cls.FOURTHREE,
|
||||
}
|
||||
return mapping.get(value)
|
||||
|
||||
@property
|
||||
def value_ratio(self) -> str:
|
||||
return self.value
|
||||
|
||||
|
||||
class Quality(Enum):
|
||||
ONEK = '1K'
|
||||
TWOK = '2K'
|
||||
FOURK = '4K'
|
||||
class Quality(str, Enum):
|
||||
ONEK = 'ONEK'
|
||||
TWOK = 'TWOK'
|
||||
FOURK = 'FOURK'
|
||||
|
||||
@property
|
||||
def value_quality(self) -> str:
|
||||
return {
|
||||
Quality.ONEK: '1K',
|
||||
Quality.TWOK: '2K',
|
||||
Quality.FOURK: '4K'
|
||||
}[self]
|
||||
|
||||
|
||||
class GenType(Enum):
|
||||
class GenType(str, Enum):
|
||||
TEXT = 'Text'
|
||||
IMAGE = 'Image'
|
||||
IMAGE = 'Image'
|
||||
|
||||
@property
|
||||
def value_type(self) -> str:
|
||||
return {
|
||||
GenType.TEXT: 'Text',
|
||||
GenType.IMAGE: 'Image',
|
||||
}[self]
|
||||
|
||||
|
||||
class TextModel(str, Enum):
|
||||
GEMINI_3_1_PRO_PREVIEW = "gemini-3.1-pro-preview"
|
||||
|
||||
@property
|
||||
def value_model(self) -> str:
|
||||
return self.value
|
||||
|
||||
|
||||
class ImageModel(str, Enum):
|
||||
GEMINI_3_PRO_IMAGE_PREVIEW = "gemini-3-pro-image-preview"
|
||||
GEMINI_3_1_FLASH_IMAGE_PREVIEW = "gemini-3.1-flash-image-preview"
|
||||
|
||||
@property
|
||||
def value_model(self) -> str:
|
||||
return self.value
|
||||
|
||||
BIN
repos/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
repos/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
repos/__pycache__/assets_repo.cpython-313.pyc
Normal file
BIN
repos/__pycache__/assets_repo.cpython-313.pyc
Normal file
Binary file not shown.
BIN
repos/__pycache__/char_repo.cpython-313.pyc
Normal file
BIN
repos/__pycache__/char_repo.cpython-313.pyc
Normal file
Binary file not shown.
BIN
repos/__pycache__/dao.cpython-313.pyc
Normal file
BIN
repos/__pycache__/dao.cpython-313.pyc
Normal file
Binary file not shown.
BIN
repos/__pycache__/generation_repo.cpython-313.pyc
Normal file
BIN
repos/__pycache__/generation_repo.cpython-313.pyc
Normal file
Binary file not shown.
BIN
repos/__pycache__/user_repo.cpython-313.pyc
Normal file
BIN
repos/__pycache__/user_repo.cpython-313.pyc
Normal file
Binary file not shown.
61
repos/albums_repo.py
Normal file
61
repos/albums_repo.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from typing import List, Optional
|
||||
import logging
|
||||
from bson import ObjectId
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
|
||||
from models.Album import Album
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class AlbumsRepo:
|
||||
def __init__(self, client: AsyncIOMotorClient, db_name="bot_db"):
|
||||
self.collection = client[db_name]["albums"]
|
||||
|
||||
async def create_album(self, album: Album) -> str:
|
||||
res = await self.collection.insert_one(album.model_dump())
|
||||
return str(res.inserted_id)
|
||||
|
||||
async def get_album(self, album_id: str) -> Optional[Album]:
|
||||
try:
|
||||
res = await self.collection.find_one({"_id": ObjectId(album_id)})
|
||||
if not res:
|
||||
return None
|
||||
|
||||
res["id"] = str(res.pop("_id"))
|
||||
return Album(**res)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
async def get_albums(self, limit: int = 10, offset: int = 0) -> List[Album]:
|
||||
res = await self.collection.find().sort("created_at", -1).skip(offset).limit(limit).to_list(None)
|
||||
albums = []
|
||||
for doc in res:
|
||||
doc["id"] = str(doc.pop("_id"))
|
||||
albums.append(Album(**doc))
|
||||
return albums
|
||||
|
||||
async def update_album(self, album_id: str, album: Album) -> bool:
|
||||
if not album.id:
|
||||
album.id = album_id
|
||||
|
||||
model_dump = album.model_dump()
|
||||
res = await self.collection.update_one({"_id": ObjectId(album_id)}, {"$set": model_dump})
|
||||
return res.modified_count > 0
|
||||
|
||||
async def delete_album(self, album_id: str) -> bool:
|
||||
res = await self.collection.delete_one({"_id": ObjectId(album_id)})
|
||||
return res.deleted_count > 0
|
||||
|
||||
async def add_generation(self, album_id: str, generation_id: str, cover_asset_id: Optional[str] = None) -> bool:
|
||||
res = await self.collection.update_one(
|
||||
{"_id": ObjectId(album_id)},
|
||||
{"$addToSet": {"generation_ids": generation_id}, "$set": {"cover_asset_id": cover_asset_id}}
|
||||
)
|
||||
return res.modified_count > 0
|
||||
|
||||
async def remove_generation(self, album_id: str, generation_id: str) -> bool:
|
||||
res = await self.collection.update_one(
|
||||
{"_id": ObjectId(album_id)},
|
||||
{"$pull": {"generation_ids": generation_id}}
|
||||
)
|
||||
return res.modified_count > 0
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user