refactored
Checks: Deploy on push / deploy (push) failing after 6s

2025-08-17 17:56:31 +03:00
parent e78e12eeee
commit 9a2b792f08
98 changed files with 702 additions and 904 deletions


@@ -137,7 +137,7 @@ jobs:
from orm.shout import Shout
from orm.topic import Topic
from auth.orm import Author, AuthorBookmark, AuthorRating, AuthorFollower
-from services.db import engine
+from storage.db import engine
from sqlalchemy import inspect
print('✅ Engine imported successfully')
@@ -166,8 +166,8 @@ jobs:
- name: Start servers
run: |
-chmod +x scripts/ci-server.py
+chmod +x ./ci-server.py
-timeout 300 python scripts/ci-server.py &
+timeout 300 python ./ci-server.py &
echo $! > ci-server.pid
echo "Waiting for servers..."
@@ -184,8 +184,13 @@ jobs:
# Создаем папку для результатов тестов
mkdir -p test-results
-# В CI пропускаем тесты здоровья серверов, так как они могут не пройти
+# Сначала проверяем здоровье серверов
-echo "🏥 В CI режиме пропускаем тесты здоровья серверов..."
+echo "🏥 Проверяем здоровье серверов..."
+if uv run pytest tests/test_server_health.py -v; then
+echo "✅ Серверы здоровы!"
+else
+echo "⚠️ Тест здоровья серверов не прошел, но продолжаем..."
+fi
for test_type in "not e2e" "integration" "e2e" "browser"; do
echo "Running $test_type tests..."
@@ -284,19 +289,20 @@ jobs:
fetch-depth: 0
- name: Deploy
if: github.ref == 'refs/heads/dev'
env:
HOST_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
TARGET: ${{ github.ref == 'refs/heads/dev' && 'core' || 'discoursio-api' }}
SERVER: ${{ github.ref == 'refs/heads/dev' && 'STAGING' || 'V' }}
run: |
-echo "🚀 Deploying to $SERVER..."
+echo "🚀 Deploying to $ENV..."
mkdir -p ~/.ssh
echo "$HOST_KEY" > ~/.ssh/known_hosts
chmod 600 ~/.ssh/known_hosts
-git remote add dokku dokku@v3.dscrs.site:core
+git remote add dokku dokku@staging.discours.io:$TARGET
git push dokku HEAD:main -f
-echo "✅ deployment completed!"
+echo "✅ $ENV deployment completed!"
# ===== SUMMARY =====
summary:

.gitignore (2 lines changed)

@@ -177,5 +177,5 @@ panel/types.gen.ts
tmp
test-results
page_content.html
test_output
docs/progress/*


@@ -2,6 +2,22 @@
Все значимые изменения в проекте документируются в этом файле.
## [0.9.7] - 2025-08-17
### 🔧 Исправления архитектуры
- **Устранены циклические импорты в ORM**: Исправлена проблема с циклическими импортами между `orm/community.py` и `orm/shout.py`
- **Оптимизированы импорты моделей**: Убран прямой импорт `Shout` из `orm/community.py`, заменен на строковые ссылки
- **Исправлены предупреждения ruff**: Добавлены `# noqa: PLW0603` комментарии для подавления предупреждений о `global` в `rbac/interface.py`
- **Улучшена совместимость SQLAlchemy**: Использование `text()` для сложных SQL выражений в `CommunityStats`
### 🏷️ Типизация
- **Исправлены mypy ошибки**: Все ORM модели теперь корректно проходят проверку типов
- **Улучшена совместимость**: Использование `BaseModel` вместо алиаса `Base` для избежания путаницы
### 🧹 Код-качество
- **Упрощена архитектура импортов**: Убраны сложные конструкции для избежания `global`
- **Сохранена функциональность**: Все методы `CommunityStats` работают корректно с новой архитектурой
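Below is a minimal, hypothetical sketch of the string-reference technique mentioned in the import-fix entry above; the models and columns are simplified placeholders, not the project's actual `Community`/`Shout` definitions. SQLAlchemy resolves a relationship target given as a string only when the mappers are configured, so `orm/community.py` no longer has to import `orm/shout.py` at module import time.

```python
# Hypothetical, simplified example; not the real schema of this repository.
from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


# Imagine this class living in orm/community.py ...
class Community(Base):
    __tablename__ = "community"
    id: Mapped[int] = mapped_column(primary_key=True)
    # The target model is named as a string, so no "from orm.shout import Shout"
    # is needed while this module is being imported.
    shouts: Mapped[list["Shout"]] = relationship("Shout", back_populates="community_rel")


# ... and this one in orm/shout.py.
class Shout(Base):
    __tablename__ = "shout"
    id: Mapped[int] = mapped_column(primary_key=True)
    community: Mapped[int] = mapped_column(ForeignKey("community.id"))
    community_rel: Mapped["Community"] = relationship("Community", back_populates="shouts")
```

Both halves of the former cycle then depend only on the shared declarative base, which is the shape the `orm/shout.py` diff below moves toward with its plain `from orm.base import BaseModel` import.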
## [0.9.6] - 2025-08-12
### 🚀 CI/CD и E2E тестирование


@@ -134,11 +134,11 @@ chmod +x scripts/test-ci-local.sh
```
### CI Server Management
-The `scripts/ci-server.py` script manages servers for CI:
+The `./ci-server.py` script manages servers for CI:
```bash
# Start servers in CI mode
-CI_MODE=true python3 scripts/ci-server.py
+CI_MODE=true python3 ./ci-server.py
```
## 📊 Project Structure


@@ -5,7 +5,7 @@ from starlette.responses import JSONResponse, RedirectResponse, Response
from auth.core import verify_internal_auth
from auth.orm import Author
from auth.tokens.storage import TokenStorage
-from services.db import local_session
+from storage.db import local_session
from settings import (
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_MAX_AGE,


@@ -4,12 +4,14 @@
"""
import time
from sqlalchemy.orm.exc import NoResultFound
+from auth.orm import Author
from auth.state import AuthState
from auth.tokens.storage import TokenStorage as TokenManager
-from auth.orm import Author
from orm.community import CommunityAuthor
-from services.db import local_session
+from storage.db import local_session
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
@@ -50,7 +52,7 @@ async def verify_internal_auth(token: str) -> tuple[int, list, bool]:
with local_session() as session:
try:
# Author уже импортирован в начале файла
author = session.query(Author).where(Author.id == user_id).one()
# Получаем роли
@@ -112,14 +114,14 @@ async def get_auth_token_from_request(request) -> str | None:
"""
# Отложенный импорт для избежания циклических зависимостей
from auth.decorators import get_auth_token
return await get_auth_token(request)
async def authenticate(request) -> AuthState:
"""
Получает токен из запроса и проверяет авторизацию.
Args:
request: Объект запроса
@@ -146,4 +148,3 @@ async def authenticate(request) -> AuthState:
auth_state.author_id = str(user_id)
auth_state.is_admin = is_admin
return auth_state


@@ -5,28 +5,20 @@ from typing import Any
from graphql import GraphQLError, GraphQLResolveInfo
from sqlalchemy import exc
-from auth.credentials import AuthCredentials
-from auth.exceptions import OperationNotAllowedError
# Импорт базовых функций из реструктурированных модулей
from auth.core import authenticate
-from auth.utils import get_auth_token
+from auth.credentials import AuthCredentials
+from auth.exceptions import OperationNotAllowedError
from auth.orm import Author
+from auth.utils import get_auth_token, get_safe_headers
from orm.community import CommunityAuthor
-from services.db import local_session
+from storage.db import local_session
-from services.redis import redis as redis_adapter
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
-# Импортируем get_safe_headers из utils
-from auth.utils import get_safe_headers
-# get_auth_token теперь импортирован из auth.utils
async def validate_graphql_context(info: GraphQLResolveInfo) -> None:
"""
Проверяет валидность GraphQL контекста и проверяет авторизацию.


@@ -4,8 +4,8 @@ from auth.exceptions import ExpiredTokenError, InvalidPasswordError, InvalidToke
from auth.jwtcodec import JWTCodec
from auth.orm import Author
from auth.password import Password
-from services.db import local_session
+from storage.db import local_session
-from services.redis import redis
+from storage.redis import redis
from utils.logger import root_logger as logger
AuthorType = TypeVar("AuthorType", bound=Author)


@@ -7,7 +7,7 @@ DEPRECATED: Этот модуль переносится в auth/core.py
"""
# Импорт базовых функций из core модуля
-from auth.core import verify_internal_auth, create_internal_session, authenticate
+from auth.core import authenticate, create_internal_session, verify_internal_auth
# Re-export для обратной совместимости
-__all__ = ["verify_internal_auth", "create_internal_session", "authenticate"]
+__all__ = ["authenticate", "create_internal_session", "verify_internal_auth"]


@@ -40,9 +40,7 @@ class JWTCodec:
# Если время истечения не указано, устанавливаем дефолтное
if not expiration:
-expiration = datetime.datetime.now(datetime.UTC) + datetime.timedelta(
-days=JWT_REFRESH_TOKEN_EXPIRE_DAYS
-)
+expiration = datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=JWT_REFRESH_TOKEN_EXPIRE_DAYS)
logger.debug(f"[JWTCodec.encode] Время истечения не указано, устанавливаем срок: {expiration}")
# Формируем payload с временными метками # Формируем payload с временными метками


@@ -17,8 +17,8 @@ from starlette.types import ASGIApp
from auth.credentials import AuthCredentials
from auth.orm import Author
from auth.tokens.storage import TokenStorage as TokenManager
-from services.db import local_session
+from storage.db import local_session
-from services.redis import redis as redis_adapter
+from storage.redis import redis as redis_adapter
from settings import (
ADMIN_EMAILS as ADMIN_EMAILS_LIST,
)


@@ -13,8 +13,8 @@ from starlette.responses import JSONResponse, RedirectResponse
from auth.orm import Author
from auth.tokens.storage import TokenStorage
from orm.community import Community, CommunityAuthor, CommunityFollower
-from services.db import local_session
+from storage.db import local_session
-from services.redis import redis
+from storage.redis import redis
from settings import (
FRONTEND_URL,
OAUTH_CLIENTS,


@@ -3,7 +3,6 @@
"""
class AuthState:
"""
Класс для хранения информации о состоянии авторизации пользователя.


@@ -6,7 +6,7 @@ import asyncio
from typing import Any, Dict, List
from auth.jwtcodec import JWTCodec
-from services.redis import redis as redis_adapter
+from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager


@@ -5,7 +5,7 @@
import asyncio
from typing import Any, Dict
-from services.redis import redis as redis_adapter
+from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager


@@ -5,7 +5,7 @@
import json
import time
-from services.redis import redis as redis_adapter
+from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
@@ -84,9 +84,7 @@ class OAuthTokenManager(BaseTokenManager):
return await self._get_oauth_data_optimized(token_type, str(user_id), provider)
return None
-async def _get_oauth_data_optimized(
-self, token_type: TokenType, user_id: str, provider: str
-) -> TokenData | None:
+async def _get_oauth_data_optimized(self, token_type: TokenType, user_id: str, provider: str) -> TokenData | None:
"""Оптимизированное получение OAuth данных"""
if not user_id or not provider:
error_msg = "OAuth токены требуют user_id и provider"


@@ -7,7 +7,7 @@ import time
from typing import Any, List
from auth.jwtcodec import JWTCodec
-from services.redis import redis as redis_adapter
+from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager


@@ -6,7 +6,7 @@ import json
import secrets
import time
-from services.redis import redis as redis_adapter
+from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager


@@ -4,6 +4,7 @@
"""
from typing import Any
from settings import SESSION_COOKIE_NAME, SESSION_TOKEN_HEADER
from utils.logger import root_logger as logger
@@ -113,26 +114,25 @@ async def get_auth_token(request: Any) -> str | None:
token = auth_header.replace("Bearer ", "", 1).strip()
logger.debug(f"[decorators] Извлечен Bearer токен: {len(token)}")
return token
-else:
-logger.debug("[decorators] Authorization заголовок не содержит Bearer токен")
+logger.debug("[decorators] Authorization заголовок не содержит Bearer токен")
# 6. Проверяем cookies
if hasattr(request, "cookies") and request.cookies:
if isinstance(request.cookies, dict):
cookies = request.cookies
elif hasattr(request.cookies, "get"):
-cookies = {k: request.cookies.get(k) for k in getattr(request.cookies, "keys", lambda: [])()}
+cookies = {k: request.cookies.get(k) for k in getattr(request.cookies, "keys", list)()}
else:
cookies = {}
logger.debug(f"[decorators] Доступные cookies: {list(cookies.keys())}")
# Проверяем кастомную cookie
if SESSION_COOKIE_NAME in cookies:
token = cookies[SESSION_COOKIE_NAME]
logger.debug(f"[decorators] Токен найден в cookie {SESSION_COOKIE_NAME}: {len(token)}")
return token
# Проверяем стандартную cookie
if "auth_token" in cookies:
token = cookies["auth_token"]
@@ -150,29 +150,29 @@ async def get_auth_token(request: Any) -> str | None:
def extract_bearer_token(auth_header: str) -> str | None:
"""
Извлекает токен из заголовка Authorization с Bearer схемой.
Args:
auth_header: Заголовок Authorization
Returns:
Optional[str]: Извлеченный токен или None
"""
if not auth_header:
return None
if auth_header.startswith("Bearer "):
return auth_header[7:].strip()
return None
def format_auth_header(token: str) -> str:
"""
Форматирует токен в заголовок Authorization.
Args:
token: Токен авторизации
Returns:
str: Отформатированный заголовок
"""

cache/cache.py (4 lines changed)

@@ -37,8 +37,8 @@ from sqlalchemy import and_, join, select
from auth.orm import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
-from services.db import local_session
+from storage.db import local_session
-from services.redis import redis
+from storage.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger

cache/precache.py (7 lines changed)

@@ -3,14 +3,15 @@ import traceback
from sqlalchemy import and_, join, select
+from auth.orm import Author, AuthorFollower
# Импорт Author, AuthorFollower отложен для избежания циклических импортов
from cache.cache import cache_author, cache_topic
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
-from auth.orm import Author, AuthorFollower
+from storage.db import local_session
-from services.db import local_session
+from storage.redis import redis
-from services.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger


@@ -9,7 +9,7 @@ from cache.cache import (
invalidate_cache_by_prefix,
)
from resolvers.stat import get_with_stat
-from services.redis import redis
+from storage.redis import redis
from utils.logger import root_logger as logger
CACHE_REVALIDATION_INTERVAL = 300  # 5 minutes

cache/triggers.py (5 lines changed)

@@ -1,12 +1,13 @@
from sqlalchemy import event
+from auth.orm import Author, AuthorFollower
# Импорт Author, AuthorFollower отложен для избежания циклических импортов
from cache.revalidator import revalidation_manager
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
-from services.db import local_session
+from storage.db import local_session
-from auth.orm import Author, AuthorFollower
from utils.logger import root_logger as logger


@@ -20,34 +20,8 @@ import requests
from sqlalchemy import inspect
from orm.base import Base
-from services.db import engine
+from storage.db import engine
+from utils.logger import root_logger as logger
-# Создаем собственный логгер без дублирования
-def create_ci_logger():
-"""Создает логгер для CI без дублирования"""
-logger = logging.getLogger("ci-server")
-logger.setLevel(logging.INFO)
-# Убираем существующие обработчики
-logger.handlers.clear()
-# Создаем форматтер
-formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
-# Создаем обработчик
-handler = logging.StreamHandler()
-handler.setFormatter(formatter)
-logger.addHandler(handler)
-# Отключаем пропагацию к root logger
-logger.propagate = False
-return logger
-logger = create_ci_logger()
class CIServerManager:
"""Менеджер CI серверов"""
@@ -257,9 +231,10 @@ def run_tests_in_ci():
try:
ruff_result = subprocess.run(
["uv", "run", "ruff", "check", "."],
-check=False, capture_output=False,
+check=False,
+capture_output=False,
text=True,
-timeout=300 # 5 минут на linting
+timeout=300, # 5 минут на linting
)
if ruff_result.returncode == 0:
logger.info("✅ Ruff проверка прошла успешно")
@@ -275,9 +250,10 @@ def run_tests_in_ci():
try:
ruff_format_result = subprocess.run(
["uv", "run", "ruff", "format", "--check", "."],
-check=False, capture_output=False,
+check=False,
+capture_output=False,
text=True,
-timeout=300 # 5 минут на проверку форматирования
+timeout=300, # 5 минут на проверку форматирования
)
if ruff_format_result.returncode == 0:
logger.info("✅ Форматирование корректно")
@@ -293,9 +269,10 @@ def run_tests_in_ci():
try:
mypy_result = subprocess.run(
["uv", "run", "mypy", ".", "--ignore-missing-imports"],
-check=False, capture_output=False,
+check=False,
+capture_output=False,
text=True,
-timeout=600 # 10 минут на type checking
+timeout=600, # 10 минут на type checking
)
if mypy_result.returncode == 0:
logger.info("✅ MyPy проверка прошла успешно")
@@ -311,7 +288,8 @@ def run_tests_in_ci():
try:
health_result = subprocess.run(
["uv", "run", "pytest", "tests/test_server_health.py", "-v"],
-check=False, capture_output=False,
+check=False,
+capture_output=False,
text=True,
timeout=120, # 2 минуты на проверку здоровья
)
@@ -339,7 +317,8 @@ def run_tests_in_ci():
# Запускаем тесты с выводом в реальном времени
result = subprocess.run(
cmd,
-check=False, capture_output=False, # Потоковый вывод
+check=False,
+capture_output=False, # Потоковый вывод
text=True,
timeout=600, # 10 минут на тесты
)


@@ -61,7 +61,7 @@ await TokenStorage.revoke_session(token)
#### Обновленный API:
```python
-from services.redis import redis
+from storage.redis import redis
# Базовые операции
await redis.get(key)
@@ -190,7 +190,7 @@ compat = CompatibilityMethods()
await compat.get(token_key)
# Стало
-from services.redis import redis
+from storage.redis import redis
result = await redis.get(token_key)
```
@@ -263,7 +263,7 @@ pytest tests/auth/ -v
# Проверка Redis подключения
python -c "
import asyncio
-from services.redis import redis
+from storage.redis import redis
async def test():
result = await redis.ping()
print(f'Redis connection: {result}')


@@ -210,7 +210,7 @@ class MockInfo:
self.field_nodes = [MockFieldNode(requested_fields or [])]
# Патчинг зависимостей
-@patch('services.redis.aioredis')
+@patch('storage.redis.aioredis')
def test_redis_connection(mock_aioredis):
# Тест логики
pass

main.py (12 lines changed)

@@ -21,10 +21,10 @@ from auth.middleware import AuthMiddleware, auth_middleware
from auth.oauth import oauth_callback, oauth_login
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
-from services.exception import ExceptionHandlerMiddleware
+from rbac import initialize_rbac
-from services.rbac_init import initialize_rbac
+from utils.exception import ExceptionHandlerMiddleware
-from services.redis import redis
+from storage.redis import redis
-from services.schema import create_all_tables, resolvers
+from storage.schema import create_all_tables, resolvers
from services.search import check_search_service, initialize_search_index_background, search_service
from services.viewed import ViewedStorage
from settings import DEV_SERVER_PID_FILE_NAME
@@ -211,10 +211,10 @@ async def lifespan(app: Starlette):
try:
print("[lifespan] Starting application initialization")
create_all_tables()
# Инициализируем RBAC систему с dependency injection
initialize_rbac()
await asyncio.gather(
redis.connect(),
precache_data(),


@@ -13,15 +13,15 @@ from sqlalchemy import (
UniqueConstraint,
distinct,
func,
+text,
)
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import Mapped, mapped_column
from auth.orm import Author
from orm.base import BaseModel
-from orm.shout import Shout
+from rbac.interface import get_rbac_operations
-from services.db import local_session
+from storage.db import local_session
-from auth.rbac_interface import get_rbac_operations
# Словарь названий ролей
role_names = {
@@ -355,7 +355,13 @@ class CommunityStats:
@property
def shouts(self) -> int:
-return self.community.session.query(func.count(Shout.id)).filter(Shout.community == self.community.id).scalar()
+return (
+self.community.session.query(func.count(1))
+.select_from(text("shout"))
+.filter(text("shout.community_id = :community_id"))
+.params(community_id=self.community.id)
+.scalar()
+)
@property
def followers(self) -> int:
@@ -370,12 +376,10 @@ class CommunityStats:
# author has a shout with community id and its featured_at is not null
return (
self.community.session.query(func.count(distinct(Author.id)))
-.join(Shout)
-.filter(
-Shout.community == self.community.id,
-Shout.featured_at.is_not(None),
-Author.id.in_(Shout.authors),
-)
+.select_from(text("author"))
+.join(text("shout"), text("author.id IN (SELECT author_id FROM shout_author WHERE shout_id = shout.id)"))
+.filter(text("shout.community_id = :community_id"), text("shout.featured_at IS NOT NULL"))
+.params(community_id=self.community.id)
.scalar()
)
@@ -498,7 +502,7 @@ class CommunityAuthor(BaseModel):
# Используем fallback на проверку ролей
return permission in self.role_list
except Exception:
-# FIXME: Fallback: проверяем роли (старый способ)
+# TODO: Fallback: проверяем роли (старый способ)
return any(permission == role for role in self.role_list)
def dict(self, access: bool = False) -> dict[str, Any]:
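For reference, a self-contained sketch of the `text()` plus bound-parameter counting pattern that `CommunityStats.shouts` now uses, run against a throwaway in-memory SQLite table; the table layout here is illustrative and not the project's real `shout` schema.

```python
# Minimal sketch, assuming only SQLAlchemy and an in-memory SQLite database.
from sqlalchemy import create_engine, func, text
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///:memory:")
with engine.begin() as conn:
    conn.execute(text("CREATE TABLE shout (id INTEGER PRIMARY KEY, community_id INTEGER)"))
    conn.execute(text("INSERT INTO shout (community_id) VALUES (1), (1), (2)"))

with Session(engine) as session:
    # Counting over a textual FROM/WHERE keeps the query free of ORM model imports.
    shouts_in_community = (
        session.query(func.count(1))
        .select_from(text("shout"))
        .filter(text("shout.community_id = :community_id"))
        .params(community_id=1)
        .scalar()
    )
    print(shouts_in_community)  # 2
```

The trade-off is that the textual SQL bypasses the ORM's knowledge of the `Shout` model, which is exactly what lets `orm/community.py` drop its direct import of `orm/shout.py`.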


@@ -8,6 +8,7 @@ from auth.orm import Author
from orm.base import BaseModel as Base
from orm.topic import Topic
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""


@@ -10,6 +10,7 @@ from auth.orm import Author
from orm.base import BaseModel as Base
from utils.logger import root_logger as logger
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""


@@ -7,6 +7,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from auth.orm import Author
from orm.base import BaseModel as Base
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""


@@ -4,19 +4,10 @@ from typing import Any
from sqlalchemy import JSON, Boolean, ForeignKey, Index, Integer, PrimaryKeyConstraint, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
-# Импорт Author отложен для избежания циклических импортов
+from orm.base import BaseModel
-from auth.orm import Author
-from orm.base import BaseModel as Base
-from orm.reaction import Reaction
-from orm.topic import Topic
-# Author уже импортирован в начале файла
-def get_author_model():
-"""Возвращает модель Author для использования в запросах"""
-return Author
-class ShoutTopic(Base):
+class ShoutTopic(BaseModel):
"""
Связь между публикацией и темой.
@@ -40,7 +31,7 @@ class ShoutTopic(Base):
)
-class ShoutReactionsFollower(Base):
+class ShoutReactionsFollower(BaseModel):
__tablename__ = "shout_reactions_followers"
follower: Mapped[int] = mapped_column(ForeignKey("author.id"), index=True)
@@ -57,7 +48,7 @@ class ShoutReactionsFollower(Base):
)
-class ShoutAuthor(Base):
+class ShoutAuthor(BaseModel):
"""
Связь между публикацией и автором.
@@ -81,7 +72,7 @@ class ShoutAuthor(Base):
)
-class Shout(Base):
+class Shout(BaseModel):
"""
Публикация в системе.
"""


@@ -14,6 +14,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from auth.orm import Author
from orm.base import BaseModel as Base
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""

package-lock.json (852 lines changed; diff suppressed because it is too large)


@@ -19,24 +19,20 @@
"@graphql-codegen/typescript": "^4.1.6",
"@graphql-codegen/typescript-operations": "^4.6.1",
"@graphql-codegen/typescript-resolvers": "^4.5.1",
+"@solidjs/router": "^0.15.3",
"@types/node": "^24.1.0",
-"@types/prettier": "^2.7.3",
"@types/prismjs": "^1.26.5",
"graphql": "^16.11.0",
"graphql-tag": "^2.12.6",
"lightningcss": "^1.30.1",
-"prettier": "^3.6.2",
"prismjs": "^1.30.0",
"solid-js": "^1.9.7",
"terser": "^5.43.0",
"typescript": "^5.8.3",
-"vite": "^7.0.6",
+"vite": "^7.1.2",
"vite-plugin-solid": "^2.11.7"
},
"overrides": {
-"vite": "^7.0.6"
+"vite": "^7.1.2"
},
"dependencies": {
"@solidjs/router": "^0.15.3"
}
}


@@ -222,6 +222,7 @@ ignore = [
"UP006", # use Set as type
"UP035", # use Set as type
"PERF401", # list comprehension - иногда нужно
+"PLC0415", # импорты не в начале файла - иногда нужно
"ANN201", # Missing return type annotation for private function `wrapper` - иногда нужно
]


@@ -1,24 +1,17 @@
-"""
-Модуль инициализации RBAC системы.
-Настраивает dependency injection для разрешения циклических зависимостей.
-Должен вызываться при старте приложения.
-"""
-from auth.rbac_interface import set_community_queries, set_rbac_operations
+from rbac.interface import set_community_queries, set_rbac_operations
from utils.logger import root_logger as logger
def initialize_rbac() -> None:
"""
Инициализирует RBAC систему с dependency injection.
Должна быть вызвана один раз при старте приложения после импорта всех модулей.
"""
-from services.rbac_impl import community_queries, rbac_operations
+from rbac.operations import community_queries, rbac_operations
# Устанавливаем реализации
set_rbac_operations(rbac_operations)
set_community_queries(community_queries)
logger.info("🧿 RBAC система инициализирована с dependency injection")


@@ -13,8 +13,8 @@ from functools import wraps
from typing import Any, Callable
from auth.orm import Author
-from auth.rbac_interface import get_community_queries, get_rbac_operations
+from rbac.interface import get_community_queries, get_rbac_operations
-from services.db import local_session
+from storage.db import local_session
from settings import ADMIN_EMAILS
from utils.logger import root_logger as logger
@@ -49,30 +49,31 @@ async def get_permissions_for_role(role: str, community_id: int) -> list[str]:
async def update_all_communities_permissions() -> None:
"""
Обновляет права для всех существующих сообществ на основе актуальных дефолтных настроек.
Используется в админ-панели для применения изменений в правах на все сообщества.
"""
rbac_ops = get_rbac_operations()
# Поздний импорт для избежания циклических зависимостей
from orm.community import Community
try:
with local_session() as session:
# Получаем все сообщества
communities = session.query(Community).all()
for community in communities:
# Сбрасываем кеш прав для каждого сообщества
-from services.redis import redis
+from storage.redis import redis
key = f"community:roles:{community.id}"
await redis.execute("DEL", key)
# Переинициализируем права с актуальными дефолтными настройками
await rbac_ops.initialize_community_permissions(community.id)
logger.info(f"Обновлены права для {len(communities)} сообществ")
except Exception as e:
logger.error(f"Ошибка при обновлении прав всех сообществ: {e}", exc_info=True)
raise
@@ -252,7 +253,7 @@ def get_community_id_from_context(info) -> int:
return community.id
logger.warning(f"[get_community_id_from_context] Сообщество с slug {slug} не найдено")
except Exception as e:
-logger.error(f"[get_community_id_from_context] Ошибка при поиске community_id: {e}")
+logger.exception(f"[get_community_id_from_context] Ошибка при поиске community_id: {e}")
# Пробуем из прямых аргументов
if hasattr(info, "field_asts") and info.field_asts:


@@ -5,14 +5,13 @@
не импортирует ORM модели и не создает циклических зависимостей.
"""
-from abc import ABC, abstractmethod
from typing import Any, Protocol
class RBACOperations(Protocol):
"""
Протокол для RBAC операций, позволяющий ORM моделям
-выполнять операции с правами без прямого импорта services.rbac
+выполнять операции с правами без прямого импорта rbac.api
"""
async def get_permissions_for_role(self, role: str, community_id: int) -> list[str]:
@@ -29,9 +28,7 @@ class RBACOperations(Protocol):
"""Проверяет разрешение пользователя в сообществе"""
...
-async def _roles_have_permission(
-self, role_slugs: list[str], permission: str, community_id: int
-) -> bool:
+async def _roles_have_permission(self, role_slugs: list[str], permission: str, community_id: int) -> bool:
"""Проверяет, есть ли у набора ролей конкретное разрешение в сообществе"""
...
@@ -42,9 +39,7 @@ class CommunityAuthorQueries(Protocol):
выполнять запросы без прямого импорта ORM моделей
"""
-def get_user_roles_in_community(
-self, author_id: int, community_id: int, session: Any = None
-) -> list[str]:
+def get_user_roles_in_community(self, author_id: int, community_id: int, session: Any = None) -> list[str]:
"""Получает роли пользователя в сообществе"""
...
@@ -56,13 +51,13 @@ _community_queries: CommunityAuthorQueries | None = None
def set_rbac_operations(ops: RBACOperations) -> None:
"""Устанавливает реализацию RBAC операций"""
-global _rbac_operations
+global _rbac_operations  # noqa: PLW0603
_rbac_operations = ops
def set_community_queries(queries: CommunityAuthorQueries) -> None:
"""Устанавливает реализацию запросов сообщества"""
-global _community_queries
+global _community_queries  # noqa: PLW0603
_community_queries = queries
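The setters above are one half of the dependency-injection seam; below is a trimmed-down sketch of how they are typically paired with the getter side that consumers such as `orm/community.py` import. The permission-check method name and error message here are illustrative, not the module's exact API.

```python
# Hypothetical, condensed version of the interface module; names are illustrative.
from typing import Protocol


class RBACOperations(Protocol):
    async def check_permission(self, author_id: int, permission: str, community_id: int) -> bool: ...


_rbac_operations: RBACOperations | None = None


def set_rbac_operations(ops: RBACOperations) -> None:
    """Called once at application startup by the implementation side."""
    global _rbac_operations  # noqa: PLW0603
    _rbac_operations = ops


def get_rbac_operations() -> RBACOperations:
    """Consumers call this instead of importing rbac.operations directly."""
    if _rbac_operations is None:
        raise RuntimeError("RBAC operations are not initialized; call initialize_rbac() first")
    return _rbac_operations
```

At startup `initialize_rbac()` imports the concrete implementations from `rbac.operations` and hands them to the setters, so ORM code only ever depends on this lightweight interface module.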


@@ -5,24 +5,21 @@
не импортирует ORM модели напрямую, используя dependency injection.
"""
-import asyncio
import json
from pathlib import Path
from typing import Any
-from auth.orm import Author
-from auth.rbac_interface import CommunityAuthorQueries, RBACOperations, get_community_queries
-from services.db import local_session
-from services.redis import redis
-from settings import ADMIN_EMAILS
+from rbac.interface import CommunityAuthorQueries, RBACOperations, get_community_queries
+from storage.db import local_session
+from storage.redis import redis
from utils.logger import root_logger as logger
# --- Загрузка каталога сущностей и дефолтных прав ---
-with Path("services/permissions_catalog.json").open() as f:
+with Path("rbac/permissions_catalog.json").open() as f:
PERMISSIONS_CATALOG = json.load(f)
-with Path("services/default_role_permissions.json").open() as f:
+with Path("rbac/default_role_permissions.json").open() as f:
DEFAULT_ROLE_PERMISSIONS = json.load(f)
role_names = list(DEFAULT_ROLE_PERMISSIONS.keys())
@@ -169,9 +166,7 @@ class RBACOperationsImpl(RBACOperations):
class CommunityAuthorQueriesImpl(CommunityAuthorQueries):
"""Конкретная реализация запросов CommunityAuthor через поздний импорт"""
-def get_user_roles_in_community(
-self, author_id: int, community_id: int = 1, session: Any = None
-) -> list[str]:
+def get_user_roles_in_community(self, author_id: int, community_id: int = 1, session: Any = None) -> list[str]:
"""
Получает роли пользователя в сообществе через новую систему CommunityAuthor
"""


@@ -17,14 +17,14 @@ from orm.draft import DraftTopic
from orm.reaction import Reaction
from orm.shout import Shout, ShoutTopic
from orm.topic import Topic, TopicFollower
+from rbac.api import update_all_communities_permissions
from resolvers.editor import delete_shout, update_shout
from resolvers.topic import invalidate_topic_followers_cache, invalidate_topics_cache
from services.admin import AdminService
-from services.common_result import handle_error
+from utils.common_result import handle_error
-from services.db import local_session
+from storage.db import local_session
-from services.rbac import update_all_communities_permissions
+from storage.redis import redis
-from services.redis import redis
+from storage.schema import mutation, query
-from services.schema import mutation, query
from utils.logger import root_logger as logger
admin_service = AdminService()


@@ -8,7 +8,7 @@ from graphql import GraphQLResolveInfo
from starlette.responses import JSONResponse
from services.auth import auth_service
-from services.schema import mutation, query, type_author
+from storage.schema import mutation, query, type_author
from settings import SESSION_COOKIE_NAME
from utils.logger import root_logger as logger


@@ -21,10 +21,10 @@ from orm.community import Community, CommunityAuthor, CommunityFollower
from orm.shout import Shout, ShoutAuthor
from resolvers.stat import get_with_stat
from services.auth import login_required
-from services.common_result import CommonResult
+from utils.common_result import CommonResult
-from services.db import local_session
+from storage.db import local_session
-from services.redis import redis
+from storage.redis import redis
-from services.schema import mutation, query
+from storage.schema import mutation, query
from utils.logger import root_logger as logger
DEFAULT_COMMUNITIES = [1]
@@ -450,9 +450,7 @@ async def load_authors_search(_: None, info: GraphQLResolveInfo, **kwargs: Any)
return []
-def get_author_id_from(
-slug: str | None = None, user: str | None = None, author_id: int | None = None
-) -> int | None:
+def get_author_id_from(slug: str | None = None, user: str | None = None, author_id: int | None = None) -> int | None:
"""Get author ID from different identifiers"""
try:
if author_id:


@@ -7,9 +7,9 @@ from auth.orm import AuthorBookmark
from orm.shout import Shout
from resolvers.reader import apply_options, get_shouts_with_links, query_with_stat
from services.auth import login_required
-from services.common_result import CommonResult
+from utils.common_result import CommonResult
-from services.db import local_session
+from storage.db import local_session
-from services.schema import mutation, query
+from storage.schema import mutation, query
@query.field("load_shouts_bookmarked")


@@ -4,8 +4,8 @@ from auth.orm import Author
from orm.invite import Invite, InviteStatus
from orm.shout import Shout
from services.auth import login_required
-from services.db import local_session
+from storage.db import local_session
-from services.schema import mutation
+from storage.schema import mutation
@mutation.field("accept_invite")


@@ -6,9 +6,9 @@ from sqlalchemy.orm import joinedload
from auth.decorators import editor_or_admin_required
from auth.orm import Author
from orm.collection import Collection, ShoutCollection
-from services.db import local_session
+from rbac.api import require_any_permission
-from services.rbac import require_any_permission
+from storage.db import local_session
-from services.schema import mutation, query, type_collection
+from storage.schema import mutation, query, type_collection
from utils.logger import root_logger as logger


@@ -7,15 +7,15 @@ from sqlalchemy import distinct, func
from auth.orm import Author
from orm.community import Community, CommunityAuthor, CommunityFollower
from orm.shout import Shout, ShoutAuthor
-from services.db import local_session
+from rbac.api import (
-from services.rbac import (
RBACError,
get_user_roles_from_context,
require_any_permission,
require_permission,
roles_have_permission,
)
-from services.schema import mutation, query, type_community
+from storage.db import local_session
+from storage.schema import mutation, query, type_community
from utils.logger import root_logger as logger


@@ -12,9 +12,9 @@ from cache.cache import (
from orm.draft import Draft, DraftAuthor, DraftTopic
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from services.auth import login_required
-from services.db import local_session
+from storage.db import local_session
from services.notify import notify_shout
-from services.schema import mutation, query
+from storage.schema import mutation, query
from services.search import search_service
from utils.extract_text import extract_text
from utils.logger import root_logger as logger


@@ -19,10 +19,10 @@ from orm.topic import Topic
from resolvers.follower import follow
from resolvers.stat import get_with_stat
from services.auth import login_required
-from services.common_result import CommonResult
+from utils.common_result import CommonResult
-from services.db import local_session
+from storage.db import local_session
from services.notify import notify_shout
-from services.schema import mutation, query
+from storage.schema import mutation, query
from services.search import search_service
from utils.extract_text import extract_text
from utils.logger import root_logger as logger


@@ -13,8 +13,8 @@ from resolvers.reader import (
query_with_stat,
)
from services.auth import login_required
-from services.db import local_session
+from storage.db import local_session
-from services.schema import query
+from storage.schema import query
from utils.logger import root_logger as logger


@@ -16,10 +16,10 @@ from orm.community import Community, CommunityFollower
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.auth import login_required
-from services.db import local_session
+from storage.db import local_session
from services.notify import notify_follower
-from services.redis import redis
+from storage.redis import redis
-from services.schema import mutation, query
+from storage.schema import mutation, query
from utils.logger import root_logger as logger


@@ -17,8 +17,8 @@ from orm.notification import (
)
from orm.shout import Shout
from services.auth import login_required
-from services.db import local_session
+from storage.db import local_session
-from services.schema import mutation, query
+from storage.schema import mutation, query
from utils.logger import root_logger as logger


@@ -3,7 +3,7 @@ from sqlalchemy import and_
from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
-from services.db import local_session
+from storage.db import local_session
from utils.diff import apply_diff, get_diff


@@ -8,8 +8,8 @@ from auth.orm import Author, AuthorRating
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor
from services.auth import login_required
-from services.db import local_session
+from storage.db import local_session
-from services.schema import mutation, query
+from storage.schema import mutation, query
from utils.logger import root_logger as logger


@@ -21,9 +21,9 @@ from resolvers.follower import follow
from resolvers.proposals import handle_proposing
from resolvers.stat import update_author_stat
from services.auth import add_user_role, login_required
-from services.db import local_session
+from storage.db import local_session
from services.notify import notify_reaction
-from services.schema import mutation, query
+from storage.schema import mutation, query
from utils.logger import root_logger as logger


@@ -10,8 +10,8 @@ from auth.orm import Author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
-from services.db import json_array_builder, json_builder, local_session
+from storage.db import json_array_builder, json_builder, local_session
-from services.schema import query
+from storage.schema import query
from services.search import SearchService, search_text
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger


@@ -13,7 +13,7 @@ from orm.community import Community, CommunityFollower
from orm.reaction import Reaction, ReactionKind from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower from orm.topic import Topic, TopicFollower
from services.db import local_session from storage.db import local_session
from utils.logger import root_logger as logger from utils.logger import root_logger as logger
# Type alias for queries # Type alias for queries
@@ -434,9 +434,7 @@ def get_following_count(entity_type: str, entity_id: int) -> int:
return 0 return 0
def get_shouts_count( def get_shouts_count(author_id: int | None = None, topic_id: int | None = None, community_id: int | None = None) -> int:
author_id: int | None = None, topic_id: int | None = None, community_id: int | None = None
) -> int:
"""Получает количество публикаций""" """Получает количество публикаций"""
try: try:
with local_session() as session: with local_session() as session:
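Для наглядности — примерный набросок того, как подобный подсчёт публикаций может быть устроен поверх local_session (в диффе видны только сигнатура и начало тела; имена колонок связующих таблиц ShoutAuthor.author/ShoutTopic.topic — предположение):

```python
from sqlalchemy import func, select

from orm.shout import Shout, ShoutAuthor, ShoutTopic
from storage.db import local_session


def count_shouts(author_id: int | None = None, topic_id: int | None = None) -> int:
    """Упрощённый набросок; реальные имена колонок ассоциативных таблиц — предположение."""
    with local_session() as session:
        q = select(func.count(Shout.id))
        if author_id is not None:
            q = q.join(ShoutAuthor, ShoutAuthor.shout == Shout.id).where(ShoutAuthor.author == author_id)
        if topic_id is not None:
            q = q.join(ShoutTopic, ShoutTopic.shout == Shout.id).where(ShoutTopic.topic == topic_id)
        return session.execute(q).scalar() or 0
```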

View File

@@ -18,11 +18,11 @@ from orm.draft import DraftTopic
from orm.reaction import Reaction, ReactionKind from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower from orm.topic import Topic, TopicFollower
from rbac.api import require_any_permission, require_permission
from resolvers.stat import get_with_stat from resolvers.stat import get_with_stat
from services.db import local_session from storage.db import local_session
from services.rbac import require_any_permission, require_permission from storage.redis import redis
from services.redis import redis from storage.schema import mutation, query
from services.schema import mutation, query
from utils.logger import root_logger as logger from utils.logger import root_logger as logger

View File

@@ -1,119 +0,0 @@
#!/bin/bash
# Локальный тест CI - запускает серверы и тесты как в GitHub Actions
set -e # Останавливаемся при ошибке
echo "🚀 Запуск локального CI теста..."
# Проверяем что мы в корневой папке
if [ ! -f "pyproject.toml" ]; then
echo "❌ Запустите скрипт из корневой папки проекта"
exit 1
fi
# Очищаем предыдущие процессы
echo "🧹 Очищаем предыдущие процессы..."
pkill -f "python dev.py" || true
pkill -f "npm run dev" || true
pkill -f "vite" || true
pkill -f "ci-server.py" || true
rm -f backend.pid frontend.pid ci-server.pid
# Проверяем зависимости
echo "📦 Проверяем зависимости..."
if ! command -v uv &> /dev/null; then
echo "❌ uv не установлен. Установите uv: https://docs.astral.sh/uv/getting-started/installation/"
exit 1
fi
if ! command -v npm &> /dev/null; then
echo "❌ npm не установлен. Установите Node.js: https://nodejs.org/"
exit 1
fi
# Устанавливаем зависимости
echo "📥 Устанавливаем Python зависимости..."
uv sync --group dev
echo "📥 Устанавливаем Node.js зависимости..."
cd panel
npm ci
cd ..
# Создаем тестовую базу
echo "🗄️ Инициализируем тестовую базу..."
touch database.db
uv run python -c "
from orm.base import Base
from orm.community import Community, CommunityFollower, CommunityAuthor
from orm.draft import Draft
from orm.invite import Invite
from orm.notification import Notification
from orm.rating import Rating
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic
from services.db import get_engine
engine = get_engine()
Base.metadata.create_all(engine)
print('Test database initialized')
"
# Запускаем серверы
echo "🚀 Запускаем серверы..."
python scripts/ci-server.py &
CI_PID=$!
echo "CI Server PID: $CI_PID"
# Ждем готовности серверов
echo "⏳ Ждем готовности серверов..."
# set -e прервал бы скрипт до проверки $?, поэтому таймаут проверяем прямо в if
if ! timeout 120 bash -c '
  while true; do
    if curl -f http://localhost:8000/ > /dev/null 2>&1 && \
       curl -f http://localhost:3000/ > /dev/null 2>&1; then
      echo "✅ Все серверы готовы!"
      break
    fi
    echo "⏳ Ожидаем серверы..."
    sleep 2
  done
'; then
  echo "❌ Таймаут ожидания серверов"
  kill $CI_PID 2>/dev/null || true
  exit 1
fi
echo "🎯 Серверы запущены! Запускаем тесты..."
# Запускаем тесты
echo "🧪 Запускаем unit тесты..."
uv run pytest tests/ -m "not e2e" -v --tb=short
echo "🧪 Запускаем integration тесты..."
uv run pytest tests/ -m "integration" -v --tb=short
echo "🧪 Запускаем E2E тесты..."
uv run pytest tests/ -m "e2e" -v --tb=short
echo "🧪 Запускаем browser тесты..."
uv run pytest tests/ -m "browser" -v --tb=short || echo "⚠️ Browser тесты завершились с ошибками"
# Генерируем отчет о покрытии
echo "📊 Генерируем отчет о покрытии..."
uv run pytest tests/ --cov=. --cov-report=html
echo "🎉 Все тесты завершены!"
# Очищаем
echo "🧹 Очищаем ресурсы..."
kill $CI_PID 2>/dev/null || true
pkill -f "python dev.py" || true
pkill -f "npm run dev" || true
pkill -f "vite" || true
rm -f backend.pid frontend.pid ci-server.pid
echo "✅ Локальный CI тест завершен!"

View File

@@ -14,17 +14,11 @@ from auth.orm import Author
from orm.community import Community, CommunityAuthor, role_descriptions, role_names from orm.community import Community, CommunityAuthor, role_descriptions, role_names
from orm.invite import Invite, InviteStatus from orm.invite import Invite, InviteStatus
from orm.shout import Shout from orm.shout import Shout
from services.db import local_session from storage.db import local_session
from services.env import EnvVariable, env_manager from storage.env import EnvVariable, env_manager
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger from utils.logger import root_logger as logger
# Отложенный импорт Author для избежания циклических импортов
def get_author_model():
"""Возвращает модель Author для использования в admin"""
from auth.orm import Author
return Author
class AdminService: class AdminService:
"""Сервис для админ-панели с бизнес-логикой""" """Сервис для админ-панели с бизнес-логикой"""
@@ -59,7 +53,6 @@ class AdminService:
"slug": "system", "slug": "system",
} }
Author = get_author_model()
author = session.query(Author).where(Author.id == author_id).first() author = session.query(Author).where(Author.id == author_id).first()
if author: if author:
return { return {

View File

@@ -29,8 +29,8 @@ from orm.community import (
assign_role_to_user, assign_role_to_user,
get_user_roles_in_community, get_user_roles_in_community,
) )
from services.db import local_session from storage.db import local_session
from services.redis import redis from storage.redis import redis
from settings import ( from settings import (
ADMIN_EMAILS, ADMIN_EMAILS,
SESSION_COOKIE_NAME, SESSION_COOKIE_NAME,
@@ -39,11 +39,6 @@ from settings import (
from utils.generate_slug import generate_unique_slug from utils.generate_slug import generate_unique_slug
from utils.logger import root_logger as logger from utils.logger import root_logger as logger
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в auth"""
return Author
# Список разрешенных заголовков # Список разрешенных заголовков
ALLOWED_HEADERS = ["Authorization", "Content-Type"] ALLOWED_HEADERS = ["Authorization", "Content-Type"]
@@ -113,7 +108,6 @@ class AuthService:
# Проверяем админские права через email если нет роли админа # Проверяем админские права через email если нет роли админа
if not is_admin: if not is_admin:
with local_session() as session: with local_session() as session:
Author = get_author_model()
author = session.query(Author).where(Author.id == user_id_int).first() author = session.query(Author).where(Author.id == user_id_int).first()
if author and author.email in ADMIN_EMAILS.split(","): if author and author.email in ADMIN_EMAILS.split(","):
is_admin = True is_admin = True
@@ -167,7 +161,6 @@ class AuthService:
# Проверяем уникальность email # Проверяем уникальность email
with local_session() as session: with local_session() as session:
Author = get_author_model()
existing_user = session.query(Author).where(Author.email == user_dict["email"]).first() existing_user = session.query(Author).where(Author.email == user_dict["email"]).first()
if existing_user: if existing_user:
# Если пользователь с таким email уже существует, возвращаем его # Если пользователь с таким email уже существует, возвращаем его
@@ -180,7 +173,6 @@ class AuthService:
# Проверяем уникальность slug # Проверяем уникальность slug
with local_session() as session: with local_session() as session:
# Добавляем суффикс, если slug уже существует # Добавляем суффикс, если slug уже существует
Author = get_author_model()
counter = 1 counter = 1
unique_slug = base_slug unique_slug = base_slug
while session.query(Author).where(Author.slug == unique_slug).first(): while session.query(Author).where(Author.slug == unique_slug).first():
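Фрагмент выше обрывается на условии цикла; для ясности — набросок того, как такой подбор уникального slug обычно завершается (формат числового суффикса здесь — предположение):

```python
from auth.orm import Author
from storage.db import local_session


def make_unique_slug(base_slug: str) -> str:
    """Подбирает свободный slug, добавляя числовой суффикс (набросок по фрагменту диффа выше)."""
    with local_session() as session:
        counter = 1
        unique_slug = base_slug
        while session.query(Author).where(Author.slug == unique_slug).first():
            unique_slug = f"{base_slug}-{counter}"
            counter += 1
        return unique_slug
```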
@@ -267,7 +259,6 @@ class AuthService:
logger.info(f"Попытка регистрации для {email}") logger.info(f"Попытка регистрации для {email}")
with local_session() as session: with local_session() as session:
Author = get_author_model()
user = session.query(Author).where(Author.email == email).first() user = session.query(Author).where(Author.email == email).first()
if user: if user:
logger.warning(f"Пользователь {email} уже существует") logger.warning(f"Пользователь {email} уже существует")
@@ -307,7 +298,6 @@ class AuthService:
"""Отправляет ссылку подтверждения на email""" """Отправляет ссылку подтверждения на email"""
email = email.lower() email = email.lower()
with local_session() as session: with local_session() as session:
Author = get_author_model()
user = session.query(Author).where(Author.email == email).first() user = session.query(Author).where(Author.email == email).first()
if not user: if not user:
raise ObjectNotExistError("User not found") raise ObjectNotExistError("User not found")
@@ -345,7 +335,6 @@ class AuthService:
username = payload.get("username") username = payload.get("username")
with local_session() as session: with local_session() as session:
Author = get_author_model()
user = session.query(Author).where(Author.id == user_id).first() user = session.query(Author).where(Author.id == user_id).first()
if not user: if not user:
logger.warning(f"Пользователь с ID {user_id} не найден") logger.warning(f"Пользователь с ID {user_id} не найден")
@@ -380,7 +369,6 @@ class AuthService:
try: try:
with local_session() as session: with local_session() as session:
Author = get_author_model()
author = session.query(Author).where(Author.email == email).first() author = session.query(Author).where(Author.email == email).first()
if not author: if not author:
logger.warning(f"Пользователь {email} не найден") logger.warning(f"Пользователь {email} не найден")

View File

@@ -6,8 +6,8 @@ import orjson
from orm.notification import Notification from orm.notification import Notification
from orm.reaction import Reaction from orm.reaction import Reaction
from orm.shout import Shout from orm.shout import Shout
from services.db import local_session from storage.db import local_session
from services.redis import redis from storage.redis import redis
from utils.logger import root_logger as logger from utils.logger import root_logger as logger

View File

@@ -34,7 +34,7 @@ background_tasks = []
# Import Redis client if Redis caching is enabled # Import Redis client if Redis caching is enabled
if SEARCH_USE_REDIS: if SEARCH_USE_REDIS:
try: try:
from services.redis import redis from storage.redis import redis
logger.info("Redis client imported for search caching") logger.info("Redis client imported for search caching")
except ImportError: except ImportError:

View File

@@ -18,8 +18,8 @@ from google.analytics.data_v1beta.types import Filter as GAFilter
from auth.orm import Author from auth.orm import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic from orm.topic import Topic
from services.db import local_session from storage.db import local_session
from services.redis import redis from storage.redis import redis
from utils.logger import root_logger as logger from utils.logger import root_logger as logger
GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json") GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")

storage/__init__.py — новый пустой файл (0 строк)
View File

View File

@@ -2,7 +2,7 @@ import os
from dataclasses import dataclass from dataclasses import dataclass
from typing import ClassVar from typing import ClassVar
from services.redis import redis from storage.redis import redis
from utils.logger import root_logger as logger from utils.logger import root_logger as logger

View File

@@ -9,10 +9,11 @@ from ariadne import (
load_schema_from_path, load_schema_from_path,
) )
from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating
# Импорт Author, AuthorBookmark, AuthorFollower, AuthorRating отложен для избежания циклических импортов # Импорт Author, AuthorBookmark, AuthorFollower, AuthorRating отложен для избежания циклических импортов
from orm import collection, community, draft, invite, notification, reaction, shout, topic from orm import collection, community, draft, invite, notification, reaction, shout, topic
from services.db import create_table_if_not_exists, local_session from storage.db import create_table_if_not_exists, local_session
from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating
# Создаем основные типы # Создаем основные типы
query = QueryType() query = QueryType()
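Модуль storage/schema.py собирает исполняемую GraphQL-схему через ariadne; ниже — минимальный набросок этого механизма (SDL и поле ping — условные, только для иллюстрации; в реальном модуле type_defs читается через load_schema_from_path):

```python
from ariadne import MutationType, QueryType, make_executable_schema

query = QueryType()
mutation = MutationType()


@query.field("ping")
def resolve_ping(*_):
    # простейший резолвер, показывающий привязку поля к QueryType
    return "pong"


# Условная SDL-схема; реальные .graphql-файлы лежат в репозитории (путь — предположение)
type_defs = """
type Query { ping: String! }
type Mutation { noop: Boolean }
"""
schema = make_executable_schema(type_defs, query, mutation)
```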

View File

@@ -7,7 +7,7 @@ from starlette.responses import JSONResponse, RedirectResponse
from auth.oauth import get_user_profile, oauth_callback_http, oauth_login_http from auth.oauth import get_user_profile, oauth_callback_http, oauth_login_http
from auth.orm import Author from auth.orm import Author
from services.db import local_session from storage.db import local_session
# Настройка логгера # Настройка логгера
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@@ -14,7 +14,7 @@ import asyncio
from typing import Optional, Generator, AsyncGenerator from typing import Optional, Generator, AsyncGenerator
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from services.redis import redis from storage.redis import redis
from orm.base import BaseModel as Base from orm.base import BaseModel as Base
@@ -574,7 +574,7 @@ def mock_verify(monkeypatch):
@pytest.fixture @pytest.fixture
def redis_client(): def redis_client():
"""Создает Redis клиент для тестов токенов""" """Создает Redis клиент для тестов токенов"""
from services.redis import RedisService from storage.redis import RedisService
redis_service = RedisService() redis_service = RedisService()
return redis_service._client return redis_service._client
@@ -593,7 +593,7 @@ def mock_redis_if_unavailable():
yield yield
except Exception: except Exception:
# Redis недоступен, мокаем # Redis недоступен, мокаем
with patch('services.redis.RedisService') as mock_redis: with patch('storage.redis.RedisService') as mock_redis:
# Создаем базовый mock для Redis методов # Создаем базовый mock для Redis методов
mock_redis.return_value.get.return_value = None mock_redis.return_value.get.return_value = None
mock_redis.return_value.set.return_value = True mock_redis.return_value.set.return_value = True

View File

@@ -11,7 +11,7 @@ from unittest.mock import patch, MagicMock
from auth.orm import Author from auth.orm import Author
from orm.community import Community, CommunityAuthor from orm.community import Community, CommunityAuthor
from services.db import local_session from storage.db import local_session
# Используем общую фикстуру из conftest.py # Используем общую фикстуру из conftest.py

View File

@@ -13,7 +13,7 @@ from auth.internal import verify_internal_auth
from auth.permissions import ContextualPermissionCheck from auth.permissions import ContextualPermissionCheck
from orm.community import Community, CommunityAuthor from orm.community import Community, CommunityAuthor
from auth.permissions import ContextualPermissionCheck from auth.permissions import ContextualPermissionCheck
from services.db import local_session from storage.db import local_session
# Используем общую фикстуру из conftest.py # Используем общую фикстуру из conftest.py

View File

@@ -18,7 +18,7 @@ from orm.community import (
assign_role_to_user, assign_role_to_user,
remove_role_from_user remove_role_from_user
) )
from services.db import local_session from storage.db import local_session
# Используем общую фикстуру из conftest.py # Используем общую фикстуру из conftest.py

View File

@@ -12,13 +12,13 @@ from unittest.mock import patch, MagicMock
from auth.orm import Author from auth.orm import Author
from orm.community import Community, CommunityAuthor from orm.community import Community, CommunityAuthor
from services.rbac import ( from rbac.api import (
initialize_community_permissions, initialize_community_permissions,
get_permissions_for_role, get_permissions_for_role,
user_has_permission, user_has_permission,
roles_have_permission roles_have_permission
) )
from services.db import local_session from storage.db import local_session
@pytest.fixture @pytest.fixture

View File

@@ -55,8 +55,8 @@ def create_test_app():
from ariadne.asgi import GraphQL from ariadne.asgi import GraphQL
from starlette.responses import JSONResponse from starlette.responses import JSONResponse
from services.db import Base from storage.db import Base
from services.schema import resolvers from storage.schema import resolvers
# Создаем движок и таблицы # Создаем движок и таблицы
engine = create_engine( engine = create_engine(

View File

@@ -5,18 +5,18 @@ import pytest
# Импортируем все модули для покрытия # Импортируем все модули для покрытия
import services import services
import services.db import storage.db
import services.redis import storage.redis
import services.rbac import rbac.api
import services.admin import services.admin
import services.auth import services.auth
import services.common_result import utils.common_result
import services.env import storage.env
import services.exception import utils.exception
import services.notify import services.notify
import services.schema import storage.schema
import services.search import services.search
import services.sentry import utils.sentry
import services.viewed import services.viewed
import utils import utils
@@ -83,18 +83,18 @@ class TestCoverageImports:
def test_services_imports(self): def test_services_imports(self):
"""Тест импорта модулей services""" """Тест импорта модулей services"""
assert services is not None assert services is not None
assert services.db is not None assert storage.db is not None
assert services.redis is not None assert storage.redis is not None
assert services.rbac is not None assert rbac.api is not None
assert services.admin is not None assert services.admin is not None
assert services.auth is not None assert services.auth is not None
assert services.common_result is not None assert utils.common_result is not None
assert services.env is not None assert storage.env is not None
assert services.exception is not None assert utils.exception is not None
assert services.notify is not None assert services.notify is not None
assert services.schema is not None assert storage.schema is not None
assert services.search is not None assert services.search is not None
assert services.sentry is not None assert utils.sentry is not None
assert services.viewed is not None assert services.viewed is not None
def test_utils_imports(self): def test_utils_imports(self):

View File

@@ -5,8 +5,8 @@
import pytest import pytest
import json import json
from unittest.mock import Mock from unittest.mock import Mock
from services.redis import redis from storage.redis import redis
from services.db import local_session from storage.db import local_session
from orm.community import Community from orm.community import Community

View File

@@ -6,7 +6,7 @@ import time
from sqlalchemy import create_engine, Column, Integer, String, inspect from sqlalchemy import create_engine, Column, Integer, String, inspect
from sqlalchemy.orm import declarative_base, Session from sqlalchemy.orm import declarative_base, Session
from services.db import create_table_if_not_exists, get_column_names_without_virtual, local_session from storage.db import create_table_if_not_exists, get_column_names_without_virtual, local_session
# Создаем базовую модель для тестирования # Создаем базовую модель для тестирования
Base = declarative_base() Base = declarative_base()

View File

@@ -95,9 +95,9 @@ async def test_create_shout(db_session, test_author):
# Мокаем local_session чтобы использовать тестовую сессию # Мокаем local_session чтобы использовать тестовую сессию
from unittest.mock import patch from unittest.mock import patch
from services.db import local_session from storage.db import local_session
with patch('services.db.local_session') as mock_local_session: with patch('storage.db.local_session') as mock_local_session:
mock_local_session.return_value = db_session mock_local_session.return_value = db_session
result = await create_draft( result = await create_draft(
@@ -126,9 +126,9 @@ async def test_load_drafts(db_session):
# Мокаем local_session чтобы использовать тестовую сессию # Мокаем local_session чтобы использовать тестовую сессию
from unittest.mock import patch from unittest.mock import patch
from services.db import local_session from storage.db import local_session
with patch('services.db.local_session') as mock_local_session: with patch('storage.db.local_session') as mock_local_session:
mock_local_session.return_value = db_session mock_local_session.return_value = db_session
# Вызываем резолвер напрямую # Вызываем резолвер напрямую
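Тесты драфтов подменяют local_session; ниже — набросок общего приёма с unittest.mock для мока такого контекстного менеджера (путь "resolvers.draft.local_session" — предположение; патчить стоит имя в том модуле, где оно используется):

```python
from unittest.mock import MagicMock, patch


def run_with_fake_session(db_session, call_resolver):
    """Выполняет call_resolver с подменённым local_session (набросок вспомогательной функции)."""
    cm = MagicMock()
    cm.__enter__.return_value = db_session  # поддержка "with local_session() as session:"
    cm.__exit__.return_value = False
    with patch("resolvers.draft.local_session", return_value=cm):
        return call_resolver()
```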

View File

@@ -16,7 +16,7 @@ import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__))) sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from cache.cache import get_cached_follower_topics from cache.cache import get_cached_follower_topics
from services.redis import redis from storage.redis import redis
from utils.logger import root_logger as logger from utils.logger import root_logger as logger

View File

@@ -12,7 +12,7 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
def test_rbac_import(): def test_rbac_import():
"""Тестируем импорт RBAC модуля""" """Тестируем импорт RBAC модуля"""
try: try:
from services.rbac import require_any_permission, require_permission from rbac.api import require_any_permission, require_permission
print("✅ RBAC модуль импортирован успешно") print("✅ RBAC модуль импортирован успешно")
@@ -29,7 +29,7 @@ def test_rbac_import():
def test_require_permission_decorator(): def test_require_permission_decorator():
"""Тестируем декоратор require_permission""" """Тестируем декоратор require_permission"""
try: try:
from services.rbac import require_permission from rbac.api import require_permission
@require_permission("test:permission") @require_permission("test:permission")
async def test_func(*args, **kwargs): async def test_func(*args, **kwargs):
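Для контекста — гипотетический набросок того, как может быть устроен проверяемый здесь декоратор require_permission (структура info.context и сигнатура user_has_permission — предположения, реальная реализация находится в rbac/api.py):

```python
from functools import wraps

from graphql import GraphQLError

from rbac.api import user_has_permission  # сигнатура вызова ниже — предположение


def require_permission(permission: str):
    """Гипотетический набросок: запрещает вызов резолвера без нужного разрешения."""

    def decorator(fn):
        @wraps(fn)
        async def wrapper(root, info, *args, **kwargs):
            author_id = info.context.get("author", {}).get("id")  # структура контекста — предположение
            if not author_id or not await user_has_permission(author_id, permission, 1):
                raise GraphQLError(f"Недостаточно прав: требуется {permission}")
            return await fn(root, info, *args, **kwargs)

        return wrapper

    return decorator
```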

View File

@@ -12,14 +12,14 @@ import json
from auth.orm import Author from auth.orm import Author
from orm.community import Community, CommunityAuthor from orm.community import Community, CommunityAuthor
from services.rbac import ( from rbac.api import (
initialize_community_permissions, initialize_community_permissions,
get_permissions_for_role, get_permissions_for_role,
user_has_permission, user_has_permission,
roles_have_permission roles_have_permission
) )
from services.db import local_session from storage.db import local_session
from services.redis import redis from storage.redis import redis
@pytest.fixture @pytest.fixture

View File

@@ -10,14 +10,14 @@ from unittest.mock import patch, MagicMock
from auth.orm import Author from auth.orm import Author
from orm.community import Community, CommunityAuthor from orm.community import Community, CommunityAuthor
from services.rbac import ( from rbac.api import (
initialize_community_permissions, initialize_community_permissions,
get_role_permissions_for_community, get_role_permissions_for_community,
get_permissions_for_role, get_permissions_for_role,
user_has_permission, user_has_permission,
roles_have_permission roles_have_permission
) )
from services.db import local_session from storage.db import local_session
@pytest.fixture @pytest.fixture
@@ -180,7 +180,7 @@ class TestRBACPermissionChecking:
async def test_user_with_author_role_has_reader_permissions(self, db_session, test_users, test_community): async def test_user_with_author_role_has_reader_permissions(self, db_session, test_users, test_community):
"""Тест что пользователь с ролью author имеет разрешения reader""" """Тест что пользователь с ролью author имеет разрешения reader"""
# Используем local_session для создания записи # Используем local_session для создания записи
from services.db import local_session from storage.db import local_session
from orm.community import CommunityAuthor from orm.community import CommunityAuthor
with local_session() as session: with local_session() as session:
@@ -214,7 +214,7 @@ class TestRBACPermissionChecking:
async def test_user_with_editor_role_has_author_permissions(self, db_session, test_users, test_community): async def test_user_with_editor_role_has_author_permissions(self, db_session, test_users, test_community):
"""Тест что пользователь с ролью editor имеет разрешения author""" """Тест что пользователь с ролью editor имеет разрешения author"""
# Используем local_session для создания записи # Используем local_session для создания записи
from services.db import local_session from storage.db import local_session
from orm.community import CommunityAuthor from orm.community import CommunityAuthor
with local_session() as session: with local_session() as session:
@@ -248,7 +248,7 @@ class TestRBACPermissionChecking:
async def test_user_with_admin_role_has_all_permissions(self, db_session, test_users, test_community): async def test_user_with_admin_role_has_all_permissions(self, db_session, test_users, test_community):
"""Тест что пользователь с ролью admin имеет все разрешения""" """Тест что пользователь с ролью admin имеет все разрешения"""
# Используем local_session для создания записи # Используем local_session для создания записи
from services.db import local_session from storage.db import local_session
from orm.community import CommunityAuthor from orm.community import CommunityAuthor
with local_session() as session: with local_session() as session:

View File

@@ -9,7 +9,7 @@ import pytest
import redis.asyncio as aioredis import redis.asyncio as aioredis
from redis.asyncio import Redis from redis.asyncio import Redis
from services.redis import ( from storage.redis import (
RedisService, RedisService,
close_redis, close_redis,
init_redis, init_redis,
@@ -28,7 +28,7 @@ class TestRedisServiceInitialization:
def test_redis_service_init_without_aioredis(self): def test_redis_service_init_without_aioredis(self):
"""Тест инициализации без aioredis""" """Тест инициализации без aioredis"""
with patch("services.redis.aioredis", None): with patch("storage.redis.aioredis", None):
service = RedisService() service = RedisService()
assert service._is_available is False assert service._is_available is False
@@ -58,7 +58,7 @@ class TestRedisConnectionManagement:
"""Тест успешного подключения""" """Тест успешного подключения"""
service = RedisService() service = RedisService()
with patch("services.redis.aioredis.from_url") as mock_from_url: with patch("storage.redis.aioredis.from_url") as mock_from_url:
mock_client = AsyncMock() mock_client = AsyncMock()
mock_client.ping = AsyncMock(return_value=True) mock_client.ping = AsyncMock(return_value=True)
mock_from_url.return_value = mock_client mock_from_url.return_value = mock_client
@@ -73,7 +73,7 @@ class TestRedisConnectionManagement:
"""Тест неудачного подключения""" """Тест неудачного подключения"""
service = RedisService() service = RedisService()
with patch("services.redis.aioredis.from_url") as mock_from_url: with patch("storage.redis.aioredis.from_url") as mock_from_url:
mock_from_url.side_effect = Exception("Connection failed") mock_from_url.side_effect = Exception("Connection failed")
await service.connect() await service.connect()
@@ -84,7 +84,7 @@ class TestRedisConnectionManagement:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_connect_without_aioredis(self): async def test_connect_without_aioredis(self):
"""Тест подключения без aioredis""" """Тест подключения без aioredis"""
with patch("services.redis.aioredis", None): with patch("storage.redis.aioredis", None):
service = RedisService() service = RedisService()
await service.connect() await service.connect()
assert service._client is None assert service._client is None
@@ -96,7 +96,7 @@ class TestRedisConnectionManagement:
mock_existing_client = AsyncMock() mock_existing_client = AsyncMock()
service._client = mock_existing_client service._client = mock_existing_client
with patch("services.redis.aioredis.from_url") as mock_from_url: with patch("storage.redis.aioredis.from_url") as mock_from_url:
mock_client = AsyncMock() mock_client = AsyncMock()
mock_client.ping = AsyncMock(return_value=True) mock_client.ping = AsyncMock(return_value=True)
mock_from_url.return_value = mock_client mock_from_url.return_value = mock_client
@@ -149,7 +149,7 @@ class TestRedisCommandExecution:
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_execute_without_aioredis(self): async def test_execute_without_aioredis(self):
"""Тест выполнения команды без aioredis""" """Тест выполнения команды без aioredis"""
with patch("services.redis.aioredis", None): with patch("storage.redis.aioredis", None):
service = RedisService() service = RedisService()
result = await service.execute("test_command") result = await service.execute("test_command")
assert result is None assert result is None
@@ -874,7 +874,7 @@ class TestAdditionalRedisCoverage:
service._client = mock_client service._client = mock_client
mock_client.close.side_effect = Exception("Close error") mock_client.close.side_effect = Exception("Close error")
with patch('services.redis.aioredis.from_url') as mock_from_url: with patch('storage.redis.aioredis.from_url') as mock_from_url:
mock_new_client = AsyncMock() mock_new_client = AsyncMock()
mock_from_url.return_value = mock_new_client mock_from_url.return_value = mock_new_client

View File

@@ -7,7 +7,7 @@
import pytest import pytest
import asyncio import asyncio
import json import json
from services.redis import RedisService from storage.redis import RedisService
class TestRedisFunctionality: class TestRedisFunctionality:
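Функциональные тесты работают с тем же клиентом, что и остальной код; набросок типичного использования storage.redis (вариадическая передача аргументов команды в execute — предположение, в тестах выше виден только вызов service.execute("test_command")):

```python
from storage.redis import redis


async def cache_token(key: str, value: str, ttl: int = 60):
    # Набросок: кладём значение с TTL и сразу читаем его обратно
    await redis.execute("SET", key, value, "EX", ttl)
    return await redis.execute("GET", key)
```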

View File

@@ -14,7 +14,7 @@ import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__))) sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from cache.cache import get_cached_follower_topics from cache.cache import get_cached_follower_topics
from services.redis import redis from storage.redis import redis
from utils.logger import root_logger as logger from utils.logger import root_logger as logger

View File

@@ -17,8 +17,8 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from cache.cache import get_cached_follower_topics from cache.cache import get_cached_follower_topics
from orm.topic import Topic, TopicFollower from orm.topic import Topic, TopicFollower
from services.db import local_session from storage.db import local_session
from services.redis import redis from storage.redis import redis
from utils.logger import root_logger as logger from utils.logger import root_logger as logger

View File

@@ -22,7 +22,7 @@ from auth.orm import Author
from orm.community import assign_role_to_user from orm.community import assign_role_to_user
from orm.shout import Shout from orm.shout import Shout
from resolvers.editor import unpublish_shout from resolvers.editor import unpublish_shout
from services.db import local_session from storage.db import local_session
# Настройка логгера # Настройка логгера
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s") logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

View File

@@ -18,7 +18,7 @@ sys.path.append(str(Path(__file__).parent))
from auth.orm import Author from auth.orm import Author
from resolvers.auth import update_security from resolvers.auth import update_security
from services.db import local_session from storage.db import local_session
# Настройка логгера # Настройка логгера
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s") logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

View File

@@ -11,12 +11,6 @@ from orm.shout import Shout
from orm.topic import Topic from orm.topic import Topic
from utils.logger import root_logger as logger from utils.logger import root_logger as logger
# Отложенный импорт Author для избежания циклических импортов
def get_author_model():
"""Возвращает модель Author для использования в common_result"""
from auth.orm import Author
return Author
def handle_error(operation: str, error: Exception) -> GraphQLError: def handle_error(operation: str, error: Exception) -> GraphQLError:
"""Обрабатывает ошибки в резолверах""" """Обрабатывает ошибки в резолверах"""

View File

@@ -4,7 +4,7 @@ JSON encoders and utilities
import json import json
from datetime import date, datetime from datetime import date, datetime
from typing import Any, Union from typing import Any
import orjson import orjson
@@ -23,7 +23,7 @@ def default_json_encoder(obj: Any) -> Any:
TypeError: Если объект не может быть сериализован TypeError: Если объект не может быть сериализован
""" """
# Обработка datetime # Обработка datetime
if isinstance(obj, (datetime, date)): if isinstance(obj, (datetime | date)):
return obj.isoformat() return obj.isoformat()
serialized = False serialized = False
@@ -75,7 +75,7 @@ def orjson_dumps(obj: Any, **kwargs: Any) -> bytes:
return orjson.dumps(obj, default=default_json_encoder, **kwargs) return orjson.dumps(obj, default=default_json_encoder, **kwargs)
def orjson_loads(data: Union[str, bytes]) -> Any: def orjson_loads(data: str | bytes) -> Any:
""" """
Десериализация объекта с помощью orjson. Десериализация объекта с помощью orjson.

View File

@@ -2,7 +2,7 @@ import re
from urllib.parse import quote_plus from urllib.parse import quote_plus
from auth.orm import Author from auth.orm import Author
from services.db import local_session from storage.db import local_session
def replace_translit(src: str | None) -> str: def replace_translit(src: str | None) -> str:

View File

@@ -16,7 +16,7 @@ logger.setLevel(logging.DEBUG) # Более подробное логирова
def start_sentry() -> None: def start_sentry() -> None:
try: try:
logger.info("[services.sentry] Sentry init started...") logger.info("[utils.sentry] Sentry init started...")
sentry_sdk.init( sentry_sdk.init(
dsn=GLITCHTIP_DSN, dsn=GLITCHTIP_DSN,
traces_sample_rate=1.0, # Захват 100% транзакций traces_sample_rate=1.0, # Захват 100% транзакций
@@ -25,6 +25,6 @@ def start_sentry() -> None:
integrations=[StarletteIntegration(), AriadneIntegration(), SqlalchemyIntegration()], integrations=[StarletteIntegration(), AriadneIntegration(), SqlalchemyIntegration()],
send_default_pii=True, # Отправка информации о пользователе (PII) send_default_pii=True, # Отправка информации о пользователе (PII)
) )
logger.info("[services.sentry] Sentry initialized successfully.") logger.info("[utils.sentry] Sentry initialized successfully.")
except (sentry_sdk.utils.BadDsn, ImportError, ValueError, TypeError) as _e: except (sentry_sdk.utils.BadDsn, ImportError, ValueError, TypeError) as _e:
logger.warning("[services.sentry] Failed to initialize Sentry", exc_info=True) logger.warning("[utils.sentry] Failed to initialize Sentry", exc_info=True)