feature/e2e #4

Merged 22 commits from feature/e2e into dev on 2025-08-20 17:21:31 +00:00
117 changed files with 5455 additions and 2740 deletions
Showing only changes of commit 9a2b792f08

View File

@@ -137,7 +137,7 @@ jobs:
from orm.shout import Shout
from orm.topic import Topic
from auth.orm import Author, AuthorBookmark, AuthorRating, AuthorFollower
from services.db import engine
from storage.db import engine
from sqlalchemy import inspect
print('✅ Engine imported successfully')
@@ -166,8 +166,8 @@ jobs:
- name: Start servers
run: |
chmod +x scripts/ci-server.py
timeout 300 python scripts/ci-server.py &
chmod +x ./ci-server.py
timeout 300 python ./ci-server.py &
echo $! > ci-server.pid
echo "Waiting for servers..."
@@ -184,8 +184,13 @@ jobs:
# Создаем папку для результатов тестов
mkdir -p test-results
# В CI пропускаем тесты здоровья серверов, так как они могут не пройти
echo "🏥 В CI режиме пропускаем тесты здоровья серверов..."
# Сначала проверяем здоровье серверов
echo "🏥 Проверяем здоровье серверов..."
if uv run pytest tests/test_server_health.py -v; then
echo "✅ Серверы здоровы!"
else
echo "⚠️ Тест здоровья серверов не прошел, но продолжаем..."
fi
for test_type in "not e2e" "integration" "e2e" "browser"; do
echo "Running $test_type tests..."
@@ -284,19 +289,20 @@ jobs:
fetch-depth: 0
- name: Deploy
if: github.ref == 'refs/heads/dev'
env:
HOST_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
TARGET: ${{ github.ref == 'refs/heads/dev' && 'core' || 'discoursio-api' }}
SERVER: ${{ github.ref == 'refs/heads/dev' && 'STAGING' || 'V' }}
run: |
echo "🚀 Deploying to $SERVER..."
echo "🚀 Deploying to $ENV..."
mkdir -p ~/.ssh
echo "$HOST_KEY" > ~/.ssh/known_hosts
chmod 600 ~/.ssh/known_hosts
git remote add dokku dokku@v3.dscrs.site:core
git remote add dokku dokku@staging.discours.io:$TARGET
git push dokku HEAD:main -f
echo "✅ deployment completed!"
echo "✅ $ENV deployment completed!"
# ===== SUMMARY =====
summary:

2
.gitignore vendored
View File

@@ -177,5 +177,5 @@ panel/types.gen.ts
tmp
test-results
page_content.html
test_output
docs/progress/*

View File

@@ -2,6 +2,22 @@
All notable changes to the project are documented in this file.
## [0.9.7] - 2025-08-17
### 🔧 Architecture fixes
- **Eliminated circular imports in the ORM**: fixed the circular imports between `orm/community.py` and `orm/shout.py`
- **Optimized model imports**: removed the direct `Shout` import from `orm/community.py`, replacing it with string references (see the sketch after this entry)
- **Fixed ruff warnings**: added `# noqa: PLW0603` comments to suppress the `global` warnings in `rbac/interface.py`
- **Improved SQLAlchemy compatibility**: `text()` is now used for complex SQL expressions in `CommunityStats`
### 🏷️ Typing
- **Fixed mypy errors**: all ORM models now pass type checking
- **Improved consistency**: `BaseModel` is used instead of the `Base` alias to avoid confusion
### 🧹 Code quality
- **Simplified import architecture**: removed the convoluted constructs previously used to avoid `global`
- **Preserved functionality**: all `CommunityStats` methods work correctly with the new architecture
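A minimal sketch of the string-reference technique mentioned above, with illustrative model names rather than the project's exact declarations (both classes are shown in one module here; in the repo they live in separate files, which is exactly why the string reference matters):

```python
# Hypothetical sketch: the related model is referenced by name, so the module
# defining Community needs no import-time `from orm.shout import Shout`.
from sqlalchemy import ForeignKey, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Community(Base):
    __tablename__ = "community"
    id: Mapped[int] = mapped_column(primary_key=True)
    # The string is resolved against the declarative registry at
    # mapper-configuration time, after all modules have been imported.
    shouts: Mapped[list["Shout"]] = relationship("Shout", back_populates="community")


class Shout(Base):
    __tablename__ = "shout"
    id: Mapped[int] = mapped_column(primary_key=True)
    community_id: Mapped[int] = mapped_column(ForeignKey("community.id"))
    community: Mapped["Community"] = relationship("Community", back_populates="shouts")


# Mappers configure and tables create without any import-time cycle.
Base.metadata.create_all(create_engine("sqlite:///:memory:"))
```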
## [0.9.6] - 2025-08-12
### 🚀 CI/CD and E2E testing

View File

@@ -134,11 +134,11 @@ chmod +x scripts/test-ci-local.sh
```
### CI Server Management
The `scripts/ci-server.py` script manages servers for CI:
The `./ci-server.py` script manages servers for CI:
```bash
# Start servers in CI mode
CI_MODE=true python3 scripts/ci-server.py
CI_MODE=true python3 ./ci-server.py
```
## 📊 Project Structure

View File

@@ -5,7 +5,7 @@ from starlette.responses import JSONResponse, RedirectResponse, Response
from auth.core import verify_internal_auth
from auth.orm import Author
from auth.tokens.storage import TokenStorage
from services.db import local_session
from storage.db import local_session
from settings import (
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_MAX_AGE,

View File

@@ -4,12 +4,14 @@
"""
import time
from sqlalchemy.orm.exc import NoResultFound
from auth.orm import Author
from auth.state import AuthState
from auth.tokens.storage import TokenStorage as TokenManager
from auth.orm import Author
from orm.community import CommunityAuthor
from services.db import local_session
from storage.db import local_session
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
@@ -50,7 +52,7 @@ async def verify_internal_auth(token: str) -> tuple[int, list, bool]:
with local_session() as session:
try:
# Author уже импортирован в начале файла
author = session.query(Author).where(Author.id == user_id).one()
# Получаем роли
@@ -112,14 +114,14 @@ async def get_auth_token_from_request(request) -> str | None:
"""
# Отложенный импорт для избежания циклических зависимостей
from auth.decorators import get_auth_token
return await get_auth_token(request)
async def authenticate(request) -> AuthState:
"""
Получает токен из запроса и проверяет авторизацию.
Args:
request: Объект запроса
@@ -146,4 +148,3 @@ async def authenticate(request) -> AuthState:
auth_state.author_id = str(user_id)
auth_state.is_admin = is_admin
return auth_state

View File

@@ -5,28 +5,20 @@ from typing import Any
from graphql import GraphQLError, GraphQLResolveInfo
from sqlalchemy import exc
from auth.credentials import AuthCredentials
from auth.exceptions import OperationNotAllowedError
# Импорт базовых функций из реструктурированных модулей
from auth.core import authenticate
from auth.utils import get_auth_token
from auth.credentials import AuthCredentials
from auth.exceptions import OperationNotAllowedError
from auth.orm import Author
from auth.utils import get_auth_token, get_safe_headers
from orm.community import CommunityAuthor
from services.db import local_session
from services.redis import redis as redis_adapter
from storage.db import local_session
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
# Импортируем get_safe_headers из utils
from auth.utils import get_safe_headers
# get_auth_token теперь импортирован из auth.utils
async def validate_graphql_context(info: GraphQLResolveInfo) -> None:
"""
Проверяет валидность GraphQL контекста и проверяет авторизацию.

View File

@@ -4,8 +4,8 @@ from auth.exceptions import ExpiredTokenError, InvalidPasswordError, InvalidToke
from auth.jwtcodec import JWTCodec
from auth.orm import Author
from auth.password import Password
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger
AuthorType = TypeVar("AuthorType", bound=Author)

View File

@@ -7,7 +7,7 @@ DEPRECATED: Этот модуль переносится в auth/core.py
"""
# Импорт базовых функций из core модуля
from auth.core import verify_internal_auth, create_internal_session, authenticate
from auth.core import authenticate, create_internal_session, verify_internal_auth
# Re-export для обратной совместимости
__all__ = ["verify_internal_auth", "create_internal_session", "authenticate"]
__all__ = ["authenticate", "create_internal_session", "verify_internal_auth"]

View File

@@ -40,9 +40,7 @@ class JWTCodec:
# Если время истечения не указано, устанавливаем дефолтное
if not expiration:
expiration = datetime.datetime.now(datetime.UTC) + datetime.timedelta(
days=JWT_REFRESH_TOKEN_EXPIRE_DAYS
)
expiration = datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=JWT_REFRESH_TOKEN_EXPIRE_DAYS)
logger.debug(f"[JWTCodec.encode] Время истечения не указано, устанавливаем срок: {expiration}")
# Формируем payload с временными метками

View File

@@ -17,8 +17,8 @@ from starlette.types import ASGIApp
from auth.credentials import AuthCredentials
from auth.orm import Author
from auth.tokens.storage import TokenStorage as TokenManager
from services.db import local_session
from services.redis import redis as redis_adapter
from storage.db import local_session
from storage.redis import redis as redis_adapter
from settings import (
ADMIN_EMAILS as ADMIN_EMAILS_LIST,
)

View File

@@ -13,8 +13,8 @@ from starlette.responses import JSONResponse, RedirectResponse
from auth.orm import Author
from auth.tokens.storage import TokenStorage
from orm.community import Community, CommunityAuthor, CommunityFollower
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from settings import (
FRONTEND_URL,
OAUTH_CLIENTS,

View File

@@ -3,7 +3,6 @@
"""
class AuthState:
"""
Класс для хранения информации о состоянии авторизации пользователя.

View File

@@ -6,7 +6,7 @@ import asyncio
from typing import Any, Dict, List
from auth.jwtcodec import JWTCodec
from services.redis import redis as redis_adapter
from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager

View File

@@ -5,7 +5,7 @@
import asyncio
from typing import Any, Dict
from services.redis import redis as redis_adapter
from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager

View File

@@ -5,7 +5,7 @@
import json
import time
from services.redis import redis as redis_adapter
from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
@@ -84,9 +84,7 @@ class OAuthTokenManager(BaseTokenManager):
return await self._get_oauth_data_optimized(token_type, str(user_id), provider)
return None
async def _get_oauth_data_optimized(
self, token_type: TokenType, user_id: str, provider: str
) -> TokenData | None:
async def _get_oauth_data_optimized(self, token_type: TokenType, user_id: str, provider: str) -> TokenData | None:
"""Оптимизированное получение OAuth данных"""
if not user_id or not provider:
error_msg = "OAuth токены требуют user_id и provider"

View File

@@ -7,7 +7,7 @@ import time
from typing import Any, List
from auth.jwtcodec import JWTCodec
from services.redis import redis as redis_adapter
from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager

View File

@@ -6,7 +6,7 @@ import json
import secrets
import time
from services.redis import redis as redis_adapter
from storage.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager

View File

@@ -4,6 +4,7 @@
"""
from typing import Any
from settings import SESSION_COOKIE_NAME, SESSION_TOKEN_HEADER
from utils.logger import root_logger as logger
@@ -113,26 +114,25 @@ async def get_auth_token(request: Any) -> str | None:
token = auth_header.replace("Bearer ", "", 1).strip()
logger.debug(f"[decorators] Извлечен Bearer токен: {len(token)}")
return token
else:
logger.debug("[decorators] Authorization заголовок не содержит Bearer токен")
logger.debug("[decorators] Authorization заголовок не содержит Bearer токен")
# 6. Проверяем cookies
if hasattr(request, "cookies") and request.cookies:
if isinstance(request.cookies, dict):
cookies = request.cookies
elif hasattr(request.cookies, "get"):
cookies = {k: request.cookies.get(k) for k in getattr(request.cookies, "keys", lambda: [])()}
cookies = {k: request.cookies.get(k) for k in getattr(request.cookies, "keys", list)()}
else:
cookies = {}
logger.debug(f"[decorators] Доступные cookies: {list(cookies.keys())}")
# Проверяем кастомную cookie
if SESSION_COOKIE_NAME in cookies:
token = cookies[SESSION_COOKIE_NAME]
logger.debug(f"[decorators] Токен найден в cookie {SESSION_COOKIE_NAME}: {len(token)}")
return token
# Проверяем стандартную cookie
if "auth_token" in cookies:
token = cookies["auth_token"]
@@ -150,29 +150,29 @@ async def get_auth_token(request: Any) -> str | None:
def extract_bearer_token(auth_header: str) -> str | None:
"""
Извлекает токен из заголовка Authorization с Bearer схемой.
Args:
auth_header: Заголовок Authorization
Returns:
Optional[str]: Извлеченный токен или None
"""
if not auth_header:
return None
if auth_header.startswith("Bearer "):
return auth_header[7:].strip()
return None
def format_auth_header(token: str) -> str:
"""
Форматирует токен в заголовок Authorization.
Args:
token: Токен авторизации
Returns:
str: Отформатированный заголовок
"""

4
cache/cache.py vendored
View File

@@ -37,8 +37,8 @@ from sqlalchemy import and_, join, select
from auth.orm import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger

7
cache/precache.py vendored
View File

@@ -3,14 +3,15 @@ import traceback
from sqlalchemy import and_, join, select
from auth.orm import Author, AuthorFollower
# Импорт Author, AuthorFollower отложен для избежания циклических импортов
from cache.cache import cache_author, cache_topic
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from auth.orm import Author, AuthorFollower
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger

View File

@@ -9,7 +9,7 @@ from cache.cache import (
invalidate_cache_by_prefix,
)
from resolvers.stat import get_with_stat
from services.redis import redis
from storage.redis import redis
from utils.logger import root_logger as logger
CACHE_REVALIDATION_INTERVAL = 300 # 5 minutes

5
cache/triggers.py vendored
View File

@@ -1,12 +1,13 @@
from sqlalchemy import event
from auth.orm import Author, AuthorFollower
# Импорт Author, AuthorFollower отложен для избежания циклических импортов
from cache.revalidator import revalidation_manager
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.db import local_session
from auth.orm import Author, AuthorFollower
from storage.db import local_session
from utils.logger import root_logger as logger

View File

@@ -20,34 +20,8 @@ import requests
from sqlalchemy import inspect
from orm.base import Base
from services.db import engine
# Создаем собственный логгер без дублирования
def create_ci_logger():
"""Создает логгер для CI без дублирования"""
logger = logging.getLogger("ci-server")
logger.setLevel(logging.INFO)
# Убираем существующие обработчики
logger.handlers.clear()
# Создаем форматтер
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# Создаем обработчик
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
# Отключаем пропагацию к root logger
logger.propagate = False
return logger
logger = create_ci_logger()
from storage.db import engine
from utils.logger import root_logger as logger
class CIServerManager:
"""Менеджер CI серверов"""
@@ -257,9 +231,10 @@ def run_tests_in_ci():
try:
ruff_result = subprocess.run(
["uv", "run", "ruff", "check", "."],
check=False, capture_output=False,
check=False,
capture_output=False,
text=True,
timeout=300 # 5 минут на linting
timeout=300, # 5 минут на linting
)
if ruff_result.returncode == 0:
logger.info("✅ Ruff проверка прошла успешно")
@@ -275,9 +250,10 @@ def run_tests_in_ci():
try:
ruff_format_result = subprocess.run(
["uv", "run", "ruff", "format", "--check", "."],
check=False, capture_output=False,
check=False,
capture_output=False,
text=True,
timeout=300 # 5 минут на проверку форматирования
timeout=300, # 5 минут на проверку форматирования
)
if ruff_format_result.returncode == 0:
logger.info("✅ Форматирование корректно")
@@ -293,9 +269,10 @@ def run_tests_in_ci():
try:
mypy_result = subprocess.run(
["uv", "run", "mypy", ".", "--ignore-missing-imports"],
check=False, capture_output=False,
check=False,
capture_output=False,
text=True,
timeout=600 # 10 минут на type checking
timeout=600, # 10 минут на type checking
)
if mypy_result.returncode == 0:
logger.info("✅ MyPy проверка прошла успешно")
@@ -311,7 +288,8 @@ def run_tests_in_ci():
try:
health_result = subprocess.run(
["uv", "run", "pytest", "tests/test_server_health.py", "-v"],
check=False, capture_output=False,
check=False,
capture_output=False,
text=True,
timeout=120, # 2 минуты на проверку здоровья
)
@@ -339,7 +317,8 @@ def run_tests_in_ci():
# Запускаем тесты с выводом в реальном времени
result = subprocess.run(
cmd,
check=False, capture_output=False, # Потоковый вывод
check=False,
capture_output=False, # Потоковый вывод
text=True,
timeout=600, # 10 минут на тесты
)

View File

@@ -61,7 +61,7 @@ await TokenStorage.revoke_session(token)
#### Обновленный API:
```python
from services.redis import redis
from storage.redis import redis
# Базовые операции
await redis.get(key)
@@ -190,7 +190,7 @@ compat = CompatibilityMethods()
await compat.get(token_key)
# Стало
from services.redis import redis
from storage.redis import redis
result = await redis.get(token_key)
```
@@ -263,7 +263,7 @@ pytest tests/auth/ -v
# Проверка Redis подключения
python -c "
import asyncio
from services.redis import redis
from storage.redis import redis
async def test():
result = await redis.ping()
print(f'Redis connection: {result}')
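For reference, a self-contained version of that connectivity check might look like the sketch below; it assumes the `storage.redis` singleton exposes the async `connect()` and `ping()` used elsewhere in this PR.

```python
# Hedged sketch of a standalone Redis connectivity check. Assumes the
# storage.redis client from this PR (connect() appears in main.py's lifespan,
# ping() in the snippet above).
import asyncio

from storage.redis import redis


async def check_redis() -> None:
    await redis.connect()        # establish the connection first
    result = await redis.ping()  # truthy when Redis answers
    print(f"Redis connection: {result}")


if __name__ == "__main__":
    asyncio.run(check_redis())
```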

View File

@@ -210,7 +210,7 @@ class MockInfo:
self.field_nodes = [MockFieldNode(requested_fields or [])]
# Патчинг зависимостей
@patch('services.redis.aioredis')
@patch('storage.redis.aioredis')
def test_redis_connection(mock_aioredis):
# Тест логики
pass

12
main.py
View File

@@ -21,10 +21,10 @@ from auth.middleware import AuthMiddleware, auth_middleware
from auth.oauth import oauth_callback, oauth_login
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from services.exception import ExceptionHandlerMiddleware
from services.rbac_init import initialize_rbac
from services.redis import redis
from services.schema import create_all_tables, resolvers
from rbac import initialize_rbac
from utils.exception import ExceptionHandlerMiddleware
from storage.redis import redis
from storage.schema import create_all_tables, resolvers
from services.search import check_search_service, initialize_search_index_background, search_service
from services.viewed import ViewedStorage
from settings import DEV_SERVER_PID_FILE_NAME
@@ -211,10 +211,10 @@ async def lifespan(app: Starlette):
try:
print("[lifespan] Starting application initialization")
create_all_tables()
# Инициализируем RBAC систему с dependency injection
initialize_rbac()
await asyncio.gather(
redis.connect(),
precache_data(),

View File

@@ -13,15 +13,15 @@ from sqlalchemy import (
UniqueConstraint,
distinct,
func,
text,
)
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import Mapped, mapped_column
from auth.orm import Author
from orm.base import BaseModel
from orm.shout import Shout
from services.db import local_session
from auth.rbac_interface import get_rbac_operations
from rbac.interface import get_rbac_operations
from storage.db import local_session
# Словарь названий ролей
role_names = {
@@ -355,7 +355,13 @@ class CommunityStats:
@property
def shouts(self) -> int:
return self.community.session.query(func.count(Shout.id)).filter(Shout.community == self.community.id).scalar()
return (
self.community.session.query(func.count(1))
.select_from(text("shout"))
.filter(text("shout.community_id = :community_id"))
.params(community_id=self.community.id)
.scalar()
)
@property
def followers(self) -> int:
@@ -370,12 +376,10 @@ class CommunityStats:
# author has a shout with community id and its featured_at is not null
return (
self.community.session.query(func.count(distinct(Author.id)))
.join(Shout)
.filter(
Shout.community == self.community.id,
Shout.featured_at.is_not(None),
Author.id.in_(Shout.authors),
)
.select_from(text("author"))
.join(text("shout"), text("author.id IN (SELECT author_id FROM shout_author WHERE shout_id = shout.id)"))
.filter(text("shout.community_id = :community_id"), text("shout.featured_at IS NOT NULL"))
.params(community_id=self.community.id)
.scalar()
)
@@ -498,7 +502,7 @@ class CommunityAuthor(BaseModel):
# Используем fallback на проверку ролей
return permission in self.role_list
except Exception:
# FIXME: Fallback: проверяем роли (старый способ)
# TODO: Fallback: проверяем роли (старый способ)
return any(permission == role for role in self.role_list)
def dict(self, access: bool = False) -> dict[str, Any]:

View File

@@ -8,6 +8,7 @@ from auth.orm import Author
from orm.base import BaseModel as Base
from orm.topic import Topic
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""

View File

@@ -10,6 +10,7 @@ from auth.orm import Author
from orm.base import BaseModel as Base
from utils.logger import root_logger as logger
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""

View File

@@ -7,6 +7,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from auth.orm import Author
from orm.base import BaseModel as Base
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""

View File

@@ -4,19 +4,10 @@ from typing import Any
from sqlalchemy import JSON, Boolean, ForeignKey, Index, Integer, PrimaryKeyConstraint, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
# Импорт Author отложен для избежания циклических импортов
from auth.orm import Author
from orm.base import BaseModel as Base
from orm.reaction import Reaction
from orm.topic import Topic
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""
return Author
from orm.base import BaseModel
class ShoutTopic(Base):
class ShoutTopic(BaseModel):
"""
Связь между публикацией и темой.
@@ -40,7 +31,7 @@ class ShoutTopic(Base):
)
class ShoutReactionsFollower(Base):
class ShoutReactionsFollower(BaseModel):
__tablename__ = "shout_reactions_followers"
follower: Mapped[int] = mapped_column(ForeignKey("author.id"), index=True)
@@ -57,7 +48,7 @@ class ShoutReactionsFollower(Base):
)
class ShoutAuthor(Base):
class ShoutAuthor(BaseModel):
"""
Связь между публикацией и автором.
@@ -81,7 +72,7 @@ class ShoutAuthor(Base):
)
class Shout(Base):
class Shout(BaseModel):
"""
Публикация в системе.
"""

View File

@@ -14,6 +14,7 @@ from sqlalchemy.orm import Mapped, mapped_column
from auth.orm import Author
from orm.base import BaseModel as Base
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в запросах"""

852
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -19,24 +19,20 @@
"@graphql-codegen/typescript": "^4.1.6",
"@graphql-codegen/typescript-operations": "^4.6.1",
"@graphql-codegen/typescript-resolvers": "^4.5.1",
"@solidjs/router": "^0.15.3",
"@types/node": "^24.1.0",
"@types/prettier": "^2.7.3",
"@types/prismjs": "^1.26.5",
"graphql": "^16.11.0",
"graphql-tag": "^2.12.6",
"lightningcss": "^1.30.1",
"prettier": "^3.6.2",
"prismjs": "^1.30.0",
"solid-js": "^1.9.7",
"terser": "^5.43.0",
"typescript": "^5.8.3",
"vite": "^7.0.6",
"vite": "^7.1.2",
"vite-plugin-solid": "^2.11.7"
},
"overrides": {
"vite": "^7.0.6"
},
"dependencies": {
"@solidjs/router": "^0.15.3"
"vite": "^7.1.2"
}
}

View File

@@ -222,6 +222,7 @@ ignore = [
"UP006", # use Set as type
"UP035", # use Set as type
"PERF401", # list comprehension - иногда нужно
"PLC0415", # импорты не в начале файла - иногда нужно
"ANN201", # Missing return type annotation for private function `wrapper` - иногда нужно
]

View File

@@ -1,24 +1,17 @@
"""
Модуль инициализации RBAC системы.
Настраивает dependency injection для разрешения циклических зависимостей.
Должен вызываться при старте приложения.
"""
from auth.rbac_interface import set_community_queries, set_rbac_operations
from rbac.interface import set_community_queries, set_rbac_operations
from utils.logger import root_logger as logger
def initialize_rbac() -> None:
"""
Инициализирует RBAC систему с dependency injection.
Должна быть вызвана один раз при старте приложения после импорта всех модулей.
"""
from services.rbac_impl import community_queries, rbac_operations
from rbac.operations import community_queries, rbac_operations
# Устанавливаем реализации
set_rbac_operations(rbac_operations)
set_community_queries(community_queries)
logger.info("🧿 RBAC система инициализирована с dependency injection")

View File

@@ -13,8 +13,8 @@ from functools import wraps
from typing import Any, Callable
from auth.orm import Author
from auth.rbac_interface import get_community_queries, get_rbac_operations
from services.db import local_session
from rbac.interface import get_community_queries, get_rbac_operations
from storage.db import local_session
from settings import ADMIN_EMAILS
from utils.logger import root_logger as logger
@@ -49,30 +49,31 @@ async def get_permissions_for_role(role: str, community_id: int) -> list[str]:
async def update_all_communities_permissions() -> None:
"""
Обновляет права для всех существующих сообществ на основе актуальных дефолтных настроек.
Используется в админ-панели для применения изменений в правах на все сообщества.
"""
rbac_ops = get_rbac_operations()
# Поздний импорт для избежания циклических зависимостей
from orm.community import Community
try:
with local_session() as session:
# Получаем все сообщества
communities = session.query(Community).all()
for community in communities:
# Сбрасываем кеш прав для каждого сообщества
from services.redis import redis
from storage.redis import redis
key = f"community:roles:{community.id}"
await redis.execute("DEL", key)
# Переинициализируем права с актуальными дефолтными настройками
await rbac_ops.initialize_community_permissions(community.id)
logger.info(f"Обновлены права для {len(communities)} сообществ")
except Exception as e:
logger.error(f"Ошибка при обновлении прав всех сообществ: {e}", exc_info=True)
raise
@@ -252,7 +253,7 @@ def get_community_id_from_context(info) -> int:
return community.id
logger.warning(f"[get_community_id_from_context] Сообщество с slug {slug} не найдено")
except Exception as e:
logger.error(f"[get_community_id_from_context] Ошибка при поиске community_id: {e}")
logger.exception(f"[get_community_id_from_context] Ошибка при поиске community_id: {e}")
# Пробуем из прямых аргументов
if hasattr(info, "field_asts") and info.field_asts:

View File

@@ -5,14 +5,13 @@
не импортирует ORM модели и не создает циклических зависимостей.
"""
from abc import ABC, abstractmethod
from typing import Any, Protocol
class RBACOperations(Protocol):
"""
Протокол для RBAC операций, позволяющий ORM моделям
выполнять операции с правами без прямого импорта services.rbac
выполнять операции с правами без прямого импорта rbac.api
"""
async def get_permissions_for_role(self, role: str, community_id: int) -> list[str]:
@@ -29,9 +28,7 @@ class RBACOperations(Protocol):
"""Проверяет разрешение пользователя в сообществе"""
...
async def _roles_have_permission(
self, role_slugs: list[str], permission: str, community_id: int
) -> bool:
async def _roles_have_permission(self, role_slugs: list[str], permission: str, community_id: int) -> bool:
"""Проверяет, есть ли у набора ролей конкретное разрешение в сообществе"""
...
@@ -42,9 +39,7 @@ class CommunityAuthorQueries(Protocol):
выполнять запросы без прямого импорта ORM моделей
"""
def get_user_roles_in_community(
self, author_id: int, community_id: int, session: Any = None
) -> list[str]:
def get_user_roles_in_community(self, author_id: int, community_id: int, session: Any = None) -> list[str]:
"""Получает роли пользователя в сообществе"""
...
@@ -56,13 +51,13 @@ _community_queries: CommunityAuthorQueries | None = None
def set_rbac_operations(ops: RBACOperations) -> None:
"""Устанавливает реализацию RBAC операций"""
global _rbac_operations
global _rbac_operations # noqa: PLW0603
_rbac_operations = ops
def set_community_queries(queries: CommunityAuthorQueries) -> None:
"""Устанавливает реализацию запросов сообщества"""
global _community_queries
global _community_queries # noqa: PLW0603
_community_queries = queries

View File

@@ -5,24 +5,21 @@
не импортирует ORM модели напрямую, используя dependency injection.
"""
import asyncio
import json
from pathlib import Path
from typing import Any
from auth.orm import Author
from auth.rbac_interface import CommunityAuthorQueries, RBACOperations, get_community_queries
from services.db import local_session
from services.redis import redis
from settings import ADMIN_EMAILS
from rbac.interface import CommunityAuthorQueries, RBACOperations, get_community_queries
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger
# --- Загрузка каталога сущностей и дефолтных прав ---
with Path("services/permissions_catalog.json").open() as f:
with Path("rbac/permissions_catalog.json").open() as f:
PERMISSIONS_CATALOG = json.load(f)
with Path("services/default_role_permissions.json").open() as f:
with Path("rbac/default_role_permissions.json").open() as f:
DEFAULT_ROLE_PERMISSIONS = json.load(f)
role_names = list(DEFAULT_ROLE_PERMISSIONS.keys())
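The two catalogs are plain role-to-permissions JSON. A purely hypothetical shape, shown only to illustrate why `role_names` can be derived from the mapping's keys (the real `rbac/default_role_permissions.json` defines the authoritative roles and permission strings):

```python
# Hypothetical example data, NOT the repository's actual catalog.
DEFAULT_ROLE_PERMISSIONS = {
    "reader": ["shout:read"],
    "author": ["shout:read", "shout:create"],
    "editor": ["shout:read", "shout:create", "shout:update_any"],
    "admin": ["*"],
}

# Matches the line above: the role list is simply the mapping's keys.
role_names = list(DEFAULT_ROLE_PERMISSIONS.keys())
```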
@@ -169,9 +166,7 @@ class RBACOperationsImpl(RBACOperations):
class CommunityAuthorQueriesImpl(CommunityAuthorQueries):
"""Конкретная реализация запросов CommunityAuthor через поздний импорт"""
def get_user_roles_in_community(
self, author_id: int, community_id: int = 1, session: Any = None
) -> list[str]:
def get_user_roles_in_community(self, author_id: int, community_id: int = 1, session: Any = None) -> list[str]:
"""
Получает роли пользователя в сообществе через новую систему CommunityAuthor
"""

View File

@@ -17,14 +17,14 @@ from orm.draft import DraftTopic
from orm.reaction import Reaction
from orm.shout import Shout, ShoutTopic
from orm.topic import Topic, TopicFollower
from rbac.api import update_all_communities_permissions
from resolvers.editor import delete_shout, update_shout
from resolvers.topic import invalidate_topic_followers_cache, invalidate_topics_cache
from services.admin import AdminService
from services.common_result import handle_error
from services.db import local_session
from services.rbac import update_all_communities_permissions
from services.redis import redis
from services.schema import mutation, query
from utils.common_result import handle_error
from storage.db import local_session
from storage.redis import redis
from storage.schema import mutation, query
from utils.logger import root_logger as logger
admin_service = AdminService()

View File

@@ -8,7 +8,7 @@ from graphql import GraphQLResolveInfo
from starlette.responses import JSONResponse
from services.auth import auth_service
from services.schema import mutation, query, type_author
from storage.schema import mutation, query, type_author
from settings import SESSION_COOKIE_NAME
from utils.logger import root_logger as logger

View File

@@ -21,10 +21,10 @@ from orm.community import Community, CommunityAuthor, CommunityFollower
from orm.shout import Shout, ShoutAuthor
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.redis import redis
from services.schema import mutation, query
from utils.common_result import CommonResult
from storage.db import local_session
from storage.redis import redis
from storage.schema import mutation, query
from utils.logger import root_logger as logger
DEFAULT_COMMUNITIES = [1]
@@ -450,9 +450,7 @@ async def load_authors_search(_: None, info: GraphQLResolveInfo, **kwargs: Any)
return []
def get_author_id_from(
slug: str | None = None, user: str | None = None, author_id: int | None = None
) -> int | None:
def get_author_id_from(slug: str | None = None, user: str | None = None, author_id: int | None = None) -> int | None:
"""Get author ID from different identifiers"""
try:
if author_id:

View File

@@ -7,9 +7,9 @@ from auth.orm import AuthorBookmark
from orm.shout import Shout
from resolvers.reader import apply_options, get_shouts_with_links, query_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.schema import mutation, query
from utils.common_result import CommonResult
from storage.db import local_session
from storage.schema import mutation, query
@query.field("load_shouts_bookmarked")

View File

@@ -4,8 +4,8 @@ from auth.orm import Author
from orm.invite import Invite, InviteStatus
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation
from storage.db import local_session
from storage.schema import mutation
@mutation.field("accept_invite")

View File

@@ -6,9 +6,9 @@ from sqlalchemy.orm import joinedload
from auth.decorators import editor_or_admin_required
from auth.orm import Author
from orm.collection import Collection, ShoutCollection
from services.db import local_session
from services.rbac import require_any_permission
from services.schema import mutation, query, type_collection
from rbac.api import require_any_permission
from storage.db import local_session
from storage.schema import mutation, query, type_collection
from utils.logger import root_logger as logger

View File

@@ -7,15 +7,15 @@ from sqlalchemy import distinct, func
from auth.orm import Author
from orm.community import Community, CommunityAuthor, CommunityFollower
from orm.shout import Shout, ShoutAuthor
from services.db import local_session
from services.rbac import (
from rbac.api import (
RBACError,
get_user_roles_from_context,
require_any_permission,
require_permission,
roles_have_permission,
)
from services.schema import mutation, query, type_community
from storage.db import local_session
from storage.schema import mutation, query, type_community
from utils.logger import root_logger as logger

View File

@@ -12,9 +12,9 @@ from cache.cache import (
from orm.draft import Draft, DraftAuthor, DraftTopic
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from services.auth import login_required
from services.db import local_session
from storage.db import local_session
from services.notify import notify_shout
from services.schema import mutation, query
from storage.schema import mutation, query
from services.search import search_service
from utils.extract_text import extract_text
from utils.logger import root_logger as logger

View File

@@ -19,10 +19,10 @@ from orm.topic import Topic
from resolvers.follower import follow
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from utils.common_result import CommonResult
from storage.db import local_session
from services.notify import notify_shout
from services.schema import mutation, query
from storage.schema import mutation, query
from services.search import search_service
from utils.extract_text import extract_text
from utils.logger import root_logger as logger

View File

@@ -13,8 +13,8 @@ from resolvers.reader import (
query_with_stat,
)
from services.auth import login_required
from services.db import local_session
from services.schema import query
from storage.db import local_session
from storage.schema import query
from utils.logger import root_logger as logger

View File

@@ -16,10 +16,10 @@ from orm.community import Community, CommunityFollower
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.auth import login_required
from services.db import local_session
from storage.db import local_session
from services.notify import notify_follower
from services.redis import redis
from services.schema import mutation, query
from storage.redis import redis
from storage.schema import mutation, query
from utils.logger import root_logger as logger

View File

@@ -17,8 +17,8 @@ from orm.notification import (
)
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
from storage.db import local_session
from storage.schema import mutation, query
from utils.logger import root_logger as logger

View File

@@ -3,7 +3,7 @@ from sqlalchemy import and_
from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from services.db import local_session
from storage.db import local_session
from utils.diff import apply_diff, get_diff

View File

@@ -8,8 +8,8 @@ from auth.orm import Author, AuthorRating
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
from storage.db import local_session
from storage.schema import mutation, query
from utils.logger import root_logger as logger

View File

@@ -21,9 +21,9 @@ from resolvers.follower import follow
from resolvers.proposals import handle_proposing
from resolvers.stat import update_author_stat
from services.auth import add_user_role, login_required
from services.db import local_session
from storage.db import local_session
from services.notify import notify_reaction
from services.schema import mutation, query
from storage.schema import mutation, query
from utils.logger import root_logger as logger

View File

@@ -10,8 +10,8 @@ from auth.orm import Author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.db import json_array_builder, json_builder, local_session
from services.schema import query
from storage.db import json_array_builder, json_builder, local_session
from storage.schema import query
from services.search import SearchService, search_text
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger

View File

@@ -13,7 +13,7 @@ from orm.community import Community, CommunityFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from storage.db import local_session
from utils.logger import root_logger as logger
# Type alias for queries
@@ -434,9 +434,7 @@ def get_following_count(entity_type: str, entity_id: int) -> int:
return 0
def get_shouts_count(
author_id: int | None = None, topic_id: int | None = None, community_id: int | None = None
) -> int:
def get_shouts_count(author_id: int | None = None, topic_id: int | None = None, community_id: int | None = None) -> int:
"""Получает количество публикаций"""
try:
with local_session() as session:

View File

@@ -18,11 +18,11 @@ from orm.draft import DraftTopic
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from rbac.api import require_any_permission, require_permission
from resolvers.stat import get_with_stat
from services.db import local_session
from services.rbac import require_any_permission, require_permission
from services.redis import redis
from services.schema import mutation, query
from storage.db import local_session
from storage.redis import redis
from storage.schema import mutation, query
from utils.logger import root_logger as logger

View File

@@ -1,119 +0,0 @@
#!/bin/bash
"""
Локальный тест CI - запускает серверы и тесты как в GitHub Actions
"""
set -e # Останавливаемся при ошибке
echo "🚀 Запуск локального CI теста..."
# Проверяем что мы в корневой папке
if [ ! -f "pyproject.toml" ]; then
echo "❌ Запустите скрипт из корневой папки проекта"
exit 1
fi
# Очищаем предыдущие процессы
echo "🧹 Очищаем предыдущие процессы..."
pkill -f "python dev.py" || true
pkill -f "npm run dev" || true
pkill -f "vite" || true
pkill -f "ci-server.py" || true
rm -f backend.pid frontend.pid ci-server.pid
# Проверяем зависимости
echo "📦 Проверяем зависимости..."
if ! command -v uv &> /dev/null; then
echo "❌ uv не установлен. Установите uv: https://docs.astral.sh/uv/getting-started/installation/"
exit 1
fi
if ! command -v npm &> /dev/null; then
echo "❌ npm не установлен. Установите Node.js: https://nodejs.org/"
exit 1
fi
# Устанавливаем зависимости
echo "📥 Устанавливаем Python зависимости..."
uv sync --group dev
echo "📥 Устанавливаем Node.js зависимости..."
cd panel
npm ci
cd ..
# Создаем тестовую базу
echo "🗄️ Инициализируем тестовую базу..."
touch database.db
uv run python -c "
from orm.base import Base
from orm.community import Community, CommunityFollower, CommunityAuthor
from orm.draft import Draft
from orm.invite import Invite
from orm.notification import Notification
from orm.rating import Rating
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic
from services.db import get_engine
engine = get_engine()
Base.metadata.create_all(engine)
print('Test database initialized')
"
# Запускаем серверы
echo "🚀 Запускаем серверы..."
python scripts/ci-server.py &
CI_PID=$!
echo "CI Server PID: $CI_PID"
# Ждем готовности серверов
echo "⏳ Ждем готовности серверов..."
timeout 120 bash -c '
while true; do
if curl -f http://localhost:8000/ > /dev/null 2>&1 && \
curl -f http://localhost:3000/ > /dev/null 2>&1; then
echo "✅ Все серверы готовы!"
break
fi
echo "⏳ Ожидаем серверы..."
sleep 2
done
'
if [ $? -ne 0 ]; then
echo "❌ Таймаут ожидания серверов"
kill $CI_PID 2>/dev/null || true
exit 1
fi
echo "🎯 Серверы запущены! Запускаем тесты..."
# Запускаем тесты
echo "🧪 Запускаем unit тесты..."
uv run pytest tests/ -m "not e2e" -v --tb=short
echo "🧪 Запускаем integration тесты..."
uv run pytest tests/ -m "integration" -v --tb=short
echo "🧪 Запускаем E2E тесты..."
uv run pytest tests/ -m "e2e" -v --tb=short
echo "🧪 Запускаем browser тесты..."
uv run pytest tests/ -m "browser" -v --tb=short || echo "⚠️ Browser тесты завершились с ошибками"
# Генерируем отчет о покрытии
echo "📊 Генерируем отчет о покрытии..."
uv run pytest tests/ --cov=. --cov-report=html
echo "🎉 Все тесты завершены!"
# Очищаем
echo "🧹 Очищаем ресурсы..."
kill $CI_PID 2>/dev/null || true
pkill -f "python dev.py" || true
pkill -f "npm run dev" || true
pkill -f "vite" || true
rm -f backend.pid frontend.pid ci-server.pid
echo "✅ Локальный CI тест завершен!"

View File

@@ -14,17 +14,11 @@ from auth.orm import Author
from orm.community import Community, CommunityAuthor, role_descriptions, role_names
from orm.invite import Invite, InviteStatus
from orm.shout import Shout
from services.db import local_session
from services.env import EnvVariable, env_manager
from storage.db import local_session
from storage.env import EnvVariable, env_manager
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
# Отложенный импорт Author для избежания циклических импортов
def get_author_model():
"""Возвращает модель Author для использования в admin"""
from auth.orm import Author
return Author
class AdminService:
"""Сервис для админ-панели с бизнес-логикой"""
@@ -59,7 +53,6 @@ class AdminService:
"slug": "system",
}
Author = get_author_model()
author = session.query(Author).where(Author.id == author_id).first()
if author:
return {

View File

@@ -29,8 +29,8 @@ from orm.community import (
assign_role_to_user,
get_user_roles_in_community,
)
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from settings import (
ADMIN_EMAILS,
SESSION_COOKIE_NAME,
@@ -39,11 +39,6 @@ from settings import (
from utils.generate_slug import generate_unique_slug
from utils.logger import root_logger as logger
# Author уже импортирован в начале файла
def get_author_model():
"""Возвращает модель Author для использования в auth"""
return Author
# Список разрешенных заголовков
ALLOWED_HEADERS = ["Authorization", "Content-Type"]
@@ -113,7 +108,6 @@ class AuthService:
# Проверяем админские права через email если нет роли админа
if not is_admin:
with local_session() as session:
Author = get_author_model()
author = session.query(Author).where(Author.id == user_id_int).first()
if author and author.email in ADMIN_EMAILS.split(","):
is_admin = True
@@ -167,7 +161,6 @@ class AuthService:
# Проверяем уникальность email
with local_session() as session:
Author = get_author_model()
existing_user = session.query(Author).where(Author.email == user_dict["email"]).first()
if existing_user:
# Если пользователь с таким email уже существует, возвращаем его
@@ -180,7 +173,6 @@ class AuthService:
# Проверяем уникальность slug
with local_session() as session:
# Добавляем суффикс, если slug уже существует
Author = get_author_model()
counter = 1
unique_slug = base_slug
while session.query(Author).where(Author.slug == unique_slug).first():
@@ -267,7 +259,6 @@ class AuthService:
logger.info(f"Попытка регистрации для {email}")
with local_session() as session:
Author = get_author_model()
user = session.query(Author).where(Author.email == email).first()
if user:
logger.warning(f"Пользователь {email} уже существует")
@@ -307,7 +298,6 @@ class AuthService:
"""Отправляет ссылку подтверждения на email"""
email = email.lower()
with local_session() as session:
Author = get_author_model()
user = session.query(Author).where(Author.email == email).first()
if not user:
raise ObjectNotExistError("User not found")
@@ -345,7 +335,6 @@ class AuthService:
username = payload.get("username")
with local_session() as session:
Author = get_author_model()
user = session.query(Author).where(Author.id == user_id).first()
if not user:
logger.warning(f"Пользователь с ID {user_id} не найден")
@@ -380,7 +369,6 @@ class AuthService:
try:
with local_session() as session:
Author = get_author_model()
author = session.query(Author).where(Author.email == email).first()
if not author:
logger.warning(f"Пользователь {email} не найден")

View File

@@ -6,8 +6,8 @@ import orjson
from orm.notification import Notification
from orm.reaction import Reaction
from orm.shout import Shout
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger

View File

@@ -34,7 +34,7 @@ background_tasks = []
# Import Redis client if Redis caching is enabled
if SEARCH_USE_REDIS:
try:
from services.redis import redis
from storage.redis import redis
logger.info("Redis client imported for search caching")
except ImportError:

View File

@@ -18,8 +18,8 @@ from google.analytics.data_v1beta.types import Filter as GAFilter
from auth.orm import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger
GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")

0
storage/__init__.py Normal file
View File

View File

@@ -2,7 +2,7 @@ import os
from dataclasses import dataclass
from typing import ClassVar
from services.redis import redis
from storage.redis import redis
from utils.logger import root_logger as logger

View File

@@ -9,10 +9,11 @@ from ariadne import (
load_schema_from_path,
)
from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating
# Импорт Author, AuthorBookmark, AuthorFollower, AuthorRating отложен для избежания циклических импортов
from orm import collection, community, draft, invite, notification, reaction, shout, topic
from services.db import create_table_if_not_exists, local_session
from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating
from storage.db import create_table_if_not_exists, local_session
# Создаем основные типы
query = QueryType()

View File

@@ -7,7 +7,7 @@ from starlette.responses import JSONResponse, RedirectResponse
from auth.oauth import get_user_profile, oauth_callback_http, oauth_login_http
from auth.orm import Author
from services.db import local_session
from storage.db import local_session
# Настройка логгера
logger = logging.getLogger(__name__)

View File

@@ -14,7 +14,7 @@ import asyncio
from typing import Optional, Generator, AsyncGenerator
from contextlib import asynccontextmanager
from services.redis import redis
from storage.redis import redis
from orm.base import BaseModel as Base
@@ -574,7 +574,7 @@ def mock_verify(monkeypatch):
@pytest.fixture
def redis_client():
"""Создает Redis клиент для тестов токенов"""
from services.redis import RedisService
from storage.redis import RedisService
redis_service = RedisService()
return redis_service._client
@@ -593,7 +593,7 @@ def mock_redis_if_unavailable():
yield
except Exception:
# Redis недоступен, мокаем
with patch('services.redis.RedisService') as mock_redis:
with patch('storage.redis.RedisService') as mock_redis:
# Создаем базовый mock для Redis методов
mock_redis.return_value.get.return_value = None
mock_redis.return_value.set.return_value = True

View File

@@ -11,7 +11,7 @@ from unittest.mock import patch, MagicMock
from auth.orm import Author
from orm.community import Community, CommunityAuthor
from services.db import local_session
from storage.db import local_session
# Используем общую фикстуру из conftest.py

View File

@@ -13,7 +13,7 @@ from auth.internal import verify_internal_auth
from auth.permissions import ContextualPermissionCheck
from orm.community import Community, CommunityAuthor
from auth.permissions import ContextualPermissionCheck
from services.db import local_session
from storage.db import local_session
# Используем общую фикстуру из conftest.py

View File

@@ -18,7 +18,7 @@ from orm.community import (
assign_role_to_user,
remove_role_from_user
)
from services.db import local_session
from storage.db import local_session
# Используем общую фикстуру из conftest.py

View File

@@ -12,13 +12,13 @@ from unittest.mock import patch, MagicMock
from auth.orm import Author
from orm.community import Community, CommunityAuthor
from services.rbac import (
from rbac.api import (
initialize_community_permissions,
get_permissions_for_role,
user_has_permission,
roles_have_permission
)
from services.db import local_session
from storage.db import local_session
@pytest.fixture

View File

@@ -55,8 +55,8 @@ def create_test_app():
from ariadne.asgi import GraphQL
from starlette.responses import JSONResponse
from services.db import Base
from services.schema import resolvers
from storage.db import Base
from storage.schema import resolvers
# Создаем движок и таблицы
engine = create_engine(

View File

@@ -5,18 +5,18 @@ import pytest
# Импортируем все модули для покрытия
import services
import services.db
import services.redis
import services.rbac
import storage.db
import storage.redis
import rbac.api
import services.admin
import services.auth
import services.common_result
import services.env
import services.exception
import utils.common_result
import storage.env
import utils.exception
import services.notify
import services.schema
import storage.schema
import services.search
import services.sentry
import utils.sentry
import services.viewed
import utils
@@ -83,18 +83,18 @@ class TestCoverageImports:
def test_services_imports(self):
"""Тест импорта модулей services"""
assert services is not None
assert services.db is not None
assert services.redis is not None
assert services.rbac is not None
assert storage.db is not None
assert storage.redis is not None
assert rbac.api is not None
assert services.admin is not None
assert services.auth is not None
assert services.common_result is not None
assert services.env is not None
assert services.exception is not None
assert utils.common_result is not None
assert storage.env is not None
assert utils.exception is not None
assert services.notify is not None
assert services.schema is not None
assert storage.schema is not None
assert services.search is not None
assert services.sentry is not None
assert utils.sentry is not None
assert services.viewed is not None
def test_utils_imports(self):

View File

@@ -5,8 +5,8 @@
import pytest
import json
from unittest.mock import Mock
from services.redis import redis
from services.db import local_session
from storage.redis import redis
from storage.db import local_session
from orm.community import Community

View File

@@ -6,7 +6,7 @@ import time
from sqlalchemy import create_engine, Column, Integer, String, inspect
from sqlalchemy.orm import declarative_base, Session
from services.db import create_table_if_not_exists, get_column_names_without_virtual, local_session
from storage.db import create_table_if_not_exists, get_column_names_without_virtual, local_session
# Создаем базовую модель для тестирования
Base = declarative_base()

View File

@@ -95,9 +95,9 @@ async def test_create_shout(db_session, test_author):
# Мокаем local_session чтобы использовать тестовую сессию
from unittest.mock import patch
from services.db import local_session
from storage.db import local_session
with patch('services.db.local_session') as mock_local_session:
with patch('storage.db.local_session') as mock_local_session:
mock_local_session.return_value = db_session
result = await create_draft(
@@ -126,9 +126,9 @@ async def test_load_drafts(db_session):
# Мокаем local_session чтобы использовать тестовую сессию
from unittest.mock import patch
from services.db import local_session
from storage.db import local_session
with patch('services.db.local_session') as mock_local_session:
with patch('storage.db.local_session') as mock_local_session:
mock_local_session.return_value = db_session
# Вызываем резолвер напрямую

View File

@@ -16,7 +16,7 @@ import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from cache.cache import get_cached_follower_topics
from services.redis import redis
from storage.redis import redis
from utils.logger import root_logger as logger

View File

@@ -12,7 +12,7 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
def test_rbac_import():
"""Тестируем импорт RBAC модуля"""
try:
from services.rbac import require_any_permission, require_permission
from rbac.api import require_any_permission, require_permission
print("✅ RBAC модуль импортирован успешно")
@@ -29,7 +29,7 @@ def test_rbac_import():
def test_require_permission_decorator():
"""Тестируем декоратор require_permission"""
try:
from services.rbac import require_permission
from rbac.api import require_permission
@require_permission("test:permission")
async def test_func(*args, **kwargs):

View File

@@ -12,14 +12,14 @@ import json
from auth.orm import Author
from orm.community import Community, CommunityAuthor
from services.rbac import (
from rbac.api import (
initialize_community_permissions,
get_permissions_for_role,
user_has_permission,
roles_have_permission
)
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
@pytest.fixture

View File

@@ -10,14 +10,14 @@ from unittest.mock import patch, MagicMock
from auth.orm import Author
from orm.community import Community, CommunityAuthor
from services.rbac import (
from rbac.api import (
initialize_community_permissions,
get_role_permissions_for_community,
get_permissions_for_role,
user_has_permission,
roles_have_permission
)
from services.db import local_session
from storage.db import local_session
@pytest.fixture
@@ -180,7 +180,7 @@ class TestRBACPermissionChecking:
async def test_user_with_author_role_has_reader_permissions(self, db_session, test_users, test_community):
"""Тест что пользователь с ролью author имеет разрешения reader"""
# Используем local_session для создания записи
from services.db import local_session
from storage.db import local_session
from orm.community import CommunityAuthor
with local_session() as session:
@@ -214,7 +214,7 @@ class TestRBACPermissionChecking:
async def test_user_with_editor_role_has_author_permissions(self, db_session, test_users, test_community):
"""Тест что пользователь с ролью editor имеет разрешения author"""
# Используем local_session для создания записи
from services.db import local_session
from storage.db import local_session
from orm.community import CommunityAuthor
with local_session() as session:
@@ -248,7 +248,7 @@ class TestRBACPermissionChecking:
async def test_user_with_admin_role_has_all_permissions(self, db_session, test_users, test_community):
"""Тест что пользователь с ролью admin имеет все разрешения"""
# Используем local_session для создания записи
from services.db import local_session
from storage.db import local_session
from orm.community import CommunityAuthor
with local_session() as session:

View File

@@ -9,7 +9,7 @@ import pytest
import redis.asyncio as aioredis
from redis.asyncio import Redis
from services.redis import (
from storage.redis import (
RedisService,
close_redis,
init_redis,
@@ -28,7 +28,7 @@ class TestRedisServiceInitialization:
def test_redis_service_init_without_aioredis(self):
"""Тест инициализации без aioredis"""
with patch("services.redis.aioredis", None):
with patch("storage.redis.aioredis", None):
service = RedisService()
assert service._is_available is False
@@ -58,7 +58,7 @@ class TestRedisConnectionManagement:
"""Тест успешного подключения"""
service = RedisService()
with patch("services.redis.aioredis.from_url") as mock_from_url:
with patch("storage.redis.aioredis.from_url") as mock_from_url:
mock_client = AsyncMock()
mock_client.ping = AsyncMock(return_value=True)
mock_from_url.return_value = mock_client
@@ -73,7 +73,7 @@ class TestRedisConnectionManagement:
"""Тест неудачного подключения"""
service = RedisService()
with patch("services.redis.aioredis.from_url") as mock_from_url:
with patch("storage.redis.aioredis.from_url") as mock_from_url:
mock_from_url.side_effect = Exception("Connection failed")
await service.connect()
@@ -84,7 +84,7 @@ class TestRedisConnectionManagement:
@pytest.mark.asyncio
async def test_connect_without_aioredis(self):
"""Тест подключения без aioredis"""
with patch("services.redis.aioredis", None):
with patch("storage.redis.aioredis", None):
service = RedisService()
await service.connect()
assert service._client is None
@@ -96,7 +96,7 @@ class TestRedisConnectionManagement:
mock_existing_client = AsyncMock()
service._client = mock_existing_client
with patch("services.redis.aioredis.from_url") as mock_from_url:
with patch("storage.redis.aioredis.from_url") as mock_from_url:
mock_client = AsyncMock()
mock_client.ping = AsyncMock(return_value=True)
mock_from_url.return_value = mock_client
@@ -149,7 +149,7 @@ class TestRedisCommandExecution:
@pytest.mark.asyncio
async def test_execute_without_aioredis(self):
"""Тест выполнения команды без aioredis"""
with patch("services.redis.aioredis", None):
with patch("storage.redis.aioredis", None):
service = RedisService()
result = await service.execute("test_command")
assert result is None
@@ -874,7 +874,7 @@ class TestAdditionalRedisCoverage:
service._client = mock_client
mock_client.close.side_effect = Exception("Close error")
with patch('services.redis.aioredis.from_url') as mock_from_url:
with patch('storage.redis.aioredis.from_url') as mock_from_url:
mock_new_client = AsyncMock()
mock_from_url.return_value = mock_new_client

View File

@@ -7,7 +7,7 @@
import pytest
import asyncio
import json
from services.redis import RedisService
from storage.redis import RedisService
class TestRedisFunctionality:

View File

@@ -14,7 +14,7 @@ import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from cache.cache import get_cached_follower_topics
from services.redis import redis
from storage.redis import redis
from utils.logger import root_logger as logger

View File

@@ -17,8 +17,8 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from cache.cache import get_cached_follower_topics
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger

View File

@@ -22,7 +22,7 @@ from auth.orm import Author
from orm.community import assign_role_to_user
from orm.shout import Shout
from resolvers.editor import unpublish_shout
from services.db import local_session
from storage.db import local_session
# Настройка логгера
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

View File

@@ -18,7 +18,7 @@ sys.path.append(str(Path(__file__).parent))
from auth.orm import Author
from resolvers.auth import update_security
from services.db import local_session
from storage.db import local_session
# Настройка логгера
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

View File

@@ -11,12 +11,6 @@ from orm.shout import Shout
from orm.topic import Topic
from utils.logger import root_logger as logger
# Отложенный импорт Author для избежания циклических импортов
def get_author_model():
"""Возвращает модель Author для использования в common_result"""
from auth.orm import Author
return Author
def handle_error(operation: str, error: Exception) -> GraphQLError:
"""Обрабатывает ошибки в резолверах"""

View File

@@ -4,7 +4,7 @@ JSON encoders and utilities
import json
from datetime import date, datetime
from typing import Any, Union
from typing import Any
import orjson
@@ -23,7 +23,7 @@ def default_json_encoder(obj: Any) -> Any:
TypeError: Если объект не может быть сериализован
"""
# Обработка datetime
if isinstance(obj, (datetime, date)):
if isinstance(obj, (datetime | date)):
return obj.isoformat()
serialized = False
@@ -75,7 +75,7 @@ def orjson_dumps(obj: Any, **kwargs: Any) -> bytes:
return orjson.dumps(obj, default=default_json_encoder, **kwargs)
def orjson_loads(data: Union[str, bytes]) -> Any:
def orjson_loads(data: str | bytes) -> Any:
"""
Десериализация объекта с помощью orjson.
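A brief usage sketch of these helpers, assuming they are exported from `utils.encoders` as the imports elsewhere in this PR suggest:

```python
# Hedged usage sketch; round-trips a payload containing a datetime through the
# helpers shown in this hunk (orjson renders naive datetimes as ISO 8601).
from datetime import datetime

from utils.encoders import orjson_dumps, orjson_loads

payload = {"slug": "welcome", "published_at": datetime(2025, 8, 17, 12, 0)}

raw: bytes = orjson_dumps(payload)   # e.g. b'{"slug":"welcome","published_at":"2025-08-17T12:00:00"}'
restored = orjson_loads(raw)         # accepts str | bytes per the new signature

assert restored["published_at"] == "2025-08-17T12:00:00"
```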

View File

@@ -2,7 +2,7 @@ import re
from urllib.parse import quote_plus
from auth.orm import Author
from services.db import local_session
from storage.db import local_session
def replace_translit(src: str | None) -> str:

View File

@@ -16,7 +16,7 @@ logger.setLevel(logging.DEBUG) # Более подробное логирова
def start_sentry() -> None:
try:
logger.info("[services.sentry] Sentry init started...")
logger.info("[utils.sentry] Sentry init started...")
sentry_sdk.init(
dsn=GLITCHTIP_DSN,
traces_sample_rate=1.0, # Захват 100% транзакций
@@ -25,6 +25,6 @@ def start_sentry() -> None:
integrations=[StarletteIntegration(), AriadneIntegration(), SqlalchemyIntegration()],
send_default_pii=True, # Отправка информации о пользователе (PII)
)
logger.info("[services.sentry] Sentry initialized successfully.")
logger.info("[utils.sentry] Sentry initialized successfully.")
except (sentry_sdk.utils.BadDsn, ImportError, ValueError, TypeError) as _e:
logger.warning("[services.sentry] Failed to initialize Sentry", exc_info=True)
logger.warning("[utils.sentry] Failed to initialize Sentry", exc_info=True)