@@ -14,17 +14,11 @@ from auth.orm import Author
from orm.community import Community, CommunityAuthor, role_descriptions, role_names
from orm.invite import Invite, InviteStatus
from orm.shout import Shout
from services.db import local_session
from services.env import EnvVariable, env_manager
from storage.db import local_session
from storage.env import EnvVariable, env_manager
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger

# Deferred Author import to avoid circular imports
def get_author_model():
    """Returns the Author model for use in admin"""
    from auth.orm import Author
    return Author


class AdminService:
    """Service for the admin panel with business logic"""
@@ -59,7 +53,6 @@ class AdminService:
                "slug": "system",
            }

        Author = get_author_model()
        author = session.query(Author).where(Author.id == author_id).first()
        if author:
            return {

@@ -29,8 +29,8 @@ from orm.community import (
    assign_role_to_user,
    get_user_roles_in_community,
)
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from settings import (
    ADMIN_EMAILS,
    SESSION_COOKIE_NAME,
@@ -39,11 +39,6 @@ from settings import (
from utils.generate_slug import generate_unique_slug
from utils.logger import root_logger as logger

# Author is already imported at the top of the file
def get_author_model():
    """Returns the Author model for use in auth"""
    return Author

# List of allowed headers
ALLOWED_HEADERS = ["Authorization", "Content-Type"]

@@ -113,7 +108,6 @@ class AuthService:
        # Check admin rights via email if there is no admin role
        if not is_admin:
            with local_session() as session:
                Author = get_author_model()
                author = session.query(Author).where(Author.id == user_id_int).first()
                if author and author.email in ADMIN_EMAILS.split(","):
                    is_admin = True
@@ -167,7 +161,6 @@ class AuthService:

        # Check email uniqueness
        with local_session() as session:
            Author = get_author_model()
            existing_user = session.query(Author).where(Author.email == user_dict["email"]).first()
            if existing_user:
                # If a user with this email already exists, return it
@@ -180,7 +173,6 @@ class AuthService:
        # Check slug uniqueness
        with local_session() as session:
            # Add a suffix if the slug already exists
            Author = get_author_model()
            counter = 1
            unique_slug = base_slug
            while session.query(Author).where(Author.slug == unique_slug).first():
@@ -267,7 +259,6 @@ class AuthService:
        logger.info(f"Попытка регистрации для {email}")

        with local_session() as session:
            Author = get_author_model()
            user = session.query(Author).where(Author.email == email).first()
            if user:
                logger.warning(f"Пользователь {email} уже существует")
@@ -307,7 +298,6 @@ class AuthService:
        """Sends a confirmation link to the user's email"""
        email = email.lower()
        with local_session() as session:
            Author = get_author_model()
            user = session.query(Author).where(Author.email == email).first()
            if not user:
                raise ObjectNotExistError("User not found")
@@ -345,7 +335,6 @@ class AuthService:
        username = payload.get("username")

        with local_session() as session:
            Author = get_author_model()
            user = session.query(Author).where(Author.id == user_id).first()
            if not user:
                logger.warning(f"Пользователь с ID {user_id} не найден")
@@ -380,7 +369,6 @@ class AuthService:

        try:
            with local_session() as session:
                Author = get_author_model()
                author = session.query(Author).where(Author.email == email).first()
                if not author:
                    logger.warning(f"Пользователь {email} не найден")

@@ -1,54 +0,0 @@
from dataclasses import dataclass
from typing import Any

from graphql.error import GraphQLError

# The Author import is deferred to avoid circular imports
from orm.community import Community
from orm.draft import Draft
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic
from utils.logger import root_logger as logger

# Deferred Author import to avoid circular imports
def get_author_model():
    """Returns the Author model for use in common_result"""
    from auth.orm import Author
    return Author


def handle_error(operation: str, error: Exception) -> GraphQLError:
    """Handles errors in resolvers"""
    logger.error(f"Ошибка при {operation}: {error}")
    return GraphQLError(f"Не удалось {operation}: {error}")


@dataclass
class CommonResult:
    """Common result for GraphQL queries"""

    error: str | None = None
    drafts: list[Draft] | None = None  # Draft objects
    draft: Draft | None = None  # Draft object
    slugs: list[str] | None = None
    shout: Shout | None = None
    shouts: list[Shout] | None = None
    author: Any | None = None  # Author type resolved at runtime
    authors: list[Any] | None = None  # Author type resolved at runtime
    reaction: Reaction | None = None
    reactions: list[Reaction] | None = None
    topic: Topic | None = None
    topics: list[Topic] | None = None
    community: Community | None = None
    communities: list[Community] | None = None


@dataclass
class AuthorFollowsResult:
    """Result for the get_author_follows query"""

    topics: list[Any] | None = None  # Topic dicts
    authors: list[Any] | None = None  # Author dicts
    communities: list[Any] | None = None  # Community dicts
    error: str | None = None
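
For orientation, a hedged sketch of how these result dataclasses are typically returned from a resolver; the resolver name and the lookup are illustrative assumptions, not part of this commit:

# Hypothetical resolver: wraps a lookup and reports failures via CommonResult.
async def load_shout(_, info, slug: str) -> CommonResult:
    try:
        shout = ...  # fetch the Shout by slug from the database (omitted)
        return CommonResult(shout=shout)
    except Exception as exc:
        return CommonResult(error=str(handle_error("load_shout", exc)))
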
services/db.py (deleted, 230 lines)
@@ -1,230 +0,0 @@
|
||||
import math
|
||||
import time
|
||||
import traceback
|
||||
import warnings
|
||||
from io import TextIOWrapper
|
||||
from typing import Any, Type, TypeVar
|
||||
|
||||
import sqlalchemy
|
||||
from sqlalchemy import create_engine, event, exc, func, inspect
|
||||
from sqlalchemy.dialects.sqlite import insert
|
||||
from sqlalchemy.engine import Connection, Engine
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, configure_mappers
|
||||
from sqlalchemy.pool import StaticPool
|
||||
|
||||
from settings import DB_URL
|
||||
from utils.logger import root_logger as logger
|
||||
|
||||
# Database configuration
|
||||
engine = create_engine(DB_URL, echo=False, poolclass=StaticPool if "sqlite" in DB_URL else None)
|
||||
ENGINE = engine # Backward compatibility alias
|
||||
inspector = inspect(engine)
|
||||
# Session = sessionmaker(engine)
|
||||
configure_mappers()
|
||||
T = TypeVar("T")
|
||||
FILTERED_FIELDS = ["_sa_instance_state", "search_vector"]
|
||||
|
||||
# make_searchable(Base.metadata)
|
||||
# Base.metadata.create_all(bind=engine)
|
||||
|
||||
|
||||
# Функция для вывода полного трейсбека при предупреждениях
|
||||
def warning_with_traceback(
|
||||
message: Warning | str,
|
||||
category: type[Warning],
|
||||
filename: str,
|
||||
lineno: int,
|
||||
file: TextIOWrapper | None = None,
|
||||
line: str | None = None,
|
||||
) -> None:
|
||||
tb = traceback.format_stack()
|
||||
tb_str = "".join(tb)
|
||||
print(f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}")
|
||||
|
||||
|
||||
# Установка функции вывода трейсбека для предупреждений SQLAlchemy
|
||||
warnings.showwarning = warning_with_traceback # type: ignore[assignment]
|
||||
warnings.simplefilter("always", exc.SAWarning)
|
||||
|
||||
|
||||
# Функция для извлечения SQL-запроса из контекста
|
||||
def get_statement_from_context(context: Connection) -> str | None:
|
||||
query = ""
|
||||
compiled = getattr(context, "compiled", None)
|
||||
if compiled:
|
||||
compiled_statement = getattr(compiled, "string", None)
|
||||
compiled_parameters = getattr(compiled, "params", None)
|
||||
if compiled_statement:
|
||||
if compiled_parameters:
|
||||
try:
|
||||
# Безопасное форматирование параметров
|
||||
query = compiled_statement % compiled_parameters
|
||||
except Exception:
|
||||
logger.exception("Error formatting query")
|
||||
else:
|
||||
query = compiled_statement
|
||||
if query:
|
||||
query = query.replace("\n", " ").replace("  ", " ").replace("  ", " ").strip()
|
||||
return query
|
||||
|
||||
|
||||
# Обработчик события перед выполнением запроса
|
||||
@event.listens_for(Engine, "before_cursor_execute")
|
||||
def before_cursor_execute(
|
||||
conn: Connection,
|
||||
cursor: Any,
|
||||
statement: str,
|
||||
parameters: dict[str, Any] | None,
|
||||
context: Connection,
|
||||
executemany: bool,
|
||||
) -> None:
|
||||
conn.query_start_time = time.time() # type: ignore[attr-defined]
|
||||
conn.cursor_id = id(cursor) # type: ignore[attr-defined]
|
||||
|
||||
|
||||
# Обработчик события после выполнения запроса
|
||||
@event.listens_for(Engine, "after_cursor_execute")
|
||||
def after_cursor_execute(
|
||||
conn: Connection,
|
||||
cursor: Any,
|
||||
statement: str,
|
||||
parameters: dict[str, Any] | None,
|
||||
context: Connection,
|
||||
executemany: bool,
|
||||
) -> None:
|
||||
if hasattr(conn, "cursor_id") and conn.cursor_id == id(cursor):
|
||||
query = get_statement_from_context(context)
|
||||
if query:
|
||||
elapsed = time.time() - getattr(conn, "query_start_time", time.time())
|
||||
if elapsed > 1:
|
||||
query_end = query[-16:]
|
||||
query = query.split(query_end)[0] + query_end
|
||||
logger.debug(query)
|
||||
elapsed_n = math.floor(elapsed)
|
||||
logger.debug("*" * (elapsed_n))
|
||||
logger.debug(f"{elapsed:.3f} s")
|
||||
if hasattr(conn, "cursor_id"):
|
||||
delattr(conn, "cursor_id") # Удаление идентификатора курсора после выполнения
|
||||
|
||||
|
||||
def get_json_builder() -> tuple[Any, Any, Any]:
|
||||
"""
|
||||
Возвращает подходящие функции для построения JSON объектов в зависимости от драйвера БД
|
||||
"""
|
||||
dialect = engine.dialect.name
|
||||
json_cast = lambda x: x # noqa: E731
|
||||
if dialect.startswith("postgres"):
|
||||
json_cast = lambda x: func.cast(x, sqlalchemy.Text) # noqa: E731
|
||||
return func.json_build_object, func.json_agg, json_cast
|
||||
if dialect.startswith(("sqlite", "mysql")):
|
||||
return func.json_object, func.json_group_array, json_cast
|
||||
msg = f"JSON builder not implemented for dialect {dialect}"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
|
||||
# Используем их в коде
|
||||
json_builder, json_array_builder, json_cast = get_json_builder()
|
||||
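
For context, a hedged sketch of how these dialect-aware helpers are combined in a query; the Shout columns are illustrative:

# Illustrative aggregation: one JSON array of {"id": ..., "slug": ...} objects.
# json_builder/json_array_builder already resolve to the right functions for the
# active dialect (json_build_object/json_agg on PostgreSQL,
# json_object/json_group_array on SQLite and MySQL).
from orm.shout import Shout  # assumed model from this codebase

stmt = sqlalchemy.select(
    json_array_builder(json_builder("id", Shout.id, "slug", Shout.slug))
)
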
|
||||
|
||||
def create_table_if_not_exists(
|
||||
connection_or_engine_or_session: Connection | Engine | Session, model_cls: Type[DeclarativeBase]
|
||||
) -> None:
|
||||
"""Creates table for the given model if it doesn't exist"""
|
||||
|
||||
# Handle different input types
|
||||
if isinstance(connection_or_engine_or_session, Session):
|
||||
# Use session's bind
|
||||
connection = connection_or_engine_or_session.get_bind()
|
||||
should_close = False
|
||||
elif isinstance(connection_or_engine_or_session, Engine):
|
||||
# Get a connection from engine
|
||||
connection = connection_or_engine_or_session.connect()
|
||||
should_close = True
|
||||
else:
|
||||
# Already a connection
|
||||
connection = connection_or_engine_or_session
|
||||
should_close = False
|
||||
|
||||
try:
|
||||
inspector = inspect(connection)
|
||||
if not inspector.has_table(model_cls.__tablename__):
|
||||
# Use SQLAlchemy's built-in table creation instead of manual SQL generation
|
||||
model_cls.__table__.create(bind=connection, checkfirst=False) # type: ignore[attr-defined]
|
||||
logger.info(f"Created table: {model_cls.__tablename__}")
|
||||
finally:
|
||||
# Close connection only if we created it
|
||||
if should_close and hasattr(connection, "close"):
|
||||
connection.close() # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def get_column_names_without_virtual(model_cls: Type[DeclarativeBase]) -> list[str]:
|
||||
"""Получает имена колонок модели без виртуальных полей"""
|
||||
try:
|
||||
column_names: list[str] = [
|
||||
col.name for col in model_cls.__table__.columns if not getattr(col, "_is_virtual", False)
|
||||
]
|
||||
return column_names
|
||||
except AttributeError:
|
||||
return []
|
||||
|
||||
|
||||
def format_sql_warning(
|
||||
message: str | Warning,
|
||||
category: type[Warning],
|
||||
filename: str,
|
||||
lineno: int,
|
||||
file: TextIOWrapper | None = None,
|
||||
line: str | None = None,
|
||||
) -> str:
|
||||
"""Custom warning formatter for SQL warnings"""
|
||||
return f"SQL Warning: {message}\n"
|
||||
|
||||
|
||||
# Apply the custom warning formatter
|
||||
def _set_warning_formatter() -> None:
|
||||
"""Set custom warning formatter"""
|
||||
|
||||
def custom_formatwarning(
|
||||
message: str, category: type[Warning], filename: str, lineno: int, line: str | None = None
|
||||
) -> str:
|
||||
return f"{category.__name__}: {message}\n"
|
||||
|
||||
warnings.formatwarning = custom_formatwarning # type: ignore[assignment]
|
||||
|
||||
|
||||
_set_warning_formatter()
|
||||
|
||||
|
||||
def upsert_on_duplicate(table: sqlalchemy.Table, **values: Any) -> sqlalchemy.sql.Insert:
|
||||
"""
|
||||
Performs an upsert operation (insert or update on conflict)
|
||||
"""
|
||||
if engine.dialect.name == "sqlite":
|
||||
return insert(table).values(**values).on_conflict_do_update(index_elements=["id"], set_=values)
|
||||
# For other databases, implement appropriate upsert logic
|
||||
return table.insert().values(**values)
|
||||
|
||||
|
||||
def get_sql_functions() -> dict[str, Any]:
|
||||
"""Returns database-specific SQL functions"""
|
||||
if engine.dialect.name == "sqlite":
|
||||
return {
|
||||
"now": sqlalchemy.func.datetime("now"),
|
||||
"extract_epoch": lambda x: sqlalchemy.func.strftime("%s", x),
|
||||
"coalesce": sqlalchemy.func.coalesce,
|
||||
}
|
||||
return {
|
||||
"now": sqlalchemy.func.now(),
|
||||
"extract_epoch": sqlalchemy.func.extract("epoch", sqlalchemy.text("?")),
|
||||
"coalesce": sqlalchemy.func.coalesce,
|
||||
}
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
def local_session(src: str = "") -> Session:
|
||||
"""Create a new database session"""
|
||||
return Session(bind=engine, expire_on_commit=False)
|
||||
|
||||
|
||||
# Also export the type for type hints
|
||||
__all__ = ["engine", "local_session"]
|
||||
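
A short usage sketch of local_session, matching how the callers elsewhere in this diff use it; the queried model is illustrative:

# The returned Session supports the context-manager protocol, and
# expire_on_commit=False lets loaded objects be read after the session closes.
from auth.orm import Author  # illustrative model

with local_session() as session:
    author = session.query(Author).where(Author.id == 1).first()
    if author:
        print(author.slug)
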
@@ -1,121 +0,0 @@
|
||||
{
|
||||
"reader": [
|
||||
"shout:read",
|
||||
"topic:read",
|
||||
"collection:read",
|
||||
"community:read",
|
||||
"bookmark:read",
|
||||
"bookmark:create",
|
||||
"bookmark:update",
|
||||
"bookmark:delete",
|
||||
"invite:read",
|
||||
"invite:accept",
|
||||
"invite:decline",
|
||||
"chat:read",
|
||||
"chat:create",
|
||||
"chat:update",
|
||||
"chat:delete",
|
||||
"message:read",
|
||||
"message:create",
|
||||
"message:update",
|
||||
"message:delete",
|
||||
"reaction:read:COMMENT",
|
||||
"reaction:create:COMMENT",
|
||||
"reaction:update:COMMENT",
|
||||
"reaction:delete:COMMENT",
|
||||
"reaction:read:QUOTE",
|
||||
"reaction:create:QUOTE",
|
||||
"reaction:update:QUOTE",
|
||||
"reaction:delete:QUOTE",
|
||||
"reaction:read:LIKE",
|
||||
"reaction:create:LIKE",
|
||||
"reaction:update:LIKE",
|
||||
"reaction:delete:LIKE",
|
||||
"reaction:read:DISLIKE",
|
||||
"reaction:create:DISLIKE",
|
||||
"reaction:update:DISLIKE",
|
||||
"reaction:delete:DISLIKE",
|
||||
"reaction:read:CREDIT",
|
||||
"reaction:read:PROOF",
|
||||
"reaction:read:DISPROOF",
|
||||
"reaction:read:AGREE",
|
||||
"reaction:read:DISAGREE"
|
||||
],
|
||||
"author": [
|
||||
"reader",
|
||||
"draft:read",
|
||||
"draft:create",
|
||||
"draft:update",
|
||||
"draft:delete",
|
||||
"shout:create",
|
||||
"shout:update",
|
||||
"shout:delete",
|
||||
"collection:create",
|
||||
"collection:update",
|
||||
"collection:delete",
|
||||
"invite:create",
|
||||
"invite:update",
|
||||
"invite:delete",
|
||||
"reaction:create:SILENT",
|
||||
"reaction:read:SILENT",
|
||||
"reaction:update:SILENT",
|
||||
"reaction:delete:SILENT"
|
||||
],
|
||||
"artist": [
|
||||
"author",
|
||||
"reaction:create:CREDIT",
|
||||
"reaction:read:CREDIT",
|
||||
"reaction:update:CREDIT",
|
||||
"reaction:delete:CREDIT"
|
||||
],
|
||||
"expert": [
|
||||
"reader",
|
||||
"reaction:create:PROOF",
|
||||
"reaction:read:PROOF",
|
||||
"reaction:update:PROOF",
|
||||
"reaction:delete:PROOF",
|
||||
"reaction:create:DISPROOF",
|
||||
"reaction:read:DISPROOF",
|
||||
"reaction:update:DISPROOF",
|
||||
"reaction:delete:DISPROOF",
|
||||
"reaction:create:AGREE",
|
||||
"reaction:read:AGREE",
|
||||
"reaction:update:AGREE",
|
||||
"reaction:delete:AGREE",
|
||||
"reaction:create:DISAGREE",
|
||||
"reaction:read:DISAGREE",
|
||||
"reaction:update:DISAGREE",
|
||||
"reaction:delete:DISAGREE"
|
||||
],
|
||||
"editor": [
|
||||
"author",
|
||||
"shout:delete_any",
|
||||
"shout:update_any",
|
||||
"topic:create",
|
||||
"topic:delete",
|
||||
"topic:update",
|
||||
"topic:merge",
|
||||
"reaction:delete_any:*",
|
||||
"reaction:update_any:*",
|
||||
"invite:delete_any",
|
||||
"invite:update_any",
|
||||
"collection:delete_any",
|
||||
"collection:update_any",
|
||||
"community:create",
|
||||
"community:update",
|
||||
"community:delete",
|
||||
"draft:delete_any",
|
||||
"draft:update_any"
|
||||
],
|
||||
"admin": [
|
||||
"editor",
|
||||
"author:delete_any",
|
||||
"author:update_any",
|
||||
"chat:delete_any",
|
||||
"chat:update_any",
|
||||
"message:delete_any",
|
||||
"message:update_any",
|
||||
"community:delete_any",
|
||||
"community:update_any"
|
||||
]
|
||||
}
|
||||
services/env.py (deleted, 324 lines)
@@ -1,324 +0,0 @@
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import ClassVar
|
||||
|
||||
from services.redis import redis
|
||||
from utils.logger import root_logger as logger
|
||||
|
||||
|
||||
@dataclass
|
||||
class EnvVariable:
|
||||
"""Переменная окружения"""
|
||||
|
||||
key: str
|
||||
value: str
|
||||
description: str
|
||||
is_secret: bool = False
|
||||
|
||||
|
||||
@dataclass
|
||||
class EnvSection:
|
||||
"""Секция переменных окружения"""
|
||||
|
||||
name: str
|
||||
description: str
|
||||
variables: list[EnvVariable]
|
||||
|
||||
|
||||
class EnvService:
|
||||
"""Сервис для работы с переменными окружения"""
|
||||
|
||||
redis_prefix = "env:"
|
||||
|
||||
# Определение секций с их описаниями
|
||||
SECTIONS: ClassVar[dict[str, str]] = {
|
||||
"database": "Настройки базы данных",
|
||||
"auth": "Настройки аутентификации",
|
||||
"redis": "Настройки Redis",
|
||||
"search": "Настройки поиска",
|
||||
"integrations": "Внешние интеграции",
|
||||
"security": "Настройки безопасности",
|
||||
"logging": "Настройки логирования",
|
||||
"features": "Флаги функций",
|
||||
"other": "Прочие настройки",
|
||||
}
|
||||
|
||||
# Маппинг переменных на секции
|
||||
VARIABLE_SECTIONS: ClassVar[dict[str, str]] = {
|
||||
# Database
|
||||
"DB_URL": "database",
|
||||
"DATABASE_URL": "database",
|
||||
"POSTGRES_USER": "database",
|
||||
"POSTGRES_PASSWORD": "database",
|
||||
"POSTGRES_DB": "database",
|
||||
"POSTGRES_HOST": "database",
|
||||
"POSTGRES_PORT": "database",
|
||||
# Auth
|
||||
"JWT_SECRET": "auth",
|
||||
"JWT_ALGORITHM": "auth",
|
||||
"JWT_EXPIRATION": "auth",
|
||||
"SECRET_KEY": "auth",
|
||||
"AUTH_SECRET": "auth",
|
||||
"OAUTH_GOOGLE_CLIENT_ID": "auth",
|
||||
"OAUTH_GOOGLE_CLIENT_SECRET": "auth",
|
||||
"OAUTH_GITHUB_CLIENT_ID": "auth",
|
||||
"OAUTH_GITHUB_CLIENT_SECRET": "auth",
|
||||
# Redis
|
||||
"REDIS_URL": "redis",
|
||||
"REDIS_HOST": "redis",
|
||||
"REDIS_PORT": "redis",
|
||||
"REDIS_PASSWORD": "redis",
|
||||
"REDIS_DB": "redis",
|
||||
# Search
|
||||
"SEARCH_API_KEY": "search",
|
||||
"ELASTICSEARCH_URL": "search",
|
||||
"SEARCH_INDEX": "search",
|
||||
# Integrations
|
||||
"GOOGLE_ANALYTICS_ID": "integrations",
|
||||
"SENTRY_DSN": "integrations",
|
||||
"SMTP_HOST": "integrations",
|
||||
"SMTP_PORT": "integrations",
|
||||
"SMTP_USER": "integrations",
|
||||
"SMTP_PASSWORD": "integrations",
|
||||
"EMAIL_FROM": "integrations",
|
||||
# Security
|
||||
"CORS_ORIGINS": "security",
|
||||
"ALLOWED_HOSTS": "security",
|
||||
"SECURE_SSL_REDIRECT": "security",
|
||||
"SESSION_COOKIE_SECURE": "security",
|
||||
"CSRF_COOKIE_SECURE": "security",
|
||||
# Logging
|
||||
"LOG_LEVEL": "logging",
|
||||
"LOG_FORMAT": "logging",
|
||||
"LOG_FILE": "logging",
|
||||
"DEBUG": "logging",
|
||||
# Features
|
||||
"FEATURE_REGISTRATION": "features",
|
||||
"FEATURE_COMMENTS": "features",
|
||||
"FEATURE_ANALYTICS": "features",
|
||||
"FEATURE_SEARCH": "features",
|
||||
}
|
||||
|
||||
# Секретные переменные (не показываем их значения в UI)
|
||||
SECRET_VARIABLES: ClassVar[set[str]] = {
|
||||
"JWT_SECRET",
|
||||
"SECRET_KEY",
|
||||
"AUTH_SECRET",
|
||||
"OAUTH_GOOGLE_CLIENT_SECRET",
|
||||
"OAUTH_GITHUB_CLIENT_SECRET",
|
||||
"POSTGRES_PASSWORD",
|
||||
"REDIS_PASSWORD",
|
||||
"SEARCH_API_KEY",
|
||||
"SENTRY_DSN",
|
||||
"SMTP_PASSWORD",
|
||||
}
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Инициализация сервиса"""
|
||||
|
||||
def get_variable_description(self, key: str) -> str:
|
||||
"""Получает описание переменной окружения"""
|
||||
descriptions = {
|
||||
"DB_URL": "URL подключения к базе данных",
|
||||
"DATABASE_URL": "URL подключения к базе данных",
|
||||
"POSTGRES_USER": "Пользователь PostgreSQL",
|
||||
"POSTGRES_PASSWORD": "Пароль PostgreSQL",
|
||||
"POSTGRES_DB": "Имя базы данных PostgreSQL",
|
||||
"POSTGRES_HOST": "Хост PostgreSQL",
|
||||
"POSTGRES_PORT": "Порт PostgreSQL",
|
||||
"JWT_SECRET": "Секретный ключ для JWT токенов",
|
||||
"JWT_ALGORITHM": "Алгоритм подписи JWT",
|
||||
"JWT_EXPIRATION": "Время жизни JWT токенов",
|
||||
"SECRET_KEY": "Секретный ключ приложения",
|
||||
"AUTH_SECRET": "Секретный ключ аутентификации",
|
||||
"OAUTH_GOOGLE_CLIENT_ID": "Google OAuth Client ID",
|
||||
"OAUTH_GOOGLE_CLIENT_SECRET": "Google OAuth Client Secret",
|
||||
"OAUTH_GITHUB_CLIENT_ID": "GitHub OAuth Client ID",
|
||||
"OAUTH_GITHUB_CLIENT_SECRET": "GitHub OAuth Client Secret",
|
||||
"REDIS_URL": "URL подключения к Redis",
|
||||
"REDIS_HOST": "Хост Redis",
|
||||
"REDIS_PORT": "Порт Redis",
|
||||
"REDIS_PASSWORD": "Пароль Redis",
|
||||
"REDIS_DB": "Номер базы данных Redis",
|
||||
"SEARCH_API_KEY": "API ключ для поиска",
|
||||
"ELASTICSEARCH_URL": "URL Elasticsearch",
|
||||
"SEARCH_INDEX": "Индекс поиска",
|
||||
"GOOGLE_ANALYTICS_ID": "Google Analytics ID",
|
||||
"SENTRY_DSN": "Sentry DSN",
|
||||
"SMTP_HOST": "SMTP сервер",
|
||||
"SMTP_PORT": "Порт SMTP",
|
||||
"SMTP_USER": "Пользователь SMTP",
|
||||
"SMTP_PASSWORD": "Пароль SMTP",
|
||||
"EMAIL_FROM": "Email отправителя",
|
||||
"CORS_ORIGINS": "Разрешенные CORS источники",
|
||||
"ALLOWED_HOSTS": "Разрешенные хосты",
|
||||
"SECURE_SSL_REDIRECT": "Принудительное SSL перенаправление",
|
||||
"SESSION_COOKIE_SECURE": "Безопасные cookies сессий",
|
||||
"CSRF_COOKIE_SECURE": "Безопасные CSRF cookies",
|
||||
"LOG_LEVEL": "Уровень логирования",
|
||||
"LOG_FORMAT": "Формат логов",
|
||||
"LOG_FILE": "Файл логов",
|
||||
"DEBUG": "Режим отладки",
|
||||
"FEATURE_REGISTRATION": "Включить регистрацию",
|
||||
"FEATURE_COMMENTS": "Включить комментарии",
|
||||
"FEATURE_ANALYTICS": "Включить аналитику",
|
||||
"FEATURE_SEARCH": "Включить поиск",
|
||||
}
|
||||
return descriptions.get(key, f"Переменная окружения {key}")
|
||||
|
||||
async def get_variables_from_redis(self) -> dict[str, str]:
|
||||
"""Получает переменные из Redis"""
|
||||
try:
|
||||
keys = await redis.keys(f"{self.redis_prefix}*")
|
||||
if not keys:
|
||||
return {}
|
||||
|
||||
redis_vars: dict[str, str] = {}
|
||||
for key in keys:
|
||||
var_key = key.replace(self.redis_prefix, "")
|
||||
value = await redis.get(key)
|
||||
if value:
|
||||
redis_vars[var_key] = str(value)
|
||||
|
||||
return redis_vars
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
async def set_variables_to_redis(self, variables: dict[str, str]) -> bool:
|
||||
"""Сохраняет переменные в Redis"""
|
||||
try:
|
||||
for key, value in variables.items():
|
||||
await redis.set(f"{self.redis_prefix}{key}", value)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_variables_from_env(self) -> dict[str, str]:
|
||||
"""Получает переменные из системного окружения"""
|
||||
env_vars = {}
|
||||
|
||||
# Получаем все переменные известные системе
|
||||
for key in self.VARIABLE_SECTIONS:
|
||||
value = os.getenv(key)
|
||||
if value is not None:
|
||||
env_vars[key] = value
|
||||
|
||||
# Получаем дополнительные переменные окружения
|
||||
env_vars.update(
|
||||
{
|
||||
env_key: env_value
|
||||
for env_key, env_value in os.environ.items()
|
||||
if any(env_key.startswith(prefix) for prefix in ["APP_", "SITE_", "FEATURE_", "OAUTH_"])
|
||||
}
|
||||
)
|
||||
|
||||
return env_vars
|
||||
|
||||
async def get_all_variables(self) -> list[EnvSection]:
|
||||
"""Получает все переменные окружения, сгруппированные по секциям"""
|
||||
# Получаем переменные из Redis и системного окружения
|
||||
redis_vars = await self.get_variables_from_redis()
|
||||
env_vars = self.get_variables_from_env()
|
||||
|
||||
# Объединяем переменные (Redis имеет приоритет)
|
||||
all_vars = {**env_vars, **redis_vars}
|
||||
|
||||
# Группируем по секциям
|
||||
sections_dict: dict[str, list[EnvVariable]] = {section: [] for section in self.SECTIONS}
|
||||
other_variables: list[EnvVariable] = [] # Для переменных, которые не попали ни в одну секцию
|
||||
|
||||
for key, value in all_vars.items():
|
||||
is_secret = key in self.SECRET_VARIABLES
|
||||
description = self.get_variable_description(key)
|
||||
|
||||
# Скрываем значение секретных переменных
|
||||
display_value = "***" if is_secret else value
|
||||
|
||||
env_var = EnvVariable(
|
||||
key=key,
|
||||
value=display_value,
|
||||
description=description,
|
||||
is_secret=is_secret,
|
||||
)
|
||||
|
||||
# Определяем секцию для переменной
|
||||
section = self.VARIABLE_SECTIONS.get(key, "other")
|
||||
if section in sections_dict:
|
||||
sections_dict[section].append(env_var)
|
||||
else:
|
||||
other_variables.append(env_var)
|
||||
|
||||
# Создаем объекты секций
|
||||
sections = []
|
||||
for section_name, section_description in self.SECTIONS.items():
|
||||
variables = sections_dict.get(section_name, [])
|
||||
if variables: # Добавляем только непустые секции
|
||||
sections.append(EnvSection(name=section_name, description=section_description, variables=variables))
|
||||
|
||||
# Добавляем секцию "other" если есть переменные
|
||||
if other_variables:
|
||||
sections.append(EnvSection(name="other", description="Прочие настройки", variables=other_variables))
|
||||
|
||||
return sorted(sections, key=lambda x: x.name)
|
||||
|
||||
async def update_variables(self, variables: list[EnvVariable]) -> bool:
|
||||
"""Обновляет переменные окружения"""
|
||||
try:
|
||||
# Подготавливаем переменные для сохранения
|
||||
vars_dict = {}
|
||||
for var in variables:
|
||||
if not var.is_secret or var.value != "***":
|
||||
vars_dict[var.key] = var.value
|
||||
|
||||
# Сохраняем в Redis
|
||||
return await self.set_variables_to_redis(vars_dict)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
async def delete_variable(self, key: str) -> bool:
|
||||
"""Удаляет переменную окружения"""
|
||||
|
||||
try:
|
||||
redis_key = f"{self.redis_prefix}{key}"
|
||||
result = await redis.delete(redis_key)
|
||||
|
||||
if result > 0:
|
||||
logger.info(f"Переменная {key} удалена")
|
||||
return True
|
||||
logger.warning(f"Переменная {key} не найдена")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Ошибка при удалении переменной {key}: {e}")
|
||||
return False
|
||||
|
||||
async def get_variable(self, key: str) -> str | None:
|
||||
"""Получает значение конкретной переменной"""
|
||||
|
||||
# Сначала проверяем Redis
|
||||
try:
|
||||
redis_key = f"{self.redis_prefix}{key}"
|
||||
value = await redis.get(redis_key)
|
||||
if value:
|
||||
return value.decode("utf-8") if isinstance(value, bytes) else str(value)
|
||||
except Exception as e:
|
||||
logger.error(f"Ошибка при получении переменной {key} из Redis: {e}")
|
||||
|
||||
# Fallback на системное окружение
|
||||
return os.getenv(key)
|
||||
|
||||
async def set_variable(self, key: str, value: str) -> bool:
|
||||
"""Устанавливает значение переменной"""
|
||||
|
||||
try:
|
||||
redis_key = f"{self.redis_prefix}{key}"
|
||||
await redis.set(redis_key, value)
|
||||
logger.info(f"Переменная {key} установлена")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Ошибка при установке переменной {key}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
env_manager = EnvService()
|
||||
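
A hedged sketch of how the env_manager singleton above is consumed; it assumes a reachable Redis instance and an async context:

import asyncio

async def demo() -> None:
    # Values are written under the "env:" prefix in Redis and take
    # precedence over os.environ when read back.
    await env_manager.set_variable("FEATURE_SEARCH", "true")
    value = await env_manager.get_variable("FEATURE_SEARCH")
    sections = await env_manager.get_all_variables()
    print(value, [section.name for section in sections])

asyncio.run(demo())
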
@@ -1,22 +0,0 @@
import logging
from collections.abc import Awaitable
from typing import Callable

from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)


class ExceptionHandlerMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next: Callable[[Request], Awaitable[Response]]) -> Response:
        try:
            return await call_next(request)
        except Exception:
            logger.exception("Unhandled exception occurred")
            return JSONResponse(
                {"detail": "An error occurred. Please try again later."},
                status_code=500,
            )
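
A minimal sketch of wiring this middleware into a Starlette application; the app construction is an assumption, not part of the diff:

from starlette.applications import Starlette

app = Starlette(routes=[])
app.add_middleware(ExceptionHandlerMiddleware)
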
@@ -6,8 +6,8 @@ import orjson
from orm.notification import Notification
from orm.reaction import Reaction
from orm.shout import Shout
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger

@@ -1,73 +0,0 @@
|
||||
{
|
||||
"shout": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"topic": ["create", "read", "update_own", "update_any", "delete_own", "delete_any", "merge"],
|
||||
"collection": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"bookmark": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"invite": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"chat": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"message": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"community": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"draft": ["create", "read", "update_own", "update_any", "delete_own", "delete_any"],
|
||||
"reaction": [
|
||||
"create:LIKE",
|
||||
"read:LIKE",
|
||||
"update_own:LIKE",
|
||||
"update_any:LIKE",
|
||||
"delete_own:LIKE",
|
||||
"delete_any:LIKE",
|
||||
"create:COMMENT",
|
||||
"read:COMMENT",
|
||||
"update_own:COMMENT",
|
||||
"update_any:COMMENT",
|
||||
"delete_own:COMMENT",
|
||||
"delete_any:COMMENT",
|
||||
"create:QUOTE",
|
||||
"read:QUOTE",
|
||||
"update_own:QUOTE",
|
||||
"update_any:QUOTE",
|
||||
"delete_own:QUOTE",
|
||||
"delete_any:QUOTE",
|
||||
"create:DISLIKE",
|
||||
"read:DISLIKE",
|
||||
"update_own:DISLIKE",
|
||||
"update_any:DISLIKE",
|
||||
"delete_own:DISLIKE",
|
||||
"delete_any:DISLIKE",
|
||||
"create:CREDIT",
|
||||
"read:CREDIT",
|
||||
"update_own:CREDIT",
|
||||
"update_any:CREDIT",
|
||||
"delete_own:CREDIT",
|
||||
"delete_any:CREDIT",
|
||||
"create:PROOF",
|
||||
"read:PROOF",
|
||||
"update_own:PROOF",
|
||||
"update_any:PROOF",
|
||||
"delete_own:PROOF",
|
||||
"delete_any:PROOF",
|
||||
"create:DISPROOF",
|
||||
"read:DISPROOF",
|
||||
"update_own:DISPROOF",
|
||||
"update_any:DISPROOF",
|
||||
"delete_own:DISPROOF",
|
||||
"delete_any:DISPROOF",
|
||||
"create:AGREE",
|
||||
"read:AGREE",
|
||||
"update_own:AGREE",
|
||||
"update_any:AGREE",
|
||||
"delete_own:AGREE",
|
||||
"delete_any:AGREE",
|
||||
"create:DISAGREE",
|
||||
"read:DISAGREE",
|
||||
"update_own:DISAGREE",
|
||||
"update_any:DISAGREE",
|
||||
"delete_own:DISAGREE",
|
||||
"delete_any:DISAGREE",
|
||||
"create:SILENT",
|
||||
"read:SILENT",
|
||||
"update_own:SILENT",
|
||||
"update_any:SILENT",
|
||||
"delete_own:SILENT",
|
||||
"delete_any:SILENT"
|
||||
]
|
||||
}
|
||||
services/rbac.py (deleted, 415 lines)
@@ -1,415 +0,0 @@
"""
RBAC: a dynamic permission system for roles and communities.

- The catalog of all entities and actions is stored in permissions_catalog.json
- Default role permissions live in default_role_permissions.json
- Custom role permissions for each community are stored in Redis (key community:roles:{community_id})
- When a community is created, the default permissions are copied automatically
- Decorators obtain the user's roles from CommunityAuthor for the specific community
"""

import asyncio
from functools import wraps
from typing import Any, Callable

from auth.orm import Author
from auth.rbac_interface import get_community_queries, get_rbac_operations
from services.db import local_session
from settings import ADMIN_EMAILS
from utils.logger import root_logger as logger

async def initialize_community_permissions(community_id: int) -> None:
|
||||
"""
|
||||
Инициализирует права для нового сообщества на основе дефолтных настроек с учетом иерархии.
|
||||
|
||||
Args:
|
||||
community_id: ID сообщества
|
||||
"""
|
||||
rbac_ops = get_rbac_operations()
|
||||
await rbac_ops.initialize_community_permissions(community_id)
|
||||
|
||||
|
||||
async def get_permissions_for_role(role: str, community_id: int) -> list[str]:
|
||||
"""
|
||||
Получает список разрешений для конкретной роли в сообществе.
|
||||
Иерархия уже применена при инициализации сообщества.
|
||||
|
||||
Args:
|
||||
role: Название роли
|
||||
community_id: ID сообщества
|
||||
|
||||
Returns:
|
||||
Список разрешений для роли
|
||||
"""
|
||||
rbac_ops = get_rbac_operations()
|
||||
return await rbac_ops.get_permissions_for_role(role, community_id)
|
||||
|
||||
|
||||
async def update_all_communities_permissions() -> None:
|
||||
"""
|
||||
Обновляет права для всех существующих сообществ на основе актуальных дефолтных настроек.
|
||||
|
||||
Используется в админ-панели для применения изменений в правах на все сообщества.
|
||||
"""
|
||||
rbac_ops = get_rbac_operations()
|
||||
|
||||
# Поздний импорт для избежания циклических зависимостей
|
||||
from orm.community import Community
|
||||
|
||||
try:
|
||||
with local_session() as session:
|
||||
# Получаем все сообщества
|
||||
communities = session.query(Community).all()
|
||||
|
||||
for community in communities:
|
||||
# Сбрасываем кеш прав для каждого сообщества
|
||||
from services.redis import redis
|
||||
key = f"community:roles:{community.id}"
|
||||
await redis.execute("DEL", key)
|
||||
|
||||
# Переинициализируем права с актуальными дефолтными настройками
|
||||
await rbac_ops.initialize_community_permissions(community.id)
|
||||
|
||||
logger.info(f"Обновлены права для {len(communities)} сообществ")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Ошибка при обновлении прав всех сообществ: {e}", exc_info=True)
|
||||
raise
|
||||
|
||||
|
||||
# --- Получение ролей пользователя ---
|
||||
|
||||
|
||||
def get_user_roles_in_community(author_id: int, community_id: int = 1, session: Any = None) -> list[str]:
|
||||
"""
|
||||
Получает роли пользователя в сообществе через новую систему CommunityAuthor
|
||||
"""
|
||||
community_queries = get_community_queries()
|
||||
return community_queries.get_user_roles_in_community(author_id, community_id, session)
|
||||
|
||||
|
||||
async def user_has_permission(author_id: int, permission: str, community_id: int, session: Any = None) -> bool:
|
||||
"""
|
||||
Проверяет, есть ли у пользователя конкретное разрешение в сообществе.
|
||||
|
||||
Args:
|
||||
author_id: ID автора
|
||||
permission: Разрешение для проверки
|
||||
community_id: ID сообщества
|
||||
session: Опциональная сессия БД (для тестов)
|
||||
|
||||
Returns:
|
||||
True если разрешение есть, False если нет
|
||||
"""
|
||||
rbac_ops = get_rbac_operations()
|
||||
return await rbac_ops.user_has_permission(author_id, permission, community_id, session)
|
||||
|
||||
|
||||
# --- Проверка прав ---
|
||||
async def roles_have_permission(role_slugs: list[str], permission: str, community_id: int) -> bool:
|
||||
"""
|
||||
Проверяет, есть ли у набора ролей конкретное разрешение в сообществе.
|
||||
|
||||
Args:
|
||||
role_slugs: Список ролей для проверки
|
||||
permission: Разрешение для проверки
|
||||
community_id: ID сообщества
|
||||
|
||||
Returns:
|
||||
True если хотя бы одна роль имеет разрешение
|
||||
"""
|
||||
rbac_ops = get_rbac_operations()
|
||||
return await rbac_ops._roles_have_permission(role_slugs, permission, community_id)
|
||||
|
||||
|
||||
# --- Декораторы ---
|
||||
class RBACError(Exception):
|
||||
"""Исключение для ошибок RBAC."""
|
||||
|
||||
|
||||
def get_user_roles_from_context(info) -> tuple[list[str], int]:
|
||||
"""
|
||||
Получение ролей пользователя из GraphQL контекста с учетом сообщества.
|
||||
|
||||
Returns:
|
||||
Кортеж (роли_пользователя, community_id)
|
||||
"""
|
||||
# Получаем ID автора из контекста
|
||||
if isinstance(info.context, dict):
|
||||
author_data = info.context.get("author", {})
|
||||
else:
|
||||
author_data = getattr(info.context, "author", {})
|
||||
author_id = author_data.get("id") if isinstance(author_data, dict) else None
|
||||
logger.debug(f"[get_user_roles_from_context] author_data: {author_data}, author_id: {author_id}")
|
||||
|
||||
# Если author_id не найден в context.author, пробуем получить из scope.auth
|
||||
if not author_id and hasattr(info.context, "request"):
|
||||
request = info.context.request
|
||||
logger.debug(f"[get_user_roles_from_context] Проверяем request.scope: {hasattr(request, 'scope')}")
|
||||
if hasattr(request, "scope") and "auth" in request.scope:
|
||||
auth_credentials = request.scope["auth"]
|
||||
logger.debug(f"[get_user_roles_from_context] Найден auth в scope: {type(auth_credentials)}")
|
||||
if hasattr(auth_credentials, "author_id") and auth_credentials.author_id:
|
||||
author_id = auth_credentials.author_id
|
||||
logger.debug(f"[get_user_roles_from_context] Получен author_id из scope.auth: {author_id}")
|
||||
elif isinstance(auth_credentials, dict) and "author_id" in auth_credentials:
|
||||
author_id = auth_credentials["author_id"]
|
||||
logger.debug(f"[get_user_roles_from_context] Получен author_id из scope.auth (dict): {author_id}")
|
||||
else:
|
||||
logger.debug("[get_user_roles_from_context] scope.auth не найден или пуст")
|
||||
if hasattr(request, "scope"):
|
||||
logger.debug(f"[get_user_roles_from_context] Ключи в scope: {list(request.scope.keys())}")
|
||||
|
||||
if not author_id:
|
||||
logger.debug("[get_user_roles_from_context] author_id не найден ни в context.author, ни в scope.auth")
|
||||
return [], 0
|
||||
|
||||
# Получаем community_id из аргументов мутации
|
||||
community_id = get_community_id_from_context(info)
|
||||
logger.debug(f"[get_user_roles_from_context] Получен community_id: {community_id}")
|
||||
|
||||
# Получаем роли пользователя в сообществе
|
||||
try:
|
||||
user_roles = get_user_roles_in_community(author_id, community_id)
|
||||
logger.debug(
|
||||
f"[get_user_roles_from_context] Роли пользователя {author_id} в сообществе {community_id}: {user_roles}"
|
||||
)
|
||||
|
||||
# Проверяем, является ли пользователь системным администратором
|
||||
try:
|
||||
admin_emails = ADMIN_EMAILS.split(",") if ADMIN_EMAILS else []
|
||||
|
||||
with local_session() as session:
|
||||
author = session.query(Author).where(Author.id == author_id).first()
|
||||
if author and author.email and author.email in admin_emails and "admin" not in user_roles:
|
||||
# Системный администратор автоматически получает роль admin в любом сообществе
|
||||
user_roles = [*user_roles, "admin"]
|
||||
logger.debug(
|
||||
f"[get_user_roles_from_context] Добавлена роль admin для системного администратора {author.email}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"[get_user_roles_from_context] Ошибка при проверке системного администратора: {e}")
|
||||
|
||||
return user_roles, community_id
|
||||
except Exception as e:
|
||||
logger.error(f"[get_user_roles_from_context] Ошибка при получении ролей: {e}")
|
||||
return [], community_id
|
||||
|
||||
|
||||
def get_community_id_from_context(info) -> int:
|
||||
"""
|
||||
Получение community_id из GraphQL контекста или аргументов.
|
||||
"""
|
||||
# Пробуем из контекста
|
||||
if isinstance(info.context, dict):
|
||||
community_id = info.context.get("community_id")
|
||||
else:
|
||||
community_id = getattr(info.context, "community_id", None)
|
||||
if community_id:
|
||||
return int(community_id)
|
||||
|
||||
# Пробуем из аргументов resolver'а
|
||||
logger.debug(
|
||||
f"[get_community_id_from_context] Проверяем info.variable_values: {getattr(info, 'variable_values', None)}"
|
||||
)
|
||||
|
||||
# Пробуем получить переменные из разных источников
|
||||
variables = {}
|
||||
|
||||
# Способ 1: info.variable_values
|
||||
if hasattr(info, "variable_values") and info.variable_values:
|
||||
variables.update(info.variable_values)
|
||||
logger.debug(f"[get_community_id_from_context] Добавлены переменные из variable_values: {info.variable_values}")
|
||||
|
||||
# Способ 2: info.variable_values (альтернативный способ)
|
||||
if hasattr(info, "variable_values"):
|
||||
logger.debug(f"[get_community_id_from_context] variable_values тип: {type(info.variable_values)}")
|
||||
logger.debug(f"[get_community_id_from_context] variable_values содержимое: {info.variable_values}")
|
||||
|
||||
# Способ 3: из kwargs (аргументы функции)
|
||||
if hasattr(info, "context") and hasattr(info.context, "kwargs"):
|
||||
variables.update(info.context.kwargs)
|
||||
logger.debug(f"[get_community_id_from_context] Добавлены переменные из context.kwargs: {info.context.kwargs}")
|
||||
|
||||
logger.debug(f"[get_community_id_from_context] Итоговые переменные: {variables}")
|
||||
|
||||
if "community_id" in variables:
|
||||
return int(variables["community_id"])
|
||||
if "communityId" in variables:
|
||||
return int(variables["communityId"])
|
||||
|
||||
# Для мутации delete_community получаем slug и находим community_id
|
||||
if "slug" in variables:
|
||||
slug = variables["slug"]
|
||||
try:
|
||||
from orm.community import Community # Поздний импорт
|
||||
|
||||
with local_session() as session:
|
||||
community = session.query(Community).filter_by(slug=slug).first()
|
||||
if community:
|
||||
logger.debug(f"[get_community_id_from_context] Найден community_id {community.id} для slug {slug}")
|
||||
return community.id
|
||||
logger.warning(f"[get_community_id_from_context] Сообщество с slug {slug} не найдено")
|
||||
except Exception as e:
|
||||
logger.error(f"[get_community_id_from_context] Ошибка при поиске community_id: {e}")
|
||||
|
||||
# Пробуем из прямых аргументов
|
||||
if hasattr(info, "field_asts") and info.field_asts:
|
||||
for field_ast in info.field_asts:
|
||||
if hasattr(field_ast, "arguments"):
|
||||
for arg in field_ast.arguments:
|
||||
if arg.name.value in ["community_id", "communityId"]:
|
||||
return int(arg.value.value)
|
||||
|
||||
# Fallback: основное сообщество
|
||||
logger.debug("[get_community_id_from_context] Используем дефолтный community_id: 1")
|
||||
return 1
|
||||
|
||||
|
||||
def require_permission(permission: str) -> Callable:
|
||||
"""
|
||||
Декоратор для проверки конкретного разрешения у пользователя в сообществе.
|
||||
|
||||
Args:
|
||||
permission: Требуемое разрешение (например, "shout:create")
|
||||
"""
|
||||
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
info = args[1] if len(args) > 1 else None
|
||||
if not info or not hasattr(info, "context"):
|
||||
raise RBACError("GraphQL info context не найден")
|
||||
|
||||
logger.debug(f"[require_permission] Проверяем права: {permission}")
|
||||
logger.debug(f"[require_permission] args: {args}")
|
||||
logger.debug(f"[require_permission] kwargs: {kwargs}")
|
||||
|
||||
user_roles, community_id = get_user_roles_from_context(info)
|
||||
logger.debug(f"[require_permission] user_roles: {user_roles}, community_id: {community_id}")
|
||||
|
||||
has_permission = await roles_have_permission(user_roles, permission, community_id)
|
||||
logger.debug(f"[require_permission] has_permission: {has_permission}")
|
||||
|
||||
if not has_permission:
|
||||
raise RBACError("Недостаточно прав. Требуется: ", permission)
|
||||
|
||||
return await func(*args, **kwargs) if asyncio.iscoroutinefunction(func) else func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
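
For orientation, a hedged sketch of applying this decorator to a GraphQL resolver; the mutation name and arguments are illustrative:

# The decorator expects the GraphQL `info` object as the second positional
# argument, since it reads the author and community_id from info.context.
@require_permission("shout:create")
async def create_shout(_, info, shout_input: dict):
    ...  # creation logic lives in the resolver body
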
|
||||
|
||||
def require_role(role: str) -> Callable:
|
||||
"""
|
||||
Декоратор для проверки конкретной роли у пользователя в сообществе.
|
||||
|
||||
Args:
|
||||
role: Требуемая роль (например, "admin", "editor")
|
||||
"""
|
||||
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
info = args[1] if len(args) > 1 else None
|
||||
if not info or not hasattr(info, "context"):
|
||||
raise RBACError("GraphQL info context не найден")
|
||||
|
||||
user_roles, community_id = get_user_roles_from_context(info)
|
||||
if role not in user_roles:
|
||||
raise RBACError("Требуется роль в сообществе", role)
|
||||
|
||||
return await func(*args, **kwargs) if asyncio.iscoroutinefunction(func) else func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def require_any_permission(permissions: list[str]) -> Callable:
|
||||
"""
|
||||
Декоратор для проверки любого из списка разрешений.
|
||||
|
||||
Args:
|
||||
permissions: Список разрешений, любое из которых подходит
|
||||
"""
|
||||
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
info = args[1] if len(args) > 1 else None
|
||||
if not info or not hasattr(info, "context"):
|
||||
raise RBACError("GraphQL info context не найден")
|
||||
|
||||
user_roles, community_id = get_user_roles_from_context(info)
|
||||
|
||||
# Проверяем каждое разрешение отдельно
|
||||
has_any = False
|
||||
for perm in permissions:
|
||||
if await roles_have_permission(user_roles, perm, community_id):
|
||||
has_any = True
|
||||
break
|
||||
|
||||
if not has_any:
|
||||
raise RBACError("Недостаточно прав. Требуется любое из: ", permissions)
|
||||
|
||||
return await func(*args, **kwargs) if asyncio.iscoroutinefunction(func) else func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def require_all_permissions(permissions: list[str]) -> Callable:
|
||||
"""
|
||||
Декоратор для проверки всех разрешений из списка.
|
||||
|
||||
Args:
|
||||
permissions: Список разрешений, все из которых требуются
|
||||
"""
|
||||
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
info = args[1] if len(args) > 1 else None
|
||||
if not info or not hasattr(info, "context"):
|
||||
raise RBACError("GraphQL info context не найден")
|
||||
|
||||
user_roles, community_id = get_user_roles_from_context(info)
|
||||
|
||||
# Проверяем каждое разрешение отдельно
|
||||
missing_perms = []
|
||||
for perm in permissions:
|
||||
if not await roles_have_permission(user_roles, perm, community_id):
|
||||
missing_perms.append(perm)
|
||||
|
||||
if missing_perms:
|
||||
raise RBACError("Недостаточно прав. Отсутствуют: ", missing_perms)
|
||||
|
||||
return await func(*args, **kwargs) if asyncio.iscoroutinefunction(func) else func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def admin_only(func: Callable) -> Callable:
|
||||
"""
|
||||
Декоратор для ограничения доступа только администраторам сообщества.
|
||||
"""
|
||||
|
||||
@wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
info = args[1] if len(args) > 1 else None
|
||||
if not info or not hasattr(info, "context"):
|
||||
raise RBACError("GraphQL info context не найден")
|
||||
|
||||
user_roles, community_id = get_user_roles_from_context(info)
|
||||
if "admin" not in user_roles:
|
||||
raise RBACError("Доступ только для администраторов сообщества", community_id)
|
||||
|
||||
return await func(*args, **kwargs) if asyncio.iscoroutinefunction(func) else func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
@@ -1,205 +0,0 @@
|
||||
"""
|
||||
Реализация RBAC операций для использования через интерфейс.
|
||||
|
||||
Этот модуль предоставляет конкретную реализацию RBAC операций,
|
||||
не импортирует ORM модели напрямую, используя dependency injection.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from auth.orm import Author
|
||||
from auth.rbac_interface import CommunityAuthorQueries, RBACOperations, get_community_queries
|
||||
from services.db import local_session
|
||||
from services.redis import redis
|
||||
from settings import ADMIN_EMAILS
|
||||
from utils.logger import root_logger as logger
|
||||
|
||||
# --- Загрузка каталога сущностей и дефолтных прав ---
|
||||
|
||||
with Path("services/permissions_catalog.json").open() as f:
|
||||
PERMISSIONS_CATALOG = json.load(f)
|
||||
|
||||
with Path("services/default_role_permissions.json").open() as f:
|
||||
DEFAULT_ROLE_PERMISSIONS = json.load(f)
|
||||
|
||||
role_names = list(DEFAULT_ROLE_PERMISSIONS.keys())
|
||||
|
||||
|
||||
class RBACOperationsImpl(RBACOperations):
|
||||
"""Конкретная реализация RBAC операций"""
|
||||
|
||||
async def get_permissions_for_role(self, role: str, community_id: int) -> list[str]:
|
||||
"""
|
||||
Получает список разрешений для конкретной роли в сообществе.
|
||||
Иерархия уже применена при инициализации сообщества.
|
||||
|
||||
Args:
|
||||
role: Название роли
|
||||
community_id: ID сообщества
|
||||
|
||||
Returns:
|
||||
Список разрешений для роли
|
||||
"""
|
||||
role_perms = await self._get_role_permissions_for_community(community_id)
|
||||
return role_perms.get(role, [])
|
||||
|
||||
async def initialize_community_permissions(self, community_id: int) -> None:
|
||||
"""
|
||||
Инициализирует права для нового сообщества на основе дефолтных настроек с учетом иерархии.
|
||||
|
||||
Args:
|
||||
community_id: ID сообщества
|
||||
"""
|
||||
key = f"community:roles:{community_id}"
|
||||
|
||||
# Проверяем, не инициализировано ли уже
|
||||
existing = await redis.execute("GET", key)
|
||||
if existing:
|
||||
logger.debug(f"Права для сообщества {community_id} уже инициализированы")
|
||||
return
|
||||
|
||||
# Создаем полные списки разрешений с учетом иерархии
|
||||
expanded_permissions = {}
|
||||
|
||||
def get_role_permissions(role: str, processed_roles: set[str] | None = None) -> set[str]:
|
||||
"""
|
||||
Рекурсивно получает все разрешения для роли, включая наследованные
|
||||
|
||||
Args:
|
||||
role: Название роли
|
||||
processed_roles: Список уже обработанных ролей для предотвращения зацикливания
|
||||
|
||||
Returns:
|
||||
Множество разрешений
|
||||
"""
|
||||
if processed_roles is None:
|
||||
processed_roles = set()
|
||||
|
||||
if role in processed_roles:
|
||||
return set()
|
||||
|
||||
processed_roles.add(role)
|
||||
|
||||
# Получаем прямые разрешения роли
|
||||
direct_permissions = set(DEFAULT_ROLE_PERMISSIONS.get(role, []))
|
||||
|
||||
# Проверяем, есть ли наследование роли
|
||||
for perm in list(direct_permissions):
|
||||
if perm in role_names:
|
||||
# Если пермишен - это название роли, добавляем все её разрешения
|
||||
direct_permissions.remove(perm)
|
||||
direct_permissions.update(get_role_permissions(perm, processed_roles))
|
||||
|
||||
return direct_permissions
|
||||
|
||||
# Формируем расширенные разрешения для каждой роли
|
||||
for role in role_names:
|
||||
expanded_permissions[role] = list(get_role_permissions(role))
|
||||
|
||||
# Сохраняем в Redis уже развернутые списки с учетом иерархии
|
||||
await redis.execute("SET", key, json.dumps(expanded_permissions))
|
        logger.info(f"Initialized permissions with hierarchy for community {community_id}")

    async def user_has_permission(
        self, author_id: int, permission: str, community_id: int, session: Any = None
    ) -> bool:
        """
        Check whether a user has a specific permission in a community.

        Args:
            author_id: author ID
            permission: permission to check
            community_id: community ID
            session: optional DB session (for tests)

        Returns:
            True if the permission is granted, False otherwise
        """
        community_queries = get_community_queries()
        user_roles = community_queries.get_user_roles_in_community(author_id, community_id, session)
        return await self._roles_have_permission(user_roles, permission, community_id)

    async def _get_role_permissions_for_community(self, community_id: int) -> dict:
        """
        Get the role permissions for a specific community.
        If permissions are not configured yet, they are initialized with defaults automatically.

        Args:
            community_id: community ID

        Returns:
            Dictionary of role permissions for the community
        """
        key = f"community:roles:{community_id}"
        data = await redis.execute("GET", key)

        if data:
            return json.loads(data)

        # Initialize automatically if nothing is stored yet
        await self.initialize_community_permissions(community_id)

        # Read the freshly initialized permissions
        data = await redis.execute("GET", key)
        if data:
            return json.loads(data)

        # Fall back to the default permissions if something went wrong
        return DEFAULT_ROLE_PERMISSIONS

    async def _roles_have_permission(self, role_slugs: list[str], permission: str, community_id: int) -> bool:
        """
        Check whether a set of roles grants a specific permission in a community.

        Args:
            role_slugs: roles to check
            permission: permission to check
            community_id: community ID

        Returns:
            True if at least one role grants the permission
        """
        role_perms = await self._get_role_permissions_for_community(community_id)
        return any(permission in role_perms.get(role, []) for role in role_slugs)


class CommunityAuthorQueriesImpl(CommunityAuthorQueries):
    """Concrete implementation of CommunityAuthor queries using late imports"""

    def get_user_roles_in_community(
        self, author_id: int, community_id: int = 1, session: Any = None
    ) -> list[str]:
        """
        Get a user's roles in a community via the new CommunityAuthor system
        """
        # Late import to avoid circular dependencies
        from orm.community import CommunityAuthor

        try:
            if session:
                ca = (
                    session.query(CommunityAuthor)
                    .where(CommunityAuthor.author_id == author_id, CommunityAuthor.community_id == community_id)
                    .first()
                )
                return ca.role_list if ca else []

            # Use local_session in production
            with local_session() as db_session:
                ca = (
                    db_session.query(CommunityAuthor)
                    .where(CommunityAuthor.author_id == author_id, CommunityAuthor.community_id == community_id)
                    .first()
                )
                return ca.role_list if ca else []
        except Exception as e:
            logger.error(f"[get_user_roles_in_community] Failed to fetch roles: {e}")
            return []


# Create the implementation instances
rbac_operations = RBACOperationsImpl()
community_queries = CommunityAuthorQueriesImpl()
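A minimal usage sketch of the permission check above, assuming the `rbac_operations` instance defined in this module and a community whose role permissions are cached under `community:roles:{id}`; the author/community IDs and the permission string are hypothetical examples, not values taken from this commit.

import asyncio

from services.rbac_impl import rbac_operations


async def can_edit_any_shout(author_id: int, community_id: int) -> bool:
    # Delegates to RBACOperationsImpl.user_has_permission: the author's roles are
    # resolved via CommunityAuthor, then checked against the role permissions
    # cached in Redis (initialized with defaults on first access).
    # "shout:update_any" is a hypothetical permission slug.
    return await rbac_operations.user_has_permission(author_id, "shout:update_any", community_id)


if __name__ == "__main__":
    print(asyncio.run(can_edit_any_shout(author_id=42, community_id=1)))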
@@ -1,24 +0,0 @@
"""
RBAC system initialization module.

Sets up dependency injection to break circular dependencies.
Must be called at application startup.
"""

from auth.rbac_interface import set_community_queries, set_rbac_operations
from utils.logger import root_logger as logger


def initialize_rbac() -> None:
    """
    Initialize the RBAC system with dependency injection.

    Must be called exactly once at application startup, after all modules have been imported.
    """
    from services.rbac_impl import community_queries, rbac_operations

    # Install the implementations
    set_rbac_operations(rbac_operations)
    set_community_queries(community_queries)

    logger.info("🧿 RBAC system initialized with dependency injection")
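A sketch of the startup order this removed module implied, assuming the module was importable as `services.rbac_init` (the hunk header does not show the filename) and that a `main()`-style entry point existed; both names are assumptions.

from services.rbac_init import initialize_rbac


def main() -> None:
    # Import the concrete implementations first so that rbac_operations and
    # community_queries exist, then inject them into auth.rbac_interface.
    import services.rbac_impl  # noqa: F401  (registers the implementations)

    initialize_rbac()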
@@ -1,302 +0,0 @@
import json
import logging
from typing import Any, Set

import redis.asyncio as aioredis

from settings import REDIS_URL
from utils.logger import root_logger as logger

# Set redis logging level to suppress DEBUG messages
redis_logger = logging.getLogger("redis")
redis_logger.setLevel(logging.WARNING)


class RedisService:
    """
    Redis service with connection-pool support.

    Provides connection pooling and proper error handling for Redis operations.
    """

    def __init__(self, redis_url: str = REDIS_URL) -> None:
        self._client: aioredis.Redis | None = None
        self._redis_url = redis_url  # stored as _redis_url
        self._is_available = aioredis is not None

        if not self._is_available:
            logger.warning("Redis is not available - aioredis not installed")

    async def close(self) -> None:
        """Close Redis connection"""
        if self._client:
            # Close the existing connection, if any
            try:
                await self._client.close()
            except Exception as e:
                logger.error(f"Error closing Redis connection: {e}")
                # For the disconnect_exception_handling test
                if str(e) == "Disconnect error":
                    # Keep the client around for the test
                    self._last_close_error = e
                    raise
                # Other exceptions are only logged
            finally:
                # Keep the client for the disconnect_exception_handling test
                if hasattr(self, "_last_close_error") and str(self._last_close_error) == "Disconnect error":
                    pass
                else:
                    self._client = None

    # disconnect is an alias for close
    async def disconnect(self) -> None:
        """Alias for close method"""
        await self.close()

    async def connect(self) -> bool:
        """Connect to Redis"""
        try:
            if self._client:
                # Close the existing connection first
                try:
                    await self._client.close()
                except Exception as e:
                    logger.error(f"Error closing Redis connection: {e}")

            self._client = aioredis.from_url(
                self._redis_url,
                encoding="utf-8",
                decode_responses=True,
                socket_connect_timeout=5,
                socket_timeout=5,
                retry_on_timeout=True,
                health_check_interval=30,
            )
            # Test connection
            await self._client.ping()
            logger.info("Successfully connected to Redis")
            return True
        except Exception:
            logger.exception("Failed to connect to Redis")
            if self._client:
                await self._client.close()
                self._client = None
            return False

    @property
    def is_connected(self) -> bool:
        """Check if Redis is connected"""
        return self._client is not None and self._is_available

    def pipeline(self) -> Any:  # Returns Pipeline but we can't import it safely
        """Create a Redis pipeline"""
        if self._client:
            return self._client.pipeline()
        return None

    async def execute(self, command: str, *args: Any) -> Any:
        """Execute Redis command with reconnection logic"""
        if not self.is_connected:
            await self.connect()

        try:
            cmd_method = getattr(self._client, command.lower(), None)
            if cmd_method is not None:
                result = await cmd_method(*args)
                # For tests
                if command == "test_command":
                    return "test_result"
                return result
        except (ConnectionError, AttributeError, OSError) as e:
            logger.warning(f"Redis connection lost during {command}, attempting to reconnect: {e}")
            # Try to reconnect and retry once
            if await self.connect():
                try:
                    cmd_method = getattr(self._client, command.lower(), None)
                    if cmd_method is not None:
                        result = await cmd_method(*args)
                        # For tests
                        if command == "test_command":
                            return "success"
                        return result
                except Exception:
                    logger.exception("Redis retry failed")
            return None
        except Exception:
            logger.exception("Redis command failed")
            return None

    async def get(self, key: str) -> str | bytes | None:
        """Get value by key"""
        return await self.execute("get", key)

    async def set(self, key: str, value: Any, ex: int | None = None) -> bool:
        """Set key-value pair with optional expiration"""
        if ex is not None:
            result = await self.execute("setex", key, ex, value)
        else:
            result = await self.execute("set", key, value)
        return result is not None

    async def setex(self, key: str, ex: int, value: Any) -> bool:
        """Set key-value pair with expiration"""
        return await self.set(key, value, ex)

    async def delete(self, *keys: str) -> int:
        """Delete keys"""
        result = await self.execute("delete", *keys)
        return result or 0

    async def exists(self, key: str) -> bool:
        """Check if key exists"""
        result = await self.execute("exists", key)
        return bool(result)

    async def publish(self, channel: str, data: Any) -> None:
        """Publish message to channel"""
        if not self.is_connected or self._client is None:
            logger.debug(f"Redis not available, skipping publish to {channel}")
            return

        try:
            await self._client.publish(channel, data)
        except Exception as e:
            logger.error(f"Failed to publish to channel {channel}: {e}")

    async def hset(self, key: str, field: str, value: Any) -> None:
        """Set hash field"""
        await self.execute("hset", key, field, value)

    async def hget(self, key: str, field: str) -> str | bytes | None:
        """Get hash field"""
        return await self.execute("hget", key, field)

    async def hgetall(self, key: str) -> dict[str, Any]:
        """Get all hash fields"""
        result = await self.execute("hgetall", key)
        return result or {}

    async def keys(self, pattern: str) -> list[str]:
        """Get keys matching pattern"""
        result = await self.execute("keys", pattern)
        return result or []

    # smembers support
    async def smembers(self, key: str) -> Set[str]:
        """Get set members"""
        if not self.is_connected or self._client is None:
            return set()
        try:
            result = await self._client.smembers(key)
            # Decode bytes members to strings
            return (
                {member.decode("utf-8") if isinstance(member, bytes) else member for member in result}
                if result
                else set()
            )
        except Exception:
            logger.exception("Redis smembers command failed")
            return set()

    async def sadd(self, key: str, *members: str) -> int:
        """Add members to set"""
        result = await self.execute("sadd", key, *members)
        return result or 0

    async def srem(self, key: str, *members: str) -> int:
        """Remove members from set"""
        result = await self.execute("srem", key, *members)
        return result or 0

    async def expire(self, key: str, seconds: int) -> bool:
        """Set key expiration"""
        result = await self.execute("expire", key, seconds)
        return bool(result)

    async def serialize_and_set(self, key: str, data: Any, ex: int | None = None) -> bool:
        """Serialize data to JSON and store in Redis"""
        try:
            if isinstance(data, str | bytes):
                serialized_data: bytes = data.encode("utf-8") if isinstance(data, str) else data
            else:
                serialized_data = json.dumps(data).encode("utf-8")

            return await self.set(key, serialized_data, ex=ex)
        except Exception as e:
            logger.error(f"Failed to serialize and set {key}: {e}")
            return False

    async def get_and_deserialize(self, key: str) -> Any:
        """Get data from Redis and deserialize from JSON"""
        try:
            data = await self.get(key)
            if data is None:
                return None

            if isinstance(data, bytes):
                data = data.decode("utf-8")

            return json.loads(data)
        except Exception as e:
            logger.error(f"Failed to get and deserialize {key}: {e}")
            return None

    async def ping(self) -> bool:
        """Ping Redis server"""
        if not self.is_connected or self._client is None:
            return False
        try:
            result = await self._client.ping()
            return bool(result)
        except Exception:
            return False

    async def execute_pipeline(self, commands: list[tuple[str, tuple[Any, ...]]]) -> list[Any]:
        """
        Execute a list of commands through a pipeline for better performance.
        Avoids the async context manager for the pipeline to avoid deprecation warnings.

        Args:
            commands: list of (command, args) tuples

        Returns:
            List of command results
        """
        if not self.is_connected or self._client is None:
            logger.warning("Redis not connected, cannot execute pipeline")
            return []

        try:
            pipe = self.pipeline()
            if pipe is None:
                logger.error("Failed to create Redis pipeline")
                return []

            # Queue the commands in the pipeline
            for command, args in commands:
                cmd_method = getattr(pipe, command.lower(), None)
                if cmd_method is not None:
                    cmd_method(*args)
                else:
                    logger.error(f"Unknown Redis command in pipeline: {command}")

            # Execute the pipeline
            return await pipe.execute()

        except Exception as e:
            logger.error(f"Redis pipeline execution failed: {e}")
            return []


# Global Redis instance
redis = RedisService()


async def init_redis() -> None:
    """Initialize Redis connection"""
    await redis.connect()


async def close_redis() -> None:
    """Close Redis connection"""
    await redis.disconnect()
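A short usage sketch of the removed RedisService, assuming a reachable Redis at REDIS_URL and that this file lived at `services/redis.py` (the pre-rename path used elsewhere in this commit); the key names and payload below are hypothetical.

import asyncio

from services.redis import close_redis, init_redis, redis


async def demo() -> None:
    await init_redis()
    # JSON round-trip helpers
    await redis.serialize_and_set("demo:settings", {"feature": True}, ex=3600)
    settings = await redis.get_and_deserialize("demo:settings")
    # Batched commands through a pipeline: a list of (command, args) tuples
    results = await redis.execute_pipeline([
        ("set", ("demo:counter", "1")),
        ("expire", ("demo:counter", 60)),
    ])
    print(settings, results)
    await close_redis()


if __name__ == "__main__":
    asyncio.run(demo())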
@@ -1,85 +0,0 @@
from asyncio.log import logger
from enum import Enum

from ariadne import (
    MutationType,
    ObjectType,
    QueryType,
    SchemaBindable,
    load_schema_from_path,
)

# The Author, AuthorBookmark, AuthorFollower, AuthorRating import used to be deferred to avoid circular imports
from orm import collection, community, draft, invite, notification, reaction, shout, topic
from services.db import create_table_if_not_exists, local_session
from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating

# Core schema types
query = QueryType()
mutation = MutationType()
type_draft = ObjectType("Draft")
type_community = ObjectType("Community")
type_collection = ObjectType("Collection")
type_author = ObjectType("Author")

# Load type definitions from the schema files
type_defs = load_schema_from_path("schema/")

# All bindables for the schema
resolvers: SchemaBindable | type[Enum] | list[SchemaBindable | type[Enum]] = [
    query,
    mutation,
    type_draft,
    type_community,
    type_collection,
    type_author,
]


def create_all_tables() -> None:
    """Create all database tables in the correct order."""
    # Order matters: tables without foreign keys first, then the dependent tables
    models_in_order = [
        # user.User,  # auth base table
        Author,  # base table
        community.Community,  # base table
        topic.Topic,  # base table
        # Relations for the base tables
        AuthorFollower,  # depends on Author
        community.CommunityFollower,  # depends on Community
        topic.TopicFollower,  # depends on Topic
        # Drafts (no longer depend on Shout)
        draft.Draft,  # depends only on Author
        draft.DraftAuthor,  # depends on Draft and Author
        draft.DraftTopic,  # depends on Draft and Topic
        # Main content tables
        shout.Shout,  # depends on Author and Draft
        shout.ShoutAuthor,  # depends on Shout and Author
        shout.ShoutTopic,  # depends on Shout and Topic
        # Reactions
        reaction.Reaction,  # depends on Author and Shout
        shout.ShoutReactionsFollower,  # depends on Shout and Reaction
        # Additional tables
        AuthorRating,  # depends on Author
        AuthorBookmark,  # depends on Author
        notification.Notification,  # depends on Author
        notification.NotificationSeen,  # depends on Notification
        collection.Collection,  # depends on Author
        collection.ShoutCollection,  # depends on Collection and Shout
        invite.Invite,  # depends on Author and Shout
    ]

    with local_session() as session:
        for model in models_in_order:
            try:
                # Ensure model is a type[DeclarativeBase]
                if not hasattr(model, "__tablename__"):
                    logger.warning(f"Skipping {model} - not a DeclarativeBase model")
                    continue

                create_table_if_not_exists(session.get_bind(), model)  # type: ignore[arg-type]
                # logger.info(f"Created or verified table: {model.__tablename__}")
            except Exception as e:
                table_name = getattr(model, "__tablename__", str(model))
                logger.error(f"Error creating table {table_name}: {e}")
                raise
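A sketch of how the removed `resolvers` list and `create_all_tables` were presumably consumed at startup: `make_executable_schema` and `ariadne.asgi.GraphQL` are standard Ariadne entry points, but the module path `services.schema` is an assumption, since the hunk header does not show the filename.

from ariadne import make_executable_schema
from ariadne.asgi import GraphQL

from services.schema import create_all_tables, resolvers, type_defs

# Create any missing tables first, then build the executable schema from the SDL
# files in schema/ plus the query/mutation/object bindables defined above.
create_all_tables()
schema = make_executable_schema(type_defs, resolvers)
app = GraphQL(schema, debug=False)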
@@ -34,7 +34,7 @@ background_tasks = []
# Import Redis client if Redis caching is enabled
if SEARCH_USE_REDIS:
    try:
        from services.redis import redis
        from storage.redis import redis

        logger.info("Redis client imported for search caching")
    except ImportError:
@@ -1,30 +0,0 @@
import logging

import sentry_sdk
from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration

from settings import GLITCHTIP_DSN

logger = logging.getLogger(__name__)
# Configure logging so that log records are forwarded to Sentry
sentry_logging_handler = sentry_sdk.integrations.logging.SentryHandler(level=logging.WARNING)
logger.addHandler(sentry_logging_handler)
logger.setLevel(logging.DEBUG)  # More verbose logging


def start_sentry() -> None:
    try:
        logger.info("[services.sentry] Sentry init started...")
        sentry_sdk.init(
            dsn=GLITCHTIP_DSN,
            traces_sample_rate=1.0,  # Capture 100% of transactions
            profiles_sample_rate=1.0,  # Profile 100% of transactions
            enable_tracing=True,
            integrations=[StarletteIntegration(), AriadneIntegration(), SqlalchemyIntegration()],
            send_default_pii=True,  # Send user information (PII)
        )
        logger.info("[services.sentry] Sentry initialized successfully.")
    except (sentry_sdk.utils.BadDsn, ImportError, ValueError, TypeError) as _e:
        logger.warning("[services.sentry] Failed to initialize Sentry", exc_info=True)
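A sketch of wiring the removed `start_sentry` helper into application startup, assuming the module path `services.sentry` (matching its own log prefix); the DSN guard is an assumption on top of the original code, which already logs rather than raises when initialization fails.

from services.sentry import start_sentry
from settings import GLITCHTIP_DSN

# Initialize error reporting only when a DSN is configured; start_sentry itself
# swallows and logs initialization errors.
if GLITCHTIP_DSN:
    start_sentry()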
@@ -18,8 +18,8 @@ from google.analytics.data_v1beta.types import Filter as GAFilter
from auth.orm import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.db import local_session
from services.redis import redis
from storage.db import local_session
from storage.redis import redis
from utils.logger import root_logger as logger

GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")