2024-06-11 19:46:35 +00:00
|
|
|
|
import asyncio
|
2023-11-03 10:10:22 +00:00
|
|
|
|
import time
|
2025-05-29 09:37:39 +00:00
|
|
|
|
from typing import Any, Dict, List, Optional
|
2023-12-17 20:30:20 +00:00
|
|
|
|
|
2025-03-22 08:47:19 +00:00
|
|
|
|
from sqlalchemy import select, text
|
2023-10-23 14:47:11 +00:00
|
|
|
|
|
2025-05-29 09:37:39 +00:00
|
|
|
|
from auth.orm import Author
|
2024-08-07 05:57:56 +00:00
|
|
|
|
from cache.cache import (
|
2024-05-20 22:40:57 +00:00
|
|
|
|
cache_author,
|
2025-03-22 08:47:19 +00:00
|
|
|
|
cached_query,
|
2024-05-20 22:40:57 +00:00
|
|
|
|
get_cached_author,
|
|
|
|
|
get_cached_author_followers,
|
2024-05-30 16:42:38 +00:00
|
|
|
|
get_cached_follower_authors,
|
|
|
|
|
get_cached_follower_topics,
|
2025-03-22 08:47:19 +00:00
|
|
|
|
invalidate_cache_by_prefix,
|
2024-05-20 22:40:57 +00:00
|
|
|
|
)
|
2024-08-12 08:00:01 +00:00
|
|
|
|
from resolvers.stat import get_with_stat
|
|
|
|
|
from services.auth import login_required
|
2023-12-17 20:30:20 +00:00
|
|
|
|
from services.db import local_session
|
2025-03-22 08:47:19 +00:00
|
|
|
|
from services.redis import redis
|
2023-12-17 20:30:20 +00:00
|
|
|
|
from services.schema import mutation, query
|
2025-04-29 20:45:37 +00:00
|
|
|
|
from services.search import search_service
|
2024-08-12 08:00:01 +00:00
|
|
|
|
from utils.logger import root_logger as logger
|
2024-01-13 08:49:12 +00:00
|
|
|
|
|
2025-03-22 08:47:19 +00:00
|
|
|
|
# Fallback community ids returned by get_author_follows; presumably id 1 is
# the default/main community — TODO confirm against the community table.
DEFAULT_COMMUNITIES = [1]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Helper: all authors without statistics
async def get_all_authors(current_user_id=None, is_admin=False):
    """
    Get all (non-deleted) authors without statistics.

    Used when a plain list of authors is needed, with no extra data.

    Args:
        current_user_id: ID of the current user, for access checks during serialization
        is_admin: whether the requesting user is an administrator
            (accepted for interface compatibility with callers such as
            get_authors_all, which previously passed it and crashed with
            a TypeError because the parameter did not exist)

    Returns:
        list: all authors as dicts, without statistics
    """
    # NOTE(review): the cache key does not include the viewer, so the first
    # caller's serialization is served to everyone — confirm author.dict()
    # output is viewer-independent here, or add the viewer to the key.
    cache_key = "authors:all:basic"

    # Fetch all authors from the DB (executed only on cache miss)
    async def fetch_all_authors():
        logger.debug("Получаем список всех авторов из БД и кешируем результат")

        with local_session() as session:
            # Base query for author rows, skipping soft-deleted ones
            authors_query = select(Author).where(Author.deleted_at.is_(None))
            authors = session.execute(authors_query).scalars().all()

            # Serialize with access checks. is_admin is deliberately NOT
            # forwarded: the cache is shared between viewers, so caching an
            # admin-level serialization would leak restricted fields.
            return [author.dict(current_user_id, False) for author in authors]

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_all_authors)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Helper: authors with statistics, paginated
async def get_authors_with_stats(
    limit=50,
    offset=0,
    by: Optional[str] = None,
    current_user_id: Optional[int] = None,
    is_admin: bool = False,
):
    """
    Get authors with statistics, paginated.

    Args:
        limit: maximum number of authors returned
        offset: pagination offset
        by: optional sort criterion — "new"/"active" or a dict such as
            {"order": "shouts"} / {"name": "asc"}
        current_user_id: ID of the requesting user
        is_admin: whether the requesting user is an administrator
            (both accepted for interface compatibility: load_authors_by
            passes five arguments, which previously raised a TypeError)

    Returns:
        list: authors with a "stat" dict holding shouts/followers counts
    """
    # The sort criterion must be part of the cache key; previously it was
    # omitted, so different sort orders collided on the same cached page.
    cache_key = f"authors:stats:limit={limit}:offset={offset}:by={by}"

    # Fetch authors from the DB (executed only on cache miss)
    async def fetch_authors_with_stats():
        logger.debug(f"Выполняем запрос на получение авторов со статистикой: limit={limit}, offset={offset}, by={by}")

        # Hoisted to function scope: previously `desc` was imported only
        # inside the dict-sorting branch, so the "new"/"active"/default
        # ordering branches raised NameError.
        from sqlalchemy import asc, desc, func

        with local_session() as session:
            # Base query, skipping soft-deleted authors
            base_query = select(Author).where(Author.deleted_at.is_(None))

            # Set when the requested ordering needs an aggregate subquery
            stats_sort_field = None

            if by:
                if isinstance(by, dict):
                    logger.debug(f"Processing dict-based sorting: {by}")
                    if "order" in by:
                        order_value = by["order"]
                        logger.debug(f"Found order field with value: {order_value}")
                        if order_value in ["shouts", "followers", "rating", "comments"]:
                            # Statistics-based ordering; resolved below via subquery
                            stats_sort_field = order_value
                            logger.debug(f"Applying statistics-based sorting by: {stats_sort_field}")
                        elif order_value == "name":
                            # Alphabetical ordering, ascending
                            base_query = base_query.order_by(asc(Author.name))
                            logger.debug("Applying alphabetical sorting by name")
                        else:
                            # Fall back to treating "order" as a plain model field
                            column = getattr(Author, order_value, None)
                            if column:
                                base_query = base_query.order_by(desc(column))
                    else:
                        # Regular per-field sorting: {"field": "asc"|"desc", ...}
                        for field, direction in by.items():
                            column = getattr(Author, field, None)
                            if column:
                                if direction.lower() == "desc":
                                    base_query = base_query.order_by(desc(column))
                                else:
                                    base_query = base_query.order_by(column)
                elif by == "new":
                    base_query = base_query.order_by(desc(Author.created_at))
                elif by == "active":
                    base_query = base_query.order_by(desc(Author.last_seen))
                else:
                    # Unknown string criterion: default to creation time
                    base_query = base_query.order_by(desc(Author.created_at))
            else:
                base_query = base_query.order_by(desc(Author.created_at))

            # Statistics-based ordering via aggregate subqueries
            if stats_sort_field == "shouts":
                # Order by number of published, non-deleted shouts
                from sqlalchemy import and_

                from orm.shout import Shout, ShoutAuthor

                subquery = (
                    select(ShoutAuthor.author, func.count(func.distinct(Shout.id)).label("shouts_count"))
                    .select_from(ShoutAuthor)
                    .join(Shout, ShoutAuthor.shout == Shout.id)
                    .where(and_(Shout.deleted_at.is_(None), Shout.published_at.is_not(None)))
                    .group_by(ShoutAuthor.author)
                    .subquery()
                )

                base_query = base_query.outerjoin(subquery, Author.id == subquery.c.author).order_by(
                    desc(func.coalesce(subquery.c.shouts_count, 0))
                )
            elif stats_sort_field == "followers":
                # Order by number of distinct followers
                from auth.orm import AuthorFollower

                subquery = (
                    select(
                        AuthorFollower.author,
                        func.count(func.distinct(AuthorFollower.follower)).label("followers_count"),
                    )
                    .select_from(AuthorFollower)
                    .group_by(AuthorFollower.author)
                    .subquery()
                )

                base_query = base_query.outerjoin(subquery, Author.id == subquery.c.author).order_by(
                    desc(func.coalesce(subquery.c.followers_count, 0))
                )

            # Apply pagination
            base_query = base_query.limit(limit).offset(offset)

            # Fetch the page of authors
            authors = session.execute(base_query).scalars().all()
            author_ids = [author.id for author in authors]

            if not author_ids:
                return []

            # Aggregated publication counts for the selected authors only
            shouts_stats_query = f"""
                SELECT sa.author, COUNT(DISTINCT s.id) as shouts_count
                FROM shout_author sa
                JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
                WHERE sa.author IN ({",".join(map(str, author_ids))})
                GROUP BY sa.author
            """
            shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}

            # Aggregated follower counts for the selected authors only
            followers_stats_query = f"""
                SELECT author, COUNT(DISTINCT follower) as followers_count
                FROM author_follower
                WHERE author IN ({",".join(map(str, author_ids))})
                GROUP BY author
            """
            followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}

            # Assemble result dicts with the statistics attached
            result = []
            for author in authors:
                author_dict = author.dict()
                author_dict["stat"] = {
                    "shouts": shouts_stats.get(author.id, 0),
                    "followers": followers_stats.get(author.id, 0),
                }
                result.append(author_dict)

                # Cache each author individually for reuse by other resolvers
                # (full dict, so admin views can be derived from it)
                await cache_author(author.dict())

            return result

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_authors_with_stats)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Helper to invalidate author caches
async def invalidate_authors_cache(author_id=None):
    """
    Invalidate author caches after data changes.

    Args:
        author_id: optional author ID for targeted invalidation.
            When omitted, all author caches are invalidated.
    """
    if author_id:
        # Targeted invalidation of one author's keys
        logger.debug(f"Инвалидация кеша для автора #{author_id}")
        specific_keys = [
            f"author:id:{author_id}",
            f"author:followers:{author_id}",
            f"author:follows-authors:{author_id}",
            f"author:follows-topics:{author_id}",
            f"author:follows-shouts:{author_id}",
        ]

        # Confirm the author exists and use its real id. The original code
        # read the CLASS attribute `Author.id` (an always-truthy SQLAlchemy
        # column), producing a bogus cache key instead of the instance id.
        with local_session() as session:
            author = session.query(Author).filter(Author.id == author_id).first()
            if author and author.id:
                key = f"author:id:{author.id}"
                if key not in specific_keys:
                    specific_keys.append(key)

        # Delete the specific keys, logging failures individually
        for key in specific_keys:
            try:
                await redis.execute("DEL", key)
                logger.debug(f"Удален ключ кеша {key}")
            except Exception as e:
                logger.error(f"Ошибка при удалении ключа {key}: {e}")

        # Also drop collection keys that may contain this author's data
        collection_keys = await redis.execute("KEYS", "authors:stats:*")
        if collection_keys:
            await redis.execute("DEL", *collection_keys)
            logger.debug(f"Удалено {len(collection_keys)} коллекционных ключей авторов")
    else:
        # Full invalidation of every author-related cache entry
        logger.debug("Полная инвалидация кеша авторов")
        await invalidate_cache_by_prefix("authors")
|
|
|
|
|
|
2023-10-23 14:47:11 +00:00
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@mutation.field("update_author")
@login_required
async def update_author(_, info, profile):
    """
    Update the authenticated author's profile.

    Args:
        profile: dict of profile fields to apply

    Returns:
        dict: {"error": str | None, "author": Author | None}
    """
    # Identity of the caller, from the request context
    author_id = info.context.get("author", {}).get("id")
    is_admin = info.context.get("is_admin", False)
    if not author_id:
        return {"error": "unauthorized", "author": None}
    try:
        with local_session() as session:
            author = session.query(Author).where(Author.id == author_id).first()
            if author:
                Author.update(author, profile)
                session.add(author)
                session.commit()

                # Refresh stats and re-cache asynchronously
                author_query = select(Author).where(Author.id == author_id)
                result = get_with_stat(author_query)
                if result:
                    author_with_stat = result[0]
                    if isinstance(author_with_stat, Author):
                        # Cache the serialized version (full for admins)
                        author_dict = author_with_stat.dict(access=is_admin)
                        asyncio.create_task(cache_author(author_dict))

                # Return the full instance — the caller is the owner
                return {"error": None, "author": author}
            # Previously a missing author fell through and the resolver
            # implicitly returned None, which the payload type cannot carry.
            return {"error": "author not found", "author": None}
    except Exception as exc:
        import traceback

        logger.error(traceback.format_exc())
        # Return the message string; the raw exception object is not serializable
        return {"error": str(exc), "author": None}
|
2023-10-23 14:47:11 +00:00
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("get_authors_all")
async def get_authors_all(_, info):
    """
    Return the full list of authors without statistics.

    Returns:
        list: every author, serialized for the requesting viewer
    """
    # Viewer identity and admin flag come from the request context
    viewer_id = info.context.get("author", {}).get("id")
    is_admin = info.context.get("is_admin", False)

    # Delegate to the cached helper
    return await get_all_authors(viewer_id, is_admin)
|
2025-03-22 08:47:19 +00:00
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("get_author")
async def get_author(_, info, slug="", author_id=0):
    """
    Fetch a single author by slug or id, preferring the cache.

    Falls back to a DB query with statistics when the cached entry is
    missing or carries no "stat" section. Returns None when not found.
    """
    # Admin flag of the requesting user, from the request context
    is_admin = info.context.get("is_admin", False)

    author_dict = None
    try:
        # Resolve the id from slug when no explicit id was given
        author_id = get_author_id_from(slug=slug, user="", author_id=author_id)
        if not author_id:
            raise ValueError("cant find")

        # Fetch the author's data from the cache (full, unfiltered version)
        cached_author = await get_cached_author(int(author_id), get_with_stat)

        # Field filtering happens here, since the cache stores the full version
        if cached_author:
            # Re-hydrate an Author instance so its dict() method can filter fields
            temp_author = Author()
            for key, value in cached_author.items():
                if hasattr(temp_author, key):
                    setattr(temp_author, key, value)
            # Serialize with the viewer's access level
            author_dict = temp_author.dict(access=is_admin)
            # Carry over statistics that may be present in the cached version
            if "stat" in cached_author:
                author_dict["stat"] = cached_author["stat"]

        if not author_dict or not author_dict.get("stat"):
            # update stat from db
            author_query = select(Author).filter(Author.id == author_id)
            result = get_with_stat(author_query)
            if result:
                author_with_stat = result[0]
                if isinstance(author_with_stat, Author):
                    # Re-cache the full (admin-level) data in the background
                    original_dict = author_with_stat.dict(access=True)
                    asyncio.create_task(cache_author(original_dict))

                    # Return the version filtered for this viewer
                    author_dict = author_with_stat.dict(access=is_admin)
                    # Attach statistics computed by get_with_stat
                    if hasattr(author_with_stat, "stat"):
                        author_dict["stat"] = author_with_stat.stat
    except ValueError:
        # Author not found: author_dict stays None
        pass
    except Exception as exc:
        import traceback

        logger.error(f"{exc}:\n{traceback.format_exc()}")
    return author_dict
|
2023-12-27 22:37:54 +00:00
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("load_authors_by")
async def load_authors_by(_, info, by, limit, offset):
    """
    Load authors by a given criterion, paginated.

    Args:
        by: sort criterion (new/active, or a dict of sort fields)
        limit: maximum number of authors to return
        offset: pagination offset

    Returns:
        list: authors matching the criterion; empty list on error
    """
    try:
        # Viewer identity and admin flag from the request context
        ctx = info.context
        viewer_id = ctx.get("author", {}).get("id")
        is_admin = ctx.get("is_admin", False)

        # Delegate to the optimized stats-aware helper
        return await get_authors_with_stats(limit, offset, by, viewer_id, is_admin)
    except Exception as exc:
        import traceback

        logger.error(f"{exc}:\n{traceback.format_exc()}")
        return []
|
2024-03-12 13:18:07 +00:00
|
|
|
|
|
|
|
|
|
|
2025-04-29 20:45:37 +00:00
|
|
|
|
@query.field("load_authors_search")
async def load_authors_search(_, info, text: str, limit: int = 10, offset: int = 0):
    """
    Resolver for searching authors by text. Backed by the txt-ai search endpoint.

    Args:
        text: search text
        limit: maximum number of authors to return
        offset: offset for pagination

    Returns:
        list: authors matching the search criteria, in relevance order
    """
    # Get author IDs from search engine (already sorted by relevance)
    search_results = await search_service.search_authors(text, limit, offset)

    if not search_results:
        return []

    author_ids = [result.get("id") for result in search_results if result.get("id")]
    if not author_ids:
        return []

    # Fetch full author objects from DB
    with local_session() as session:
        # Simple query to get authors by IDs - no need for stats here
        authors_query = select(Author).filter(Author.id.in_(author_ids))
        db_authors = session.execute(authors_query).scalars().all()

        if not db_authors:
            return []

        # Index by stringified id. Search-result ids may be ints or strings,
        # so both sides of the lookup are normalized to str; previously the
        # raw search id was used and int ids never matched the str keys.
        authors_dict = {str(author.id): author for author in db_authors}

        # Keep the order from search results (maintains the relevance sorting)
        ordered_authors = [
            authors_dict[str(author_id)] for author_id in author_ids if str(author_id) in authors_dict
        ]

        return ordered_authors
|
|
|
|
|
|
|
|
|
|
|
2024-05-30 04:12:00 +00:00
|
|
|
|
def get_author_id_from(slug="", user=None, author_id=None):
    """
    Resolve an author id from any of slug / user id / explicit author_id.

    Args:
        slug: author slug to look up
        user: auth-system user id (matched against Author.id)
        author_id: already-known author id; returned as-is when provided

    Returns:
        int | None: the resolved author id, or None when nothing matched
    """
    try:
        # Short-circuit on an explicit id. The original code reset
        # author_id to None right before this check, so a passed-in id
        # was always discarded and a DB lookup forced.
        if author_id:
            return author_id
        with local_session() as session:
            if slug:
                author = session.query(Author).filter(Author.slug == slug).first()
                if author:
                    return author.id
            if user:
                author = session.query(Author).filter(Author.id == user).first()
                if author:
                    author_id = author.id
    except Exception as exc:
        logger.error(exc)
    return author_id
|
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("get_author_follows")
async def get_author_follows(_, info, slug="", user=None, author_id=0):
    """
    Return everything the given author follows: authors, topics,
    communities and shouts.
    """
    # Viewer identity and admin flag from the request context
    viewer_id = info.context.get("author", {}).get("id")
    is_admin = info.context.get("is_admin", False)

    logger.debug(f"getting follows for @{slug}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return {}

    # Follow data comes from the cache
    raw_authors = await get_cached_follower_authors(author_id)
    followed_topics = await get_cached_follower_topics(author_id)

    # Re-hydrate each cached author into an Author instance so that
    # dict(access=...) can strip sensitive fields per viewer.
    followed_authors = []
    for data in raw_authors:
        stub = Author()
        for field, val in data.items():
            if hasattr(stub, field):
                setattr(stub, field, val)
        # Full access only for admins or the author viewing themselves
        has_access = is_admin or (viewer_id is not None and str(viewer_id) == str(stub.id))
        followed_authors.append(stub.dict(access=has_access))

    # TODO: Get followed communities too
    return {
        "authors": followed_authors,
        "topics": followed_topics,
        "communities": DEFAULT_COMMUNITIES,
        "shouts": [],
    }
|
2024-02-23 18:10:11 +00:00
|
|
|
|
|
2024-02-24 10:22:35 +00:00
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("get_author_follows_topics")
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
    """Return the topics followed by the given author (empty when unresolved)."""
    logger.debug(f"getting followed topics for @{slug}")
    # Resolve the author from slug / user / explicit id
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return []
    # Topic follows are served from the cache
    return await get_cached_follower_topics(author_id)
|
2024-02-23 18:10:11 +00:00
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("get_author_follows_authors")
async def get_author_follows_authors(_, info, slug="", user=None, author_id=None):
    """
    Return the authors followed by the given author, with per-viewer
    field filtering.

    Args:
        slug: author slug (alternative to author_id)
        user: auth-system user id (alternative to author_id)
        author_id: explicit author id

    Returns:
        list: followed authors serialized with the viewer's access level
    """
    # Viewer identity and admin flag from the request context
    viewer_id = info.context.get("author", {}).get("id")
    is_admin = info.context.get("is_admin", False)

    logger.debug(f"getting followed authors for @{slug}")
    # Resolve the author id from slug/user, matching the sibling resolvers
    # (get_author_follows_topics, get_author_followers). Previously slug and
    # user were accepted but ignored, so slug-based calls always returned [].
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return []

    # Follow data comes from the cache
    followed_authors_raw = await get_cached_follower_authors(author_id)

    # Re-hydrate each cached author so dict(access=...) can filter fields
    followed_authors = []
    for author_data in followed_authors_raw:
        temp_author = Author()
        for key, value in author_data.items():
            if hasattr(temp_author, key):
                setattr(temp_author, key, value)
        # Full access only for admins or the author viewing themselves
        has_access = is_admin or (viewer_id is not None and str(viewer_id) == str(temp_author.id))
        followed_authors.append(temp_author.dict(access=has_access))

    return followed_authors
|
2023-10-23 14:47:11 +00:00
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
def create_author(user_id: str, slug: str, name: str = ""):
    """
    Create and persist a new Author linked to an auth-system user.

    Args:
        user_id: id from the auth system, used as the author's id
        slug: unique identifier from the auth system
        name: display name; falls back to the slug when empty

    Returns:
        Author: the persisted author instance
    """
    author = Author()
    # The original code assigned the CLASS attribute (`Author.id = user_id`),
    # leaving the new instance without an id and mutating the model class.
    author.id = user_id  # link to the auth-system user id
    author.slug = slug  # identifier from the auth system
    author.created_at = author.updated_at = int(time.time())
    author.name = name or slug  # default to slug when no name is given

    with local_session() as session:
        session.add(author)
        session.commit()
        return author
|
2024-02-21 09:34:12 +00:00
|
|
|
|
|
|
|
|
|
|
2024-04-17 15:32:23 +00:00
|
|
|
|
@query.field("get_author_followers")
async def get_author_followers(_, info, slug: str = "", user: str = "", author_id: int = 0):
    """Return the followers of the given author, filtered per viewer."""
    # Viewer identity and admin flag from the request context
    viewer_id = info.context.get("author", {}).get("id")
    is_admin = info.context.get("is_admin", False)

    logger.debug(f"getting followers for author @{slug} or ID:{author_id}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return []

    # Follower dicts are served from the cache
    raw_followers = await get_cached_author_followers(author_id)

    # Re-hydrate each follower into an Author instance so dict(access=...)
    # can strip sensitive fields for non-privileged viewers.
    result = []
    for data in raw_followers:
        stub = Author()
        for field, val in data.items():
            if hasattr(stub, field):
                setattr(stub, field, val)
        # Full access only for admins or the follower viewing themselves
        access = is_admin or (viewer_id is not None and str(viewer_id) == str(stub.id))
        result.append(stub.dict(access=access))

    return result
|