@@ -10,7 +10,6 @@ from sqlalchemy.sql import desc as sql_desc
 from cache.cache import (
     cache_author,
     cached_query,
-    get_cached_author,
     get_cached_author_followers,
     get_cached_follower_authors,
     get_cached_follower_topics,
@@ -461,12 +460,12 @@ async def get_authors_with_stats(
     # ✍️ Co-author statistics (count of unique co-authors)
     logger.debug("Executing coauthors statistics query")
     coauthors_stats_query = f"""
-        SELECT sa1.author, COUNT(DISTINCT sa2.author) as coauthors_count
+        SELECT sa1.author, COALESCE(COUNT(DISTINCT sa2.author), 0) as coauthors_count
         FROM shout_author sa1
         JOIN shout s ON sa1.shout = s.id
             AND s.deleted_at IS NULL
             AND s.published_at IS NOT NULL
-        JOIN shout_author sa2 ON s.id = sa2.shout
+        LEFT JOIN shout_author sa2 ON s.id = sa2.shout
             AND sa2.author != sa1.author  -- exclude the author themselves
         WHERE sa1.author IN ({placeholders})
         GROUP BY sa1.author
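Note on the hunk above: with a plain JOIN on shout_author sa2, an author whose shouts have no co-authors gets no matching sa2 row and silently disappears from the result; the LEFT JOIN keeps that author, and COUNT(DISTINCT sa2.author) over an all-NULL group evaluates to 0 (the added COALESCE is a defensive extra). A minimal sketch of the effect, joining shout_author to itself directly and using hypothetical data, not part of the commit:

-- hypothetical data: author 1 writes alone; author 2 co-writes with author 3
-- INNER JOIN result: author 2 -> 1, author 3 -> 1 (author 1 is missing)
-- LEFT JOIN result:  author 1 -> 0, author 2 -> 1, author 3 -> 1
SELECT sa1.author, COALESCE(COUNT(DISTINCT sa2.author), 0) AS coauthors_count
FROM shout_author sa1
LEFT JOIN shout_author sa2
    ON sa2.shout = sa1.shout AND sa2.author != sa1.author
GROUP BY sa1.author;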
@@ -502,14 +501,14 @@ async def get_authors_with_stats(
     logger.debug("Executing rating_shouts statistics query")
     rating_shouts_stats_query = f"""
         SELECT sa.author,
-            SUM(CASE
+            COALESCE(SUM(CASE
                 WHEN r.kind IN ('LIKE', 'AGREE', 'ACCEPT', 'PROOF', 'CREDIT') THEN 1
                 WHEN r.kind IN ('DISLIKE', 'DISAGREE', 'REJECT', 'DISPROOF') THEN -1
                 ELSE 0
-            END) as rating_shouts
+            END), 0) as rating_shouts
         FROM shout_author sa
         JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
-        JOIN reaction r ON s.id = r.shout AND r.deleted_at IS NULL
+        LEFT JOIN reaction r ON s.id = r.shout AND r.deleted_at IS NULL
         WHERE sa.author IN ({placeholders})
         GROUP BY sa.author
     """
@@ -522,13 +521,13 @@ async def get_authors_with_stats(
     logger.debug("Executing rating_comments statistics query")
     rating_comments_stats_query = f"""
         SELECT r1.created_by,
-            SUM(CASE
+            COALESCE(SUM(CASE
                 WHEN r2.kind IN ('LIKE', 'AGREE', 'ACCEPT', 'PROOF', 'CREDIT') THEN 1
                 WHEN r2.kind IN ('DISLIKE', 'DISAGREE', 'REJECT', 'DISPROOF') THEN -1
                 ELSE 0
-            END) as rating_comments
+            END), 0) as rating_comments
         FROM reaction r1
-        JOIN reaction r2 ON r1.id = r2.reply_to AND r2.deleted_at IS NULL
+        LEFT JOIN reaction r2 ON r1.id = r2.reply_to AND r2.deleted_at IS NULL
         WHERE r1.created_by IN ({placeholders}) AND r1.deleted_at IS NULL
             AND r1.kind IN ('COMMENT', 'QUOTE')
         GROUP BY r1.created_by
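The two rating hunks follow the same pattern: the LEFT JOIN keeps authors whose shouts or comments have drawn no reactions in the result with a rating of 0 instead of omitting them, and COALESCE(SUM(...), 0) guards against the aggregate coming back NULL. A rough, simplified illustration with hypothetical rows and a shortened kind list, not part of the commit:

-- hypothetical data: comment 10 by author 1 has two LIKE reactions; comment 20 by author 2 has none
-- INNER JOIN result: author 1 -> 2 (author 2 is missing)
-- LEFT JOIN result:  author 1 -> 2, author 2 -> 0
SELECT r1.created_by,
       COALESCE(SUM(CASE WHEN r2.kind = 'LIKE' THEN 1
                         WHEN r2.kind = 'DISLIKE' THEN -1
                         ELSE 0 END), 0) AS rating_comments
FROM reaction r1
LEFT JOIN reaction r2 ON r2.reply_to = r1.id
WHERE r1.kind IN ('COMMENT', 'QUOTE')
GROUP BY r1.created_by;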
@@ -748,39 +747,22 @@ async def get_author(
             msg = "cant find"
             raise ValueError(msg)

-        # Get the author data from the cache (full data)
-        cached_author = await get_cached_author(int(author_id), get_with_stat)
-
-        # Apply filtering on the client side, since the cache stores the full version
-        if cached_author:
-            # Create an Author object so its dict method can be used
-            temp_author = Author()
-            for key, value in cached_author.items():
-                if hasattr(temp_author, key) and key != "username":  # username is a property and cannot be set
-                    setattr(temp_author, key, value)
-            # Get the filtered version
-            author_dict = temp_author.dict(is_admin)
-            # Add statistics that may have been present in the cached version
-            if "stat" in cached_author:
-                author_dict["stat"] = cached_author["stat"]
-
-        if not author_dict or not author_dict.get("stat"):
-            # Use the same logic as in load_authors_by
-            try:
-                filter_by: AuthorsBy = {}
-                if slug:
-                    filter_by["slug"] = slug
-                elif author_id:
-                    filter_by["id"] = author_id
-
-                authors_with_stats = await get_authors_with_stats(limit=1, offset=0, by=filter_by)
-                if authors_with_stats and len(authors_with_stats) > 0:
-                    author_dict = authors_with_stats[0]
-                    # Cache the full data
-                    _t = asyncio.create_task(cache_author(author_dict))
-            except Exception as e:
-                logger.error(f"Error getting author stats: {e}")
-                # Fall back to the old method
+        # Always use the new statistics logic from get_authors_with_stats
+        # This guarantees consistency with load_authors_by
+        try:
+            filter_by: AuthorsBy = {}
+            if slug:
+                filter_by["slug"] = slug
+            elif author_id:
+                filter_by["id"] = author_id
+
+            authors_with_stats = await get_authors_with_stats(limit=1, offset=0, by=filter_by)
+            if authors_with_stats and len(authors_with_stats) > 0:
+                author_dict = authors_with_stats[0]
+                # Cache the full data
+                _t = asyncio.create_task(cache_author(author_dict))
+            else:
+                # Fall back to the old method if the author was not found
                 with local_session() as session:
                     if slug:
                         author = session.query(Author).filter_by(slug=slug).first()
@@ -788,6 +770,16 @@ async def get_author(
                         author = session.query(Author).filter_by(id=author_id).first()
                     if author:
                         author_dict = author.dict(is_admin)
+        except Exception as e:
+            logger.error(f"Error getting author stats: {e}")
+            # Fall back to the old method
+            with local_session() as session:
+                if slug:
+                    author = session.query(Author).filter_by(slug=slug).first()
+                else:
+                    author = session.query(Author).filter_by(id=author_id).first()
+                if author:
+                    author_dict = author.dict(is_admin)
     except ValueError:
         pass
     except Exception as exc: