distinct-fix

This commit is contained in:
Untone 2025-05-30 08:51:24 +03:00
parent 1223c1d278
commit d917d63bf2
4 changed files with 9 additions and 7 deletions

2
cache/cache.py vendored
View File

@@ -177,7 +177,7 @@ async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
missing_ids = [author_ids[index] for index in missing_indices]
with local_session() as session:
query = select(Author).where(Author.id.in_(missing_ids))
missing_authors = session.execute(query).scalars().all()
missing_authors = session.execute(query).scalars().unique().all()
await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
for index, author in zip(missing_indices, missing_authors):
authors[index] = author.dict()

View File

@@ -47,7 +47,7 @@ async def get_all_authors(current_user_id=None):
with local_session() as session:
# Запрос на получение базовой информации об авторах
authors_query = select(Author).where(Author.deleted_at.is_(None))
authors = session.execute(authors_query).scalars().all()
authors = session.execute(authors_query).scalars().unique().all()
# Преобразуем авторов в словари с учетом прав доступа
return [author.dict(current_user_id, False) for author in authors]
@@ -174,7 +174,7 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
base_query = base_query.limit(limit).offset(offset)
# Получаем авторов
authors = session.execute(base_query).scalars().all()
authors = session.execute(base_query).scalars().unique().all()
author_ids = [author.id for author in authors]
if not author_ids:
@@ -418,7 +418,7 @@ async def load_authors_search(_, info, text: str, limit: int = 10, offset: int =
with local_session() as session:
# Simple query to get authors by IDs - no need for stats here
authors_query = select(Author).filter(Author.id.in_(author_ids))
db_authors = session.execute(authors_query).scalars().all()
db_authors = session.execute(authors_query).scalars().unique().all()
if not db_authors:
return []

View File

@@ -79,7 +79,9 @@ def get_reactions_with_stat(q, limit=10, offset=0):
>>> get_reactions_with_stat(q, 10, 0) # doctest: +SKIP
[{'id': 1, 'body': 'Текст комментария', 'stat': {'rating': 5, 'comments_count': 3}, ...}]
"""
q = q.distinct().limit(limit).offset(offset)
# Убираем distinct() поскольку GROUP BY уже обеспечивает уникальность,
# а distinct() вызывает ошибку PostgreSQL с JSON полями
q = q.limit(limit).offset(offset)
reactions = []
with local_session() as session:

View File

@@ -37,7 +37,7 @@ async def get_all_topics():
with local_session() as session:
# Запрос на получение базовой информации о темах
topics_query = select(Topic)
topics = session.execute(topics_query).scalars().all()
topics = session.execute(topics_query).scalars().unique().all()
# Преобразуем темы в словари
return [topic.dict() for topic in topics]
@@ -103,7 +103,7 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
base_query = base_query.limit(limit).offset(offset)
# Получаем темы
topics = session.execute(base_query).scalars().all()
topics = session.execute(base_query).scalars().unique().all()
topic_ids = [topic.id for topic in topics]
if not topic_ids: