distinct-fix
parent 1223c1d278
commit d917d63bf2
cache/cache.py
@@ -177,7 +177,7 @@ async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
     missing_ids = [author_ids[index] for index in missing_indices]
     with local_session() as session:
         query = select(Author).where(Author.id.in_(missing_ids))
-        missing_authors = session.execute(query).scalars().all()
+        missing_authors = session.execute(query).scalars().unique().all()
         await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
         for index, author in zip(missing_indices, missing_authors):
             authors[index] = author.dict()
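Every hunk in this commit makes the same change: `session.execute(...).scalars().all()` becomes `session.execute(...).scalars().unique().all()`. The diff does not say why, but the usual reason in SQLAlchemy 1.4/2.0 is joined eager loading against collections (for example, relationships declared with lazy="joined"): the JOIN returns the parent row once per child, and Result.all() refuses to hand back those duplicated ORM rows until unique() is called. A minimal, self-contained sketch of that behaviour, with stand-in Author/Book models rather than this project's real ones:

# Minimal sketch, not project code: shows when SQLAlchemy requires unique().
from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    joinedload,
    mapped_column,
    relationship,
)


class Base(DeclarativeBase):
    pass


class Author(Base):
    __tablename__ = "author"
    id: Mapped[int] = mapped_column(primary_key=True)
    books: Mapped[list["Book"]] = relationship(back_populates="author")


class Book(Base):
    __tablename__ = "book"
    id: Mapped[int] = mapped_column(primary_key=True)
    author_id: Mapped[int] = mapped_column(ForeignKey("author.id"))
    author: Mapped["Author"] = relationship(back_populates="books")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Author(id=1, books=[Book(id=1), Book(id=2)]))
    session.commit()

    query = select(Author).options(joinedload(Author.books))

    # Without unique(), .all() raises InvalidRequestError because the joined
    # eager load duplicates the Author row once per Book in the raw result.
    authors = session.execute(query).scalars().unique().all()
    assert len(authors) == 1  # duplicates collapsed by identity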
@@ -47,7 +47,7 @@ async def get_all_authors(current_user_id=None):
     with local_session() as session:
         # Query to fetch basic author information
         authors_query = select(Author).where(Author.deleted_at.is_(None))
-        authors = session.execute(authors_query).scalars().all()
+        authors = session.execute(authors_query).scalars().unique().all()

         # Convert authors to dicts, respecting access permissions
         return [author.dict(current_user_id, False) for author in authors]
@@ -174,7 +174,7 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
         base_query = base_query.limit(limit).offset(offset)

         # Fetch the authors
-        authors = session.execute(base_query).scalars().all()
+        authors = session.execute(base_query).scalars().unique().all()
         author_ids = [author.id for author in authors]

         if not author_ids:
@@ -418,7 +418,7 @@ async def load_authors_search(_, info, text: str, limit: int = 10, offset: int =
     with local_session() as session:
         # Simple query to get authors by IDs - no need for stats here
         authors_query = select(Author).filter(Author.id.in_(author_ids))
-        db_authors = session.execute(authors_query).scalars().all()
+        db_authors = session.execute(authors_query).scalars().unique().all()

         if not db_authors:
             return []
@@ -79,7 +79,9 @@ def get_reactions_with_stat(q, limit=10, offset=0):
     >>> get_reactions_with_stat(q, 10, 0)  # doctest: +SKIP
     [{'id': 1, 'body': 'Comment text', 'stat': {'rating': 5, 'comments_count': 3}, ...}]
     """
-    q = q.distinct().limit(limit).offset(offset)
+    # Drop distinct(): the GROUP BY already guarantees row uniqueness,
+    # and distinct() triggers a PostgreSQL error on JSON columns
+    q = q.limit(limit).offset(offset)
     reactions = []

     with local_session() as session:
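The new comment above points at a known PostgreSQL limitation: SELECT DISTINCT must compare every selected column for equality, and the plain json type has no equality operator, so the database rejects the statement (typically with "could not identify an equality operator for type json"; jsonb does not have this problem). Because the reactions query already groups by the reaction row, distinct() added nothing, and dropping it is the cheapest fix. A rough sketch of the pattern, with a stand-in Reaction model rather than this project's schema:

# Rough sketch of the failing vs. working pattern; Reaction is a stand-in model.
from sqlalchemy import JSON, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Reaction(Base):
    __tablename__ = "reaction"
    id: Mapped[int] = mapped_column(primary_key=True)
    body: Mapped[str]
    media: Mapped[dict] = mapped_column(JSON)  # json column: no "=" operator in PG


# Fails when executed on PostgreSQL: DISTINCT must compare the json "media" column.
broken = select(Reaction).distinct().limit(10).offset(0)

# Works: grouping by the primary key already yields one row per reaction,
# so the DISTINCT is redundant and can simply be dropped.
fixed = select(Reaction).group_by(Reaction.id).limit(10).offset(0)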
@@ -37,7 +37,7 @@ async def get_all_topics():
     with local_session() as session:
         # Query to fetch basic topic information
         topics_query = select(Topic)
-        topics = session.execute(topics_query).scalars().all()
+        topics = session.execute(topics_query).scalars().unique().all()

         # Convert topics to dicts
         return [topic.dict() for topic in topics]
@@ -103,7 +103,7 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
         base_query = base_query.limit(limit).offset(offset)

         # Fetch the topics
-        topics = session.execute(base_query).scalars().all()
+        topics = session.execute(base_query).scalars().unique().all()
         topic_ids = [topic.id for topic in topics]

         if not topic_ids:
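Not part of the commit, but a quick way to check that the de-duplication actually holds: call one of the touched resolvers and look for repeated ids. Everything below is hypothetical glue; the import path, the presence of an "id" key in Topic.dict(), and the availability of pytest-asyncio are all assumptions.

# Hypothetical smoke test, not part of this commit.
import pytest

from resolvers.topic import get_all_topics  # assumed module path, not shown in this diff


@pytest.mark.asyncio
async def test_get_all_topics_returns_each_topic_once():
    topics = await get_all_topics()
    ids = [topic["id"] for topic in topics]  # assumes Topic.dict() exposes "id"
    # unique() should have collapsed rows duplicated by joined eager loads
    assert len(ids) == len(set(ids))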