# core/resolvers/reader.py

import json
import time

from sqlalchemy import nulls_last, text
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import and_, asc, case, desc, func, select

from orm.author import Author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.auth import login_accepted
from services.db import json_array_builder, json_builder, local_session
from services.schema import query
from services.search import search_text
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger


def apply_options(q, options, reactions_created_by=0):
"""
Применяет опции фильтрации и сортировки
[опционально] выбирая те публикации, на которые есть реакции/комментарии от указанного автора
:param q: Исходный запрос.
:param options: Опции фильтрации и сортировки.
:param reactions_created_by: Идентификатор автора.
:return: Запрос с примененными опциями.
"""
filters = options.get("filters")
if isinstance(filters, dict):
q = apply_filters(q, filters)
if reactions_created_by:
q = q.join(Reaction, Reaction.shout == Shout.id)
q = q.filter(Reaction.created_by == reactions_created_by)
if "commented" in filters:
q = q.filter(Reaction.body.is_not(None))
q = apply_sorting(q, options)
limit = options.get("limit", 10)
offset = options.get("offset", 0)
return q, limit, offset
2024-10-31 17:28:52 +00:00
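# Example (sketch, not executed): a typical options payload as it arrives from the
# GraphQL layer; the exact keys are defined by the schema and are assumed here.
#
#   q = select(Shout)
#   q, limit, offset = apply_options(
#       q,
#       {
#           "filters": {"featured": True, "topic": "culture"},
#           "order_by": "rating",
#           "order_by_desc": True,
#           "limit": 20,
#           "offset": 40,
#       },
#   )
#   # -> filtered and sorted query, limit=20, offset=40

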
def has_field(info, fieldname: str) -> bool:
    """
    Checks whether the field :fieldname: was requested in the GraphQL query.

    :param info: GraphQL context info.
    :param fieldname: Name of the field to look for.
    :return: True if the field was requested, False otherwise.
    """
    field_node = info.field_nodes[0]
    for selection in field_node.selection_set.selections:
        if hasattr(selection, "name") and selection.name.value == fieldname:
            return True
    return False


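# Example (sketch): for a request like `{ load_shouts_by(options: {...}) { id stat } }`
# has_field(info, "stat") is True while has_field(info, "main_topic") is False.
# Only the direct selections of the current field are inspected; fragments are not expanded.

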
def query_with_stat(info):
    """
    Adds per-shout statistics subqueries to the base query.
    """
    q = (
        select(Shout)
        .where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
        .join(Author, Author.id == Shout.created_by)
    )

    # main_author
    main_author = aliased(Author)
    q = q.join(main_author, main_author.id == Shout.created_by)
    q = q.add_columns(
        json_builder(
            "id",
            main_author.id,
            "name",
            main_author.name,
            "slug",
            main_author.slug,
            "pic",
            main_author.pic,
            "created_at",
            main_author.created_at,
        ).label("main_author")
    )

    if has_field(info, "main_topic"):
        main_topic_join = aliased(ShoutTopic)
        main_topic = aliased(Topic)
        q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
        q = q.join(main_topic, main_topic.id == main_topic_join.topic)
        q = q.add_columns(
            json_builder(
                "id", main_topic.id, "title", main_topic.title, "slug", main_topic.slug, "is_main", main_topic_join.main
            ).label("main_topic")
        )

    if has_field(info, "topics"):
        topics_subquery = (
            select(
                ShoutTopic.shout,
                json_array_builder(
                    json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
                ).label("topics"),
            )
            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
            .where(ShoutTopic.shout == Shout.id)
            .group_by(ShoutTopic.shout)
            .subquery()
        )
        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
        q = q.add_columns(topics_subquery.c.topics)

    if has_field(info, "authors"):
        authors_subquery = (
            select(
                ShoutAuthor.shout,
                json_array_builder(
                    json_builder(
                        "id",
                        Author.id,
                        "name",
                        Author.name,
                        "slug",
                        Author.slug,
                        "pic",
                        Author.pic,
                        "caption",
                        ShoutAuthor.caption,
                        "created_at",
                        Author.created_at,
                    )
                ).label("authors"),
            )
            .outerjoin(Author, ShoutAuthor.author == Author.id)
            .where(ShoutAuthor.shout == Shout.id)
            .group_by(ShoutAuthor.shout)
            .subquery()
        )
        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
        q = q.add_columns(authors_subquery.c.authors)

    if has_field(info, "stat"):
        stats_subquery = (
            select(
                Reaction.shout,
                func.count(func.distinct(Reaction.id))
                .filter(Reaction.kind == ReactionKind.COMMENT.value)
                .label("comments_count"),
                func.sum(
                    case(
                        (Reaction.kind == ReactionKind.LIKE.value, 1),
                        (Reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                )
                .filter(Reaction.reply_to.is_(None))
                .label("rating"),
                func.max(Reaction.created_at).filter(Reaction.reply_to.is_(None)).label("last_reacted_at"),
            )
            .where(Reaction.deleted_at.is_(None))
            .group_by(Reaction.shout)
            .subquery()
        )

        author_id = info.context.get("author", {}).get("id")
        user_reaction_subquery = None
        if author_id:
            user_reaction_subquery = (
                select(Reaction.shout, Reaction.kind.label("my_rate"))
                .where(
                    and_(
                        Reaction.created_by == author_id,
                        Reaction.deleted_at.is_(None),
                        Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                        Reaction.reply_to.is_(None),
                    )
                )
                .distinct(Reaction.shout)
                .subquery()
            )

        q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
        if user_reaction_subquery is not None:
            q = q.outerjoin(user_reaction_subquery, user_reaction_subquery.c.shout == Shout.id)

        # add the current user's own vote as the my_rate field of the stat JSON
        q = q.add_columns(
            json_builder(
                "comments_count",
                stats_subquery.c.comments_count,
                "rating",
                stats_subquery.c.rating,
                "last_reacted_at",
                stats_subquery.c.last_reacted_at,
                "my_rate",
                user_reaction_subquery.c.my_rate if user_reaction_subquery is not None else None,
            ).label("stat")
        )

    return q


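# Example (sketch): each row produced by this query exposes the Shout ORM object plus
# JSON columns, roughly:
#
#   row.Shout        -> Shout instance
#   row.main_author  -> {"id": ..., "name": ..., "slug": ..., "pic": ..., "created_at": ...}
#   row.main_topic   -> {"id": ..., "title": ..., "slug": ..., "is_main": true}   (if requested)
#   row.topics       -> [{"id": ..., "title": ..., ...}, ...]                     (if requested)
#   row.authors      -> [{"id": ..., "name": ..., ...}, ...]                      (if requested)
#   row.stat         -> {"comments_count": ..., "rating": ..., "last_reacted_at": ..., "my_rate": ...}
#
# Depending on the database driver these JSON columns may come back as dicts or as raw
# JSON strings; get_shouts_with_links below handles both cases.

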
def get_shouts_with_links(info, q, limit=20, offset=0):
"""
получение публикаций с применением пагинации
"""
2024-11-01 17:02:46 +00:00
shouts = []
try:
logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
q = q.limit(limit).offset(offset)
2024-11-01 17:24:09 +00:00
2024-11-01 17:02:46 +00:00
with local_session() as session:
logger.info("Executing query...")
2024-11-01 18:04:30 +00:00
t1 = time.time()
2024-11-01 17:02:46 +00:00
shouts_result = session.execute(q).all()
2024-11-01 18:04:30 +00:00
logger.info(f"Query executed, got {len(shouts_result)} results in {time.time() - t1:.3f} seconds")
2024-11-01 17:24:09 +00:00
2024-11-01 17:02:46 +00:00
if not shouts_result:
logger.warning("No results found")
return []
shouts = []
for idx, row in enumerate(shouts_result):
try:
# logger.debug(row)
shout = None
if hasattr(row, "Shout"):
shout = row.Shout
else:
logger.warning(f"Row {idx} has no Shout attribute: {row}")
continue
2024-11-01 17:24:09 +00:00
2024-11-01 17:02:46 +00:00
if shout:
shout_id = int(f"{shout.id}")
# logger.info(f"Processing shout ID: {shout_id}")
shout_dict = shout.dict()
if has_field(info, "created_by") and shout_dict.get("created_by"):
main_author_id = shout_dict.get("created_by")
a = session.query(Author).filter(Author.id == main_author_id).first()
shout_dict["created_by"] = {
"id": main_author_id,
"name": a.id,
"slug": a.slug,
2024-11-01 17:24:09 +00:00
"pic": a.pic,
2024-11-01 17:02:46 +00:00
}
2024-11-01 18:17:51 +00:00
if hasattr(row, "stat"):
stat = {}
# logger.debug(row.stat)
if isinstance(row.stat, str):
stat = json.loads(row.stat)
elif isinstance(row.stat, dict):
stat = row.stat
2024-11-01 18:09:53 +00:00
viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
2024-11-02 09:09:24 +00:00
shout_dict["stat"] = {**stat, "viewed": viewed, "commented": stat.get("comments_count", 0)}
2024-11-01 17:02:46 +00:00
if has_field(info, "main_topic") and hasattr(row, "main_topic"):
2024-11-02 09:09:24 +00:00
shout_dict["main_topic"] = (
json.loads(row.main_topic) if isinstance(row.stat, str) else row.main_topic
)
2024-11-01 17:02:46 +00:00
if has_field(info, "authors") and hasattr(row, "authors"):
2024-11-02 09:09:24 +00:00
shout_dict["authors"] = (
json.loads(row.authors) if isinstance(row.authors, str) else row.authors
)
2024-11-01 17:02:46 +00:00
if has_field(info, "topics") and hasattr(row, "topics"):
2024-11-01 18:25:25 +00:00
shout_dict["topics"] = json.loads(row.topics) if isinstance(row.topics, str) else row.topics
2024-11-01 17:02:46 +00:00
shouts.append(shout_dict)
2024-11-01 17:24:09 +00:00
2024-11-01 17:02:46 +00:00
except Exception as row_error:
logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
continue
except Exception as e:
logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
raise
2024-11-01 17:24:09 +00:00
finally:
2024-11-01 14:26:45 +00:00
return shouts
2024-03-25 17:28:58 +00:00
2024-08-08 13:10:45 +00:00
2024-11-01 06:50:19 +00:00
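# Example (sketch): a returned item is a plain dict, approximately:
#
#   {
#       "id": 123,
#       "slug": "some-shout",
#       ...,  # other Shout columns from shout.dict()
#       "created_by": {"id": 1, "name": "...", "slug": "...", "pic": "..."},  # if requested
#       "main_topic": {"id": 5, "title": "...", "slug": "...", "is_main": True},
#       "authors": [...],
#       "topics": [...],
#       "stat": {"comments_count": 2, "rating": 7, "last_reacted_at": 1730000000,
#                "viewed": 42, "commented": 2},
#   }

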
def apply_filters(q, filters):
    """
    Applies common filters to the query.

    :param q: Base query.
    :param filters: Dictionary of filters.
    :return: Query with the filters applied.
    """
    if isinstance(filters, dict):
        if "featured" in filters:
            featured_filter = filters.get("featured")
            if featured_filter:
                q = q.filter(Shout.featured_at.is_not(None))
            else:
                q = q.filter(Shout.featured_at.is_(None))
        by_layouts = filters.get("layouts")
        if by_layouts and isinstance(by_layouts, list):
            q = q.filter(Shout.layout.in_(by_layouts))
        by_author = filters.get("author")
        if by_author:
            q = q.filter(Shout.authors.any(slug=by_author))
        by_topic = filters.get("topic")
        if by_topic:
            q = q.filter(Shout.topics.any(slug=by_topic))
        by_after = filters.get("after")
        if by_after:
            ts = int(by_after)
            q = q.filter(Shout.created_at > ts)

    return q


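# Example (sketch): a filters dict as accepted by apply_filters; all keys are optional
# and the values shown are illustrative only.
#
#   filters = {
#       "featured": True,         # only featured shouts
#       "layouts": ["article"],   # layout whitelist
#       "author": "some-author",  # author slug
#       "topic": "culture",       # topic slug
#       "after": 1704067200,      # unix timestamp, created_at > after
#   }
#   q = apply_filters(select(Shout), filters)

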
@query.field("get_shout")
2024-10-31 17:28:52 +00:00
async def get_shout(_, info, slug="", shout_id=0):
    """
    Fetches a single shout by slug or id.

    :param _: Root query object (unused)
    :param info: GraphQL context info
    :param slug: Unique slug of the shout
    :param shout_id: Shout id
    :return: Shout data with statistics included
    """
    try:
        # Base query with the statistics subqueries
        q = query_with_stat(info)

        # Filter by slug or id
        if slug:
            q = q.where(Shout.slug == slug)
        elif shout_id:
            q = q.where(Shout.id == shout_id)
        else:
            return None

        # Fetch the result via get_shouts_with_links with limit=1
        shouts = get_shouts_with_links(info, q, limit=1)

        # Return the first (and only) shout if it was found
        return shouts[0] if shouts else None

    except Exception as exc:
        logger.error(f"Error in get_shout: {exc}", exc_info=True)
        return None


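# Example (sketch): a client-side query resolved by get_shout; the selected fields
# besides slug/shout_id depend on the GraphQL schema and are assumed here.
#
#   query {
#     get_shout(slug: "my-post") {
#       id
#       slug
#       main_author
#       stat
#     }
#   }

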
def apply_sorting(q, options):
    """
    Applies sorting while keeping the row order stable.
    """
    order_str = options.get("order_by")
    if order_str in ["rating", "comments_count", "last_reacted_at"]:
        query_order_by = desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
        # DISTINCT ON must include the sort column
        q = q.distinct(text(order_str), Shout.id).order_by(nulls_last(query_order_by), Shout.id)
    else:
        q = q.distinct(Shout.published_at, Shout.id).order_by(Shout.published_at.desc(), Shout.id)

    return q


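# Example (sketch): for options = {"order_by": "rating"} the generated SQL is roughly
# (PostgreSQL DISTINCT ON semantics assumed):
#
#   SELECT DISTINCT ON (rating, shout.id) ...
#   ORDER BY rating DESC NULLS LAST, shout.id
#
# Without order_by the query falls back to ordering by published_at DESC.

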
@query.field("load_shouts_by")
2024-11-12 14:56:20 +00:00
@login_accepted
2024-10-31 17:28:52 +00:00
async def load_shouts_by(_, info, options):
2022-11-15 02:36:30 +00:00
"""
2024-08-07 08:35:59 +00:00
Загрузка публикаций с фильтрацией, сортировкой и пагинацией.
2024-11-01 07:04:32 +00:00
:param _: Корневой объект запроса (не используется)
:param info: Информация о контексте GraphQL
2024-08-07 08:35:59 +00:00
:param options: Опции фильтрации и сортировки.
:return: Список публикаций, удовлетворяющих критериям.
2022-11-15 02:36:30 +00:00
"""
2024-11-12 14:56:20 +00:00
# Базовый запрос: используем специальный запрос с статистикой
2024-11-01 10:50:47 +00:00
q = query_with_stat(info)
2024-11-01 11:29:58 +00:00
q, limit, offset = apply_options(q, options)
2023-12-09 18:15:30 +00:00
2024-11-01 06:50:19 +00:00
# Передача сформированного запроса в метод получения публикаций с учетом сортировки и пагинации
2024-10-31 17:28:52 +00:00
return get_shouts_with_links(info, q, limit, offset)
2023-02-06 14:27:23 +00:00
2024-03-05 13:59:55 +00:00
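# Example (sketch): a typical client query handled by load_shouts_by; the options
# shape mirrors apply_options/apply_filters above and is assumed here.
#
#   query {
#     load_shouts_by(options: {
#       filters: {topic: "culture", featured: true},
#       order_by: "comments_count",
#       limit: 20,
#       offset: 0
#     }) {
#       id
#       slug
#       stat
#     }
#   }

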
@query.field("load_shouts_search")
async def load_shouts_search(_, info, text, options):
    """
    Searches shouts by text.

    :param _: Root query object (unused)
    :param info: GraphQL context info
    :param text: Search string.
    :param options: Filtering and sorting options.
    :return: List of shouts matching the search text.
    """
    limit = options.get("limit", 10)
    offset = options.get("offset", 0)
    if isinstance(text, str) and len(text) > 2:
        results = await search_text(text, limit, offset)
        scores = {}
        hits_ids = []
        for sr in results:
            shout_id = sr.get("id")
            if shout_id:
                shout_id = str(shout_id)
                scores[shout_id] = sr.get("score")
                hits_ids.append(shout_id)

        q = (
            query_with_stat(info)
            if has_field(info, "stat")
            else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
        )
        q = q.filter(Shout.id.in_(hits_ids))
        # filters are nested under options, as in apply_options
        q = apply_filters(q, options.get("filters", {}))
        q = apply_sorting(q, options)
        shouts = get_shouts_with_links(info, q, limit, offset)

        # shouts are plain dicts here, so attach the search score by key
        for shout in shouts:
            shout["score"] = scores[f"{shout['id']}"]
        shouts.sort(key=lambda x: x["score"], reverse=True)
        return shouts
    return []


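# Example (sketch): results come back re-sorted by the search backend's score, which is
# attached to every shout dict.
#
#   shouts = await load_shouts_search(None, info, "digital art", {"limit": 10, "offset": 0})
#   # -> [{"id": ..., "score": 0.93, ...}, {"id": ..., "score": 0.71, ...}]

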
@query.field("load_shouts_unrated")
async def load_shouts_unrated(_, info, options):
    """
    Loads shouts that have fewer than 3 LIKE/DISLIKE reactions.

    :param _: Root query object (unused)
    :param info: GraphQL context info
    :param options: Filtering and sorting options.
    :return: List of shouts.
    """
    rated_shouts = (
        select(Reaction.shout)
        .where(
            and_(
                Reaction.deleted_at.is_(None),
                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
            )
        )
        .group_by(Reaction.shout)
        .having(func.count("*") >= 3)
        .scalar_subquery()
    )

    # add topic inside output data in main_topic field
    aliased_topic = aliased(Topic)
    q = (
        select(Shout)
        .join(aliased_topic, aliased_topic.id == Shout.main_topic.id)
        .where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None), ~Shout.id.in_(rated_shouts)))
        # .order_by(desc(Shout.published_at))
        .order_by(func.random())
    )

    limit = options.get("limit", 5)
    offset = options.get("offset", 0)
    return get_shouts_with_links(info, q, limit, offset)


2024-04-17 15:32:23 +00:00
@query.field("load_shouts_random_top")
2024-10-31 17:28:52 +00:00
async def load_shouts_random_top(_, info, options):
    """
    Loads random shouts ordered by their top reactions.

    :param _: Root query object (unused)
    :param info: GraphQL context info.
    :param options: Filtering and sorting options.
    :return: List of random shouts.
    """
    aliased_reaction = aliased(Reaction)
    subquery = select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))

    filters = options.get("filters")
    if isinstance(filters, dict):
        subquery = apply_filters(subquery, filters)

    subquery = subquery.group_by(Shout.id).order_by(
        desc(
            func.sum(
                case(
                    # do not count reactions to comments
                    (aliased_reaction.reply_to.is_not(None), 0),
                    (aliased_reaction.kind == ReactionKind.LIKE.value, 1),
                    (aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
                    else_=0,
                )
            )
        )
    )

    random_limit = options.get("random_limit", 100)
    subquery = subquery.limit(random_limit)
    q = query_with_stat(info)
    q = q.filter(Shout.id.in_(subquery))
    q = q.order_by(func.random())
    limit = options.get("limit", 10)
    return get_shouts_with_links(info, q, limit)