search-debug

commit 7d9a3a59e3, parent 5729e65e55, 2025-08-30 20:06:12 +03:00


@@ -68,7 +68,7 @@ def has_field(info: GraphQLResolveInfo, fieldname: str) -> bool:
     return False


-def query_with_stat(info: GraphQLResolveInfo) -> Select:
+def query_with_stat(info: GraphQLResolveInfo, force_topics: bool = False) -> Select:
     """
     :param info: GraphQL context info, used to get the authorized user's id
     :return: Query with statistics subqueries.
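
Note: force_topics defaults to False, so existing callers of query_with_stat are unaffected; only the search path at the bottom of this diff opts in. A minimal sketch of the two calling conventions (the call sites here are illustrative, not taken from this commit):

    q = query_with_stat(info)                     # topics joined only when the GraphQL query asks for them
    q = query_with_stat(info, force_topics=True)  # topics always joined, as load_shouts_search does below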
@@ -100,11 +100,12 @@ def query_with_stat(info: GraphQLResolveInfo) -> Select:
             ).label("main_author")
         )
-    if has_field(info, "main_topic"):
+    if has_field(info, "main_topic") or force_topics:
+        logger.debug(f"[query_with_stat] Adding main_topic subquery (force_topics={force_topics})")
         main_topic_join = aliased(ShoutTopic)
         main_topic = aliased(Topic)
-        q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
-        q = q.join(main_topic, main_topic.id == main_topic_join.topic)
+        q = q.outerjoin(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
+        q = q.outerjoin(main_topic, main_topic.id == main_topic_join.topic)
         q = q.add_columns(
             json_builder(
                 "id",
@@ -147,7 +148,8 @@ def query_with_stat(info: GraphQLResolveInfo) -> Select:
     q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
     q = q.add_columns(authors_subquery.c.authors)
-    if has_field(info, "topics"):
+    if has_field(info, "topics") or force_topics:
+        logger.debug(f"[query_with_stat] Adding topics subquery (force_topics={force_topics})")
         topics_subquery = (
             select(
                 ShoutTopic.shout,
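
The body of topics_subquery is elided by the hunk boundary. A plausible shape, by analogy with the json_builder call above (json_array_builder is a hypothetical helper name, not confirmed by this diff):

    topics_subquery = (
        select(
            ShoutTopic.shout,
            # hypothetical JSON-aggregation helper, named by analogy with json_builder
            json_array_builder(json_builder("id", Topic.id, "title", Topic.title)).label("topics"),
        )
        .join(Topic, ShoutTopic.topic == Topic.id)
        .group_by(ShoutTopic.shout)
        .subquery()
    )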
@@ -204,14 +206,21 @@ def query_with_stat(info: GraphQLResolveInfo) -> Select:
 def get_shouts_with_links(
-    info: GraphQLResolveInfo, q: Select, limit: int = 20, offset: int = 0, sort_meta: dict[str, Any] | None = None
+    info: GraphQLResolveInfo,
+    q: Select,
+    limit: int = 20,
+    offset: int = 0,
+    sort_meta: dict[str, Any] | None = None,
+    force_topics: bool = False,
 ) -> list[Shout]:
     """
     Retrieve publications, with pagination applied.
     """
     shouts = []
     try:
-        # logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
+        logger.debug(
+            f"[get_shouts_with_links] Starting with limit={limit}, offset={offset}, force_topics={force_topics}"
+        )
         q = q.limit(limit).offset(offset)

         with local_session() as session:
@@ -325,22 +334,22 @@ def get_shouts_with_links(
                 # Handle main_topic and topics
                 topics = None
-                if has_field(info, "topics") and hasattr(row, "topics"):
+                if (has_field(info, "topics") or force_topics) and hasattr(row, "topics"):
                     topics = orjson.loads(row.topics) if isinstance(row.topics, str) else row.topics
-                    # logger.debug(f"Shout#{shout_id} topics: {topics}")
+                    logger.debug(f"Shout#{shout_id} topics: {topics}")
                     shout_dict["topics"] = topics

-                if has_field(info, "main_topic"):
+                if has_field(info, "main_topic") or force_topics:
                     main_topic = None
                     if hasattr(row, "main_topic"):
-                        # logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}")
+                        logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}")
                         main_topic = (
                             orjson.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
                         )
-                        # logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}")
+                        logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}")

                     if not main_topic and topics and len(topics) > 0:
-                        # logger.info(f"No main_topic found for shout#{shout_id}, using first topic from list")
+                        logger.info(f"No main_topic found for shout#{shout_id}, using first topic from list")
                         main_topic = {
                             "id": topics[0]["id"],
                             "title": topics[0]["title"],
@@ -356,7 +365,7 @@ def get_shouts_with_links(
                             "is_main": True,
                         }
                     shout_dict["main_topic"] = main_topic
-                    # logger.debug(f"Final main_topic for shout#{shout_id}: {main_topic}")
+                    logger.debug(f"Final main_topic for shout#{shout_id}: {main_topic}")

                 if has_field(info, "authors") and hasattr(row, "authors"):
                     authors_data = orjson.loads(row.authors) if isinstance(row.authors, str) else row.authors
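
Together, the two hunks above implement a fallback: with the outer join, row.main_topic can now be NULL, and the first entry of topics is promoted in its place. Extracted as a standalone function for clarity (a sketch; the diff elides any dict fields between "title" and "is_main"):

    def pick_main_topic(main_topic: dict | None, topics: list[dict] | None) -> dict | None:
        """Return the stored main topic, else promote the first topic in the list."""
        if main_topic:
            return main_topic
        if topics:
            first = topics[0]
            # ...fields between "title" and "is_main" elided in the diff
            return {"id": first["id"], "title": first["title"], "is_main": True}
        return None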
@@ -534,6 +543,19 @@ async def load_shouts_search(
     offset = options.get("offset", 0)

     logger.info(f"[load_shouts_search] Starting search for '{text}' with limit={limit}, offset={offset}")
+    logger.debug(
+        f"[load_shouts_search] Requested fields: topics={has_field(info, 'topics')}, main_topic={has_field(info, 'main_topic')}"
+    )
+    # Log all requested fields, for diagnostics
+    field_selections = []
+    if info.field_nodes:
+        for field_node in info.field_nodes:
+            if field_node.selection_set:
+                for selection in field_node.selection_set.selections:
+                    if hasattr(selection, "name"):
+                        field_selections.append(selection.name.value)
+    logger.info(f"[load_shouts_search] All requested fields: {field_selections}")

     if isinstance(text, str) and len(text) > 2:
         logger.debug(f"[load_shouts_search] Calling Muvera search service for '{text}'")
@@ -559,17 +581,14 @@ async def load_shouts_search(
             logger.warning(f"[load_shouts_search] No valid shout IDs found for query '{text}'")
             return []

-        q = (
-            query_with_stat(info)
-            if has_field(info, "stat")
-            else select(Shout).where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
-        )
+        # Force topics on for search results
+        q = query_with_stat(info, force_topics=True)
         q = q.where(Shout.id.in_(hits_ids))
         q = apply_filters(q, options)
         q = apply_sorting(q, options)

         logger.debug(f"[load_shouts_search] Executing database query for {len(hits_ids)} shout IDs")
-        shouts = get_shouts_with_links(info, q, limit, offset)
+        shouts = get_shouts_with_links(info, q, limit, offset, force_topics=True)
         logger.debug(f"[load_shouts_search] Database returned {len(shouts)} shouts")

         shouts_dicts: list[dict[str, Any]] = []
         for shout in shouts: