fixed-shouts-load

This commit is contained in: fixed-shouts-load
Untone 2024-11-01 20:02:46 +03:00
parent 4ef5d172a0
commit fcbbe4fcac
2 changed files with 158 additions and 150 deletions

CHECKS (file deleted, 5 lines removed)

@@ -1,5 +0,0 @@
WAIT=10
TIMEOUT=10
ATTEMPTS=3
/
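# Likely a Dokku-style CHECKS file, deleted by this commit: wait 10s between attempts, 10s timeout per attempt, 3 attempts, probing the "/" path.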


@@ -61,17 +61,90 @@ def query_with_stat(info):
:return: Query with a statistics subquery.
"""
- q = select(Shout)
+ q = select(Shout).distinct().group_by(Shout.id)
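# distinct() + group_by(Shout.id) collapse the row fan-out produced by the joins added below, keeping one row per shout.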
# Create aliases for all the tables involved
main_author = aliased(Author)
main_topic_join = aliased(ShoutTopic)
main_topic = aliased(Topic)
# main_author
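# Pack the creator's fields into a single JSON column; func.json_object assumes a dialect that provides json_object() (e.g. SQLite JSON1 or MySQL).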
q = q.join(main_author, main_author.id == Shout.created_by)
q = q.add_columns(
func.json_object(
'id', main_author.id,
'name', main_author.name,
'slug', main_author.slug,
'pic', main_author.pic
).label("main_author")
)
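# Main topic: the ShoutTopic row flagged main=True, joined and packed into a JSON column the same way.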
if has_field(info, "main_topic"):
main_topic = aliased(Topic)
q = q.join(
main_topic_join,
and_(
main_topic_join.shout == Shout.id,
main_topic_join.main.is_(True)
)
).join(
main_topic,
main_topic.id == main_topic_join.topic
)
q = q.add_columns(
func.json_object(
'id', main_topic.id,
'title', main_topic.title,
'slug', main_topic.slug,
'is_main', main_topic_join.main
).label("main_topic")
)
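# Aggregate every topic linked to the shout into one JSON array; json_group_array is SQLite's JSON aggregate.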
if has_field(info, "topics"):
topics_subquery = (
select(
ShoutTopic.shout,  # expose the shout id so the outer join below can use topics_subquery.c.shout
func.json_group_array(
func.json_object(
'id', Topic.id,
'title', Topic.title,
'slug', Topic.slug,
'is_main', ShoutTopic.main
)
).label("topics")
)
.outerjoin(Topic, ShoutTopic.topic == Topic.id)
.group_by(ShoutTopic.shout)
.subquery()
)
q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
q = q.add_columns(topics_subquery.c.topics)
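# Aggregate the shout's authors (with their ShoutAuthor captions) into one JSON array.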
if has_field(info, "authors"):
authors_subquery = (
select(
ShoutAuthor.shout,  # expose the shout id so the outer join below can use authors_subquery.c.shout
func.json_group_array(
func.json_object(
'id', Author.id,
'name', Author.name,
'slug', Author.slug,
'pic', Author.pic,
'caption', ShoutAuthor.caption
)
).label("authors")
)
.outerjoin(Author, ShoutAuthor.author == Author.id)
.group_by(ShoutAuthor.shout)
.subquery()
)
q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
q = q.add_columns(authors_subquery.c.authors)
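# Reaction statistics per shout: distinct comment count, a +1/-1 like/dislike rating counted for top-level reactions only, and the timestamp of the latest top-level reaction.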
if has_field(info, "stat"):
stats_subquery = (
select(
Reaction.shout,
- func.count(
- case(
- (Reaction.kind == ReactionKind.COMMENT.value, 1),
- else_=None,
- )
+ func.count(func.distinct(Reaction.id)).filter(
+ Reaction.kind == ReactionKind.COMMENT.value
).label("comments_count"),
func.coalesce(
func.sum(
@@ -79,17 +152,20 @@ def query_with_stat(info):
(Reaction.reply_to.is_not(None), 0),
(Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1),
- else_=0,
+ else_=0
)
),
- 0,
+ 0
).label("rating"),
- func.max(
- case(
- (Reaction.reply_to.is_(None), Reaction.created_at),
- else_=None,
- )
- ).label("last_reacted_at"),
+ func.coalesce(
+ func.max(
+ case(
+ (Reaction.reply_to.is_(None), Reaction.created_at),
+ else_=None
+ )
+ ),
+ 0
+ ).label("last_reacted_at")
)
.where(Reaction.deleted_at.is_(None))
.group_by(Reaction.shout)
@@ -97,70 +173,17 @@ def query_with_stat(info):
)
q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
q = q.add_columns(stats_subquery.c.comments_count, stats_subquery.c.rating, stats_subquery.c.last_reacted_at)
if has_field(info, "created_by"):
q = q.outerjoin(Author, Shout.created_by == Author.id)
# aggregate in one column
q = q.add_columns(
Author.id.label("main_author_id"),
Author.name.label("main_author_name"),
Author.slug.label("main_author_slug"),
Author.pic.label("main_author_pic"),
)
if has_field(info, "main_topic"):
q = q.outerjoin(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
q = q.outerjoin(Topic, ShoutTopic.topic == Topic.id)
q = q.add_columns(
Topic.id.label("main_topic_id"), Topic.title.label("main_topic_title"), Topic.slug.label("main_topic_slug")
)
if has_field(info, "authors"):
topics_subquery = (
select(
ShoutTopic.shout,
Topic.id.label("topic_id"),
Topic.title.label("topic_title"),
Topic.slug.label("topic_slug"),
ShoutTopic.main.label("is_main"),
)
.outerjoin(Topic, ShoutTopic.topic == Topic.id)
.where(ShoutTopic.shout == Shout.id)
.subquery()
)
q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
q = q.add_columns(
topics_subquery.c.topic_id,
topics_subquery.c.topic_title,
topics_subquery.c.topic_slug,
topics_subquery.c.is_main,
)
authors_subquery = (
select(
ShoutAuthor.shout,
Author.id.label("author_id"),
Author.name.label("author_name"),
Author.slug.label("author_slug"),
Author.pic.label("author_pic"),
ShoutAuthor.caption.label("author_caption"),
)
.outerjoin(Author, ShoutAuthor.author == Author.id)
.where(ShoutAuthor.shout == Shout.id)
.subquery()
)
q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
q = q.add_columns(
authors_subquery.c.author_id,
authors_subquery.c.author_name,
authors_subquery.c.author_slug,
authors_subquery.c.author_pic,
authors_subquery.c.author_caption,
func.json_object(
'comments_count', stats_subquery.c.comments_count,
'rating', stats_subquery.c.rating,
'last_reacted_at', stats_subquery.c.last_reacted_at,
).label("stat")
)
# Filter: only published, not deleted shouts
q = q.where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
return q
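# A minimal usage sketch (hypothetical caller; the {"topic": "culture"} filter value is assumed, not taken from this commit):
#   q = query_with_stat(info)
#   q = apply_filters(q, {"topic": "culture"})
#   shouts = get_shouts_with_links(info, q, limit=20, offset=0)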
@@ -168,83 +191,73 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
"""
Fetch publications with pagination applied.
"""
q = q.limit(limit).offset(offset)
shouts = []
try:
logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
q = q.limit(limit).offset(offset)
includes_authors = has_field(info, "authors")
includes_topics = has_field(info, "topics")
includes_stat = has_field(info, "stat")
includes_media = has_field(info, "media")
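# Remember which fields the client requested (has_field) so the per-row mapping below can skip unused work.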
with local_session() as session:
logger.info("Executing query...")
shouts_result = session.execute(q).all()
logger.info(f"Query executed, got {len(shouts_result)} results")
with local_session() as session:
shouts_result = session.execute(q).all()
if not shouts_result:
return []
if not shouts_result:
logger.warning("No results found")
return []
shouts = []
shouts = []
for row in shouts_result:
logger.debug(row)
shout_id = row.Shout.id
shout_dict = row.Shout.dict()
shout_dict.update(
{
"authors": [],
"topics": set(),
"media": json.dumps(shout_dict.get("media", [])) if includes_media else [],
"stat": {
"viewed": ViewedStorage.get_shout(shout_id=shout_id) or 0,
"commented": row.comments_count or 0,
"rating": row.rating or 0,
"last_reacted_at": row.last_reacted_at,
}
if includes_stat and hasattr(row, "comments_count")
else {},
}
)
if includes_authors and hasattr(row, "main_author_id"):
shout_dict["created_by"] = {
"id": row.main_author_id,
"name": row.main_author_name or "Аноним",
"slug": row.main_author_slug or "",
"pic": row.main_author_pic or "",
}
if includes_topics and hasattr(row, "main_topic_id"):
shout_dict["main_topic"] = {
"id": row.main_topic_id or 0,
"title": row.main_topic_title or "",
"slug": row.main_topic_slug or "",
}
for idx, row in enumerate(shouts_result):
try:
# logger.debug(row)
shout = None
if hasattr(row, "Shout"):
shout = row.Shout
else:
logger.warning(f"Row {idx} has no Shout attribute: {row}")
continue
if includes_authors and hasattr(row, "author_id"):
author = {
"id": row.author_id,
"name": row.author_name,
"slug": row.author_slug,
"pic": row.author_pic,
"caption": row.author_caption,
}
if not filter(lambda x: x["id"] == author["id"], shout_dict["authors"]):
shout_dict["authors"].append(author)
if shout:
shout_id = int(f"{shout.id}")
# logger.info(f"Processing shout ID: {shout_id}")
shout_dict = shout.dict()
if has_field(info, "created_by") and shout_dict.get("created_by"):
main_author_id = shout_dict.get("created_by")
a = session.query(Author).filter(Author.id == main_author_id).first()
shout_dict["created_by"] = {
"id": main_author_id,
"name": a.id,
"slug": a.slug,
"pic": a.pic
}
# logger.info({ **shout_dict, "body": "", "media": []})
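# "stat" comes back as a JSON string built in query_with_stat; decode it and merge in view counts from ViewedStorage.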
stat = json.loads(row.stat) if hasattr(row, "stat") else {}
viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
stat["viewed"] = viewed
if stat:
shout_dict["stat"] = {
**stat,
"commented": stat.get("comments_count", 0)
}
if includes_topics and hasattr(row, "topic_id"):
topic = {
"id": row.topic_id,
"title": row.topic_title,
"slug": row.topic_slug,
"is_main": row.is_main,
}
if not filter(lambda x: x["id"] == topic["id"], shout_dict["topics"]):
shout_dict["topics"].add(frozenset(topic.items()))
if has_field(info, "main_topic") and hasattr(row, "main_topic"):
shout_dict["main_topic"] = json.loads(row.main_topic)
if has_field(info, "authors") and hasattr(row, "authors"):
shout_dict["authors"] = json.loads(row.authors)
if has_field(info, "topics") and hasattr(row, "topics"):
shout_dict["topics"] = json.loads(row.topics)
if includes_topics:
shout_dict["topics"] = sorted(
[dict(t) for t in shout_dict["topics"]], key=lambda x: (not x.get("is_main", False), x["id"])
)
shouts.append(shout_dict)
shouts.append(shout_dict)
except Exception as row_error:
logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
continue
except Exception as e:
logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
raise
finally:
return shouts
return []
def apply_filters(q, filters):