cache-fix-9
All checks were successful: Deploy on push / deploy (push) succeeded in 46s

Untone 2024-05-30 20:43:18 +03:00
parent afef19fae3
commit c9dcd6a9c9
2 changed files with 33 additions and 27 deletions


@@ -55,7 +55,8 @@ async def follow(_, info, what, slug):
         error = author_follow(follower_id, slug)
         if not error:
             follows = await get_cached_follower_authors(follower_id)
-            [author_id] = local_session().query(Author.id).filter(Author.slug == slug).first()
+            with local_session() as session:
+                [author_id] = session.query(Author.id).filter(Author.slug == slug).first()
             if author_id and author_id not in follows:
                 follows.append(author_id)
                 await cache_author(follower_dict)
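The first hunk swaps a bare local_session() call for a context-managed session, so the connection is released back to the pool even if the slug lookup raises. A minimal sketch of the same pattern, assuming the repo's local_session() and Author model; resolve_author_id and the None guard are illustrative additions, not code from this commit:

def resolve_author_id(slug: str):
    # Sketch only: local_session() is assumed to yield a SQLAlchemy session
    # usable as a context manager, as in the hunk above.
    with local_session() as session:
        row = session.query(Author.id).filter(Author.slug == slug).first()
    return row[0] if row else None  # unpack the single-column Row, or None for a missing slug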


@@ -114,32 +114,36 @@ async def get_cached_topic_followers(topic_id: int):
         followers = json.loads(cached)
         if isinstance(followers, list):
             return followers
-    followers_ids = (
-        local_session()
-        .query(Author.id)
+    with local_session() as session:
+        result = (
+            session.query(Author.id)
             .join(
                 TopicFollower,
                 and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id),
             )
             .all()
         )
+        followers_ids = [f[0] for f in result]
     followers = await get_cached_authors_by_ids(followers_ids)
     logger.debug(f"topic#{topic_id} cache updated with {len(followers)} followers")
     return followers
 
 
 async def get_cached_author_followers(author_id: int):
-    followers = []
+    # follower profile
+    cached_author = await redis.execute("GET", f"author:id:{author_id}")
+    author = json.loads(cached_author)
+    if not author:
+        return []
+    followers_ids = []
     followers_rkey = f"author:followers:{author_id}"
     cached = await redis.execute("GET", followers_rkey)
-    cached_author = await redis.execute("GET", f"author:followers:{author_id}")
     if isinstance(cached, str) and isinstance(cached_author, str):
-        followers_ids = json.loads(cached)
-        author = json.loads(cache_author)
-        if not isinstance(followers_ids, list) or not str(len(followers_ids)) == str(author["stat"]["followers"]):
+        followers_ids = json.loads(cached) or []
+        if not str(len(followers_ids)) == str(author["stat"]["followers"]):
             with local_session() as session:
-                followers_ids = (
+                followers_result = (
                     session.query(Author.id)
                     .join(
                         AuthorFollower,
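The topic-followers part of the hunk above wraps the fallback query in a session context and unpacks the single-column Row results into plain ids before they reach get_cached_authors_by_ids. A minimal sketch of that cache-or-rebuild flow; load_topic_follower_ids and the topic:followers:{id} key are illustrative names, while redis, local_session, Author and TopicFollower are assumed to be this module's objects:

import json

from sqlalchemy import and_

async def load_topic_follower_ids(topic_id: int) -> list[int]:
    # Sketch only: try the cached id list first, fall back to the database.
    cached = await redis.execute("GET", f"topic:followers:{topic_id}")
    if isinstance(cached, str):
        ids = json.loads(cached)
        if isinstance(ids, list):
            return ids  # cache hit: already a plain list of author ids
    with local_session() as session:
        rows = (
            session.query(Author.id)
            .join(
                TopicFollower,
                and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id),
            )
            .all()
        )
    return [row[0] for row in rows]  # one-column Rows -> plain ints, safe to json.dumps and cache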
@@ -151,8 +155,9 @@ async def get_cached_author_followers(author_id: int):
                     )
                     .all()
                 )
-            await redis.execute("SET", followers_rkey, json.dumps([a.id for a in followers_ids]))
-        followers = await get_cached_authors_by_ids(followers_ids)
+                followers_ids = [a[0] for a in followers_result]
+                await redis.execute("SET", followers_rkey, json.dumps(followers_ids))
+        else:
+            followers = await get_cached_authors_by_ids(followers_ids)
     logger.debug(f"author#{author_id} cache updated with {len(followers)} followers")
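In get_cached_author_followers the second hunk normalises the query result to plain integer ids before it is cached and reused, so the list written to Redis and the list handed to get_cached_authors_by_ids are the same thing, and len(followers_ids) can be compared against author["stat"]["followers"] without touching the database. A minimal sketch of that step; store_author_follower_ids is a made-up helper around this module's redis wrapper:

import json

async def store_author_follower_ids(author_id: int, rows) -> list[int]:
    # Sketch only: unpack one-column Rows into ints so the cached value is plain JSON.
    ids = [row[0] for row in rows]
    await redis.execute("SET", f"author:followers:{author_id}", json.dumps(ids))
    return ids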