cache-refactored
All checks were successful
Deploy on push / deploy (push) Successful in 39s

This commit is contained in:
Untone 2024-05-30 19:15:11 +03:00
parent 3712ecf8ae
commit 042cf595f7

View File

@ -1,5 +1,6 @@
import json
from typing import List

from sqlalchemy import and_, join, select

from orm.author import Author, AuthorFollower
@ -19,9 +20,9 @@ DEFAULT_FOLLOWS = {
async def cache_topic(topic: dict):
    """Store a topic dict in Redis under both its id and slug keys.

    ``topic:id:{id}`` holds the JSON-serialized topic; ``topic:slug:{slug}``
    holds only the topic id (an indirection key).
    """
    serialized = json.dumps(topic, cls=CustomJSONEncoder)
    topic_id = topic.get("id")
    topic_slug = topic.get("slug")
    await redis.execute("SET", f"topic:id:{topic_id}", serialized)
    await redis.execute("SET", f"topic:slug:{topic_slug}", topic_id)
async def cache_author(author: dict): async def cache_author(author: dict):
@ -33,7 +34,6 @@ async def cache_author(author: dict):
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True): async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
# prepare
follows = [] follows = []
redis_key = f"author:follows-{entity_type}s:{follower_id}" redis_key = f"author:follows-{entity_type}s:{follower_id}"
follows_str = await redis.execute("GET", redis_key) follows_str = await redis.execute("GET", redis_key)
@ -45,14 +45,11 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i
else: else:
if not entity_id: if not entity_id:
raise Exception("wrong entity") raise Exception("wrong entity")
# Remove the entity from follows
follows = [eid for eid in follows if eid != entity_id] follows = [eid for eid in follows if eid != entity_id]
# update follows cache
payload = json.dumps(follows, cls=CustomJSONEncoder) payload = json.dumps(follows, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload) await redis.execute("SET", redis_key, payload)
# update follower's stats everywhere
follower_str = await redis.execute("GET", f"author:id:{follower_id}") follower_str = await redis.execute("GET", f"author:id:{follower_id}")
if isinstance(follower_str, str): if isinstance(follower_str, str):
follower = json.loads(follower_str) follower = json.loads(follower_str)
@ -66,11 +63,13 @@ async def get_cached_author(author_id: int, get_with_stat):
cached_result = await redis.execute("GET", rkey) cached_result = await redis.execute("GET", rkey)
if isinstance(cached_result, str): if isinstance(cached_result, str):
return json.loads(cached_result) return json.loads(cached_result)
else: elif get_with_stat:
author_query = select(Author).filter(Author.id == author_id) async with local_session() as session:
[author] = get_with_stat(author_query) author_query = select(Author).filter(Author.id == author_id)
if author: [author] = get_with_stat(session.execute(author_query))
await cache_author(author.dict()) if author:
await cache_author(author)
return author
async def get_cached_author_by_user_id(user_id: str, get_with_stat): async def get_cached_author_by_user_id(user_id: str, get_with_stat):
@ -79,94 +78,32 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat):
return await get_cached_author(int(author_id), get_with_stat) return await get_cached_author(int(author_id), get_with_stat)
async def get_cached_authors_by_ids(authors_ids: List[int]):
    """Resolve a list of author ids to author dicts via the Redis cache.

    Each id is looked up under ``author:id:{author_id}``. Falsy ids and ids
    with no cached entry are silently skipped (best-effort resolution).

    :param authors_ids: author ids to resolve
    :return: list of author dicts parsed from cached JSON
    """
    authors = []
    for author_id in authors_ids:
        if author_id:
            rkey = f"author:id:{author_id}"
            cached_result = await redis.execute("GET", rkey)
            if isinstance(cached_result, str):
                author = json.loads(cached_result)
                if author:
                    # was: authors.push(author) — Python lists have no
                    # .push(); that raised AttributeError on the first hit
                    authors.append(author)
    return authors
async def get_cached_topic_authors(topic_id: int, topic_authors_query):
    """Return the authors of a topic, with the id list cached in Redis.

    On a cache miss the id list is loaded with *topic_authors_query*
    (a query yielding single-column ``Author.id`` rows) and stored under
    ``topic:authors:{topic_id}``; ids are then resolved through
    get_cached_authors_by_ids().
    """
    rkey = f"topic:authors:{topic_id}"
    cached = await redis.execute("GET", rkey)
    authors_ids = []
    if isinstance(cached, str):
        authors_ids = json.loads(cached)
    else:
        # was: `async with local_session()` — every other use in this file
        # treats local_session() as a sync context manager, so `async with`
        # raised TypeError here; NOTE(review): confirm local_session is sync
        with local_session() as session:
            authors_ids = [aid for (aid,) in session.execute(topic_authors_query)]
        await redis.execute("SET", rkey, json.dumps(authors_ids))
    authors = await get_cached_authors_by_ids(authors_ids)
    logger.debug(f"topic#{topic_id} cache updated with {len(authors)} authors")
    return authors
async def get_cached_author_followers(author_id: int):
followers = []
followers_rkey = f"author:followers:{author_id}"
cached = await redis.execute("GET", followers_rkey)
cached_author = await redis.execute("GET", f"author:followers:{author_id}")
if isinstance(cached, str) and isinstance(cached_author, str):
followers = json.loads(cached)
author = json.loads(cache_author)
if isinstance(followers, list) and str(len(followers)) == str(author["stat"]["followers"]):
return followers
followers = (
local_session()
.query(Author)
.join(
AuthorFollower,
and_(
AuthorFollower.author == author_id,
AuthorFollower.follower == Author.id,
Author.id != author_id, # exclude the author from the followers
),
)
.all()
)
await redis.execute("SET", followers_rkey, json.dumps([a.id for a in followers]))
followers_objects = []
for follower_id in followers:
follower_str = await redis.execute("GET", f"author:id:{follower_id}")
if follower_str:
follower = json.loads(follower_str)
if follower and follower not in followers_objects:
followers_objects.append(follower)
logger.debug(f"author#{author_id} cache updated with {len(followers)} followers")
return followers_objects
async def get_cached_topic_followers(topic_id: int): async def get_cached_topic_followers(topic_id: int):
@ -178,47 +115,95 @@ async def get_cached_topic_followers(topic_id: int):
if isinstance(followers, list): if isinstance(followers, list):
return followers return followers
followers = ( followers_ids = (
local_session() local_session()
.query(Author) .query(Author.id)
.join( .join(
TopicFollower, TopicFollower,
and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id), and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id),
) )
.all() .all()
) )
followers_objects = [] followers = await get_cached_authors_by_ids(followers_ids)
if followers: logger.debug(f"topic#{topic_id} cache updated with {len(followers)} followers")
await redis.execute("SET", rkey, json.dumps([a.id for a in followers])) return followers
for follower_id in followers:
follower_str = await redis.execute("GET", f"author:id:{follower_id}")
if follower_str:
follower = json.loads(follower_str)
if follower and follower not in followers_objects:
followers_objects.append(follower)
logger.debug(f"topic#{topic_id} cache updated with {len(followers)} followers")
return followers_objects
async def get_cached_author_followers(author_id: int):
    """Return the followers of an author, with the id list cached in Redis.

    The cached follower-id list (``author:followers:{author_id}``) is trusted
    only when its length matches the author's cached ``stat.followers``
    counter; otherwise it is rebuilt from the database and re-cached.
    Returns [] when either cache entry is missing.
    """
    followers = []
    followers_rkey = f"author:followers:{author_id}"
    cached = await redis.execute("GET", followers_rkey)
    # was: GET author:followers:{author_id} again — fetched the follower list
    # a second time instead of the author object needed for the stat check
    cached_author = await redis.execute("GET", f"author:id:{author_id}")
    if isinstance(cached, str) and isinstance(cached_author, str):
        followers_ids = json.loads(cached)
        # was: json.loads(cache_author) — NameError: cache_author is the
        # coroutine function defined above, not this local variable
        author = json.loads(cached_author)
        if not isinstance(followers_ids, list) or not str(len(followers_ids)) == str(author["stat"]["followers"]):
            with local_session() as session:
                # query(Author.id) yields single-column Row tuples — unpack
                # to plain ints (was: [a.id for a in ...], no .id on rows)
                followers_ids = [
                    fid
                    for (fid,) in session.query(Author.id)
                    .join(
                        AuthorFollower,
                        and_(
                            AuthorFollower.author == author_id,
                            AuthorFollower.follower == Author.id,
                            Author.id != author_id,  # exclude the author from the followers
                        ),
                    )
                    .all()
                ]
                await redis.execute("SET", followers_rkey, json.dumps(followers_ids))
        followers = await get_cached_authors_by_ids(followers_ids)
        logger.debug(f"author#{author_id} cache updated with {len(followers)} followers")
    return followers
async def get_cached_author_follows_authors(author_id: int):
    """Return the authors a given author follows, id list cached in Redis.

    On a cache miss the id list is built from AuthorFollower rows and stored
    under ``author:follows-authors:{author_id}``; ids are then resolved via
    get_cached_authors_by_ids().
    """
    rkey = f"author:follows-authors:{author_id}"
    authors_ids = []
    cached = await redis.execute("GET", rkey)
    if not cached:
        authors_query = (
            select(Author.id)
            .select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
            .where(AuthorFollower.follower == author_id)
        )
        with local_session() as session:
            # was: authors_ids = session.execute(authors_query) — kept a
            # Result of Row tuples; json.dumps() failed on rows and the
            # iterator was exhausted before get_cached_authors_by_ids ran
            authors_ids = [aid for (aid,) in session.execute(authors_query)]
        await redis.execute("SET", rkey, json.dumps(authors_ids))
    elif isinstance(cached, str):
        authors_ids = json.loads(cached)
    return await get_cached_authors_by_ids(authors_ids)
async def get_cached_topics_by_ids(topics_ids: List[int]):
    """Resolve topic ids to topic dicts via the ``topic:id:{id}`` Redis keys.

    Ids with no cached entry are skipped; duplicate topic dicts are dropped.
    """
    resolved = []
    for topic_id in topics_ids:
        raw = await redis.execute("GET", f"topic:id:{topic_id}")
        if not raw:
            continue
        topic = json.loads(raw)
        if topic and topic not in resolved:
            resolved.append(topic)
    return resolved
async def get_cached_author_follows_topics(author_id: int):
    """Return the topics a given author follows, id list cached in Redis.

    On a cache miss the id list is built from TopicFollower rows and stored
    under ``author:follows-topics:{author_id}``; ids are then resolved via
    get_cached_topics_by_ids().
    """
    rkey = f"author:follows-topics:{author_id}"
    topics_ids = []
    cached = await redis.execute("GET", rkey)
    if cached and isinstance(cached, str):
        topics_ids = json.loads(cached)
    else:
        # was: bare local_session().query(...) — never closed the session,
        # and .all() on query(Topic.id) returns Row tuples, which
        # json.dumps() below cannot serialize; unpack to plain ids
        with local_session() as session:
            topics_ids = [
                tid
                for (tid,) in session.query(Topic.id)
                .select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
                .where(TopicFollower.follower == author_id)
                .all()
            ]
        await redis.execute("SET", rkey, json.dumps(topics_ids))
    topics = await get_cached_topics_by_ids(topics_ids)
    logger.debug(f"author#{author_id} cache updated with {len(topics)} topics")
    return topics