cache-reimplement-2

commit 4c1fbf64a2 (parent 3742528e3a), 2024-05-21 01:40:57 +03:00
13 changed files with 292 additions and 326 deletions


@@ -1,8 +1,13 @@
import json
from orm.topic import TopicFollower
from sqlalchemy import and_, join, select
from orm.author import Author, AuthorFollower
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_topic_authors_query, get_with_stat
from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.rediscache import redis
DEFAULT_FOLLOWS = {
@@ -12,11 +17,20 @@ DEFAULT_FOLLOWS = {
}
async def cache_topic(topic: dict):
await redis.execute(
"SET",
f"topic:id:{topic.get('id')}",
json.dumps(topic.dict(), cls=CustomJSONEncoder),
)
await redis.execute("SET", f"topic:slug:{topic.get('slug')}", topic.id)
async def cache_author(author: dict):
author_id = author.get("id")
payload = json.dumps(author, cls=CustomJSONEncoder)
await redis.execute("SET", f'user:{author.get("user")}', payload)
await redis.execute("SET", f"author:{author_id}", payload)
await redis.execute("SET", f'user:id:{author.get("user")}', author_id)
await redis.execute("SET", f"author:id:{author_id}", payload)
# update the 'stat' field in all followers' cached <authors> lists
followers_str = await redis.execute("GET", f"author:{author_id}:followers")
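With the keys above, the serialized author lives under author:id:<author_id> and user:id:<user> keeps a pointer from the auth user id to the author id, so a profile can be resolved from either identifier with two GETs. A minimal lookup sketch (helper name is illustrative, reusing the module-level redis client):

async def resolve_author_by_user(user_id: str):
    # user:id:<user> -> author id; author:id:<author_id> -> serialized author dict
    author_id = await redis.execute("GET", f"user:id:{user_id}")
    if author_id:
        cached = await redis.execute("GET", f"author:id:{int(author_id)}")
        if isinstance(cached, str):
            return json.loads(cached)
    return None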
@@ -70,108 +84,158 @@ async def cache_author(author: dict):
)
async def cache_follows(follower: dict, entity_type: str, entity: dict, is_insert=True):
async def cache_follows(
follower_id: int, entity_type: str, entity_id: int, is_insert=True
):
# prepare
follows = []
follower_id = follower.get("id")
if follower_id:
redis_key = f"author:{follower_id}:follows-{entity_type}s"
follows_str = await redis.execute("GET", redis_key)
if isinstance(follows_str, str):
follows = json.loads(follows_str)
if is_insert:
follows.append(entity)
else:
entity_id = entity.get("id")
if not entity_id:
raise Exception("wrong entity")
# Remove the entity from follows
follows = [e for e in follows if e["id"] != entity_id]
redis_key = f"author:follows-{entity_type}s:{follower_id}"
follows_str = await redis.execute("GET", redis_key)
if isinstance(follows_str, str):
follows = json.loads(follows_str)
if is_insert:
if entity_id not in follows:
follows.append(entity_id)
else:
if not entity_id:
raise Exception("wrong entity")
# Remove the entity from follows
follows = [eid for eid in follows if eid != entity_id]
# update follows cache
payload = json.dumps(follows, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
# update follows cache
payload = json.dumps(follows, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
# update follower's stats everywhere
follower_str = await redis.execute("GET", f"author:{follower_id}")
if isinstance(follower_str, str):
follower = json.loads(follower_str)
follower["stat"][f"{entity_type}s"] = len(follows)
await cache_author(follower)
# update follower's stats everywhere
follower_str = await redis.execute("GET", f"author:id:{follower_id}")
if isinstance(follower_str, str):
follower = json.loads(follower_str)
follower["stat"][f"{entity_type}s"] = len(follows)
await cache_author(follower)
return follows
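The new signature keeps callers free of ORM objects: they pass plain ids, and cache_follows maintains the id list under author:follows-<entity_type>s:<follower_id> while refreshing the follower's cached stat counter. Illustrative calls (the ids are made up):

# author 1 starts following topic 5: appends 5 to the cached id list
follows = await cache_follows(1, "topic", 5, is_insert=True)
# author 1 unfollows topic 5: removes the id and re-counts the stat
follows = await cache_follows(1, "topic", 5, is_insert=False)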
async def cache_follow_author_change(follower: dict, author: dict, is_insert=True):
author_id = author.get("id")
follower_id = follower.get("id")
followers = []
if author_id and follower_id:
redis_key = f"author:{author_id}:followers"
followers_str = await redis.execute("GET", redis_key)
followers = json.loads(followers_str) if isinstance(followers_str, str) else []
# Remove the author from the list of followers, if present
followers = [f for f in followers if f["id"] != author_id]
# If inserting, add the new follower to the list if not already present
if is_insert and not any(f["id"] == follower_id for f in followers):
followers.append(follower)
# Remove the follower from the list if not inserting and present
async def get_cached_author(author_id: int):
if author_id:
rkey = f"author:id:{author_id}"
cached_result = await redis.execute("GET", rkey)
if isinstance(cached_result, str):
return json.loads(cached_result)
else:
followers = [f for f in followers if f["id"] != follower_id]
author_query = select(Author).filter(Author.id == author_id)
[author] = get_with_stat(author_query)
if author:
await cache_author(author.dict())
# Ensure followers are unique based on their 'id' field
followers = list({f["id"]: f for f in followers}.values())
# Update follower's stats everywhere
follower_str = await redis.execute("GET", f"author:{follower_id}")
if isinstance(follower_str, str):
follower = json.loads(follower_str)
follower["stat"]["followers"] = len(followers)
await cache_author(follower)
async def get_cached_author_by_user_id(user_id: str):
author_id = await redis.execute("GET", f"user:id:{user_id}")
if author_id:
return await get_cached_author(int(author_id))
payload = json.dumps(followers, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
async def get_cached_author_follows_topics(author_id: int):
    topics = []
    rkey = f"author:follows-topics:{author_id}"
    cached = await redis.execute("GET", rkey)
    if cached and isinstance(cached, str):
        topics = json.loads(cached)
    if not cached:
        topics = (
            local_session()
            .query(Topic.id)
            .select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
            .where(TopicFollower.follower == author_id)
            .all()
        )
        await redis.execute("SET", rkey, json.dumps(topics))
    return topics
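One caveat with caching the query result directly: query(Topic.id)...all() yields one-element rows, not bare integers, and depending on the SQLAlchemy version those rows may not serialize cleanly with json.dumps. Flattening first keeps the cached value a plain id list (rows and topic_ids are illustrative names):

# flatten [(12,), (34,)] into [12, 34] before caching
topic_ids = [row[0] for row in rows]
await redis.execute("SET", rkey, json.dumps(topic_ids))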
async def get_cached_author_follows_authors(author_id: int):
    authors = []
    rkey = f"author:follows-authors:{author_id}"
    cached = await redis.execute("GET", rkey)
    if not cached:
        with local_session() as session:
            authors = (
                session.query(Author.id)
                .select_from(
                    join(Author, AuthorFollower, Author.id == AuthorFollower.author)
                )
                .where(AuthorFollower.follower == author_id)
                .all()
            )
            await redis.execute("SET", rkey, json.dumps(authors))
    elif isinstance(cached, str):
        authors = json.loads(cached)
    return authors
async def get_cached_author_followers(author_id: int):
    followers = []
    rkey = f"author:followers:{author_id}"
    cached = await redis.execute("GET", rkey)
    if isinstance(cached, str):
        followers = json.loads(cached)
        if isinstance(followers, list):
            return followers
    followers = (
        local_session()
        .query(Author.id)
        .join(
            AuthorFollower,
            and_(
                AuthorFollower.author == author_id,
                AuthorFollower.follower == Author.id,
                Author.id != author_id,  # exclude the author from the followers
            ),
        )
        .all()
    )
    if followers:
        await redis.execute("SET", rkey, json.dumps(followers))
        logger.debug(f"author#{author_id} cache updated with {len(followers)} followers")
    return followers
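Since the cache stores only follower ids (the join above explicitly excludes the author themself), a resolver that needs full profiles can expand them through get_cached_author. A sketch, assuming the cached value is a plain id list (the helper name is illustrative):

async def get_follower_profiles(author_id: int):
    profiles = []
    for fid in await get_cached_author_followers(author_id):
        profile = await get_cached_author(int(fid))
        if profile:
            profiles.append(profile)
    return profiles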
async def cache_topic(topic_dict: dict):
# update stat all field for followers' caches in <topics> list
async def get_cached_topic_followers(topic_id: int):
followers = []
rkey = f"topic:followers:{topic_id}"
cached = await redis.execute("GET", rkey)
if isinstance(cached, str):
followers = json.loads(cached)
if isinstance(followers, list):
return followers
followers = (
local_session()
.query(TopicFollower)
.filter(TopicFollower.topic == topic_dict.get("id"))
.query(Author.id)
.join(
TopicFollower,
and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id),
)
.all()
)
for tf in followers:
follower_id = tf.follower
follower_follows_topics = []
follower_follows_topics_str = await redis.execute(
"GET", f"author:{follower_id}:follows-topics"
)
if isinstance(follower_follows_topics_str, str):
follower_follows_topics = json.loads(follower_follows_topics_str)
c = 0
for old_topic in follower_follows_topics:
if int(old_topic.get("id")) == int(topic_dict.get("id", 0)):
follower_follows_topics[c] = topic_dict
break # exit the loop since we found and updated the topic
c += 1
else:
# topic not found in the list, so add the new topic with the updated stat field
follower_follows_topics.append(topic_dict)
if followers:
await redis.execute("SET", rkey, json.dumps(followers))
logger.debug(f"topic#{topic_id} cache updated with {len(followers)} followers")
return followers
await redis.execute(
"SET",
f"author:{follower_id}:follows-topics",
json.dumps(follower_follows_topics, cls=CustomJSONEncoder),
)
# update topic's stat
topic_dict["stat"]["followers"] = len(followers)
async def get_cached_topic_authors(topic_id: int):
authors = []
rkey = f"topic:authors:{topic_id}"
cached = await redis.execute("GET", rkey)
if isinstance(cached, str):
authors = json.loads(cached)
if isinstance(authors, list):
return authors
# save in cache
payload = json.dumps(topic_dict, cls=CustomJSONEncoder)
await redis.execute("SET", f'topic:{topic_dict.get("slug")}', payload)
authors = local_session().execute(get_topic_authors_query(topic_id))
# should be id list
if authors:
await redis.execute("SET", rkey, json.dumps(authors))
logger.debug(f"topic#{topic_id} cache updated with {len(authors)} authors")
return authors
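For reference, the key layout the new module converges on, collected from the functions above (the summary itself is illustrative; values are JSON-encoded unless noted):

CACHE_KEYS = {
    "author by id": "author:id:{author_id}",
    "author id by auth user": "user:id:{user_id}",  # value: author id
    "author follows": "author:follows-{entity_type}s:{author_id}",  # value: id list
    "author followers": "author:followers:{author_id}",
    "topic by id": "topic:id:{topic_id}",
    "topic id by slug": "topic:slug:{slug}",  # value: topic id
    "topic followers": "topic:followers:{topic_id}",
    "topic authors": "topic:authors:{topic_id}",
}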