core/services/cache.py

import json

from sqlalchemy import and_, join, select

from orm.author import Author, AuthorFollower
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.rediscache import redis

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


async def cache_topic(topic: dict):
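    """Cache a topic dict in Redis under its id and map its slug to the id."""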
    topic_id = topic.get("id")
    topic_slug = topic.get("slug")
    payload = json.dumps(topic, cls=CustomJSONEncoder)
    await redis.execute("SET", f"topic:id:{topic_id}", payload)
    await redis.execute("SET", f"topic:slug:{topic_slug}", topic_id)


async def cache_author(author: dict):
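    """Cache an author dict in Redis and map the linked user id to the author id."""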
    author_id = author.get("id")
    user_id = author.get("user")
    payload = json.dumps(author, cls=CustomJSONEncoder)
    await redis.execute("SET", f"user:id:{user_id}", author_id)
    await redis.execute("SET", f"author:id:{author_id}", payload)


async def cache_follows(
    follower_id: int, entity_type: str, entity_id: int, is_insert=True
):
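    """Add or remove entity_id in the follower's cached follows list for entity_type,
    then refresh the follower's cached stat counter."""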
    # prepare
    follows = []
    redis_key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.execute("GET", redis_key)
    if isinstance(follows_str, str):
        follows = json.loads(follows_str)
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        if not entity_id:
            raise Exception("wrong entity")
        # Remove the entity from follows
        follows = [eid for eid in follows if eid != entity_id]
    # update follows cache
    payload = json.dumps(follows, cls=CustomJSONEncoder)
    await redis.execute("SET", redis_key, payload)
    # update follower's stats everywhere
    follower_str = await redis.execute("GET", f"author:id:{follower_id}")
    if isinstance(follower_str, str):
        follower = json.loads(follower_str)
        follower["stat"][f"{entity_type}s"] = len(follows)
        await cache_author(follower)


async def get_cached_author(author_id: int, get_with_stat):
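    """Return the author dict from cache; on a miss, load it with get_with_stat and cache the result."""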
    if author_id:
        rkey = f"author:id:{author_id}"
        cached_result = await redis.execute("GET", rkey)
        if isinstance(cached_result, str):
            return json.loads(cached_result)
        else:
            author_query = select(Author).filter(Author.id == author_id)
            [author] = get_with_stat(author_query)
            if author:
                author_dict = author.dict()
                await cache_author(author_dict)
                return author_dict


async def get_cached_author_by_user_id(user_id: str, get_with_stat):
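    """Resolve the author id via the cached user:id mapping and return the cached author."""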
    author_id = await redis.execute("GET", f"user:id:{user_id}")
    if author_id:
        return await get_cached_author(int(author_id), get_with_stat)


async def get_cached_author_follows_topics(author_id: int):
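    """Return topic dicts the author follows; topic ids come from cache or, on a miss, from the database."""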
    topics = []
    rkey = f"author:follows-topics:{author_id}"
    cached = await redis.execute("GET", rkey)
    if cached and isinstance(cached, str):
        topics = json.loads(cached)
    if not cached:
        rows = (
            local_session()
            .query(Topic.id)
            .select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
            .where(TopicFollower.follower == author_id)
            .all()
        )
        topics = [topic_id for (topic_id,) in rows]
        await redis.execute("SET", rkey, json.dumps(topics))

    topics_objects = []
    for topic_id in topics:
        topic_str = await redis.execute("GET", f"topic:id:{topic_id}")
        if topic_str:
            topic = json.loads(topic_str)
            if topic and topic not in topics_objects:
                topics_objects.append(topic)
    logger.debug(
        f"author#{author_id} cache updated with {len(topics_objects)} topics"
    )
    return topics_objects


async def get_cached_author_follows_authors(author_id: int):
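    """Return author dicts the author follows; author ids come from cache or, on a miss, from the database."""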
    authors = []
    rkey = f"author:follows-authors:{author_id}"
    cached = await redis.execute("GET", rkey)
    if not cached:
        authors_query = (
            select(Author.id)
            .select_from(
                join(Author, AuthorFollower, Author.id == AuthorFollower.author)
            )
            .where(AuthorFollower.follower == author_id)
        )
        authors = [aid for (aid,) in local_session().execute(authors_query)]
        await redis.execute("SET", rkey, json.dumps(authors))
    elif isinstance(cached, str):
        authors = json.loads(cached)

    authors_objects = []
    for followed_id in authors:
        author_str = await redis.execute("GET", f"author:id:{followed_id}")
        if author_str:
            author = json.loads(author_str)
            if author and author not in authors_objects:
                authors_objects.append(author)
    return authors_objects


async def get_cached_author_followers(author_id: int):
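    """Return the author's followers, served from cache when present and rebuilt from the database otherwise."""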
    followers = []
    rkey = f"author:followers:{author_id}"
    cached = await redis.execute("GET", rkey)
    if isinstance(cached, str):
        followers = json.loads(cached)
        if isinstance(followers, list):
            return followers

    followers = (
        local_session()
        .query(Author)
        .join(
            AuthorFollower,
            and_(
                AuthorFollower.author == author_id,
                AuthorFollower.follower == Author.id,
                Author.id != author_id,  # exclude the author from the followers
            ),
        )
        .all()
    )

    await redis.execute("SET", rkey, json.dumps([a.id for a in followers]))

    followers_objects = []
    for follower in followers:
        follower_str = await redis.execute("GET", f"author:id:{follower.id}")
        if follower_str:
            follower_data = json.loads(follower_str)
            if follower_data and follower_data not in followers_objects:
                followers_objects.append(follower_data)
    logger.debug(f"author#{author_id} cache updated with {len(followers)} followers")
    return followers_objects


async def get_cached_topic_followers(topic_id: int):
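    """Return the topic's followers, served from cache when present and rebuilt from the database otherwise."""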
    followers = []
    rkey = f"topic:followers:{topic_id}"
    cached = await redis.execute("GET", rkey)
    if isinstance(cached, str):
        followers = json.loads(cached)
        if isinstance(followers, list):
            return followers

    followers = (
        local_session()
        .query(Author)
        .join(
            TopicFollower,
            and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id),
        )
        .all()
    )

    followers_objects = []
    if followers:
        await redis.execute("SET", rkey, json.dumps([a.id for a in followers]))
        for follower in followers:
            follower_str = await redis.execute("GET", f"author:id:{follower.id}")
            if follower_str:
                follower_data = json.loads(follower_str)
                if follower_data and follower_data not in followers_objects:
                    followers_objects.append(follower_data)
        logger.debug(f"topic#{topic_id} cache updated with {len(followers)} followers")
    return followers_objects


async def get_cached_topic_authors(topic_id: int, topic_authors_query):
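    """Return author dicts for a topic; topic_authors_query is expected to select author ids
    and is executed only on a cache miss."""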
    authors = []
    rkey = f"topic:authors:{topic_id}"
    cached = await redis.execute("GET", rkey)
    if isinstance(cached, str):
        authors = json.loads(cached)
        if isinstance(authors, list):
            return authors

    # materialize author ids from the supplied query (assumes a single id column)
    authors = [aid for (aid,) in local_session().execute(topic_authors_query)]
    authors_objects = []
    if authors:
        await redis.execute("SET", rkey, json.dumps(authors))
        for author_id in authors:
            author_str = await redis.execute("GET", f"author:id:{author_id}")
            if author_str:
                author = json.loads(author_str)
                if author and author not in authors_objects:
                    authors_objects.append(author)
    logger.debug(f"topic#{topic_id} cache updated with {len(authors)} authors")
    return authors_objects