core/services/cache.py
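"""Redis cache helpers for author profiles and follow/follower relationships."""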

import json

from services.encoders import CustomJSONEncoder
from services.rediscache import redis

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


async def cache_author(author: dict):
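    """Store the serialized author under its user and author keys, then refresh
    the author's entry in the related follower and follows-authors caches."""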
    payload = json.dumps(author, cls=CustomJSONEncoder)
    await redis.execute("SET", f'user:{author.get("user")}', payload)
    await redis.execute("SET", f'author:{author.get("id")}', payload)

    author_id = int(author.get("id", 0))

    # refresh this author's entry (with its updated stat fields) in the cached
    # follows-authors list of every follower
    followers_str = await redis.execute("GET", f'author:{author.get("id")}:followers')
    followers = []
    if isinstance(followers_str, str):
        followers = json.loads(followers_str)
    if isinstance(followers, list):
        for follower in followers:
            follower_key = f'author:{follower.get("id")}:follows-authors'
            follower_follows_authors_str = await redis.execute("GET", follower_key)
            if isinstance(follower_follows_authors_str, str):
                follower_follows_authors = json.loads(follower_follows_authors_str)
                for i, old_author in enumerate(follower_follows_authors):
                    if int(old_author.get("id", 0)) == author_id:
                        follower_follows_authors[i] = author
                        break  # found and updated the author, exit the loop
                else:
                    # author not found in the list, so add it with the updated stat field
                    follower_follows_authors.append(author)
                await redis.execute(
                    "SET",
                    follower_key,
                    json.dumps(follower_follows_authors, cls=CustomJSONEncoder),
                )

    # refresh this author's entry in the cached followers list of every author
    # they follow
    follows_str = await redis.execute(
        "GET", f'author:{author.get("id")}:follows-authors'
    )
    follows_authors = []
    if isinstance(follows_str, str):
        follows_authors = json.loads(follows_str)
    if isinstance(follows_authors, list):
        for followed_author in follows_authors:
            followed_key = f'author:{followed_author.get("id")}:followers'
            followed_author_followers_str = await redis.execute("GET", followed_key)
            if isinstance(followed_author_followers_str, str):
                followed_author_followers = json.loads(followed_author_followers_str)
                for i, old_follower in enumerate(followed_author_followers):
                    if int(old_follower.get("id", 0)) == author_id:
                        followed_author_followers[i] = author
                        break  # found and updated the author, exit the loop
                else:
                    # author not found in the list, so add it with the updated stat field
                    followed_author_followers.append(author)
                await redis.execute(
                    "SET",
                    followed_key,
                    json.dumps(followed_author_followers, cls=CustomJSONEncoder),
                )


async def cache_follows(follower: dict, entity_type: str, entity: dict, is_insert=True):
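    """Add or remove <entity> in the follower's follows-<entity_type>s cache and
    update the follower's <entity_type>s stat counter. Returns the updated list."""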
    # prepare
    follows = []
    follower_id = follower.get("id")
    if follower_id:
        redis_key = f"author:{follower_id}:follows-{entity_type}s"
        follows_str = await redis.execute("GET", redis_key)
        if isinstance(follows_str, str):
            follows = json.loads(follows_str)
        if is_insert:
            follows.append(entity)
        else:
            entity_id = entity.get("id")
            if not entity_id:
                raise ValueError("wrong entity")
            # remove the entity from follows
            follows = [e for e in follows if e["id"] != entity_id]

        # update follows cache
        payload = json.dumps(follows, cls=CustomJSONEncoder)
        await redis.execute("SET", redis_key, payload)

        # update follower's stats everywhere
        author_str = await redis.execute("GET", f"author:{follower_id}")
        if isinstance(author_str, str):
            author = json.loads(author_str)
            author["stat"] = author.get("stat", {})
            author["stat"][f"{entity_type}s"] = len(follows)
            await cache_author(author)
    return follows


async def cache_follower(follower: dict, author: dict, is_insert=True):
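    """Add or remove <follower> in the author's followers cache and update the
    author's followers stat counter. Returns the updated list."""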
    author_id = author.get("id")
    follower_id = follower.get("id")
    followers = []
    if author_id and follower_id:
        redis_key = f"author:{author_id}:followers"
        followers_str = await redis.execute("GET", redis_key)
        if isinstance(followers_str, str):
            followers = json.loads(followers_str)
        if is_insert:
            # add the follower, dropping any stale copy first to avoid duplicates
            followers = [e for e in followers if e["id"] != follower_id]
            followers.append(follower)
        else:
            # remove the follower from the author's followers list
            followers = [e for e in followers if e["id"] != follower_id]
        payload = json.dumps(followers, cls=CustomJSONEncoder)
        await redis.execute("SET", redis_key, payload)

        # update the followed author's followers counter everywhere
        author_str = await redis.execute("GET", f"author:{author_id}")
        if isinstance(author_str, str):
            author = json.loads(author_str)
            author["stat"] = author.get("stat", {})
            author["stat"]["followers"] = len(followers)
            await cache_author(author)
    return followers
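
# Usage sketch (hypothetical call site, not part of this module): a follow handler
# elsewhere in the codebase might refresh both sides of an author-follows-author
# relation roughly like this:
#
#   await cache_follows(follower_dict, "author", followed_author_dict, is_insert=True)
#   await cache_follower(follower_dict, followed_author_dict, is_insert=True)
#
# follower_dict / followed_author_dict are placeholder names for author dicts of
# the same shape that cache_author() stores.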