cachedep-fix+orjson+fmt

All checks were successful: Deploy on push / deploy (push) succeeded in 1m16s

commit 247fc98760 (parent a1781b3800)
2025-03-20 11:55:21 +03:00
14 changed files with 245 additions and 75 deletions

cache/cache.py (vendored, 54 lines changed)

@@ -1,7 +1,7 @@
 import asyncio
-import json
 from typing import List
+import orjson
 from sqlalchemy import and_, join, select
 from orm.author import Author, AuthorFollower
@@ -35,7 +35,7 @@ CACHE_KEYS = {
 # Cache topic data
 async def cache_topic(topic: dict):
-    payload = json.dumps(topic, cls=CustomJSONEncoder)
+    payload = orjson.dumps(topic, cls=CustomJSONEncoder)
     await asyncio.gather(
         redis_operation("SET", f"topic:id:{topic['id']}", payload),
         redis_operation("SET", f"topic:slug:{topic['slug']}", payload),
@@ -44,7 +44,7 @@ async def cache_topic(topic: dict):
 # Cache author data
 async def cache_author(author: dict):
-    payload = json.dumps(author, cls=CustomJSONEncoder)
+    payload = orjson.dumps(author, cls=CustomJSONEncoder)
     await asyncio.gather(
         redis_operation("SET", f"author:user:{author['user'].strip()}", str(author["id"])),
         redis_operation("SET", f"author:id:{author['id']}", payload),
@@ -55,13 +55,13 @@ async def cache_author(author: dict):
 async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
     key = f"author:follows-{entity_type}s:{follower_id}"
     follows_str = await redis_operation("GET", key)
-    follows = json.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type]
+    follows = orjson.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type]
     if is_insert:
         if entity_id not in follows:
             follows.append(entity_id)
     else:
         follows = [eid for eid in follows if eid != entity_id]
-    await redis_operation("SET", key, json.dumps(follows, cls=CustomJSONEncoder))
+    await redis_operation("SET", key, orjson.dumps(follows, cls=CustomJSONEncoder))
     await update_follower_stat(follower_id, entity_type, len(follows))
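On the read path the swap is transparent because orjson.loads() accepts bytes, bytearray, memoryview, or str, so values stored as orjson's bytes output decode directly from whatever Redis returns. A quick round-trip check:

import orjson

follows = [1, 2, 3]
raw = orjson.dumps(follows)                   # b'[1,2,3]' -- bytes, not str
assert orjson.loads(raw) == follows           # bytes input is accepted
assert orjson.loads(raw.decode()) == follows  # and so is str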
@@ -69,7 +69,7 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i
 async def update_follower_stat(follower_id, entity_type, count):
     follower_key = f"author:id:{follower_id}"
     follower_str = await redis_operation("GET", follower_key)
-    follower = json.loads(follower_str) if follower_str else None
+    follower = orjson.loads(follower_str) if follower_str else None
     if follower:
         follower["stat"] = {f"{entity_type}s": count}
         await cache_author(follower)
@@ -80,7 +80,7 @@ async def get_cached_author(author_id: int, get_with_stat):
     author_key = f"author:id:{author_id}"
     result = await redis_operation("GET", author_key)
     if result:
-        return json.loads(result)
+        return orjson.loads(result)
     # Load from database if not found in cache
     q = select(Author).where(Author.id == author_id)
     authors = get_with_stat(q)
@@ -105,14 +105,14 @@ async def get_cached_topic(topic_id: int):
     topic_key = f"topic:id:{topic_id}"
     cached_topic = await redis_operation("GET", topic_key)
     if cached_topic:
-        return json.loads(cached_topic)
+        return orjson.loads(cached_topic)
     # If not in cache, fetch from the database
     with local_session() as session:
         topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
         if topic:
             topic_dict = topic.dict()
-            await redis_operation("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
+            await redis_operation("SET", topic_key, orjson.dumps(topic_dict, cls=CustomJSONEncoder))
             return topic_dict
     return None
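The redis_operation() helper used throughout is outside this diff. Purely as an assumption, a plausible shape is a thin async dispatcher over a redis.asyncio client:

from redis.asyncio import Redis

redis = Redis()  # hypothetical module-level client


async def redis_operation(op: str, key: str, value=None, ttl: int = 0):
    # Dispatch the handful of verbs the cache helpers rely on.
    if op == "GET":
        return await redis.get(key)
    if op == "SET":
        return await redis.set(key, value)
    if op == "SETEX":
        return await redis.setex(key, ttl, value)
    raise ValueError(f"unsupported Redis operation: {op}")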
@@ -123,7 +123,7 @@ async def get_cached_topic_by_slug(slug: str, get_with_stat):
     topic_key = f"topic:slug:{slug}"
     result = await redis_operation("GET", topic_key)
     if result:
-        return json.loads(result)
+        return orjson.loads(result)
     # Load from database if not found in cache
     topic_query = select(Topic).where(Topic.slug == slug)
     topics = get_with_stat(topic_query)
@@ -139,7 +139,7 @@ async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
     # Fetch all author data concurrently
     keys = [f"author:id:{author_id}" for author_id in author_ids]
     results = await asyncio.gather(*(redis_operation("GET", key) for key in keys))
-    authors = [json.loads(result) if result else None for result in results]
+    authors = [orjson.loads(result) if result else None for result in results]
     # Load missing authors from database and cache
     missing_indices = [index for index, author in enumerate(authors) if author is None]
     if missing_indices:
@@ -168,7 +168,7 @@ async def get_cached_topic_followers(topic_id: int):
     cached = await redis_operation("GET", cache_key)
     if cached:
-        followers_ids = json.loads(cached)
+        followers_ids = orjson.loads(cached)
         logger.debug(f"Found {len(followers_ids)} cached followers for topic #{topic_id}")
         return await get_cached_authors_by_ids(followers_ids)
@@ -181,7 +181,7 @@ async def get_cached_topic_followers(topic_id: int):
             .all()
         ]
-        await redis_operation("SETEX", cache_key, value=json.dumps(followers_ids), ttl=CACHE_TTL)
+        await redis_operation("SETEX", cache_key, value=orjson.dumps(followers_ids), ttl=CACHE_TTL)
     followers = await get_cached_authors_by_ids(followers_ids)
     logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}")
     return followers
@@ -196,7 +196,7 @@ async def get_cached_author_followers(author_id: int):
     # Check cache for data
     cached = await redis_operation("GET", f"author:followers:{author_id}")
     if cached:
-        followers_ids = json.loads(cached)
+        followers_ids = orjson.loads(cached)
         followers = await get_cached_authors_by_ids(followers_ids)
         logger.debug(f"Cached followers for author #{author_id}: {len(followers)}")
         return followers
@@ -210,7 +210,7 @@ async def get_cached_author_followers(author_id: int):
             .filter(AuthorFollower.author == author_id, Author.id != author_id)
             .all()
         ]
-        await redis_operation("SET", f"author:followers:{author_id}", json.dumps(followers_ids))
+        await redis_operation("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids))
     followers = await get_cached_authors_by_ids(followers_ids)
     return followers
@@ -220,7 +220,7 @@ async def get_cached_follower_authors(author_id: int):
     # Attempt to retrieve authors from cache
     cached = await redis_operation("GET", f"author:follows-authors:{author_id}")
     if cached:
-        authors_ids = json.loads(cached)
+        authors_ids = orjson.loads(cached)
     else:
         # Query authors from database
         with local_session() as session:
@@ -232,7 +232,7 @@ async def get_cached_follower_authors(author_id: int):
                 .where(AuthorFollower.follower == author_id)
             ).all()
         ]
-        await redis_operation("SET", f"author:follows-authors:{author_id}", json.dumps(authors_ids))
+        await redis_operation("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids))
     authors = await get_cached_authors_by_ids(authors_ids)
     return authors
@@ -243,7 +243,7 @@ async def get_cached_follower_topics(author_id: int):
     # Attempt to retrieve topics from cache
     cached = await redis_operation("GET", f"author:follows-topics:{author_id}")
     if cached:
-        topics_ids = json.loads(cached)
+        topics_ids = orjson.loads(cached)
     else:
         # Load topics from database and cache them
         with local_session() as session:
@@ -254,13 +254,13 @@ async def get_cached_follower_topics(author_id: int):
                 .where(TopicFollower.follower == author_id)
                 .all()
         ]
-        await redis_operation("SET", f"author:follows-topics:{author_id}", json.dumps(topics_ids))
+        await redis_operation("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids))
     topics = []
     for topic_id in topics_ids:
         topic_str = await redis_operation("GET", f"topic:id:{topic_id}")
         if topic_str:
-            topic = json.loads(topic_str)
+            topic = orjson.loads(topic_str)
             if topic and topic not in topics:
                 topics.append(topic)
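The loop above fetches topics with one GET per id, while get_cached_authors_by_ids batches its reads with asyncio.gather. A sketch of the same batching applied to topics, where redis_get stands in for redis_operation("GET", ...):

import asyncio

import orjson


async def fetch_topics_batched(redis_get, topic_ids):
    # Issue all topic GETs concurrently, then decode and skip cache misses.
    raw = await asyncio.gather(*(redis_get(f"topic:id:{tid}") for tid in topic_ids))
    return [orjson.loads(r) for r in raw if r]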
@@ -285,7 +285,7 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat):
         # If ID is found, get full author data by ID
         author_data = await redis_operation("GET", f"author:id:{author_id}")
         if author_data:
-            return json.loads(author_data)
+            return orjson.loads(author_data)
     # If data is not found in cache, query the database
     author_query = select(Author).where(Author.user == user_id)
@@ -296,7 +296,7 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat):
         author_dict = author.dict()
         await asyncio.gather(
             redis_operation("SET", f"author:user:{user_id.strip()}", str(author.id)),
-            redis_operation("SET", f"author:id:{author.id}", json.dumps(author_dict)),
+            redis_operation("SET", f"author:id:{author.id}", orjson.dumps(author_dict)),
         )
         return author_dict
@@ -319,7 +319,7 @@ async def get_cached_topic_authors(topic_id: int):
     rkey = f"topic:authors:{topic_id}"
     cached_authors_ids = await redis_operation("GET", rkey)
     if cached_authors_ids:
-        authors_ids = json.loads(cached_authors_ids)
+        authors_ids = orjson.loads(cached_authors_ids)
     else:
         # If cache is empty, get data from the database
         with local_session() as session:
@@ -331,7 +331,7 @@ async def get_cached_topic_authors(topic_id: int):
             )
             authors_ids = [author_id for (author_id,) in session.execute(query).all()]
             # Cache the retrieved author IDs
-            await redis_operation("SET", rkey, json.dumps(authors_ids))
+            await redis_operation("SET", rkey, orjson.dumps(authors_ids))
     # Retrieve full author details from cached IDs
     if authors_ids:
@@ -378,7 +378,7 @@ async def invalidate_shouts_cache(cache_keys: List[str]):
 async def cache_topic_shouts(topic_id: int, shouts: List[dict]):
     """Caches the list of shouts for a topic"""
     key = f"topic_shouts_{topic_id}"
-    payload = json.dumps(shouts, cls=CustomJSONEncoder)
+    payload = orjson.dumps(shouts, cls=CustomJSONEncoder)
     await redis_operation("SETEX", key, value=payload, ttl=CACHE_TTL)
@@ -387,7 +387,7 @@ async def get_cached_topic_shouts(topic_id: int) -> List[dict]:
     key = f"topic_shouts_{topic_id}"
     cached = await redis_operation("GET", key)
     if cached:
-        return json.loads(cached)
+        return orjson.loads(cached)
     return None
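A typing nit visible in this hunk: the annotation promises List[dict], but the cache-miss path returns None, so Optional[List[dict]] (or returning [] on a miss) would match the actual behavior. Sketch of the more precise signature:

from typing import List, Optional


async def get_cached_topic_shouts(topic_id: int) -> Optional[List[dict]]:
    ...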
@@ -467,7 +467,7 @@ async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_
     key = f"{entity_type}:id:{entity_id}"
     cached = await redis_operation("GET", key)
     if cached:
-        return json.loads(cached)
+        return orjson.loads(cached)
     entity = await get_method(entity_id)
     if entity: