redis-fix

Untone 2024-08-07 07:18:49 +03:00
parent 3981fa3181
commit 821c81dd9c
2 changed files with 45 additions and 9 deletions

View File

@@ -38,6 +38,42 @@ async def cache_author(author: dict):
)
async def get_cached_shout_authors(shout_id: int):
"""
Retrieves the list of authors for a given shout from the cache, falling back to the database if it is not cached.
Args:
shout_id (int): The ID of the shout for which to retrieve authors.
Returns:
List[dict]: A list of dictionaries containing author data.
"""
# Attempt to retrieve cached author IDs for the shout
rkey = f"shout:authors:{shout_id}"
cached_author_ids = await redis.get(rkey)
if cached_author_ids:
author_ids = json.loads(cached_author_ids)
else:
# If not in cache, fetch from the database and cache the result
with local_session() as session:
query = (
select(ShoutAuthor.author)
.where(ShoutAuthor.shout == shout_id)
.join(Author, ShoutAuthor.author == Author.id)
.filter(Author.deleted_at.is_(None))
)
author_ids = [author_id for (author_id,) in session.execute(query).all()]
await redis.execute("set", rkey, json.dumps(author_ids))
# Retrieve full author details from cached IDs
if author_ids:
authors = await get_cached_authors_by_ids(author_ids)
logger.debug(f"Shout#{shout_id} authors fetched and cached: {len(authors)} authors found.")
return authors
return []
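get_cached_authors_by_ids is called above but not defined in this hunk. As a rough sketch of its assumed shape, a batched cache read over per-author keys could look like the following; the "author:id:{id}" key layout and the fallback note are assumptions, not the project's confirmed implementation.

# Hedged sketch only; key layout and helper shape are assumptions.
async def get_cached_authors_by_ids(author_ids: list[int]) -> list[dict]:
    keys = [f"author:id:{aid}" for aid in author_ids]
    cached = await redis.execute("mget", *keys)
    # A fuller version would fall back to the database for missing entries
    # and re-cache them via cache_author().
    return [json.loads(entry) for entry in cached if entry]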
# Caching follow data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
key = f"author:follows-{entity_type}s:{follower_id}"
@@ -48,7 +84,7 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i
follows.append(entity_id)
else:
follows = [eid for eid in follows if eid != entity_id]
await redis.set(key, json.dumps(follows, cls=CustomJSONEncoder))
await redis.execute("set", key, json.dumps(follows, cls=CustomJSONEncoder))
update_follower_stat(follower_id, entity_type, len(follows))
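cache_follows serializes with cls=CustomJSONEncoder, while the plain ID lists elsewhere in this file use json.dumps directly; a custom encoder is usually only needed when cached values can contain non-JSON-native types. A sketch of what such an encoder commonly does, as an assumption rather than the project's actual class:

import json
from datetime import datetime

class CustomJSONEncoder(json.JSONEncoder):
    # Assumed behavior: render datetimes as ISO 8601 strings and
    # defer everything else to the default encoder.
    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()
        return super().default(obj)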
@@ -267,7 +303,7 @@ async def get_cached_topic_authors(topic_id: int):
)
authors_ids = [author_id for (author_id,) in session.execute(query).all()]
# Cache the retrieved author IDs
await redis.set(rkey, json.dumps(authors_ids))
await redis.execute("set", rkey, json.dumps(authors_ids))
# Fetch full author data for the cached IDs
if authors_ids:
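The recurring change in this file replaces redis.set(...) with redis.execute("set", ...), which suggests the project's redis service exposes a generic command dispatcher rather than per-command methods. A minimal sketch of such a wrapper, assuming redis-py's asyncio client; the class and attribute names are illustrative, not this repository's code:

# Hedged sketch of an assumed generic Redis wrapper.
import redis.asyncio as aioredis

class RedisService:
    def __init__(self, uri: str = "redis://localhost:6379"):
        self._client = aioredis.from_url(uri)

    async def execute(self, command: str, *args):
        # Dispatch any Redis command by name, e.g. execute("set", key, value).
        return await self._client.execute_command(command, *args)

    async def get(self, key: str):
        return await self._client.get(key)

redis = RedisService()  # module-level instance, matching the call sites above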

View File

@@ -4,7 +4,7 @@ from orm.author import Author, AuthorFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutAuthor
from orm.topic import Topic, TopicFollower
from services.cache import cache_manager # Assumes this manager is already implemented
from services.cache import cache_author, get_cached_author, cache_topic, get_cached_shout, get_cached_topic, cache_shout
from services.logger import root_logger as logger
@@ -27,19 +27,19 @@ class CacheRevalidationManager:
for entity_id in ids:
if entity_type == "authors":
# Revalidate the author cache
author = await cache_manager.get_author(entity_id)
author = await get_cached_author(entity_id)
if author:
await cache_manager.cache_author(author)
await cache_author(author)
elif entity_type == "topics":
# Revalidate the topic cache
topic = await cache_manager.get_topic(entity_id)
topic = await get_cached_topic(entity_id)
if topic:
await cache_manager.cache_topic(topic)
await cache_topic(topic)
elif entity_type == "shouts":
# Revalidate the shout cache
shout = await cache_manager.get_shout(entity_id)
shout = await get_cached_shout(entity_id)
if shout:
await cache_manager.cache_shout(shout)
await cache_shout(shout)
ids.clear()
def mark_for_revalidation(self, entity_id, entity_type):
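For orientation, a sketch of how this revalidation manager is typically driven: entity hooks queue IDs with mark_for_revalidation, and a background task periodically flushes them through the loop shown above. The hook, the batch method name, and the interval below are illustrative assumptions, not code from this commit.

# Hypothetical wiring; names outside the diff are assumptions.
import asyncio

revalidation_manager = CacheRevalidationManager()

def on_author_updated(author_id: int):
    # Queue the author instead of refreshing its cache inline.
    revalidation_manager.mark_for_revalidation(author_id, "authors")

async def revalidation_worker(interval_seconds: int = 5):
    while True:
        # process_revalidation_batch is an assumed name for the loop shown above.
        await revalidation_manager.process_revalidation_batch()
        await asyncio.sleep(interval_seconds)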