cache/cache.py (vendored): 62 changed lines
@@ -29,6 +29,7 @@ for new cache operations.
 
 import asyncio
 import json
+import traceback
 from typing import Any, Callable, Dict, List, Type
 
 import orjson
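The newly imported traceback module supports the pattern this commit applies to every cache helper below: wrap the body in try/except, log the error, the offending inputs, and the full stack trace, then re-raise (writers) or fall back (readers). A minimal standalone sketch of that pattern; cache_call_sketch and its logger are placeholders, not part of cache/cache.py:

import logging
import traceback

logger = logging.getLogger(__name__)

def cache_call_sketch() -> None:
    # Placeholder for any of the redis/serialization calls wrapped below.
    raise RuntimeError("boom")

try:
    cache_call_sketch()
except Exception as e:
    # Same shape as the new handlers: message, then context, then stack trace.
    logger.error(f"Error in cache_call_sketch: {e}")
    logger.error(f"Traceback: {traceback.format_exc()}")
    # The real helpers either re-raise (write paths) or return None (read paths).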
@@ -78,11 +79,21 @@ async def cache_topic(topic: dict) -> None:
 
 # Cache author data
 async def cache_author(author: dict) -> None:
-    payload = fast_json_dumps(author)
-    await asyncio.gather(
-        redis.execute("SET", f"author:slug:{author['slug'].strip()}", str(author["id"])),
-        redis.execute("SET", f"author:id:{author['id']}", payload),
-    )
+    try:
+        # logger.debug(f"Caching author {author.get('id', 'unknown')} with slug: {author.get('slug', 'unknown')}")
+        payload = fast_json_dumps(author)
+        # logger.debug(f"Author payload size: {len(payload)} bytes")
+
+        await asyncio.gather(
+            redis.execute("SET", f"author:slug:{author['slug'].strip()}", str(author["id"])),
+            redis.execute("SET", f"author:id:{author['id']}", payload),
+        )
+        # logger.debug(f"Successfully cached author {author.get('id', 'unknown')}")
+    except Exception as e:
+        logger.error(f"Error caching author: {e}")
+        logger.error(f"Author data: {author}")
+        logger.error(f"Traceback: {traceback.format_exc()}")
+        raise
 
 
 # Cache follows data
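cache_author still writes two keys per author: author:slug:<slug> holds the numeric id and author:id:<id> holds the serialized payload, so a slug lookup is a two-hop read. A hypothetical read path under that key scheme; get_author_by_slug is not part of this diff, and it reuses the module's redis client and orjson import as they appear in the hunks:

async def get_author_by_slug(slug: str) -> dict | None:
    # Hop 1 (hypothetical): the slug index resolves to the author id.
    author_id = await redis.execute("GET", f"author:slug:{slug.strip()}")
    if not author_id:
        return None
    # Hop 2: the id key holds the JSON payload written by cache_author.
    payload = await redis.execute("GET", f"author:id:{author_id}")
    return orjson.loads(payload) if payload else None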
@@ -109,12 +120,22 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i
 
 # Update follower statistics
 async def update_follower_stat(follower_id: int, entity_type: str, count: int) -> None:
-    follower_key = f"author:id:{follower_id}"
-    follower_str = await redis.execute("GET", follower_key)
-    follower = orjson.loads(follower_str) if follower_str else None
-    if follower:
-        follower["stat"] = {f"{entity_type}s": count}
-        await cache_author(follower)
+    try:
+        logger.debug(f"Updating follower stat for author {follower_id}, entity_type: {entity_type}, count: {count}")
+        follower_key = f"author:id:{follower_id}"
+        follower_str = await redis.execute("GET", follower_key)
+        follower = orjson.loads(follower_str) if follower_str else None
+        if follower:
+            follower["stat"] = {f"{entity_type}s": count}
+            logger.debug(f"Updating follower {follower_id} with new stat: {follower['stat']}")
+            await cache_author(follower)
+        else:
+            logger.warning(f"Follower {follower_id} not found in cache for stat update")
+    except Exception as e:
+        logger.error(f"Error updating follower stat: {e}")
+        logger.error(f"follower_id: {follower_id}, entity_type: {entity_type}, count: {count}")
+        logger.error(f"Traceback: {traceback.format_exc()}")
+        raise
 
 
 # Get author from cache
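A worked (hypothetical) call of the updated helper: entity_type is pluralized with a trailing "s" to form the stat key, and the stat dict on the cached author is replaced outright rather than merged. The ids, the entity_type value, and the count below are illustrative only:

# Call-site sketch, not taken from the codebase.
await update_follower_stat(follower_id=42, entity_type="follower", count=17)
# The record cached under author:id:42 now carries:
#   {"...": "...", "stat": {"followers": 17}}
# Any stat fields previously cached for author 42 are dropped by the overwrite.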
@@ -556,7 +577,9 @@ async def cache_data(key: str, data: Any, ttl: int | None = None) -> None:
         ttl: Cache lifetime in seconds (None - no expiry)
     """
     try:
+        logger.debug(f"Attempting to cache data for key: {key}, data type: {type(data)}")
         payload = fast_json_dumps(data)
+        logger.debug(f"Serialized payload size: {len(payload)} bytes")
         if ttl:
             await redis.execute("SETEX", key, ttl, payload)
         else:
@@ -564,6 +587,9 @@ async def cache_data(key: str, data: Any, ttl: int | None = None) -> None:
         logger.debug(f"Data saved to cache for key {key}")
     except Exception as e:
         logger.error(f"Error saving data to cache: {e}")
+        logger.error(f"Key: {key}, data type: {type(data)}")
+        logger.error(f"Traceback: {traceback.format_exc()}")
+        raise
 
 
 # Universal function for getting data from the cache
@@ -578,14 +604,19 @@ async def get_cached_data(key: str) -> Any | None:
         Any: Data from the cache, or None if there is no data
     """
     try:
+        logger.debug(f"Attempting to get cached data for key: {key}")
         cached_data = await redis.execute("GET", key)
         if cached_data:
+            logger.debug(f"Raw cached data size: {len(cached_data)} bytes")
             loaded = orjson.loads(cached_data)
             logger.debug(f"Data retrieved from cache for key {key}: {len(loaded)}")
             return loaded
+        logger.debug(f"No cached data found for key: {key}")
         return None
     except Exception as e:
         logger.error(f"Error getting data from cache: {e}")
+        logger.error(f"Key: {key}")
+        logger.error(f"Traceback: {traceback.format_exc()}")
         return None
 
 
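For orientation, a hedged round trip through the two generic helpers above; the key, payload, and TTL are illustrative. Note the asymmetry the hunks establish: cache_data re-raises after logging, while get_cached_data swallows errors and returns None, so callers cannot tell a miss from a failed read:

async def warm_and_read_example() -> None:
    # Illustrative key, payload, and TTL only; not taken from the codebase.
    await cache_data("example:recent_topics", [{"id": 1, "slug": "tech"}], ttl=300)

    topics = await get_cached_data("example:recent_topics")
    if topics is None:
        # Miss, expired TTL, or an error that get_cached_data logged and swallowed.
        topics = []
    print(len(topics))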
@@ -650,15 +681,24 @@ async def cached_query(
 
     # If data not in cache or refresh required, execute query
     try:
+        logger.debug(f"Executing query function for cache key: {actual_key}")
         result = await query_func(**query_params)
+        logger.debug(
+            f"Query function returned: {type(result)}, length: {len(result) if hasattr(result, '__len__') else 'N/A'}"
+        )
         if result is not None:
             # Save result to cache
+            logger.debug(f"Saving result to cache with key: {actual_key}")
             await cache_data(actual_key, result, ttl)
         return result
     except Exception as e:
         logger.error(f"Error executing query for caching: {e}")
+        logger.error(f"Query function: {query_func.__name__ if hasattr(query_func, '__name__') else 'unknown'}")
+        logger.error(f"Query params: {query_params}")
+        logger.error(f"Traceback: {traceback.format_exc()}")
         # In case of error, return data from cache if not forcing refresh
         if not force_refresh:
+            logger.debug(f"Attempting to get cached data as fallback for key: {actual_key}")
             return await get_cached_data(actual_key)
         raise
 
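The fallback branch above trades freshness for availability: if the refreshing query fails and force_refresh was not requested, possibly-stale cached data is returned instead of an error. The exact cached_query signature is not visible in this hunk, so the sketch below reproduces only that decision with placeholder names:

async def refresh_with_fallback(actual_key: str, force_refresh: bool = False):
    # Placeholder query; stands in for query_func(**query_params) above.
    async def failing_query():
        raise TimeoutError("upstream query failed")

    try:
        return await failing_query()
    except Exception:
        if not force_refresh:
            # Prefer possibly-stale cached data over an error, as in the hunk above.
            return await get_cached_data(actual_key)
        raise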