This commit is contained in:
181
cache/cache.py
vendored
181
cache/cache.py
vendored
@@ -1,122 +1,58 @@
|
||||
import asyncio
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from sqlalchemy import select, join, and_
|
||||
from orm.author import Author, AuthorFollower
|
||||
from orm.topic import Topic, TopicFollower
|
||||
from orm.shout import Shout, ShoutAuthor, ShoutTopic
|
||||
from services.db import local_session
|
||||
from utils.encoders import CustomJSONEncoder
|
||||
from cache.rediscache import redis
|
||||
from services.redis import redis
|
||||
from utils.logger import root_logger as logger
|
||||
|
||||
# Default follow lists used when a follower has no cached entry yet.
# "communities" is pre-seeded with the root community entry.
DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "shouts": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}
|
||||
|
||||
|
||||
# Cache topic data
async def cache_topic(topic: dict):
    """Serialize *topic* once and store it in Redis under both id and slug keys."""
    serialized = json.dumps(topic, cls=CustomJSONEncoder)
    # Write both keys concurrently so lookups by id or by slug are equally fast.
    keys = (f"topic:id:{topic['id']}", f"topic:slug:{topic['slug']}")
    await asyncio.gather(*(redis.execute("SET", key, serialized) for key in keys))
|
||||
|
||||
|
||||
# Cache author data
async def cache_author(author: dict):
    """Store *author* in Redis: a user->id mapping plus the full record by id."""
    serialized = json.dumps(author, cls=CustomJSONEncoder)
    user_key = f"author:user:{author['user'].strip()}"
    id_key = f"author:id:{author['id']}"
    # The user key holds only the author id; the id key holds the full payload.
    await asyncio.gather(
        redis.execute("SET", user_key, str(author["id"])),
        redis.execute("SET", id_key, serialized),
    )
|
||||
|
||||
|
||||
async def get_cached_topic(topic_id: int):
    """
    Fetch topic data by id, trying the Redis cache first and falling back
    to the database (caching the row on a miss).

    Args:
        topic_id (int): Topic identifier.

    Returns:
        dict: Topic data as a dictionary, or None when no such topic exists.
    """
    cache_key = f"topic:id:{topic_id}"
    cached = await redis.get(cache_key)
    if cached:
        return json.loads(cached)

    # Cache miss: load from the database and populate the cache.
    with local_session() as session:
        row = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
        if row is None:
            return None
        data = row.dict()
        await redis.set(cache_key, json.dumps(data, cls=CustomJSONEncoder))
        return data
|
||||
|
||||
|
||||
async def get_cached_shout_authors(shout_id: int):
    """
    Retrieves a list of authors for a given shout from the cache or database if not present.

    Args:
        shout_id (int): The ID of the shout for which to retrieve authors.

    Returns:
        List[dict]: A list of dictionaries containing author data.
    """
    rkey = f"shout:authors:{shout_id}"
    cached_ids = await redis.get(rkey)
    if cached_ids:
        ids = json.loads(cached_ids)
    else:
        # Cache miss: query the non-deleted authors of the shout, then cache the ids.
        with local_session() as session:
            stmt = (
                select(ShoutAuthor.author)
                .where(ShoutAuthor.shout == shout_id)
                .join(Author, ShoutAuthor.author == Author.id)
                .filter(Author.deleted_at.is_(None))
            )
            ids = [row[0] for row in session.execute(stmt).all()]
            await redis.execute("set", rkey, json.dumps(ids))

    if not ids:
        return []
    # Expand the cached ids into full author records.
    authors = await get_cached_authors_by_ids(ids)
    logger.debug(f"Shout#{shout_id} authors fetched and cached: {len(authors)} authors found.")
    return authors
|
||||
|
||||
|
||||
# Cache follows data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    """
    Add or remove *entity_id* in the cached follows list of *follower_id*
    for the given entity type, then refresh the follower's counters.

    Args:
        follower_id (int): Id of the following author.
        entity_type (str): Entity kind; the Redis key appends an "s"
            ("author:follows-<type>s:<id>").
        entity_id (int): Id of the followed/unfollowed entity.
        is_insert (bool): True to add the follow, False to remove it.
    """
    key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.get(key)
    if follows_str:
        follows = json.loads(follows_str)
    else:
        # Copy the default so the shared DEFAULT_FOLLOWS list is never mutated.
        # NOTE(review): the Redis key pluralizes entity_type but this lookup does
        # not — confirm callers pass the plural form DEFAULT_FOLLOWS is keyed by.
        follows = list(DEFAULT_FOLLOWS[entity_type])
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    await redis.execute("set", key, json.dumps(follows, cls=CustomJSONEncoder))
    # update_follower_stat is a coroutine: without "await" it would never run.
    await update_follower_stat(follower_id, entity_type, len(follows))
|
||||
|
||||
|
||||
# Обновление статистики подписчика
|
||||
# Update follower statistics
|
||||
async def update_follower_stat(follower_id, entity_type, count):
|
||||
follower_key = f"author:id:{follower_id}"
|
||||
follower_str = await redis.get(follower_key)
|
||||
@@ -126,13 +62,13 @@ async def update_follower_stat(follower_id, entity_type, count):
|
||||
await cache_author(follower)
|
||||
|
||||
|
||||
# Получение автора из кэша
|
||||
# Get author from cache
|
||||
async def get_cached_author(author_id: int):
|
||||
author_key = f"author:id:{author_id}"
|
||||
result = await redis.get(author_key)
|
||||
if result:
|
||||
return json.loads(result)
|
||||
# Загрузка из базы данных, если не найдено в кэше
|
||||
# Load from database if not found in cache
|
||||
with local_session() as session:
|
||||
author = session.execute(select(Author).where(Author.id == author_id)).scalar_one_or_none()
|
||||
if author:
|
||||
@@ -141,13 +77,41 @@ async def get_cached_author(author_id: int):
|
||||
return None
|
||||
|
||||
|
||||
# Function to get cached topic (by id, despite fetching nearby slug-based helpers)
async def get_cached_topic(topic_id: int):
    """
    Fetch topic data from cache or database by id.

    Args:
        topic_id (int): The identifier for the topic.

    Returns:
        dict: Topic data or None if not found.
    """
    # NOTE(review): another definition of get_cached_topic appears earlier in
    # this file/diff — in the final module the later one shadows the earlier;
    # confirm only one is intended.
    topic_key = f"topic:id:{topic_id}"
    cached_topic = await redis.get(topic_key)
    if cached_topic:
        return json.loads(cached_topic)

    # If not in cache, fetch from the database
    with local_session() as session:
        topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
        if topic:
            # Cache the freshly loaded row before returning it.
            topic_dict = topic.dict()
            await redis.set(topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
            return topic_dict

        return None
|
||||
|
||||
|
||||
|
||||
# Get topic by slug from cache
|
||||
async def get_cached_topic_by_slug(slug: str):
|
||||
topic_key = f"topic:slug:{slug}"
|
||||
result = await redis.get(topic_key)
|
||||
if result:
|
||||
return json.loads(result)
|
||||
# Загрузка из базы данных, если не найдено в кэше
|
||||
# Load from database if not found in cache
|
||||
with local_session() as session:
|
||||
topic = session.execute(select(Topic).where(Topic.slug == slug)).scalar_one_or_none()
|
||||
if topic:
|
||||
@@ -156,13 +120,13 @@ async def get_cached_topic_by_slug(slug: str):
|
||||
return None
|
||||
|
||||
|
||||
# Получение списка авторов по ID из кэша
|
||||
# Get list of authors by ID from cache
|
||||
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
|
||||
# Одновременное получение данных всех авторов
|
||||
# Fetch all author data concurrently
|
||||
keys = [f"author:id:{author_id}" for author_id in author_ids]
|
||||
results = await asyncio.gather(*(redis.get(key) for key in keys))
|
||||
authors = [json.loads(result) if result else None for result in results]
|
||||
# Загрузка отсутствующих авторов из базы данных и кэширование
|
||||
# Load missing authors from database and cache
|
||||
missing_indices = [index for index, author in enumerate(authors) if author is None]
|
||||
if missing_indices:
|
||||
missing_ids = [author_ids[index] for index in missing_indices]
|
||||
@@ -170,19 +134,21 @@ async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
|
||||
query = select(Author).where(Author.id.in_(missing_ids))
|
||||
missing_authors = session.execute(query).scalars().all()
|
||||
await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
|
||||
authors = [author.dict() for author in missing_authors]
|
||||
for index, author in zip(missing_indices, missing_authors):
|
||||
authors[index] = author.dict()
|
||||
return authors
|
||||
|
||||
|
||||
# Get cached topic followers
|
||||
async def get_cached_topic_followers(topic_id: int):
|
||||
# Попытка извлечь кэшированные данные
|
||||
# Attempt to retrieve cached data
|
||||
cached = await redis.get(f"topic:followers:{topic_id}")
|
||||
if cached:
|
||||
followers = json.loads(cached)
|
||||
logger.debug(f"Cached followers for topic#{topic_id}: {len(followers)}")
|
||||
return followers
|
||||
|
||||
# Загрузка из базы данных и кэширование результатов
|
||||
# Load from database and cache results
|
||||
with local_session() as session:
|
||||
followers_ids = [
|
||||
f[0]
|
||||
@@ -196,8 +162,9 @@ async def get_cached_topic_followers(topic_id: int):
|
||||
return followers
|
||||
|
||||
|
||||
# Get cached author followers
|
||||
async def get_cached_author_followers(author_id: int):
|
||||
# Проверяем кэш на наличие данных
|
||||
# Check cache for data
|
||||
cached = await redis.get(f"author:followers:{author_id}")
|
||||
if cached:
|
||||
followers_ids = json.loads(cached)
|
||||
@@ -205,7 +172,7 @@ async def get_cached_author_followers(author_id: int):
|
||||
logger.debug(f"Cached followers for author#{author_id}: {len(followers)}")
|
||||
return followers
|
||||
|
||||
# Запрос в базу данных если кэш пуст
|
||||
# Query database if cache is empty
|
||||
with local_session() as session:
|
||||
followers_ids = [
|
||||
f[0]
|
||||
@@ -219,13 +186,14 @@ async def get_cached_author_followers(author_id: int):
|
||||
return followers
|
||||
|
||||
|
||||
# Get cached follower authors
|
||||
async def get_cached_follower_authors(author_id: int):
|
||||
# Попытка получить авторов из кэша
|
||||
# Attempt to retrieve authors from cache
|
||||
cached = await redis.get(f"author:follows-authors:{author_id}")
|
||||
if cached:
|
||||
authors_ids = json.loads(cached)
|
||||
else:
|
||||
# Запрос авторов из базы данных
|
||||
# Query authors from database
|
||||
with local_session() as session:
|
||||
authors_ids = [
|
||||
a[0]
|
||||
@@ -241,13 +209,14 @@ async def get_cached_follower_authors(author_id: int):
|
||||
return authors
|
||||
|
||||
|
||||
# Get cached follower topics
|
||||
async def get_cached_follower_topics(author_id: int):
|
||||
# Попытка получить темы из кэша
|
||||
# Attempt to retrieve topics from cache
|
||||
cached = await redis.get(f"author:follows-topics:{author_id}")
|
||||
if cached:
|
||||
topics_ids = json.loads(cached)
|
||||
else:
|
||||
# Загрузка тем из базы данных и их кэширование
|
||||
# Load topics from database and cache them
|
||||
with local_session() as session:
|
||||
topics_ids = [
|
||||
t[0]
|
||||
@@ -270,30 +239,31 @@ async def get_cached_follower_topics(author_id: int):
|
||||
return topics
|
||||
|
||||
|
||||
# Get author by user ID from cache
|
||||
async def get_cached_author_by_user_id(user_id: str):
|
||||
"""
|
||||
Получает информацию об авторе по его user_id, сначала проверяя кэш, а затем базу данных.
|
||||
Retrieve author information by user_id, checking the cache first, then the database.
|
||||
|
||||
Args:
|
||||
user_id (str): Идентификатор пользователя, по которому нужно получить автора.
|
||||
user_id (str): The user identifier for which to retrieve the author.
|
||||
|
||||
Returns:
|
||||
dict: Словарь с данными автора или None, если автор не найден.
|
||||
dict: Dictionary with author data or None if not found.
|
||||
"""
|
||||
# Пытаемся найти ID автора по user_id в кэше Redis
|
||||
# Attempt to find author ID by user_id in Redis cache
|
||||
author_id = await redis.get(f"author:user:{user_id.strip()}")
|
||||
if author_id:
|
||||
# Если ID найден, получаем полные данные автора по его ID
|
||||
# If ID is found, get full author data by ID
|
||||
author_data = await redis.get(f"author:id:{author_id}")
|
||||
if author_data:
|
||||
return json.loads(author_data)
|
||||
|
||||
# Если данные в кэше не найдены, выполняем запрос к базе данных
|
||||
# If data is not found in cache, query the database
|
||||
with local_session() as session:
|
||||
author = session.execute(select(Author).where(Author.user == user_id)).scalar_one_or_none()
|
||||
|
||||
if author:
|
||||
# Кэшируем полученные данные автора
|
||||
# Cache the retrieved author data
|
||||
author_dict = author.dict()
|
||||
await asyncio.gather(
|
||||
redis.execute("SET", f"author:user:{user_id.strip()}", str(author.id)),
|
||||
@@ -301,27 +271,28 @@ async def get_cached_author_by_user_id(user_id: str):
|
||||
)
|
||||
return author_dict
|
||||
|
||||
# Возвращаем None, если автор не найден
|
||||
# Return None if author is not found
|
||||
return None
|
||||
|
||||
|
||||
# Get cached topic authors
|
||||
async def get_cached_topic_authors(topic_id: int):
|
||||
"""
|
||||
Получает список авторов для заданной темы, используя кэш или базу данных.
|
||||
Retrieve a list of authors for a given topic, using cache or database.
|
||||
|
||||
Args:
|
||||
topic_id (int): Идентификатор темы, для которой нужно получить авторов.
|
||||
topic_id (int): The identifier of the topic for which to retrieve authors.
|
||||
|
||||
Returns:
|
||||
List[dict]: Список словарей, содержащих данные авторов.
|
||||
List[dict]: A list of dictionaries containing author data.
|
||||
"""
|
||||
# Пытаемся получить список ID авторов из кэша
|
||||
# Attempt to get a list of author IDs from cache
|
||||
rkey = f"topic:authors:{topic_id}"
|
||||
cached_authors_ids = await redis.get(rkey)
|
||||
if cached_authors_ids:
|
||||
authors_ids = json.loads(cached_authors_ids)
|
||||
else:
|
||||
# Если кэш пуст, получаем данные из базы данных
|
||||
# If cache is empty, get data from the database
|
||||
with local_session() as session:
|
||||
query = (
|
||||
select(ShoutAuthor.author)
|
||||
@@ -330,10 +301,10 @@ async def get_cached_topic_authors(topic_id: int):
|
||||
.where(and_(ShoutTopic.topic == topic_id, Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
)
|
||||
authors_ids = [author_id for (author_id,) in session.execute(query).all()]
|
||||
# Кэшируем полученные ID авторов
|
||||
# Cache the retrieved author IDs
|
||||
await redis.execute("set", rkey, json.dumps(authors_ids))
|
||||
|
||||
# Получаем полные данные авторов по кэшированным ID
|
||||
# Retrieve full author details from cached IDs
|
||||
if authors_ids:
|
||||
authors = await get_cached_authors_by_ids(authors_ids)
|
||||
logger.debug(f"Topic#{topic_id} authors fetched and cached: {len(authors)} authors found.")
|
||||
|
Reference in New Issue
Block a user