caching-wip1

parent 9f91490441
commit c276a0eeb0
@@ -1,8 +1,8 @@
 import asyncio
 import json
 from typing import List
-from sqlalchemy import select
 
+from sqlalchemy import select
 from orm.author import Author
 from orm.topic import Topic
 from services.db import local_session
@@ -16,80 +16,95 @@ DEFAULT_FOLLOWS = {
 }
 
 
-async def cache_multiple_items(items, cache_function):
-    await asyncio.gather(*(cache_function(item) for item in items))
-
-
+# Cache topic data
 async def cache_topic(topic: dict):
-    await cache_multiple_items([topic], _cache_topic_helper)
-
-
-async def _cache_topic_helper(topic):
-    topic_id = topic.get("id")
-    topic_slug = topic.get("slug")
     payload = json.dumps(topic, cls=CustomJSONEncoder)
-    await redis.set(f"topic:id:{topic_id}", payload)
-    await redis.set(f"topic:slug:{topic_slug}", payload)
+    # Cache by id and slug simultaneously for fast access
+    await asyncio.gather(
+        redis.set(f"topic:id:{topic['id']}", payload), redis.set(f"topic:slug:{topic['slug']}", payload)
+    )
 
 
+# Cache author data
 async def cache_author(author: dict):
-    author_id = author.get("id")
-    user_id = author.get("user", "").strip()
     payload = json.dumps(author, cls=CustomJSONEncoder)
-    await redis.set(f"author:user:{user_id}", author_id)
-    await redis.set(f"author:id:{author_id}", payload)
+    # Cache author data by user and by id
+    await asyncio.gather(
+        redis.set(f"author:user:{author['user'].strip()}", str(author["id"])),
+        redis.set(f"author:id:{author['id']}", payload),
+    )
 
 
+# Cache follows data
 async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
-    redis_key = f"author:follows-{entity_type}s:{follower_id}"
-    follows = await redis.get(redis_key)
-    follows = json.loads(follows) if follows else []
+    key = f"author:follows-{entity_type}s:{follower_id}"
+    follows_str = await redis.get(key)
+    follows = json.loads(follows_str) if follows_str else []
     if is_insert:
-        follows.append(entity_id) if entity_id not in follows else None
+        if entity_id not in follows:
+            follows.append(entity_id)
     else:
         follows = [eid for eid in follows if eid != entity_id]
-    payload = json.dumps(follows, cls=CustomJSONEncoder)
-    await redis.set(redis_key, payload)
-    follower = await redis.get(f"author:id:{follower_id}")
+    await redis.set(key, json.dumps(follows, cls=CustomJSONEncoder))
+    await update_follower_stat(follower_id, entity_type, len(follows))
+
+
+# Update follower stats
+async def update_follower_stat(follower_id, entity_type, count):
+    follower_key = f"author:id:{follower_id}"
+    follower_str = await redis.get(follower_key)
+    follower = json.loads(follower_str) if follower_str else None
     if follower:
-        follower = json.loads(follower)
-        follower["stat"][f"{entity_type}s"] = len(follows)
+        follower["stat"] = {f"{entity_type}s": count}
         await cache_author(follower)
 
 
-async def get_cached_topic_by_slug(slug: str, get_with_stat):
-    cached_result = await redis.get(f"topic:slug:{slug}")
-    if cached_result:
-        return json.loads(cached_result)
+# Get an author from the cache
+async def get_cached_author(author_id: int):
+    author_key = f"author:id:{author_id}"
+    result = await redis.get(author_key)
+    if result:
+        return json.loads(result)
+    # Load from the database if not found in the cache
     with local_session() as session:
-        topic_query = select(Topic).filter(Topic.slug == slug)
-        result = await get_with_stat(session.execute(topic_query))
-        topic = result if isinstance(result, Topic) else result[0]
+        author = session.execute(select(Author).where(Author.id == author_id)).scalar_one_or_none()
+        if author:
+            await cache_author(author.dict())
+            return author.dict()
+    return None
+
+
+# Get a topic by slug from the cache
+async def get_cached_topic_by_slug(slug: str):
+    topic_key = f"topic:slug:{slug}"
+    result = await redis.get(topic_key)
+    if result:
+        return json.loads(result)
+    # Load from the database if not found in the cache
+    with local_session() as session:
+        topic = session.execute(select(Topic).where(Topic.slug == slug)).scalar_one_or_none()
         if topic:
             await cache_topic(topic.dict())
-            return topic
+            return topic.dict()
+    return None
 
 
-# Example of aggregated fetching and caching for authors
+# Get a list of authors by ID from the cache
 async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
-    cache_keys = [f"author:id:{author_id}" for author_id in author_ids]
-    authors_data = await asyncio.gather(*(redis.get(key) for key in cache_keys))
-    authors = [json.loads(author) for author in authors_data if author]
-    # Cache the missing data
-    missing_ids = [author_ids[i] for i, data in enumerate(authors_data) if not data]
-    if missing_ids:
+    # Fetch all authors' data concurrently
+    keys = [f"author:id:{author_id}" for author_id in author_ids]
+    results = await asyncio.gather(*(redis.get(key) for key in keys))
+    authors = [json.loads(result) if result else None for result in results]
+    # Load missing authors from the database and cache them
+    missing_indices = [index for index, author in enumerate(authors) if author is None]
+    if missing_indices:
+        missing_ids = [author_ids[index] for index in missing_indices]
         with local_session() as session:
             query = select(Author).where(Author.id.in_(missing_ids))
-            results = await session.execute(query)
-            authors_to_cache = [result.dict() for result in results.scalars()]
-            await cache_multiple_items(authors_to_cache, cache_author)
-            authors.extend(authors_to_cache)
+            missing_authors = session.execute(query).scalars().all()
+            await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
+            # Backfill misses in place so results keep the order of author_ids
+            found_by_id = {author.id: author.dict() for author in missing_authors}
+            for index in missing_indices:
+                authors[index] = found_by_id.get(author_ids[index])
     return authors
-
-
-# Other functions with similar optimizations
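
Usage note: the rewritten helpers follow a cache-aside pattern (try Redis first, fall back to the database, repopulate the cache on a miss), and writes fan out concurrently via asyncio.gather. Below is a minimal, hypothetical caller sketch; handle_topic_follow and the services.cache import path are illustrative assumptions, not part of this commit.

# Hypothetical caller sketch, not part of this commit. The import path is
# an assumption; adjust it to wherever this module lives in the repo.
from services.cache import cache_follows, get_cached_author


async def handle_topic_follow(follower_id: int, topic_id: int):
    # Record the follow in the cached follows list; this also refreshes
    # the follower's stat counters via update_follower_stat().
    await cache_follows(follower_id, "topic", topic_id, is_insert=True)
    # Reads hit Redis first and fall back to the database on a miss.
    return await get_cached_author(follower_id)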
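The backfill step in get_cached_authors_by_ids is worth a sanity check: results should keep the order of the requested ids, with database hits filling the cache-miss slots. A standalone sketch of that logic, with plain dicts standing in for Redis and the database (all names here are illustrative):

import asyncio
import json

# Plain dicts stand in for Redis and the database in this sketch.
FAKE_REDIS = {"author:id:1": json.dumps({"id": 1, "slug": "one"})}
FAKE_DB = {2: {"id": 2, "slug": "two"}}


async def fake_redis_get(key):
    return FAKE_REDIS.get(key)


async def get_authors(author_ids):
    # Concurrent cache reads, one result slot per requested id
    results = await asyncio.gather(*(fake_redis_get(f"author:id:{i}") for i in author_ids))
    authors = [json.loads(r) if r else None for r in results]
    missing_indices = [i for i, a in enumerate(authors) if a is None]
    # Backfill misses in place so the output keeps the order of author_ids
    for i in missing_indices:
        authors[i] = FAKE_DB.get(author_ids[i])
    return authors


assert asyncio.run(get_authors([2, 1])) == [{"id": 2, "slug": "two"}, {"id": 1, "slug": "one"}]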