2024-02-27 12:40:53 +00:00
|
|
|
import json
|
2024-05-30 16:16:50 +00:00
|
|
|
from typing import List
|
2024-05-30 16:42:38 +00:00
|
|
|
|
2024-05-20 22:40:57 +00:00
|
|
|
from sqlalchemy import and_, join, select
|
|
|
|
|
|
|
|
from orm.author import Author, AuthorFollower
|
2024-06-05 14:45:55 +00:00
|
|
|
from orm.shout import Shout, ShoutAuthor, ShoutTopic
|
2024-05-20 22:40:57 +00:00
|
|
|
from orm.topic import Topic, TopicFollower
|
2024-05-06 21:06:31 +00:00
|
|
|
from services.db import local_session
|
2024-03-06 18:57:04 +00:00
|
|
|
from services.encoders import CustomJSONEncoder
|
2024-05-20 22:40:57 +00:00
|
|
|
from services.logger import root_logger as logger
|
2024-04-08 07:38:58 +00:00
|
|
|
from services.rediscache import redis
|
2024-02-25 13:43:04 +00:00
|
|
|
|
2024-02-27 12:40:53 +00:00
|
|
|
# Default follow lists for a brand-new author profile: no topics or authors
# followed yet, but every profile implicitly follows the root community.
DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}
|
2024-02-25 13:43:04 +00:00
|
|
|
|
|
|
|
|
2024-05-20 22:40:57 +00:00
|
|
|
async def cache_topic(topic: dict):
    """Store a topic payload in Redis under both its id and slug keys."""
    serialized = json.dumps(topic, cls=CustomJSONEncoder)
    topic_id = topic.get("id")
    topic_slug = topic.get("slug")
    # the same JSON payload is addressable by either identifier
    for key in (f"topic:id:{topic_id}", f"topic:slug:{topic_slug}"):
        await redis.execute("SET", key, serialized)
|
2024-05-20 22:40:57 +00:00
|
|
|
|
|
|
|
|
2024-04-09 08:17:32 +00:00
|
|
|
async def cache_author(author: dict):
    """Cache an author dict in Redis.

    Writes two keys:
      - ``author:user:{user_id}`` -> author id (lookup by auth-service user id)
      - ``author:id:{author_id}`` -> full JSON payload
    """
    author_id = author.get("id")
    # BUGFIX: .get("user", "") does not protect against an explicit None value
    # for the "user" key, which would make .strip() raise AttributeError.
    user_id = (author.get("user") or "").strip()
    payload = json.dumps(author, cls=CustomJSONEncoder)
    await redis.execute("SET", f"author:user:{user_id}", author_id)
    await redis.execute("SET", f"author:id:{author_id}", payload)
|
2024-03-12 12:50:57 +00:00
|
|
|
|
2024-02-27 12:40:53 +00:00
|
|
|
|
2024-05-30 09:49:46 +00:00
|
|
|
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    """Add or remove ``entity_id`` in the cached follow list of an author.

    Updates ``author:follows-{entity_type}s:{follower_id}`` in Redis, then
    refreshes the follower's cached profile so its ``stat`` counter matches.

    :param follower_id: id of the author whose follow list changes
    :param entity_type: singular entity kind, e.g. "topic" or "author"
    :param entity_id: id being followed/unfollowed
    :param is_insert: True to add the id, False to remove it
    :raises Exception: on removal with a falsy ``entity_id``
    """
    follows = []
    redis_key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.execute("GET", redis_key)
    if isinstance(follows_str, str):
        follows = json.loads(follows_str)
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        if not entity_id:
            raise Exception("wrong entity")
        follows = [eid for eid in follows if eid != entity_id]

    payload = json.dumps(follows, cls=CustomJSONEncoder)
    await redis.execute("SET", redis_key, payload)

    # keep the follower's cached profile counter in sync
    follower_str = await redis.execute("GET", f"author:id:{follower_id}")
    if isinstance(follower_str, str):
        follower = json.loads(follower_str)
        # BUGFIX: tolerate a cached profile without a "stat" section instead
        # of raising KeyError on follower["stat"]
        follower.setdefault("stat", {})[f"{entity_type}s"] = len(follows)
        await cache_author(follower)
|
2024-02-29 21:51:49 +00:00
|
|
|
|
|
|
|
|
2024-05-20 23:01:18 +00:00
|
|
|
async def get_cached_author(author_id: int, get_with_stat):
    """Return an author by id, preferring the Redis cache.

    On a cache miss, loads the author from the database through
    ``get_with_stat`` and refreshes the cache.

    NOTE(review): a cache hit yields a plain dict while the DB path returns
    the ORM-ish object from ``get_with_stat`` — callers must cope with both.
    NOTE(review): here ``get_with_stat`` receives ``session.execute(query)``,
    while get_cached_author_by_user_id passes the raw query — confirm which
    contract is intended.
    """
    if not author_id:
        return
    raw = await redis.execute("GET", f"author:id:{author_id}")
    if isinstance(raw, str):
        return json.loads(raw)
    if get_with_stat:
        with local_session() as session:
            query = select(Author).filter(Author.id == author_id)
            rows = get_with_stat(session.execute(query))
            if rows:
                [author] = rows
                if author:
                    await cache_author(author.dict())
                    return author
|
2024-05-20 22:40:57 +00:00
|
|
|
|
|
|
|
|
2024-06-05 15:51:12 +00:00
|
|
|
async def get_cached_author_by_user_id(user_id: str, get_with_stat) -> dict:
    """Resolve an author dict by the auth-service user id.

    Looks up the ``author:user:{user_id}`` -> author id mapping first; on a
    miss, fetches the author (with stats) from the database and caches it.
    Returns None when nothing is found.
    """
    mapped_id = await redis.execute("GET", f"author:user:{user_id.strip()}")
    result = None
    if mapped_id:
        # mapping known: pull the full cached payload by author id
        raw = await redis.execute("GET", f"author:id:{mapped_id}")
        if raw:
            result = json.loads(raw)
    else:
        query = select(Author).filter(Author.user == user_id)
        rows = get_with_stat(query)
        if rows:
            [author_with_stat] = rows
            if author_with_stat:
                await cache_author(author_with_stat.dict())
                result = author_with_stat.dict()
    return result
|
2024-05-20 22:40:57 +00:00
|
|
|
|
|
|
|
|
2024-06-05 14:45:55 +00:00
|
|
|
async def get_cached_topic_by_slug(slug: str, get_with_stat):
    """Return a topic by slug, preferring the Redis cache.

    On a cache miss, loads the topic from the database through
    ``get_with_stat`` and refreshes the cache.
    """
    cached_result = await redis.execute("GET", f"topic:slug:{slug}")
    if isinstance(cached_result, str):
        return json.loads(cached_result)
    elif get_with_stat:
        with local_session() as session:
            topic_query = select(Topic).filter(Topic.slug == slug)
            result = get_with_stat(session.execute(topic_query))
            if result:
                [topic] = result
                if topic:
                    # BUGFIX: cache_topic expects a dict (it calls .get() and
                    # json.dumps), but `topic` here is the DB row object —
                    # serialize first, matching the cache_author(...dict())
                    # pattern used in get_cached_author.
                    await cache_topic(topic.dict())
                    return topic
|
|
|
|
|
|
|
|
|
2024-05-30 16:42:38 +00:00
|
|
|
async def get_cached_authors_by_ids(authors_ids: List[int]) -> List[Author | dict]:
    """Fetch cached author dicts for the given ids.

    Falsy ids and cache misses are skipped silently, so the result may be
    shorter than the input list.
    """
    found = []
    for aid in authors_ids:
        if not aid:
            continue
        raw = await redis.execute("GET", f"author:id:{aid}")
        if isinstance(raw, str):
            parsed = json.loads(raw)
            if parsed:
                found.append(parsed)
    return found
|
2024-05-06 21:02:15 +00:00
|
|
|
|
2024-05-20 23:56:58 +00:00
|
|
|
|
2024-06-05 14:45:55 +00:00
|
|
|
async def get_cached_topic_authors(topic_id: int):
    """Return author dicts for everyone with a published shout in the topic.

    The id list is cached under ``topic:authors:{topic_id}``; on a miss it is
    recomputed from the database and written back.
    """
    rkey = f"topic:authors:{topic_id}"
    cached = await redis.execute("GET", rkey)
    if isinstance(cached, str):
        authors_ids = json.loads(cached)
    else:
        # only authors of shouts that are published and not deleted
        authors_query = (
            select(ShoutAuthor.author)
            .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
            .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
            .filter(
                and_(
                    ShoutTopic.topic == topic_id,
                    Shout.published_at.is_not(None),
                    Shout.deleted_at.is_(None),
                )
            )
        )
        with local_session() as session:
            authors_ids = [row_id for (row_id,) in session.execute(authors_query)]
        await redis.execute("SET", rkey, json.dumps(authors_ids))
    authors = await get_cached_authors_by_ids(authors_ids)
    logger.debug(f"topic#{topic_id} cache updated with {len(authors)} authors")
    return authors
|
2024-05-03 11:12:57 +00:00
|
|
|
|
|
|
|
|
2024-05-20 22:40:57 +00:00
|
|
|
async def get_cached_topic_followers(topic_id: int):
    """Return the followers of a topic, cache-first.

    NOTE(review): the DB fallback never writes ``topic:followers:{topic_id}``
    back to Redis — presumably that key is maintained elsewhere; confirm.
    NOTE(review): a cache hit returns whatever list was stored under the key,
    while the DB path returns author dicts — verify callers expect both.
    """
    rkey = f"topic:followers:{topic_id}"
    cached = await redis.execute("GET", rkey)
    if isinstance(cached, str):
        stored = json.loads(cached)
        if isinstance(stored, list):
            return stored
    with local_session() as session:
        rows = (
            session.query(Author.id)
            .join(
                TopicFollower,
                and_(TopicFollower.topic == topic_id, TopicFollower.follower == Author.id),
            )
            .all()
        )
    followers_ids = [row[0] for row in rows]
    followers = await get_cached_authors_by_ids(followers_ids)
    logger.debug(f"topic#{topic_id} cache updated with {len(followers)} followers")
    return followers
|
|
|
|
|
|
|
|
|
|
|
|
async def get_cached_author_followers(author_id: int):
    """Return author dicts following the given author.

    Requires the author's own profile to be cached (returns [] otherwise).
    The follower id list is cached under ``author:followers:{author_id}``;
    if its length disagrees with the profile's ``stat.followers`` counter,
    it is recomputed from the database and written back.
    """
    # follower profile
    cached_author = await redis.execute("GET", f"author:id:{author_id}")
    author = None
    # BUGFIX: previously this tested the *function* `cache_author` (always
    # truthy) instead of the fetched string `cached_author`, so json.loads
    # could receive None and raise.
    if isinstance(cached_author, str):
        author = json.loads(cached_author)
    if not author:
        return []

    followers_ids = []
    followers_rkey = f"author:followers:{author_id}"
    cached = await redis.execute("GET", followers_rkey)
    if isinstance(cached, str):
        followers_ids = json.loads(cached) or []
        # stale cache: the cached id count disagrees with the profile counter
        if str(len(followers_ids)) != str(author.get("stat", {}).get("followers")):
            with local_session() as session:
                followers_result = (
                    session.query(Author.id)
                    .join(
                        AuthorFollower,
                        and_(
                            AuthorFollower.author == author_id,
                            AuthorFollower.follower == Author.id,
                            Author.id != author_id,  # exclude the author from the followers
                        ),
                    )
                    .all()
                )
                followers_ids = [a[0] for a in followers_result]
            await redis.execute("SET", followers_rkey, json.dumps(followers_ids))
    # BUGFIX: previously `followers` was only assigned on the cache-consistent
    # branch, leaving it unbound (NameError) after a recompute; resolve the
    # dicts unconditionally from whatever id list we ended up with.
    followers = await get_cached_authors_by_ids(followers_ids)

    logger.debug(f"author#{author_id} cache updated with {len(followers)} followers")
    return followers
|
2024-05-03 11:12:57 +00:00
|
|
|
|
|
|
|
|
2024-05-30 16:42:38 +00:00
|
|
|
async def get_cached_follower_authors(author_id: int):
    """Return author dicts that the given author follows, cache-first.

    The id list lives under ``author:follows-authors:{author_id}``; a miss
    triggers a DB query and a write-back.
    """
    rkey = f"author:follows-authors:{author_id}"
    followed_ids = []
    cached = await redis.execute("GET", rkey)
    if not cached:
        follows_query = (
            select(Author.id)
            .select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
            .where(AuthorFollower.follower == author_id)
        )
        with local_session() as session:
            followed_ids = [row[0] for row in session.execute(follows_query)]
        await redis.execute("SET", rkey, json.dumps(followed_ids))
    elif isinstance(cached, str):
        followed_ids = json.loads(cached)
    return await get_cached_authors_by_ids(followed_ids)
|
|
|
|
|
|
|
|
|
2024-05-30 16:42:38 +00:00
|
|
|
async def get_cached_follower_topics(author_id: int):
    """Return topic dicts followed by the given author, cache-first.

    The followed-topic id list is cached under
    ``author:follows-topics:{author_id}``; each topic payload is then read
    from ``topic:id:{topic_id}``, skipping misses and duplicates.
    """
    rkey = f"author:follows-topics:{author_id}"
    cached = await redis.execute("GET", rkey)
    if cached and isinstance(cached, str):
        topics_ids = json.loads(cached)
    else:
        with local_session() as session:
            followed = (
                session.query(Topic)
                .select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
                .where(TopicFollower.follower == author_id)
                .all()
            )
        topics_ids = [t.id for t in followed]
        await redis.execute("SET", rkey, json.dumps(topics_ids))

    topics = []
    for tid in topics_ids:
        raw = await redis.execute("GET", f"topic:id:{tid}")
        if raw:
            topic = json.loads(raw)
            if topic and topic not in topics:
                topics.append(topic)

    logger.debug(f"author#{author_id} cache updated with {len(topics)} topics")
    return topics
|