This commit is contained in:
2024-02-21 19:14:58 +03:00
parent 88cd6e1060
commit 214af0cf51
33 changed files with 750 additions and 773 deletions

View File

@@ -12,48 +12,48 @@ from services.rediscache import redis
from services.viewed import ViewedStorage
@event.listens_for(Author, 'after_insert')
@event.listens_for(Author, 'after_update')
def after_author_update(mapper, connection, author: Author):
    """Refresh the cached author profile in Redis after an insert or update.

    Runs as a synchronous SQLAlchemy mapper-event listener; the Redis write
    is scheduled as an asyncio task so the ORM flush is not blocked.
    """
    # Serialize only the public profile fields the cache consumers read.
    profile = {
        'id': author.id,
        'name': author.name,
        'slug': author.slug,
        'pic': author.pic,
    }
    cache_key = f'user:{author.user}:author'
    # NOTE(review): asyncio.create_task needs a running event loop at the
    # moment the listener fires — presumably guaranteed by the app; confirm.
    asyncio.create_task(redis.execute('set', cache_key, json.dumps(profile)))
@event.listens_for(TopicFollower, 'after_insert')
def after_topic_follower_insert(mapper, connection, target: TopicFollower):
    """Propagate a newly created topic subscription to the follows cache."""
    # is_insert=True — the follower row was just added.
    coro = handle_topic_follower_change(connection, target.topic, target.follower, True)
    asyncio.create_task(coro)
@event.listens_for(TopicFollower, 'after_delete')
def after_topic_follower_delete(mapper, connection, target: TopicFollower):
    """Propagate a removed topic subscription to the follows cache."""
    # is_insert=False — the follower row was just deleted.
    coro = handle_topic_follower_change(connection, target.topic, target.follower, False)
    asyncio.create_task(coro)
@event.listens_for(AuthorFollower, 'after_insert')
def after_author_follower_insert(mapper, connection, target: AuthorFollower):
    """Propagate a newly created author subscription to the follows cache."""
    # is_insert=True — the follower row was just added.
    coro = handle_author_follower_change(connection, target.author, target.follower, True)
    asyncio.create_task(coro)
@event.listens_for(AuthorFollower, 'after_delete')
def after_author_follower_delete(mapper, connection, target: AuthorFollower):
    """Propagate a removed author subscription to the follows cache."""
    # is_insert=False — the follower row was just deleted.
    coro = handle_author_follower_change(connection, target.author, target.follower, False)
    asyncio.create_task(coro)
@@ -63,26 +63,26 @@ def after_author_follower_delete(mapper, connection, target: AuthorFollower):
async def update_follows_for_user(
    connection, user_id, entity_type, entity: dict, is_insert
):
    """Add or remove *entity* in the user's cached follows list in Redis.

    :param connection: DB connection (kept for signature parity with the
        event handlers; not used for the cache write itself)
    :param user_id: owner of the ``user:{user_id}:follows`` Redis key
    :param entity_type: singular kind, e.g. ``'topic'`` or ``'author'``;
        pluralized with an ``s`` to select the list inside the cache dict
    :param entity: serialized entity dict; must carry an ``'id'`` key
    :param is_insert: True to append the entity, False to remove it by id
    """
    redis_key = f'user:{user_id}:follows'
    follows_str = await redis.get(redis_key)
    if follows_str:
        follows = json.loads(follows_str)
    else:
        # Fresh cache entry: every user implicitly follows the root community.
        follows = {
            'topics': [],
            'authors': [],
            'communities': [
                {'slug': 'discours', 'name': 'Дискурс', 'id': 1, 'desc': ''}
            ],
        }
    # setdefault guards against a stale cached dict missing this list,
    # which would otherwise raise KeyError.
    entities = follows.setdefault(f'{entity_type}s', [])
    if is_insert:
        # Fix: do not append a duplicate when the same follow event
        # is delivered more than once.
        if all(e['id'] != entity['id'] for e in entities):
            entities.append(entity)
    else:
        # Remove the entity from follows
        follows[f'{entity_type}s'] = [
            e for e in entities if e['id'] != entity['id']
        ]
    await redis.execute('set', redis_key, json.dumps(follows))
async def handle_author_follower_change(connection, author_id, follower_id, is_insert):
@@ -93,17 +93,24 @@ async def handle_author_follower_change(connection, author_id, follower_id, is_i
q
).first()
author.stat = {
"shouts": shouts_stat,
"viewed": await ViewedStorage.get_author(author.slug),
"followers": followers_stat,
"followings": followings_stat,
'shouts': shouts_stat,
'viewed': await ViewedStorage.get_author(author.slug),
'followers': followers_stat,
'followings': followings_stat,
}
follower = await conn.execute(
select(Author).filter(Author.id == follower_id)
).first()
if follower and author:
await update_follows_for_user(
connection, follower.user, "author", author.dict(), is_insert
connection, follower.user, 'author', {
"id": author.id,
"name": author.name,
"slug": author.slug,
"pic": author.pic,
"bio": author.bio,
"stat": author.stat
}, is_insert
)
@@ -115,30 +122,37 @@ async def handle_topic_follower_change(connection, topic_id, follower_id, is_ins
q
).first()
topic.stat = {
"shouts": shouts_stat,
"authors": authors_stat,
"followers": followers_stat,
"viewed": await ViewedStorage.get_topic(topic.slug),
'shouts': shouts_stat,
'authors': authors_stat,
'followers': followers_stat,
'viewed': await ViewedStorage.get_topic(topic.slug),
}
follower = connection.execute(
select(Author).filter(Author.id == follower_id)
).first()
if follower and topic:
await update_follows_for_user(
connection, follower.user, "topic", topic.dict(), is_insert
connection, follower.user, 'topic', {
"id": topic.id,
"title": topic.title,
"slug": topic.slug,
"body": topic.body,
"stat": topic.stat
}, is_insert
)
BATCH_SIZE = 33
class FollowsCached:
lock = asyncio.Lock()
@staticmethod
async def update_cache():
BATCH_SIZE = 30 # Adjust batch size as needed
with local_session() as session:
authors = session.query(Author).all()
total_authors = len(authors)
for i in range(0, total_authors, BATCH_SIZE):
q = select(Author)
q = add_author_stat_columns(q)
authors = session.execute(q)
for i in range(0, len(authors), BATCH_SIZE):
batch_authors = authors[i : i + BATCH_SIZE]
await asyncio.gather(
*[
@@ -149,24 +163,26 @@ class FollowsCached:
@staticmethod
async def update_author_cache(author: Author):
    """Write the author's profile and follows snapshots into Redis.

    Two keys are refreshed: ``user:{user}:author`` (profile fields plus the
    precomputed ``stat`` dict) and ``user:{user}:follows`` (the dict returned
    by ``get_author_follows``, stored only when it actually is a dict).
    """
    # Fix: guard BEFORE dereferencing author attributes — the original built
    # the Redis key from author.user first, so the isinstance check could
    # never protect against a non-Author value.
    if not isinstance(author, Author):
        return
    profile = {
        'id': author.id,
        'name': author.name,
        'slug': author.slug,
        'pic': author.pic,
        'bio': author.bio,
        # NOTE(review): author.stat is assumed to be set by the stat
        # pipeline and JSON-serializable — confirm.
        'stat': author.stat,
    }
    await redis.execute('set', f'user:{author.user}:author', json.dumps(profile))
    follows = await get_author_follows(None, None, user=author.user)
    if isinstance(follows, dict):
        await redis.execute('set', f'user:{author.user}:follows', json.dumps(follows))
@staticmethod
async def worker():
@@ -178,7 +194,7 @@ class FollowsCached:
await asyncio.sleep(10 * 60 * 60)
except asyncio.CancelledError:
# Handle cancellation due to SIGTERM
logger.info("Cancellation requested. Cleaning up...")
logger.info('Cancellation requested. Cleaning up...')
# Perform any necessary cleanup before exiting the loop
break
except Exception as exc: