core/resolvers/author.py

import asyncio
import json
import time

from sqlalchemy import select, or_, and_, text, desc
from sqlalchemy.orm import aliased
from sqlalchemy_searchable import search

from orm.author import Author, AuthorFollower
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.stat import (
    get_authors_with_stat_cached,
    author_follows_authors,
    author_follows_topics,
    get_with_stat,
)
from services.auth import login_required
from services.cache import set_author_cache, update_author_followers_cache
from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.memorycache import cache_region
from services.rediscache import redis
from services.schema import mutation, query
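
# Redis keys used by this module (summarized from the calls below):
#   user:{user_id}                      -> cached author dict looked up by auth user id
#   author:{author_id}                  -> cached author dict (with 'stat' when available)
#   author:{author_id}:followers        -> JSON list of follower dicts
#   author:{author_id}:follows-authors  -> JSON list of followed-author dicts
#   author:{author_id}:follows-topics   -> JSON list of followed-topic dicts

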
@mutation.field('update_author')
@login_required
async def update_author(_, info, profile):
    """Update the current user's author profile."""
    user_id = info.context.get('user_id')
    if not user_id:
        return {'error': 'unauthorized', 'author': None}
    try:
        with local_session() as session:
            author = session.query(Author).where(Author.user == user_id).first()
            if author:
                Author.update(author, profile)
                session.add(author)
                session.commit()
        return {'error': None, 'author': author}
    except Exception as exc:
        import traceback

        logger.error(traceback.format_exc())
        return {'error': exc, 'author': None}


@query.field('get_authors_all')
def get_authors_all(_, _info):
    """Return every Author row, without stats."""
    with local_session() as session:
        authors = session.query(Author).all()
        return authors


@query.field('get_author')
async def get_author(_, _info, slug='', author_id=0):
    """Get a single author by slug or id, preferring the Redis cache."""
    author_dict = None
    try:
        author = None
        author_query = None
        if slug:
            author_query = select(Author).filter(Author.slug == slug)
            [author] = await get_authors_with_stat_cached(author_query)
            if author:
                author_id = author.id
                logger.debug(f'found @{slug} with id {author_id}')
        if author_id:
            author_query = select(Author).filter(Author.id == author_id)
            cache_key = f'author:{author_id}'
            cache = await redis.execute('GET', cache_key)
            if cache and isinstance(cache, str):
                author_dict = json.loads(cache)
                logger.debug(f'got cached author {cache_key} -> {author_dict}')
                if not author_dict.get('stat'):
                    # cached entry has no stat block, force a refresh below
                    cache = ''
                    logger.warning(f'author {author_id} stat updating')
            if not cache:
                [author] = await get_authors_with_stat_cached(author_query)
                if not author or not author.stat:
                    [author] = get_with_stat(author_query)
                if author:
                    author_dict = author.dict()
                    author.stat = author_dict.get('stat')
            if author_dict:
                await set_author_cache(author_dict)
                logger.debug('updated author stored in cache')
        return author_dict
    except Exception as exc:
        import traceback

        logger.error(exc)
        logger.error(traceback.format_exc())
    return
    # {"slug": "anonymous", "id": 1, "name": "Аноним", "bio": "Неизвестно кто"}


async def get_author_by_user_id(user_id: str):
    """Get an author by auth service user id, checking the Redis cache first."""
    logger.info(f'getting author id for {user_id}')
    redis_key = f'user:{user_id}'
    author = None
    try:
        res = await redis.execute('GET', redis_key)
        if isinstance(res, str):
            author = json.loads(res)
            author_id = author.get('id')
            author_slug = author.get('slug')
            if author_id:
                logger.debug(f'got author @{author_slug} #{author_id} cached')
                return author
        q = select(Author).filter(Author.user == user_id)
        result = await get_authors_with_stat_cached(q)
        if result:
            [author] = result
            await set_author_cache(author.dict())
    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(exc)
    return author


@query.field('get_author_id')
async def get_author_id(_, _info, user: str):
    return await get_author_by_user_id(user)


@query.field('load_authors_by')
def load_authors_by(_, _info, by, limit, offset):
    """Load authors filtered by `by`, memoized per (by, limit, offset) via cache_region."""
    cache_key = f'{json.dumps(by)}_{limit}_{offset}'

    @cache_region.cache_on_arguments(cache_key)
    def _load_authors_by():
        logger.debug(f'loading authors by {by}')
        q = select(Author)
        if by.get('slug'):
            q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
        elif by.get('name'):
            q = q.filter(Author.name.ilike(f"%{by['name']}%"))
        elif by.get('topic'):
            q = (
                q.join(ShoutAuthor)
                .join(ShoutTopic)
                .join(Topic)
                .where(Topic.slug == str(by['topic']))
            )
        if by.get('last_seen'):  # delta in seconds from now
            before = int(time.time()) - by['last_seen']
            q = q.filter(Author.last_seen > before)
        elif by.get('created_at'):  # delta in seconds from now
            before = int(time.time()) - by['created_at']
            q = q.filter(Author.created_at > before)
        order = by.get('order')
        if order in ['likes', 'shouts', 'followers']:
            q = q.order_by(desc(text(f'{order}_stat')))
        # q = q.distinct()
        q = q.limit(limit).offset(offset)
        return get_with_stat(q)

    return _load_authors_by()
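
# Illustrative `by` filter for load_authors_by (keys match the handling above;
# the values here are made up for the example):
#   {'topic': 'culture', 'order': 'shouts', 'last_seen': 86400}
# selects authors who wrote in the 'culture' topic and were seen within the last
# day, ordered by their shouts counter.

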
@query.field('get_author_follows')
async def get_author_follows(_, _info, slug='', user=None, author_id=0):
    """Get the authors, topics, and communities the given author follows."""
    try:
        author_query = select(Author)
        if user:
            author_query = author_query.filter(Author.user == user)
        elif slug:
            author_query = author_query.filter(Author.slug == slug)
        elif author_id:
            author_query = author_query.filter(Author.id == author_id)
        else:
            raise ValueError('One of slug, user, or author_id must be provided')
        [result] = local_session().execute(author_query)
        if len(result) > 0:
            [author] = result
            if author and isinstance(author, Author):
                author_id = author.id

                rkey = f'author:{author_id}:follows-authors'
                logger.debug(f'getting {author_id} follows authors')
                cached = await redis.execute('GET', rkey)
                authors = []
                if not cached:
                    authors = author_follows_authors(author_id)
                    prepared = [a.dict() for a in authors]
                    await redis.execute(
                        'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                    )
                elif isinstance(cached, str):
                    authors = json.loads(cached)

                rkey = f'author:{author_id}:follows-topics'
                cached = await redis.execute('GET', rkey)
                topics = []
                if cached and isinstance(cached, str):
                    topics = json.loads(cached)
                if not cached:
                    topics = author_follows_topics(author_id)
                    prepared = [t.dict() for t in topics]
                    await redis.execute(
                        'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                    )
                return {
                    'topics': topics,
                    'authors': authors,
                    'communities': [
                        {'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
                    ],
                }
    except Exception:
        import traceback

        traceback.print_exc()
    return {'error': 'Author not found'}


@query.field('get_author_follows_topics')
async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None):
    """Get the topics the given author follows."""
    with local_session() as session:
        if user or slug:
            author_id_result = (
                session.query(Author.id)
                .filter(or_(Author.user == user, Author.slug == slug))
                .first()
            )
            author_id = author_id_result[0] if author_id_result else None
        if not author_id:
            raise ValueError('Author not found')
        logger.debug(f'getting {author_id} follows topics')
        rkey = f'author:{author_id}:follows-topics'
        cached = await redis.execute('GET', rkey)
        topics = []
        if isinstance(cached, str):
            topics = json.loads(cached)
        if not cached:
            topics = author_follows_topics(author_id)
            prepared = [topic.dict() for topic in topics]
            await redis.execute(
                'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
            )
        return topics


@query.field('get_author_follows_authors')
async def get_author_follows_authors(_, _info, slug='', user=None, author_id=None):
    """Get the authors the given author follows."""
    with local_session() as session:
        if user or slug:
            author_id_result = (
                session.query(Author.id)
                .filter(or_(Author.user == user, Author.slug == slug))
                .first()
            )
            author_id = author_id_result[0] if author_id_result else None
        if author_id:
            logger.debug(f'getting {author_id} follows authors')
            rkey = f'author:{author_id}:follows-authors'
            cached = await redis.execute('GET', rkey)
            authors = []
            if isinstance(cached, str):
                authors = json.loads(cached)
            if not authors:
                authors = author_follows_authors(author_id)
                prepared = [author.dict() for author in authors]
                await redis.execute(
                    'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                )
            return authors
        else:
            raise ValueError('Author not found')


def create_author(user_id: str, slug: str, name: str = ''):
    """Create an Author record for a new user (invoked from the auth webhook)."""
    with local_session() as session:
        try:
            author = None
            if user_id:
                author = session.query(Author).filter(Author.user == user_id).first()
            elif slug:
                author = session.query(Author).filter(Author.slug == slug).first()
            if not author:
                new_author = Author(user=user_id, slug=slug, name=name)
                session.add(new_author)
                session.commit()
                logger.info(f'author created by webhook {new_author.dict()}')
        except Exception as exc:
            logger.debug(exc)


@query.field('get_author_followers')
async def get_author_followers(_, _info, slug: str):
    """Get the followers of the author with the given slug, using the Redis cache when present."""
    logger.debug(f'getting followers for @{slug}')
    try:
        with local_session() as session:
            author_alias = aliased(Author)
            author_id = (
                session.query(author_alias.id)
                .filter(author_alias.slug == slug)
                .scalar()
            )
            if author_id:
                cached = await redis.execute('GET', f'author:{author_id}:followers')
                if not cached:
                    author_follower_alias = aliased(AuthorFollower, name='af')
                    q = select(Author).join(
                        author_follower_alias,
                        and_(
                            author_follower_alias.author == author_id,
                            author_follower_alias.follower == Author.id,
                        ),
                    )
                    results = await get_authors_with_stat_cached(q)
                    # refresh the followers cache in the background
                    _ = asyncio.create_task(
                        update_author_followers_cache(
                            author_id, [x.dict() for x in results]
                        )
                    )
                    logger.debug(f'@{slug} cache updated with {len(results)} followers')
                    return results
                else:
                    logger.debug(f'@{slug} got followers cached')
                    if isinstance(cached, str):
                        return json.loads(cached)
    except Exception as exc:
        import traceback

        logger.error(exc)
        logger.error(traceback.format_exc())
    return []


@query.field('search_authors')
async def search_authors(_, _info, what: str):
    """Full-text search over authors (sqlalchemy-searchable), returned with stats."""
    q = search(select(Author), what)
    return await get_authors_with_stat_cached(q)
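
# Illustrative GraphQL call (field and argument names come from the decorators above;
# the selected fields are assumptions about the Author type in the schema):
#   query { search_authors(what: "tolstoy") { id slug name } }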