fmt

Untone 2024-02-21 19:14:58 +03:00
parent 88cd6e1060
commit 823e59ea74
33 changed files with 697 additions and 752 deletions

main.py

@@ -15,24 +15,24 @@ from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE
import_module('resolvers')
schema = make_executable_schema(load_schema_from_path('schema/'), resolvers)
async def start():
if MODE == 'development':
if not exists(DEV_SERVER_PID_FILE_NAME):
# pid file management
with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
f.write(str(os.getpid()))
print(f'[main] process started in {MODE} mode')
# main starlette app object with ariadne mounted in root
app = Starlette(
routes=[
Route('/', GraphQL(schema, debug=True)),
Route('/new-author', WebhookEndpoint),
],
on_startup=[
redis.connect,


@@ -5,36 +5,36 @@ from services.db import Base
class AuthorRating(Base):
__tablename__ = 'author_rating'
id = None # type: ignore
rater = Column(ForeignKey('author.id'), primary_key=True)
author = Column(ForeignKey('author.id'), primary_key=True)
plus = Column(Boolean)
class AuthorFollower(Base):
__tablename__ = 'author_follower'
id = None # type: ignore
follower = Column(ForeignKey('author.id'), primary_key=True)
author = Column(ForeignKey('author.id'), primary_key=True)
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
auto = Column(Boolean, nullable=False, default=False)
class Author(Base):
__tablename__ = 'author'
user = Column(String) # unbounded link with authorizer's User type
name = Column(String, nullable=True, comment='Display name')
slug = Column(String, unique=True, comment="Author's slug")
bio = Column(String, nullable=True, comment='Bio') # status description
about = Column(String, nullable=True, comment='About') # long and formatted
pic = Column(String, nullable=True, comment='Picture')
links = Column(JSON, nullable=True, comment='Links')
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
deleted_at = Column(Integer, nullable=True, comment='Deleted at')


@@ -6,20 +6,20 @@ from services.db import Base
class ShoutCollection(Base):
__tablename__ = 'shout_collection'
id = None # type: ignore
shout = Column(ForeignKey('shout.id'), primary_key=True)
collection = Column(ForeignKey('collection.id'), primary_key=True)
class Collection(Base):
__tablename__ = 'collection'
slug = Column(String, unique=True)
title = Column(String, nullable=False, comment='Title')
body = Column(String, nullable=True, comment='Body')
pic = Column(String, nullable=True, comment='Picture')
created_at = Column(Integer, default=lambda: int(time.time()))
created_by = Column(ForeignKey('author.id'), comment='Created By')
published_at = Column(Integer, default=lambda: int(time.time()))


@@ -8,22 +8,22 @@ from orm.author import Author
class CommunityAuthor(Base):
__tablename__ = 'community_author'
id = None # type: ignore
author = Column(ForeignKey('author.id'), primary_key=True)
community = Column(ForeignKey('community.id'), primary_key=True)
joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
role = Column(String, nullable=False)
class Community(Base):
__tablename__ = 'community'
name = Column(String, nullable=False)
slug = Column(String, nullable=False, unique=True)
desc = Column(String, nullable=False, default='')
pic = Column(String, nullable=False, default='')
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
authors = relationship(Author, secondary='community_author')


@@ -7,19 +7,19 @@ from services.db import Base
class InviteStatus(Enumeration):
PENDING = 'PENDING'
ACCEPTED = 'ACCEPTED'
REJECTED = 'REJECTED'
class Invite(Base):
__tablename__ = 'invite'
inviter_id = Column(ForeignKey('author.id'), primary_key=True)
author_id = Column(ForeignKey('author.id'), primary_key=True)
shout_id = Column(ForeignKey('shout.id'), primary_key=True)
status = Column(String, default=InviteStatus.PENDING.value)
inviter = relationship('author', foreign_keys=[inviter_id])
author = relationship('author', foreign_keys=[author_id])
shout = relationship('shout')


@@ -10,34 +10,34 @@ class ReactionKind(Enumeration):
# TYPE = <reaction index> # rating diff
# editor mode
AGREE = 'AGREE' # +1
DISAGREE = 'DISAGREE' # -1
ASK = 'ASK' # +0
PROPOSE = 'PROPOSE' # +0
PROOF = 'PROOF' # +1
DISPROOF = 'DISPROOF' # -1
ACCEPT = 'ACCEPT' # +1
REJECT = 'REJECT' # -1
# public feed
QUOTE = 'QUOTE' # +0 TODO: use to bookmark in collection
COMMENT = 'COMMENT' # +0
LIKE = 'LIKE' # +1
DISLIKE = 'DISLIKE' # -1
class Reaction(Base):
__tablename__ = 'reaction'
body = Column(String, default='', comment='Reaction Body')
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
updated_at = Column(Integer, nullable=True, comment='Updated at')
deleted_at = Column(Integer, nullable=True, comment='Deleted at')
deleted_by = Column(ForeignKey('author.id'), nullable=True)
reply_to = Column(ForeignKey('reaction.id'), nullable=True)
quote = Column(String, nullable=True, comment='Original quoted text')
shout = Column(ForeignKey('shout.id'), nullable=False)
created_by = Column(ForeignKey('author.id'), nullable=False)
kind = Column(String, nullable=False)
oid = Column(String)


@@ -11,44 +11,44 @@ from orm.topic import Topic
class ShoutTopic(Base):
__tablename__ = 'shout_topic'
id = None # type: ignore
shout = Column(ForeignKey('shout.id'), primary_key=True)
topic = Column(ForeignKey('topic.id'), primary_key=True)
main = Column(Boolean, nullable=True)
class ShoutReactionsFollower(Base):
__tablename__ = 'shout_reactions_followers'
id = None # type: ignore
follower = Column(ForeignKey('author.id'), primary_key=True)
shout = Column(ForeignKey('shout.id'), primary_key=True)
auto = Column(Boolean, nullable=False, default=False)
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
deleted_at = Column(Integer, nullable=True)
class ShoutAuthor(Base):
__tablename__ = 'shout_author'
id = None # type: ignore
shout = Column(ForeignKey('shout.id'), primary_key=True)
author = Column(ForeignKey('author.id'), primary_key=True)
caption = Column(String, nullable=True, default='')
class ShoutCommunity(Base):
__tablename__ = 'shout_community'
id = None # type: ignore
shout = Column(ForeignKey('shout.id'), primary_key=True)
community = Column(ForeignKey('community.id'), primary_key=True)
class Shout(Base):
__tablename__ = 'shout'
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
updated_at = Column(Integer, nullable=True)
@@ -56,28 +56,28 @@ class Shout(Base):
featured_at = Column(Integer, nullable=True)
deleted_at = Column(Integer, nullable=True)
created_by = Column(ForeignKey('author.id'), nullable=False)
updated_by = Column(ForeignKey('author.id'), nullable=True)
deleted_by = Column(ForeignKey('author.id'), nullable=True)
body = Column(String, nullable=False, comment='Body')
slug = Column(String, unique=True)
cover = Column(String, nullable=True, comment='Cover image url')
cover_caption = Column(String, nullable=True, comment='Cover image alt caption')
lead = Column(String, nullable=True)
description = Column(String, nullable=True)
title = Column(String, nullable=False)
subtitle = Column(String, nullable=True)
layout = Column(String, nullable=False, default='article')
media = Column(JSON, nullable=True)
authors = relationship(Author, secondary='shout_author')
topics = relationship(Topic, secondary='shout_topic')
communities = relationship(Community, secondary='shout_community')
reactions = relationship(Reaction)
lang = Column(String, nullable=False, default='ru', comment='Language')
version_of = Column(ForeignKey('shout.id'), nullable=True)
oid = Column(String, nullable=True)
seo = Column(String, nullable=True) # JSON


@@ -6,21 +6,21 @@ from services.db import Base
class TopicFollower(Base):
__tablename__ = 'topic_followers'
id = None # type: ignore
follower = Column(ForeignKey('author.id'), primary_key=True)
topic = Column(ForeignKey('topic.id'), primary_key=True)
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
auto = Column(Boolean, nullable=False, default=False)
class Topic(Base):
__tablename__ = 'topic'
slug = Column(String, unique=True)
title = Column(String, nullable=False, comment='Title')
body = Column(String, nullable=True, comment='Body')
pic = Column(String, nullable=True, comment='Picture')
community = Column(ForeignKey('community.id'), default=1)
oid = Column(String, nullable=True, comment='Old ID')


@@ -6,7 +6,7 @@ from services.db import Base
class User(Base):
__tablename__ = 'authorizer_users'
id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
key = Column(String)
@@ -24,7 +24,7 @@ class User(Base):
# preferred_username = Column(String, nullable=False)
picture = Column(String)
revoked_timestamp = Column(Integer)
roles = Column(String, default='author, reader')
signup_methods = Column(String, default='magic_link_login')
created_at = Column(Integer, default=lambda: int(time.time()))
updated_at = Column(Integer, default=lambda: int(time.time()))


@@ -46,6 +46,11 @@ ignore = []
line-length = 120
target-version = "py312"
[tool.ruff.format]
# Prefer single quotes over double quotes.
quote-style = "single"
skip-magic-trailing-comma = true
[tool.pyright]
venvPath = "."
venv = ".venv"
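The new [tool.ruff.format] block is what drives the single-quoted strings seen throughout the rest of this diff. As a rough sketch, assuming ruff is installed in the project's virtualenv, the formatting can be reproduced or verified locally with:
ruff format .          # rewrite files using the settings from pyproject.toml
ruff format --check .  # only report files that would change, e.g. as a CI gate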


@@ -42,43 +42,43 @@ from resolvers.topic import (
__all__ = [
# author
'get_author',
'get_author_id',
'get_author_follows',
'get_authors_all',
'load_authors_by',
'rate_author',
'update_author',
# community
'get_community',
'get_communities_all',
# topic
'get_topic',
'get_topics_all',
'get_topics_by_community',
'get_topics_by_author',
# reader
'get_shout',
'load_shouts_by',
'load_shouts_feed',
'load_shouts_search',
'load_shouts_followed',
'load_shouts_unrated',
'load_shouts_random_top',
'load_shouts_random_topic',
# follower
'follow',
'unfollow',
'get_topic_followers',
'get_shout_followers',
'get_author_followers',
# editor
'create_shout',
'update_shout',
'delete_shout',
# reaction
'create_reaction',
'update_reaction',
'delete_reaction',
'load_reactions_by',
]


@@ -21,18 +21,18 @@ from services.logger import root_logger as logger
def add_author_stat_columns(q):
shout_author_aliased = aliased(ShoutAuthor)
q = q.outerjoin(shout_author_aliased).add_columns(
func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
)
followers_table = aliased(AuthorFollower)
q = q.outerjoin(followers_table, followers_table.author == Author.id).add_columns(
func.count(distinct(followers_table.follower)).label('followers_stat')
)
followings_table = aliased(AuthorFollower)
q = q.outerjoin(
followings_table, followings_table.follower == Author.id
).add_columns(func.count(distinct(followers_table.author)).label('followings_stat'))
q = q.group_by(Author.id)
return q
@@ -45,29 +45,29 @@ async def get_authors_from_query(q):
q
):
author.stat = {
'shouts': shouts_stat,
'viewed': await ViewedStorage.get_author(author.slug),
'followers': followers_stat,
'followings': followings_stat,
}
authors.append(author)
return authors
@mutation.field('update_author')
@login_required
async def update_author(_, info, profile):
user_id = info.context['user_id']
with local_session() as session:
author = session.query(Author).where(Author.user == user_id).first()
Author.update(author, profile)
session.add(author)
session.commit()
return {'error': None, 'author': author}
# TODO: caching query
@query.field('get_authors_all')
async def get_authors_all(_, _info):
authors = []
with local_session() as session:
@@ -168,19 +168,19 @@ async def load_author_with_stats(q):
)
.count()
)
author.stat['rating'] = likes_count - dislikes_count
author.stat['rating_shouts'] = count_author_shouts_rating(
session, author.id
)
author.stat['rating_comments'] = count_author_comments_rating(
session, author.id
)
author.stat['commented'] = comments_count
return author
@query.field('get_author')
async def get_author(_, _info, slug='', author_id=None):
q = None
if slug or author_id:
if bool(slug):
@@ -192,64 +192,64 @@ async def get_author(_, _info, slug="", author_id=None):
async def get_author_by_user_id(user_id: str):
redis_key = f'user:{user_id}:author'
res = await redis.execute('GET', redis_key)
if isinstance(res, str):
author = json.loads(res)
if author.get('id'):
logger.debug(f'got cached author: {author}')
return author
logger.info(f'getting author id for {user_id}')
q = select(Author).filter(Author.user == user_id)
author = await load_author_with_stats(q)
if author:
await redis.execute(
'set',
redis_key,
json.dumps(
{
'id': author.id,
'name': author.name,
'slug': author.slug,
'pic': author.pic,
}
),
)
return author
@query.field('get_author_id')
async def get_author_id(_, _info, user: str):
return await get_author_by_user_id(user)
@query.field('load_authors_by')
async def load_authors_by(_, _info, by, limit, offset):
q = select(Author)
q = add_author_stat_columns(q)
if by.get('slug'):
q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
elif by.get('name'):
q = q.filter(Author.name.ilike(f"%{by['name']}%"))
elif by.get('topic'):
q = (
q.join(ShoutAuthor)
.join(ShoutTopic)
.join(Topic)
.where(Topic.slug == by['topic'])
)
if by.get('last_seen'): # in unix time
before = int(time.time()) - by['last_seen']
q = q.filter(Author.last_seen > before)
elif by.get('created_at'): # in unix time
before = int(time.time()) - by['created_at']
q = q.filter(Author.created_at > before)
order = by.get('order')
if order == 'followers' or order == 'shouts':
q = q.order_by(desc(f'{order}_stat'))
q = q.limit(limit).offset(offset)
@@ -258,9 +258,9 @@ async def load_authors_by(_, _info, by, limit, offset):
return authors
@query.field('get_author_follows')
async def get_author_follows(
_, _info, slug='', user=None, author_id=None
) -> List[Author]:
user_id = user
if not user_id and author_id or slug:
@@ -277,13 +277,13 @@ async def get_author_follows(
return follows
else:
raise ValueError('Author not found')
@mutation.field('rate_author')
@login_required
async def rate_author(_, info, rated_slug, value):
user_id = info.context['user_id']
with local_session() as session:
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
@@ -312,19 +312,19 @@ async def rate_author(_, info, rated_slug, value):
session.add(rating)
session.commit()
except Exception as err:
return {'error': err}
return {}
async def create_author(user_id: str, slug: str, name: str = ''):
with local_session() as session:
new_author = Author(user=user_id, slug=slug, name=name)
session.add(new_author)
session.commit()
logger.info(f'author created by webhook {new_author.dict()}')
@query.field('get_author_followers')
async def get_author_followers(_, _info, slug) -> List[Author]:
q = select(Author)
q = add_author_stat_columns(q)


@@ -6,10 +6,10 @@ from services.db import local_session
from services.schema import mutation
@mutation.field('accept_invite')
@login_required
async def accept_invite(_, info, invite_id: int):
user_id = info.context['user_id']
# Check if the user exists
with local_session() as session:
@@ -30,19 +30,19 @@ async def accept_invite(_, info, invite_id: int):
session.delete(invite)
session.add(shout)
session.commit()
return {'success': True, 'message': 'Invite accepted'}
else:
return {'error': 'Shout not found'}
else:
return {'error': 'Invalid invite or already accepted/rejected'}
else:
return {'error': 'User not found'}
@mutation.field('reject_invite')
@login_required
async def reject_invite(_, info, invite_id: int):
user_id = info.context['user_id']
# Check if the user exists
with local_session() as session:
@@ -58,17 +58,17 @@ async def reject_invite(_, info, invite_id: int):
# Delete the invite
session.delete(invite)
session.commit()
return {'success': True, 'message': 'Invite rejected'}
else:
return {'error': 'Invalid invite or already accepted/rejected'}
else:
return {'error': 'User not found'}
@mutation.field('create_invite')
@login_required
async def create_invite(_, info, slug: str = '', author_id: int = 0):
user_id = info.context['user_id']
# Check if the inviter is the owner of the shout
with local_session() as session:
@@ -90,7 +90,7 @@ async def create_invite(_, info, slug: str = "", author_id: int = 0):
.first()
)
if existing_invite:
return {'error': 'Invite already sent'}
# Create a new invite
new_invite = Invite(
@@ -102,17 +102,17 @@ async def create_invite(_, info, slug: str = "", author_id: int = 0):
session.add(new_invite)
session.commit()
return {'error': None, 'invite': new_invite}
else:
return {'error': 'Invalid author'}
else:
return {'error': 'Access denied'}
@mutation.field('remove_author')
@login_required
async def remove_author(_, info, slug: str = '', author_id: int = 0):
user_id = info.context['user_id']
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
@@ -124,13 +124,13 @@ async def remove_author(_, info, slug: str = "", author_id: int = 0):
]
session.commit()
return {}
return {'error': 'Access denied'}
@mutation.field('remove_invite')
@login_required
async def remove_invite(_, info, invite_id: int):
user_id = info.context['user_id']
# Check if the user exists
with local_session() as session:
@@ -148,6 +148,6 @@ async def remove_invite(_, info, invite_id: int):
session.commit()
return {}
else:
return {'error': 'Invalid invite or already accepted/rejected'}
else:
return {'error': 'Author not found'}


@@ -14,12 +14,12 @@ def add_community_stat_columns(q):
shout_community_aliased = aliased(ShoutCommunity)
q = q.outerjoin(shout_community_aliased).add_columns(
func.count(distinct(shout_community_aliased.shout)).label('shouts_stat')
)
q = q.outerjoin(
community_followers, community_followers.author == Author.id
).add_columns(
func.count(distinct(community_followers.follower)).label('followers_stat')
)
q = q.group_by(Author.id)
@@ -32,8 +32,8 @@ def get_communities_from_query(q):
with local_session() as session:
for [c, shouts_stat, followers_stat] in session.execute(q):
c.stat = {
'shouts': shouts_stat,
'followers': followers_stat,
# "commented": commented_stat,
}
ccc.append(c)
@@ -72,7 +72,7 @@ def community_unfollow(follower_id, slug):
return False
@query.field('get_communities_all')
async def get_communities_all(_, _info):
q = select(Author)
q = add_community_stat_columns(q)
@@ -80,7 +80,7 @@ async def get_communities_all(_, _info):
return get_communities_from_query(q)
@query.field('get_community')
async def get_community(_, _info, slug):
q = select(Community).where(Community.slug == slug)
q = add_community_stat_columns(q)


@@ -18,20 +18,17 @@ from services.search import search_service
from services.logger import root_logger as logger
@query.field('get_shouts_drafts')
@login_required
async def get_shouts_drafts(_, info):
user_id = info.context['user_id']
shouts = []
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
q = (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(and_(Shout.deleted_at.is_(None), Shout.created_by == author.id))
.filter(Shout.published_at.is_(None))
.group_by(Shout.id)
@@ -40,28 +37,28 @@ async def get_shouts_drafts(_, info):
return shouts
@mutation.field('create_shout')
@login_required
async def create_shout(_, info, inp):
user_id = info.context['user_id']
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if isinstance(author, Author):
current_time = int(time.time())
slug = inp.get('slug') or f'draft-{current_time}'
shout_dict = {
'title': inp.get('title', ''),
'subtitle': inp.get('subtitle', ''),
'lead': inp.get('lead', ''),
'description': inp.get('description', ''),
'body': inp.get('body', ''),
'layout': inp.get('layout', 'article'),
'created_by': author.id,
'authors': [],
'slug': slug,
'topics': inp.get('topics', []),
'published_at': None,
'created_at': current_time, # Set created_at as Unix timestamp
}
new_shout = Shout(**shout_dict)
@@ -77,7 +74,7 @@ async def create_shout(_, info, inp):
topics = (
session.query(Topic)
.filter(Topic.slug.in_(inp.get('topics', [])))
.all()
)
for topic in topics:
@@ -89,20 +86,15 @@ async def create_shout(_, info, inp):
# notifier
# await notify_shout(shout_dict, 'create')
return {'shout': shout.dict()}
return {'error': 'cant create shout'}
def patch_main_topic(session, main_topic, shout):
old_main_topic = (
session.query(ShoutTopic)
.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True)))
.first()
)
@@ -112,25 +104,22 @@ def patch_main_topic(session, main_topic, shout):
new_main_topic = (
session.query(ShoutTopic)
.filter(
and_(ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id)
)
.first()
)
if old_main_topic and new_main_topic and old_main_topic is not new_main_topic:
ShoutTopic.update(old_main_topic, {'main': False})
session.add(old_main_topic)
ShoutTopic.update(new_main_topic, {'main': True})
session.add(new_main_topic)
def patch_topics(session, shout, topics_input):
new_topics_to_link = [
Topic(**new_topic) for new_topic in topics_input if new_topic['id'] < 0
]
if new_topics_to_link:
session.add_all(new_topics_to_link)
@@ -141,12 +130,12 @@ def patch_topics(session, shout, topics_input):
session.add(created_unlinked_topic)
existing_topics_input = [
topic_input for topic_input in topics_input if topic_input.get('id', 0) > 0
]
existing_topic_to_link_ids = [
existing_topic_input['id']
for existing_topic_input in existing_topics_input
if existing_topic_input['id'] not in [topic.id for topic in shout.topics]
]
for existing_topic_to_link_id in existing_topic_to_link_ids:
@@ -158,60 +147,54 @@ def patch_topics(session, shout, topics_input):
topic_to_unlink_ids = [
topic.id
for topic in shout.topics
if topic.id not in [topic_input['id'] for topic_input in existing_topics_input]
]
session.query(ShoutTopic).filter(
and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
).delete(synchronize_session=False)
@mutation.field('update_shout')
@login_required
async def update_shout(_, info, shout_id, shout_input=None, publish=False):
user_id = info.context['user_id']
roles = info.context['roles']
shout_input = shout_input or {}
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
current_time = int(time.time())
shout_id = shout_id or shout_input.get('id')
if isinstance(author, Author) and isinstance(shout_id, int):
shout = (
session.query(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(Shout.id == shout_id)
.first()
)
if not shout:
return {'error': 'shout not found'}
if (
shout.created_by is not author.id
and author.id not in shout.authors
and 'editor' not in roles
):
return {'error': 'access denied'}
# topics patch
topics_input = shout_input.get('topics')
if topics_input:
patch_topics(session, shout, topics_input)
del shout_input['topics']
# main topic
main_topic = shout_input.get('main_topic')
if main_topic:
patch_main_topic(session, main_topic, shout)
shout_input['updated_at'] = current_time
shout_input['published_at'] = current_time if publish else None
Shout.update(shout, shout_input)
session.add(shout)
session.commit()
@@ -219,44 +202,44 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
shout_dict = shout.dict()
if not publish:
await notify_shout(shout_dict, 'update')
else:
await notify_shout(shout_dict, 'published')
# search service indexing
search_service.index(shout)
return {'shout': shout_dict}
logger.debug(f' cannot update with data: {shout_input}')
return {'error': 'cant update shout'}
@mutation.field('delete_shout')
@login_required
async def delete_shout(_, info, shout_id):
user_id = info.context['user_id']
roles = info.context['roles']
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
return {'error': 'invalid shout id'}
if author and shout:
if (
shout.created_by is not author.id
and author.id not in shout.authors
and 'editor' not in roles
):
return {'error': 'access denied'}
for author_id in shout.authors:
reactions_unfollow(author_id, shout_id)
shout_dict = shout.dict()
shout_dict['deleted_at'] = int(time.time())
Shout.update(shout, shout_dict)
session.add(shout)
session.commit()
await notify_shout(shout_dict, 'delete')
return {}
@@ -290,7 +273,7 @@ def handle_proposing(session, r, shout):
if proposal.quote:
proposal_diff = get_diff(shout.body, proposal.quote)
proposal_dict = proposal.dict()
proposal_dict['quote'] = apply_diff(
replied_reaction.quote, proposal_diff
)
Reaction.update(proposal, proposal_dict)
@@ -298,7 +281,7 @@ def handle_proposing(session, r, shout):
# patch shout's body
shout_dict = shout.dict()
shout_dict['body'] = replied_reaction.quote
Shout.update(shout, shout_dict)
session.add(shout)
session.commit()


@@ -26,16 +26,16 @@ from services.logger import root_logger as logger
from services.rediscache import redis
@mutation.field('follow')
@login_required
async def follow(_, info, what, slug):
try:
user_id = info.context['user_id']
with local_session() as session:
actor = session.query(Author).filter(Author.user == user_id).first()
if actor:
follower_id = actor.id
if what == 'AUTHOR':
if author_follow(follower_id, slug):
author = (
session.query(Author.id).where(Author.slug == slug).one()
@@ -44,30 +44,30 @@ async def follow(_, info, what, slug):
session.query(Author).where(Author.id == follower_id).one()
)
await notify_follower(follower.dict(), author.id)
elif what == 'TOPIC':
topic_follow(follower_id, slug)
elif what == 'COMMUNITY':
community_follow(follower_id, slug)
elif what == 'REACTIONS':
reactions_follow(follower_id, slug)
except Exception as e:
logger.debug(info, what, slug)
logger.error(e)
return {'error': str(e)}
return {}
@mutation.field('unfollow')
@login_required
async def unfollow(_, info, what, slug):
user_id = info.context['user_id']
try:
with local_session() as session:
actor = session.query(Author).filter(Author.user == user_id).first()
if actor:
follower_id = actor.id
if what == 'AUTHOR':
if author_unfollow(follower_id, slug):
author = (
session.query(Author.id).where(Author.slug == slug).one()
@@ -75,15 +75,15 @@ async def unfollow(_, info, what, slug):
follower = (
session.query(Author).where(Author.id == follower_id).one()
)
await notify_follower(follower.dict(), author.id, 'unfollow')
elif what == 'TOPIC':
topic_unfollow(follower_id, slug)
elif what == 'COMMUNITY':
community_unfollow(follower_id, slug)
elif what == 'REACTIONS':
reactions_unfollow(follower_id, slug)
except Exception as e:
return {'error': str(e)}
return {}
@@ -97,14 +97,14 @@ def query_follows(user_id: str):
if isinstance(author, Author):
author_id = author.id
authors_query = (
select(column('name'), column('id'), column('slug'), column('pic'))
.select_from(Author)
.join(AuthorFollower, AuthorFollower.follower == author_id)
.filter(AuthorFollower.author == Author.id)
)
topics_query = (
select(column('title'), column('id'), column('slug'), column('pic'))
.select_from(Topic)
.join(TopicFollower, TopicFollower.follower == author_id)
.filter(TopicFollower.topic == Topic.id)
@@ -124,24 +124,24 @@ def query_follows(user_id: str):
# communities = session.query(Community).all()
return {
'topics': topics,
'authors': authors,
# "shouts": shouts,
'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours'}],
}
async def get_follows_by_user_id(user_id: str):
if user_id:
redis_key = f'user:{user_id}:follows'
res = await redis.execute('GET', redis_key)
if isinstance(res, str):
follows = json.loads(res)
return follows
logger.debug(f'getting follows for {user_id}')
follows = query_follows(user_id)
await redis.execute('SET', redis_key, json.dumps(follows))
return follows
@@ -227,7 +227,7 @@ def author_unfollow(follower_id, slug):
return False
@query.field('get_topic_followers')
async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author]:
q = select(Author)
q = add_topic_stat_columns(q)
@@ -241,9 +241,9 @@ async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author
return await get_topics_from_query(q)
@query.field('get_shout_followers')
def get_shout_followers(
_, _info, slug: str = '', shout_id: int | None = None
) -> List[Author]:
followers = []
with local_session() as session:


@ -21,22 +21,22 @@ from services.logger import root_logger as logger
def add_stat_columns(q, aliased_reaction): def add_stat_columns(q, aliased_reaction):
q = q.outerjoin(aliased_reaction).add_columns( q = q.outerjoin(aliased_reaction).add_columns(
func.sum(aliased_reaction.id).label("reacted_stat"), func.sum(aliased_reaction.id).label('reacted_stat'),
func.sum( func.sum(
case((aliased_reaction.kind == ReactionKind.COMMENT.value, 1), else_=0) case((aliased_reaction.kind == ReactionKind.COMMENT.value, 1), else_=0)
).label("comments_stat"), ).label('comments_stat'),
func.sum( func.sum(
case((aliased_reaction.kind == ReactionKind.LIKE.value, 1), else_=0) case((aliased_reaction.kind == ReactionKind.LIKE.value, 1), else_=0)
).label("likes_stat"), ).label('likes_stat'),
func.sum( func.sum(
case((aliased_reaction.kind == ReactionKind.DISLIKE.value, 1), else_=0) case((aliased_reaction.kind == ReactionKind.DISLIKE.value, 1), else_=0)
).label("dislikes_stat"), ).label('dislikes_stat'),
func.max( func.max(
case( case(
(aliased_reaction.kind != ReactionKind.COMMENT.value, None), (aliased_reaction.kind != ReactionKind.COMMENT.value, None),
else_=aliased_reaction.created_at, else_=aliased_reaction.created_at,
) )
).label("last_comment"), ).label('last_comment'),
) )
return q return q
@ -101,7 +101,7 @@ def check_to_unfeature(session, rejecter_id, reaction):
async def set_featured(session, shout_id): async def set_featured(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first() s = session.query(Shout).where(Shout.id == shout_id).first()
s.featured_at = int(time.time()) s.featured_at = int(time.time())
Shout.update(s, {"featured_at": int(time.time())}) Shout.update(s, {'featured_at': int(time.time())})
author = session.query(Author).filter(Author.id == s.created_by).first() author = session.query(Author).filter(Author.id == s.created_by).first()
if author: if author:
await add_user_role(str(author.user)) await add_user_role(str(author.user))
@ -111,7 +111,7 @@ async def set_featured(session, shout_id):
def set_unfeatured(session, shout_id): def set_unfeatured(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first() s = session.query(Shout).where(Shout.id == shout_id).first()
Shout.update(s, {"featured_at": None}) Shout.update(s, {'featured_at': None})
session.add(s) session.add(s)
session.commit() session.commit()
@ -124,7 +124,7 @@ async def _create_reaction(session, shout, author, reaction):
# collaborative editing # collaborative editing
if ( if (
rdict.get("reply_to") rdict.get('reply_to')
and r.kind in RATING_REACTIONS and r.kind in RATING_REACTIONS
and author.id in shout.authors and author.id in shout.authors
): ):
@ -137,42 +137,42 @@ async def _create_reaction(session, shout, author, reaction):
await set_featured(session, shout.id) await set_featured(session, shout.id)
# reactions auto-following # reactions auto-following
reactions_follow(author.id, reaction["shout"], True) reactions_follow(author.id, reaction['shout'], True)
rdict["shout"] = shout.dict() rdict['shout'] = shout.dict()
rdict["created_by"] = author.dict() rdict['created_by'] = author.dict()
rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0} rdict['stat'] = {'commented': 0, 'reacted': 0, 'rating': 0}
# notifications call # notifications call
await notify_reaction(rdict, "create") await notify_reaction(rdict, 'create')
return rdict return rdict
@mutation.field("create_reaction") @mutation.field('create_reaction')
@login_required @login_required
async def create_reaction(_, info, reaction): async def create_reaction(_, info, reaction):
user_id = info.context["user_id"] user_id = info.context['user_id']
shout_id = reaction.get("shout") shout_id = reaction.get('shout')
if not shout_id: if not shout_id:
return {"error": "Shout ID is required to create a reaction."} return {'error': 'Shout ID is required to create a reaction.'}
try: try:
with local_session() as session: with local_session() as session:
shout = session.query(Shout).filter(Shout.id == shout_id).first() shout = session.query(Shout).filter(Shout.id == shout_id).first()
author = session.query(Author).filter(Author.user == user_id).first() author = session.query(Author).filter(Author.user == user_id).first()
if shout and author: if shout and author:
reaction["created_by"] = author.id reaction['created_by'] = author.id
kind = reaction.get("kind") kind = reaction.get('kind')
shout_id = shout.id shout_id = shout.id
if not kind and isinstance(reaction.get("body"), str): if not kind and isinstance(reaction.get('body'), str):
kind = ReactionKind.COMMENT.value kind = ReactionKind.COMMENT.value
if not kind: if not kind:
return {"error": "cannot create reaction without a kind"} return {'error': 'cannot create reaction without a kind'}
if kind in RATING_REACTIONS: if kind in RATING_REACTIONS:
opposite_kind = ( opposite_kind = (
@ -188,7 +188,7 @@ async def create_reaction(_, info, reaction):
Reaction.kind.in_(RATING_REACTIONS), Reaction.kind.in_(RATING_REACTIONS),
) )
) )
reply_to = reaction.get("reply_to") reply_to = reaction.get('reply_to')
if reply_to: if reply_to:
q = q.filter(Reaction.reply_to == reply_to) q = q.filter(Reaction.reply_to == reply_to)
rating_reactions = session.execute(q).all() rating_reactions = session.execute(q).all()
@ -201,31 +201,31 @@ async def create_reaction(_, info, reaction):
rating_reactions, rating_reactions,
) )
if same_rating: if same_rating:
return {"error": "You can't rate the same thing twice"} return {'error': "You can't rate the same thing twice"}
elif opposite_rating: elif opposite_rating:
return {"error": "Remove opposite vote first"} return {'error': 'Remove opposite vote first'}
elif filter(lambda r: r.created_by == author.id, rating_reactions): elif filter(lambda r: r.created_by == author.id, rating_reactions):
return {"error": "You can't rate your own thing"} return {'error': "You can't rate your own thing"}
rdict = await _create_reaction(session, shout, author, reaction) rdict = await _create_reaction(session, shout, author, reaction)
return {"reaction": rdict} return {'reaction': rdict}
except Exception as e: except Exception as e:
import traceback import traceback
traceback.print_exc() traceback.print_exc()
logger.error(f"{type(e).__name__}: {e}") logger.error(f'{type(e).__name__}: {e}')
return {"error": "Cannot create reaction."} return {'error': 'Cannot create reaction.'}
@mutation.field("update_reaction") @mutation.field('update_reaction')
@login_required @login_required
async def update_reaction(_, info, reaction): async def update_reaction(_, info, reaction):
user_id = info.context.get("user_id") user_id = info.context.get('user_id')
roles = info.context.get("roles") roles = info.context.get('roles')
rid = reaction.get("id") rid = reaction.get('id')
if rid and user_id and roles: if rid and user_id and roles:
del reaction["id"] del reaction['id']
with local_session() as session: with local_session() as session:
reaction_query = select(Reaction).filter(Reaction.id == int(rid)) reaction_query = select(Reaction).filter(Reaction.id == int(rid))
aliased_reaction = aliased(Reaction) aliased_reaction = aliased(Reaction)
@ -238,19 +238,19 @@ async def update_reaction(_, info, reaction):
) )
if not r: if not r:
return {"error": "invalid reaction id"} return {'error': 'invalid reaction id'}
author = session.query(Author).filter(Author.user == user_id).first() author = session.query(Author).filter(Author.user == user_id).first()
if author: if author:
if r.created_by != author.id and "editor" not in roles: if r.created_by != author.id and 'editor' not in roles:
return {"error": "access denied"} return {'error': 'access denied'}
body = reaction.get("body") body = reaction.get('body')
if body: if body:
r.body = body r.body = body
r.updated_at = int(time.time()) r.updated_at = int(time.time())
if r.kind != reaction["kind"]: if r.kind != reaction['kind']:
# Detecting a change of opinion could be implemented here # Detecting a change of opinion could be implemented here
pass pass
@ -259,79 +259,79 @@ async def update_reaction(_, info, reaction):
session.commit() session.commit()
r.stat = { r.stat = {
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
"rating": int(likes_stat or 0) - int(dislikes_stat or 0), 'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
} }
await notify_reaction(r.dict(), "update") await notify_reaction(r.dict(), 'update')
return {"reaction": r} return {'reaction': r}
else: else:
return {"error": "not authorized"} return {'error': 'not authorized'}
except Exception: except Exception:
import traceback import traceback
traceback.print_exc() traceback.print_exc()
return {"error": "cannot create reaction"} return {'error': 'cannot create reaction'}
@mutation.field("delete_reaction") @mutation.field('delete_reaction')
@login_required @login_required
async def delete_reaction(_, info, reaction_id: int): async def delete_reaction(_, info, reaction_id: int):
user_id = info.context["user_id"] user_id = info.context['user_id']
roles = info.context["roles"] roles = info.context['roles']
if isinstance(reaction_id, int) and user_id and isinstance(roles, list): if isinstance(reaction_id, int) and user_id and isinstance(roles, list):
with local_session() as session: with local_session() as session:
try: try:
author = session.query(Author).filter(Author.user == user_id).one() author = session.query(Author).filter(Author.user == user_id).one()
r = session.query(Reaction).filter(Reaction.id == reaction_id).one() r = session.query(Reaction).filter(Reaction.id == reaction_id).one()
if r and author: if r and author:
if r.created_by is author.id and "editor" not in roles: if r.created_by is author.id and 'editor' not in roles:
return {"error": "access denied"} return {'error': 'access denied'}
if r.kind in [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]: if r.kind in [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]:
session.delete(r) session.delete(r)
session.commit() session.commit()
await notify_reaction(r.dict(), "delete") await notify_reaction(r.dict(), 'delete')
except Exception as exc: except Exception as exc:
return {"error": f"cannot delete reaction: {exc}"} return {'error': f'cannot delete reaction: {exc}'}
return {"error": "cannot delete reaction"} return {'error': 'cannot delete reaction'}
def apply_reaction_filters(by, q): def apply_reaction_filters(by, q):
shout_slug = by.get("shout", None) shout_slug = by.get('shout', None)
if shout_slug: if shout_slug:
q = q.filter(Shout.slug == shout_slug) q = q.filter(Shout.slug == shout_slug)
elif by.get("shouts"): elif by.get('shouts'):
q = q.filter(Shout.slug.in_(by.get("shouts", []))) q = q.filter(Shout.slug.in_(by.get('shouts', [])))
created_by = by.get("created_by", None) created_by = by.get('created_by', None)
if created_by: if created_by:
q = q.filter(Author.id == created_by) q = q.filter(Author.id == created_by)
topic = by.get("topic", None) topic = by.get('topic', None)
if topic: if topic:
q = q.filter(Shout.topics.contains(topic)) q = q.filter(Shout.topics.contains(topic))
if by.get("comment", False): if by.get('comment', False):
q = q.filter(Reaction.kind == ReactionKind.COMMENT.value) q = q.filter(Reaction.kind == ReactionKind.COMMENT.value)
if by.get("rating", False): if by.get('rating', False):
q = q.filter(Reaction.kind.in_(RATING_REACTIONS)) q = q.filter(Reaction.kind.in_(RATING_REACTIONS))
by_search = by.get("search", "") by_search = by.get('search', '')
if len(by_search) > 2: if len(by_search) > 2:
q = q.filter(Reaction.body.ilike(f"%{by_search}%")) q = q.filter(Reaction.body.ilike(f'%{by_search}%'))
after = by.get("after", None) after = by.get('after', None)
if isinstance(after, int): if isinstance(after, int):
q = q.filter(Reaction.created_at > after) q = q.filter(Reaction.created_at > after)
return q return q
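
apply_reaction_filters simply maps keys of the by dict onto WHERE clauses; a typical call from load_reactions_by (values are illustrative, and q is the reaction query with Shout and Author already joined) might be:

by = {
    'shout': 'some-slug',     # only reactions on this shout
    'comment': True,          # only ReactionKind.COMMENT
    'search': 'interesting',  # Reaction.body ILIKE '%interesting%' (needs len > 2)
    'after': 1700000000,      # only reactions created after this unix timestamp
}
q = apply_reaction_filters(by, q)
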
@query.field("load_reactions_by") @query.field('load_reactions_by')
async def load_reactions_by(_, info, by, limit=50, offset=0): async def load_reactions_by(_, info, by, limit=50, offset=0):
""" """
:param info: graphql meta :param info: graphql meta
@ -368,7 +368,7 @@ async def load_reactions_by(_, info, by, limit=50, offset=0):
q = q.group_by(Reaction.id, Author.id, Shout.id, aliased_reaction.id) q = q.group_by(Reaction.id, Author.id, Shout.id, aliased_reaction.id)
# order by # order by
q = q.order_by(desc("created_at")) q = q.order_by(desc('created_at'))
# pagination # pagination
q = q.limit(limit).offset(offset) q = q.limit(limit).offset(offset)
@ -389,19 +389,19 @@ async def load_reactions_by(_, info, by, limit=50, offset=0):
reaction.created_by = author reaction.created_by = author
reaction.shout = shout reaction.shout = shout
reaction.stat = { reaction.stat = {
"rating": int(likes_stat or 0) - int(dislikes_stat or 0), 'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
} }
reactions.add(reaction) reactions.add(reaction)
# sort if by stat is present # sort if by stat is present
stat_sort = by.get("stat") stat_sort = by.get('stat')
if stat_sort: if stat_sort:
reactions = sorted( reactions = sorted(
reactions, reactions,
key=lambda r: r.stat.get(stat_sort) or r.created_at, key=lambda r: r.stat.get(stat_sort) or r.created_at,
reverse=stat_sort.startswith("-"), reverse=stat_sort.startswith('-'),
) )
return reactions return reactions
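
Note that when the client asks for descending order with a leading minus (which is what reverse=stat_sort.startswith('-') anticipates), r.stat.get(stat_sort) looks up a key like '-rating' and misses, so every row falls back to created_at. Stripping the sign for the lookup keeps the ordering meaningful; a sketch:

stat_sort = by.get('stat')
if stat_sort:
    key = stat_sort.lstrip('-')   # 'rating', 'reacted', 'commented', ...
    reactions = sorted(
        reactions,
        key=lambda r: r.stat.get(key) or r.created_at,
        reverse=stat_sort.startswith('-'),
    )
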
@ -440,7 +440,7 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
# Sort shouts by the `last_comment` field # Sort shouts by the `last_comment` field
combined_query = ( combined_query = (
union(q1, q2).order_by(desc("last_comment")).limit(limit).offset(offset) union(q1, q2).order_by(desc('last_comment')).limit(limit).offset(offset)
) )
results = session.execute(combined_query).scalars() results = session.execute(combined_query).scalars()
with local_session() as session: with local_session() as session:
@ -453,26 +453,26 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
last_comment, last_comment,
] in results: ] in results:
shout.stat = { shout.stat = {
"viewed": await ViewedStorage.get_shout(shout.slug), 'viewed': await ViewedStorage.get_shout(shout.slug),
"rating": int(likes_stat or 0) - int(dislikes_stat or 0), 'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
"last_comment": last_comment, 'last_comment': last_comment,
} }
shouts.append(shout) shouts.append(shout)
return shouts return shouts
@query.field("load_shouts_followed") @query.field('load_shouts_followed')
@login_required @login_required
async def load_shouts_followed(_, info, limit=50, offset=0) -> List[Shout]: async def load_shouts_followed(_, info, limit=50, offset=0) -> List[Shout]:
user_id = info.context["user_id"] user_id = info.context['user_id']
with local_session() as session: with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first() author = session.query(Author).filter(Author.user == user_id).first()
if author: if author:
try: try:
author_id: int = author.dict()["id"] author_id: int = author.dict()['id']
shouts = await reacted_shouts_updates(author_id, limit, offset) shouts = await reacted_shouts_updates(author_id, limit, offset)
return shouts return shouts
except Exception as error: except Exception as error:

View File

@ -18,22 +18,22 @@ from services.logger import root_logger as logger
def apply_filters(q, filters, author_id=None): def apply_filters(q, filters, author_id=None):
if filters.get("reacted") and author_id: if filters.get('reacted') and author_id:
q.join(Reaction, Reaction.created_by == author_id) q.join(Reaction, Reaction.created_by == author_id)
by_featured = filters.get("featured") by_featured = filters.get('featured')
if by_featured: if by_featured:
q = q.filter(Shout.featured_at.is_not(None)) q = q.filter(Shout.featured_at.is_not(None))
by_layouts = filters.get("layouts") by_layouts = filters.get('layouts')
if by_layouts: if by_layouts:
q = q.filter(Shout.layout.in_(by_layouts)) q = q.filter(Shout.layout.in_(by_layouts))
by_author = filters.get("author") by_author = filters.get('author')
if by_author: if by_author:
q = q.filter(Shout.authors.any(slug=by_author)) q = q.filter(Shout.authors.any(slug=by_author))
by_topic = filters.get("topic") by_topic = filters.get('topic')
if by_topic: if by_topic:
q = q.filter(Shout.topics.any(slug=by_topic)) q = q.filter(Shout.topics.any(slug=by_topic))
by_after = filters.get("after") by_after = filters.get('after')
if by_after: if by_after:
ts = int(by_after) ts = int(by_after)
q = q.filter(Shout.created_at > ts) q = q.filter(Shout.created_at > ts)
@ -41,13 +41,10 @@ def apply_filters(q, filters, author_id=None):
return q return q
@query.field("get_shout") @query.field('get_shout')
async def get_shout(_, _info, slug=None, shout_id=None): async def get_shout(_, _info, slug=None, shout_id=None):
with local_session() as session: with local_session() as session:
q = select(Shout).options( q = select(Shout).options(joinedload(Shout.authors), joinedload(Shout.topics))
joinedload(Shout.authors),
joinedload(Shout.topics),
)
aliased_reaction = aliased(Reaction) aliased_reaction = aliased(Reaction)
q = add_stat_columns(q, aliased_reaction) q = add_stat_columns(q, aliased_reaction)
@ -72,10 +69,10 @@ async def get_shout(_, _info, slug=None, shout_id=None):
] = results ] = results
shout.stat = { shout.stat = {
"viewed": await ViewedStorage.get_shout(shout.slug), 'viewed': await ViewedStorage.get_shout(shout.slug),
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
"rating": int(likes_stat or 0) - int(dislikes_stat or 0), 'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
} }
for author_caption in ( for author_caption in (
@ -102,11 +99,11 @@ async def get_shout(_, _info, slug=None, shout_id=None):
return shout return shout
except Exception: except Exception:
raise HTTPException( raise HTTPException(
status_code=404, detail=f"shout {slug or shout_id} not found" status_code=404, detail=f'shout {slug or shout_id} not found'
) )
@query.field("load_shouts_by") @query.field('load_shouts_by')
async def load_shouts_by(_, _info, options): async def load_shouts_by(_, _info, options):
""" """
:param options: { :param options: {
@ -130,10 +127,7 @@ async def load_shouts_by(_, _info, options):
# base # base
q = ( q = (
select(Shout) select(Shout)
.options( .options(joinedload(Shout.authors), joinedload(Shout.topics))
joinedload(Shout.authors),
joinedload(Shout.topics),
)
.where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None))) .where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
) )
@ -142,7 +136,7 @@ async def load_shouts_by(_, _info, options):
q = add_stat_columns(q, aliased_reaction) q = add_stat_columns(q, aliased_reaction)
# filters # filters
filters = options.get("filters", {}) filters = options.get('filters', {})
q = apply_filters(q, filters) q = apply_filters(q, filters)
# group # group
@ -150,16 +144,16 @@ async def load_shouts_by(_, _info, options):
# order # order
order_by = options.get( order_by = options.get(
"order_by", Shout.featured_at if filters.get("featured") else Shout.published_at 'order_by', Shout.featured_at if filters.get('featured') else Shout.published_at
) )
query_order_by = ( query_order_by = (
desc(order_by) if options.get("order_by_desc", True) else asc(order_by) desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
) )
q = q.order_by(nulls_last(query_order_by)) q = q.order_by(nulls_last(query_order_by))
# limit offset # limit offset
offset = options.get("offset", 0) offset = options.get('offset', 0)
limit = options.get("limit", 10) limit = options.get('limit', 10)
q = q.limit(limit).offset(offset) q = q.limit(limit).offset(offset)
shouts = [] shouts = []
@ -188,27 +182,24 @@ async def load_shouts_by(_, _info, options):
if main_topic: if main_topic:
shout.main_topic = main_topic[0] shout.main_topic = main_topic[0]
shout.stat = { shout.stat = {
"viewed": await ViewedStorage.get_shout(shout.slug), 'viewed': await ViewedStorage.get_shout(shout.slug),
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
"rating": int(likes_stat) - int(dislikes_stat), 'rating': int(likes_stat) - int(dislikes_stat),
} }
shouts.append(shout) shouts.append(shout)
return shouts return shouts
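
Taken together, load_shouts_by is driven by a plain options dict; the keys below are exactly the ones read above, all optional, with illustrative values:

options = {
    'filters': {
        'featured': True,        # only shouts with featured_at set
        'layouts': ['article'],  # Shout.layout IN (...); value is illustrative
        'author': 'author-slug',
        'topic': 'topic-slug',
        'after': 1700000000,     # Shout.created_at > this unix timestamp
    },
    'order_by': 'created_at',    # anything desc()/asc() accepts; defaults to featured_at/published_at
    'order_by_desc': True,
    'limit': 10,
    'offset': 0,
}
# shouts = await load_shouts_by(None, None, options)
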
@query.field("load_shouts_drafts") @query.field('load_shouts_drafts')
@login_required @login_required
async def load_shouts_drafts(_, info): async def load_shouts_drafts(_, info):
user_id = info.context["user_id"] user_id = info.context['user_id']
q = ( q = (
select(Shout) select(Shout)
.options( .options(joinedload(Shout.authors), joinedload(Shout.topics))
joinedload(Shout.authors),
joinedload(Shout.topics),
)
.filter(and_(Shout.deleted_at.is_(None), Shout.published_at.is_(None))) .filter(and_(Shout.deleted_at.is_(None), Shout.published_at.is_(None)))
) )
@ -239,10 +230,10 @@ async def load_shouts_drafts(_, info):
return shouts return shouts
@query.field("load_shouts_feed") @query.field('load_shouts_feed')
@login_required @login_required
async def load_shouts_feed(_, info, options): async def load_shouts_feed(_, info, options):
user_id = info.context["user_id"] user_id = info.context['user_id']
shouts = [] shouts = []
with local_session() as session: with local_session() as session:
@ -267,10 +258,7 @@ async def load_shouts_feed(_, info, options):
q = ( q = (
select(Shout) select(Shout)
.options( .options(joinedload(Shout.authors), joinedload(Shout.topics))
joinedload(Shout.authors),
joinedload(Shout.topics),
)
.where( .where(
and_( and_(
Shout.published_at.is_not(None), Shout.published_at.is_not(None),
@ -282,19 +270,19 @@ async def load_shouts_feed(_, info, options):
aliased_reaction = aliased(Reaction) aliased_reaction = aliased(Reaction)
q = add_stat_columns(q, aliased_reaction) q = add_stat_columns(q, aliased_reaction)
filters = options.get("filters", {}) filters = options.get('filters', {})
q = apply_filters(q, filters, reader.id) q = apply_filters(q, filters, reader.id)
order_by = options.get( order_by = options.get(
"order_by", 'order_by',
Shout.featured_at if filters.get("featured") else Shout.published_at, Shout.featured_at if filters.get('featured') else Shout.published_at,
) )
query_order_by = ( query_order_by = (
desc(order_by) if options.get("order_by_desc", True) else asc(order_by) desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
) )
offset = options.get("offset", 0) offset = options.get('offset', 0)
limit = options.get("limit", 10) limit = options.get('limit', 10)
q = ( q = (
q.group_by(Shout.id) q.group_by(Shout.id)
@ -329,17 +317,17 @@ async def load_shouts_feed(_, info, options):
if main_topic: if main_topic:
shout.main_topic = main_topic[0] shout.main_topic = main_topic[0]
shout.stat = { shout.stat = {
"viewed": await ViewedStorage.get_shout(shout.slug), 'viewed': await ViewedStorage.get_shout(shout.slug),
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
"rating": likes_stat - dislikes_stat, 'rating': likes_stat - dislikes_stat,
} }
shouts.append(shout) shouts.append(shout)
return shouts return shouts
@query.field("load_shouts_search") @query.field('load_shouts_search')
async def load_shouts_search(_, _info, text, limit=50, offset=0): async def load_shouts_search(_, _info, text, limit=50, offset=0):
if isinstance(text, str) and len(text) > 2: if isinstance(text, str) and len(text) > 2:
results = await search_text(text, limit, offset) results = await search_text(text, limit, offset)
@ -349,14 +337,11 @@ async def load_shouts_search(_, _info, text, limit=50, offset=0):
@login_required @login_required
@query.field("load_shouts_unrated") @query.field('load_shouts_unrated')
async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0): async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
q = ( q = (
select(Shout) select(Shout)
.options( .options(selectinload(Shout.authors), selectinload(Shout.topics))
selectinload(Shout.authors),
selectinload(Shout.topics),
)
.outerjoin( .outerjoin(
Reaction, Reaction,
and_( and_(
@ -367,7 +352,7 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
), ),
), ),
) )
.outerjoin(Author, Author.user == bindparam("user_id")) .outerjoin(Author, Author.user == bindparam('user_id'))
.where( .where(
and_( and_(
Shout.deleted_at.is_(None), Shout.deleted_at.is_(None),
@ -384,7 +369,7 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
q = add_stat_columns(q, aliased_reaction) q = add_stat_columns(q, aliased_reaction)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset) q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset)
user_id = info.context.get("user_id") user_id = info.context.get('user_id')
if user_id: if user_id:
with local_session() as session: with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first() author = session.query(Author).filter(Author.user == user_id).first()
@ -404,20 +389,20 @@ async def get_shouts_from_query(q, author_id=None):
likes_stat, likes_stat,
dislikes_stat, dislikes_stat,
last_comment, last_comment,
] in session.execute(q, {"author_id": author_id}).unique(): ] in session.execute(q, {'author_id': author_id}).unique():
shouts.append(shout) shouts.append(shout)
shout.stat = { shout.stat = {
"viewed": await ViewedStorage.get_shout(shout_slug=shout.slug), 'viewed': await ViewedStorage.get_shout(shout_slug=shout.slug),
"reacted": reacted_stat, 'reacted': reacted_stat,
"commented": commented_stat, 'commented': commented_stat,
"rating": int(likes_stat or 0) - int(dislikes_stat or 0), 'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
"last_comment": last_comment, 'last_comment': last_comment,
} }
return shouts return shouts
@query.field("load_shouts_random_top") @query.field('load_shouts_random_top')
async def load_shouts_random_top(_, _info, options): async def load_shouts_random_top(_, _info, options):
""" """
:param _ :param _
@ -440,7 +425,7 @@ async def load_shouts_random_top(_, _info, options):
select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None)) select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))
) )
subquery = apply_filters(subquery, options.get("filters", {})) subquery = apply_filters(subquery, options.get('filters', {}))
subquery = subquery.group_by(Shout.id).order_by( subquery = subquery.group_by(Shout.id).order_by(
desc( desc(
func.sum( func.sum(
@ -455,36 +440,33 @@ async def load_shouts_random_top(_, _info, options):
) )
) )
random_limit = options.get("random_limit") random_limit = options.get('random_limit')
if random_limit: if random_limit:
subquery = subquery.limit(random_limit) subquery = subquery.limit(random_limit)
q = ( q = (
select(Shout) select(Shout)
.options( .options(joinedload(Shout.authors), joinedload(Shout.topics))
joinedload(Shout.authors),
joinedload(Shout.topics),
)
.where(Shout.id.in_(subquery)) .where(Shout.id.in_(subquery))
) )
aliased_reaction = aliased(Reaction) aliased_reaction = aliased(Reaction)
q = add_stat_columns(q, aliased_reaction) q = add_stat_columns(q, aliased_reaction)
limit = options.get("limit", 10) limit = options.get('limit', 10)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit) q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
return await get_shouts_from_query(q) return await get_shouts_from_query(q)
@query.field("load_shouts_random_topic") @query.field('load_shouts_random_topic')
async def load_shouts_random_topic(_, info, limit: int = 10): async def load_shouts_random_topic(_, info, limit: int = 10):
topic = get_random_topic() topic = get_random_topic()
if topic: if topic:
shouts = fetch_shouts_by_topic(topic, limit) shouts = fetch_shouts_by_topic(topic, limit)
if shouts: if shouts:
return {"topic": topic, "shouts": shouts} return {'topic': topic, 'shouts': shouts}
return { return {
"error": "failed to get random topic after few retries", 'error': 'failed to get random topic after few retries',
shouts: [], shouts: [],
topic: {}, topic: {},
} }
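
A small detail in this error branch: shouts and topic are bare names rather than string keys, so the dict literal uses the local values as keys (a list is unhashable, and shouts may even be unbound here). The presumably intended form is:

return {
    'error': 'failed to get random topic after few retries',
    'shouts': [],
    'topic': {},
}
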
@ -493,10 +475,7 @@ async def load_shouts_random_topic(_, info, limit: int = 10):
def fetch_shouts_by_topic(topic, limit): def fetch_shouts_by_topic(topic, limit):
q = ( q = (
select(Shout) select(Shout)
.options( .options(joinedload(Shout.authors), joinedload(Shout.topics))
joinedload(Shout.authors),
joinedload(Shout.topics),
)
.filter( .filter(
and_( and_(
Shout.deleted_at.is_(None), Shout.deleted_at.is_(None),

View File

@ -17,15 +17,15 @@ def add_topic_stat_columns(q):
q = ( q = (
q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic) q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
.add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat")) .add_columns(func.count(distinct(ShoutTopic.shout)).label('shouts_stat'))
.outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout) .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
.add_columns( .add_columns(
func.count(distinct(aliased_shout_author.author)).label("authors_stat") func.count(distinct(aliased_shout_author.author)).label('authors_stat')
) )
.outerjoin(aliased_topic_follower) .outerjoin(aliased_topic_follower)
.add_columns( .add_columns(
func.count(distinct(aliased_topic_follower.follower)).label( func.count(distinct(aliased_topic_follower.follower)).label(
"followers_stat" 'followers_stat'
) )
) )
) )
@ -40,17 +40,17 @@ async def get_topics_from_query(q):
with local_session() as session: with local_session() as session:
for [topic, shouts_stat, authors_stat, followers_stat] in session.execute(q): for [topic, shouts_stat, authors_stat, followers_stat] in session.execute(q):
topic.stat = { topic.stat = {
"shouts": shouts_stat, 'shouts': shouts_stat,
"authors": authors_stat, 'authors': authors_stat,
"followers": followers_stat, 'followers': followers_stat,
"viewed": await ViewedStorage.get_topic(topic.slug), 'viewed': await ViewedStorage.get_topic(topic.slug),
} }
topics.append(topic) topics.append(topic)
return topics return topics
@query.field("get_topics_all") @query.field('get_topics_all')
async def get_topics_all(_, _info): async def get_topics_all(_, _info):
q = select(Topic) q = select(Topic)
q = add_topic_stat_columns(q) q = add_topic_stat_columns(q)
@ -66,7 +66,7 @@ async def topics_followed_by(author_id):
return await get_topics_from_query(q) return await get_topics_from_query(q)
@query.field("get_topics_by_community") @query.field('get_topics_by_community')
async def get_topics_by_community(_, _info, community_id: int): async def get_topics_by_community(_, _info, community_id: int):
q = select(Topic).where(Topic.community == community_id) q = select(Topic).where(Topic.community == community_id)
q = add_topic_stat_columns(q) q = add_topic_stat_columns(q)
@ -74,8 +74,8 @@ async def get_topics_by_community(_, _info, community_id: int):
return await get_topics_from_query(q) return await get_topics_from_query(q)
@query.field("get_topics_by_author") @query.field('get_topics_by_author')
async def get_topics_by_author(_, _info, author_id=None, slug="", user=""): async def get_topics_by_author(_, _info, author_id=None, slug='', user=''):
q = select(Topic) q = select(Topic)
q = add_topic_stat_columns(q) q = add_topic_stat_columns(q)
if author_id: if author_id:
@ -88,7 +88,7 @@ async def get_topics_by_author(_, _info, author_id=None, slug="", user=""):
return await get_topics_from_query(q) return await get_topics_from_query(q)
@query.field("get_topic") @query.field('get_topic')
async def get_topic(_, _info, slug): async def get_topic(_, _info, slug):
q = select(Topic).where(Topic.slug == slug) q = select(Topic).where(Topic.slug == slug)
q = add_topic_stat_columns(q) q = add_topic_stat_columns(q)
@ -98,7 +98,7 @@ async def get_topic(_, _info, slug):
return topics[0] return topics[0]
@mutation.field("create_topic") @mutation.field('create_topic')
@login_required @login_required
async def create_topic(_, _info, inp): async def create_topic(_, _info, inp):
with local_session() as session: with local_session() as session:
@ -108,43 +108,43 @@ async def create_topic(_, _info, inp):
session.add(new_topic) session.add(new_topic)
session.commit() session.commit()
return {"topic": new_topic} return {'topic': new_topic}
@mutation.field("update_topic") @mutation.field('update_topic')
@login_required @login_required
async def update_topic(_, _info, inp): async def update_topic(_, _info, inp):
slug = inp["slug"] slug = inp['slug']
with local_session() as session: with local_session() as session:
topic = session.query(Topic).filter(Topic.slug == slug).first() topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic: if not topic:
return {"error": "topic not found"} return {'error': 'topic not found'}
else: else:
Topic.update(topic, inp) Topic.update(topic, inp)
session.add(topic) session.add(topic)
session.commit() session.commit()
return {"topic": topic} return {'topic': topic}
@mutation.field("delete_topic") @mutation.field('delete_topic')
@login_required @login_required
async def delete_topic(_, info, slug: str): async def delete_topic(_, info, slug: str):
user_id = info.context["user_id"] user_id = info.context['user_id']
with local_session() as session: with local_session() as session:
t: Topic = session.query(Topic).filter(Topic.slug == slug).first() t: Topic = session.query(Topic).filter(Topic.slug == slug).first()
if not t: if not t:
return {"error": "invalid topic slug"} return {'error': 'invalid topic slug'}
author = session.query(Author).filter(Author.user == user_id).first() author = session.query(Author).filter(Author.user == user_id).first()
if author: if author:
if t.created_by != author.id: if t.created_by != author.id:
return {"error": "access denied"} return {'error': 'access denied'}
session.delete(t) session.delete(t)
session.commit() session.commit()
return {} return {}
return {"error": "access denied"} return {'error': 'access denied'}
def topic_follow(follower_id, slug): def topic_follow(follower_id, slug):
@ -175,7 +175,7 @@ def topic_unfollow(follower_id, slug):
return False return False
@query.field("get_topics_random") @query.field('get_topics_random')
async def get_topics_random(_, info, amount=12): async def get_topics_random(_, info, amount=12):
q = select(Topic) q = select(Topic)
q = q.join(ShoutTopic) q = q.join(ShoutTopic)

View File

@ -2,12 +2,12 @@ from granian.constants import Interfaces
from granian.server import Granian from granian.server import Granian
from services.logger import root_logger as logger from services.logger import root_logger as logger
if __name__ == "__main__": if __name__ == '__main__':
logger.info("started") logger.info('started')
granian_instance = Granian( granian_instance = Granian(
"main:app", 'main:app',
address="0.0.0.0", # noqa S104 address='0.0.0.0', # noqa S104
port=8000, port=8000,
threads=4, threads=4,
websockets=False, websockets=False,

View File

@ -9,31 +9,31 @@ from services.logger import root_logger as logger
async def request_data(gql, headers=None): async def request_data(gql, headers=None):
if headers is None: if headers is None:
headers = {"Content-Type": "application/json"} headers = {'Content-Type': 'application/json'}
try: try:
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as client:
response = await client.post(AUTH_URL, json=gql, headers=headers) response = await client.post(AUTH_URL, json=gql, headers=headers)
if response.status_code == 200: if response.status_code == 200:
data = response.json() data = response.json()
errors = data.get("errors") errors = data.get('errors')
if errors: if errors:
logger.error(f"HTTP Errors: {errors}") logger.error(f'HTTP Errors: {errors}')
else: else:
return data return data
except Exception as e: except Exception as e:
# Handling and logging exceptions during authentication check # Handling and logging exceptions during authentication check
logger.error(f"request_data error: {e}") logger.error(f'request_data error: {e}')
return None return None
# Create a cache region with a 30-second TTL # Create a cache region with a 30-second TTL
region = make_region().configure("dogpile.cache.memory", expiration_time=30) region = make_region().configure('dogpile.cache.memory', expiration_time=30)
# Key function for caching # Key function for caching
def auth_cache_key(req): def auth_cache_key(req):
token = req.headers.get("Authorization") token = req.headers.get('Authorization')
return f"auth_token:{token}" return f'auth_token:{token}'
# Decorator that caches the token validation request # Decorator that caches the token validation request
@ -55,32 +55,27 @@ def cache_auth_request(f):
# Authentication check function, modified to use caching # Authentication check function, modified to use caching
@cache_auth_request @cache_auth_request
async def check_auth(req): async def check_auth(req):
token = req.headers.get("Authorization") token = req.headers.get('Authorization')
user_id = "" user_id = ''
user_roles = [] user_roles = []
if token: if token:
try: try:
# Logging the authentication token # Logging the authentication token
logger.debug(f"{token}") logger.debug(f'{token}')
query_name = "validate_jwt_token" query_name = 'validate_jwt_token'
operation = "ValidateToken" operation = 'ValidateToken'
variables = { variables = {'params': {'token_type': 'access_token', 'token': token}}
"params": {
"token_type": "access_token",
"token": token,
}
}
gql = { gql = {
"query": f"query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}", 'query': f'query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}',
"variables": variables, 'variables': variables,
"operationName": operation, 'operationName': operation,
} }
data = await request_data(gql) data = await request_data(gql)
if data: if data:
user_data = data.get("data", {}).get(query_name, {}).get("claims", {}) user_data = data.get('data', {}).get(query_name, {}).get('claims', {})
user_id = user_data.get("sub") user_id = user_data.get('sub')
user_roles = user_data.get("allowed_roles") user_roles = user_data.get('allowed_roles')
except Exception as e: except Exception as e:
import traceback import traceback
@ -92,41 +87,41 @@ async def check_auth(req):
async def add_user_role(user_id): async def add_user_role(user_id):
logger.info(f"add author role for user_id: {user_id}") logger.info(f'add author role for user_id: {user_id}')
query_name = "_update_user" query_name = '_update_user'
operation = "UpdateUserRoles" operation = 'UpdateUserRoles'
headers = { headers = {
"Content-Type": "application/json", 'Content-Type': 'application/json',
"x-authorizer-admin-secret": ADMIN_SECRET, 'x-authorizer-admin-secret': ADMIN_SECRET,
} }
variables = {"params": {"roles": "author, reader", "id": user_id}} variables = {'params': {'roles': 'author, reader', 'id': user_id}}
gql = { gql = {
"query": f"mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}", 'query': f'mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}',
"variables": variables, 'variables': variables,
"operationName": operation, 'operationName': operation,
} }
data = await request_data(gql, headers) data = await request_data(gql, headers)
if data: if data:
user_id = data.get("data", {}).get(query_name, {}).get("id") user_id = data.get('data', {}).get(query_name, {}).get('id')
return user_id return user_id
def login_required(f): def login_required(f):
@wraps(f) @wraps(f)
async def decorated_function(*args, **kwargs): async def decorated_function(*args, **kwargs):
user_id = "" user_id = ''
user_roles = [] user_roles = []
info = args[1] info = args[1]
try: try:
req = info.context.get("request") req = info.context.get('request')
[user_id, user_roles] = await check_auth(req) [user_id, user_roles] = await check_auth(req)
except Exception as e: except Exception as e:
logger.error(f"Failed to authenticate user: {e}") logger.error(f'Failed to authenticate user: {e}')
if user_id: if user_id:
logger.info(f" got {user_id} roles: {user_roles}") logger.info(f' got {user_id} roles: {user_roles}')
info.context["user_id"] = user_id.strip() info.context['user_id'] = user_id.strip()
info.context["roles"] = user_roles info.context['roles'] = user_roles
return await f(*args, **kwargs) return await f(*args, **kwargs)
return decorated_function return decorated_function
@ -135,7 +130,7 @@ def login_required(f):
def auth_request(f): def auth_request(f):
@wraps(f) @wraps(f)
async def decorated_function(*args, **kwargs): async def decorated_function(*args, **kwargs):
user_id = "" user_id = ''
user_roles = [] user_roles = []
req = {} req = {}
try: try:
@ -145,11 +140,11 @@ def auth_request(f):
import traceback import traceback
traceback.print_exc() traceback.print_exc()
logger.error(f"Failed to authenticate user: {args} {e}") logger.error(f'Failed to authenticate user: {args} {e}')
if user_id: if user_id:
logger.info(f" got {user_id} roles: {user_roles}") logger.info(f' got {user_id} roles: {user_roles}')
req["user_id"] = user_id.strip() req['user_id'] = user_id.strip()
req["roles"] = user_roles req['roles'] = user_roles
return await f(*args, **kwargs) return await f(*args, **kwargs)
return decorated_function return decorated_function

View File

@ -14,11 +14,11 @@ from services.logger import root_logger as logger
from settings import DB_URL from settings import DB_URL
# Create a cache region with a 300-second TTL # Create a cache region with a 300-second TTL
cache_region = make_region().configure("dogpile.cache.memory", expiration_time=300) cache_region = make_region().configure('dogpile.cache.memory', expiration_time=300)
# SQLAlchemy database connection # SQLAlchemy database connection
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20) engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
T = TypeVar("T") T = TypeVar('T')
REGISTRY: Dict[str, type] = {} REGISTRY: Dict[str, type] = {}
Base = declarative_base() Base = declarative_base()
@ -29,9 +29,9 @@ def profile_sqlalchemy_queries(threshold=0.1):
def wrapper(*args, **kw): def wrapper(*args, **kw):
elapsed, stat_loader, result = _profile(fn, threshold, *args, **kw) elapsed, stat_loader, result = _profile(fn, threshold, *args, **kw)
if elapsed is not None: if elapsed is not None:
print(f"Query took {elapsed:.3f} seconds to execute.") print(f'Query took {elapsed:.3f} seconds to execute.')
stats = stat_loader() stats = stat_loader()
stats.sort_stats("cumulative") stats.sort_stats('cumulative')
stats.print_stats() stats.print_stats()
return result return result
@ -52,14 +52,14 @@ def _profile(fn, threshold, *args, **kw):
# Interceptors for the SQLAlchemy query log # Interceptors for the SQLAlchemy query log
@event.listens_for(Engine, "before_cursor_execute") @event.listens_for(Engine, 'before_cursor_execute')
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
conn._query_start_time = time.time() conn._query_start_time = time.time()
@event.listens_for(Engine, "after_cursor_execute") @event.listens_for(Engine, 'after_cursor_execute')
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany): def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
if hasattr(conn, "_query_start_time"): if hasattr(conn, '_query_start_time'):
elapsed = time.time() - conn._query_start_time elapsed = time.time() - conn._query_start_time
del conn._query_start_time del conn._query_start_time
if elapsed > 0.2: # Adjust threshold as needed if elapsed > 0.2: # Adjust threshold as needed
@ -71,7 +71,7 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema
profiler(statement, parameters) profiler(statement, parameters)
def local_session(src=""): def local_session(src=''):
return Session(bind=engine, expire_on_commit=False) return Session(bind=engine, expire_on_commit=False)
@ -82,7 +82,7 @@ class Base(declarative_base()):
__init__: Callable __init__: Callable
__allow_unmapped__ = True __allow_unmapped__ = True
__abstract__ = True __abstract__ = True
__table_args__ = {"extend_existing": True} __table_args__ = {'extend_existing': True}
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)
@ -91,12 +91,12 @@ class Base(declarative_base()):
def dict(self) -> Dict[str, Any]: def dict(self) -> Dict[str, Any]:
column_names = self.__table__.columns.keys() column_names = self.__table__.columns.keys()
if "_sa_instance_state" in column_names: if '_sa_instance_state' in column_names:
column_names.remove("_sa_instance_state") column_names.remove('_sa_instance_state')
try: try:
return {c: getattr(self, c) for c in column_names} return {c: getattr(self, c) for c in column_names}
except Exception as e: except Exception as e:
logger.error(f"Error occurred while converting object to dictionary: {e}") logger.error(f'Error occurred while converting object to dictionary: {e}')
return {} return {}
def update(self, values: Dict[str, Any]) -> None: def update(self, values: Dict[str, Any]) -> None:

View File

@ -29,19 +29,19 @@ def apply_diff(original, diff):
The modified string. The modified string.
""" """
result = [] result = []
pattern = re.compile(r"^(\+|-) ") pattern = re.compile(r'^(\+|-) ')
for line in diff: for line in diff:
match = pattern.match(line) match = pattern.match(line)
if match: if match:
op = match.group(1) op = match.group(1)
content = line[2:] content = line[2:]
if op == "+": if op == '+':
result.append(content) result.append(content)
elif op == "-": elif op == '-':
# Ignore deleted lines # Ignore deleted lines
pass pass
else: else:
result.append(line) result.append(line)
return " ".join(result) return ' '.join(result)

View File

@ -12,48 +12,48 @@ from services.rediscache import redis
from services.viewed import ViewedStorage from services.viewed import ViewedStorage
@event.listens_for(Author, "after_insert") @event.listens_for(Author, 'after_insert')
@event.listens_for(Author, "after_update") @event.listens_for(Author, 'after_update')
def after_author_update(mapper, connection, author: Author): def after_author_update(mapper, connection, author: Author):
redis_key = f"user:{author.user}:author" redis_key = f'user:{author.user}:author'
asyncio.create_task( asyncio.create_task(
redis.execute( redis.execute(
"set", 'set',
redis_key, redis_key,
json.dumps( json.dumps(
{ {
"id": author.id, 'id': author.id,
"name": author.name, 'name': author.name,
"slug": author.slug, 'slug': author.slug,
"pic": author.pic, 'pic': author.pic,
} }
), ),
) )
) )
@event.listens_for(TopicFollower, "after_insert") @event.listens_for(TopicFollower, 'after_insert')
def after_topic_follower_insert(mapper, connection, target: TopicFollower): def after_topic_follower_insert(mapper, connection, target: TopicFollower):
asyncio.create_task( asyncio.create_task(
handle_topic_follower_change(connection, target.topic, target.follower, True) handle_topic_follower_change(connection, target.topic, target.follower, True)
) )
@event.listens_for(TopicFollower, "after_delete") @event.listens_for(TopicFollower, 'after_delete')
def after_topic_follower_delete(mapper, connection, target: TopicFollower): def after_topic_follower_delete(mapper, connection, target: TopicFollower):
asyncio.create_task( asyncio.create_task(
handle_topic_follower_change(connection, target.topic, target.follower, False) handle_topic_follower_change(connection, target.topic, target.follower, False)
) )
@event.listens_for(AuthorFollower, "after_insert") @event.listens_for(AuthorFollower, 'after_insert')
def after_author_follower_insert(mapper, connection, target: AuthorFollower): def after_author_follower_insert(mapper, connection, target: AuthorFollower):
asyncio.create_task( asyncio.create_task(
handle_author_follower_change(connection, target.author, target.follower, True) handle_author_follower_change(connection, target.author, target.follower, True)
) )
@event.listens_for(AuthorFollower, "after_delete") @event.listens_for(AuthorFollower, 'after_delete')
def after_author_follower_delete(mapper, connection, target: AuthorFollower): def after_author_follower_delete(mapper, connection, target: AuthorFollower):
asyncio.create_task( asyncio.create_task(
handle_author_follower_change(connection, target.author, target.follower, False) handle_author_follower_change(connection, target.author, target.follower, False)
@ -63,26 +63,26 @@ def after_author_follower_delete(mapper, connection, target: AuthorFollower):
async def update_follows_for_user( async def update_follows_for_user(
connection, user_id, entity_type, entity: dict, is_insert connection, user_id, entity_type, entity: dict, is_insert
): ):
redis_key = f"user:{user_id}:follows" redis_key = f'user:{user_id}:follows'
follows_str = await redis.get(redis_key) follows_str = await redis.get(redis_key)
if follows_str: if follows_str:
follows = json.loads(follows_str) follows = json.loads(follows_str)
else: else:
follows = { follows = {
"topics": [], 'topics': [],
"authors": [], 'authors': [],
"communities": [ 'communities': [
{"slug": "discours", "name": "Дискурс", "id": 1, "desc": ""} {'slug': 'discours', 'name': 'Дискурс', 'id': 1, 'desc': ''}
], ],
} }
if is_insert: if is_insert:
follows[f"{entity_type}s"].append(entity) follows[f'{entity_type}s'].append(entity)
else: else:
# Remove the entity from follows # Remove the entity from follows
follows[f"{entity_type}s"] = [ follows[f'{entity_type}s'] = [
e for e in follows[f"{entity_type}s"] if e["id"] != entity["id"] e for e in follows[f'{entity_type}s'] if e['id'] != entity['id']
] ]
await redis.execute("set", redis_key, json.dumps(follows)) await redis.execute('set', redis_key, json.dumps(follows))
async def handle_author_follower_change(connection, author_id, follower_id, is_insert): async def handle_author_follower_change(connection, author_id, follower_id, is_insert):
@ -93,17 +93,17 @@ async def handle_author_follower_change(connection, author_id, follower_id, is_i
q q
).first() ).first()
author.stat = { author.stat = {
"shouts": shouts_stat, 'shouts': shouts_stat,
"viewed": await ViewedStorage.get_author(author.slug), 'viewed': await ViewedStorage.get_author(author.slug),
"followers": followers_stat, 'followers': followers_stat,
"followings": followings_stat, 'followings': followings_stat,
} }
follower = await conn.execute( follower = await conn.execute(
select(Author).filter(Author.id == follower_id) select(Author).filter(Author.id == follower_id)
).first() ).first()
if follower and author: if follower and author:
await update_follows_for_user( await update_follows_for_user(
connection, follower.user, "author", author.dict(), is_insert connection, follower.user, 'author', author.dict(), is_insert
) )
@ -115,17 +115,17 @@ async def handle_topic_follower_change(connection, topic_id, follower_id, is_ins
q q
).first() ).first()
topic.stat = { topic.stat = {
"shouts": shouts_stat, 'shouts': shouts_stat,
"authors": authors_stat, 'authors': authors_stat,
"followers": followers_stat, 'followers': followers_stat,
"viewed": await ViewedStorage.get_topic(topic.slug), 'viewed': await ViewedStorage.get_topic(topic.slug),
} }
follower = connection.execute( follower = connection.execute(
select(Author).filter(Author.id == follower_id) select(Author).filter(Author.id == follower_id)
).first() ).first()
if follower and topic: if follower and topic:
await update_follows_for_user( await update_follows_for_user(
connection, follower.user, "topic", topic.dict(), is_insert connection, follower.user, 'topic', topic.dict(), is_insert
) )
@ -149,24 +149,24 @@ class FollowsCached:
@staticmethod @staticmethod
async def update_author_cache(author: Author): async def update_author_cache(author: Author):
redis_key = f"user:{author.user}:author" redis_key = f'user:{author.user}:author'
if isinstance(author, Author): if isinstance(author, Author):
await redis.execute( await redis.execute(
"set", 'set',
redis_key, redis_key,
json.dumps( json.dumps(
{ {
"id": author.id, 'id': author.id,
"name": author.name, 'name': author.name,
"slug": author.slug, 'slug': author.slug,
"pic": author.pic, 'pic': author.pic,
} }
), ),
) )
follows = await get_author_follows(None, None, user=author.user) follows = await get_author_follows(None, None, user=author.user)
if isinstance(follows, dict): if isinstance(follows, dict):
redis_key = f"user:{author.user}:follows" redis_key = f'user:{author.user}:follows'
await redis.execute("set", redis_key, json.dumps(follows)) await redis.execute('set', redis_key, json.dumps(follows))
@staticmethod @staticmethod
async def worker(): async def worker():
@ -178,7 +178,7 @@ class FollowsCached:
await asyncio.sleep(10 * 60 * 60) await asyncio.sleep(10 * 60 * 60)
except asyncio.CancelledError: except asyncio.CancelledError:
# Handle cancellation due to SIGTERM # Handle cancellation due to SIGTERM
logger.info("Cancellation requested. Cleaning up...") logger.info('Cancellation requested. Cleaning up...')
# Perform any necessary cleanup before exiting the loop # Perform any necessary cleanup before exiting the loop
break break
except Exception as exc: except Exception as exc:

View File

@ -3,45 +3,45 @@ import colorlog
# Define the color scheme # Define the color scheme
color_scheme = { color_scheme = {
"DEBUG": "light_black", 'DEBUG': 'light_black',
"INFO": "green", 'INFO': 'green',
"WARNING": "yellow", 'WARNING': 'yellow',
"ERROR": "red", 'ERROR': 'red',
"CRITICAL": "red,bg_white", 'CRITICAL': 'red,bg_white',
} }
# Define secondary log colors # Define secondary log colors
secondary_colors = { secondary_colors = {
"log_name": {"DEBUG": "blue"}, 'log_name': {'DEBUG': 'blue'},
"asctime": {"DEBUG": "cyan"}, 'asctime': {'DEBUG': 'cyan'},
"process": {"DEBUG": "purple"}, 'process': {'DEBUG': 'purple'},
"module": {"DEBUG": "light_black,bg_blue"}, 'module': {'DEBUG': 'light_black,bg_blue'},
} }
# Define the log format string # Define the log format string
fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s]%(reset)s %(white)s%(message)s" fmt_string = '%(log_color)s%(levelname)s: %(log_color)s[%(module)s]%(reset)s %(white)s%(message)s'
# Define formatting configuration # Define formatting configuration
fmt_config = { fmt_config = {
"log_colors": color_scheme, 'log_colors': color_scheme,
"secondary_log_colors": secondary_colors, 'secondary_log_colors': secondary_colors,
"style": "%", 'style': '%',
"reset": True, 'reset': True,
} }
class MultilineColoredFormatter(colorlog.ColoredFormatter): class MultilineColoredFormatter(colorlog.ColoredFormatter):
def format(self, record): def format(self, record):
# Check if the message is multiline # Check if the message is multiline
if record.getMessage() and "\n" in record.getMessage(): if record.getMessage() and '\n' in record.getMessage():
# Split the message into lines # Split the message into lines
lines = record.getMessage().split("\n") lines = record.getMessage().split('\n')
formatted_lines = [] formatted_lines = []
for line in lines: for line in lines:
# Format each line with the provided format # Format each line with the provided format
formatted_lines.append(super().format(record)) formatted_lines.append(super().format(record))
# Join the formatted lines # Join the formatted lines
return "\n".join(formatted_lines) return '\n'.join(formatted_lines)
else: else:
# If not multiline or no message, use the default formatting # If not multiline or no message, use the default formatting
return super().format(record) return super().format(record)
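
As written, each pass of the multiline branch calls super().format(record) on the unchanged record, so every output line repeats the full multiline message. If per-line formatting is the goal, one way to get it is to substitute record.msg per line; a sketch:

class MultilineColoredFormatter(colorlog.ColoredFormatter):
    def format(self, record):
        message = record.getMessage()
        if message and '\n' in message:
            original_msg, original_args = record.msg, record.args
            formatted_lines = []
            for line in message.split('\n'):
                record.msg, record.args = line, None   # format one line at a time
                formatted_lines.append(super().format(record))
            record.msg, record.args = original_msg, original_args
            return '\n'.join(formatted_lines)
        return super().format(record)
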
@ -55,7 +55,7 @@ stream = logging.StreamHandler()
stream.setFormatter(formatter) stream.setFormatter(formatter)
def get_colorful_logger(name="main"): def get_colorful_logger(name='main'):
# Create and configure the logger # Create and configure the logger
logger = logging.getLogger(name) logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)

View File

@ -3,43 +3,43 @@ import json
from services.rediscache import redis from services.rediscache import redis
async def notify_reaction(reaction, action: str = "create"): async def notify_reaction(reaction, action: str = 'create'):
channel_name = "reaction" channel_name = 'reaction'
data = {"payload": reaction, "action": action} data = {'payload': reaction, 'action': action}
try: try:
await redis.publish(channel_name, json.dumps(data)) await redis.publish(channel_name, json.dumps(data))
except Exception as e: except Exception as e:
print(f"[services.notify] Failed to publish to channel {channel_name}: {e}") print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
async def notify_shout(shout, action: str = "update"): async def notify_shout(shout, action: str = 'update'):
channel_name = "shout" channel_name = 'shout'
data = {"payload": shout, "action": action} data = {'payload': shout, 'action': action}
try: try:
await redis.publish(channel_name, json.dumps(data)) await redis.publish(channel_name, json.dumps(data))
except Exception as e: except Exception as e:
print(f"[services.notify] Failed to publish to channel {channel_name}: {e}") print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
async def notify_follower(follower: dict, author_id: int, action: str = "follow"): async def notify_follower(follower: dict, author_id: int, action: str = 'follow'):
channel_name = f"follower:{author_id}" channel_name = f'follower:{author_id}'
try: try:
# Simplify dictionary before publishing # Simplify dictionary before publishing
simplified_follower = {k: follower[k] for k in ["id", "name", "slug", "pic"]} simplified_follower = {k: follower[k] for k in ['id', 'name', 'slug', 'pic']}
data = {"payload": simplified_follower, "action": action} data = {'payload': simplified_follower, 'action': action}
# Convert data to JSON string # Convert data to JSON string
json_data = json.dumps(data) json_data = json.dumps(data)
# Ensure the data is not empty before publishing # Ensure the data is not empty before publishing
if not json_data: if not json_data:
raise ValueError("Empty data to publish.") raise ValueError('Empty data to publish.')
# Use the 'await' keyword when publishing # Use the 'await' keyword when publishing
await redis.publish(channel_name, json_data) await redis.publish(channel_name, json_data)
except Exception as e: except Exception as e:
# Log the error and re-raise it # Log the error and re-raise it
print(f"[services.notify] Failed to publish to channel {channel_name}: {e}") print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
raise raise

View File

@ -20,11 +20,11 @@ class RedisCache:
async def execute(self, command, *args, **kwargs): async def execute(self, command, *args, **kwargs):
if self._client: if self._client:
try: try:
logger.debug(f"{command} {args} {kwargs}") logger.debug(f'{command} {args} {kwargs}')
for arg in args: for arg in args:
if isinstance(arg, dict): if isinstance(arg, dict):
if arg.get("_sa_instance_state"): if arg.get('_sa_instance_state'):
del arg["_sa_instance_state"] del arg['_sa_instance_state']
r = await self._client.execute_command(command, *args, **kwargs) r = await self._client.execute_command(command, *args, **kwargs)
logger.debug(type(r)) logger.debug(type(r))
logger.debug(r) logger.debug(r)
@ -55,4 +55,4 @@ class RedisCache:
redis = RedisCache() redis = RedisCache()
__all__ = ["redis"] __all__ = ['redis']
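RedisCache.execute above logs the command, strips SQLAlchemy's _sa_instance_state from any dict arguments and forwards everything to execute_command. A minimal sketch of calling it the way this commit does (SETEX with a TTL), assuming the client was connected during application startup; key and value are placeholders:

import json
from services.rediscache import redis

async def cache_example():
    # Same pattern the search service uses: SETEX key ttl json_value
    await redis.execute('SETEX', 'search:example', 86400, json.dumps([]))
    cached = await redis.execute('GET', 'search:example')
    print(cached)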

View File

@@ -7,69 +7,60 @@ from opensearchpy import OpenSearch
from services.logger import root_logger as logger from services.logger import root_logger as logger
from services.rediscache import redis from services.rediscache import redis
ELASTIC_HOST = os.environ.get("ELASTIC_HOST", "").replace("https://", "") ELASTIC_HOST = os.environ.get('ELASTIC_HOST', '').replace('https://', '')
ELASTIC_USER = os.environ.get("ELASTIC_USER", "") ELASTIC_USER = os.environ.get('ELASTIC_USER', '')
ELASTIC_PASSWORD = os.environ.get("ELASTIC_PASSWORD", "") ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '')
ELASTIC_PORT = os.environ.get("ELASTIC_PORT", 9200) ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200)
ELASTIC_AUTH = f"{ELASTIC_USER}:{ELASTIC_PASSWORD}" if ELASTIC_USER else "" ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else ''
ELASTIC_URL = os.environ.get( ELASTIC_URL = os.environ.get(
"ELASTIC_URL", f"https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}" 'ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}'
) )
REDIS_TTL = 86400 # 1 day in seconds REDIS_TTL = 86400 # 1 day in seconds
index_settings = { index_settings = {
"settings": { 'settings': {
"index": { 'index': {'number_of_shards': 1, 'auto_expand_replicas': '0-all'},
"number_of_shards": 1, 'analysis': {
"auto_expand_replicas": "0-all", 'analyzer': {
}, 'ru': {
"analysis": { 'tokenizer': 'standard',
"analyzer": { 'filter': ['lowercase', 'ru_stop', 'ru_stemmer'],
"ru": {
"tokenizer": "standard",
"filter": ["lowercase", "ru_stop", "ru_stemmer"],
} }
}, },
"filter": { 'filter': {
"ru_stemmer": { 'ru_stemmer': {'type': 'stemmer', 'language': 'russian'},
"type": "stemmer", 'ru_stop': {'type': 'stop', 'stopwords': '_russian_'},
"language": "russian",
},
"ru_stop": {
"type": "stop",
"stopwords": "_russian_",
},
}, },
}, },
}, },
"mappings": { 'mappings': {
"properties": { 'properties': {
"body": {"type": "text", "analyzer": "ru"}, 'body': {'type': 'text', 'analyzer': 'ru'},
"title": {"type": "text", "analyzer": "ru"}, 'title': {'type': 'text', 'analyzer': 'ru'},
# 'author': {'type': 'text'}, # 'author': {'type': 'text'},
} }
}, },
} }
expected_mapping = index_settings["mappings"] expected_mapping = index_settings['mappings']
class SearchService: class SearchService:
def __init__(self, index_name="search_index"): def __init__(self, index_name='search_index'):
self.index_name = index_name self.index_name = index_name
self.manager = Manager() self.manager = Manager()
self.client = None self.client = None
# Используем менеджер для создания Lock и Value # Используем менеджер для создания Lock и Value
self.lock = self.manager.Lock() self.lock = self.manager.Lock()
self.initialized_flag = self.manager.Value("i", 0) self.initialized_flag = self.manager.Value('i', 0)
# Only initialize the instance if it's not already initialized # Only initialize the instance if it's not already initialized
if not self.initialized_flag.value and ELASTIC_HOST: if not self.initialized_flag.value and ELASTIC_HOST:
try: try:
self.client = OpenSearch( self.client = OpenSearch(
hosts=[{"host": ELASTIC_HOST, "port": ELASTIC_PORT}], hosts=[{'host': ELASTIC_HOST, 'port': ELASTIC_PORT}],
http_compress=True, http_compress=True,
http_auth=(ELASTIC_USER, ELASTIC_PASSWORD), http_auth=(ELASTIC_USER, ELASTIC_PASSWORD),
use_ssl=True, use_ssl=True,
@@ -78,34 +69,34 @@ class SearchService:
ssl_show_warn=False, ssl_show_warn=False,
# ca_certs = ca_certs_path # ca_certs = ca_certs_path
) )
logger.info(" Клиент OpenSearch.org подключен") logger.info(' Клиент OpenSearch.org подключен')
if self.lock.acquire(blocking=False): if self.lock.acquire(blocking=False):
try: try:
self.check_index() self.check_index()
finally: finally:
self.lock.release() self.lock.release()
else: else:
logger.debug(" проверка пропущена") logger.debug(' проверка пропущена')
except Exception as exc: except Exception as exc:
logger.error(f" {exc}") logger.error(f' {exc}')
self.client = None self.client = None
def info(self): def info(self):
if isinstance(self.client, OpenSearch): if isinstance(self.client, OpenSearch):
logger.info(" Поиск подключен") # : {self.client.info()}') logger.info(' Поиск подключен') # : {self.client.info()}')
else: else:
logger.info(" * Задайте переменные среды для подключения к серверу поиска") logger.info(' * Задайте переменные среды для подключения к серверу поиска')
def delete_index(self): def delete_index(self):
if self.client: if self.client:
logger.debug(f" Удаляем индекс {self.index_name}") logger.debug(f' Удаляем индекс {self.index_name}')
self.client.indices.delete(index=self.index_name, ignore_unavailable=True) self.client.indices.delete(index=self.index_name, ignore_unavailable=True)
def create_index(self): def create_index(self):
if self.client: if self.client:
if self.lock.acquire(blocking=False): if self.lock.acquire(blocking=False):
try: try:
logger.debug(f" Создаём новый индекс: {self.index_name} ") logger.debug(f' Создаём новый индекс: {self.index_name} ')
self.client.indices.create( self.client.indices.create(
index=self.index_name, body=index_settings index=self.index_name, body=index_settings
) )
@@ -114,11 +105,11 @@ class SearchService:
finally: finally:
self.lock.release() self.lock.release()
else: else:
logger.debug(" ..") logger.debug(' ..')
def put_mapping(self): def put_mapping(self):
if self.client: if self.client:
logger.debug(f" Разметка индекации {self.index_name}") logger.debug(f' Разметка индекации {self.index_name}')
self.client.indices.put_mapping( self.client.indices.put_mapping(
index=self.index_name, body=expected_mapping index=self.index_name, body=expected_mapping
) )
@@ -142,36 +133,28 @@ class SearchService:
finally: finally:
self.lock.release() self.lock.release()
else: else:
logger.debug(" ..") logger.debug(' ..')
def index(self, shout): def index(self, shout):
if self.client: if self.client:
id_ = str(shout.id) id_ = str(shout.id)
logger.debug(f" Индексируем пост {id_}") logger.debug(f' Индексируем пост {id_}')
self.client.index(index=self.index_name, id=id_, body=shout.dict()) self.client.index(index=self.index_name, id=id_, body=shout.dict())
async def search(self, text, limit, offset): async def search(self, text, limit, offset):
logger.debug(f" Ищем: {text}") logger.debug(f' Ищем: {text}')
search_body = { search_body = {'query': {'match': {'_all': text}}}
"query": {"match": {"_all": text}},
}
if self.client: if self.client:
search_response = self.client.search( search_response = self.client.search(
index=self.index_name, body=search_body, size=limit, from_=offset index=self.index_name, body=search_body, size=limit, from_=offset
) )
hits = search_response["hits"]["hits"] hits = search_response['hits']['hits']
results = [ results = [{**hit['_source'], 'score': hit['_score']} for hit in hits]
{
**hit["_source"],
"score": hit["_score"],
}
for hit in hits
]
# Use Redis as cache with TTL # Use Redis as cache with TTL
redis_key = f"search:{text}" redis_key = f'search:{text}'
await redis.execute("SETEX", redis_key, REDIS_TTL, json.dumps(results)) await redis.execute('SETEX', redis_key, REDIS_TTL, json.dumps(results))
return [] return []
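Note that search() above writes its results into Redis under search:<text> with REDIS_TTL and then returns an empty list, so the cached JSON is evidently meant to be read back elsewhere. A hedged sketch of that read path; the helper name is illustrative and not part of the commit:

import json
from services.rediscache import redis

async def cached_search_results(text: str) -> list:
    # SearchService.search stored these via SETEX f'search:{text}'
    cached = await redis.execute('GET', f'search:{text}')
    return json.loads(cached) if cached else []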

View File

@@ -26,5 +26,5 @@ def start_sentry():
], ],
) )
except Exception as e: except Exception as e:
print("[services.sentry] init error") print('[services.sentry] init error')
print(e) print(e)

View File

@@ -4,7 +4,7 @@ from services.rediscache import redis
async def get_unread_counter(chat_id: str, author_id: int) -> int: async def get_unread_counter(chat_id: str, author_id: int) -> int:
r = await redis.execute("LLEN", f"chats/{chat_id}/unread/{author_id}") r = await redis.execute('LLEN', f'chats/{chat_id}/unread/{author_id}')
if isinstance(r, str): if isinstance(r, str):
return int(r) return int(r)
elif isinstance(r, int): elif isinstance(r, int):
@@ -14,7 +14,7 @@ async def get_unread_counter(chat_id: str, author_id: int) -> int:
async def get_total_unread_counter(author_id: int) -> int: async def get_total_unread_counter(author_id: int) -> int:
chats_set = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}") chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
s = 0 s = 0
if isinstance(chats_set, str): if isinstance(chats_set, str):
chats_set = json.loads(chats_set) chats_set = json.loads(chats_set)
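The hunk above cuts off inside get_total_unread_counter, right after the chat set has been fetched and JSON-decoded. A hedged sketch of how the summation presumably continues; the wrapper name is illustrative and this completion is an assumption, not part of the diff:

import json
from services.rediscache import redis
from services.unread import get_unread_counter

async def total_unread(author_id: int) -> int:
    chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
    if isinstance(chats_set, str):
        chats_set = json.loads(chats_set)
    total = 0
    for chat_id in chats_set or []:
        # Each chat keeps its own unread list per author
        total += await get_unread_counter(chat_id, author_id)
    return total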

View File

@@ -20,9 +20,9 @@ from orm.topic import Topic
from services.db import local_session from services.db import local_session
from services.logger import root_logger as logger from services.logger import root_logger as logger
GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json") GOOGLE_KEYFILE_PATH = os.environ.get('GOOGLE_KEYFILE_PATH', '/dump/google-service.json')
GOOGLE_PROPERTY_ID = os.environ.get("GOOGLE_PROPERTY_ID", "") GOOGLE_PROPERTY_ID = os.environ.get('GOOGLE_PROPERTY_ID', '')
VIEWS_FILEPATH = "/dump/views.json" VIEWS_FILEPATH = '/dump/views.json'
class ViewedStorage: class ViewedStorage:
@@ -42,12 +42,12 @@ class ViewedStorage:
"""Подключение к клиенту Google Analytics с использованием аутентификации""" """Подключение к клиенту Google Analytics с использованием аутентификации"""
self = ViewedStorage self = ViewedStorage
async with self.lock: async with self.lock:
os.environ.setdefault("GOOGLE_APPLICATION_CREDENTIALS", GOOGLE_KEYFILE_PATH) os.environ.setdefault('GOOGLE_APPLICATION_CREDENTIALS', GOOGLE_KEYFILE_PATH)
if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH): if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH):
# Using a default constructor instructs the client to use the credentials # Using a default constructor instructs the client to use the credentials
# specified in GOOGLE_APPLICATION_CREDENTIALS environment variable. # specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
self.analytics_client = BetaAnalyticsDataClient() self.analytics_client = BetaAnalyticsDataClient()
logger.info(" * Клиент Google Analytics успешно авторизован") logger.info(' * Клиент Google Analytics успешно авторизован')
# Загрузка предварительно подсчитанных просмотров из файла JSON # Загрузка предварительно подсчитанных просмотров из файла JSON
self.load_precounted_views() self.load_precounted_views()
@@ -55,19 +55,19 @@ class ViewedStorage:
if os.path.exists(VIEWS_FILEPATH): if os.path.exists(VIEWS_FILEPATH):
file_timestamp = os.path.getctime(VIEWS_FILEPATH) file_timestamp = os.path.getctime(VIEWS_FILEPATH)
self.start_date = datetime.fromtimestamp(file_timestamp).strftime( self.start_date = datetime.fromtimestamp(file_timestamp).strftime(
"%Y-%m-%d" '%Y-%m-%d'
) )
now_date = datetime.now().strftime("%Y-%m-%d") now_date = datetime.now().strftime('%Y-%m-%d')
if now_date == self.start_date: if now_date == self.start_date:
logger.info(" * Данные актуализованы!") logger.info(' * Данные актуализованы!')
else: else:
logger.info(f" * Миграция проводилась: {self.start_date}") logger.info(f' * Миграция проводилась: {self.start_date}')
# Запуск фоновой задачи # Запуск фоновой задачи
asyncio.create_task(self.worker()) asyncio.create_task(self.worker())
else: else:
logger.info(" * Пожалуйста, добавьте ключевой файл Google Analytics") logger.info(' * Пожалуйста, добавьте ключевой файл Google Analytics')
self.disabled = True self.disabled = True
@staticmethod @staticmethod
@@ -75,31 +75,31 @@ class ViewedStorage:
"""Загрузка предварительно подсчитанных просмотров из файла JSON""" """Загрузка предварительно подсчитанных просмотров из файла JSON"""
self = ViewedStorage self = ViewedStorage
try: try:
with open(VIEWS_FILEPATH, "r") as file: with open(VIEWS_FILEPATH, 'r') as file:
precounted_views = json.load(file) precounted_views = json.load(file)
self.views_by_shout.update(precounted_views) self.views_by_shout.update(precounted_views)
logger.info( logger.info(
f" * {len(precounted_views)} публикаций с просмотрами успешно загружены." f' * {len(precounted_views)} публикаций с просмотрами успешно загружены.'
) )
except Exception as e: except Exception as e:
logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}") logger.error(f'Ошибка загрузки предварительно подсчитанных просмотров: {e}')
@staticmethod @staticmethod
async def update_pages(): async def update_pages():
"""Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров""" """Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
self = ViewedStorage self = ViewedStorage
logger.info(" ⎧ Обновление данных просмотров от Google Analytics ---") logger.info(' ⎧ Обновление данных просмотров от Google Analytics ---')
if not self.disabled: if not self.disabled:
try: try:
start = time.time() start = time.time()
async with self.lock: async with self.lock:
if self.analytics_client: if self.analytics_client:
request = RunReportRequest( request = RunReportRequest(
property=f"properties/{GOOGLE_PROPERTY_ID}", property=f'properties/{GOOGLE_PROPERTY_ID}',
dimensions=[Dimension(name="pagePath")], dimensions=[Dimension(name='pagePath')],
metrics=[Metric(name="screenPageViews")], metrics=[Metric(name='screenPageViews')],
date_ranges=[ date_ranges=[
DateRange(start_date=self.start_date, end_date="today") DateRange(start_date=self.start_date, end_date='today')
], ],
) )
response = self.analytics_client.run_report(request) response = self.analytics_client.run_report(request)
@@ -113,7 +113,7 @@ class ViewedStorage:
# Извлечение путей страниц из ответа Google Analytics # Извлечение путей страниц из ответа Google Analytics
if isinstance(row.dimension_values, list): if isinstance(row.dimension_values, list):
page_path = row.dimension_values[0].value page_path = row.dimension_values[0].value
slug = page_path.split("discours.io/")[-1] slug = page_path.split('discours.io/')[-1]
views_count = int(row.metric_values[0].value) views_count = int(row.metric_values[0].value)
# Обновление данных в хранилище # Обновление данных в хранилище
@@ -126,10 +126,10 @@ class ViewedStorage:
# Запись путей страниц для логирования # Запись путей страниц для логирования
slugs.add(slug) slugs.add(slug)
logger.info(f" ⎪ Собрано страниц: {len(slugs)} ") logger.info(f' ⎪ Собрано страниц: {len(slugs)} ')
end = time.time() end = time.time()
logger.info(" ⎪ Обновление страниц заняло %fs " % (end - start)) logger.info(' ⎪ Обновление страниц заняло %fs ' % (end - start))
except Exception as error: except Exception as error:
logger.error(error) logger.error(error)
@@ -209,18 +209,18 @@ class ViewedStorage:
failed = 0 failed = 0
except Exception as _exc: except Exception as _exc:
failed += 1 failed += 1
logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed) logger.info(' - Обновление не удалось #%d, ожидание 10 секунд' % failed)
if failed > 3: if failed > 3:
logger.info(" - Больше не пытаемся обновить") logger.info(' - Больше не пытаемся обновить')
break break
if failed == 0: if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period) when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat()) t = format(when.astimezone().isoformat())
logger.info( logger.info(
" ⎩ Следующее обновление: %s" ' ⎩ Следующее обновление: %s'
% (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]) % (t.split('T')[0] + ' ' + t.split('T')[1].split('.')[0])
) )
await asyncio.sleep(self.period) await asyncio.sleep(self.period)
else: else:
await asyncio.sleep(10) await asyncio.sleep(10)
logger.info(" - Попытка снова обновить данные") logger.info(' - Попытка снова обновить данные')
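load_precounted_views above merges /dump/views.json into views_by_shout, and update_pages keys the storage by the slug cut from each page path. The dump is therefore presumably a flat JSON object mapping shout slugs to integer view counts; that format is an assumption, since the file itself is not shown in this commit. An illustrative read in that shape, with made-up slugs and numbers:

import json

sample_views_json = '{"example-slug-one": 1200, "example-slug-two": 340}'  # hypothetical dump contents

views_by_shout: dict = {}
views_by_shout.update(json.loads(sample_views_json))
print(f' * {len(views_by_shout)} публикаций с просмотрами успешно загружены.')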

View File

@@ -15,13 +15,13 @@ class WebhookEndpoint(HTTPEndpoint):
try: try:
data = await request.json() data = await request.json()
if data: if data:
auth = request.headers.get("Authorization") auth = request.headers.get('Authorization')
if auth: if auth:
if auth == os.environ.get("WEBHOOK_SECRET"): if auth == os.environ.get('WEBHOOK_SECRET'):
user_id: str = data["user"]["id"] user_id: str = data['user']['id']
name: str = data["user"]["given_name"] name: str = data['user']['given_name']
slug: str = data["user"]["email"].split("@")[0] slug: str = data['user']['email'].split('@')[0]
slug: str = re.sub("[^0-9a-z]+", "-", slug.lower()) slug: str = re.sub('[^0-9a-z]+', '-', slug.lower())
with local_session() as session: with local_session() as session:
author = ( author = (
session.query(Author) session.query(Author)
@@ -29,12 +29,12 @@ class WebhookEndpoint(HTTPEndpoint):
.first() .first()
) )
if author: if author:
slug = slug + "-" + user_id.split("-").pop() slug = slug + '-' + user_id.split('-').pop()
await create_author(user_id, slug, name) await create_author(user_id, slug, name)
return JSONResponse({"status": "success"}) return JSONResponse({'status': 'success'})
except Exception as e: except Exception as e:
import traceback import traceback
traceback.print_exc() traceback.print_exc()
return JSONResponse({"status": "error", "message": str(e)}, status_code=500) return JSONResponse({'status': 'error', 'message': str(e)}, status_code=500)
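The handler above checks the Authorization header against WEBHOOK_SECRET, reads user.id, user.given_name and user.email from the body, derives a slug from the e-mail's local part and appends the tail of the user id when that slug is already taken. A small self-contained sketch of the slug derivation with made-up input values:

import re

# Illustrative payload shape, covering only the fields the endpoint actually reads
data = {'user': {'id': 'a1b2c3d4-0001', 'given_name': 'Jane', 'email': 'Jane.Doe@example.com'}}

user_id = data['user']['id']
slug = data['user']['email'].split('@')[0]
slug = re.sub('[^0-9a-z]+', '-', slug.lower())
print(slug)                                   # jane-doe
print(slug + '-' + user_id.split('-').pop())  # jane-doe-0001 on a collision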

View File

@@ -3,15 +3,15 @@ from os import environ
PORT = 8080 PORT = 8080
DB_URL = ( DB_URL = (
environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://") environ.get('DATABASE_URL', '').replace('postgres://', 'postgresql://')
or environ.get("DB_URL", "").replace("postgres://", "postgresql://") or environ.get('DB_URL', '').replace('postgres://', 'postgresql://')
or "postgresql://postgres@localhost:5432/discoursio" or 'postgresql://postgres@localhost:5432/discoursio'
) )
REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1" REDIS_URL = environ.get('REDIS_URL') or 'redis://127.0.0.1'
API_BASE = environ.get("API_BASE") or "" API_BASE = environ.get('API_BASE') or ''
AUTH_URL = environ.get("AUTH_URL") or "" AUTH_URL = environ.get('AUTH_URL') or ''
SENTRY_DSN = environ.get("SENTRY_DSN") SENTRY_DSN = environ.get('SENTRY_DSN')
DEV_SERVER_PID_FILE_NAME = "dev-server.pid" DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
MODE = "development" if "dev" in sys.argv else "production" MODE = 'development' if 'dev' in sys.argv else 'production'
ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing" ADMIN_SECRET = environ.get('AUTH_SECRET') or 'nothing'
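All of the settings above are plain environment lookups with local fallbacks, and DB_URL additionally rewrites the postgres:// scheme into the postgresql:// form SQLAlchemy expects. A short sketch of that normalization with placeholder values:

from os import environ

# Placeholder values; in deployment these come from the environment
environ.setdefault('DATABASE_URL', 'postgres://postgres@localhost:5432/discoursio')
environ.setdefault('REDIS_URL', 'redis://127.0.0.1')

DB_URL = environ.get('DATABASE_URL', '').replace('postgres://', 'postgresql://')
print(DB_URL)  # postgresql://postgres@localhost:5432/discoursio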