fmt
parent 937b154c6b
commit c25d7e3ab6

main.py (14 changed lines)
@@ -14,24 +14,24 @@ from services.viewed import ViewedStorage
 from services.webhook import WebhookEndpoint
 from settings import DEV_SERVER_PID_FILE_NAME, MODE
 
-import_module('resolvers')
-schema = make_executable_schema(load_schema_from_path('schema/'), resolvers)
+import_module("resolvers")
+schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
 
 
 async def start():
-    if MODE == 'development':
+    if MODE == "development":
         if not exists(DEV_SERVER_PID_FILE_NAME):
             # pid file management
-            with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
+            with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
                 f.write(str(os.getpid()))
-    print(f'[main] process started in {MODE} mode')
+    print(f"[main] process started in {MODE} mode")
 
 
 # main starlette app object with ariadne mounted in root
 app = Starlette(
     routes=[
-        Route('/', GraphQL(schema, debug=True)),
-        Route('/new-author', WebhookEndpoint),
+        Route("/", GraphQL(schema, debug=True)),
+        Route("/new-author", WebhookEndpoint),
     ],
     on_startup=[
         redis.connect,
@@ -7,40 +7,40 @@ from services.db import Base
 
 
 class AuthorRating(Base):
-    __tablename__ = 'author_rating'
+    __tablename__ = "author_rating"
 
     id = None  # type: ignore
-    rater = Column(ForeignKey('author.id'), primary_key=True)
-    author = Column(ForeignKey('author.id'), primary_key=True)
+    rater = Column(ForeignKey("author.id"), primary_key=True)
+    author = Column(ForeignKey("author.id"), primary_key=True)
     plus = Column(Boolean)
 
 
 class AuthorFollower(Base):
-    __tablename__ = 'author_follower'
+    __tablename__ = "author_follower"
 
     id = None  # type: ignore
-    follower = Column(ForeignKey('author.id'), primary_key=True)
-    author = Column(ForeignKey('author.id'), primary_key=True)
+    follower = Column(ForeignKey("author.id"), primary_key=True)
+    author = Column(ForeignKey("author.id"), primary_key=True)
     created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
     auto = Column(Boolean, nullable=False, default=False)
 
 
 class Author(Base):
-    __tablename__ = 'author'
+    __tablename__ = "author"
 
     user = Column(String)  # unbounded link with authorizer's User type
 
-    name = Column(String, nullable=True, comment='Display name')
+    name = Column(String, nullable=True, comment="Display name")
     slug = Column(String, unique=True, comment="Author's slug")
-    bio = Column(String, nullable=True, comment='Bio')  # status description
-    about = Column(String, nullable=True, comment='About')  # long and formatted
-    pic = Column(String, nullable=True, comment='Picture')
-    links = Column(JSON, nullable=True, comment='Links')
+    bio = Column(String, nullable=True, comment="Bio")  # status description
+    about = Column(String, nullable=True, comment="About")  # long and formatted
+    pic = Column(String, nullable=True, comment="Picture")
+    links = Column(JSON, nullable=True, comment="Links")
     created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
     last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
     updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
-    deleted_at = Column(Integer, nullable=True, comment='Deleted at')
+    deleted_at = Column(Integer, nullable=True, comment="Deleted at")
 
     search_vector = Column(
-        TSVectorType('name', 'slug', 'bio', 'about', regconfig='pg_catalog.russian')
+        TSVectorType("name", "slug", "bio", "about", regconfig="pg_catalog.russian")
     )
@@ -6,20 +6,20 @@ from services.db import Base
 
 
 class ShoutCollection(Base):
-    __tablename__ = 'shout_collection'
+    __tablename__ = "shout_collection"
 
     id = None  # type: ignore
-    shout = Column(ForeignKey('shout.id'), primary_key=True)
-    collection = Column(ForeignKey('collection.id'), primary_key=True)
+    shout = Column(ForeignKey("shout.id"), primary_key=True)
+    collection = Column(ForeignKey("collection.id"), primary_key=True)
 
 
 class Collection(Base):
-    __tablename__ = 'collection'
+    __tablename__ = "collection"
 
     slug = Column(String, unique=True)
-    title = Column(String, nullable=False, comment='Title')
-    body = Column(String, nullable=True, comment='Body')
-    pic = Column(String, nullable=True, comment='Picture')
+    title = Column(String, nullable=False, comment="Title")
+    body = Column(String, nullable=True, comment="Body")
+    pic = Column(String, nullable=True, comment="Picture")
     created_at = Column(Integer, default=lambda: int(time.time()))
-    created_by = Column(ForeignKey('author.id'), comment='Created By')
+    created_by = Column(ForeignKey("author.id"), comment="Created By")
     published_at = Column(Integer, default=lambda: int(time.time()))
@@ -8,22 +8,22 @@ from services.db import Base
 
 
 class CommunityAuthor(Base):
-    __tablename__ = 'community_author'
+    __tablename__ = "community_author"
 
     id = None  # type: ignore
-    author = Column(ForeignKey('author.id'), primary_key=True)
-    community = Column(ForeignKey('community.id'), primary_key=True)
+    author = Column(ForeignKey("author.id"), primary_key=True)
+    community = Column(ForeignKey("community.id"), primary_key=True)
     joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
     role = Column(String, nullable=False)
 
 
 class Community(Base):
-    __tablename__ = 'community'
+    __tablename__ = "community"
 
     name = Column(String, nullable=False)
     slug = Column(String, nullable=False, unique=True)
-    desc = Column(String, nullable=False, default='')
-    pic = Column(String, nullable=False, default='')
+    desc = Column(String, nullable=False, default="")
+    pic = Column(String, nullable=False, default="")
     created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
 
-    authors = relationship(Author, secondary='community_author')
+    authors = relationship(Author, secondary="community_author")
@@ -7,19 +7,19 @@ from services.db import Base
 
 
 class InviteStatus(Enumeration):
-    PENDING = 'PENDING'
-    ACCEPTED = 'ACCEPTED'
-    REJECTED = 'REJECTED'
+    PENDING = "PENDING"
+    ACCEPTED = "ACCEPTED"
+    REJECTED = "REJECTED"
 
 
 class Invite(Base):
-    __tablename__ = 'invite'
+    __tablename__ = "invite"
 
-    inviter_id = Column(ForeignKey('author.id'), primary_key=True)
-    author_id = Column(ForeignKey('author.id'), primary_key=True)
-    shout_id = Column(ForeignKey('shout.id'), primary_key=True)
+    inviter_id = Column(ForeignKey("author.id"), primary_key=True)
+    author_id = Column(ForeignKey("author.id"), primary_key=True)
+    shout_id = Column(ForeignKey("shout.id"), primary_key=True)
     status = Column(String, default=InviteStatus.PENDING.value)
 
-    inviter = relationship('author', foreign_keys=[inviter_id])
-    author = relationship('author', foreign_keys=[author_id])
-    shout = relationship('shout')
+    inviter = relationship("author", foreign_keys=[inviter_id])
+    author = relationship("author", foreign_keys=[author_id])
+    shout = relationship("shout")
@@ -11,36 +11,36 @@ from services.logger import root_logger as logger
 
 
 class NotificationEntity(Enumeration):
-    REACTION = 'reaction'
-    SHOUT = 'shout'
-    FOLLOWER = 'follower'
+    REACTION = "reaction"
+    SHOUT = "shout"
+    FOLLOWER = "follower"
 
 
 class NotificationAction(Enumeration):
-    CREATE = 'create'
-    UPDATE = 'update'
-    DELETE = 'delete'
-    SEEN = 'seen'
-    FOLLOW = 'follow'
-    UNFOLLOW = 'unfollow'
+    CREATE = "create"
+    UPDATE = "update"
+    DELETE = "delete"
+    SEEN = "seen"
+    FOLLOW = "follow"
+    UNFOLLOW = "unfollow"
 
 
 class NotificationSeen(Base):
-    __tablename__ = 'notification_seen'
+    __tablename__ = "notification_seen"
 
-    viewer = Column(ForeignKey('author.id'))
-    notification = Column(ForeignKey('notification.id'))
+    viewer = Column(ForeignKey("author.id"))
+    notification = Column(ForeignKey("notification.id"))
 
 
 class Notification(Base):
-    __tablename__ = 'notification'
+    __tablename__ = "notification"
 
     created_at = Column(Integer, server_default=str(int(time.time())))
     entity = Column(String, nullable=False)
     action = Column(String, nullable=False)
     payload = Column(JSON, nullable=True)
 
-    seen = relationship(lambda: Author, secondary='notification_seen')
+    seen = relationship(lambda: Author, secondary="notification_seen")
 
 
 try:
@@ -10,15 +10,9 @@ PROPOSAL_REACTIONS = [
     ReactionKind.PROPOSE.value,
 ]
 
-PROOF_REACTIONS = [
-    ReactionKind.PROOF.value,
-    ReactionKind.DISPROOF.value
-]
+PROOF_REACTIONS = [ReactionKind.PROOF.value, ReactionKind.DISPROOF.value]
 
-RATING_REACTIONS = [
-    ReactionKind.LIKE.value,
-    ReactionKind.DISLIKE.value
-]
+RATING_REACTIONS = [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
 
 
 def is_negative(x):
@@ -10,36 +10,36 @@ class ReactionKind(Enumeration):
     # TYPE = <reaction index> # rating diff
 
     # editor mode
-    AGREE = 'AGREE'  # +1
-    DISAGREE = 'DISAGREE'  # -1
-    ASK = 'ASK'  # +0
-    PROPOSE = 'PROPOSE'  # +0
-    ACCEPT = 'ACCEPT'  # +1
-    REJECT = 'REJECT'  # -1
+    AGREE = "AGREE"  # +1
+    DISAGREE = "DISAGREE"  # -1
+    ASK = "ASK"  # +0
+    PROPOSE = "PROPOSE"  # +0
+    ACCEPT = "ACCEPT"  # +1
+    REJECT = "REJECT"  # -1
 
     # expert mode
-    PROOF = 'PROOF'  # +1
-    DISPROOF = 'DISPROOF'  # -1
+    PROOF = "PROOF"  # +1
+    DISPROOF = "DISPROOF"  # -1
 
     # public feed
-    QUOTE = 'QUOTE'  # +0 TODO: use to bookmark in collection
-    COMMENT = 'COMMENT'  # +0
-    LIKE = 'LIKE'  # +1
-    DISLIKE = 'DISLIKE'  # -1
+    QUOTE = "QUOTE"  # +0 TODO: use to bookmark in collection
+    COMMENT = "COMMENT"  # +0
+    LIKE = "LIKE"  # +1
+    DISLIKE = "DISLIKE"  # -1
 
 
 class Reaction(Base):
-    __tablename__ = 'reaction'
+    __tablename__ = "reaction"
 
-    body = Column(String, default='', comment='Reaction Body')
+    body = Column(String, default="", comment="Reaction Body")
     created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
-    updated_at = Column(Integer, nullable=True, comment='Updated at')
-    deleted_at = Column(Integer, nullable=True, comment='Deleted at')
-    deleted_by = Column(ForeignKey('author.id'), nullable=True)
-    reply_to = Column(ForeignKey('reaction.id'), nullable=True)
-    quote = Column(String, nullable=True, comment='Original quoted text')
-    shout = Column(ForeignKey('shout.id'), nullable=False)
-    created_by = Column(ForeignKey('author.id'), nullable=False)
+    updated_at = Column(Integer, nullable=True, comment="Updated at")
+    deleted_at = Column(Integer, nullable=True, comment="Deleted at")
+    deleted_by = Column(ForeignKey("author.id"), nullable=True)
+    reply_to = Column(ForeignKey("reaction.id"), nullable=True)
+    quote = Column(String, nullable=True, comment="Original quoted text")
+    shout = Column(ForeignKey("shout.id"), nullable=False)
+    created_by = Column(ForeignKey("author.id"), nullable=False)
     kind = Column(String, nullable=False)
 
     oid = Column(String)
orm/shout.py (52 changed lines)
@@ -11,44 +11,44 @@ from services.db import Base
 
 
 class ShoutTopic(Base):
-    __tablename__ = 'shout_topic'
+    __tablename__ = "shout_topic"
 
     id = None  # type: ignore
-    shout = Column(ForeignKey('shout.id'), primary_key=True)
-    topic = Column(ForeignKey('topic.id'), primary_key=True)
+    shout = Column(ForeignKey("shout.id"), primary_key=True)
+    topic = Column(ForeignKey("topic.id"), primary_key=True)
     main = Column(Boolean, nullable=True)
 
 
 class ShoutReactionsFollower(Base):
-    __tablename__ = 'shout_reactions_followers'
+    __tablename__ = "shout_reactions_followers"
 
     id = None  # type: ignore
-    follower = Column(ForeignKey('author.id'), primary_key=True)
-    shout = Column(ForeignKey('shout.id'), primary_key=True)
+    follower = Column(ForeignKey("author.id"), primary_key=True)
+    shout = Column(ForeignKey("shout.id"), primary_key=True)
     auto = Column(Boolean, nullable=False, default=False)
     created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
     deleted_at = Column(Integer, nullable=True)
 
 
 class ShoutAuthor(Base):
-    __tablename__ = 'shout_author'
+    __tablename__ = "shout_author"
 
     id = None  # type: ignore
-    shout = Column(ForeignKey('shout.id'), primary_key=True)
-    author = Column(ForeignKey('author.id'), primary_key=True)
-    caption = Column(String, nullable=True, default='')
+    shout = Column(ForeignKey("shout.id"), primary_key=True)
+    author = Column(ForeignKey("author.id"), primary_key=True)
+    caption = Column(String, nullable=True, default="")
 
 
 class ShoutCommunity(Base):
-    __tablename__ = 'shout_community'
+    __tablename__ = "shout_community"
 
     id = None  # type: ignore
-    shout = Column(ForeignKey('shout.id'), primary_key=True)
-    community = Column(ForeignKey('community.id'), primary_key=True)
+    shout = Column(ForeignKey("shout.id"), primary_key=True)
+    community = Column(ForeignKey("community.id"), primary_key=True)
 
 
 class Shout(Base):
-    __tablename__ = 'shout'
+    __tablename__ = "shout"
 
     created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
     updated_at = Column(Integer, nullable=True)
@@ -56,28 +56,28 @@ class Shout(Base):
     featured_at = Column(Integer, nullable=True)
     deleted_at = Column(Integer, nullable=True)
 
-    created_by = Column(ForeignKey('author.id'), nullable=False)
-    updated_by = Column(ForeignKey('author.id'), nullable=True)
-    deleted_by = Column(ForeignKey('author.id'), nullable=True)
+    created_by = Column(ForeignKey("author.id"), nullable=False)
+    updated_by = Column(ForeignKey("author.id"), nullable=True)
+    deleted_by = Column(ForeignKey("author.id"), nullable=True)
 
-    body = Column(String, nullable=False, comment='Body')
+    body = Column(String, nullable=False, comment="Body")
     slug = Column(String, unique=True)
-    cover = Column(String, nullable=True, comment='Cover image url')
-    cover_caption = Column(String, nullable=True, comment='Cover image alt caption')
+    cover = Column(String, nullable=True, comment="Cover image url")
+    cover_caption = Column(String, nullable=True, comment="Cover image alt caption")
     lead = Column(String, nullable=True)
     description = Column(String, nullable=True)
     title = Column(String, nullable=False)
     subtitle = Column(String, nullable=True)
-    layout = Column(String, nullable=False, default='article')
+    layout = Column(String, nullable=False, default="article")
     media = Column(JSON, nullable=True)
 
-    authors = relationship(Author, secondary='shout_author')
-    topics = relationship(Topic, secondary='shout_topic')
-    communities = relationship(Community, secondary='shout_community')
+    authors = relationship(Author, secondary="shout_author")
+    topics = relationship(Topic, secondary="shout_topic")
+    communities = relationship(Community, secondary="shout_community")
     reactions = relationship(Reaction)
 
-    lang = Column(String, nullable=False, default='ru', comment='Language')
-    version_of = Column(ForeignKey('shout.id'), nullable=True)
+    lang = Column(String, nullable=False, default="ru", comment="Language")
+    version_of = Column(ForeignKey("shout.id"), nullable=True)
     oid = Column(String, nullable=True)
 
     seo = Column(String, nullable=True)  # JSON
orm/topic.py (18 changed lines)
@@ -6,21 +6,21 @@ from services.db import Base
 
 
 class TopicFollower(Base):
-    __tablename__ = 'topic_followers'
+    __tablename__ = "topic_followers"
 
     id = None  # type: ignore
-    follower = Column(Integer, ForeignKey('author.id'), primary_key=True)
-    topic = Column(Integer, ForeignKey('topic.id'), primary_key=True)
+    follower = Column(Integer, ForeignKey("author.id"), primary_key=True)
+    topic = Column(Integer, ForeignKey("topic.id"), primary_key=True)
     created_at = Column(Integer, nullable=False, default=int(time.time()))
     auto = Column(Boolean, nullable=False, default=False)
 
 
 class Topic(Base):
-    __tablename__ = 'topic'
+    __tablename__ = "topic"
 
     slug = Column(String, unique=True)
-    title = Column(String, nullable=False, comment='Title')
-    body = Column(String, nullable=True, comment='Body')
-    pic = Column(String, nullable=True, comment='Picture')
-    community = Column(ForeignKey('community.id'), default=1)
-    oid = Column(String, nullable=True, comment='Old ID')
+    title = Column(String, nullable=False, comment="Title")
+    body = Column(String, nullable=True, comment="Body")
+    pic = Column(String, nullable=True, comment="Picture")
+    community = Column(ForeignKey("community.id"), default=1)
+    oid = Column(String, nullable=True, comment="Old ID")
@@ -6,7 +6,7 @@ from services.db import Base
 
 
 class User(Base):
-    __tablename__ = 'authorizer_users'
+    __tablename__ = "authorizer_users"
 
     id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
     key = Column(String)
@@ -24,7 +24,7 @@ class User(Base):
     # preferred_username = Column(String, nullable=False)
     picture = Column(String)
     revoked_timestamp = Column(Integer)
-    roles = Column(String, default='author, reader')
-    signup_methods = Column(String, default='magic_link_login')
+    roles = Column(String, default="author, reader")
+    signup_methods = Column(String, default="magic_link_login")
     created_at = Column(Integer, default=lambda: int(time.time()))
     updated_at = Column(Integer, default=lambda: int(time.time()))
@@ -1,75 +1,103 @@
-from resolvers.author import (get_author, get_author_followers,
-    get_author_follows, get_author_follows_authors,
-    get_author_follows_topics, get_author_id,
-    get_authors_all, load_authors_by, search_authors,
-    update_author)
+from resolvers.author import (
+    get_author,
+    get_author_followers,
+    get_author_follows,
+    get_author_follows_authors,
+    get_author_follows_topics,
+    get_author_id,
+    get_authors_all,
+    load_authors_by,
+    search_authors,
+    update_author,
+)
 from resolvers.community import get_communities_all, get_community
 from resolvers.editor import create_shout, delete_shout, update_shout
-from resolvers.follower import (follow, get_shout_followers,
-    get_topic_followers, unfollow)
-from resolvers.notifier import (load_notifications, notification_mark_seen,
-    notifications_seen_after,
-    notifications_seen_thread)
+from resolvers.follower import (
+    follow,
+    get_shout_followers,
+    get_topic_followers,
+    unfollow,
+)
+from resolvers.notifier import (
+    load_notifications,
+    notification_mark_seen,
+    notifications_seen_after,
+    notifications_seen_thread,
+)
 from resolvers.rating import rate_author
-from resolvers.reaction import (create_reaction, delete_reaction,
-    load_reactions_by, load_shouts_followed,
-    update_reaction)
-from resolvers.reader import (get_shout, load_shouts_by, load_shouts_feed,
-    load_shouts_random_top, load_shouts_random_topic,
-    load_shouts_search, load_shouts_unrated)
-from resolvers.topic import (get_topic, get_topics_all, get_topics_by_author,
-    get_topics_by_community)
+from resolvers.reaction import (
+    create_reaction,
+    delete_reaction,
+    load_reactions_by,
+    load_shouts_followed,
+    update_reaction,
+)
+from resolvers.reader import (
+    get_shout,
+    load_shouts_by,
+    load_shouts_feed,
+    load_shouts_random_top,
+    load_shouts_random_topic,
+    load_shouts_search,
+    load_shouts_unrated,
+)
+from resolvers.topic import (
+    get_topic,
+    get_topics_all,
+    get_topics_by_author,
+    get_topics_by_community,
+)
 from services.triggers import events_register
 
 events_register()
 
 __all__ = [
     # author
-    'get_author',
-    'get_author_id',
-    'get_author_follows',
-    'get_author_follows_topics',
-    'get_author_follows_authors',
-    'get_authors_all',
-    'load_authors_by',
-    'rate_author',
-    'update_author',
-    'search_authors',
+    "get_author",
+    "get_author_id",
+    "get_author_follows",
+    "get_author_follows_topics",
+    "get_author_follows_authors",
+    "get_authors_all",
+    "load_authors_by",
+    "rate_author",
+    "update_author",
+    "search_authors",
     # community
-    'get_community',
-    'get_communities_all',
+    "get_community",
+    "get_communities_all",
     # topic
-    'get_topic',
-    'get_topics_all',
-    'get_topics_by_community',
-    'get_topics_by_author',
+    "get_topic",
+    "get_topics_all",
+    "get_topics_by_community",
+    "get_topics_by_author",
     # reader
-    'get_shout',
-    'load_shouts_by',
-    'load_shouts_feed',
-    'load_shouts_search',
-    'load_shouts_followed',
-    'load_shouts_unrated',
-    'load_shouts_random_top',
-    'load_shouts_random_topic',
+    "get_shout",
+    "load_shouts_by",
+    "load_shouts_feed",
+    "load_shouts_search",
+    "load_shouts_followed",
+    "load_shouts_unrated",
+    "load_shouts_random_top",
+    "load_shouts_random_topic",
     # follower
-    'follow',
-    'unfollow',
-    'get_topic_followers',
-    'get_shout_followers',
-    'get_author_followers',
+    "follow",
+    "unfollow",
+    "get_topic_followers",
+    "get_shout_followers",
+    "get_author_followers",
     # editor
-    'create_shout',
-    'update_shout',
-    'delete_shout',
+    "create_shout",
+    "update_shout",
+    "delete_shout",
     # reaction
-    'create_reaction',
-    'update_reaction',
-    'delete_reaction',
-    'load_reactions_by',
+    "create_reaction",
+    "update_reaction",
+    "delete_reaction",
+    "load_reactions_by",
    # notifier
-    'load_notifications',
-    'notifications_seen_thread',
-    'notifications_seen_after',
-    'notification_mark_seen',
+    "load_notifications",
+    "notifications_seen_thread",
+    "notifications_seen_after",
+    "notification_mark_seen",
 ]
@@ -19,12 +19,12 @@ from services.rediscache import redis
 from services.schema import mutation, query
 
 
-@mutation.field('update_author')
+@mutation.field("update_author")
 @login_required
 async def update_author(_, info, profile):
-    user_id = info.context.get('user_id')
+    user_id = info.context.get("user_id")
     if not user_id:
-        return {'error': 'unauthorized', 'author': None}
+        return {"error": "unauthorized", "author": None}
     try:
         with local_session() as session:
             author = session.query(Author).where(Author.user == user_id).first()
@@ -32,69 +32,72 @@ async def update_author(_, info, profile):
             Author.update(author, profile)
             session.add(author)
             session.commit()
-            return {'error': None, 'author': author}
+            return {"error": None, "author": author}
     except Exception as exc:
         import traceback
 
         logger.error(traceback.format_exc())
-        return {'error': exc, 'author': None}
+        return {"error": exc, "author": None}
 
 
-@query.field('get_authors_all')
+@query.field("get_authors_all")
 def get_authors_all(_, _info):
     with local_session() as session:
         authors = session.query(Author).all()
         return authors
 
 
-@query.field('get_author')
-async def get_author(_, _info, slug='', author_id=0):
-    author_query = ''
+@query.field("get_author")
+async def get_author(_, _info, slug="", author_id=0):
+    author_query = ""
     author = None
     author_dict = None
     try:
         # lookup for cached author
-        author_query = select(Author).filter(or_(Author.slug == slug, Author.id == author_id))
+        author_query = select(Author).filter(
+            or_(Author.slug == slug, Author.id == author_id)
+        )
         [found_author] = local_session().execute(author_query).first()
         logger.debug(found_author)
         if found_author:
-            logger.debug(f'found author id: {found_author.id}')
+            logger.debug(f"found author id: {found_author.id}")
             author_id = found_author.id if found_author.id else author_id
             if author_id:
-                cached_result = await redis.execute('GET', f'author:{author_id}')
+                cached_result = await redis.execute("GET", f"author:{author_id}")
                 author_dict = json.loads(cached_result) if cached_result else None
 
         # update stat from db
-        if not author_dict or not author_dict.get('stat'):
+        if not author_dict or not author_dict.get("stat"):
             result = get_with_stat(author_query)
             if not result:
-                raise ValueError('Author not found')
+                raise ValueError("Author not found")
             [author] = result
             # use found author
             if isinstance(author, Author):
-                logger.debug(f'update @{author.slug} with id {author.id}')
+                logger.debug(f"update @{author.slug} with id {author.id}")
                 author_dict = author.dict()
                 await cache_author(author_dict)
     except ValueError:
         pass
     except Exception as exc:
         import traceback
-        logger.error(f'{exc}:\n{traceback.format_exc()}')
+
+        logger.error(f"{exc}:\n{traceback.format_exc()}")
     return author_dict
 
 
 async def get_author_by_user_id(user_id: str):
-    logger.info(f'getting author id for {user_id}')
-    redis_key = f'user:{user_id}'
+    logger.info(f"getting author id for {user_id}")
+    redis_key = f"user:{user_id}"
     author = None
     try:
-        res = await redis.execute('GET', redis_key)
+        res = await redis.execute("GET", redis_key)
         if isinstance(res, str):
             author = json.loads(res)
-            author_id = author.get('id')
-            author_slug = author.get('slug')
+            author_id = author.get("id")
+            author_slug = author.get("slug")
             if author_id:
-                logger.debug(f'got author @{author_slug} #{author_id} cached')
+                logger.debug(f"got author @{author_slug} #{author_id} cached")
                 return author
 
         author_query = select(Author).filter(Author.user == user_id)
@@ -110,41 +113,41 @@ async def get_author_by_user_id(user_id: str):
     return author
 
 
-@query.field('get_author_id')
+@query.field("get_author_id")
 async def get_author_id(_, _info, user: str):
     return await get_author_by_user_id(user)
 
 
-@query.field('load_authors_by')
+@query.field("load_authors_by")
 def load_authors_by(_, _info, by, limit, offset):
-    cache_key = f'{json.dumps(by)}_{limit}_{offset}'
+    cache_key = f"{json.dumps(by)}_{limit}_{offset}"
 
     @cache_region.cache_on_arguments(cache_key)
     def _load_authors_by():
-        logger.debug(f'loading authors by {by}')
+        logger.debug(f"loading authors by {by}")
         q = select(Author)
-        if by.get('slug'):
+        if by.get("slug"):
             q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
-        elif by.get('name'):
+        elif by.get("name"):
             q = q.filter(Author.name.ilike(f"%{by['name']}%"))
-        elif by.get('topic'):
+        elif by.get("topic"):
             q = (
                 q.join(ShoutAuthor)
                 .join(ShoutTopic)
                 .join(Topic)
-                .where(Topic.slug == str(by['topic']))
+                .where(Topic.slug == str(by["topic"]))
             )
 
-        if by.get('last_seen'):  # in unix time
-            before = int(time.time()) - by['last_seen']
+        if by.get("last_seen"):  # in unix time
+            before = int(time.time()) - by["last_seen"]
             q = q.filter(Author.last_seen > before)
-        elif by.get('created_at'):  # in unix time
-            before = int(time.time()) - by['created_at']
+        elif by.get("created_at"):  # in unix time
+            before = int(time.time()) - by["created_at"]
             q = q.filter(Author.created_at > before)
 
-        order = by.get('order')
-        if order in ['likes', 'shouts', 'followers']:
-            q = q.order_by(desc(text(f'{order}_stat')))
+        order = by.get("order")
+        if order in ["likes", "shouts", "followers"]:
+            q = q.order_by(desc(text(f"{order}_stat")))
 
         # q = q.distinct()
         q = q.limit(limit).offset(offset)
@@ -156,8 +159,8 @@ def load_authors_by(_, _info, by, limit, offset):
     return _load_authors_by()
 
 
-@query.field('get_author_follows')
-async def get_author_follows(_, _info, slug='', user=None, author_id=0):
+@query.field("get_author_follows")
+async def get_author_follows(_, _info, slug="", user=None, author_id=0):
     try:
         author_query = select(Author)
         if user:
@@ -167,7 +170,7 @@ async def get_author_follows(_, _info, slug='', user=None, author_id=0):
         elif author_id:
             author_query = author_query.filter(Author.id == author_id)
         else:
-            raise ValueError('One of slug, user, or author_id must be provided')
+            raise ValueError("One of slug, user, or author_id must be provided")
         [result] = local_session().execute(author_query)
         if len(result) > 0:
             # logger.debug(result)
@@ -176,19 +179,21 @@ async def get_author_follows(_, _info, slug='', user=None, author_id=0):
             if author and isinstance(author, Author):
                 # logger.debug(author.dict())
                 author_id = author.id
-                rkey = f'author:{author_id}:follows-authors'
-                logger.debug(f'getting {author_id} follows authors')
-                cached = await redis.execute('GET', rkey)
+                rkey = f"author:{author_id}:follows-authors"
+                logger.debug(f"getting {author_id} follows authors")
+                cached = await redis.execute("GET", rkey)
                 authors = []
                 if not cached:
                     authors = author_follows_authors(author_id)
                     prepared = [author.dict() for author in authors]
-                    await redis.execute('SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder))
+                    await redis.execute(
+                        "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
+                    )
                 elif isinstance(cached, str):
                     authors = json.loads(cached)
 
-                rkey = f'author:{author_id}:follows-topics'
-                cached = await redis.execute('GET', rkey)
+                rkey = f"author:{author_id}:follows-topics"
+                cached = await redis.execute("GET", rkey)
                 topics = []
                 if cached and isinstance(cached, str):
                     topics = json.loads(cached)
@@ -196,24 +201,24 @@ async def get_author_follows(_, _info, slug='', user=None, author_id=0):
                     topics = author_follows_topics(author_id)
                     prepared = [topic.dict() for topic in topics]
                     await redis.execute(
-                        'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
+                        "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                     )
                 return {
-                    'topics': topics,
-                    'authors': authors,
-                    'communities': [
-                        {'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
+                    "topics": topics,
+                    "authors": authors,
+                    "communities": [
+                        {"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}
                     ],
                 }
     except Exception:
         import traceback
 
         traceback.print_exc()
-        return {'error': 'Author not found'}
+        return {"error": "Author not found"}
 
 
-@query.field('get_author_follows_topics')
-async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None):
+@query.field("get_author_follows_topics")
+async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
     with local_session() as session:
         if user or slug:
             author_id_result = (
@@ -223,10 +228,10 @@ async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None
             )
             author_id = author_id_result[0] if author_id_result else None
         if not author_id:
-            raise ValueError('Author not found')
-        logger.debug(f'getting {author_id} follows topics')
-        rkey = f'author:{author_id}:follows-topics'
-        cached = await redis.execute('GET', rkey)
+            raise ValueError("Author not found")
+        logger.debug(f"getting {author_id} follows topics")
+        rkey = f"author:{author_id}:follows-topics"
+        cached = await redis.execute("GET", rkey)
         topics = []
         if isinstance(cached, str):
             topics = json.loads(cached)
@@ -234,13 +239,13 @@ async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None
             topics = author_follows_topics(author_id)
             prepared = [topic.dict() for topic in topics]
             await redis.execute(
-                'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
+                "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
             )
         return topics
 
 
-@query.field('get_author_follows_authors')
-async def get_author_follows_authors(_, _info, slug='', user=None, author_id=None):
+@query.field("get_author_follows_authors")
+async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None):
     with local_session() as session:
         if user or slug:
             author_id_result = (
@@ -250,9 +255,9 @@ async def get_author_follows_authors(_, _info, slug='', user=None, author_id=Non
             )
            author_id = author_id_result[0] if author_id_result else None
         if author_id:
-            logger.debug(f'getting {author_id} follows authors')
-            rkey = f'author:{author_id}:follows-authors'
-            cached = await redis.execute('GET', rkey)
+            logger.debug(f"getting {author_id} follows authors")
+            rkey = f"author:{author_id}:follows-authors"
+            cached = await redis.execute("GET", rkey)
             authors = []
             if isinstance(cached, str):
                 authors = json.loads(cached)
@@ -260,14 +265,14 @@ async def get_author_follows_authors(_, _info, slug='', user=None, author_id=Non
                 authors = author_follows_authors(author_id)
                 prepared = [author.dict() for author in authors]
                 await redis.execute(
-                    'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
+                    "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                 )
             return authors
         else:
-            raise ValueError('Author not found')
+            raise ValueError("Author not found")
 
 
-def create_author(user_id: str, slug: str, name: str = ''):
+def create_author(user_id: str, slug: str, name: str = ""):
     with local_session() as session:
         try:
             author = None
@@ -279,14 +284,14 @@ def create_author(user_id: str, slug: str, name: str = ''):
             new_author = Author(user=user_id, slug=slug, name=name)
             session.add(new_author)
             session.commit()
-            logger.info(f'author created by webhook {new_author.dict()}')
+            logger.info(f"author created by webhook {new_author.dict()}")
         except Exception as exc:
             logger.debug(exc)
 
 
-@query.field('get_author_followers')
+@query.field("get_author_followers")
 async def get_author_followers(_, _info, slug: str):
-    logger.debug(f'getting followers for @{slug}')
+    logger.debug(f"getting followers for @{slug}")
     try:
         author_alias = aliased(Author)
         author_query = select(author_alias).filter(author_alias.slug == slug)
@@ -294,9 +299,9 @@ async def get_author_followers(_, _info, slug: str):
         if result:
             [author] = result
             author_id = author.id
-            cached = await redis.execute('GET', f'author:{author_id}:followers')
+            cached = await redis.execute("GET", f"author:{author_id}:followers")
             if not cached:
-                author_follower_alias = aliased(AuthorFollower, name='af')
+                author_follower_alias = aliased(AuthorFollower, name="af")
                 q = select(Author).join(
                     author_follower_alias,
                     and_(
@@ -308,10 +313,10 @@ async def get_author_followers(_, _info, slug: str):
                 if isinstance(results, list):
                     for follower in results:
                         await cache_follower(follower, author)
-                    logger.debug(f'@{slug} cache updated with {len(results)} followers')
+                    logger.debug(f"@{slug} cache updated with {len(results)} followers")
                     return results
             else:
-                logger.debug(f'@{slug} got followers cached')
+                logger.debug(f"@{slug} got followers cached")
                 if isinstance(cached, str):
                     return json.loads(cached)
     except Exception as exc:
@@ -322,7 +327,7 @@ async def get_author_followers(_, _info, slug: str):
     return []
 
 
-@query.field('search_authors')
+@query.field("search_authors")
 async def search_authors(_, _info, what: str):
     q = search(select(Author), what)
     return get_with_stat(q)
@@ -6,10 +6,10 @@ from services.db import local_session
 from services.schema import mutation
 
 
-@mutation.field('accept_invite')
+@mutation.field("accept_invite")
 @login_required
 async def accept_invite(_, info, invite_id: int):
-    user_id = info.context['user_id']
+    user_id = info.context["user_id"]
 
     # Check if the user exists
     with local_session() as session:
@@ -30,19 +30,19 @@ async def accept_invite(_, info, invite_id: int):
                     session.delete(invite)
                     session.add(shout)
                     session.commit()
-                    return {'success': True, 'message': 'Invite accepted'}
+                    return {"success": True, "message": "Invite accepted"}
                 else:
-                    return {'error': 'Shout not found'}
+                    return {"error": "Shout not found"}
             else:
-                return {'error': 'Invalid invite or already accepted/rejected'}
+                return {"error": "Invalid invite or already accepted/rejected"}
         else:
-            return {'error': 'User not found'}
+            return {"error": "User not found"}
 
 
-@mutation.field('reject_invite')
+@mutation.field("reject_invite")
 @login_required
 async def reject_invite(_, info, invite_id: int):
-    user_id = info.context['user_id']
+    user_id = info.context["user_id"]
 
     # Check if the user exists
     with local_session() as session:
@@ -58,17 +58,17 @@ async def reject_invite(_, info, invite_id: int):
                 # Delete the invite
                 session.delete(invite)
                 session.commit()
-                return {'success': True, 'message': 'Invite rejected'}
+                return {"success": True, "message": "Invite rejected"}
             else:
-                return {'error': 'Invalid invite or already accepted/rejected'}
+                return {"error": "Invalid invite or already accepted/rejected"}
         else:
-            return {'error': 'User not found'}
+            return {"error": "User not found"}
 
 
-@mutation.field('create_invite')
+@mutation.field("create_invite")
 @login_required
-async def create_invite(_, info, slug: str = '', author_id: int = 0):
-    user_id = info.context['user_id']
+async def create_invite(_, info, slug: str = "", author_id: int = 0):
+    user_id = info.context["user_id"]
 
     # Check if the inviter is the owner of the shout
     with local_session() as session:
@@ -90,7 +90,7 @@ async def create_invite(_, info, slug: str = '', author_id: int = 0):
                     .first()
                 )
                 if existing_invite:
-                    return {'error': 'Invite already sent'}
+                    return {"error": "Invite already sent"}
 
                 # Create a new invite
                 new_invite = Invite(
@@ -102,17 +102,17 @@ async def create_invite(_, info, slug: str = '', author_id: int = 0):
                 session.add(new_invite)
                 session.commit()
 
-                return {'error': None, 'invite': new_invite}
+                return {"error": None, "invite": new_invite}
             else:
-                return {'error': 'Invalid author'}
+                return {"error": "Invalid author"}
         else:
-            return {'error': 'Access denied'}
+            return {"error": "Access denied"}
 
 
-@mutation.field('remove_author')
+@mutation.field("remove_author")
 @login_required
-async def remove_author(_, info, slug: str = '', author_id: int = 0):
-    user_id = info.context['user_id']
+async def remove_author(_, info, slug: str = "", author_id: int = 0):
+    user_id = info.context["user_id"]
     with local_session() as session:
         author = session.query(Author).filter(Author.user == user_id).first()
         if author:
@@ -124,13 +124,13 @@ async def remove_author(_, info, slug: str = '', author_id: int = 0):
                 ]
                 session.commit()
                 return {}
-    return {'error': 'Access denied'}
+    return {"error": "Access denied"}
 
 
-@mutation.field('remove_invite')
+@mutation.field("remove_invite")
 @login_required
 async def remove_invite(_, info, invite_id: int):
-    user_id = info.context['user_id']
+    user_id = info.context["user_id"]
 
     # Check if the user exists
     with local_session() as session:
@@ -148,6 +148,6 @@ async def remove_invite(_, info, invite_id: int):
                 session.commit()
                 return {}
             else:
-                return {'error': 'Invalid invite or already accepted/rejected'}
+                return {"error": "Invalid invite or already accepted/rejected"}
         else:
-            return {'error': 'Author not found'}
+            return {"error": "Author not found"}
@@ -14,12 +14,12 @@ def add_community_stat_columns(q):
     shout_community_aliased = aliased(ShoutCommunity)
 
     q = q.outerjoin(shout_community_aliased).add_columns(
-        func.count(distinct(shout_community_aliased.shout)).label('shouts_stat')
+        func.count(distinct(shout_community_aliased.shout)).label("shouts_stat")
     )
     q = q.outerjoin(
         community_followers, community_followers.author == Author.id
     ).add_columns(
-        func.count(distinct(community_followers.follower)).label('followers_stat')
+        func.count(distinct(community_followers.follower)).label("followers_stat")
     )
 
     q = q.group_by(Author.id)
@@ -32,8 +32,8 @@ def get_communities_from_query(q):
     with local_session() as session:
         for [c, shouts_stat, followers_stat] in session.execute(q):
             c.stat = {
-                'shouts': shouts_stat,
-                'followers': followers_stat,
+                "shouts": shouts_stat,
+                "followers": followers_stat,
                 # "commented": commented_stat,
             }
             ccc.append(c)
@@ -72,7 +72,7 @@ def community_unfollow(follower_id, slug):
     return False
 
 
-@query.field('get_communities_all')
+@query.field("get_communities_all")
 async def get_communities_all(_, _info):
     q = select(Author)
     q = add_community_stat_columns(q)
@@ -80,7 +80,7 @@ async def get_communities_all(_, _info):
     return get_communities_from_query(q)
 
 
-@query.field('get_community')
+@query.field("get_community")
 async def get_community(_, _info, slug: str):
     q = select(Community).where(Community.slug == slug)
     q = add_community_stat_columns(q)
@@ -19,13 +19,13 @@ from services.schema import mutation, query
 from services.search import search_service
 
 
-@query.field('get_my_shout')
+@query.field("get_my_shout")
 @login_required
 async def get_my_shout(_, info, shout_id: int):
     with local_session() as session:
-        user_id = info.context.get('user_id', '')
+        user_id = info.context.get("user_id", "")
         if not user_id:
-            return {'error': 'unauthorized', 'shout': None}
+            return {"error": "unauthorized", "shout": None}
         shout = (
             session.query(Shout)
             .filter(Shout.id == shout_id)
@ -34,23 +34,23 @@ async def get_my_shout(_, info, shout_id: int):
|
||||||
.first()
|
.first()
|
||||||
)
|
)
|
||||||
if not shout:
|
if not shout:
|
||||||
return {'error': 'no shout found', 'shout': None}
|
return {"error": "no shout found", "shout": None}
|
||||||
if not bool(shout.published_at):
|
if not bool(shout.published_at):
|
||||||
author = session.query(Author).filter(Author.user == user_id).first()
|
author = session.query(Author).filter(Author.user == user_id).first()
|
||||||
if not author:
|
if not author:
|
||||||
return {'error': 'no author found', 'shout': None}
|
return {"error": "no author found", "shout": None}
|
||||||
roles = info.context.get('roles', [])
|
roles = info.context.get("roles", [])
|
||||||
if 'editor' not in roles and not filter(
|
if "editor" not in roles and not filter(
|
||||||
lambda x: x.id == author.id, [x for x in shout.authors]
|
lambda x: x.id == author.id, [x for x in shout.authors]
|
||||||
):
|
):
|
||||||
return {'error': 'forbidden', 'shout': None}
|
return {"error": "forbidden", "shout": None}
|
||||||
return {'error': None, 'shout': shout}
|
return {"error": None, "shout": shout}
|
||||||
|
|
||||||
|
|
||||||
@query.field('get_shouts_drafts')
|
@query.field("get_shouts_drafts")
|
||||||
@login_required
|
@login_required
|
||||||
async def get_shouts_drafts(_, info):
|
async def get_shouts_drafts(_, info):
|
||||||
user_id = info.context.get('user_id')
|
user_id = info.context.get("user_id")
|
||||||
shouts = []
|
shouts = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
author = session.query(Author).filter(Author.user == user_id).first()
|
author = session.query(Author).filter(Author.user == user_id).first()
|
||||||
|
@ -67,44 +67,44 @@ async def get_shouts_drafts(_, info):
|
||||||
return shouts
|
return shouts
|
||||||
|
|
||||||
|
|
||||||
@mutation.field('create_shout')
|
@mutation.field("create_shout")
|
||||||
@login_required
|
@login_required
|
||||||
async def create_shout(_, info, inp):
|
async def create_shout(_, info, inp):
|
||||||
user_id = info.context.get('user_id')
|
user_id = info.context.get("user_id")
|
||||||
if user_id:
|
if user_id:
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
author = session.query(Author).filter(Author.user == user_id).first()
|
author = session.query(Author).filter(Author.user == user_id).first()
|
||||||
if isinstance(author, Author):
|
if isinstance(author, Author):
|
||||||
current_time = int(time.time())
|
current_time = int(time.time())
|
||||||
slug = inp.get('slug') or f'draft-{current_time}'
|
slug = inp.get("slug") or f"draft-{current_time}"
|
||||||
shout_dict = {
|
shout_dict = {
|
||||||
'title': inp.get('title', ''),
|
"title": inp.get("title", ""),
|
||||||
'subtitle': inp.get('subtitle', ''),
|
"subtitle": inp.get("subtitle", ""),
|
||||||
'lead': inp.get('lead', ''),
|
"lead": inp.get("lead", ""),
|
||||||
'description': inp.get('description', ''),
|
"description": inp.get("description", ""),
|
||||||
'body': inp.get('body', ''),
|
"body": inp.get("body", ""),
|
||||||
'layout': inp.get('layout', 'article'),
|
"layout": inp.get("layout", "article"),
|
||||||
'created_by': author.id,
|
"created_by": author.id,
|
||||||
'authors': [],
|
"authors": [],
|
||||||
'slug': slug,
|
"slug": slug,
|
||||||
'topics': inp.get('topics', []),
|
"topics": inp.get("topics", []),
|
||||||
'published_at': None,
|
"published_at": None,
|
||||||
'created_at': current_time, # Set created_at as Unix timestamp
|
"created_at": current_time, # Set created_at as Unix timestamp
|
||||||
}
|
}
|
||||||
same_slug_shout = (
|
same_slug_shout = (
|
||||||
session.query(Shout)
|
session.query(Shout)
|
||||||
.filter(Shout.slug == shout_dict.get('slug'))
|
.filter(Shout.slug == shout_dict.get("slug"))
|
||||||
.first()
|
.first()
|
||||||
)
|
)
|
||||||
c = 1
|
c = 1
|
||||||
while same_slug_shout is not None:
|
while same_slug_shout is not None:
|
||||||
same_slug_shout = (
|
same_slug_shout = (
|
||||||
session.query(Shout)
|
session.query(Shout)
|
||||||
.filter(Shout.slug == shout_dict.get('slug'))
|
.filter(Shout.slug == shout_dict.get("slug"))
|
||||||
.first()
|
.first()
|
||||||
)
|
)
|
||||||
c += 1
|
c += 1
|
||||||
shout_dict['slug'] += f'-{c}'
|
shout_dict["slug"] += f"-{c}"
|
||||||
new_shout = Shout(**shout_dict)
|
new_shout = Shout(**shout_dict)
|
||||||
session.add(new_shout)
|
session.add(new_shout)
|
||||||
session.commit()
|
session.commit()
|
||||||
|
@@ -117,7 +117,7 @@ async def create_shout(_, info, inp):

topics = (
topics = (
session.query(Topic)
session.query(Topic)
.filter(Topic.slug.in_(inp.get('topics', [])))
.filter(Topic.slug.in_(inp.get("topics", [])))
.all()
.all()
)
)
for topic in topics:
for topic in topics:
@@ -131,9 +131,9 @@ async def create_shout(_, info, inp):
# notifier
# notifier
# await notify_shout(shout_dict, 'create')
# await notify_shout(shout_dict, 'create')

return {'shout': shout}
return {"shout": shout}

return {'error': 'cant create shout' if user_id else 'unauthorized'}
return {"error": "cant create shout" if user_id else "unauthorized"}


def patch_main_topic(session, main_topic, shout):
def patch_main_topic(session, main_topic, shout):
@@ -170,16 +170,16 @@ def patch_main_topic(session, main_topic, shout):
and new_main_topic
and new_main_topic
and old_main_topic is not new_main_topic
and old_main_topic is not new_main_topic
):
):
ShoutTopic.update(old_main_topic, {'main': False})
ShoutTopic.update(old_main_topic, {"main": False})
session.add(old_main_topic)
session.add(old_main_topic)

ShoutTopic.update(new_main_topic, {'main': True})
ShoutTopic.update(new_main_topic, {"main": True})
session.add(new_main_topic)
session.add(new_main_topic)


def patch_topics(session, shout, topics_input):
def patch_topics(session, shout, topics_input):
new_topics_to_link = [
new_topics_to_link = [
Topic(**new_topic) for new_topic in topics_input if new_topic['id'] < 0
Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0
]
]
if new_topics_to_link:
if new_topics_to_link:
session.add_all(new_topics_to_link)
session.add_all(new_topics_to_link)
@@ -190,12 +190,12 @@ def patch_topics(session, shout, topics_input):
session.add(created_unlinked_topic)
session.add(created_unlinked_topic)

existing_topics_input = [
existing_topics_input = [
topic_input for topic_input in topics_input if topic_input.get('id', 0) > 0
topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
]
]
existing_topic_to_link_ids = [
existing_topic_to_link_ids = [
existing_topic_input['id']
existing_topic_input["id"]
for existing_topic_input in existing_topics_input
for existing_topic_input in existing_topics_input
if existing_topic_input['id'] not in [topic.id for topic in shout.topics]
if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
]
]

for existing_topic_to_link_id in existing_topic_to_link_ids:
for existing_topic_to_link_id in existing_topic_to_link_ids:
@@ -207,7 +207,7 @@ def patch_topics(session, shout, topics_input):
topic_to_unlink_ids = [
topic_to_unlink_ids = [
topic.id
topic.id
for topic in shout.topics
for topic in shout.topics
if topic.id not in [topic_input['id'] for topic_input in existing_topics_input]
if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
]
]

session.query(ShoutTopic).filter(
session.query(ShoutTopic).filter(
@@ -215,25 +215,25 @@ def patch_topics(session, shout, topics_input):
).delete(synchronize_session=False)
).delete(synchronize_session=False)


@mutation.field('update_shout')
@mutation.field("update_shout")
@login_required
@login_required
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
roles = info.context.get('roles', [])
roles = info.context.get("roles", [])
shout_input = shout_input or {}
shout_input = shout_input or {}
current_time = int(time.time())
current_time = int(time.time())
shout_id = shout_id or shout_input.get('id', shout_id)
shout_id = shout_id or shout_input.get("id", shout_id)
slug = shout_input.get('slug')
slug = shout_input.get("slug")
if not user_id:
if not user_id:
return {'error': 'unauthorized'}
return {"error": "unauthorized"}
try:
try:
with local_session() as session:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
if author:
if author:
logger.info(f'author for shout#{shout_id} detected {author.dict()}')
logger.info(f"author for shout#{shout_id} detected {author.dict()}")
shout_by_id = session.query(Shout).filter(Shout.id == shout_id).first()
shout_by_id = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout_by_id:
if not shout_by_id:
return {'error': 'shout not found'}
return {"error": "shout not found"}
if slug != shout_by_id.slug:
if slug != shout_by_id.slug:
same_slug_shout = (
same_slug_shout = (
session.query(Shout).filter(Shout.slug == slug).first()
session.query(Shout).filter(Shout.slug == slug).first()
@@ -241,31 +241,31 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
c = 1
c = 1
while same_slug_shout is not None:
while same_slug_shout is not None:
c += 1
c += 1
slug = f'{slug}-{c}'
slug = f"{slug}-{c}"
same_slug_shout = (
same_slug_shout = (
session.query(Shout).filter(Shout.slug == slug).first()
session.query(Shout).filter(Shout.slug == slug).first()
)
)
shout_input['slug'] = slug
shout_input["slug"] = slug

if (
if (
filter(
filter(
lambda x: x.id == author.id, [x for x in shout_by_id.authors]
lambda x: x.id == author.id, [x for x in shout_by_id.authors]
)
)
or 'editor' in roles
or "editor" in roles
):
):
# topics patch
# topics patch
topics_input = shout_input.get('topics')
topics_input = shout_input.get("topics")
if topics_input:
if topics_input:
patch_topics(session, shout_by_id, topics_input)
patch_topics(session, shout_by_id, topics_input)
del shout_input['topics']
del shout_input["topics"]

# main topic
# main topic
main_topic = shout_input.get('main_topic')
main_topic = shout_input.get("main_topic")
if main_topic:
if main_topic:
patch_main_topic(session, main_topic, shout_by_id)
patch_main_topic(session, main_topic, shout_by_id)

shout_input['updated_at'] = current_time
shout_input["updated_at"] = current_time
shout_input['published_at'] = current_time if publish else None
shout_input["published_at"] = current_time if publish else None
Shout.update(shout_by_id, shout_input)
Shout.update(shout_by_id, shout_input)
session.add(shout_by_id)
session.add(shout_by_id)
session.commit()
session.commit()
@@ -273,52 +273,52 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
shout_dict = shout_by_id.dict()
shout_dict = shout_by_id.dict()

if not publish:
if not publish:
await notify_shout(shout_dict, 'update')
await notify_shout(shout_dict, "update")
else:
else:
await notify_shout(shout_dict, 'published')
await notify_shout(shout_dict, "published")
# search service indexing
# search service indexing
search_service.index(shout_by_id)
search_service.index(shout_by_id)

return {'shout': shout_dict, 'error': None}
return {"shout": shout_dict, "error": None}
else:
else:
return {'error': 'access denied', 'shout': None}
return {"error": "access denied", "shout": None}

except Exception as exc:
except Exception as exc:
import traceback
import traceback

traceback.print_exc()
traceback.print_exc()
logger.error(exc)
logger.error(exc)
logger.error(f' cannot update with data: {shout_input}')
logger.error(f" cannot update with data: {shout_input}")

return {'error': 'cant update shout'}
return {"error": "cant update shout"}


@mutation.field('delete_shout')
@mutation.field("delete_shout")
@login_required
@login_required
async def delete_shout(_, info, shout_id: int):
async def delete_shout(_, info, shout_id: int):
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
roles = info.context.get('roles')
roles = info.context.get("roles")
if user_id:
if user_id:
with local_session() as session:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
if not shout:
return {'error': 'invalid shout id'}
return {"error": "invalid shout id"}
if author and shout:
if author and shout:
# NOTE: only owner and editor can mark the shout as deleted
# NOTE: only owner and editor can mark the shout as deleted
if shout.created_by == author.id or 'editor' in roles:
if shout.created_by == author.id or "editor" in roles:
for author_id in shout.authors:
for author_id in shout.authors:
reactions_unfollow(author_id, shout_id)
reactions_unfollow(author_id, shout_id)

shout_dict = shout.dict()
shout_dict = shout.dict()
shout_dict['deleted_at'] = int(time.time())
shout_dict["deleted_at"] = int(time.time())
Shout.update(shout, shout_dict)
Shout.update(shout, shout_dict)
session.add(shout)
session.add(shout)
session.commit()
session.commit()
await notify_shout(shout_dict, 'delete')
await notify_shout(shout_dict, "delete")
return {'error': None}
return {"error": None}
else:
else:
return {'error': 'access denied'}
return {"error": "access denied"}


def handle_proposing(session, r, shout):
def handle_proposing(session, r, shout):
@@ -350,7 +350,7 @@ def handle_proposing(session, r, shout):
if proposal.quote:
if proposal.quote:
proposal_diff = get_diff(shout.body, proposal.quote)
proposal_diff = get_diff(shout.body, proposal.quote)
proposal_dict = proposal.dict()
proposal_dict = proposal.dict()
proposal_dict['quote'] = apply_diff(
proposal_dict["quote"] = apply_diff(
replied_reaction.quote, proposal_diff
replied_reaction.quote, proposal_diff
)
)
Reaction.update(proposal, proposal_dict)
Reaction.update(proposal, proposal_dict)
@@ -358,7 +358,7 @@ def handle_proposing(session, r, shout):

# patch shout's body
# patch shout's body
shout_dict = shout.dict()
shout_dict = shout.dict()
shout_dict['body'] = replied_reaction.quote
shout_dict["body"] = replied_reaction.quote
Shout.update(shout, shout_dict)
Shout.update(shout, shout_dict)
session.add(shout)
session.add(shout)
session.commit()
session.commit()

@@ -21,111 +21,111 @@ from services.rediscache import redis
from services.schema import mutation, query
from services.schema import mutation, query


@mutation.field('follow')
@mutation.field("follow")
@login_required
@login_required
async def follow(_, info, what, slug):
async def follow(_, info, what, slug):
follows = []
follows = []
error = None
error = None
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
if not user_id:
if not user_id:
return {'error': 'unauthorized'}
return {"error": "unauthorized"}

follower = local_session().query(Author).filter(Author.user == user_id).first()
follower = local_session().query(Author).filter(Author.user == user_id).first()
if not follower:
if not follower:
return {'error': 'cant find follower'}
return {"error": "cant find follower"}
if what == 'AUTHOR':
if what == "AUTHOR":
error = author_follow(follower.id, slug)
error = author_follow(follower.id, slug)
if not error:
if not error:
author = local_session().query(Author).where(Author.slug == slug).first()
author = local_session().query(Author).where(Author.slug == slug).first()
if author:
if author:
await notify_follower(follower.dict(), author.id, 'follow')
await notify_follower(follower.dict(), author.id, "follow")

elif what == 'TOPIC':
elif what == "TOPIC":
error = topic_follow(follower.id, slug)
error = topic_follow(follower.id, slug)

elif what == 'COMMUNITY':
elif what == "COMMUNITY":
# FIXME: when more communities
# FIXME: when more communities
follows = local_session().execute(select(Community))
follows = local_session().execute(select(Community))

elif what == 'SHOUT':
elif what == "SHOUT":
error = reactions_follow(follower.id, slug)
error = reactions_follow(follower.id, slug)

if error:
if error:
return {'error': error}
return {"error": error}

entity = what.lower()
entity = what.lower()
follows_str = await redis.execute('GET', f'author:{follower.id}:follows-{entity}s')
follows_str = await redis.execute("GET", f"author:{follower.id}:follows-{entity}s")
if follows_str:
if follows_str:
follows = json.loads(follows_str)
follows = json.loads(follows_str)
return { f'{entity}s': follows }
return {f"{entity}s": follows}


@mutation.field('unfollow')
@mutation.field("unfollow")
@login_required
@login_required
async def unfollow(_, info, what, slug):
async def unfollow(_, info, what, slug):
follows = []
follows = []
error = None
error = None
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
if not user_id:
if not user_id:
return {'error': 'unauthorized'}
return {"error": "unauthorized"}
follower = local_session().query(Author).filter(Author.user == user_id).first()
follower = local_session().query(Author).filter(Author.user == user_id).first()
if not follower:
if not follower:
return {'error': 'follower profile is not found'}
return {"error": "follower profile is not found"}
if what == 'AUTHOR':
if what == "AUTHOR":
error = author_unfollow(follower.id, slug)
error = author_unfollow(follower.id, slug)
# NOTE: after triggers should update cached stats
# NOTE: after triggers should update cached stats
if not error:
if not error:
logger.info(f'@{follower.slug} unfollowed @{slug}')
logger.info(f"@{follower.slug} unfollowed @{slug}")
author = local_session().query(Author).where(Author.slug == slug).first()
author = local_session().query(Author).where(Author.slug == slug).first()
if author:
if author:
await notify_follower(follower.dict(), author.id, 'unfollow')
await notify_follower(follower.dict(), author.id, "unfollow")

elif what == 'TOPIC':
elif what == "TOPIC":
error = topic_unfollow(follower.id, slug)
error = topic_unfollow(follower.id, slug)

elif what == 'COMMUNITY':
elif what == "COMMUNITY":
follows = local_session().execute(select(Community))
follows = local_session().execute(select(Community))

elif what == 'SHOUT':
elif what == "SHOUT":
error = reactions_unfollow(follower.id, slug)
error = reactions_unfollow(follower.id, slug)

entity = what.lower()
entity = what.lower()
follows_str = await redis.execute('GET', f'author:{follower.id}:follows-{entity}s')
follows_str = await redis.execute("GET", f"author:{follower.id}:follows-{entity}s")
if follows_str:
if follows_str:
follows = json.loads(follows_str)
follows = json.loads(follows_str)
return {'error': error, f'{entity}s': follows}
return {"error": error, f"{entity}s": follows}


async def get_follows_by_user_id(user_id: str):
async def get_follows_by_user_id(user_id: str):
if not user_id:
if not user_id:
return {'error': 'unauthorized'}
return {"error": "unauthorized"}
author = await redis.execute('GET', f'user:{user_id}')
author = await redis.execute("GET", f"user:{user_id}")
if isinstance(author, str):
if isinstance(author, str):
author = json.loads(author)
author = json.loads(author)
if not author:
if not author:
with local_session() as session:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
if not author:
return {'error': 'cant find author'}
return {"error": "cant find author"}
author = author.dict()
author = author.dict()
last_seen = author.get('last_seen', 0) if isinstance(author, dict) else 0
last_seen = author.get("last_seen", 0) if isinstance(author, dict) else 0
follows = DEFAULT_FOLLOWS
follows = DEFAULT_FOLLOWS
day_old = int(time.time()) - last_seen > 24 * 60 * 60
day_old = int(time.time()) - last_seen > 24 * 60 * 60
if day_old:
if day_old:
author_id = json.loads(str(author)).get('id')
author_id = json.loads(str(author)).get("id")
if author_id:
if author_id:
topics = author_follows_topics(author_id)
topics = author_follows_topics(author_id)
authors = author_follows_authors(author_id)
authors = author_follows_authors(author_id)
follows = {
follows = {
'topics': topics,
"topics": topics,
'authors': authors,
"authors": authors,
'communities': [
"communities": [
{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}
],
],
}
}
else:
else:
logger.debug(f'getting follows for {user_id} from redis')
logger.debug(f"getting follows for {user_id} from redis")
res = await redis.execute('GET', f'user:{user_id}:follows')
res = await redis.execute("GET", f"user:{user_id}:follows")
if isinstance(res, str):
if isinstance(res, str):
follows = json.loads(res)
follows = json.loads(res)
return follows
return follows
@@ -139,7 +139,7 @@ def topic_follow(follower_id, slug):
return None
return None
except UniqueViolation as error:
except UniqueViolation as error:
logger.warn(error)
logger.warn(error)
return 'already followed'
return "already followed"
except Exception as exc:
except Exception as exc:
logger.error(exc)
logger.error(exc)
return exc
return exc
@@ -160,7 +160,7 @@ def topic_unfollow(follower_id, slug):
return None
return None
except UniqueViolation as error:
except UniqueViolation as error:
logger.warn(error)
logger.warn(error)
return 'already unfollowed'
return "already unfollowed"
except Exception as ex:
except Exception as ex:
logger.debug(ex)
logger.debug(ex)
return ex
return ex
@@ -191,7 +191,7 @@ def reactions_follow(author_id, shout_id, auto=False):
return None
return None
except UniqueViolation as error:
except UniqueViolation as error:
logger.warn(error)
logger.warn(error)
return 'already followed'
return "already followed"
except Exception as exc:
except Exception as exc:
return exc
return exc

@@ -218,7 +218,7 @@ def reactions_unfollow(author_id, shout_id: int):
return None
return None
except UniqueViolation as error:
except UniqueViolation as error:
logger.warn(error)
logger.warn(error)
return 'already unfollowed'
return "already unfollowed"
except Exception as ex:
except Exception as ex:
import traceback
import traceback

@@ -237,7 +237,7 @@ def author_follow(follower_id, slug):
return None
return None
except UniqueViolation as error:
except UniqueViolation as error:
logger.warn(error)
logger.warn(error)
return 'already followed'
return "already followed"
except Exception as exc:
except Exception as exc:
import traceback
import traceback

@@ -263,12 +263,12 @@ def author_unfollow(follower_id, slug):
return None
return None
except UniqueViolation as error:
except UniqueViolation as error:
logger.warn(error)
logger.warn(error)
return 'already unfollowed'
return "already unfollowed"
except Exception as exc:
except Exception as exc:
return exc
return exc


@query.field('get_topic_followers')
@query.field("get_topic_followers")
async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author]:
async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author]:
q = select(Author)
q = select(Author)
q = (
q = (
@@ -279,9 +279,9 @@ async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author
return get_with_stat(q)
return get_with_stat(q)


@query.field('get_shout_followers')
@query.field("get_shout_followers")
def get_shout_followers(
def get_shout_followers(
_, _info, slug: str = '', shout_id: int | None = None
_, _info, slug: str = "", shout_id: int | None = None
) -> List[Author]:
) -> List[Author]:
followers = []
followers = []
with local_session() as session:
with local_session() as session:

@@ -8,8 +8,12 @@ from sqlalchemy.orm import aliased
from sqlalchemy.sql import not_
from sqlalchemy.sql import not_

from orm.author import Author
from orm.author import Author
from orm.notification import (Notification, NotificationAction,
from orm.notification import (
NotificationEntity, NotificationSeen)
Notification,
NotificationAction,
NotificationEntity,
NotificationSeen,
)
from orm.shout import Shout
from orm.shout import Shout
from services.auth import login_required
from services.auth import login_required
from services.db import local_session
from services.db import local_session
@@ -21,7 +25,7 @@ def query_notifications(
author_id: int, after: int = 0
author_id: int, after: int = 0
) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
notification_seen_alias = aliased(NotificationSeen)
notification_seen_alias = aliased(NotificationSeen)
q = select(Notification, notification_seen_alias.viewer.label('seen')).outerjoin(
q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
NotificationSeen,
NotificationSeen,
and_(
and_(
NotificationSeen.viewer == author_id,
NotificationSeen.viewer == author_id,
@@ -65,18 +69,18 @@ def query_notifications(


def group_notification(
def group_notification(
thread, authors=None, shout=None, reactions=None, entity='follower', action='follow'
thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"
):
):
reactions = reactions or []
reactions = reactions or []
authors = authors or []
authors = authors or []
return {
return {
'thread': thread,
"thread": thread,
'authors': authors,
"authors": authors,
'updated_at': int(time.time()),
"updated_at": int(time.time()),
'shout': shout,
"shout": shout,
'reactions': reactions,
"reactions": reactions,
'entity': entity,
"entity": entity,
'action': action,
"action": action,
}
}


@@ -121,9 +125,9 @@ def get_notifications_grouped(

if str(notification.entity) == NotificationEntity.SHOUT.value:
if str(notification.entity) == NotificationEntity.SHOUT.value:
shout = payload
shout = payload
shout_id = shout.get('id')
shout_id = shout.get("id")
author_id = shout.get('created_by')
author_id = shout.get("created_by")
thread_id = f'shout-{shout_id}'
thread_id = f"shout-{shout_id}"
with local_session() as session:
with local_session() as session:
author = session.query(Author).filter(Author.id == author_id).first()
author = session.query(Author).filter(Author.id == author_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
@@ -143,9 +147,9 @@ def get_notifications_grouped(
elif str(notification.entity) == NotificationEntity.REACTION.value:
elif str(notification.entity) == NotificationEntity.REACTION.value:
reaction = payload
reaction = payload
if not isinstance(shout, dict):
if not isinstance(shout, dict):
raise ValueError('reaction data is not consistent')
raise ValueError("reaction data is not consistent")
shout_id = shout.get('shout')
shout_id = shout.get("shout")
author_id = shout.get('created_by', 0)
author_id = shout.get("created_by", 0)
if shout_id and author_id:
if shout_id and author_id:
with local_session() as session:
with local_session() as session:
author = (
author = (
@@ -155,18 +159,18 @@ def get_notifications_grouped(
if shout and author:
if shout and author:
author = author.dict()
author = author.dict()
shout = shout.dict()
shout = shout.dict()
reply_id = reaction.get('reply_to')
reply_id = reaction.get("reply_to")
thread_id = f'shout-{shout_id}'
thread_id = f"shout-{shout_id}"
if reply_id and reaction.get('kind', '').lower() == 'comment':
if reply_id and reaction.get("kind", "").lower() == "comment":
thread_id += f'{reply_id}'
thread_id += f"{reply_id}"
existing_group = groups_by_thread.get(thread_id)
existing_group = groups_by_thread.get(thread_id)
if existing_group:
if existing_group:
existing_group['seen'] = False
existing_group["seen"] = False
existing_group['authors'].append(author_id)
existing_group["authors"].append(author_id)
existing_group['reactions'] = (
existing_group["reactions"] = (
existing_group['reactions'] or []
existing_group["reactions"] or []
)
)
existing_group['reactions'].append(reaction)
existing_group["reactions"].append(reaction)
groups_by_thread[thread_id] = existing_group
groups_by_thread[thread_id] = existing_group
else:
else:
group = group_notification(
group = group_notification(
@@ -181,18 +185,18 @@ def get_notifications_grouped(
groups_by_thread[thread_id] = group
groups_by_thread[thread_id] = group
groups_amount += 1
groups_amount += 1

elif str(notification.entity) == 'follower':
elif str(notification.entity) == "follower":
thread_id = 'followers'
thread_id = "followers"
follower = json.loads(payload)
follower = json.loads(payload)
group = groups_by_thread.get(thread_id)
group = groups_by_thread.get(thread_id)
if group:
if group:
if str(notification.action) == 'follow':
if str(notification.action) == "follow":
group['authors'].append(follower)
group["authors"].append(follower)
elif str(notification.action) == 'unfollow':
elif str(notification.action) == "unfollow":
follower_id = follower.get('id')
follower_id = follower.get("id")
for author in group['authors']:
for author in group["authors"]:
if author.get('id') == follower_id:
if author.get("id") == follower_id:
group['authors'].remove(author)
group["authors"].remove(author)
break
break
else:
else:
group = group_notification(
group = group_notification(
@@ -206,10 +210,10 @@ def get_notifications_grouped(
return groups_by_thread, unread, total
return groups_by_thread, unread, total


@query.field('load_notifications')
@query.field("load_notifications")
@login_required
@login_required
async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
author_id = info.context.get('author_id')
author_id = info.context.get("author_id")
error = None
error = None
total = 0
total = 0
unread = 0
unread = 0
@@ -224,17 +228,17 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
error = e
error = e
logger.error(e)
logger.error(e)
return {
return {
'notifications': notifications,
"notifications": notifications,
'total': total,
"total": total,
'unread': unread,
"unread": unread,
'error': error,
"error": error,
}
}


@mutation.field('notification_mark_seen')
@mutation.field("notification_mark_seen")
@login_required
@login_required
async def notification_mark_seen(_, info, notification_id: int):
async def notification_mark_seen(_, info, notification_id: int):
author_id = info.context.get('author_id')
author_id = info.context.get("author_id")
if author_id:
if author_id:
with local_session() as session:
with local_session() as session:
try:
try:
@@ -243,18 +247,18 @@ async def notification_mark_seen(_, info, notification_id: int):
session.commit()
session.commit()
except SQLAlchemyError as e:
except SQLAlchemyError as e:
session.rollback()
session.rollback()
logger.error(f'seen mutation failed: {e}')
logger.error(f"seen mutation failed: {e}")
return {'error': 'cant mark as read'}
return {"error": "cant mark as read"}
return {'error': None}
return {"error": None}


@mutation.field('notifications_seen_after')
@mutation.field("notifications_seen_after")
@login_required
@login_required
async def notifications_seen_after(_, info, after: int):
async def notifications_seen_after(_, info, after: int):
# TODO: use latest loaded notification_id as input offset parameter
# TODO: use latest loaded notification_id as input offset parameter
error = None
error = None
try:
try:
author_id = info.context.get('author_id')
author_id = info.context.get("author_id")
if author_id:
if author_id:
with local_session() as session:
with local_session() as session:
nnn = (
nnn = (
@@ -271,24 +275,24 @@ async def notifications_seen_after(_, info, after: int):
session.rollback()
session.rollback()
except Exception as e:
except Exception as e:
print(e)
print(e)
error = 'cant mark as read'
error = "cant mark as read"
return {'error': error}
return {"error": error}


@mutation.field('notifications_seen_thread')
@mutation.field("notifications_seen_thread")
@login_required
@login_required
async def notifications_seen_thread(_, info, thread: str, after: int):
async def notifications_seen_thread(_, info, thread: str, after: int):
error = None
error = None
author_id = info.context.get('author_id')
author_id = info.context.get("author_id")
if author_id:
if author_id:
[shout_id, reply_to_id] = thread.split(':')
[shout_id, reply_to_id] = thread.split(":")
with local_session() as session:
with local_session() as session:
# TODO: handle new follower and new shout notifications
# TODO: handle new follower and new shout notifications
new_reaction_notifications = (
new_reaction_notifications = (
session.query(Notification)
session.query(Notification)
.filter(
.filter(
Notification.action == 'create',
Notification.action == "create",
Notification.entity == 'reaction',
Notification.entity == "reaction",
Notification.created_at > after,
Notification.created_at > after,
)
)
.all()
.all()
@@ -296,8 +300,8 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
removed_reaction_notifications = (
removed_reaction_notifications = (
session.query(Notification)
session.query(Notification)
.filter(
.filter(
Notification.action == 'delete',
Notification.action == "delete",
Notification.entity == 'reaction',
Notification.entity == "reaction",
Notification.created_at > after,
Notification.created_at > after,
)
)
.all()
.all()
@@ -305,15 +309,15 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
exclude = set()
exclude = set()
for nr in removed_reaction_notifications:
for nr in removed_reaction_notifications:
reaction = json.loads(str(nr.payload))
reaction = json.loads(str(nr.payload))
reaction_id = reaction.get('id')
reaction_id = reaction.get("id")
exclude.add(reaction_id)
exclude.add(reaction_id)
for n in new_reaction_notifications:
for n in new_reaction_notifications:
reaction = json.loads(str(n.payload))
reaction = json.loads(str(n.payload))
reaction_id = reaction.get('id')
reaction_id = reaction.get("id")
if (
if (
reaction_id not in exclude
reaction_id not in exclude
and reaction.get('shout') == shout_id
and reaction.get("shout") == shout_id
and reaction.get('reply_to') == reply_to_id
and reaction.get("reply_to") == reply_to_id
):
):
try:
try:
ns = NotificationSeen(notification=n.id, viewer=author_id)
ns = NotificationSeen(notification=n.id, viewer=author_id)
@@ -323,5 +327,5 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
logger.warn(e)
logger.warn(e)
session.rollback()
session.rollback()
else:
else:
error = 'You are not logged in'
error = "You are not logged in"
return {'error': error}
return {"error": error}

@@ -9,10 +9,10 @@ from services.db import local_session
from services.schema import mutation
from services.schema import mutation


@mutation.field('rate_author')
@mutation.field("rate_author")
@login_required
@login_required
async def rate_author(_, info, rated_slug, value):
async def rate_author(_, info, rated_slug, value):
user_id = info.context['user_id']
user_id = info.context["user_id"]

with local_session() as session:
with local_session() as session:
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
@@ -41,7 +41,7 @@ async def rate_author(_, info, rated_slug, value):
session.add(rating)
session.add(rating)
session.commit()
session.commit()
except Exception as err:
except Exception as err:
return {'error': err}
return {"error": err}
return {}
return {}


@@ -129,7 +129,7 @@ def get_author_rating_shouts(session, author: Author) -> int:
)
)
),
),
0,
0,
).label('shouts_rating')
).label("shouts_rating")
)
)
.select_from(Reaction)
.select_from(Reaction)
.outerjoin(Shout, Shout.authors.any(id=author.id))
.outerjoin(Shout, Shout.authors.any(id=author.id))
@@ -159,7 +159,7 @@ def get_author_rating_comments(session, author: Author) -> int:
)
)
),
),
0,
0,
).label('shouts_rating')
).label("shouts_rating")
)
)
.select_from(Reaction)
.select_from(Reaction)
.outerjoin(
.outerjoin(
@@ -185,7 +185,7 @@ def add_author_rating_columns(q, group_list):
# old karma
# old karma
q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
q = q.add_columns(
q = q.add_columns(
func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label('rating')
func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating")
)
)

# by shouts rating
# by shouts rating
@@ -201,7 +201,7 @@ def add_author_rating_columns(q, group_list):
else_=0,
else_=0,
)
)
)
)
).label('shouts_rating'),
).label("shouts_rating"),
)
)
.select_from(shout_reaction)
.select_from(shout_reaction)
.outerjoin(Shout, Shout.authors.any(id=Author.id))
.outerjoin(Shout, Shout.authors.any(id=Author.id))
@@ -235,7 +235,7 @@ def add_author_rating_columns(q, group_list):
else_=0,
else_=0,
)
)
)
)
).label('comments_rating'),
).label("comments_rating"),
)
)
.select_from(reaction_2)
.select_from(reaction_2)
.outerjoin(
.outerjoin(

@@ -22,22 +22,22 @@ from services.viewed import ViewedStorage

def add_reaction_stat_columns(q, aliased_reaction):
def add_reaction_stat_columns(q, aliased_reaction):
q = q.outerjoin(aliased_reaction).add_columns(
q = q.outerjoin(aliased_reaction).add_columns(
func.sum(aliased_reaction.id).label('reacted_stat'),
func.sum(aliased_reaction.id).label("reacted_stat"),
func.sum(
func.sum(
case((aliased_reaction.kind == str(ReactionKind.COMMENT.value), 1), else_=0)
case((aliased_reaction.kind == str(ReactionKind.COMMENT.value), 1), else_=0)
).label('comments_stat'),
).label("comments_stat"),
func.sum(
func.sum(
case((aliased_reaction.kind == str(ReactionKind.LIKE.value), 1), else_=0)
case((aliased_reaction.kind == str(ReactionKind.LIKE.value), 1), else_=0)
).label('likes_stat'),
).label("likes_stat"),
func.sum(
func.sum(
case((aliased_reaction.kind == str(ReactionKind.DISLIKE.value), 1), else_=0)
case((aliased_reaction.kind == str(ReactionKind.DISLIKE.value), 1), else_=0)
).label('dislikes_stat'),
).label("dislikes_stat"),
func.max(
func.max(
case(
case(
(aliased_reaction.kind != str(ReactionKind.COMMENT.value), None),
(aliased_reaction.kind != str(ReactionKind.COMMENT.value), None),
else_=aliased_reaction.created_at,
else_=aliased_reaction.created_at,
)
)
).label('last_comment_stat'),
).label("last_comment_stat"),
)
)

return q
return q
@@ -101,7 +101,7 @@ def check_to_unfeature(session, rejecter_id, reaction):
async def set_featured(session, shout_id):
async def set_featured(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s = session.query(Shout).where(Shout.id == shout_id).first()
s.featured_at = int(time.time())
s.featured_at = int(time.time())
Shout.update(s, {'featured_at': int(time.time())})
Shout.update(s, {"featured_at": int(time.time())})
author = session.query(Author).filter(Author.id == s.created_by).first()
author = session.query(Author).filter(Author.id == s.created_by).first()
if author:
if author:
await add_user_role(str(author.user))
await add_user_role(str(author.user))
@@ -111,7 +111,7 @@ async def set_featured(session, shout_id):

def set_unfeatured(session, shout_id):
def set_unfeatured(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s = session.query(Shout).where(Shout.id == shout_id).first()
Shout.update(s, {'featured_at': None})
Shout.update(s, {"featured_at": None})
session.add(s)
session.add(s)
session.commit()
session.commit()

@@ -128,7 +128,7 @@ async def _create_reaction(session, shout, author, reaction):

# collaborative editing
# collaborative editing
if (
if (
rdict.get('reply_to')
rdict.get("reply_to")
and r.kind in PROPOSAL_REACTIONS
and r.kind in PROPOSAL_REACTIONS
and author.id in shout.authors
and author.id in shout.authors
):
):
@@ -146,7 +146,7 @@ async def _create_reaction(session, shout, author, reaction):
if r.kind == ReactionKind.LIKE.value:
if r.kind == ReactionKind.LIKE.value:
try:
try:
# reactions auto-following
# reactions auto-following
reactions_follow(author.id, reaction['shout'], True)
reactions_follow(author.id, reaction["shout"], True)
except Exception:
except Exception:
pass
pass

@@ -154,18 +154,18 @@ async def _create_reaction(session, shout, author, reaction):
if r.kind == ReactionKind.COMMENT.value:
if r.kind == ReactionKind.COMMENT.value:
await update_author_stat(author)
await update_author_stat(author)

rdict['shout'] = shout.dict()
rdict["shout"] = shout.dict()
rdict['created_by'] = author.id
rdict["created_by"] = author.id
rdict['stat'] = {'commented': 0, 'reacted': 0, 'rating': 0}
rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0}

# notifications call
# notifications call
await notify_reaction(rdict, 'create')
await notify_reaction(rdict, "create")

return rdict
return rdict


def prepare_new_rating(reaction: dict, shout_id: int, session, author: Author):
def prepare_new_rating(reaction: dict, shout_id: int, session, author: Author):
kind = reaction.get('kind')
kind = reaction.get("kind")
opposite_kind = (
opposite_kind = (
ReactionKind.DISLIKE.value if is_positive(kind) else ReactionKind.LIKE.value
ReactionKind.DISLIKE.value if is_positive(kind) else ReactionKind.LIKE.value
)
)
@@ -177,7 +177,7 @@ def prepare_new_rating(reaction: dict, shout_id: int, session, author: Author):
Reaction.kind.in_(RATING_REACTIONS),
Reaction.kind.in_(RATING_REACTIONS),
)
)
)
)
reply_to = reaction.get('reply_to')
reply_to = reaction.get("reply_to")
if reply_to and isinstance(reply_to, int):
if reply_to and isinstance(reply_to, int):
q = q.filter(Reaction.reply_to == reply_to)
q = q.filter(Reaction.reply_to == reply_to)
rating_reactions = session.execute(q).all()
rating_reactions = session.execute(q).all()
@@ -190,41 +190,43 @@ def prepare_new_rating(reaction: dict, shout_id: int, session, author: Author):
rating_reactions,
rating_reactions,
)
)
if same_rating:
if same_rating:
return {'error': "You can't rate the same thing twice"}
return {"error": "You can't rate the same thing twice"}
elif opposite_rating:
elif opposite_rating:
return {'error': 'Remove opposite vote first'}
return {"error": "Remove opposite vote first"}
elif filter(lambda r: r.created_by == author.id, rating_reactions):
elif filter(lambda r: r.created_by == author.id, rating_reactions):
return {'error': "You can't rate your own thing"}
return {"error": "You can't rate your own thing"}
return
return


@mutation.field('create_reaction')
@mutation.field("create_reaction")
@login_required
@login_required
async def create_reaction(_, info, reaction):
async def create_reaction(_, info, reaction):
logger.debug(f'{info.context} for {reaction}')
logger.debug(f"{info.context} for {reaction}")
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
shout_id = reaction.get('shout')
shout_id = reaction.get("shout")

if not shout_id:
if not shout_id:
return {'error': 'Shout ID is required to create a reaction.'}
return {"error": "Shout ID is required to create a reaction."}

try:
try:
with local_session() as session:
with local_session() as session:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
if shout and author:
if shout and author:
reaction['created_by'] = author.id
reaction["created_by"] = author.id
kind = reaction.get('kind')
kind = reaction.get("kind")
shout_id = shout.id
shout_id = shout.id

if not kind and isinstance(reaction.get('body'), str):
if not kind and isinstance(reaction.get("body"), str):
kind = ReactionKind.COMMENT.value
kind = ReactionKind.COMMENT.value

if not kind:
if not kind:
return {'error': 'cannot create reaction without a kind'}
return {"error": "cannot create reaction without a kind"}

if kind in RATING_REACTIONS:
if kind in RATING_REACTIONS:
error_result = prepare_new_rating(reaction, shout_id, session, author)
error_result = prepare_new_rating(
reaction, shout_id, session, author
)
if error_result:
if error_result:
return error_result
return error_result

@@ -232,25 +234,25 @@ async def create_reaction(_, info, reaction):

# TODO: call recount ratings periodically
# TODO: call recount ratings periodically

return {'reaction': rdict}
return {"reaction": rdict}
except Exception as e:
except Exception as e:
import traceback
import traceback

traceback.print_exc()
traceback.print_exc()
logger.error(f'{type(e).__name__}: {e}')
logger.error(f"{type(e).__name__}: {e}")

return {'error': 'Cannot create reaction.'}
return {"error": "Cannot create reaction."}


@mutation.field('update_reaction')
@mutation.field("update_reaction")
@login_required
@login_required
async def update_reaction(_, info, reaction):
async def update_reaction(_, info, reaction):
logger.debug(f'{info.context} for {reaction}')
logger.debug(f"{info.context} for {reaction}")
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
roles = info.context.get('roles')
roles = info.context.get("roles")
rid = reaction.get('id')
rid = reaction.get("id")
if rid and isinstance(rid, int) and user_id and roles:
if rid and isinstance(rid, int) and user_id and roles:
del reaction['id']
del reaction["id"]
with local_session() as session:
with local_session() as session:
reaction_query = select(Reaction).filter(Reaction.id == rid)
reaction_query = select(Reaction).filter(Reaction.id == rid)
aliased_reaction = aliased(Reaction)
aliased_reaction = aliased(Reaction)
@@ -263,19 +265,19 @@ async def update_reaction(_, info, reaction):
)
)

if not r:
if not r:
return {'error': 'invalid reaction id'}
return {"error": "invalid reaction id"}

author = session.query(Author).filter(Author.user == user_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
if author:
if author:
if r.created_by != author.id and 'editor' not in roles:
if r.created_by != author.id and "editor" not in roles:
return {'error': 'access denied'}
return {"error": "access denied"}

body = reaction.get('body')
body = reaction.get("body")
if body:
if body:
r.body = body
r.body = body
r.updated_at = int(time.time())
r.updated_at = int(time.time())

if r.kind != reaction['kind']:
if r.kind != reaction["kind"]:
# Определение изменения мнения может быть реализовано здесь
# Определение изменения мнения может быть реализовано здесь
pass
pass

@@ -284,38 +286,38 @@ async def update_reaction(_, info, reaction):
session.commit()
session.commit()

r.stat = {
r.stat = {
'reacted': reacted_stat,
"reacted": reacted_stat,
'commented': commented_stat,
"commented": commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
}
}

await notify_reaction(r.dict(), 'update')
await notify_reaction(r.dict(), "update")

return {'reaction': r}
return {"reaction": r}
else:
else:
return {'error': 'not authorized'}
return {"error": "not authorized"}
except Exception:
except Exception:
import traceback
import traceback

traceback.print_exc()
traceback.print_exc()
return {'error': 'cannot create reaction'}
return {"error": "cannot create reaction"}


@mutation.field('delete_reaction')
@mutation.field("delete_reaction")
@login_required
@login_required
async def delete_reaction(_, info, reaction_id: int):
async def delete_reaction(_, info, reaction_id: int):
logger.debug(f'{info.context} for {reaction_id}')
logger.debug(f"{info.context} for {reaction_id}")
user_id = info.context.get('user_id')
user_id = info.context.get("user_id")
roles = info.context.get('roles', [])
roles = info.context.get("roles", [])
if user_id:
if user_id:
with local_session() as session:
with local_session() as session:
try:
try:
author = session.query(Author).filter(Author.user == user_id).one()
author = session.query(Author).filter(Author.user == user_id).one()
r = session.query(Reaction).filter(Reaction.id == reaction_id).one()
r = session.query(Reaction).filter(Reaction.id == reaction_id).one()
if r.created_by != author.id and 'editor' not in roles:
if r.created_by != author.id and "editor" not in roles:
return {'error': 'access denied'}
return {"error": "access denied"}

logger.debug(f'{user_id} user removing his #{reaction_id} reaction')
logger.debug(f"{user_id} user removing his #{reaction_id} reaction")
reaction_dict = r.dict()
reaction_dict = r.dict()
session.delete(r)
session.delete(r)
session.commit()
session.commit()
@@ -323,47 +325,47 @@ async def delete_reaction(_, info, reaction_id: int):
# обновление счетчика комментариев в кеше
# обновление счетчика комментариев в кеше
if r.kind == ReactionKind.COMMENT.value:
if r.kind == ReactionKind.COMMENT.value:
await update_author_stat(author)
await update_author_stat(author)
await notify_reaction(reaction_dict, 'delete')
await notify_reaction(reaction_dict, "delete")

return {'error': None, 'reaction': reaction_dict}
return {"error": None, "reaction": reaction_dict}
except Exception as exc:
except Exception as exc:
return {'error': f'cannot delete reaction: {exc}'}
return {"error": f"cannot delete reaction: {exc}"}
return {'error': 'cannot delete reaction'}
return {"error": "cannot delete reaction"}


def apply_reaction_filters(by, q):
def apply_reaction_filters(by, q):
shout_slug = by.get('shout', None)
shout_slug = by.get("shout", None)
if shout_slug:
if shout_slug:
q = q.filter(Shout.slug == shout_slug)
q = q.filter(Shout.slug == shout_slug)

elif by.get('shouts'):
elif by.get("shouts"):
q = q.filter(Shout.slug.in_(by.get('shouts', [])))
q = q.filter(Shout.slug.in_(by.get("shouts", [])))

created_by = by.get('created_by', None)
created_by = by.get("created_by", None)
if created_by:
if created_by:
q = q.filter(Author.id == created_by)
q = q.filter(Author.id == created_by)

topic = by.get('topic', None)
topic = by.get("topic", None)
if isinstance(topic, int):
if isinstance(topic, int):
q = q.filter(Shout.topics.any(id=topic))
q = q.filter(Shout.topics.any(id=topic))

if by.get('comment', False):
if by.get("comment", False):
q = q.filter(Reaction.kind == ReactionKind.COMMENT.value)
q = q.filter(Reaction.kind == ReactionKind.COMMENT.value)
if by.get('rating', False):
if by.get("rating", False):
q = q.filter(Reaction.kind.in_(RATING_REACTIONS))
q = q.filter(Reaction.kind.in_(RATING_REACTIONS))

by_search = by.get('search', '')
by_search = by.get("search", "")
if len(by_search) > 2:
if len(by_search) > 2:
q = q.filter(Reaction.body.ilike(f'%{by_search}%'))
q = q.filter(Reaction.body.ilike(f"%{by_search}%"))
|
||||||
|
|
||||||
after = by.get('after', None)
|
after = by.get("after", None)
|
||||||
if isinstance(after, int):
|
if isinstance(after, int):
|
||||||
q = q.filter(Reaction.created_at > after)
|
q = q.filter(Reaction.created_at > after)
|
||||||
|
|
||||||
return q
|
return q
|
||||||
|
|
||||||
|
|
||||||
@query.field('load_reactions_by')
|
@query.field("load_reactions_by")
|
||||||
async def load_reactions_by(_, info, by, limit=50, offset=0):
|
async def load_reactions_by(_, info, by, limit=50, offset=0):
|
||||||
"""
|
"""
|
||||||
:param info: graphql meta
|
:param info: graphql meta
|
||||||
|
@ -401,12 +403,12 @@ async def load_reactions_by(_, info, by, limit=50, offset=0):
|
||||||
q = q.group_by(Reaction.id, Author.id, Shout.id, aliased_reaction.id)
|
q = q.group_by(Reaction.id, Author.id, Shout.id, aliased_reaction.id)
|
||||||
|
|
||||||
# order by
|
# order by
|
||||||
order_stat = by.get('sort', '').lower() # 'like' | 'dislike' | 'newest' | 'oldest'
|
order_stat = by.get("sort", "").lower() # 'like' | 'dislike' | 'newest' | 'oldest'
|
||||||
order_by_stmt = desc(Reaction.created_at)
|
order_by_stmt = desc(Reaction.created_at)
|
||||||
if order_stat == 'oldest':
|
if order_stat == "oldest":
|
||||||
order_by_stmt = asc(Reaction.created_at)
|
order_by_stmt = asc(Reaction.created_at)
|
||||||
elif order_stat.endswith('like'):
|
elif order_stat.endswith("like"):
|
||||||
order_by_stmt = desc(f'{order_stat}s_stat')
|
order_by_stmt = desc(f"{order_stat}s_stat")
|
||||||
q = q.order_by(order_by_stmt)
|
q = q.order_by(order_by_stmt)
|
||||||
|
|
||||||
# pagination
|
# pagination
|
||||||
|
@ -428,9 +430,9 @@ async def load_reactions_by(_, info, by, limit=50, offset=0):
|
||||||
reaction.created_by = author
|
reaction.created_by = author
|
||||||
reaction.shout = shout
|
reaction.shout = shout
|
||||||
reaction.stat = {
|
reaction.stat = {
|
||||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||||
'reacted': reacted_stat,
|
"reacted": reacted_stat,
|
||||||
'commented': commented_stat,
|
"commented": commented_stat,
|
||||||
}
|
}
|
||||||
reactions.add(reaction) # Используем список для хранения реакций
|
reactions.add(reaction) # Используем список для хранения реакций
|
||||||
|
|
||||||
|
@ -470,7 +472,7 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
|
||||||
# Sort shouts by the `last_comment` field
|
# Sort shouts by the `last_comment` field
|
||||||
combined_query = (
|
combined_query = (
|
||||||
union(q1, q2)
|
union(q1, q2)
|
||||||
.order_by(desc(text('last_comment_stat')))
|
.order_by(desc(text("last_comment_stat")))
|
||||||
.limit(limit)
|
.limit(limit)
|
||||||
.offset(offset)
|
.offset(offset)
|
||||||
)
|
)
|
||||||
|
@ -485,26 +487,26 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
|
||||||
last_comment,
|
last_comment,
|
||||||
] in results:
|
] in results:
|
||||||
shout.stat = {
|
shout.stat = {
|
||||||
'viewed': await ViewedStorage.get_shout(shout.slug),
|
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||||
'reacted': reacted_stat,
|
"reacted": reacted_stat,
|
||||||
'commented': commented_stat,
|
"commented": commented_stat,
|
||||||
'last_comment': last_comment,
|
"last_comment": last_comment,
|
||||||
}
|
}
|
||||||
shouts.append(shout)
|
shouts.append(shout)
|
||||||
|
|
||||||
return shouts
|
return shouts
|
||||||
|
|
||||||
|
|
||||||
@query.field('load_shouts_followed')
|
@query.field("load_shouts_followed")
|
||||||
@login_required
|
@login_required
|
||||||
async def load_shouts_followed(_, info, limit=50, offset=0) -> List[Shout]:
|
async def load_shouts_followed(_, info, limit=50, offset=0) -> List[Shout]:
|
||||||
user_id = info.context['user_id']
|
user_id = info.context["user_id"]
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
author = session.query(Author).filter(Author.user == user_id).first()
|
author = session.query(Author).filter(Author.user == user_id).first()
|
||||||
if author:
|
if author:
|
||||||
try:
|
try:
|
||||||
author_id: int = author.dict()['id']
|
author_id: int = author.dict()["id"]
|
||||||
shouts = await reacted_shouts_updates(author_id, limit, offset)
|
shouts = await reacted_shouts_updates(author_id, limit, offset)
|
||||||
return shouts
|
return shouts
|
||||||
except Exception as error:
|
except Exception as error:
|
||||||
|
|
|
@@ -1,7 +1,6 @@
from sqlalchemy import bindparam, distinct, or_, text
from sqlalchemy.orm import aliased, joinedload
-from sqlalchemy.sql.expression import (and_, asc, case, desc, func, nulls_last,
-select)
+from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select

from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
@@ -29,7 +28,7 @@ def filter_my(info, session, q):
reader_id = None
user_id = None
if isinstance(info.context, dict):
-user_id = info.context.get('user_id')
+user_id = info.context.get("user_id")
if user_id:
reader = session.query(Author).filter(Author.user == user_id).first()
if reader:
@@ -56,22 +55,22 @@ def filter_my(info, session, q):

def apply_filters(q, filters, author_id=None):
if isinstance(filters, dict):
-if filters.get('reacted'):
+if filters.get("reacted"):
q.join(Reaction, Reaction.created_by == author_id)

-by_featured = filters.get('featured')
+by_featured = filters.get("featured")
if by_featured:
q = q.filter(Shout.featured_at.is_not(None))
-by_layouts = filters.get('layouts')
+by_layouts = filters.get("layouts")
if by_layouts:
q = q.filter(Shout.layout.in_(by_layouts))
-by_author = filters.get('author')
+by_author = filters.get("author")
if by_author:
q = q.filter(Shout.authors.any(slug=by_author))
-by_topic = filters.get('topic')
+by_topic = filters.get("topic")
if by_topic:
q = q.filter(Shout.topics.any(slug=by_topic))
-by_after = filters.get('after')
+by_after = filters.get("after")
if by_after:
ts = int(by_after)
q = q.filter(Shout.created_at > ts)
@@ -79,7 +78,7 @@ def apply_filters(q, filters, author_id=None):
return q


-@query.field('get_shout')
+@query.field("get_shout")
async def get_shout(_, info, slug: str):
with local_session() as session:
q = query_shouts()
@@ -100,11 +99,11 @@ async def get_shout(_, info, slug: str):
] = results

shout.stat = {
-'viewed': await ViewedStorage.get_shout(shout.slug),
+"viewed": await ViewedStorage.get_shout(shout.slug),
-'reacted': reacted_stat,
+"reacted": reacted_stat,
-'commented': commented_stat,
+"commented": commented_stat,
-'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
+"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
-'last_comment': last_comment,
+"last_comment": last_comment,
}

for author_caption in (
@@ -139,7 +138,7 @@ async def get_shout(_, info, slug: str):
return shout


-@query.field('load_shouts_by')
+@query.field("load_shouts_by")
async def load_shouts_by(_, _info, options):
"""
:param options: {
@@ -168,25 +167,25 @@ async def load_shouts_by(_, _info, options):
q = add_reaction_stat_columns(q, aliased_reaction)

# filters
-filters = options.get('filters', {})
+filters = options.get("filters", {})
q = apply_filters(q, filters)

# group
q = q.group_by(Shout.id)

# order
-order_by = Shout.featured_at if filters.get('featured') else Shout.published_at
+order_by = Shout.featured_at if filters.get("featured") else Shout.published_at
-order_str = options.get('order_by')
+order_str = options.get("order_by")
-if order_str in ['likes', 'shouts', 'followers', 'comments', 'last_comment']:
+if order_str in ["likes", "shouts", "followers", "comments", "last_comment"]:
-q = q.order_by(desc(text(f'{order_str}_stat')))
+q = q.order_by(desc(text(f"{order_str}_stat")))
query_order_by = (
-desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
+desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
)
q = q.order_by(nulls_last(query_order_by))

# limit offset
-offset = options.get('offset', 0)
+offset = options.get("offset", 0)
-limit = options.get('limit', 10)
+limit = options.get("limit", 10)
q = q.limit(limit).offset(offset)

shouts = []
@@ -215,18 +214,18 @@ async def load_shouts_by(_, _info, options):
if main_topic:
shout.main_topic = main_topic[0]
shout.stat = {
-'viewed': await ViewedStorage.get_shout(shout.slug),
+"viewed": await ViewedStorage.get_shout(shout.slug),
-'reacted': reacted_stat,
+"reacted": reacted_stat,
-'commented': commented_stat,
+"commented": commented_stat,
-'rating': int(likes_stat) - int(dislikes_stat),
+"rating": int(likes_stat) - int(dislikes_stat),
-'last_comment': last_comment,
+"last_comment": last_comment,
}
shouts.append(shout)

return shouts


-@query.field('load_shouts_feed')
+@query.field("load_shouts_feed")
@login_required
async def load_shouts_feed(_, info, options):
shouts = []
@@ -237,24 +236,24 @@ async def load_shouts_feed(_, info, options):
q = add_reaction_stat_columns(q, aliased_reaction)

# filters
-filters = options.get('filters', {})
+filters = options.get("filters", {})
if filters:
q, reader_id = filter_my(info, session, q)
q = apply_filters(q, filters, reader_id)

# sort order
order_by = options.get(
-'order_by',
+"order_by",
-Shout.featured_at if filters.get('featured') else Shout.published_at,
+Shout.featured_at if filters.get("featured") else Shout.published_at,
)

query_order_by = (
-desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
+desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
)

# pagination
-offset = options.get('offset', 0)
+offset = options.get("offset", 0)
-limit = options.get('limit', 10)
+limit = options.get("limit", 10)

q = (
q.group_by(Shout.id)
@@ -289,18 +288,18 @@ async def load_shouts_feed(_, info, options):
if main_topic:
shout.main_topic = main_topic[0]
shout.stat = {
-'viewed': await ViewedStorage.get_shout(shout.slug),
+"viewed": await ViewedStorage.get_shout(shout.slug),
-'reacted': reacted_stat,
+"reacted": reacted_stat,
-'commented': commented_stat,
+"commented": commented_stat,
-'rating': likes_stat - dislikes_stat,
+"rating": likes_stat - dislikes_stat,
-'last_comment': last_comment,
+"last_comment": last_comment,
}
shouts.append(shout)

return shouts


-@query.field('load_shouts_search')
+@query.field("load_shouts_search")
async def load_shouts_search(_, _info, text, limit=50, offset=0):
if isinstance(text, str) and len(text) > 2:
results = await search_text(text, limit, offset)
@@ -309,7 +308,7 @@ async def load_shouts_search(_, _info, text, limit=50, offset=0):
return []


-@query.field('load_shouts_unrated')
+@query.field("load_shouts_unrated")
@login_required
async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
q = query_shouts()
@@ -324,7 +323,7 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
),
),
)
-.outerjoin(Author, Author.user == bindparam('user_id'))
+.outerjoin(Author, Author.user == bindparam("user_id"))
.where(
and_(
Shout.deleted_at.is_(None),
@@ -341,7 +340,7 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
q = add_reaction_stat_columns(q, aliased_reaction)

q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset)
-user_id = info.context.get('user_id') if isinstance(info.context, dict) else None
+user_id = info.context.get("user_id") if isinstance(info.context, dict) else None
if user_id:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
@@ -361,20 +360,20 @@ async def get_shouts_from_query(q, author_id=None):
likes_stat,
dislikes_stat,
last_comment,
-] in session.execute(q, {'author_id': author_id}).unique():
+] in session.execute(q, {"author_id": author_id}).unique():
shouts.append(shout)
shout.stat = {
-'viewed': await ViewedStorage.get_shout(shout_slug=shout.slug),
+"viewed": await ViewedStorage.get_shout(shout_slug=shout.slug),
-'reacted': reacted_stat,
+"reacted": reacted_stat,
-'commented': commented_stat,
+"commented": commented_stat,
-'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
+"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
-'last_comment': last_comment,
+"last_comment": last_comment,
}

return shouts


-@query.field('load_shouts_random_top')
+@query.field("load_shouts_random_top")
async def load_shouts_random_top(_, _info, options):
"""
:param _
@@ -399,7 +398,7 @@ async def load_shouts_random_top(_, _info, options):
.where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
)

-subquery = apply_filters(subquery, options.get('filters', {}))
+subquery = apply_filters(subquery, options.get("filters", {}))

subquery = subquery.group_by(Shout.id).order_by(
desc(
@@ -415,7 +414,7 @@ async def load_shouts_random_top(_, _info, options):
)
)

-random_limit = options.get('random_limit', 100)
+random_limit = options.get("random_limit", 100)
if random_limit:
subquery = subquery.limit(random_limit)

@@ -427,7 +426,7 @@ async def load_shouts_random_top(_, _info, options):

q = add_reaction_stat_columns(q, aliased_reaction)

-limit = options.get('limit', 10)
+limit = options.get("limit", 10)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit)

shouts = await get_shouts_from_query(q)
@@ -435,17 +434,17 @@ async def load_shouts_random_top(_, _info, options):
return shouts


-@query.field('load_shouts_random_topic')
+@query.field("load_shouts_random_topic")
async def load_shouts_random_topic(_, info, limit: int = 10):
[topic] = get_topics_random(None, None, 1)
if topic:
shouts = fetch_shouts_by_topic(topic, limit)
if shouts:
-return {'topic': topic, 'shouts': shouts}
+return {"topic": topic, "shouts": shouts}
return {
-'error': 'failed to get random topic after few retries',
+"error": "failed to get random topic after few retries",
-'shouts': [],
+"shouts": [],
-'topic': {},
+"topic": {},
}

@@ -18,28 +18,35 @@ def add_topic_stat_columns(q):

# shouts
q = q.outerjoin(aliased_shout_topic, aliased_shout_topic.topic == Topic.id)
-q = q.add_columns(func.count(distinct(aliased_shout_topic.shout)).label('shouts_stat'))
+q = q.add_columns(
+func.count(distinct(aliased_shout_topic.shout)).label("shouts_stat")
+)

# authors
-q = q.outerjoin(aliased_shout, and_(
+q = q.outerjoin(
+aliased_shout,
+and_(
aliased_shout.id == aliased_shout_topic.shout,
aliased_shout.published_at.is_not(None),
-aliased_shout.deleted_at.is_(None)
+aliased_shout.deleted_at.is_(None),
-))
+),
+)
q = q.outerjoin(aliased_authors, aliased_shout.authors.any(id=aliased_authors.id))
-q = q.add_columns(func.count(distinct(aliased_authors.author)).label('authors_stat'))
+q = q.add_columns(
+func.count(distinct(aliased_authors.author)).label("authors_stat")
+)

# followers
q = q.outerjoin(aliased_followers, aliased_followers.topic == Topic.id)
q = q.add_columns(
-func.count(distinct(aliased_followers.follower)).label('followers_stat')
+func.count(distinct(aliased_followers.follower)).label("followers_stat")
)

# comments
sub_comments = (
select(
-Shout.id.label('shout_id'),
+Shout.id.label("shout_id"),
-func.coalesce(func.count(Reaction.id)).label('comments_count')
+func.coalesce(func.count(Reaction.id)).label("comments_count"),
)
.join(ShoutTopic, ShoutTopic.shout == Shout.id)
.join(Topic, ShoutTopic.topic == Topic.id)
@@ -55,7 +62,9 @@ def add_topic_stat_columns(q):
.subquery()
)
q = q.outerjoin(sub_comments, aliased_shout_topic.shout == sub_comments.c.shout_id)
-q = q.add_columns(func.coalesce(sub_comments.c.comments_count, 0).label('comments_stat'))
+q = q.add_columns(
+func.coalesce(sub_comments.c.comments_count, 0).label("comments_stat")
+)

group_list = [Topic.id, sub_comments.c.comments_count]

@@ -71,23 +80,23 @@ def add_author_stat_columns(q):

q = q.outerjoin(aliased_shout_author, aliased_shout_author.author == Author.id)
q = q.add_columns(
-func.count(distinct(aliased_shout_author.shout)).label('shouts_stat')
+func.count(distinct(aliased_shout_author.shout)).label("shouts_stat")
)

q = q.outerjoin(aliased_authors, aliased_authors.follower == Author.id)
q = q.add_columns(
-func.count(distinct(aliased_authors.author)).label('authors_stat')
+func.count(distinct(aliased_authors.author)).label("authors_stat")
)

q = q.outerjoin(aliased_followers, aliased_followers.author == Author.id)
q = q.add_columns(
-func.count(distinct(aliased_followers.follower)).label('followers_stat')
+func.count(distinct(aliased_followers.follower)).label("followers_stat")
)

# Create a subquery for comments count
sub_comments = (
select(
-Author.id, func.coalesce(func.count(Reaction.id)).label('comments_count')
+Author.id, func.coalesce(func.count(Reaction.id)).label("comments_count")
)
.outerjoin(
Reaction,
@@ -113,8 +122,8 @@ def add_author_stat_columns(q):
def get_with_stat(q):
records = []
try:
-is_author = f'{q}'.lower().startswith('select author')
+is_author = f"{q}".lower().startswith("select author")
-is_topic = f'{q}'.lower().startswith('select topic')
+is_topic = f"{q}".lower().startswith("select topic")
if is_author:
q = add_author_stat_columns(q)
elif is_topic:
@@ -124,22 +133,23 @@ def get_with_stat(q):
for cols in result:
entity = cols[0]
stat = dict()
-stat['shouts'] = cols[1]
+stat["shouts"] = cols[1]
-stat['authors'] = cols[2]
+stat["authors"] = cols[2]
-stat['followers'] = cols[3]
+stat["followers"] = cols[3]
if is_author:
-stat['comments'] = cols[4]
+stat["comments"] = cols[4]
entity.stat = stat
records.append(entity)
except Exception as exc:
import traceback

logger.error(exc, traceback.format_exc())
raise Exception(exc)
return records


def author_follows_authors(author_id: int):
-af = aliased(AuthorFollower, name='af')
+af = aliased(AuthorFollower, name="af")
q = (
select(Author)
.select_from(join(Author, af, Author.id == af.author))

@@ -10,9 +10,9 @@ from services.memorycache import cache_region
from services.schema import mutation, query


-@query.field('get_topics_all')
+@query.field("get_topics_all")
def get_topics_all(_, _info):
-cache_key = 'get_topics_all'
+cache_key = "get_topics_all"

@cache_region.cache_on_arguments(cache_key)
def _get_topics_all():
@@ -21,9 +21,9 @@ def get_topics_all(_, _info):
return _get_topics_all()


-@query.field('get_topics_by_community')
+@query.field("get_topics_by_community")
def get_topics_by_community(_, _info, community_id: int):
-cache_key = f'get_topics_by_community_{community_id}'
+cache_key = f"get_topics_by_community_{community_id}"

@cache_region.cache_on_arguments(cache_key)
def _get_topics_by_community():
@@ -33,8 +33,8 @@ def get_topics_by_community(_, _info, community_id: int):
return _get_topics_by_community()


-@query.field('get_topics_by_author')
+@query.field("get_topics_by_author")
-async def get_topics_by_author(_, _info, author_id=0, slug='', user=''):
+async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
q = select(Topic)
if author_id:
q = q.join(Author).where(Author.id == author_id)
@@ -46,7 +46,7 @@ async def get_topics_by_author(_, _info, author_id=0, slug='', user=''):
return get_with_stat(q)


-@query.field('get_topic')
+@query.field("get_topic")
def get_topic(_, _info, slug: str):
q = select(Topic).filter(Topic.slug == slug)
result = get_with_stat(q)
@@ -54,7 +54,7 @@ def get_topic(_, _info, slug: str):
return topic


-@mutation.field('create_topic')
+@mutation.field("create_topic")
@login_required
async def create_topic(_, _info, inp):
with local_session() as session:
@@ -64,46 +64,46 @@ async def create_topic(_, _info, inp):
session.add(new_topic)
session.commit()

-return {'topic': new_topic}
+return {"topic": new_topic}


-@mutation.field('update_topic')
+@mutation.field("update_topic")
@login_required
async def update_topic(_, _info, inp):
-slug = inp['slug']
+slug = inp["slug"]
with local_session() as session:
topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic:
-return {'error': 'topic not found'}
+return {"error": "topic not found"}
else:
Topic.update(topic, inp)
session.add(topic)
session.commit()

-return {'topic': topic}
+return {"topic": topic}


-@mutation.field('delete_topic')
+@mutation.field("delete_topic")
@login_required
async def delete_topic(_, info, slug: str):
-user_id = info.context['user_id']
+user_id = info.context["user_id"]
with local_session() as session:
t: Topic = session.query(Topic).filter(Topic.slug == slug).first()
if not t:
-return {'error': 'invalid topic slug'}
+return {"error": "invalid topic slug"}
author = session.query(Author).filter(Author.user == user_id).first()
if author:
if t.created_by != author.id:
-return {'error': 'access denied'}
+return {"error": "access denied"}

session.delete(t)
session.commit()

return {}
-return {'error': 'access denied'}
+return {"error": "access denied"}


-@query.field('get_topics_random')
+@query.field("get_topics_random")
def get_topics_random(_, _info, amount=12):
q = select(Topic)
q = q.join(ShoutTopic)

@@ -4,12 +4,12 @@ from granian.server import Granian
from services.logger import root_logger as logger
from settings import PORT

-if __name__ == '__main__':
+if __name__ == "__main__":
-logger.info('started')
+logger.info("started")

granian_instance = Granian(
-'main:app',
+"main:app",
-address='0.0.0.0',  # noqa S104
+address="0.0.0.0",  # noqa S104
port=PORT,
threads=4,
websockets=False,

@@ -9,67 +9,67 @@ from settings import ADMIN_SECRET, AUTH_URL

async def request_data(gql, headers=None):
if headers is None:
-headers = {'Content-Type': 'application/json'}
+headers = {"Content-Type": "application/json"}
try:
async with httpx.AsyncClient() as client:
response = await client.post(AUTH_URL, json=gql, headers=headers)
if response.status_code == 200:
data = response.json()
-errors = data.get('errors')
+errors = data.get("errors")
if errors:
-logger.error(f'HTTP Errors: {errors}')
+logger.error(f"HTTP Errors: {errors}")
else:
return data
except Exception as e:
# Handling and logging exceptions during authentication check
-logger.error(f'request_data error: {e}')
+logger.error(f"request_data error: {e}")
return None


async def check_auth(req):
-token = req.headers.get('Authorization')
+token = req.headers.get("Authorization")
-user_id = ''
+user_id = ""
user_roles = []
if token:
# Logging the authentication token
-logger.debug(f'{token}')
+logger.debug(f"{token}")
-query_name = 'validate_jwt_token'
+query_name = "validate_jwt_token"
-operation = 'ValidateToken'
+operation = "ValidateToken"
-variables = {'params': {'token_type': 'access_token', 'token': token}}
+variables = {"params": {"token_type": "access_token", "token": token}}

gql = {
-'query': f'query {operation}($params: ValidateJWTTokenInput!) {{'
+"query": f"query {operation}($params: ValidateJWTTokenInput!) {{"
-+ f'{query_name}(params: $params) {{ is_valid claims }} '
++ f"{query_name}(params: $params) {{ is_valid claims }} "
-+ '}',
++ "}",
-'variables': variables,
+"variables": variables,
-'operationName': operation,
+"operationName": operation,
}
data = await request_data(gql)
if data:
logger.debug(data)
-user_data = data.get('data', {}).get(query_name, {}).get('claims', {})
+user_data = data.get("data", {}).get(query_name, {}).get("claims", {})
-user_id = user_data.get('sub', '')
+user_id = user_data.get("sub", "")
-user_roles = user_data.get('allowed_roles', [])
+user_roles = user_data.get("allowed_roles", [])
return user_id, user_roles


async def add_user_role(user_id):
-logger.info(f'add author role for user_id: {user_id}')
+logger.info(f"add author role for user_id: {user_id}")
-query_name = '_update_user'
+query_name = "_update_user"
-operation = 'UpdateUserRoles'
+operation = "UpdateUserRoles"
headers = {
-'Content-Type': 'application/json',
+"Content-Type": "application/json",
-'x-authorizer-admin-secret': ADMIN_SECRET,
+"x-authorizer-admin-secret": ADMIN_SECRET,
}
-variables = {'params': {'roles': 'author, reader', 'id': user_id}}
+variables = {"params": {"roles": "author, reader", "id": user_id}}
gql = {
-'query': f'mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}',
+"query": f"mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}",
-'variables': variables,
+"variables": variables,
-'operationName': operation,
+"operationName": operation,
}
data = await request_data(gql, headers)
if data:
-user_id = data.get('data', {}).get(query_name, {}).get('id')
+user_id = data.get("data", {}).get(query_name, {}).get("id")
return user_id


@@ -77,15 +77,15 @@ def login_required(f):
@wraps(f)
async def decorated_function(*args, **kwargs):
info = args[1]
-req = info.context.get('request')
+req = info.context.get("request")
authorized = await check_auth(req)
if authorized:
logger.info(authorized)
user_id, user_roles = authorized
if user_id and user_roles:
-logger.info(f' got {user_id} roles: {user_roles}')
+logger.info(f" got {user_id} roles: {user_roles}")
-info.context['user_id'] = user_id.strip()
+info.context["user_id"] = user_id.strip()
-info.context['roles'] = user_roles
+info.context["roles"] = user_roles
return await f(*args, **kwargs)

return decorated_function
@@ -99,11 +99,11 @@ def auth_request(f):
if authorized:
user_id, user_roles = authorized
if user_id and user_roles:
-logger.info(f' got {user_id} roles: {user_roles}')
+logger.info(f" got {user_id} roles: {user_roles}")
-req['user_id'] = user_id.strip()
+req["user_id"] = user_id.strip()
-req['roles'] = user_roles
+req["roles"] = user_roles
return await f(*args, **kwargs)
else:
-raise HTTPException(status_code=401, detail='Unauthorized')
+raise HTTPException(status_code=401, detail="Unauthorized")

return decorated_function

@@ -7,31 +7,33 @@ from services.encoders import CustomJSONEncoder
from services.rediscache import redis

DEFAULT_FOLLOWS = {
-'topics': [],
+"topics": [],
-'authors': [],
+"authors": [],
-'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}],
+"communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


async def cache_author(author: dict):
payload = json.dumps(author, cls=CustomJSONEncoder)
-await redis.execute('SET', f'user:{author.get("user")}', payload)
+await redis.execute("SET", f'user:{author.get("user")}', payload)
-await redis.execute('SET', f'author:{author.get("id")}', payload)
+await redis.execute("SET", f'author:{author.get("id")}', payload)

# update stat all field for followers' caches in <authors> list
-followers_str = await redis.execute('GET', f'author:{author.get("id")}:followers')
+followers_str = await redis.execute("GET", f'author:{author.get("id")}:followers')
followers = []
if followers_str:
followers = json.loads(followers_str)
if isinstance(followers, list):
for follower in followers:
follower_follows_authors = []
-follower_follows_authors_str = await redis.execute('GET', f'author:{author.get("id")}:follows-authors')
+follower_follows_authors_str = await redis.execute(
+"GET", f'author:{author.get("id")}:follows-authors'
+)
if follower_follows_authors_str:
follower_follows_authors = json.loads(follower_follows_authors_str)
c = 0
for old_author in follower_follows_authors:
-if int(old_author.get('id')) == int(author.get('id', 0)):
+if int(old_author.get("id")) == int(author.get("id", 0)):
follower_follows_authors[c] = author
break  # exit the loop since we found and updated the author
c += 1
@@ -40,19 +42,23 @@ async def cache_author(author: dict):
follower_follows_authors.append(author)

# update stat field for all authors' caches in <followers> list
-follows_str = await redis.execute('GET', f'author:{author.get("id")}:follows-authors')
+follows_str = await redis.execute(
+"GET", f'author:{author.get("id")}:follows-authors'
+)
follows_authors = []
if follows_str:
follows_authors = json.loads(follows_str)
if isinstance(follows_authors, list):
for followed_author in follows_authors:
followed_author_followers = []
-followed_author_followers_str = await redis.execute('GET', f'author:{author.get("id")}:followers')
+followed_author_followers_str = await redis.execute(
+"GET", f'author:{author.get("id")}:followers'
+)
if followed_author_followers_str:
followed_author_followers = json.loads(followed_author_followers_str)
c = 0
for old_follower in followed_author_followers:
-if int(old_follower.get('id')) == int(author.get('id', 0)):
+if int(old_follower.get("id")) == int(author.get("id", 0)):
followed_author_followers[c] = author
break  # exit the loop since we found and updated the author
c += 1
@@ -64,50 +70,52 @@ async def cache_author(author: dict):
async def cache_follows(follower: Author, entity_type: str, entity, is_insert=True):
# prepare
follows = []
-redis_key = f'author:{follower.id}:follows-{entity_type}s'
+redis_key = f"author:{follower.id}:follows-{entity_type}s"
-follows_str = await redis.execute('GET', redis_key)
+follows_str = await redis.execute("GET", redis_key)
if isinstance(follows_str, str):
follows = json.loads(follows_str)
if is_insert:
follows.append(entity)
else:
-entity_id = entity.get('id')
+entity_id = entity.get("id")
if not entity_id:
-raise Exception('wrong entity')
+raise Exception("wrong entity")
# Remove the entity from follows
-follows = [e for e in follows if e['id'] != entity_id]
+follows = [e for e in follows if e["id"] != entity_id]

# update follows cache
updated_data = [t.dict() if isinstance(t, Topic) else t for t in follows]
payload = json.dumps(updated_data, cls=CustomJSONEncoder)
-await redis.execute('SET', redis_key, payload)
+await redis.execute("SET", redis_key, payload)

# update follower's stats everywhere
-author_str = await redis.execute('GET', f'author:{follower.id}')
+author_str = await redis.execute("GET", f"author:{follower.id}")
if author_str:
author = json.loads(author_str)
-author['stat'][f'{entity_type}s'] = len(updated_data)
+author["stat"][f"{entity_type}s"] = len(updated_data)
await cache_author(author)
return follows


async def cache_follower(follower: Author, author: Author, is_insert=True):
-redis_key = f'author:{author.id}:followers'
+redis_key = f"author:{author.id}:followers"
-followers_str = await redis.execute('GET', redis_key)
+followers_str = await redis.execute("GET", redis_key)
followers = []
if isinstance(followers_str, str):
followers = json.loads(followers_str)
if is_insert:
# Remove the entity from followers
-followers = [e for e in followers if e['id'] != author.id]
+followers = [e for e in followers if e["id"] != author.id]
else:
followers.append(follower)
-updated_followers = [f.dict() if isinstance(f, Author) else f for f in followers]
+updated_followers = [
+f.dict() if isinstance(f, Author) else f for f in followers
+]
payload = json.dumps(updated_followers, cls=CustomJSONEncoder)
-await redis.execute('SET', redis_key, payload)
+await redis.execute("SET", redis_key, payload)
-author_str = await redis.execute('GET', f'author:{follower.id}')
+author_str = await redis.execute("GET", f"author:{follower.id}")
if author_str:
author = json.loads(author_str)
-author['stat']['followers'] = len(updated_followers)
+author["stat"]["followers"] = len(updated_followers)
await cache_author(author)
return followers

@@ -5,8 +5,7 @@ import traceback
import warnings
from typing import Any, Callable, Dict, TypeVar

-from sqlalchemy import (JSON, Column, Engine, Integer, create_engine, event,
-exc, inspect)
+from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, configure_mappers
from sqlalchemy.sql.schema import Table
@@ -19,13 +18,13 @@ from settings import DB_URL
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
inspector = inspect(engine)
configure_mappers()
-T = TypeVar('T')
+T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
-FILTERED_FIELDS = ['_sa_instance_state', 'search_vector']
+FILTERED_FIELDS = ["_sa_instance_state", "search_vector"]


# noinspection PyUnusedLocal
-def local_session(src=''):
+def local_session(src=""):
return Session(bind=engine, expire_on_commit=False)


@@ -36,7 +35,7 @@ class Base(declarative_base()):
__init__: Callable
__allow_unmapped__ = True
__abstract__ = True
-__table_args__ = {'extend_existing': True}
+__table_args__ = {"extend_existing": True}

id = Column(Integer, primary_key=True)

@@ -57,11 +56,11 @@ class Base(declarative_base()):
else:
data[c] = value
# Add synthetic field .stat
-if hasattr(self, 'stat'):
+if hasattr(self, "stat"):
-data['stat'] = self.stat
+data["stat"] = self.stat
return data
except Exception as e:
-logger.error(f'Error occurred while converting object to dictionary: {e}')
+logger.error(f"Error occurred while converting object to dictionary: {e}")
return {}

def update(self, values: Dict[str, Any]) -> None:
@@ -79,22 +78,22 @@ def warning_with_traceback(
message: Warning | str, category, filename: str, lineno: int, file=None, line=None
):
tb = traceback.format_stack()
-tb_str = ''.join(tb)
+tb_str = "".join(tb)
-return f'{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}'
+return f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}"


# set the traceback-printing function for SQLAlchemy warnings
warnings.showwarning = warning_with_traceback
-warnings.simplefilter('always', exc.SAWarning)
+warnings.simplefilter("always", exc.SAWarning)


-@event.listens_for(Engine, 'before_cursor_execute')
+@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
conn.query_start_time = time.time()
-conn.last_statement = ''
+conn.last_statement = ""


-@event.listens_for(Engine, 'after_cursor_execute')
+@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
compiled_statement = context.compiled.string
compiled_parameters = context.compiled.params
@@ -105,7 +104,6 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema
else:
query = compiled_statement  # or handle this case in a way that makes sense for your application

if elapsed > 1 and conn.last_statement != query:
conn.last_statement = query
logger.debug(f"\n{query}\n{'*' * math.floor(elapsed)} {elapsed:.3f} s\n")

@@ -29,19 +29,19 @@ def apply_diff(original, diff):
         The modified string.
     """
     result = []
-    pattern = re.compile(r'^(\+|-) ')
+    pattern = re.compile(r"^(\+|-) ")

     for line in diff:
         match = pattern.match(line)
         if match:
             op = match.group(1)
             content = line[2:]
-            if op == '+':
+            if op == "+":
                 result.append(content)
-            elif op == '-':
+            elif op == "-":
                 # Ignore deleted lines
                 pass
             else:
                 result.append(line)

-    return ' '.join(result)
+    return " ".join(result)
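For context, a minimal self-contained sketch of what apply_diff does after this change (the helper name and sample input below are illustrative, not part of the commit): "+ " lines are kept without their prefix, "- " lines are dropped, anything else passes through unchanged, and the survivors are joined with single spaces.

import re


def apply_diff_sketch(diff):
    # Mirrors the hunk above, trimmed to the essentials.
    result = []
    pattern = re.compile(r"^(\+|-) ")
    for line in diff:
        match = pattern.match(line)
        if match:
            if match.group(1) == "+":
                result.append(line[2:])
            # "- " lines are ignored
        else:
            result.append(line)
    return " ".join(result)


print(apply_diff_sketch(["+ new text", "- old text", "kept as is"]))
# -> "new text kept as is"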
@@ -4,51 +4,51 @@ import colorlog

 # Define the color scheme
 color_scheme = {
-    'DEBUG': 'light_black',
+    "DEBUG": "light_black",
-    'INFO': 'green',
+    "INFO": "green",
-    'WARNING': 'yellow',
+    "WARNING": "yellow",
-    'ERROR': 'red',
+    "ERROR": "red",
-    'CRITICAL': 'red,bg_white',
+    "CRITICAL": "red,bg_white",
 }

 # Define secondary log colors
 secondary_colors = {
-    'log_name': {'DEBUG': 'blue'},
+    "log_name": {"DEBUG": "blue"},
-    'asctime': {'DEBUG': 'cyan'},
+    "asctime": {"DEBUG": "cyan"},
-    'process': {'DEBUG': 'purple'},
+    "process": {"DEBUG": "purple"},
-    'module': {'DEBUG': 'light_black,bg_blue'},
+    "module": {"DEBUG": "light_black,bg_blue"},
-    'funcName': {'DEBUG': 'light_white,bg_blue'},  # Add this line
+    "funcName": {"DEBUG": "light_white,bg_blue"},  # Add this line
 }

 # Define the log format string
-fmt_string = '%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s'
+fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s"

 # Define formatting configuration
 fmt_config = {
-    'log_colors': color_scheme,
+    "log_colors": color_scheme,
-    'secondary_log_colors': secondary_colors,
+    "secondary_log_colors": secondary_colors,
-    'style': '%',
+    "style": "%",
-    'reset': True,
+    "reset": True,
 }


 class MultilineColoredFormatter(colorlog.ColoredFormatter):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.log_colors = kwargs.pop('log_colors', {})
+        self.log_colors = kwargs.pop("log_colors", {})
-        self.secondary_log_colors = kwargs.pop('secondary_log_colors', {})
+        self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})

     def format(self, record):
         message = record.getMessage()
-        if '\n' in message:
+        if "\n" in message:
-            lines = message.split('\n')
+            lines = message.split("\n")
             first_line = lines[0]
             record.message = first_line
             formatted_first_line = super().format(record)
             formatted_lines = [formatted_first_line]
             for line in lines[1:]:
                 formatted_lines.append(line)
-            return '\n'.join(formatted_lines)
+            return "\n".join(formatted_lines)
         else:
             return super().format(record)

@@ -61,7 +61,7 @@ stream = logging.StreamHandler()
 stream.setFormatter(formatter)


-def get_colorful_logger(name='main'):
+def get_colorful_logger(name="main"):
     # Create and configure the logger
     logger = logging.getLogger(name)
     logger.setLevel(logging.DEBUG)

@@ -75,7 +75,7 @@ root_logger = logging.getLogger()
 root_logger.setLevel(logging.DEBUG)
 root_logger.addHandler(stream)

-ignore_logs = ['_trace', 'httpx', '_client', '_trace.atrace', 'aiohttp', '_client']
+ignore_logs = ["_trace", "httpx", "_client", "_trace.atrace", "aiohttp", "_client"]
 for lgr in ignore_logs:
     loggr = logging.getLogger(lgr)
     loggr.setLevel(logging.INFO)
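For context, a minimal self-contained sketch of the colorlog setup touched above (a trimmed configuration; the logger name and format string below are illustrative, while the project itself wires this up through get_colorful_logger and root_logger):

import logging

import colorlog

handler = logging.StreamHandler()
handler.setFormatter(
    colorlog.ColoredFormatter(
        "%(log_color)s%(levelname)s:%(reset)s %(message)s",
        log_colors={"DEBUG": "light_black", "INFO": "green", "ERROR": "red"},
    )
)
demo_logger = logging.getLogger("demo")
demo_logger.addHandler(handler)
demo_logger.setLevel(logging.DEBUG)
demo_logger.info("colored output")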
@@ -5,7 +5,7 @@ from settings import REDIS_URL

 # Создание региона кэша с TTL
 cache_region = make_region()
 cache_region.configure(
-    'dogpile.cache.redis',
+    "dogpile.cache.redis",
-    arguments={'url': f'{REDIS_URL}/1'},
+    arguments={"url": f"{REDIS_URL}/1"},
     expiration_time=3600,  # Cache expiration time in seconds
 )
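As a hedged usage sketch of a dogpile.cache region configured like the one above (the Redis URL and decorated function are placeholders, not from the commit): cache_on_arguments memoizes a function's results in Redis for expiration_time seconds.

from dogpile.cache import make_region

region = make_region().configure(
    "dogpile.cache.redis",
    arguments={"url": "redis://127.0.0.1/1"},  # assumes a local Redis, database 1
    expiration_time=3600,
)


@region.cache_on_arguments()
def get_profile(author_id: int) -> dict:
    # stand-in for a real database query
    return {"id": author_id}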
@@ -5,6 +5,7 @@ from services.db import local_session
 from services.rediscache import redis
 from services.logger import root_logger as logger


 def save_notification(action: str, entity: str, payload):
     with local_session() as session:
         n = Notification(action=action, entity=entity, payload=payload)

@@ -12,34 +13,34 @@ def save_notification(action: str, entity: str, payload):
         session.commit()


-async def notify_reaction(reaction, action: str = 'create'):
+async def notify_reaction(reaction, action: str = "create"):
-    channel_name = 'reaction'
+    channel_name = "reaction"
-    data = {'payload': reaction, 'action': action}
+    data = {"payload": reaction, "action": action}
     try:
-        save_notification(action, channel_name, data.get('payload'))
+        save_notification(action, channel_name, data.get("payload"))
         await redis.publish(channel_name, json.dumps(data))
     except Exception as e:
-        logger.error(f'Failed to publish to channel {channel_name}: {e}')
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")


-async def notify_shout(shout, action: str = 'update'):
+async def notify_shout(shout, action: str = "update"):
-    channel_name = 'shout'
+    channel_name = "shout"
-    data = {'payload': shout, 'action': action}
+    data = {"payload": shout, "action": action}
     try:
-        save_notification(action, channel_name, data.get('payload'))
+        save_notification(action, channel_name, data.get("payload"))
         await redis.publish(channel_name, json.dumps(data))
     except Exception as e:
-        logger.error(f'Failed to publish to channel {channel_name}: {e}')
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")


-async def notify_follower(follower: dict, author_id: int, action: str = 'follow'):
+async def notify_follower(follower: dict, author_id: int, action: str = "follow"):
-    channel_name = f'follower:{author_id}'
+    channel_name = f"follower:{author_id}"
     try:
         # Simplify dictionary before publishing
-        simplified_follower = {k: follower[k] for k in ['id', 'name', 'slug', 'pic']}
+        simplified_follower = {k: follower[k] for k in ["id", "name", "slug", "pic"]}
-        data = {'payload': simplified_follower, 'action': action}
+        data = {"payload": simplified_follower, "action": action}
         # save in channel
-        save_notification(action, channel_name, data.get('payload'))
+        save_notification(action, channel_name, data.get("payload"))

         # Convert data to JSON string
         json_data = json.dumps(data)

@@ -49,7 +50,6 @@ async def notify_follower(follower: dict, author_id: int, action: str = 'follow'
         # Use the 'await' keyword when publishing
         await redis.publish(channel_name, json_data)


     except Exception as e:
         # Log the error and re-raise it
-        logger.error(f'Failed to publish to channel {channel_name}: {e}')
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")
@@ -5,7 +5,7 @@ import redis.asyncio as aredis
 from settings import REDIS_URL

 # Set redis logging level to suppress DEBUG messages
-logger = logging.getLogger('redis')
+logger = logging.getLogger("redis")
 logger.setLevel(logging.WARNING)


@@ -25,11 +25,11 @@ class RedisCache:
     async def execute(self, command, *args, **kwargs):
         if self._client:
             try:
-                logger.debug(f'{command} {args} {kwargs}')
+                logger.debug(f"{command} {args} {kwargs}")
                 for arg in args:
                     if isinstance(arg, dict):
-                        if arg.get('_sa_instance_state'):
+                        if arg.get("_sa_instance_state"):
-                            del arg['_sa_instance_state']
+                            del arg["_sa_instance_state"]
                 r = await self._client.execute_command(command, *args, **kwargs)
                 logger.debug(type(r))
                 logger.debug(r)

@@ -60,4 +60,4 @@ class RedisCache:

 redis = RedisCache()

-__all__ = ['redis']
+__all__ = ["redis"]
@@ -8,51 +8,51 @@ from services.encoders import CustomJSONEncoder
 from services.logger import root_logger as logger
 from services.rediscache import redis

-ELASTIC_HOST = os.environ.get('ELASTIC_HOST', '').replace('https://', '')
+ELASTIC_HOST = os.environ.get("ELASTIC_HOST", "").replace("https://", "")
-ELASTIC_USER = os.environ.get('ELASTIC_USER', '')
+ELASTIC_USER = os.environ.get("ELASTIC_USER", "")
-ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '')
+ELASTIC_PASSWORD = os.environ.get("ELASTIC_PASSWORD", "")
-ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200)
+ELASTIC_PORT = os.environ.get("ELASTIC_PORT", 9200)
-ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else ''
+ELASTIC_AUTH = f"{ELASTIC_USER}:{ELASTIC_PASSWORD}" if ELASTIC_USER else ""
 ELASTIC_URL = os.environ.get(
-    'ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}'
+    "ELASTIC_URL", f"https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}"
 )
 REDIS_TTL = 86400  # 1 day in seconds

 index_settings = {
-    'settings': {
+    "settings": {
-        'index': {'number_of_shards': 1, 'auto_expand_replicas': '0-all'},
+        "index": {"number_of_shards": 1, "auto_expand_replicas": "0-all"},
-        'analysis': {
+        "analysis": {
-            'analyzer': {
+            "analyzer": {
-                'ru': {
+                "ru": {
-                    'tokenizer': 'standard',
+                    "tokenizer": "standard",
-                    'filter': ['lowercase', 'ru_stop', 'ru_stemmer'],
+                    "filter": ["lowercase", "ru_stop", "ru_stemmer"],
                 }
             },
-            'filter': {
+            "filter": {
-                'ru_stemmer': {'type': 'stemmer', 'language': 'russian'},
+                "ru_stemmer": {"type": "stemmer", "language": "russian"},
-                'ru_stop': {'type': 'stop', 'stopwords': '_russian_'},
+                "ru_stop": {"type": "stop", "stopwords": "_russian_"},
             },
         },
     },
-    'mappings': {
+    "mappings": {
-        'properties': {
+        "properties": {
-            'body': {'type': 'text', 'analyzer': 'ru'},
+            "body": {"type": "text", "analyzer": "ru"},
-            'title': {'type': 'text', 'analyzer': 'ru'},
+            "title": {"type": "text", "analyzer": "ru"},
-            'subtitle': {'type': 'text', 'analyzer': 'ru'},
+            "subtitle": {"type": "text", "analyzer": "ru"},
-            'lead': {'type': 'text', 'analyzer': 'ru'},
+            "lead": {"type": "text", "analyzer": "ru"},
             # 'author': {'type': 'text'},
         }
     },
 }

-expected_mapping = index_settings['mappings']
+expected_mapping = index_settings["mappings"]

 # Create an event loop
 search_loop = asyncio.get_event_loop()


 class SearchService:
-    def __init__(self, index_name='search_index'):
+    def __init__(self, index_name="search_index"):
         self.index_name = index_name
         self.client = None
         self.lock = asyncio.Lock()  # Create an asyncio lock

@@ -61,7 +61,7 @@ class SearchService:
         if ELASTIC_HOST:
             try:
                 self.client = OpenSearch(
-                    hosts=[{'host': ELASTIC_HOST, 'port': ELASTIC_PORT}],
+                    hosts=[{"host": ELASTIC_HOST, "port": ELASTIC_PORT}],
                     http_compress=True,
                     http_auth=(ELASTIC_USER, ELASTIC_PASSWORD),
                     use_ssl=True,

@@ -70,52 +70,52 @@ class SearchService:
                     ssl_show_warn=False,
                     # ca_certs = ca_certs_path
                 )
-                logger.info(' Клиент OpenSearch.org подключен')
+                logger.info(" Клиент OpenSearch.org подключен")

                 # Create a task and run it in the event loop
                 search_loop.create_task(self.check_index())
             except Exception as exc:
-                logger.error(f' {exc}')
+                logger.error(f" {exc}")
                 self.client = None

     def info(self):
         if isinstance(self.client, OpenSearch):
-            logger.info(' Поиск подключен')  # : {self.client.info()}')
+            logger.info(" Поиск подключен")  # : {self.client.info()}')
         else:
-            logger.info(' * Задайте переменные среды для подключения к серверу поиска')
+            logger.info(" * Задайте переменные среды для подключения к серверу поиска")

     def delete_index(self):
         if self.client:
-            logger.debug(f' Удаляем индекс {self.index_name}')
+            logger.debug(f" Удаляем индекс {self.index_name}")
             self.client.indices.delete(index=self.index_name, ignore_unavailable=True)

     def create_index(self):
         if self.client:
-            logger.debug(f'Создается индекс: {self.index_name}')
+            logger.debug(f"Создается индекс: {self.index_name}")
             self.delete_index()
             self.client.indices.create(index=self.index_name, body=index_settings)
-            logger.debug(f'Индекс {self.index_name} создан')
+            logger.debug(f"Индекс {self.index_name} создан")

     async def check_index(self):
         if self.client:
-            logger.debug(f' Проверяем индекс {self.index_name}...')
+            logger.debug(f" Проверяем индекс {self.index_name}...")
             if not self.client.indices.exists(index=self.index_name):
                 self.create_index()
                 self.client.indices.put_mapping(
                     index=self.index_name, body=expected_mapping
                 )
             else:
-                logger.info(f'найден существующий индекс {self.index_name}')
+                logger.info(f"найден существующий индекс {self.index_name}")
                 # Check if the mapping is correct, and recreate the index if needed
                 result = self.client.indices.get_mapping(index=self.index_name)
                 if isinstance(result, str):
                     result = json.loads(result)
                 if isinstance(result, dict):
-                    mapping = result.get('mapping')
+                    mapping = result.get("mapping")
                     if mapping and mapping != expected_mapping:
-                        logger.debug(f' найдена структура индексации: {mapping}')
+                        logger.debug(f" найдена структура индексации: {mapping}")
                         logger.warn(
-                            ' требуется другая структура индексации, переиндексация'
+                            " требуется другая структура индексации, переиндексация"
                         )
                         await self.recreate_index()

@@ -130,28 +130,30 @@ class SearchService:
     def index(self, shout):
         if self.client:
             id_ = str(shout.id)
-            logger.debug(f' Индексируем пост {id_}')
+            logger.debug(f" Индексируем пост {id_}")
             asyncio.create_task(self.perform_index(shout))

     async def perform_index(self, shout):
         if self.client:
-            self.client.index(index=self.index_name, id=str(shout.id), body=shout.dict())
+            self.client.index(
+                index=self.index_name, id=str(shout.id), body=shout.dict()
+            )

     async def search(self, text, limit, offset):
-        logger.debug(f' Ищем: {text}')
+        logger.debug(f" Ищем: {text}")
-        search_body = {'query': {'match': {'_all': text}}}
+        search_body = {"query": {"match": {"_all": text}}}
         if self.client:
             search_response = self.client.search(
                 index=self.index_name, body=search_body, size=limit, from_=offset
             )
-            hits = search_response['hits']['hits']
+            hits = search_response["hits"]["hits"]

-            results = [{**hit['_source'], 'score': hit['_score']} for hit in hits]
+            results = [{**hit["_source"], "score": hit["_score"]} for hit in hits]

             # Use Redis as cache with TTL
-            redis_key = f'search:{text}'
+            redis_key = f"search:{text}"
             await redis.execute(
-                'SETEX',
+                "SETEX",
                 redis_key,
                 REDIS_TTL,
                 json.dumps(results, cls=CustomJSONEncoder),
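As an aside, a minimal sketch of the SETEX caching pattern used in search() above (the key name, TTL and payload below are illustrative; the service itself goes through its async RedisCache wrapper rather than a synchronous client):

import json

import redis

r = redis.Redis.from_url("redis://127.0.0.1")
results = [{"slug": "example", "score": 1.0}]
r.setex("search:example", 86400, json.dumps(results))  # value expires after one day
print(r.get("search:example"))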
@@ -26,5 +26,5 @@ def start_sentry():
             ],
         )
     except Exception as e:
-        print('[services.sentry] init error')
+        print("[services.sentry] init error")
         print(e)
@@ -14,13 +14,15 @@ from services.logger import root_logger as logger
 from services.cache import cache_author, cache_follows, cache_follower

 DEFAULT_FOLLOWS = {
-    'topics': [],
+    "topics": [],
-    'authors': [],
+    "authors": [],
-    'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}],
+    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
 }


-async def handle_author_follower_change(author_id: int, follower_id: int, is_insert: bool):
+async def handle_author_follower_change(
+    author_id: int, follower_id: int, is_insert: bool
+):
     logger.info(author_id)
     author_query = select(Author).select_from(Author).filter(Author.id == author_id)
     [author] = get_with_stat(author_query)

@@ -29,11 +31,13 @@ async def handle_author_follower_change(author_id: int, follower_id: int, is_ins
     if follower and author:
         await cache_author(author.dict())
         await cache_author(follower.dict())
-        await cache_follows(follower, 'author', author.dict(), is_insert)
+        await cache_follows(follower, "author", author.dict(), is_insert)
         await cache_follower(follower, author, is_insert)


-async def handle_topic_follower_change(topic_id: int, follower_id: int, is_insert: bool):
+async def handle_topic_follower_change(
+    topic_id: int, follower_id: int, is_insert: bool
+):
     logger.info(topic_id)
     topic_query = select(Topic).filter(Topic.id == topic_id)
     [topic] = get_with_stat(topic_query)

@@ -41,15 +45,17 @@ async def handle_topic_follower_change(topic_id: int, follower_id: int, is_inser
     [follower] = get_with_stat(follower_query)
     if follower and topic:
         await cache_author(follower.dict())
-        await redis.execute('SET', f'topic:{topic.id}', json.dumps(topic.dict(), cls=CustomJSONEncoder))
-        await cache_follows(follower, 'topic', topic.dict(), is_insert)
+        await redis.execute(
+            "SET", f"topic:{topic.id}", json.dumps(topic.dict(), cls=CustomJSONEncoder)
+        )
+        await cache_follows(follower, "topic", topic.dict(), is_insert)


 # handle_author_follow and handle_topic_follow -> cache_author, cache_follows, cache_followers


 def after_shout_update(_mapper, _connection, shout: Shout):
-    logger.info('after shout update')
+    logger.info("after shout update")
     # Main query to get authors associated with the shout through ShoutAuthor
     authors_query = (
         select(Author)

@@ -63,7 +69,7 @@ def after_shout_update(_mapper, _connection, shout: Shout):


 def after_reaction_update(mapper, connection, reaction: Reaction):
-    logger.info('after reaction update')
+    logger.info("after reaction update")
     try:
         author_subquery = select(Author).where(Author.id == reaction.created_by)
         replied_author_subquery = (

@@ -98,7 +104,7 @@ def after_reaction_update(mapper, connection, reaction: Reaction):


 def after_author_update(_mapper, _connection, author: Author):
-    logger.info('after author update')
+    logger.info("after author update")
     q = select(Author).where(Author.id == author.id)
     result = get_with_stat(q)
     if result:

@@ -135,19 +141,19 @@ def after_author_follower_delete(_mapper, _connection, target: AuthorFollower):


 def events_register():
-    event.listen(Shout, 'after_insert', after_shout_update)
+    event.listen(Shout, "after_insert", after_shout_update)
-    event.listen(Shout, 'after_update', after_shout_update)
+    event.listen(Shout, "after_update", after_shout_update)

-    event.listen(Reaction, 'after_insert', after_reaction_update)
+    event.listen(Reaction, "after_insert", after_reaction_update)
-    event.listen(Reaction, 'after_update', after_reaction_update)
+    event.listen(Reaction, "after_update", after_reaction_update)

-    event.listen(Author, 'after_insert', after_author_update)
+    event.listen(Author, "after_insert", after_author_update)
-    event.listen(Author, 'after_update', after_author_update)
+    event.listen(Author, "after_update", after_author_update)

-    event.listen(AuthorFollower, 'after_insert', after_author_follower_insert)
+    event.listen(AuthorFollower, "after_insert", after_author_follower_insert)
-    event.listen(AuthorFollower, 'after_delete', after_author_follower_delete)
+    event.listen(AuthorFollower, "after_delete", after_author_follower_delete)

-    event.listen(TopicFollower, 'after_insert', after_topic_follower_insert)
+    event.listen(TopicFollower, "after_insert", after_topic_follower_insert)
-    event.listen(TopicFollower, 'after_delete', after_topic_follower_delete)
+    event.listen(TopicFollower, "after_delete", after_topic_follower_delete)

-    logger.info('cache events were registered!')
+    logger.info("cache events were registered!")
@@ -4,7 +4,7 @@ from services.rediscache import redis


 async def get_unread_counter(chat_id: str, author_id: int) -> int:
-    r = await redis.execute('LLEN', f'chats/{chat_id}/unread/{author_id}')
+    r = await redis.execute("LLEN", f"chats/{chat_id}/unread/{author_id}")
     if isinstance(r, str):
         return int(r)
     elif isinstance(r, int):

@@ -14,7 +14,7 @@ async def get_unread_counter(chat_id: str, author_id: int) -> int:


 async def get_total_unread_counter(author_id: int) -> int:
-    chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
+    chats_set = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
     s = 0
     if isinstance(chats_set, str):
         chats_set = json.loads(chats_set)
@@ -7,8 +7,12 @@ from typing import Dict

 # ga
 from google.analytics.data_v1beta import BetaAnalyticsDataClient
-from google.analytics.data_v1beta.types import (DateRange, Dimension, Metric,
-                                                RunReportRequest)
+from google.analytics.data_v1beta.types import (
+    DateRange,
+    Dimension,
+    Metric,
+    RunReportRequest,
+)

 from orm.author import Author
 from orm.shout import Shout, ShoutAuthor, ShoutTopic

@@ -16,9 +20,9 @@ from orm.topic import Topic
 from services.db import local_session
 from services.logger import root_logger as logger

-GOOGLE_KEYFILE_PATH = os.environ.get('GOOGLE_KEYFILE_PATH', '/dump/google-service.json')
+GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")
-GOOGLE_PROPERTY_ID = os.environ.get('GOOGLE_PROPERTY_ID', '')
+GOOGLE_PROPERTY_ID = os.environ.get("GOOGLE_PROPERTY_ID", "")
-VIEWS_FILEPATH = '/dump/views.json'
+VIEWS_FILEPATH = "/dump/views.json"


 class ViewedStorage:

@@ -41,17 +45,17 @@ class ViewedStorage:
         # Загрузка предварительно подсчитанных просмотров из файла JSON
         self.load_precounted_views()

-        os.environ.setdefault('GOOGLE_APPLICATION_CREDENTIALS', GOOGLE_KEYFILE_PATH)
+        os.environ.setdefault("GOOGLE_APPLICATION_CREDENTIALS", GOOGLE_KEYFILE_PATH)
         if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH):
             # Using a default constructor instructs the client to use the credentials
             # specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
             self.analytics_client = BetaAnalyticsDataClient()
-            logger.info(' * Клиент Google Analytics успешно авторизован')
+            logger.info(" * Клиент Google Analytics успешно авторизован")

             # Запуск фоновой задачи
             _task = asyncio.create_task(self.worker())
         else:
-            logger.info(' * Пожалуйста, добавьте ключевой файл Google Analytics')
+            logger.info(" * Пожалуйста, добавьте ключевой файл Google Analytics")
             self.disabled = True

     @staticmethod

@@ -63,44 +67,44 @@ class ViewedStorage:
                 self.file_modification_timestamp = os.path.getmtime(VIEWS_FILEPATH)
                 self.start_date = datetime.fromtimestamp(
                     self.file_modification_timestamp
-                ).strftime('%Y-%m-%d')
+                ).strftime("%Y-%m-%d")
-                now_date = datetime.now().strftime('%Y-%m-%d')
+                now_date = datetime.now().strftime("%Y-%m-%d")

                 if now_date == self.start_date:
-                    logger.info(' * Данные актуализованы!')
+                    logger.info(" * Данные актуализованы!")
                 else:
                     logger.warn(
-                        f' * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}'
+                        f" * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}"
                     )

-                with open(VIEWS_FILEPATH, 'r') as file:
+                with open(VIEWS_FILEPATH, "r") as file:
                     precounted_views = json.load(file)
                     self.views_by_shout.update(precounted_views)
                     logger.info(
-                        f' * {len(precounted_views)} публикаций с просмотрами успешно загружены.'
+                        f" * {len(precounted_views)} публикаций с просмотрами успешно загружены."
                     )
             else:
-                logger.info(' * Файл просмотров не найден.')
+                logger.info(" * Файл просмотров не найден.")
         except Exception as e:
-            logger.error(f'Ошибка загрузки предварительно подсчитанных просмотров: {e}')
+            logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}")

     # noinspection PyTypeChecker
     @staticmethod
     async def update_pages():
         """Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
         self = ViewedStorage
-        logger.info(' ⎧ Обновление данных просмотров от Google Analytics ---')
+        logger.info(" ⎧ Обновление данных просмотров от Google Analytics ---")
         if not self.disabled:
             try:
                 start = time.time()
                 async with self.lock:
                     if self.analytics_client:
                         request = RunReportRequest(
-                            property=f'properties/{GOOGLE_PROPERTY_ID}',
+                            property=f"properties/{GOOGLE_PROPERTY_ID}",
-                            dimensions=[Dimension(name='pagePath')],
+                            dimensions=[Dimension(name="pagePath")],
-                            metrics=[Metric(name='screenPageViews')],
+                            metrics=[Metric(name="screenPageViews")],
                             date_ranges=[
-                                DateRange(start_date=self.start_date, end_date='today')
+                                DateRange(start_date=self.start_date, end_date="today")
                             ],
                         )
                         response = self.analytics_client.run_report(request)

@@ -114,7 +118,7 @@ class ViewedStorage:
                         # Извлечение путей страниц из ответа Google Analytics
                         if isinstance(row.dimension_values, list):
                             page_path = row.dimension_values[0].value
-                            slug = page_path.split('discours.io/')[-1]
+                            slug = page_path.split("discours.io/")[-1]
                             views_count = int(row.metric_values[0].value)

                             # Обновление данных в хранилище

@@ -127,10 +131,10 @@ class ViewedStorage:
                             # Запись путей страниц для логирования
                             slugs.add(slug)

-                logger.info(f' ⎪ Собрано страниц: {len(slugs)} ')
+                logger.info(f" ⎪ Собрано страниц: {len(slugs)} ")

                 end = time.time()
-                logger.info(' ⎪ Обновление страниц заняло %fs ' % (end - start))
+                logger.info(" ⎪ Обновление страниц заняло %fs " % (end - start))
             except Exception as error:
                 logger.error(error)

@@ -211,18 +215,18 @@ class ViewedStorage:
             except Exception as exc:
                 failed += 1
                 logger.debug(exc)
-                logger.info(' - Обновление не удалось #%d, ожидание 10 секунд' % failed)
+                logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed)
                 if failed > 3:
-                    logger.info(' - Больше не пытаемся обновить')
+                    logger.info(" - Больше не пытаемся обновить")
                     break
             if failed == 0:
                 when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                 t = format(when.astimezone().isoformat())
                 logger.info(
-                    ' ⎩ Следующее обновление: %s'
+                    " ⎩ Следующее обновление: %s"
-                    % (t.split('T')[0] + ' ' + t.split('T')[1].split('.')[0])
+                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
                 )
                 await asyncio.sleep(self.period)
             else:
                 await asyncio.sleep(10)
-                logger.info(' - Попытка снова обновить данные')
+                logger.info(" - Попытка снова обновить данные")
@@ -15,50 +15,50 @@ class WebhookEndpoint(HTTPEndpoint):
         try:
             data = await request.json()
             if not data:
-                raise HTTPException(status_code=400, detail='Request body is empty')
+                raise HTTPException(status_code=400, detail="Request body is empty")
-            auth = request.headers.get('Authorization')
+            auth = request.headers.get("Authorization")
-            if not auth or auth != os.environ.get('WEBHOOK_SECRET'):
+            if not auth or auth != os.environ.get("WEBHOOK_SECRET"):
                 raise HTTPException(
-                    status_code=401, detail='Invalid Authorization header'
+                    status_code=401, detail="Invalid Authorization header"
                 )
             # logger.debug(data)
-            user = data.get('user')
+            user = data.get("user")
             if not isinstance(user, dict):
                 raise HTTPException(
-                    status_code=400, detail='User data is not a dictionary'
+                    status_code=400, detail="User data is not a dictionary"
                 )
-            user_id: str = user.get('id', '')
+            user_id: str = user.get("id", "")
             name: str = (
                 f"{user.get('given_name', user.get('slug'))} {user.get('middle_name', '')}"
                 + f"{user.get('family_name', '')}".strip()
-            ) or 'Аноним'
+            ) or "Аноним"
-            email: str = user.get('email', '')
+            email: str = user.get("email", "")
-            pic: str = user.get('picture', '')
+            pic: str = user.get("picture", "")

             with local_session() as session:
                 author = session.query(Author).filter(Author.user == user_id).first()
                 if not author:
                     # If the author does not exist, create a new one
-                    slug: str = email.split('@')[0].replace('.', '-').lower()
+                    slug: str = email.split("@")[0].replace(".", "-").lower()
-                    slug: str = re.sub('[^0-9a-z]+', '-', slug)
+                    slug: str = re.sub("[^0-9a-z]+", "-", slug)
                     while True:
                         author = (
                             session.query(Author).filter(Author.slug == slug).first()
                         )
                         if not author:
                             break
-                        slug = f'{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}'
+                        slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"
                     author = Author(user=user_id, slug=slug, name=name, pic=pic)
                     session.add(author)
                     session.commit()

-            return JSONResponse({'status': 'success'})
+            return JSONResponse({"status": "success"})
         except HTTPException as e:
             return JSONResponse(
-                {'status': 'error', 'message': str(e.detail)}, status_code=e.status_code
+                {"status": "error", "message": str(e.detail)}, status_code=e.status_code
             )
         except Exception as e:
             import traceback

             traceback.print_exc()
-            return JSONResponse({'status': 'error', 'message': str(e)}, status_code=500)
+            return JSONResponse({"status": "error", "message": str(e)}, status_code=500)
settings.py (20 changed lines)
@@ -3,15 +3,15 @@ from os import environ

 PORT = 8000
 DB_URL = (
-    environ.get('DATABASE_URL', '').replace('postgres://', 'postgresql://')
+    environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
-    or environ.get('DB_URL', '').replace('postgres://', 'postgresql://')
+    or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
-    or 'postgresql://postgres@localhost:5432/discoursio'
+    or "postgresql://postgres@localhost:5432/discoursio"
 )
-REDIS_URL = environ.get('REDIS_URL') or 'redis://127.0.0.1'
+REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
-API_BASE = environ.get('API_BASE') or ''
+API_BASE = environ.get("API_BASE") or ""
-AUTH_URL = environ.get('AUTH_URL') or ''
+AUTH_URL = environ.get("AUTH_URL") or ""
-GLITCHTIP_DSN = environ.get('GLITCHTIP_DSN')
+GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN")
-DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
+DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
-MODE = 'development' if 'dev' in sys.argv else 'production'
+MODE = "development" if "dev" in sys.argv else "production"

-ADMIN_SECRET = environ.get('AUTH_SECRET') or 'nothing'
+ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing"
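A small sketch of what the DB_URL normalization above does: Heroku-style postgres:// URLs are rewritten to the postgresql:// scheme that SQLAlchemy expects (the sample URL below is illustrative):

from os import environ

environ.setdefault("DATABASE_URL", "postgres://user:pass@db:5432/discoursio")
db_url = environ["DATABASE_URL"].replace("postgres://", "postgresql://")
print(db_url)  # postgresql://user:pass@db:5432/discoursio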