fmt+follows-refactored
This commit is contained in:
parent
a9ab2e8bb2
commit
e638ad81e2
|
@ -1,7 +1,7 @@
|
||||||
import json
|
import json
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from sqlalchemy import and_, desc, or_, select, text
|
from sqlalchemy import and_, desc, select, text
|
||||||
from sqlalchemy.orm import aliased
|
from sqlalchemy.orm import aliased
|
||||||
|
|
||||||
from orm.author import Author, AuthorFollower
|
from orm.author import Author, AuthorFollower
|
||||||
|
@ -72,9 +72,7 @@ async def get_author(_, _info, slug="", author_id=0):
|
||||||
logger.debug(f"found author id: {found_author.id}")
|
logger.debug(f"found author id: {found_author.id}")
|
||||||
author_id = found_author.id if found_author.id else author_id
|
author_id = found_author.id if found_author.id else author_id
|
||||||
if author_id:
|
if author_id:
|
||||||
cached_result = await redis.execute(
|
cached_result = await redis.execute("GET", f"author:{author_id}")
|
||||||
"GET", f"author:{author_id}"
|
|
||||||
)
|
|
||||||
if isinstance(cached_result, str):
|
if isinstance(cached_result, str):
|
||||||
author_dict = json.loads(cached_result)
|
author_dict = json.loads(cached_result)
|
||||||
|
|
||||||
|
@ -140,10 +138,7 @@ async def load_authors_by(_, _info, by, limit, offset):
|
||||||
authors_query = authors_query.filter(Author.name.ilike(f"%{by['name']}%"))
|
authors_query = authors_query.filter(Author.name.ilike(f"%{by['name']}%"))
|
||||||
elif by.get("topic"):
|
elif by.get("topic"):
|
||||||
authors_query = (
|
authors_query = (
|
||||||
authors_query.join(ShoutAuthor)
|
authors_query.join(ShoutAuthor).join(ShoutTopic).join(Topic).where(Topic.slug == str(by["topic"]))
|
||||||
.join(ShoutTopic)
|
|
||||||
.join(Topic)
|
|
||||||
.where(Topic.slug == str(by["topic"]))
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if by.get("last_seen"): # in unix time
|
if by.get("last_seen"): # in unix time
|
||||||
|
@ -162,14 +157,10 @@ async def load_authors_by(_, _info, by, limit, offset):
|
||||||
if isinstance(a, Author):
|
if isinstance(a, Author):
|
||||||
author_id = a.id
|
author_id = a.id
|
||||||
if bool(author_id):
|
if bool(author_id):
|
||||||
cached_result = await redis.execute(
|
cached_result = await redis.execute("GET", f"author:{author_id}")
|
||||||
"GET", f"author:{author_id}"
|
|
||||||
)
|
|
||||||
if isinstance(cached_result, str):
|
if isinstance(cached_result, str):
|
||||||
author_dict = json.loads(cached_result)
|
author_dict = json.loads(cached_result)
|
||||||
if not author_dict or not isinstance(
|
if not author_dict or not isinstance(author_dict.get("shouts"), int):
|
||||||
author_dict.get("shouts"), int
|
|
||||||
):
|
|
||||||
break
|
break
|
||||||
|
|
||||||
# order
|
# order
|
||||||
|
@ -181,39 +172,41 @@ async def load_authors_by(_, _info, by, limit, offset):
|
||||||
return authors or []
|
return authors or []
|
||||||
|
|
||||||
|
|
||||||
@query.field("get_author_follows")
|
def get_author_id_from(slug="", user=None, author_id=None):
|
||||||
async def get_author_follows(_, _info, slug="", user=None, author_id=0):
|
if not slug and not user and not author_id:
|
||||||
try:
|
raise ValueError("One of slug, user, or author_id must be provided")
|
||||||
author_query = select(Author)
|
|
||||||
|
author_query = select(Author.id)
|
||||||
if user:
|
if user:
|
||||||
author_query = author_query.filter(Author.user == user)
|
author_query = author_query.filter(Author.user == user)
|
||||||
elif slug:
|
elif slug:
|
||||||
author_query = author_query.filter(Author.slug == slug)
|
author_query = author_query.filter(Author.slug == slug)
|
||||||
elif author_id:
|
elif author_id:
|
||||||
author_query = author_query.filter(Author.id == author_id)
|
author_query = author_query.filter(Author.id == author_id)
|
||||||
else:
|
|
||||||
return {"error": "One of slug, user, or author_id must be provided"}
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
result = session.execute(author_query)
|
author_id_result = session.execute(author_query).first()
|
||||||
if result:
|
author_id = author_id_result[0] if author_id_result else None
|
||||||
# logger.debug(result)
|
|
||||||
[author] = result
|
if not author_id:
|
||||||
# logger.debug(author)
|
raise ValueError("Author not found")
|
||||||
if author and isinstance(author, Author):
|
|
||||||
# logger.debug(author.dict())
|
return author_id
|
||||||
author_id = author.id if not author_id else author_id
|
|
||||||
|
|
||||||
|
@query.field("get_author_follows")
|
||||||
|
async def get_author_follows(_, _info, slug="", user=None, author_id=0):
|
||||||
|
try:
|
||||||
|
author_id = get_author_id_from(slug, user, author_id)
|
||||||
topics = []
|
topics = []
|
||||||
authors = []
|
authors = []
|
||||||
if bool(author_id):
|
|
||||||
rkey = f"author:{author_id}:follows-authors"
|
rkey = f"author:{author_id}:follows-authors"
|
||||||
logger.debug(f"getting {author_id} follows authors")
|
logger.debug(f"getting {author_id} follows authors")
|
||||||
cached = await redis.execute("GET", rkey)
|
cached = await redis.execute("GET", rkey)
|
||||||
if not cached:
|
if not cached:
|
||||||
authors = author_follows_authors(author_id) # type: ignore
|
authors = author_follows_authors(author_id) # type: ignore
|
||||||
prepared = [author.dict() for author in authors]
|
prepared = [a.dict() for a in authors]
|
||||||
await redis.execute(
|
await redis.execute("SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder))
|
||||||
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
|
||||||
)
|
|
||||||
elif isinstance(cached, str):
|
elif isinstance(cached, str):
|
||||||
authors = json.loads(cached)
|
authors = json.loads(cached)
|
||||||
|
|
||||||
|
@ -224,15 +217,11 @@ async def get_author_follows(_, _info, slug="", user=None, author_id=0):
|
||||||
if not cached:
|
if not cached:
|
||||||
topics = author_follows_topics(author_id) # type: ignore
|
topics = author_follows_topics(author_id) # type: ignore
|
||||||
prepared = [topic.dict() for topic in topics]
|
prepared = [topic.dict() for topic in topics]
|
||||||
await redis.execute(
|
await redis.execute("SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder))
|
||||||
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
|
||||||
)
|
|
||||||
return {
|
return {
|
||||||
"topics": topics,
|
"topics": topics,
|
||||||
"authors": authors,
|
"authors": authors,
|
||||||
"communities": [
|
"communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
|
||||||
{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}
|
|
||||||
],
|
|
||||||
}
|
}
|
||||||
except Exception:
|
except Exception:
|
||||||
import traceback
|
import traceback
|
||||||
|
@ -243,16 +232,8 @@ async def get_author_follows(_, _info, slug="", user=None, author_id=0):
|
||||||
|
|
||||||
@query.field("get_author_follows_topics")
|
@query.field("get_author_follows_topics")
|
||||||
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
|
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
|
||||||
with local_session() as session:
|
try:
|
||||||
if user or slug:
|
author_id = get_author_id_from(slug, user, author_id)
|
||||||
author_id_result = (
|
|
||||||
session.query(Author.id)
|
|
||||||
.filter(or_(Author.user == user, Author.slug == slug))
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
author_id = author_id_result[0] if author_id_result else None
|
|
||||||
if not author_id:
|
|
||||||
raise ValueError("Author not found")
|
|
||||||
logger.debug(f"getting {author_id} follows topics")
|
logger.debug(f"getting {author_id} follows topics")
|
||||||
rkey = f"author:{author_id}:follows-topics"
|
rkey = f"author:{author_id}:follows-topics"
|
||||||
cached = await redis.execute("GET", rkey)
|
cached = await redis.execute("GET", rkey)
|
||||||
|
@ -262,23 +243,18 @@ async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None
|
||||||
if not cached:
|
if not cached:
|
||||||
topics = author_follows_topics(author_id)
|
topics = author_follows_topics(author_id)
|
||||||
prepared = [topic.dict() for topic in topics]
|
prepared = [topic.dict() for topic in topics]
|
||||||
await redis.execute(
|
await redis.execute("SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder))
|
||||||
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
|
||||||
)
|
|
||||||
return topics
|
return topics
|
||||||
|
except Exception:
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
traceback.print_exc()
|
||||||
|
|
||||||
|
|
||||||
@query.field("get_author_follows_authors")
|
@query.field("get_author_follows_authors")
|
||||||
async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None):
|
async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None):
|
||||||
with local_session() as session:
|
try:
|
||||||
if user or slug:
|
author_id = get_author_id_from(slug, user, author_id)
|
||||||
author_id_result = (
|
|
||||||
session.query(Author.id)
|
|
||||||
.filter(or_(Author.user == user, Author.slug == slug))
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
author_id = author_id_result[0] if author_id_result else None
|
|
||||||
if author_id:
|
|
||||||
logger.debug(f"getting {author_id} follows authors")
|
logger.debug(f"getting {author_id} follows authors")
|
||||||
rkey = f"author:{author_id}:follows-authors"
|
rkey = f"author:{author_id}:follows-authors"
|
||||||
cached = await redis.execute("GET", rkey)
|
cached = await redis.execute("GET", rkey)
|
||||||
|
@ -288,12 +264,12 @@ async def get_author_follows_authors(_, _info, slug="", user=None, author_id=Non
|
||||||
if not authors:
|
if not authors:
|
||||||
authors = author_follows_authors(author_id)
|
authors = author_follows_authors(author_id)
|
||||||
prepared = [author.dict() for author in authors]
|
prepared = [author.dict() for author in authors]
|
||||||
await redis.execute(
|
await redis.execute("SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder))
|
||||||
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
|
||||||
)
|
|
||||||
return authors
|
return authors
|
||||||
else:
|
except Exception:
|
||||||
raise ValueError("Author not found")
|
import traceback
|
||||||
|
|
||||||
|
traceback.print_exc()
|
||||||
|
|
||||||
|
|
||||||
def create_author(user_id: str, slug: str, name: str = ""):
|
def create_author(user_id: str, slug: str, name: str = ""):
|
||||||
|
@ -332,9 +308,7 @@ async def get_author_followers(_, _info, slug: str):
|
||||||
if cached and cached_author:
|
if cached and cached_author:
|
||||||
followers = json.loads(cached)
|
followers = json.loads(cached)
|
||||||
author_dict = json.loads(cached_author)
|
author_dict = json.loads(cached_author)
|
||||||
if isinstance(followers, list) and str(len(followers)) == str(
|
if isinstance(followers, list) and str(len(followers)) == str(author_dict["stat"]["followers"]):
|
||||||
author_dict["stat"]["followers"]
|
|
||||||
):
|
|
||||||
logger.debug(f"@{slug} got {len(followers)} followers cached")
|
logger.debug(f"@{slug} got {len(followers)} followers cached")
|
||||||
return followers
|
return followers
|
||||||
|
|
||||||
|
@ -352,9 +326,7 @@ async def get_author_followers(_, _info, slug: str):
|
||||||
|
|
||||||
if isinstance(followers, list):
|
if isinstance(followers, list):
|
||||||
followers_data = [follower.dict() for follower in followers]
|
followers_data = [follower.dict() for follower in followers]
|
||||||
await redis.execute(
|
await redis.execute("SET", f"author:{author_id}:followers", json.dumps(followers_data))
|
||||||
"SET", f"author:{author_id}:followers", json.dumps(followers_data)
|
|
||||||
)
|
|
||||||
logger.debug(f"@{slug} cache updated with {len(followers)} followers")
|
logger.debug(f"@{slug} cache updated with {len(followers)} followers")
|
||||||
return followers
|
return followers
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
|
|
|
@ -18,18 +18,12 @@ async def accept_invite(_, info, invite_id: int):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
# Check if the invite exists
|
# Check if the invite exists
|
||||||
invite = session.query(Invite).filter(Invite.id == invite_id).first()
|
invite = session.query(Invite).filter(Invite.id == invite_id).first()
|
||||||
if (
|
if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
|
||||||
invite
|
|
||||||
and invite.author_id is author_id
|
|
||||||
and invite.status is InviteStatus.PENDING.value
|
|
||||||
):
|
|
||||||
# Add the user to the shout authors
|
# Add the user to the shout authors
|
||||||
shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
|
shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
|
||||||
if shout:
|
if shout:
|
||||||
if author_id not in shout.authors:
|
if author_id not in shout.authors:
|
||||||
author = (
|
author = session.query(Author).filter(Author.id == author_id).first()
|
||||||
session.query(Author).filter(Author.id == author_id).first()
|
|
||||||
)
|
|
||||||
if author:
|
if author:
|
||||||
shout.authors.append(author)
|
shout.authors.append(author)
|
||||||
session.add(shout)
|
session.add(shout)
|
||||||
|
@ -57,11 +51,7 @@ async def reject_invite(_, info, invite_id: int):
|
||||||
author_id = int(author_id)
|
author_id = int(author_id)
|
||||||
# Check if the invite exists
|
# Check if the invite exists
|
||||||
invite = session.query(Invite).filter(Invite.id == invite_id).first()
|
invite = session.query(Invite).filter(Invite.id == invite_id).first()
|
||||||
if (
|
if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
|
||||||
invite
|
|
||||||
and invite.author_id is author_id
|
|
||||||
and invite.status is InviteStatus.PENDING.value
|
|
||||||
):
|
|
||||||
# Delete the invite
|
# Delete the invite
|
||||||
session.delete(invite)
|
session.delete(invite)
|
||||||
session.commit()
|
session.commit()
|
||||||
|
@ -124,9 +114,7 @@ async def remove_author(_, info, slug: str = "", author_id: int = 0):
|
||||||
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||||
# NOTE: owner should be first in a list
|
# NOTE: owner should be first in a list
|
||||||
if shout and author.id is shout.created_by:
|
if shout and author.id is shout.created_by:
|
||||||
shout.authors = [
|
shout.authors = [author for author in shout.authors if author.id != author_id]
|
||||||
author for author in shout.authors if author.id != author_id
|
|
||||||
]
|
|
||||||
session.commit()
|
session.commit()
|
||||||
return {}
|
return {}
|
||||||
return {"error": "Access denied"}
|
return {"error": "Access denied"}
|
||||||
|
|
|
@ -14,9 +14,7 @@ def get_communities_from_query(q):
|
||||||
for [c, shouts_stat, followers_stat] in session.execute(q):
|
for [c, shouts_stat, followers_stat] in session.execute(q):
|
||||||
c.stat = {
|
c.stat = {
|
||||||
"shouts": session.execute(
|
"shouts": session.execute(
|
||||||
select(func.count(distinct(ShoutCommunity.shout))).filter(
|
select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)
|
||||||
ShoutCommunity.community == c.id
|
|
||||||
)
|
|
||||||
),
|
),
|
||||||
# "authors": session.execute(select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)),
|
# "authors": session.execute(select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)),
|
||||||
# "followers": session.execute(select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)),
|
# "followers": session.execute(select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)),
|
||||||
|
|
|
@ -57,16 +57,12 @@ async def get_my_shout(_, info, shout_id: int):
|
||||||
if not shout:
|
if not shout:
|
||||||
return {"error": "no shout found", "shout": None}
|
return {"error": "no shout found", "shout": None}
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(f"got shout authors: {shout.authors} created by {shout.created_by}")
|
||||||
f"got shout authors: {shout.authors} created by {shout.created_by}"
|
|
||||||
)
|
|
||||||
is_editor = "editor" in roles
|
is_editor = "editor" in roles
|
||||||
logger.debug(f'viewer is{'' if is_editor else ' not'} editor')
|
logger.debug(f'viewer is{'' if is_editor else ' not'} editor')
|
||||||
is_creator = author_id == shout.created_by
|
is_creator = author_id == shout.created_by
|
||||||
logger.debug(f'viewer is{'' if is_creator else ' not'} creator')
|
logger.debug(f'viewer is{'' if is_creator else ' not'} creator')
|
||||||
is_author = bool(
|
is_author = bool(list(filter(lambda x: x.id == int(author_id), [x for x in shout.authors])))
|
||||||
list(filter(lambda x: x.id == int(author_id), [x for x in shout.authors]))
|
|
||||||
)
|
|
||||||
logger.debug(f'viewer is{'' if is_creator else ' not'} author')
|
logger.debug(f'viewer is{'' if is_creator else ' not'} author')
|
||||||
can_edit = is_editor or is_author or is_creator
|
can_edit = is_editor or is_author or is_creator
|
||||||
|
|
||||||
|
@ -91,9 +87,7 @@ async def get_shouts_drafts(_, info):
|
||||||
q = (
|
q = (
|
||||||
select(Shout)
|
select(Shout)
|
||||||
.options(joinedload(Shout.authors), joinedload(Shout.topics))
|
.options(joinedload(Shout.authors), joinedload(Shout.topics))
|
||||||
.filter(
|
.filter(and_(Shout.deleted_at.is_(None), Shout.created_by == int(author_id)))
|
||||||
and_(Shout.deleted_at.is_(None), Shout.created_by == int(author_id))
|
|
||||||
)
|
|
||||||
.filter(Shout.published_at.is_(None))
|
.filter(Shout.published_at.is_(None))
|
||||||
.order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
|
.order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
|
||||||
.group_by(Shout.id)
|
.group_by(Shout.id)
|
||||||
|
@ -129,18 +123,10 @@ async def create_shout(_, info, inp):
|
||||||
"published_at": None,
|
"published_at": None,
|
||||||
"created_at": current_time, # Set created_at as Unix timestamp
|
"created_at": current_time, # Set created_at as Unix timestamp
|
||||||
}
|
}
|
||||||
same_slug_shout = (
|
same_slug_shout = session.query(Shout).filter(Shout.slug == shout_dict.get("slug")).first()
|
||||||
session.query(Shout)
|
|
||||||
.filter(Shout.slug == shout_dict.get("slug"))
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
c = 1
|
c = 1
|
||||||
while same_slug_shout is not None:
|
while same_slug_shout is not None:
|
||||||
same_slug_shout = (
|
same_slug_shout = session.query(Shout).filter(Shout.slug == shout_dict.get("slug")).first()
|
||||||
session.query(Shout)
|
|
||||||
.filter(Shout.slug == shout_dict.get("slug"))
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
c += 1
|
c += 1
|
||||||
shout_dict["slug"] += f"-{c}"
|
shout_dict["slug"] += f"-{c}"
|
||||||
new_shout = Shout(**shout_dict)
|
new_shout = Shout(**shout_dict)
|
||||||
|
@ -153,11 +139,7 @@ async def create_shout(_, info, inp):
|
||||||
sa = ShoutAuthor(shout=shout.id, author=author_id)
|
sa = ShoutAuthor(shout=shout.id, author=author_id)
|
||||||
session.add(sa)
|
session.add(sa)
|
||||||
|
|
||||||
topics = (
|
topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()
|
||||||
session.query(Topic)
|
|
||||||
.filter(Topic.slug.in_(inp.get("topics", [])))
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
for topic in topics:
|
for topic in topics:
|
||||||
t = ShoutTopic(topic=topic.id, shout=shout.id)
|
t = ShoutTopic(topic=topic.id, shout=shout.id)
|
||||||
session.add(t)
|
session.add(t)
|
||||||
|
@ -176,18 +158,11 @@ async def create_shout(_, info, inp):
|
||||||
|
|
||||||
def patch_main_topic(session, main_topic, shout):
|
def patch_main_topic(session, main_topic, shout):
|
||||||
with session.begin():
|
with session.begin():
|
||||||
shout = (
|
shout = session.query(Shout).options(joinedload(Shout.topics)).filter(Shout.id == shout.id).first()
|
||||||
session.query(Shout)
|
|
||||||
.options(joinedload(Shout.topics))
|
|
||||||
.filter(Shout.id == shout.id)
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
if not shout:
|
if not shout:
|
||||||
return
|
return
|
||||||
old_main_topic = (
|
old_main_topic = (
|
||||||
session.query(ShoutTopic)
|
session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
|
||||||
.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True)))
|
|
||||||
.first()
|
|
||||||
)
|
)
|
||||||
|
|
||||||
main_topic = session.query(Topic).filter(Topic.slug == main_topic).first()
|
main_topic = session.query(Topic).filter(Topic.slug == main_topic).first()
|
||||||
|
@ -195,19 +170,11 @@ def patch_main_topic(session, main_topic, shout):
|
||||||
if main_topic:
|
if main_topic:
|
||||||
new_main_topic = (
|
new_main_topic = (
|
||||||
session.query(ShoutTopic)
|
session.query(ShoutTopic)
|
||||||
.filter(
|
.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id))
|
||||||
and_(
|
|
||||||
ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.first()
|
.first()
|
||||||
)
|
)
|
||||||
|
|
||||||
if (
|
if old_main_topic and new_main_topic and old_main_topic is not new_main_topic:
|
||||||
old_main_topic
|
|
||||||
and new_main_topic
|
|
||||||
and old_main_topic is not new_main_topic
|
|
||||||
):
|
|
||||||
ShoutTopic.update(old_main_topic, {"main": False})
|
ShoutTopic.update(old_main_topic, {"main": False})
|
||||||
session.add(old_main_topic)
|
session.add(old_main_topic)
|
||||||
|
|
||||||
|
@ -216,9 +183,7 @@ def patch_main_topic(session, main_topic, shout):
|
||||||
|
|
||||||
|
|
||||||
def patch_topics(session, shout, topics_input):
|
def patch_topics(session, shout, topics_input):
|
||||||
new_topics_to_link = [
|
new_topics_to_link = [Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0]
|
||||||
Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0
|
|
||||||
]
|
|
||||||
if new_topics_to_link:
|
if new_topics_to_link:
|
||||||
session.add_all(new_topics_to_link)
|
session.add_all(new_topics_to_link)
|
||||||
session.commit()
|
session.commit()
|
||||||
|
@ -227,9 +192,7 @@ def patch_topics(session, shout, topics_input):
|
||||||
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=new_topic_to_link.id)
|
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=new_topic_to_link.id)
|
||||||
session.add(created_unlinked_topic)
|
session.add(created_unlinked_topic)
|
||||||
|
|
||||||
existing_topics_input = [
|
existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
|
||||||
topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
|
|
||||||
]
|
|
||||||
existing_topic_to_link_ids = [
|
existing_topic_to_link_ids = [
|
||||||
existing_topic_input["id"]
|
existing_topic_input["id"]
|
||||||
for existing_topic_input in existing_topics_input
|
for existing_topic_input in existing_topics_input
|
||||||
|
@ -237,9 +200,7 @@ def patch_topics(session, shout, topics_input):
|
||||||
]
|
]
|
||||||
|
|
||||||
for existing_topic_to_link_id in existing_topic_to_link_ids:
|
for existing_topic_to_link_id in existing_topic_to_link_ids:
|
||||||
created_unlinked_topic = ShoutTopic(
|
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=existing_topic_to_link_id)
|
||||||
shout=shout.id, topic=existing_topic_to_link_id
|
|
||||||
)
|
|
||||||
session.add(created_unlinked_topic)
|
session.add(created_unlinked_topic)
|
||||||
|
|
||||||
topic_to_unlink_ids = [
|
topic_to_unlink_ids = [
|
||||||
|
@ -276,24 +237,15 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
|
||||||
if not shout_by_id:
|
if not shout_by_id:
|
||||||
return {"error": "shout not found"}
|
return {"error": "shout not found"}
|
||||||
if slug != shout_by_id.slug:
|
if slug != shout_by_id.slug:
|
||||||
same_slug_shout = (
|
same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||||
session.query(Shout).filter(Shout.slug == slug).first()
|
|
||||||
)
|
|
||||||
c = 1
|
c = 1
|
||||||
while same_slug_shout is not None:
|
while same_slug_shout is not None:
|
||||||
c += 1
|
c += 1
|
||||||
slug = f"{slug}-{c}"
|
slug = f"{slug}-{c}"
|
||||||
same_slug_shout = (
|
same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||||
session.query(Shout).filter(Shout.slug == slug).first()
|
|
||||||
)
|
|
||||||
shout_input["slug"] = slug
|
shout_input["slug"] = slug
|
||||||
|
|
||||||
if (
|
if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
|
||||||
filter(
|
|
||||||
lambda x: x.id == author_id, [x for x in shout_by_id.authors]
|
|
||||||
)
|
|
||||||
or "editor" in roles
|
|
||||||
):
|
|
||||||
# topics patch
|
# topics patch
|
||||||
topics_input = shout_input.get("topics")
|
topics_input = shout_input.get("topics")
|
||||||
if topics_input:
|
if topics_input:
|
||||||
|
@ -376,17 +328,9 @@ async def delete_shout(_, info, shout_id: int):
|
||||||
|
|
||||||
def handle_proposing(session, r, shout):
|
def handle_proposing(session, r, shout):
|
||||||
if is_positive(r.kind):
|
if is_positive(r.kind):
|
||||||
replied_reaction = (
|
replied_reaction = session.query(Reaction).filter(Reaction.id == r.reply_to, Reaction.shout == r.shout).first()
|
||||||
session.query(Reaction)
|
|
||||||
.filter(Reaction.id == r.reply_to, Reaction.shout == r.shout)
|
|
||||||
.first()
|
|
||||||
)
|
|
||||||
|
|
||||||
if (
|
if replied_reaction and replied_reaction.kind is ReactionKind.PROPOSE.value and replied_reaction.quote:
|
||||||
replied_reaction
|
|
||||||
and replied_reaction.kind is ReactionKind.PROPOSE.value
|
|
||||||
and replied_reaction.quote
|
|
||||||
):
|
|
||||||
# patch all the proposals' quotes
|
# patch all the proposals' quotes
|
||||||
proposals = (
|
proposals = (
|
||||||
session.query(Reaction)
|
session.query(Reaction)
|
||||||
|
@ -403,9 +347,7 @@ def handle_proposing(session, r, shout):
|
||||||
if proposal.quote:
|
if proposal.quote:
|
||||||
proposal_diff = get_diff(shout.body, proposal.quote)
|
proposal_diff = get_diff(shout.body, proposal.quote)
|
||||||
proposal_dict = proposal.dict()
|
proposal_dict = proposal.dict()
|
||||||
proposal_dict["quote"] = apply_diff(
|
proposal_dict["quote"] = apply_diff(replied_reaction.quote, proposal_diff)
|
||||||
replied_reaction.quote, proposal_diff
|
|
||||||
)
|
|
||||||
Reaction.update(proposal, proposal_dict)
|
Reaction.update(proposal, proposal_dict)
|
||||||
session.add(proposal)
|
session.add(proposal)
|
||||||
|
|
||||||
|
|
|
@ -154,9 +154,7 @@ async def get_follows_by_user_id(user_id: str):
|
||||||
follows = {
|
follows = {
|
||||||
"topics": topics,
|
"topics": topics,
|
||||||
"authors": authors,
|
"authors": authors,
|
||||||
"communities": [
|
"communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
|
||||||
{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}
|
|
||||||
],
|
|
||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
logger.debug(f"getting follows for {user_id} from redis")
|
logger.debug(f"getting follows for {user_id} from redis")
|
||||||
|
@ -212,9 +210,7 @@ def reactions_follow(author_id, shout_id, auto=False):
|
||||||
)
|
)
|
||||||
|
|
||||||
if not following:
|
if not following:
|
||||||
following = ShoutReactionsFollower(
|
following = ShoutReactionsFollower(follower=author_id, shout=shout.id, auto=auto)
|
||||||
follower=author_id, shout=shout.id, auto=auto
|
|
||||||
)
|
|
||||||
session.add(following)
|
session.add(following)
|
||||||
session.commit()
|
session.commit()
|
||||||
return None
|
return None
|
||||||
|
@ -269,9 +265,7 @@ def author_unfollow(follower_id, slug):
|
||||||
flw = (
|
flw = (
|
||||||
session.query(AuthorFollower)
|
session.query(AuthorFollower)
|
||||||
.join(Author, Author.id == AuthorFollower.author)
|
.join(Author, Author.id == AuthorFollower.author)
|
||||||
.filter(
|
.filter(and_(AuthorFollower.follower == follower_id, Author.slug == slug))
|
||||||
and_(AuthorFollower.follower == follower_id, Author.slug == slug)
|
|
||||||
)
|
|
||||||
.first()
|
.first()
|
||||||
)
|
)
|
||||||
if flw:
|
if flw:
|
||||||
|
@ -295,9 +289,7 @@ async def get_topic_followers(_, _info, slug: str) -> List[Author]:
|
||||||
|
|
||||||
|
|
||||||
@query.field("get_shout_followers")
|
@query.field("get_shout_followers")
|
||||||
def get_shout_followers(
|
def get_shout_followers(_, _info, slug: str = "", shout_id: int | None = None) -> List[Author]:
|
||||||
_, _info, slug: str = "", shout_id: int | None = None
|
|
||||||
) -> List[Author]:
|
|
||||||
followers = []
|
followers = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
shout = None
|
shout = None
|
||||||
|
|
|
@ -16,9 +16,7 @@ from services.logger import root_logger as logger
|
||||||
from services.schema import mutation, query
|
from services.schema import mutation, query
|
||||||
|
|
||||||
|
|
||||||
def query_notifications(
|
def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
|
||||||
author_id: int, after: int = 0
|
|
||||||
) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
|
|
||||||
notification_seen_alias = aliased(NotificationSeen)
|
notification_seen_alias = aliased(NotificationSeen)
|
||||||
q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
|
q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
|
||||||
NotificationSeen,
|
NotificationSeen,
|
||||||
|
@ -63,9 +61,7 @@ def query_notifications(
|
||||||
return total, unread, notifications
|
return total, unread, notifications
|
||||||
|
|
||||||
|
|
||||||
def group_notification(
|
def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
|
||||||
thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"
|
|
||||||
):
|
|
||||||
reactions = reactions or []
|
reactions = reactions or []
|
||||||
authors = authors or []
|
authors = authors or []
|
||||||
return {
|
return {
|
||||||
|
@ -79,9 +75,7 @@ def group_notification(
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_notifications_grouped(
|
def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0):
|
||||||
author_id: int, after: int = 0, limit: int = 10, offset: int = 0
|
|
||||||
):
|
|
||||||
"""
|
"""
|
||||||
Retrieves notifications for a given author.
|
Retrieves notifications for a given author.
|
||||||
|
|
||||||
|
@ -147,9 +141,7 @@ def get_notifications_grouped(
|
||||||
author_id = reaction.get("created_by", 0)
|
author_id = reaction.get("created_by", 0)
|
||||||
if shout_id and author_id:
|
if shout_id and author_id:
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
author = (
|
author = session.query(Author).filter(Author.id == author_id).first()
|
||||||
session.query(Author).filter(Author.id == author_id).first()
|
|
||||||
)
|
|
||||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||||
if shout and author:
|
if shout and author:
|
||||||
author = author.dict()
|
author = author.dict()
|
||||||
|
@ -162,9 +154,7 @@ def get_notifications_grouped(
|
||||||
if existing_group:
|
if existing_group:
|
||||||
existing_group["seen"] = False
|
existing_group["seen"] = False
|
||||||
existing_group["authors"].append(author_id)
|
existing_group["authors"].append(author_id)
|
||||||
existing_group["reactions"] = (
|
existing_group["reactions"] = existing_group["reactions"] or []
|
||||||
existing_group["reactions"] or []
|
|
||||||
)
|
|
||||||
existing_group["reactions"].append(reaction)
|
existing_group["reactions"].append(reaction)
|
||||||
groups_by_thread[thread_id] = existing_group
|
groups_by_thread[thread_id] = existing_group
|
||||||
else:
|
else:
|
||||||
|
@ -217,9 +207,7 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
|
||||||
try:
|
try:
|
||||||
if author_id:
|
if author_id:
|
||||||
groups, unread, total = get_notifications_grouped(author_id, after, limit)
|
groups, unread, total = get_notifications_grouped(author_id, after, limit)
|
||||||
notifications = sorted(
|
notifications = sorted(groups.values(), key=lambda group: group.updated_at, reverse=True)
|
||||||
groups.values(), key=lambda group: group.updated_at, reverse=True
|
|
||||||
)
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error = e
|
error = e
|
||||||
logger.error(e)
|
logger.error(e)
|
||||||
|
@ -257,11 +245,7 @@ async def notifications_seen_after(_, info, after: int):
|
||||||
author_id = info.context.get("author", {}).get("id")
|
author_id = info.context.get("author", {}).get("id")
|
||||||
if author_id:
|
if author_id:
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
nnn = (
|
nnn = session.query(Notification).filter(and_(Notification.created_at > after)).all()
|
||||||
session.query(Notification)
|
|
||||||
.filter(and_(Notification.created_at > after))
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
for n in nnn:
|
for n in nnn:
|
||||||
try:
|
try:
|
||||||
ns = NotificationSeen(notification=n.id, viewer=author_id)
|
ns = NotificationSeen(notification=n.id, viewer=author_id)
|
||||||
|
|
|
@ -35,9 +35,7 @@ async def rate_author(_, info, rated_slug, value):
|
||||||
return {}
|
return {}
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
rating = AuthorRating(
|
rating = AuthorRating(rater=rater_id, author=rated_author.id, plus=value > 0)
|
||||||
rater=rater_id, author=rated_author.id, plus=value > 0
|
|
||||||
)
|
|
||||||
session.add(rating)
|
session.add(rating)
|
||||||
session.commit()
|
session.commit()
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
|
@ -105,9 +103,7 @@ def count_author_shouts_rating(session, author_id) -> int:
|
||||||
|
|
||||||
def get_author_rating_old(session, author: Author):
|
def get_author_rating_old(session, author: Author):
|
||||||
likes_count = (
|
likes_count = (
|
||||||
session.query(AuthorRating)
|
session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
|
||||||
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True)))
|
|
||||||
.count()
|
|
||||||
)
|
)
|
||||||
dislikes_count = (
|
dislikes_count = (
|
||||||
session.query(AuthorRating)
|
session.query(AuthorRating)
|
||||||
|
@ -167,9 +163,7 @@ def get_author_rating_comments(session, author: Author) -> int:
|
||||||
and_(
|
and_(
|
||||||
replied_comment.kind == ReactionKind.COMMENT.value,
|
replied_comment.kind == ReactionKind.COMMENT.value,
|
||||||
replied_comment.created_by == author.id,
|
replied_comment.created_by == author.id,
|
||||||
Reaction.kind.in_(
|
Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
|
||||||
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
|
|
||||||
),
|
|
||||||
Reaction.reply_to == replied_comment.id,
|
Reaction.reply_to == replied_comment.id,
|
||||||
Reaction.deleted_at.is_(None),
|
Reaction.deleted_at.is_(None),
|
||||||
),
|
),
|
||||||
|
@ -184,9 +178,7 @@ def add_author_rating_columns(q, group_list):
|
||||||
|
|
||||||
# old karma
|
# old karma
|
||||||
q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
|
q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
|
||||||
q = q.add_columns(
|
q = q.add_columns(func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating"))
|
||||||
func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating")
|
|
||||||
)
|
|
||||||
|
|
||||||
# by shouts rating
|
# by shouts rating
|
||||||
shout_reaction = aliased(Reaction)
|
shout_reaction = aliased(Reaction)
|
||||||
|
@ -243,9 +235,7 @@ def add_author_rating_columns(q, group_list):
|
||||||
and_(
|
and_(
|
||||||
replied_comment.kind == ReactionKind.COMMENT.value,
|
replied_comment.kind == ReactionKind.COMMENT.value,
|
||||||
replied_comment.created_by == Author.id,
|
replied_comment.created_by == Author.id,
|
||||||
reaction_2.kind.in_(
|
reaction_2.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
|
||||||
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
|
|
||||||
),
|
|
||||||
reaction_2.reply_to == replied_comment.id,
|
reaction_2.reply_to == replied_comment.id,
|
||||||
reaction_2.deleted_at.is_(None),
|
reaction_2.deleted_at.is_(None),
|
||||||
),
|
),
|
||||||
|
|
|
@ -23,15 +23,9 @@ from services.viewed import ViewedStorage
|
||||||
def add_reaction_stat_columns(q, aliased_reaction):
|
def add_reaction_stat_columns(q, aliased_reaction):
|
||||||
q = q.outerjoin(aliased_reaction).add_columns(
|
q = q.outerjoin(aliased_reaction).add_columns(
|
||||||
func.sum(aliased_reaction.id).label("reacted_stat"),
|
func.sum(aliased_reaction.id).label("reacted_stat"),
|
||||||
func.sum(
|
func.sum(case((aliased_reaction.kind == str(ReactionKind.COMMENT.value), 1), else_=0)).label("comments_stat"),
|
||||||
case((aliased_reaction.kind == str(ReactionKind.COMMENT.value), 1), else_=0)
|
func.sum(case((aliased_reaction.kind == str(ReactionKind.LIKE.value), 1), else_=0)).label("likes_stat"),
|
||||||
).label("comments_stat"),
|
func.sum(case((aliased_reaction.kind == str(ReactionKind.DISLIKE.value), 1), else_=0)).label("dislikes_stat"),
|
||||||
func.sum(
|
|
||||||
case((aliased_reaction.kind == str(ReactionKind.LIKE.value), 1), else_=0)
|
|
||||||
).label("likes_stat"),
|
|
||||||
func.sum(
|
|
||||||
case((aliased_reaction.kind == str(ReactionKind.DISLIKE.value), 1), else_=0)
|
|
||||||
).label("dislikes_stat"),
|
|
||||||
func.max(
|
func.max(
|
||||||
case(
|
case(
|
||||||
(aliased_reaction.kind != str(ReactionKind.COMMENT.value), None),
|
(aliased_reaction.kind != str(ReactionKind.COMMENT.value), None),
|
||||||
|
@ -60,9 +54,7 @@ def check_to_feature(session, approver_id, reaction):
|
||||||
if is_featured_author(session, approver_id):
|
if is_featured_author(session, approver_id):
|
||||||
approvers = [approver_id]
|
approvers = [approver_id]
|
||||||
# now count how many approvers are voted already
|
# now count how many approvers are voted already
|
||||||
reacted_readers = (
|
reacted_readers = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
|
||||||
session.query(Reaction).where(Reaction.shout == reaction.shout).all()
|
|
||||||
)
|
|
||||||
for reacted_reader in reacted_readers:
|
for reacted_reader in reacted_readers:
|
||||||
if is_featured_author(session, reacted_reader.id):
|
if is_featured_author(session, reacted_reader.id):
|
||||||
approvers.append(reacted_reader.id)
|
approvers.append(reacted_reader.id)
|
||||||
|
@ -87,9 +79,7 @@ def check_to_unfeature(session, rejecter_id, reaction):
|
||||||
)
|
)
|
||||||
rejects = 0
|
rejects = 0
|
||||||
for r in reactions:
|
for r in reactions:
|
||||||
approver = (
|
approver = session.query(Author).filter(Author.id == r.created_by).first()
|
||||||
session.query(Author).filter(Author.id == r.created_by).first()
|
|
||||||
)
|
|
||||||
if is_featured_author(session, approver):
|
if is_featured_author(session, approver):
|
||||||
if is_negative(r.kind):
|
if is_negative(r.kind):
|
||||||
rejects += 1
|
rejects += 1
|
||||||
|
@ -127,11 +117,7 @@ async def _create_reaction(session, shout, author_id: int, reaction):
|
||||||
await update_author_stat(author_id)
|
await update_author_stat(author_id)
|
||||||
|
|
||||||
# collaborative editing
|
# collaborative editing
|
||||||
if (
|
if rdict.get("reply_to") and r.kind in PROPOSAL_REACTIONS and author_id in shout.authors:
|
||||||
rdict.get("reply_to")
|
|
||||||
and r.kind in PROPOSAL_REACTIONS
|
|
||||||
and author_id in shout.authors
|
|
||||||
):
|
|
||||||
handle_proposing(session, r, shout)
|
handle_proposing(session, r, shout)
|
||||||
|
|
||||||
# рейтинг и саморегуляция
|
# рейтинг и саморегуляция
|
||||||
|
@ -165,9 +151,7 @@ async def _create_reaction(session, shout, author_id: int, reaction):
|
||||||
|
|
||||||
def prepare_new_rating(reaction: dict, shout_id: int, session, author_id: int):
|
def prepare_new_rating(reaction: dict, shout_id: int, session, author_id: int):
|
||||||
kind = reaction.get("kind")
|
kind = reaction.get("kind")
|
||||||
opposite_kind = (
|
opposite_kind = ReactionKind.DISLIKE.value if is_positive(kind) else ReactionKind.LIKE.value
|
||||||
ReactionKind.DISLIKE.value if is_positive(kind) else ReactionKind.LIKE.value
|
|
||||||
)
|
|
||||||
|
|
||||||
q = select(Reaction).filter(
|
q = select(Reaction).filter(
|
||||||
and_(
|
and_(
|
||||||
|
@ -226,9 +210,7 @@ async def create_reaction(_, info, reaction):
|
||||||
return {"error": "cannot create reaction without a kind"}
|
return {"error": "cannot create reaction without a kind"}
|
||||||
|
|
||||||
if kind in RATING_REACTIONS:
|
if kind in RATING_REACTIONS:
|
||||||
error_result = prepare_new_rating(
|
error_result = prepare_new_rating(reaction, shout_id, session, author_id)
|
||||||
reaction, shout_id, session, author_id
|
|
||||||
)
|
|
||||||
if error_result:
|
if error_result:
|
||||||
return error_result
|
return error_result
|
||||||
|
|
||||||
|
@ -275,9 +257,7 @@ async def update_reaction(_, info, reaction):
|
||||||
if not r:
|
if not r:
|
||||||
return {"error": "invalid reaction id"}
|
return {"error": "invalid reaction id"}
|
||||||
|
|
||||||
author = (
|
author = session.query(Author).filter(Author.user == user_id).first()
|
||||||
session.query(Author).filter(Author.user == user_id).first()
|
|
||||||
)
|
|
||||||
if author:
|
if author:
|
||||||
if r.created_by != author.id and "editor" not in roles:
|
if r.created_by != author.id and "editor" not in roles:
|
||||||
return {"error": "access denied"}
|
return {"error": "access denied"}
|
||||||
|
@ -460,9 +440,7 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
|
||||||
select(Shout)
|
select(Shout)
|
||||||
.outerjoin(
|
.outerjoin(
|
||||||
Reaction,
|
Reaction,
|
||||||
and_(
|
and_(Reaction.shout == Shout.id, Reaction.created_by == follower_id),
|
||||||
Reaction.shout == Shout.id, Reaction.created_by == follower_id
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
.outerjoin(Author, Shout.authors.any(id=follower_id))
|
.outerjoin(Author, Shout.authors.any(id=follower_id))
|
||||||
.options(joinedload(Shout.reactions), joinedload(Shout.authors))
|
.options(joinedload(Shout.reactions), joinedload(Shout.authors))
|
||||||
|
@ -481,12 +459,7 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
|
||||||
q2 = add_reaction_stat_columns(q2, aliased(Reaction))
|
q2 = add_reaction_stat_columns(q2, aliased(Reaction))
|
||||||
|
|
||||||
# Sort shouts by the `last_comment` field
|
# Sort shouts by the `last_comment` field
|
||||||
combined_query = (
|
combined_query = union(q1, q2).order_by(desc(text("last_comment_stat"))).limit(limit).offset(offset)
|
||||||
union(q1, q2)
|
|
||||||
.order_by(desc(text("last_comment_stat")))
|
|
||||||
.limit(limit)
|
|
||||||
.offset(offset)
|
|
||||||
)
|
|
||||||
|
|
||||||
results = session.execute(combined_query).scalars()
|
results = session.execute(combined_query).scalars()
|
||||||
for [
|
for [
|
||||||
|
|
|
@ -28,21 +28,14 @@ def filter_my(info, session, q):
|
||||||
user_id = info.context.get("user_id")
|
user_id = info.context.get("user_id")
|
||||||
reader_id = info.context.get("author", {}).get("id")
|
reader_id = info.context.get("author", {}).get("id")
|
||||||
if user_id and reader_id:
|
if user_id and reader_id:
|
||||||
reader_followed_authors = select(AuthorFollower.author).where(
|
reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == reader_id)
|
||||||
AuthorFollower.follower == reader_id
|
reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == reader_id)
|
||||||
)
|
|
||||||
reader_followed_topics = select(TopicFollower.topic).where(
|
|
||||||
TopicFollower.follower == reader_id
|
|
||||||
)
|
|
||||||
|
|
||||||
subquery = (
|
subquery = (
|
||||||
select(Shout.id)
|
select(Shout.id)
|
||||||
.where(Shout.id == ShoutAuthor.shout)
|
.where(Shout.id == ShoutAuthor.shout)
|
||||||
.where(Shout.id == ShoutTopic.shout)
|
.where(Shout.id == ShoutTopic.shout)
|
||||||
.where(
|
.where((ShoutAuthor.author.in_(reader_followed_authors)) | (ShoutTopic.topic.in_(reader_followed_topics)))
|
||||||
(ShoutAuthor.author.in_(reader_followed_authors))
|
|
||||||
| (ShoutTopic.topic.in_(reader_followed_topics))
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
q = q.filter(Shout.id.in_(subquery))
|
q = q.filter(Shout.id.in_(subquery))
|
||||||
return q, reader_id
|
return q, reader_id
|
||||||
|
@ -188,9 +181,7 @@ async def load_shouts_by(_, _info, options):
|
||||||
order_str = options.get("order_by")
|
order_str = options.get("order_by")
|
||||||
if order_str in ["likes", "followers", "comments", "last_comment"]:
|
if order_str in ["likes", "followers", "comments", "last_comment"]:
|
||||||
q = q.order_by(desc(text(f"{order_str}_stat")))
|
q = q.order_by(desc(text(f"{order_str}_stat")))
|
||||||
query_order_by = (
|
query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
||||||
desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
|
||||||
)
|
|
||||||
q = q.order_by(nulls_last(query_order_by))
|
q = q.order_by(nulls_last(query_order_by))
|
||||||
|
|
||||||
# limit offset
|
# limit offset
|
||||||
|
@ -257,20 +248,13 @@ async def load_shouts_feed(_, info, options):
|
||||||
Shout.featured_at if filters.get("featured") else Shout.published_at,
|
Shout.featured_at if filters.get("featured") else Shout.published_at,
|
||||||
)
|
)
|
||||||
|
|
||||||
query_order_by = (
|
query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
||||||
desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
|
||||||
)
|
|
||||||
|
|
||||||
# pagination
|
# pagination
|
||||||
offset = options.get("offset", 0)
|
offset = options.get("offset", 0)
|
||||||
limit = options.get("limit", 10)
|
limit = options.get("limit", 10)
|
||||||
|
|
||||||
q = (
|
q = q.group_by(Shout.id).order_by(nulls_last(query_order_by)).limit(limit).offset(offset)
|
||||||
q.group_by(Shout.id)
|
|
||||||
.order_by(nulls_last(query_order_by))
|
|
||||||
.limit(limit)
|
|
||||||
.offset(offset)
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.debug(q.compile(compile_kwargs={"literal_binds": True}))
|
logger.debug(q.compile(compile_kwargs={"literal_binds": True}))
|
||||||
|
|
||||||
|
@ -328,9 +312,7 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
|
||||||
and_(
|
and_(
|
||||||
Reaction.shout == Shout.id,
|
Reaction.shout == Shout.id,
|
||||||
Reaction.reply_to.is_(None),
|
Reaction.reply_to.is_(None),
|
||||||
Reaction.kind.in_(
|
Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
|
||||||
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.outerjoin(Author, Author.user == bindparam("user_id"))
|
.outerjoin(Author, Author.user == bindparam("user_id"))
|
||||||
|
@ -403,9 +385,7 @@ async def load_shouts_random_top(_, _info, options):
|
||||||
aliased_reaction = aliased(Reaction)
|
aliased_reaction = aliased(Reaction)
|
||||||
|
|
||||||
subquery = (
|
subquery = (
|
||||||
select(Shout.id)
|
select(Shout.id).outerjoin(aliased_reaction).where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
|
||||||
.outerjoin(aliased_reaction)
|
|
||||||
.where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
|
|
||||||
)
|
)
|
||||||
|
|
||||||
subquery = apply_filters(subquery, options.get("filters", {}))
|
subquery = apply_filters(subquery, options.get("filters", {}))
|
||||||
|
@ -428,11 +408,7 @@ async def load_shouts_random_top(_, _info, options):
|
||||||
if random_limit:
|
if random_limit:
|
||||||
subquery = subquery.limit(random_limit)
|
subquery = subquery.limit(random_limit)
|
||||||
|
|
||||||
q = (
|
q = select(Shout).options(joinedload(Shout.authors), joinedload(Shout.topics)).where(Shout.id.in_(subquery))
|
||||||
select(Shout)
|
|
||||||
.options(joinedload(Shout.authors), joinedload(Shout.topics))
|
|
||||||
.where(Shout.id.in_(subquery))
|
|
||||||
)
|
|
||||||
|
|
||||||
q = add_reaction_stat_columns(q, aliased_reaction)
|
q = add_reaction_stat_columns(q, aliased_reaction)
|
||||||
|
|
||||||
|
|
|
@ -12,9 +12,7 @@ from services.logger import root_logger as logger
|
||||||
|
|
||||||
def add_topic_stat_columns(q):
|
def add_topic_stat_columns(q):
|
||||||
aliased_shout = aliased(ShoutTopic)
|
aliased_shout = aliased(ShoutTopic)
|
||||||
q = q.outerjoin(aliased_shout).add_columns(
|
q = q.outerjoin(aliased_shout).add_columns(func.count(distinct(aliased_shout.shout)).label("shouts_stat"))
|
||||||
func.count(distinct(aliased_shout.shout)).label("shouts_stat")
|
|
||||||
)
|
|
||||||
aliased_follower = aliased(TopicFollower)
|
aliased_follower = aliased(TopicFollower)
|
||||||
q = q.outerjoin(aliased_follower, aliased_follower.topic == Topic.id).add_columns(
|
q = q.outerjoin(aliased_follower, aliased_follower.topic == Topic.id).add_columns(
|
||||||
func.count(distinct(aliased_follower.follower)).label("followers_stat")
|
func.count(distinct(aliased_follower.follower)).label("followers_stat")
|
||||||
|
@ -27,9 +25,7 @@ def add_topic_stat_columns(q):
|
||||||
|
|
||||||
def add_author_stat_columns(q):
|
def add_author_stat_columns(q):
|
||||||
aliased_shout = aliased(ShoutAuthor)
|
aliased_shout = aliased(ShoutAuthor)
|
||||||
q = q.outerjoin(aliased_shout).add_columns(
|
q = q.outerjoin(aliased_shout).add_columns(func.count(distinct(aliased_shout.shout)).label("shouts_stat"))
|
||||||
func.count(distinct(aliased_shout.shout)).label("shouts_stat")
|
|
||||||
)
|
|
||||||
aliased_follower = aliased(AuthorFollower)
|
aliased_follower = aliased(AuthorFollower)
|
||||||
q = q.outerjoin(aliased_follower, aliased_follower.author == Author.id).add_columns(
|
q = q.outerjoin(aliased_follower, aliased_follower.author == Author.id).add_columns(
|
||||||
func.count(distinct(aliased_follower.follower)).label("followers_stat")
|
func.count(distinct(aliased_follower.follower)).label("followers_stat")
|
||||||
|
@ -79,9 +75,7 @@ def get_topic_authors_stat(topic_id: int):
|
||||||
|
|
||||||
def get_topic_followers_stat(topic_id: int):
|
def get_topic_followers_stat(topic_id: int):
|
||||||
aliased_followers = aliased(TopicFollower)
|
aliased_followers = aliased(TopicFollower)
|
||||||
q = select(func.count(distinct(aliased_followers.follower))).filter(
|
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.topic == topic_id)
|
||||||
aliased_followers.topic == topic_id
|
|
||||||
)
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
result = session.execute(q).first()
|
result = session.execute(q).first()
|
||||||
return result[0] if result else 0
|
return result[0] if result else 0
|
||||||
|
@ -106,9 +100,7 @@ def get_topic_comments_stat(topic_id: int):
|
||||||
.group_by(Shout.id)
|
.group_by(Shout.id)
|
||||||
.subquery()
|
.subquery()
|
||||||
)
|
)
|
||||||
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(
|
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
|
||||||
ShoutTopic.topic == topic_id
|
|
||||||
)
|
|
||||||
q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
|
q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
result = session.execute(q).first()
|
result = session.execute(q).first()
|
||||||
|
@ -152,9 +144,7 @@ def get_author_authors_stat(author_id: int):
|
||||||
|
|
||||||
def get_author_followers_stat(author_id: int):
|
def get_author_followers_stat(author_id: int):
|
||||||
aliased_followers = aliased(AuthorFollower)
|
aliased_followers = aliased(AuthorFollower)
|
||||||
q = select(func.count(distinct(aliased_followers.follower))).filter(
|
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
|
||||||
aliased_followers.author == author_id
|
|
||||||
)
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
result = session.execute(q).first()
|
result = session.execute(q).first()
|
||||||
return result[0] if result else 0
|
return result[0] if result else 0
|
||||||
|
@ -162,9 +152,7 @@ def get_author_followers_stat(author_id: int):
|
||||||
|
|
||||||
def get_author_comments_stat(author_id: int):
|
def get_author_comments_stat(author_id: int):
|
||||||
sub_comments = (
|
sub_comments = (
|
||||||
select(
|
select(Author.id, func.coalesce(func.count(Reaction.id)).label("comments_count"))
|
||||||
Author.id, func.coalesce(func.count(Reaction.id)).label("comments_count")
|
|
||||||
)
|
|
||||||
.select_from(Author) # явно указываем левый элемент join'а
|
.select_from(Author) # явно указываем левый элемент join'а
|
||||||
.outerjoin(
|
.outerjoin(
|
||||||
Reaction,
|
Reaction,
|
||||||
|
@ -219,9 +207,7 @@ def get_with_stat(q):
|
||||||
def author_follows_authors(author_id: int):
|
def author_follows_authors(author_id: int):
|
||||||
af = aliased(AuthorFollower, name="af")
|
af = aliased(AuthorFollower, name="af")
|
||||||
author_follows_authors_query = (
|
author_follows_authors_query = (
|
||||||
select(Author)
|
select(Author).select_from(join(Author, af, Author.id == af.author)).where(af.follower == author_id)
|
||||||
.select_from(join(Author, af, Author.id == af.author))
|
|
||||||
.where(af.follower == author_id)
|
|
||||||
)
|
)
|
||||||
return get_with_stat(author_follows_authors_query)
|
return get_with_stat(author_follows_authors_query)
|
||||||
|
|
||||||
|
|
|
@ -38,17 +38,11 @@ def get_topics_by_community(_, _info, community_id: int):
|
||||||
async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
|
async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
|
||||||
topics_by_author_query = select(Topic)
|
topics_by_author_query = select(Topic)
|
||||||
if author_id:
|
if author_id:
|
||||||
topics_by_author_query = topics_by_author_query.join(Author).where(
|
topics_by_author_query = topics_by_author_query.join(Author).where(Author.id == author_id)
|
||||||
Author.id == author_id
|
|
||||||
)
|
|
||||||
elif slug:
|
elif slug:
|
||||||
topics_by_author_query = topics_by_author_query.join(Author).where(
|
topics_by_author_query = topics_by_author_query.join(Author).where(Author.slug == slug)
|
||||||
Author.slug == slug
|
|
||||||
)
|
|
||||||
elif user:
|
elif user:
|
||||||
topics_by_author_query = topics_by_author_query.join(Author).where(
|
topics_by_author_query = topics_by_author_query.join(Author).where(Author.user == user)
|
||||||
Author.user == user
|
|
||||||
)
|
|
||||||
|
|
||||||
return get_with_stat(topics_by_author_query)
|
return get_with_stat(topics_by_author_query)
|
||||||
|
|
||||||
|
|
|
@ -26,9 +26,7 @@ async def cache_author(author: dict):
|
||||||
if isinstance(followers, list):
|
if isinstance(followers, list):
|
||||||
for follower in followers:
|
for follower in followers:
|
||||||
follower_follows_authors = []
|
follower_follows_authors = []
|
||||||
follower_follows_authors_str = await redis.execute(
|
follower_follows_authors_str = await redis.execute("GET", f"author:{author_id}:follows-authors")
|
||||||
"GET", f"author:{author_id}:follows-authors"
|
|
||||||
)
|
|
||||||
if isinstance(follower_follows_authors_str, str):
|
if isinstance(follower_follows_authors_str, str):
|
||||||
follower_follows_authors = json.loads(follower_follows_authors_str)
|
follower_follows_authors = json.loads(follower_follows_authors_str)
|
||||||
c = 0
|
c = 0
|
||||||
|
@@ -49,9 +47,7 @@ async def cache_author(author: dict):
     if isinstance(follows_authors, list):
         for followed_author in follows_authors:
            followed_author_followers = []
-            followed_author_followers_str = await redis.execute(
-                "GET", f"author:{author_id}:followers"
-            )
+            followed_author_followers_str = await redis.execute("GET", f"author:{author_id}:followers")
             if isinstance(followed_author_followers_str, str):
                 followed_author_followers = json.loads(followed_author_followers_str)
                 c = 0

@@ -139,18 +135,11 @@ async def cache_follow_author_change(follower: dict, author: dict, is_insert=Tru
 
 async def cache_topic(topic_dict: dict):
     # update stat all field for followers' caches in <topics> list
-    followers = (
-        local_session()
-        .query(TopicFollower)
-        .filter(TopicFollower.topic == topic_dict.get("id"))
-        .all()
-    )
+    followers = local_session().query(TopicFollower).filter(TopicFollower.topic == topic_dict.get("id")).all()
     for tf in followers:
         follower_id = tf.follower
         follower_follows_topics = []
-        follower_follows_topics_str = await redis.execute(
-            "GET", f"author:{follower_id}:follows-topics"
-        )
+        follower_follows_topics_str = await redis.execute("GET", f"author:{follower_id}:follows-topics")
         if isinstance(follower_follows_topics_str, str):
             follower_follows_topics = json.loads(follower_follows_topics_str)
             c = 0

@@ -45,9 +45,7 @@ class Base(declarative_base()):
         REGISTRY[cls.__name__] = cls
 
     def dict(self) -> Dict[str, Any]:
-        column_names = filter(
-            lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys()
-        )
+        column_names = filter(lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys())
         try:
             data = {}
             for c in column_names:

@@ -76,9 +74,7 @@ Base.metadata.create_all(bind=engine)
 
 
 # Функция для вывода полного трейсбека при предупреждениях
-def warning_with_traceback(
-    message: Warning | str, category, filename: str, lineno: int, file=None, line=None
-):
+def warning_with_traceback(message: Warning | str, category, filename: str, lineno: int, file=None, line=None):
     tb = traceback.format_stack()
     tb_str = "".join(tb)
     return f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}"

@@ -127,9 +127,7 @@ class SearchService:
         logger.debug(f"Проверяем индекс {self.index_name}...")
         if not self.client.indices.exists(index=self.index_name):
             self.create_index()
-            self.client.indices.put_mapping(
-                index=self.index_name, body=expected_mapping
-            )
+            self.client.indices.put_mapping(index=self.index_name, body=expected_mapping)
         else:
             logger.info(f"Найден существующий индекс {self.index_name}")
             # Проверка и обновление структуры индекса, если необходимо

@@ -138,17 +136,9 @@ class SearchService:
             result = json.loads(result)
             if isinstance(result, dict):
                 mapping = result.get(self.index_name, {}).get("mappings")
-                logger.debug(
-                    f"Найдена структура индексации: {mapping['properties'].keys()}"
-                )
-                if (
-                    mapping
-                    and mapping["properties"].keys()
-                    != expected_mapping["properties"].keys()
-                ):
-                    logger.debug(
-                        f"Ожидаемая структура индексации: {expected_mapping}"
-                    )
+                logger.debug(f"Найдена структура индексации: {mapping['properties'].keys()}")
+                if mapping and mapping["properties"].keys() != expected_mapping["properties"].keys():
+                    logger.debug(f"Ожидаемая структура индексации: {expected_mapping}")
                     logger.warn("[!!!] Требуется переиндексация всех данных")
                     self.delete_index()
                     self.client = None

@@ -177,9 +167,7 @@ class SearchService:
         logger.debug(f"Ищем: {text}")
         search_body = {"query": {"match": {"_all": text}}}
         if self.client:
-            search_response = self.client.search(
-                index=self.index_name, body=search_body, size=limit, from_=offset
-            )
+            search_response = self.client.search(index=self.index_name, body=search_body, size=limit, from_=offset)
             hits = search_response["hits"]["hits"]
 
             results = [{**hit["_source"], "score": hit["_score"]} for hit in hits]

@@ -20,9 +20,7 @@ DEFAULT_FOLLOWS = {
 }
 
 
-async def handle_author_follower_change(
-    author_id: int, follower_id: int, is_insert: bool
-):
+async def handle_author_follower_change(author_id: int, follower_id: int, is_insert: bool):
     logger.info(author_id)
     author_query = select(Author).select_from(Author).filter(Author.id == author_id)
     [author] = get_with_stat(author_query)

@@ -30,17 +28,13 @@ async def handle_author_follower_change(
     [follower] = get_with_stat(follower_query)
     if follower and author:
         await cache_author(author.dict())
-        await cache_follows(
-            follower.dict(), "author", author.dict(), is_insert
-        ) # cache_author(follower_dict) inside
+        await cache_follows(follower.dict(), "author", author.dict(), is_insert) # cache_author(follower_dict) inside
         await cache_follow_author_change(
             follower.dict(), author.dict(), is_insert
         ) # cache_author(follower_dict) inside
 
 
-async def handle_topic_follower_change(
-    topic_id: int, follower_id: int, is_insert: bool
-):
+async def handle_topic_follower_change(topic_id: int, follower_id: int, is_insert: bool):
     logger.info(topic_id)
     topic_query = select(Topic).filter(Topic.id == topic_id)
     [topic] = get_with_stat(topic_query)

@@ -48,9 +42,7 @@ async def handle_topic_follower_change(
     [follower] = get_with_stat(follower_query)
     if follower and topic:
         await cache_author(follower.dict())
-        await redis.execute(
-            "SET", f"topic:{topic.id}", json.dumps(topic.dict(), cls=CustomJSONEncoder)
-        )
+        await redis.execute("SET", f"topic:{topic.id}", json.dumps(topic.dict(), cls=CustomJSONEncoder))
         await cache_follows(follower.dict(), "topic", topic.dict(), is_insert)
 
 
@@ -84,9 +76,7 @@ def after_reaction_update(mapper, connection, reaction: Reaction):
 
     # reaction repliers
     replied_author_subquery = (
-        select(Author)
-        .join(Reaction, Author.id == Reaction.created_by)
-        .where(Reaction.id == reaction.reply_to)
+        select(Author).join(Reaction, Author.id == Reaction.created_by).where(Reaction.id == reaction.reply_to)
     )
     authors_with_stat = get_with_stat(replied_author_subquery)
     for author_with_stat in authors_with_stat:

@@ -60,25 +60,19 @@ class ViewedStorage:
         try:
             if os.path.exists(VIEWS_FILEPATH):
                 start_date_int = os.path.getmtime(VIEWS_FILEPATH)
-                start_date_str = datetime.fromtimestamp(start_date_int).strftime(
-                    "%Y-%m-%d"
-                )
+                start_date_str = datetime.fromtimestamp(start_date_int).strftime("%Y-%m-%d")
                 self.start_date = start_date_str
                 now_date = datetime.now().strftime("%Y-%m-%d")
 
                 if now_date == self.start_date:
                     logger.info(" * Данные актуализованы!")
                 else:
-                    logger.warn(
-                        f" * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}"
-                    )
+                    logger.warn(f" * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}")
 
                 with open(VIEWS_FILEPATH, "r") as file:
                     precounted_views = json.load(file)
                     self.views_by_shout.update(precounted_views)
-                    logger.info(
-                        f" * {len(precounted_views)} публикаций с просмотрами успешно загружены."
-                    )
+                    logger.info(f" * {len(precounted_views)} публикаций с просмотрами успешно загружены.")
             else:
                 logger.info(" * Файл просмотров не найден.")
         except Exception as e:

@@ -99,9 +93,7 @@ class ViewedStorage:
                 property=f"properties/{GOOGLE_PROPERTY_ID}",
                 dimensions=[Dimension(name="pagePath")],
                 metrics=[Metric(name="screenPageViews")],
-                date_ranges=[
-                    DateRange(start_date=self.start_date, end_date="today")
-                ],
+                date_ranges=[DateRange(start_date=self.start_date, end_date="today")],
             )
             response = self.analytics_client.run_report(request)
             if response and isinstance(response.rows, list):

@@ -118,9 +110,7 @@ class ViewedStorage:
                     views_count = int(row.metric_values[0].value)
 
                     # Обновление данных в хранилище
-                    self.views_by_shout[slug] = self.views_by_shout.get(
-                        slug, 0
-                    )
+                    self.views_by_shout[slug] = self.views_by_shout.get(slug, 0)
                     self.views_by_shout[slug] += views_count
                     self.update_topics(slug)
 
@@ -179,20 +169,12 @@ class ViewedStorage:
 
             # Обновление тем и авторов с использованием вспомогательной функции
             for [_shout_topic, topic] in (
-                session.query(ShoutTopic, Topic)
-                .join(Topic)
-                .join(Shout)
-                .where(Shout.slug == shout_slug)
-                .all()
+                session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all()
             ):
                 update_groups(self.shouts_by_topic, topic.slug, shout_slug)
 
             for [_shout_topic, author] in (
-                session.query(ShoutAuthor, Author)
-                .join(Author)
-                .join(Shout)
-                .where(Shout.slug == shout_slug)
-                .all()
+                session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all()
             ):
                 update_groups(self.shouts_by_author, author.slug, shout_slug)
 
@@ -219,8 +201,7 @@ class ViewedStorage:
                 when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                 t = format(when.astimezone().isoformat())
                 logger.info(
-                    " ⎩ Следующее обновление: %s"
-                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
+                    " ⎩ Следующее обновление: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
                 )
                 await asyncio.sleep(self.period)
             else:

@@ -21,15 +21,11 @@ class WebhookEndpoint(HTTPEndpoint):
                 raise HTTPException(status_code=400, detail="Request body is empty")
             auth = request.headers.get("Authorization")
             if not auth or auth != os.environ.get("WEBHOOK_SECRET"):
-                raise HTTPException(
-                    status_code=401, detail="Invalid Authorization header"
-                )
+                raise HTTPException(status_code=401, detail="Invalid Authorization header")
             # logger.debug(data)
             user = data.get("user")
             if not isinstance(user, dict):
-                raise HTTPException(
-                    status_code=400, detail="User data is not a dictionary"
-                )
+                raise HTTPException(status_code=400, detail="User data is not a dictionary")
             #
             name: str = (
                 f"{user.get('given_name', user.get('slug'))} {user.get('middle_name', '')}"

@@ -40,19 +36,13 @@ class WebhookEndpoint(HTTPEndpoint):
             pic: str = user.get("picture", "")
             if user_id:
                 with local_session() as session:
-                    author = (
-                        session.query(Author).filter(Author.user == user_id).first()
-                    )
+                    author = session.query(Author).filter(Author.user == user_id).first()
                     if not author:
                         # If the author does not exist, create a new one
                         slug: str = email.split("@")[0].replace(".", "-").lower()
                         slug: str = re.sub("[^0-9a-z]+", "-", slug)
                         while True:
-                            author = (
-                                session.query(Author)
-                                .filter(Author.slug == slug)
-                                .first()
-                            )
+                            author = session.query(Author).filter(Author.slug == slug).first()
                             if not author:
                                 break
                             slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"

@@ -66,9 +56,7 @@ class WebhookEndpoint(HTTPEndpoint):
 
             return JSONResponse({"status": "success"})
         except HTTPException as e:
-            return JSONResponse(
-                {"status": "error", "message": str(e.detail)}, status_code=e.status_code
-            )
+            return JSONResponse({"status": "error", "message": str(e.detail)}, status_code=e.status_code)
         except Exception as e:
             import traceback