sqlalchemy-debug
Untone 2023-12-24 17:25:57 +03:00
parent 8856bfc978
commit 392712c604
8 changed files with 44 additions and 48 deletions


@@ -4,7 +4,7 @@
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is .author
- resolvers: get_authors_all and load_authors_all
- resolvers: get_authors_all and load_authors_by
- services: auth connector upgraded


@@ -18,12 +18,13 @@ from services.schema import resolvers
from settings import DEV_SERVER_PID_FILE_NAME, MODE, SENTRY_DSN
from services.viewed import ViewedStorage

import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schemas/core.graphql"), resolvers) # type: ignore


async def start_up():
    print(f"[main] starting in {MODE} mode")
    await redis.connect()
    # start viewed service
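For context, a schema built with make_executable_schema is usually mounted through Ariadne's ASGI wrapper; a minimal sketch of that wiring (generic Ariadne usage, not necessarily this repo's exact main.py):

from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL

type_defs = load_schema_from_path("schemas/core.graphql")
schema = make_executable_schema(type_defs)  # resolver bindables omitted in this sketch
app = GraphQL(schema, debug=True)  # serve with: uvicorn module_name:app (module name assumed)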


@@ -4,7 +4,6 @@ from resolvers.author import (
    get_author_followers,
    get_author_id,
    get_authors_all,
    load_authors_all,
    load_authors_by,
    rate_author,
    update_profile,
@@ -26,7 +25,7 @@ from resolvers.reader import (
    load_shouts_random_top,
    load_shouts_search,
    load_shouts_unrated,
    load_shouts_random_topic
    load_shouts_random_topic,
)
from resolvers.topic import get_topic, get_topics_all, get_topics_by_author, get_topics_by_community
@@ -35,7 +34,6 @@ __all__ = [
    "get_author",
    "get_author_id",
    "get_authors_all",
    "load_authors_all",
    "get_author_followers",
    "get_author_followed",
    "load_authors_by",


@@ -36,39 +36,31 @@ def add_author_stat_columns(q):
    rating_aliased = aliased(Reaction)
    # q = q.add_columns(literal(0).label("rating_stat"))
    q = q.outerjoin(rating_aliased, rating_aliased.shout == shout_author_aliased.shout).add_columns(
        func.coalesce(
            func.sum(
                case(
                    (and_(rating_aliased.kind == ReactionKind.LIKE.value, rating_aliased.reply_to.is_(None)), 1),
                    (and_(rating_aliased.kind == ReactionKind.DISLIKE.value, rating_aliased.reply_to.is_(None)), -1),
                    else_=0,
                )
            ),
            0,
    q = q.outerjoin(rating_aliased, rating_aliased.created_by == Author.id).add_columns(
        func.sum(
            case(
                (and_(rating_aliased.kind == ReactionKind.LIKE.value, rating_aliased.reply_to.is_(None)), 1),
                (and_(rating_aliased.kind == ReactionKind.DISLIKE.value, rating_aliased.reply_to.is_(None)), -1),
                else_=0,
            )
        ).label("rating_stat")
    )

    q = q.add_columns(literal(0).label("commented_stat"))
    comments_aliased = aliased(Reaction)
    q = (
        q.outerjoin(comments_aliased, comments_aliased.created_by == Author.id).filter(
            comments_aliased.kind == ReactionKind.COMMENT.value
        )
    ).add_columns(func.count(distinct(comments_aliased.id)).label("commented_stat"))

    # WARNING: too high cpu cost
    # TODO: check version 1
    # q = q.outerjoin(
    #     Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))
    # ).add_columns(func.count(distinct(Reaction.id))
    #     .label("commented_stat"))
    # TODO: check version 2
    # q = q.add_columns(
    #     func.count(case((reaction_aliased.kind == ReactionKind.COMMENT.value, 1), else_=0))
    #     .label("commented_stat"))
    # q = q.add_columns(literal(0).label("commented_stat"))

    # Filter based on shouts where the user is the author
    q = q.filter(shout_author_aliased.author == Author.id)
    q = q.group_by(Author.id)
    return q
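The rating join above is a standard conditional-aggregation pattern: outer-join the reactions, turn each row into +1/-1/0 with case(), and sum per author. A minimal runnable sketch with toy models (table and column names are assumptions for illustration, not the real schema):

from sqlalchemy import Column, Integer, String, case, func, select
from sqlalchemy.orm import aliased, declarative_base

Base = declarative_base()

class Author(Base):
    __tablename__ = "author"
    id = Column(Integer, primary_key=True)

class Reaction(Base):
    __tablename__ = "reaction"
    id = Column(Integer, primary_key=True)
    kind = Column(String)  # e.g. "LIKE" / "DISLIKE"
    created_by = Column(Integer)

rating = aliased(Reaction)
q = (
    select(Author)
    .outerjoin(rating, rating.created_by == Author.id)
    .add_columns(
        # +1 per like, -1 per dislike, 0 otherwise, summed into one column
        func.sum(case((rating.kind == "LIKE", 1), (rating.kind == "DISLIKE", -1), else_=0)).label("rating_stat")
    )
    .group_by(Author.id)
)
print(q)  # renders SELECT ... SUM(CASE WHEN ... THEN 1 WHEN ... THEN -1 ELSE 0 END) AS rating_stat ... GROUP BY author.id

Note that the commit also drops the func.coalesce() wrapper, so an author with no reactions now gets a NULL rating_stat rather than 0.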
@@ -156,17 +148,7 @@ def author_unfollow(follower_id, slug):
@query.field("get_authors_all")
async def get_authors_all(_, _info):
    with local_session() as session:
        return session.query(Author).join(ShoutAuthor, Author.id == ShoutAuthor.author).all()
@query.field("load_authors_all")
async def load_authors_all(_, _info, limit: int = 50, offset: int = 0):
    q = select(Author)
    q = add_author_stat_columns(q)
    q = q.join(ShoutAuthor, Author.id == ShoutAuthor.author)
    q = q.limit(limit).offset(offset)
    return get_authors_from_query(q)
        return session.query(Author).all()
@query.field("get_author_id")
@@ -188,7 +170,6 @@ async def get_author(_, _info, slug="", author_id=None):
    elif author_id:
        q = select(Author).where(Author.id == author_id)
    q = add_author_stat_columns(q)
    # print(f"[resolvers.author] SQL: {q}")
    authors = get_authors_from_query(q)
    if authors:
        return authors[0]
@@ -215,7 +196,6 @@ async def load_authors_by(_, _info, by, limit, offset):
        q = q.filter(Author.created_at > before)
    q = q.order_by(by.get("order", Author.created_at)).limit(limit).offset(offset)
    return get_authors_from_query(q)


@@ -337,7 +337,6 @@ type Query {
    get_author(slug: String, author_id: Int): Author
    get_author_id(user: String!): Author
    get_authors_all: [Author]
    load_authors_all(limit: Int, offset: Int): [Author]
    get_author_followers(slug: String, user: String, author_id: Int): [Author]
    get_author_followed(slug: String, user: String, author_id: Int): [Author]
    load_authors_by(by: AuthorsBy, limit: Int, offset: Int): [Author]


@@ -56,8 +56,4 @@ def exception_handler(_et, exc, _tb):
if __name__ == "__main__":
    sys.excepthook = exception_handler
    if "dev" in sys.argv:
        import os
        os.environ["MODE"] = "development"
    uvicorn.run("main:app", host="0.0.0.0", port=PORT, proxy_headers=True, server_header=True)


@@ -1,14 +1,35 @@
import time
import logging
# from contextlib import contextmanager
from typing import Any, Callable, Dict, TypeVar

# from psycopg2.errors import UniqueViolation
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy import Column, Integer, create_engine, event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from sqlalchemy.engine import Engine

from settings import DB_URL

logging.basicConfig()
logger = logging.getLogger("\t [sqlalchemy.profiler]\t")
logger.setLevel(logging.DEBUG)


@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    conn.info.setdefault("query_start_time", []).append(time.time())
    logger.debug(f" {statement}")


@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    total = time.time() - conn.info["query_start_time"].pop(-1)
    logger.debug(f" Finished in {total*1000} ms ")


engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)

T = TypeVar("T")
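Worth noting: event.listens_for(Engine, ...) hooks the Engine class itself, so these listeners time queries on every engine the process creates, not only the one built from DB_URL. A quick way to see the output (the services.db import path is an assumption; in-memory SQLite is used only for the demo):

import services.db  # assumed path for the module above; importing it registers the listeners

from sqlalchemy import create_engine, text

demo = create_engine("sqlite://")  # the class-level listeners fire for this engine too
with demo.connect() as conn:
    conn.execute(text("SELECT 1"))
# expected debug log, roughly (timing varies):
#   SELECT 1
#   Finished in 0.03... ms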


@@ -1,4 +1,5 @@
from os import environ
import sys
PORT = 8080
DB_URL = (
@@ -9,6 +10,6 @@ DB_URL = (
REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
API_BASE = environ.get("API_BASE") or ""
AUTH_URL = environ.get("AUTH_URL") or ""
MODE = environ.get("MODE") or "production"
SENTRY_DSN = environ.get("SENTRY_DSN")
DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
MODE = "development" if "dev" in sys.argv else "production"