region-cache-fix
This commit is contained in:
parent
e90d5aefb2
commit
5dbb0ccb12
|
@ -15,7 +15,6 @@ from services.cache import cache_author, cache_follower
|
||||||
from services.db import local_session
|
from services.db import local_session
|
||||||
from services.encoders import CustomJSONEncoder
|
from services.encoders import CustomJSONEncoder
|
||||||
from services.logger import root_logger as logger
|
from services.logger import root_logger as logger
|
||||||
from services.memorycache import cache_region
|
|
||||||
from services.rediscache import redis
|
from services.rediscache import redis
|
||||||
from services.schema import mutation, query
|
from services.schema import mutation, query
|
||||||
|
|
||||||
|
@ -126,43 +125,36 @@ async def get_author_id(_, _info, user: str):
|
||||||
|
|
||||||
@query.field("load_authors_by")
def load_authors_by(_, _info, by, limit, offset):
    """Resolve a filtered, ordered, paginated list of authors.

    Args:
        by: filter dict; supported keys are mutually exclusive text filters
            ``slug`` / ``name`` / ``topic``, time filters ``last_seen`` /
            ``created_at`` (age in seconds relative to now), and an optional
            ``order`` of "likes" | "shouts" | "followers".
        limit: maximum number of rows to return.
        offset: number of rows to skip.

    Returns:
        List of authors enriched with stats via ``get_with_stat``.
    """
    logger.debug(f"loading authors by {by}")
    q = select(Author)
    # Text filters are mutually exclusive: first match wins.
    if by.get("slug"):
        q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
    elif by.get("name"):
        q = q.filter(Author.name.ilike(f"%{by['name']}%"))
    elif by.get("topic"):
        q = (
            q.join(ShoutAuthor)
            .join(ShoutTopic)
            .join(Topic)
            .where(Topic.slug == str(by["topic"]))
        )

    # Time filters: values are ages in seconds, converted to an absolute
    # unix-time lower bound.
    if by.get("last_seen"):  # in unix time
        before = int(time.time()) - by["last_seen"]
        q = q.filter(Author.last_seen > before)
    elif by.get("created_at"):  # in unix time
        before = int(time.time()) - by["created_at"]
        q = q.filter(Author.created_at > before)

    # Optional ordering by a precomputed stat column (e.g. "likes_stat").
    order = by.get("order")
    if order in ["likes", "shouts", "followers"]:
        q = q.order_by(desc(text(f"{order}_stat")))

    q = q.limit(limit).offset(offset)

    authors = get_with_stat(q)

    return authors
@query.field("get_author_follows")
|
@query.field("get_author_follows")
|
||||||
|
@ -185,30 +177,31 @@ async def get_author_follows(_, _info, slug="", user=None, author_id=0):
|
||||||
if author and isinstance(author, Author):
|
if author and isinstance(author, Author):
|
||||||
# logger.debug(author.dict())
|
# logger.debug(author.dict())
|
||||||
author_id = author.id if not author_id else author_id
|
author_id = author.id if not author_id else author_id
|
||||||
rkey = f"author:{author_id}:follows-authors"
|
|
||||||
logger.debug(f"getting {author_id} follows authors")
|
|
||||||
cached = await redis.execute("GET", rkey)
|
|
||||||
authors = []
|
|
||||||
if not cached:
|
|
||||||
authors = author_follows_authors(author_id)
|
|
||||||
prepared = [author.dict() for author in authors]
|
|
||||||
await redis.execute(
|
|
||||||
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
|
||||||
)
|
|
||||||
elif isinstance(cached, str):
|
|
||||||
authors = json.loads(cached)
|
|
||||||
|
|
||||||
rkey = f"author:{author_id}:follows-topics"
|
|
||||||
cached = await redis.execute("GET", rkey)
|
|
||||||
topics = []
|
topics = []
|
||||||
if cached and isinstance(cached, str):
|
authors = []
|
||||||
topics = json.loads(cached)
|
if author_id:
|
||||||
if not cached:
|
rkey = f"author:{author_id}:follows-authors"
|
||||||
topics = author_follows_topics(author_id)
|
logger.debug(f"getting {author_id} follows authors")
|
||||||
prepared = [topic.dict() for topic in topics]
|
cached = await redis.execute("GET", rkey)
|
||||||
await redis.execute(
|
if not cached:
|
||||||
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
authors = author_follows_authors(author_id)
|
||||||
)
|
prepared = [author.dict() for author in authors]
|
||||||
|
await redis.execute(
|
||||||
|
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
||||||
|
)
|
||||||
|
elif isinstance(cached, str):
|
||||||
|
authors = json.loads(cached)
|
||||||
|
|
||||||
|
rkey = f"author:{author_id}:follows-topics"
|
||||||
|
cached = await redis.execute("GET", rkey)
|
||||||
|
if cached and isinstance(cached, str):
|
||||||
|
topics = json.loads(cached)
|
||||||
|
if not cached:
|
||||||
|
topics = author_follows_topics(author_id)
|
||||||
|
prepared = [topic.dict() for topic in topics]
|
||||||
|
await redis.execute(
|
||||||
|
"SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
|
||||||
|
)
|
||||||
return {
|
return {
|
||||||
"topics": topics,
|
"topics": topics,
|
||||||
"authors": authors,
|
"authors": authors,
|
||||||
|
|
Loading…
Reference in New Issue
Block a user