commit ca01181f37
parent 0aff77eda6
Dockerfile

@@ -13,4 +13,7 @@ RUN pip install -r requirements.txt
 
 COPY . .
 
-CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"]
+ENV PORT=8000
+EXPOSE $PORT
+
+CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "$PORT"]
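Note: in the exec (JSON array) form of CMD, Docker does not run a shell and does not expand environment variables, so "$PORT" above is passed to granian as the literal string "$PORT" rather than 8000. If substitution is intended, one option is the shell form of CMD, which does expand variables at container start; a minimal sketch of the same invocation:

    CMD python -m granian main:app --interface asgi --host 0.0.0.0 --port $PORT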
cache/cache.py (vendored) | 12
@@ -1,6 +1,8 @@
 import asyncio
+import json
 from typing import List
 
+import jsonschema
 import orjson
 from sqlalchemy import and_, join, select
 
@@ -35,7 +37,7 @@ CACHE_KEYS = {
 
 # Cache topic data
 async def cache_topic(topic: dict):
-    payload = orjson.dumps(topic, cls=CustomJSONEncoder)
+    payload = json.dumps(topic, cls=CustomJSONEncoder)
     await asyncio.gather(
         redis_operation("SET", f"topic:id:{topic['id']}", payload),
         redis_operation("SET", f"topic:slug:{topic['slug']}", payload),
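Note: the switch from orjson.dumps to json.dumps looks deliberate rather than a downgrade: orjson.dumps() has no cls= keyword (its signature takes a default= callable and an option= bitmask), so orjson.dumps(topic, cls=CustomJSONEncoder) raises TypeError at call time, while stdlib json.dumps supports cls=. A minimal sketch of the two APIs, with CustomJSONEncoder stubbed as an assumption since the project's real encoder is not shown in this diff:

    import json
    from datetime import datetime

    # Hypothetical stand-in for the project's CustomJSONEncoder
    class CustomJSONEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, datetime):
                return obj.isoformat()
            return super().default(obj)

    topic = {"id": 1, "created_at": datetime.now()}
    payload = json.dumps(topic, cls=CustomJSONEncoder)  # returns str

    # orjson's equivalent hook is default=, not cls=:
    # import orjson
    # payload = orjson.dumps(topic, default=str)        # returns bytes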
@@ -44,7 +46,7 @@ async def cache_topic(topic: dict):
 
 # Cache author data
 async def cache_author(author: dict):
-    payload = orjson.dumps(author, cls=CustomJSONEncoder)
+    payload = json.dumps(author, cls=CustomJSONEncoder)
     await asyncio.gather(
         redis_operation("SET", f"author:user:{author['user'].strip()}", str(author["id"])),
         redis_operation("SET", f"author:id:{author['id']}", payload),
@@ -61,7 +63,7 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i
         follows.append(entity_id)
     else:
         follows = [eid for eid in follows if eid != entity_id]
-    await redis_operation("SET", key, orjson.dumps(follows, cls=CustomJSONEncoder))
+    await redis_operation("SET", key, json.dumps(follows, cls=CustomJSONEncoder))
     await update_follower_stat(follower_id, entity_type, len(follows))
 
 
@@ -112,7 +114,7 @@ async def get_cached_topic(topic_id: int):
         topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
         if topic:
             topic_dict = topic.dict()
-            await redis_operation("SET", topic_key, orjson.dumps(topic_dict, cls=CustomJSONEncoder))
+            await redis_operation("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
             return topic_dict
 
     return None
@@ -378,7 +380,7 @@ async def invalidate_shouts_cache(cache_keys: List[str]):
 async def cache_topic_shouts(topic_id: int, shouts: List[dict]):
     """Cache the list of shouts (publications) for a topic"""
     key = f"topic_shouts_{topic_id}"
-    payload = orjson.dumps(shouts, cls=CustomJSONEncoder)
+    payload = json.dumps(shouts, cls=CustomJSONEncoder)
     await redis_operation("SETEX", key, value=payload, ttl=CACHE_TTL)
 
 
cache/memorycache.py (vendored) | 5
@@ -6,6 +6,7 @@
 import functools
 import hashlib
 import inspect
+import json
 import logging
 import pickle
 from typing import Callable, Optional
@@ -77,7 +78,7 @@ class RedisCache:
                 # Save the result to the cache
                 try:
                     # Try to serialize as JSON
-                    serialized = orjson.dumps(result, cls=CustomJSONEncoder)
+                    serialized = json.dumps(result, cls=CustomJSONEncoder)
                 except (TypeError, ValueError):
                     # If that fails, fall back to pickle
                     serialized = pickle.dumps(result).decode()
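Note: the pickle fallback in the context lines above, pickle.dumps(result).decode(), decodes raw pickle bytes as UTF-8, which raises UnicodeDecodeError for many pickled objects since pickle output is not generally valid UTF-8. A common workaround is to base64-encode the bytes before storing them as text; a sketch under that assumption:

    import base64
    import pickle

    serialized = base64.b64encode(pickle.dumps(result)).decode("ascii")
    # ...and on the read path: pickle.loads(base64.b64decode(serialized))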
@@ -98,7 +99,7 @@ class RedisCache:
                 try:
                     import asyncio
 
-                    serialized = orjson.dumps(result, cls=CustomJSONEncoder)
+                    serialized = json.dumps(result, cls=CustomJSONEncoder)
                     asyncio.create_task(redis.set(key, serialized, ex=self.ttl))
                 except Exception as e:
                     logger.error(f"Error while caching the result: {e}")
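Note: asyncio.create_task() requires a running event loop and raises RuntimeError otherwise; here the broad except Exception turns that into a logged error and the cache write is dropped. A sketch of a more explicit guard, assuming redis.set is the same async client call used in the hunk above:

    try:
        loop = asyncio.get_running_loop()
        loop.create_task(redis.set(key, serialized, ex=self.ttl))
    except RuntimeError:
        logger.warning("No running event loop; skipping cache write")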
cache/precache.py (vendored) | 13
@@ -1,4 +1,5 @@
 import asyncio
+import json
 
 import orjson
 from sqlalchemy import and_, join, select
@@ -21,7 +22,7 @@ async def precache_authors_followers(author_id, session):
     result = session.execute(followers_query)
     authors_followers.update(row[0] for row in result if row[0])
 
-    followers_payload = orjson.dumps(list(authors_followers), cls=CustomJSONEncoder)
+    followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder)
     await redis.execute("SET", f"author:followers:{author_id}", followers_payload)
 
 
@@ -35,9 +36,9 @@ async def precache_authors_follows(author_id, session):
     follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}
     follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]}
 
-    topics_payload = orjson.dumps(list(follows_topics), cls=CustomJSONEncoder)
-    authors_payload = orjson.dumps(list(follows_authors), cls=CustomJSONEncoder)
-    shouts_payload = orjson.dumps(list(follows_shouts), cls=CustomJSONEncoder)
+    topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder)
+    authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder)
+    shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder)
 
     await asyncio.gather(
         redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload),
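Note: the list(...) wrapping is what makes these sets serializable at all, since neither encoder handles Python sets out of the box. The swap also changes the payload type: json.dumps returns str where orjson.dumps returns bytes, and redis-py accepts both, so the stored values stay equivalent. A quick check:

    import json

    data = [1, 2, 3]
    assert isinstance(json.dumps(data), str)      # stdlib json: text

    # With orjson installed, the same call returns bytes instead:
    # import orjson
    # assert isinstance(orjson.dumps(data), bytes)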
@@ -62,7 +63,7 @@ async def precache_topics_authors(topic_id: int, session):
     )
     topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]}
 
-    authors_payload = orjson.dumps(list(topic_authors), cls=CustomJSONEncoder)
+    authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder)
     await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload)
 
 
@@ -71,7 +72,7 @@ async def precache_topics_followers(topic_id: int, session):
     followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id)
     topic_followers = {row[0] for row in session.execute(followers_query) if row[0]}
 
-    followers_payload = orjson.dumps(list(topic_followers), cls=CustomJSONEncoder)
+    followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder)
     await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload)
 
 
[file name not captured in this view]

@@ -1,4 +1,5 @@
 import asyncio
+import json
 import logging
 import os
 
@@ -210,7 +211,7 @@ class SearchService:
                 "SETEX",
                 redis_key,
                 REDIS_TTL,
-                orjson.dumps(results, cls=CustomJSONEncoder),
+                json.dumps(results, cls=CustomJSONEncoder),
             )
             return results
         return []
settings.py | 16
@@ -1,18 +1,24 @@
 import sys
 from os import environ
 
-PORT = 8000
+MODE = "development" if "dev" in sys.argv else "production"
+DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
+
+PORT = environ.get("PORT") or 8000
+
+# storages
 DB_URL = (
     environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
     or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
     or "sqlite:///discoursio.db"
 )
 REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
-AUTH_URL = environ.get("AUTH_URL") or ""
-GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN")
-DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
-MODE = "development" if "dev" in sys.argv else "production"
+
+# debug
+GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN")
+
+# authorizer.dev
+AUTH_URL = environ.get("AUTH_URL") or "https://auth.discours.io/graphql"
 ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing"
 WEBHOOK_SECRET = environ.get("WEBHOOK_SECRET") or "nothing-else"
 
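Note: environ.get("PORT") returns a string whenever the variable is set, so PORT is now str in deployed environments and int (8000) only as the fallback. If anything downstream compares or does arithmetic with PORT, casting keeps the type stable; a one-line sketch:

    PORT = int(environ.get("PORT") or 8000)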