0.4.9-c

This commit is contained in:
Untone 2025-02-10 18:04:08 +03:00
parent 20173f7d1c
commit a84d8a0c7e
11 changed files with 216 additions and 196 deletions

@@ -7,6 +7,7 @@
- `publish_` and `unpublish_` mutations and resolvers added
- `create_`, `update_`, `delete_` mutations and resolvers added for `Draft` entity
- tests with pytest for original auth, shouts, drafts
- `Dockerfile` and `pyproject.toml` removed for simplicity: replaced by `Procfile` and `requirements.txt`
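
A minimal sketch of what one of the pytest draft tests mentioned above might look like; the file name and the `db_session`/`test_author` fixtures are hypothetical, only the `Draft` model and its `created_by`/`title`/`body` columns come from this commit:

```python
# tests/test_drafts.py (hypothetical file name; db_session and test_author fixtures assumed)
from orm.draft import Draft


def test_create_draft(db_session, test_author):
    # Persist a minimal draft and check the fields the resolvers rely on
    draft = Draft(created_by=test_author.id, title="Test", body="Test body")
    db_session.add(draft)
    db_session.commit()

    assert draft.id is not None
    assert draft.created_by == test_author.id
```
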
#### [0.4.8] - 2025-02-03
- `Reaction.deleted_at` filter on `update_reaction` resolver added

@@ -1,25 +0,0 @@
FROM python:3.12-alpine
# Update package lists and install necessary dependencies
RUN apk update && \
apk add --no-cache build-base icu-data-full curl python3-dev musl-dev && \
curl -sSL https://install.python-poetry.org | python
# Set working directory
WORKDIR /app
# Copy only the pyproject.toml file initially
COPY pyproject.toml /app/
# Install poetry and dependencies
RUN pip install poetry && \
poetry config virtualenvs.create false && \
poetry install --no-root --only main
# Copy the rest of the files
COPY . /app
# Expose the port
EXPOSE 8000
CMD ["python", "server.py"]

@ -37,36 +37,43 @@ Backend service providing GraphQL API for content management system with reactio
## Development
### Setup
Start the API server with the `dev` keyword and with `mkcert` installed:
### Prepare environment:
```shell
mkdir .venv
python3.12 -m venv venv
poetry env use venv/bin/python3.12
poetry update
source venv/bin/activate
```
### Run server
First, certificates are required to run the server.
```shell
mkcert -install
mkcert localhost
poetry run server.py dev
```
Then, run the server:
```shell
python server.py dev
```
### Useful Commands
```shell
# Linting and import sorting
poetry run ruff check . --fix --select I
ruff check . --fix --select I
# Code formatting
poetry run ruff format . --line-length=120
ruff format . --line-length=120
# Run tests
poetry run pytest
pytest
# Type checking
poetry run mypy .
mypy .
```
### Code Style

main.py
@@ -14,24 +14,15 @@ from starlette.routing import Route
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from orm import (
# collection,
# invite,
author,
community,
notification,
reaction,
shout,
topic,
)
from services.db import create_table_if_not_exists, engine
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import resolvers
from services.schema import create_all_tables, resolvers
from services.search import search_service
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint, create_webhook_endpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE
from services.db import engine
from utils.logger import root_logger as logger
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
@@ -46,30 +37,6 @@ async def start():
print(f"[main] process started in {MODE} mode")
def create_all_tables():
for model in [
# user.User,
author.Author,
author.AuthorFollower,
community.Community,
community.CommunityFollower,
shout.Shout,
shout.ShoutAuthor,
author.AuthorBookmark,
topic.Topic,
topic.TopicFollower,
shout.ShoutTopic,
reaction.Reaction,
shout.ShoutReactionsFollower,
author.AuthorRating,
notification.Notification,
notification.NotificationSeen,
# collection.Collection, collection.ShoutCollection,
# invite.Invite
]:
create_table_if_not_exists(engine, model)
async def create_all_tables_async():
# Wrap the synchronous function in an asynchronous one
await asyncio.to_thread(create_all_tables)
@@ -133,3 +100,18 @@ if "dev" in sys.argv:
allow_methods=["*"],
allow_headers=["*"],
)
def init_database():
"""Initialize database tables before starting the server"""
logger.info("Initializing database...")
create_all_tables()  # services.schema.create_all_tables takes no arguments and opens its own session
logger.info("Database initialized")
def main():
# Initialize the database before starting the server
init_database()
# The rest of the server startup code...
if __name__ == "__main__":
main()

@@ -53,4 +53,3 @@ class Draft(Base):
deleted_by: int | None = Column(ForeignKey("author.id"), nullable=True)
authors = relationship(Author, secondary="draft_author")
topics = relationship(Topic, secondary="draft_topic")
shout: int | None = Column(ForeignKey("shout.id"), nullable=True)

@@ -1,62 +0,0 @@
[tool.poetry]
name = "core"
version = "0.4.9"
description = "core module for discours.io"
authors = ["discoursio devteam"]
license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.12"
SQLAlchemy = "^2.0.29"
psycopg2-binary = "^2.9.9"
redis = {extras = ["hiredis"], version = "^5.0.1"}
sentry-sdk = {version = "^1.44.1", extras = ["starlette", "ariadne", "sqlalchemy"]}
starlette = "^0.39.2"
gql = "^3.5.0"
ariadne = "^0.23.0"
pre-commit = "^3.7.0"
granian = "^1.4.1"
google-analytics-data = "^0.18.7"
opensearch-py = "^2.6.0"
httpx = "^0.27.0"
dogpile-cache = "^1.3.1"
colorlog = "^6.8.2"
fakeredis = "^2.25.1"
pydantic = "^2.9.2"
jwt = "^1.3.1"
authlib = "^1.3.2"
passlib = "^1.7.4"
bcrypt = "^4.2.1"
[tool.poetry.group.dev.dependencies]
ruff = "^0.4.7"
isort = "^5.13.2"
pydantic = "^2.9.2"
pytest = "^8.3.4"
mypy = "^1.15.0"
pytest-asyncio = "^0.23.5"
pytest-cov = "^4.1.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.pyright]
venvPath = "venv"
venv = "venv"
[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
line_length = 120
[tool.pytest.ini_options]
testpaths = ["tests"]
pythonpath = ["."]
venv = "venv"
[tool.ruff]
line-length = 120

requirements.txt
@@ -0,0 +1,26 @@
# own auth
bcrypt
authlib
passlib
google-analytics-data
dogpile-cache
opensearch-py
colorlog
psycopg2-binary
dogpile-cache
httpx
redis[hiredis]
sentry-sdk[starlette,sqlalchemy]
starlette
gql
ariadne
granian
pydantic
fakeredis
pytest
pytest-asyncio
pytest-cov
mypy
ruff

@@ -18,6 +18,30 @@ from services.notify import notify_shout
from services.search import search_service
def create_shout_from_draft(session, draft, author_id):
# Create a new shout (publication)
shout = Shout(
body=draft.body,
slug=draft.slug,
cover=draft.cover,
cover_caption=draft.cover_caption,
lead=draft.lead,
description=draft.description,
title=draft.title,
subtitle=draft.subtitle,
layout=draft.layout,
media=draft.media,
lang=draft.lang,
seo=draft.seo,
created_by=author_id,
community=draft.community,
draft=draft.id,
deleted_at=None,
)
return shout
@query.field("load_drafts")
@login_required
async def load_drafts(_, info):
@@ -45,8 +69,6 @@ async def create_draft(_, info, shout_id: int = 0):
with local_session() as session:
draft = Draft(created_by=author_id)
if shout_id:
draft.shout = shout_id
session.add(draft)
session.commit()
return {"draft": draft}
@@ -84,6 +106,8 @@ async def delete_draft(_, info, draft_id: int):
draft = session.query(Draft).filter(Draft.id == draft_id).first()
if not draft:
return {"error": "Draft not found"}
if author_id != draft.created_by and draft.authors.filter(Author.id == author_id).count() == 0:
return {"error": "You are not allowed to delete this draft"}
session.delete(draft)
session.commit()
return {"draft": draft}
@@ -102,7 +126,10 @@ async def publish_draft(_, info, draft_id: int):
draft = session.query(Draft).filter(Draft.id == draft_id).first()
if not draft:
return {"error": "Draft not found"}
return publish_shout(None, None, draft.shout, draft)
shout = create_shout_from_draft(session, draft, author_id)
session.add(shout)
session.commit()
return {"shout": shout}
@mutation.field("unpublish_draft")
@@ -116,8 +143,14 @@ async def unpublish_draft(_, info, draft_id: int):
with local_session() as session:
draft = session.query(Draft).filter(Draft.id == draft_id).first()
shout_id = draft.shout
unpublish_shout(None, None, shout_id)
if not draft:
return {"error": "Draft not found"}
shout = session.query(Shout).filter(Shout.draft == draft.id).first()
if shout:
shout.published_at = None
session.commit()
return {"shout": shout}
return {"error": "Failed to unpublish draft"}
@mutation.field("publish_shout")
@@ -132,47 +165,23 @@ async def publish_shout(_, info, shout_id: int, draft=None):
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
now = int(time.time())
if not user_id or not author_id:
return {"error": "User ID and author ID are required"}
try:
with local_session() as session:
# Find the draft if one was not passed in
if not draft:
find_draft_stmt = select(Draft).where(Draft.shout == shout_id)
draft = session.execute(find_draft_stmt).scalar_one_or_none()
if not draft:
return {"error": "Draft not found"}
now = int(time.time())
# Find the existing shout or create a new one
shout = None
was_published = False
if shout_id:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
was_published = shout and shout.published_at is not None
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
# Create a new shout
shout = Shout(
body=draft.body,
slug=draft.slug,
cover=draft.cover,
cover_caption=draft.cover_caption,
lead=draft.lead,
description=draft.description,
title=draft.title,
subtitle=draft.subtitle,
layout=draft.layout,
media=draft.media,
lang=draft.lang,
seo=draft.seo,
created_by=author_id,
community=draft.community,
draft=draft.id,
deleted_at=None,
)
return {"error": "Shout not found"}
was_published = shout and shout.published_at is not None
draft = draft or session.query(Draft).where(Draft.id == shout.draft).first()
if not draft:
return {"error": "Draft not found"}
# Find the draft if one was not passed in
if not shout:
shout = create_shout_from_draft(session, draft, author_id)
else:
# Update the existing shout
shout.draft = draft.id
@@ -189,17 +198,14 @@ async def publish_shout(_, info, shout_id: int, draft=None):
shout.lang = draft.lang
shout.seo = draft.seo
# Update timestamps
shout.updated_at = now
# Set published_at only if this is a new publication
# or the shout was previously unpublished
if not was_published:
shout.published_at = now
draft.updated_at = now
draft.published_at = now
draft.updated_at = now
draft.published_at = now
shout.updated_at = now
# Set published_at only if this is a new publication
# or the shout was previously unpublished
if not was_published:
shout.published_at = now
# Handle author associations
if not session.query(ShoutAuthor).filter(
and_(ShoutAuthor.shout == shout.id, ShoutAuthor.author == author_id)
@@ -293,3 +299,5 @@ async def unpublish_shout(_, info, shout_id: int):
return {"error": "Failed to unpublish shout"}
return {"shout": shout}

@@ -10,7 +10,6 @@ from utils.logger import root_logger as logger
if __name__ == "__main__":
logger.info("started")
try:
granian_instance = Granian(
"main:app",
@@ -28,7 +27,7 @@ if __name__ == "__main__":
granian_instance.build_ssl_context(cert=Path("localhost.pem"), key=Path("localhost-key.pem"), password=None)
granian_instance.serve()
except Exception as error:
logger.error(f"Granian error: {error}", exc_info=True)
logger.error(error, exc_info=True)
raise
finally:
logger.info("stopped")

@@ -3,6 +3,7 @@ from asyncio.log import logger
import httpx
from ariadne import MutationType, QueryType
from services.db import create_table_if_not_exists, local_session
from settings import AUTH_URL
query = QueryType()
@@ -40,3 +41,53 @@ async def request_graphql_data(gql, url=AUTH_URL, headers=None):
logger.error(f"request_graphql_data error: {traceback.format_exc()}")
return None
def create_all_tables():
"""Create all database tables in the correct order."""
from orm import author, community, draft, notification, reaction, shout, topic, user
# Order matters: base tables without foreign keys first, then dependent tables
models_in_order = [
user.User, # Base auth table
author.Author, # Base table
community.Community, # Base table
topic.Topic, # Base table
# Relations for the base tables
author.AuthorFollower, # Depends on Author
community.CommunityFollower, # Depends on Community
topic.TopicFollower, # Depends on Topic
# Drafts (no longer dependent on Shout)
draft.Draft, # Depends only on Author
draft.DraftAuthor, # Depends on Draft and Author
draft.DraftTopic, # Depends on Draft and Topic
# Main content tables
shout.Shout, # Depends on Author and Draft
shout.ShoutAuthor, # Depends on Shout and Author
shout.ShoutTopic, # Depends on Shout and Topic
# Reactions
reaction.Reaction, # Depends on Author and Shout
shout.ShoutReactionsFollower, # Depends on Shout and Reaction
# Additional tables
author.AuthorRating, # Depends on Author
notification.Notification, # Depends on Author
notification.NotificationSeen, # Depends on Notification
# collection.Collection,
# collection.ShoutCollection,
# invite.Invite
]
with local_session() as session:
for model in models_in_order:
try:
create_table_if_not_exists(session.get_bind(), model)
logger.info(f"Created or verified table: {model.__tablename__}")
except Exception as e:
logger.error(f"Error creating table {model.__tablename__}: {e}")
raise
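
`create_table_if_not_exists` itself is not shown in this diff; a minimal sketch of what such a helper could look like with plain SQLAlchemy 2.x (an assumption, not the repository's actual implementation):

```python
from sqlalchemy import inspect
from sqlalchemy.engine import Engine


def create_table_if_not_exists(engine: Engine, model) -> None:
    # Create the model's table only if it is not present yet (sketch, not the repo's code)
    if not inspect(engine).has_table(model.__tablename__):
        model.__table__.create(engine, checkfirst=True)
```
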

@@ -1,15 +1,29 @@
import logging
from pathlib import Path
import colorlog
_lib_path = Path(__file__).parents[1]
_leng_path = len(_lib_path.as_posix())
def filter(record: logging.LogRecord):
# Define `package` attribute with the relative path.
record.package = record.pathname[_leng_path+1:].replace(".py", "")
record.emoji = "🔍" if record.levelno == logging.DEBUG \
else "🖊️" if record.levelno == logging.INFO \
else "🚧" if record.levelno == logging.WARNING \
else "" if record.levelno == logging.ERROR \
else "🧨" if record.levelno == logging.CRITICAL \
else ""
return record
# Define the color scheme
color_scheme = {
"DEBUG": "cyan",
"DEBUG": "light_black",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red,bg_white",
"DEFAULT": "white",
}
# Define secondary log colors
@@ -17,12 +31,12 @@ secondary_colors = {
"log_name": {"DEBUG": "blue"},
"asctime": {"DEBUG": "cyan"},
"process": {"DEBUG": "purple"},
"module": {"DEBUG": "cyan,bg_blue"},
"funcName": {"DEBUG": "light_white,bg_blue"},
"module": {"DEBUG": "light_black,bg_blue"},
"funcName": {"DEBUG": "light_white,bg_blue"}, # Add this line
}
# Define the log format string
fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s"
fmt_string = "%(emoji)s%(log_color)s%(package)s.%(funcName)s%(reset)s %(white)s%(message)s"
# Define formatting configuration
fmt_config = {
@@ -40,6 +54,10 @@ class MultilineColoredFormatter(colorlog.ColoredFormatter):
self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})
def format(self, record):
# Add default emoji if not present
if not hasattr(record, 'emoji'):
record = filter(record)
message = record.getMessage()
if "\n" in message:
lines = message.split("\n")
@@ -61,8 +79,24 @@ formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
stream = logging.StreamHandler()
stream.setFormatter(formatter)
def get_colorful_logger(name="main"):
# Create and configure the logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)
logger.addFilter(filter)
return logger
# Set up the root logger with the same formatting
root_logger = logging.getLogger()
if not root_logger.hasHandlers():
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)
root_logger.addFilter(filter)
ignore_logs = ["_trace", "httpx", "_client", "_trace.atrace", "aiohttp", "_client"]
for lgr in ignore_logs:
loggr = logging.getLogger(lgr)
loggr.setLevel(logging.INFO)
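
A short usage sketch for the logger module above; the prefixes shown in the comments are approximate, since the `package` path and the emoji are attached at runtime by `filter`:

```python
from utils.logger import get_colorful_logger, root_logger as logger

logger.info("server started")       # rendered roughly as: 🖊️ <package>.<funcName> server started

cache_log = get_colorful_logger("cache")
cache_log.debug("precache warmed")  # rendered roughly as: 🔍 <package>.<funcName> precache warmed
```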