Compare commits


39 Commits
dev...main

Author SHA1 Message Date
e942d82412 merged
All checks were successful
Deploy to v2 / deploy (push) Successful in 2m9s
2024-02-18 10:47:07 +03:00
b274d83aae pythonpath-patch
All checks were successful
Deploy to v2 / deploy (push) Successful in 1m50s
2024-02-18 08:28:51 +03:00
a5b4f87177 default-connections2
All checks were successful
Deploy to v2 / deploy (push) Successful in 6m41s
2024-02-17 23:21:39 +03:00
201b3c2f33 default-connections
All checks were successful
Deploy to v2 / deploy (push) Successful in 1m47s
2024-02-17 23:17:12 +03:00
43d6c36af9 init
All checks were successful
Deploy to v2 / deploy (push) Successful in 1m50s
2024-02-17 23:00:05 +03:00
43e3ae568f redis
All checks were successful
Deploy to v2 / deploy (push) Successful in 1m35s
2024-02-17 16:46:08 +03:00
d963ed714c stay-ubuntu
Some checks failed
Deploy to v2 / deploy (push) Failing after 22s
2024-02-17 14:03:33 +03:00
c758984158 simple-dockerfile
Some checks failed
Deploy to v2 / deploy (push) Failing after 1m25s
2024-02-17 14:00:32 +03:00
cef6ff8157 docker-fix
Some checks failed
Deploy to v2 / deploy (push) Failing after 1m27s
2024-02-17 03:37:46 +03:00
1e7f01c6d5 poetry-2
Some checks failed
Deploy to v2 / deploy (push) Failing after 8s
2024-02-17 03:35:59 +03:00
24c897ba42 ubuntu-img
Some checks failed
Deploy to v2 / deploy (push) Failing after 40s
2024-02-17 03:24:12 +03:00
0a1ccb1a14 rustup-3
Some checks failed
Deploy to v2 / deploy (push) Failing after 25s
2024-02-17 03:22:48 +03:00
d379c288f1 rustup-2
Some checks failed
Deploy to v2 / deploy (push) Failing after 1m8s
2024-02-17 03:20:39 +03:00
5be2039b30 rustup
Some checks failed
Deploy to v2 / deploy (push) Failing after 59s
2024-02-17 03:18:32 +03:00
1bb362249b less-versioning-py-2
Some checks failed
Deploy to v2 / deploy (push) Failing after 57s
2024-02-17 03:15:34 +03:00
51ee7f728c less-versioning-py
Some checks failed
Deploy to v2 / deploy (push) Failing after 10s
2024-02-17 03:13:34 +03:00
660cdb5a7b ci-fix-3
Some checks failed
Deploy to v2 / deploy (push) Failing after 1m31s
2024-02-17 03:04:12 +03:00
1b422c0b6a ci-fix-2
Some checks failed
deploy to v2 / test (push) Failing after 5s
deploy to v2 / deploy (push) Has been skipped
2024-02-17 03:02:24 +03:00
1a43563049 ci-fix
Some checks failed
deploy to v2 / test (push) Failing after 23s
deploy to v2 / deploy (push) Has been skipped
2024-02-17 03:00:55 +03:00
8b39b47714 fixed-fmt-linted
Some checks failed
deploy to v2 / test (push) Failing after 49s
deploy to v2 / deploy (push) Has been skipped
2024-02-17 02:56:15 +03:00
2163e85b16 port8000
Some checks failed
deploy to v2 / test (push) Failing after 2m37s
deploy to v2 / deploy (push) Has been skipped
2024-02-15 16:00:35 +03:00
b773e03f9b 312
Some checks failed
deploy to v2 / test (push) Failing after 31s
deploy to v2 / deploy (push) Has been skipped
2024-02-15 12:55:24 +03:00
7a3f823804 py312-fix
Some checks failed
deploy to v2 / test (push) Failing after 2m36s
deploy to v2 / deploy (push) Has been skipped
2024-02-15 12:26:17 +03:00
25bdca1371 Merge branch 'main' of https://dev.discours.io/discours.io/notifier
Some checks failed
deploy to v2 / deploy (push) Waiting to run
deploy to v2 / test (push) Has been cancelled
2024-02-15 12:23:37 +03:00
d9d4667c6a py312-fix 2024-02-15 12:23:25 +03:00
Stepan Vladovskii
82226bdcb5 feat: change port to 80
Some checks failed
deploy to v2 / test (push) Failing after 2m19s
deploy to v2 / deploy (push) Has been skipped
2024-02-13 17:08:13 -03:00
Stepan Vladovskii
41add50256 feat: change port to 8000
All checks were successful
deploy to v2 / test (push) Successful in 2m30s
deploy to v2 / deploy (push) Successful in 1m58s
2024-02-13 16:42:16 -03:00
125fa2b86d notest
All checks were successful
deploy to v2 / test (push) Successful in 40s
deploy to v2 / deploy (push) Successful in 1m19s
2024-02-05 17:17:18 +03:00
625099568c ci-fix-3.9
Some checks failed
deploy to v2 / test (push) Failing after 39s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:20:00 +03:00
c096dc6fbc ci-fix-6
Some checks failed
deploy to v2 / test (push) Failing after 24s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:18:34 +03:00
09f44ec857 ci-fix-3.11
Some checks failed
deploy to v2 / test (push) Failing after 5s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:17:53 +03:00
aa165b8d94 ci-fix-4
Some checks failed
deploy to v2 / test (push) Failing after 23s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:16:54 +03:00
9220857577 p311-2
Some checks failed
deploy to v2 / test (push) Failing after 4s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:12:00 +03:00
c363b5478e p311
Some checks failed
deploy to v2 / test (push) Failing after 3s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:11:21 +03:00
404a8256d9 ci-upgrade
Some checks failed
deploy to v2 / test (push) Failing after 16s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:09:21 +03:00
89ab5545ae test-ready
Some checks failed
deploy to v2 / test (push) Failing after 21s
deploy to v2 / deploy (push) Has been skipped
2024-02-05 16:01:26 +03:00
28a363663d ci-fix 2024-02-05 15:31:43 +03:00
fe53ff6afc granian-dry-test 2024-02-04 08:03:29 +03:00
b98da839ed precommit 2024-02-04 07:58:44 +03:00
12 changed files with 260 additions and 313 deletions


@@ -1,4 +1,4 @@
-name: "Deploy to notifier"
+name: 'Deploy to v2'
 on: [push]
 jobs:
@@ -21,6 +21,6 @@ jobs:
       - name: Push to dokku
         uses: dokku/github-action@master
         with:
-          branch: "main"
-          git_remote_url: "ssh://dokku@v2.discours.io:22/notifier"
+          branch: 'main'
+          git_remote_url: 'ssh://dokku@v2.discours.io:22/notifier'
           ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}

.gitignore

@@ -6,3 +6,4 @@ __pycache__
 poetry.lock
 .venv
 .ruff_cache
+.pytest_cache


@@ -9,7 +9,6 @@ repos:
       - id: trailing-whitespace
       - id: check-added-large-files
       - id: detect-private-key
-      - id: double-quote-string-fixer
       - id: check-ast
       - id: check-merge-conflict
@@ -18,3 +17,4 @@ repos:
     hooks:
       - id: ruff
         args: [--fix]
+      - id: ruff-format


@@ -1,22 +1,15 @@
-# Use an official Python runtime as a parent image
 FROM python:slim

-# Set the working directory in the container to /app
 WORKDIR /app

-# Add metadata to the image to describe that the container is listening on port 80
-EXPOSE 8000
-
-# Copy the current directory contents into the container at /app
 COPY . /app

-# Install any needed packages specified in pyproject.toml
-RUN apt-get update && apt-get install -y gcc curl && \
+RUN apt-get update && apt-get install -y git gcc curl postgresql && \
     curl -sSL https://install.python-poetry.org | python - && \
     echo "export PATH=$PATH:/root/.local/bin" >> ~/.bashrc && \
     . ~/.bashrc && \
     poetry config virtualenvs.create false && \
     poetry install --no-dev

-# Run server.py when the container launches
+EXPOSE 8000
+
+# Run server when the container launches
 CMD python server.py

__init__.py (new file)

@@ -0,0 +1,6 @@
+import os
+import sys
+
+# Get the path to the project's root directory
+root_path = os.path.abspath(os.path.dirname(__file__))
+sys.path.append(root_path)
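A note on this new root `__init__.py`: putting the project root on `sys.path` lets the repo's top-level packages resolve regardless of the working directory the process is started from, which is what the `pythonpath-patch` commit above is after. A minimal sketch of the effect (the `resolvers` import is illustrative, taken from the module names visible elsewhere in this diff):

```python
import os
import sys

# Same idea as the patch, made idempotent for illustration:
root_path = os.path.abspath(os.path.dirname(__file__))
if root_path not in sys.path:
    sys.path.append(root_path)

# With the root on sys.path, repo-level packages import as top-level modules:
import resolvers  # noqa: E402  (illustrative; any sibling package works)
```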

main.py

@@ -16,23 +16,23 @@ from services.rediscache import redis
 from settings import DEV_SERVER_PID_FILE_NAME, MODE, SENTRY_DSN

-logger = logging.getLogger('\t[main]\t')
+logger = logging.getLogger("\t[main]\t")
 logger.setLevel(logging.DEBUG)


 async def start_up():
-    logger.info('[main] starting...')
+    logger.info("[main] starting...")
     await redis.connect()
     task = asyncio.create_task(notifications_worker())
     logger.info(task)

-    if MODE == 'dev':
+    if MODE == "dev":
         if exists(DEV_SERVER_PID_FILE_NAME):
-            with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
+            with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
                 f.write(str(os.getpid()))
     else:
-        logger.info('[main] production mode')
+        logger.info("[main] production mode")
         try:
             import sentry_sdk
@@ -47,7 +47,7 @@ async def start_up():
             ],
         )
     except Exception as e:
-        logger.error('sentry init error', e)
+        logger.error("sentry init error", e)


 async def shutdown():
@@ -55,4 +55,4 @@ async def shutdown():

 app = Starlette(debug=True, on_startup=[start_up], on_shutdown=[shutdown])
-app.mount('/', GraphQL(schema, graphiql=True, debug=True))
+app.mount("/", GraphQL(schema, debug=True))
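The middle hunk above only shows the tail of the Sentry setup (`],` and `)`), so the actual arguments are elided. For orientation only, a hedged sketch of what an init block with the strawberry integration typically looks like — `StrawberryIntegration` is assumed here from the `sentry-sdk[strawberry]` extra added in the pyproject.toml change below, and the real options in the repo may differ:

```python
import sentry_sdk
from sentry_sdk.integrations.strawberry import StrawberryIntegration

from settings import SENTRY_DSN  # imported at the top of main.py above

# Assumed shape of the elided block; actual options are not shown in this diff.
sentry_sdk.init(
    dsn=SENTRY_DSN,
    integrations=[
        StrawberryIntegration(async_execution=True),
    ],
)
```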


@@ -1,113 +1,25 @@
 [tool.poetry]
 name = "discoursio-notifier"
 version = "0.3.0"
-description = "notifier server for discours.io"
-authors = ["discours.io devteam"]
+description = "notifier service for discours.io"
+authors = ["Untone <anton.rewin@gmail.com>"]
+license = "MIT"
+readme = "README.md"

 [tool.poetry.dependencies]
 python = "^3.12"
-SQLAlchemy = "^2.0.22"
-psycopg2-binary = "^2.9.9"
+aiohttp = "^3.9.3"
 redis = {extras = ["hiredis"], version = "^5.0.1"}
-strawberry-graphql = {extras = ["asgi", "debug-server"], version = "^0.219.0" }
+strawberry-graphql = {extras = ["asgi", "debug-server"], version = "^0.219.2"}
+granian = "^1.1.0"
+sentry-sdk = {extras = ["strawberry"], version = "^1.40.4"}
 strawberry-sqlalchemy-mapper = "^0.4.2"
-sentry-sdk = "^1.39.2"
-aiohttp = "^3.9.1"
-pre-commit = "^3.6.0"
-granian = "^1.0.1"
-discoursio-core = { git = "https://dev.discours.io/discours.io/core.git", branch = "feature/core" }
+psycopg2-binary = "^2.9.9"
+SQLAlchemy = "^2.0.27"

 [tool.poetry.group.dev.dependencies]
-setuptools = "^69.0.2"
-pyright = "^1.1.341"
-pytest = "^7.4.2"
-black = { version = "^23.12.0", python = ">=3.12" }
-ruff = { version = "^0.1.15", python = ">=3.12" }
-isort = "^5.13.2"
+ruff = "^0.2.1"

 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
-
-[tool.setuptools.dynamic]
-version = {attr = "notifier.__version__"}
-readme = {file = "README.md"}
-
-[tool.ruff]
-line-length = 120
-extend-select = [
-    # E and F are enabled by default
-    'B',      # flake8-bugbear
-    'C4',     # flake8-comprehensions
-    'C90',    # mccabe
-    'I',      # isort
-    'N',      # pep8-naming
-    'Q',      # flake8-quotes
-    'S',      # flake8-bandit
-    'W',      # pycodestyle
-]
-extend-ignore = [
-    'B008',   # function calls in args defaults are fine
-    'B009',   # getattr with constants is fine
-    'B034',   # re.split won't confuse us
-    'B904',   # rising without from is fine
-    'E501',   # leave line length to black
-    'N818',   # leave to us exceptions naming
-    'S101',   # assert is fine
-    'E712',   # allow == True
-]
-flake8-quotes = { inline-quotes = 'single', multiline-quotes = 'double' }
-mccabe = { max-complexity = 13 }
-target-version = "py312"
-
-[tool.ruff.format]
-quote-style = 'single'
-
-[tool.black]
-skip-string-normalization = true
-
-[tool.ruff.isort]
-combine-as-imports = true
-lines-after-imports = 2
-known-first-party = ['resolvers', 'services', 'orm', 'tests']
-
-[tool.ruff.per-file-ignores]
-'tests/**' = ['B018', 'S110', 'S501']
-
-[tool.mypy]
-python_version = "3.12"
-warn_return_any = true
-warn_unused_configs = true
-ignore_missing_imports = true
-exclude = ["nb"]
-
-[tool.pytest.ini_options]
-asyncio_mode = 'auto'
-
-[tool.pyright]
-venvPath = "."
-venv = ".venv"
-include = ["."]
-useLibraryCodeForTypes = true
-disableLanguageServices = false
-disableOrganizeImports = false
-reportMissingImports = false
-reportMissingModuleSource = "warning"
-reportImportCycles = "warning"
-maxMemoryForLargeFile = 4096
-pythonVersion = "3.12"
-autoImportCompletions = true
-useVirtualEnv = true
-typeCheckingMode = "basic"
-disableJediCompletion = false
-disableCompletion = false
-disableSnippetCompletion = false
-disableGoToDefinition = false
-disableRenaming = false
-disableSignatureHelp = false
-diagnostics = true
-logLevel = "Information"
-pluginSearchPaths = []
-typings = {}
-mergeTypeStubPackages = false


@@ -1,7 +1,7 @@
 import json
 import logging
 import time
-from typing import Dict, List
+from typing import Dict, List, Tuple, Union

 import strawberry
 from sqlalchemy import and_, select
@@ -24,13 +24,134 @@ from resolvers.model import (
 from services.db import local_session

-logger = logging.getLogger('[resolvers.schema] ')
+logger = logging.getLogger("[resolvers.schema]")
 logger.setLevel(logging.DEBUG)


-async def get_notifications_grouped(  # noqa: C901
-    author_id: int, after: int = 0, limit: int = 10, offset: int = 0
-):
+def query_notifications(
+    author_id: int, after: int = 0
+) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
+    notification_seen_alias = aliased(NotificationSeen)
+    query = select(
+        Notification, notification_seen_alias.viewer.label("seen")
+    ).outerjoin(
+        NotificationSeen,
+        and_(
+            NotificationSeen.viewer == author_id,
+            NotificationSeen.notification == Notification.id,
+        ),
+    )
+    if after:
+        query = query.filter(Notification.created_at > after)
+    query = query.group_by(NotificationSeen.notification, Notification.created_at)
+
+    with local_session() as session:
+        total = (
+            session.query(Notification)
+            .filter(
+                and_(
+                    Notification.action == NotificationAction.CREATE.value,
+                    Notification.created_at > after,
+                )
+            )
+            .count()
+        )
+        unread = (
+            session.query(Notification)
+            .filter(
+                and_(
+                    Notification.action == NotificationAction.CREATE.value,
+                    Notification.created_at > after,
+                    not_(Notification.seen),
+                )
+            )
+            .count()
+        )
+        notifications_result = session.execute(query)
+        notifications = []
+        for n, seen in notifications_result:
+            notifications.append((n, seen))
+
+    return total, unread, notifications
+
+
+def process_shout_notification(
+    notification: Notification, seen: bool
+) -> Union[Tuple[str, NotificationGroup], None] | None:
+    if not isinstance(notification.payload, str) or not isinstance(
+        notification.entity, str
+    ):
+        return
+    payload = json.loads(notification.payload)
+    shout: NotificationShout = payload
+    thread_id = str(shout.id)
+    group = NotificationGroup(
+        id=thread_id,
+        entity=notification.entity,
+        shout=shout,
+        authors=shout.authors,
+        updated_at=shout.created_at,
+        reactions=[],
+        action="create",
+        seen=seen,
+    )
+    return thread_id, group
+
+
+def process_reaction_notification(
+    notification: Notification, seen: bool
+) -> Union[Tuple[str, NotificationGroup], None] | None:
+    if (
+        not isinstance(notification, Notification)
+        or not isinstance(notification.payload, str)
+        or not isinstance(notification.entity, str)
+    ):
+        return
+    payload = json.loads(notification.payload)
+    reaction: NotificationReaction = payload
+    shout: NotificationShout = reaction.shout
+    thread_id = str(reaction.shout)
+    if reaction.kind == "COMMENT" and reaction.reply_to:
+        thread_id += f"::{reaction.reply_to}"
+    group = NotificationGroup(
+        id=thread_id,
+        action=str(notification.action),
+        entity=notification.entity,
+        updated_at=reaction.created_at,
+        reactions=[reaction.id],
+        shout=shout,
+        authors=[reaction.created_by],
+        seen=seen,
+    )
+    return thread_id, group
+
+
+def process_follower_notification(
+    notification: Notification, seen: bool
+) -> Union[Tuple[str, NotificationGroup], None] | None:
+    if not isinstance(notification.payload, str):
+        return
+    payload = json.loads(notification.payload)
+    follower: NotificationAuthor = payload
+    thread_id = "followers"
+    group = NotificationGroup(
+        id=thread_id,
+        authors=[follower],
+        updated_at=int(time.time()),
+        shout=None,
+        reactions=[],
+        entity="follower",
+        action="follow",
+        seen=seen,
+    )
+    return thread_id, group
+
+
+async def get_notifications_grouped(
+    author_id: int, after: int = 0, limit: int = 10
+) -> Tuple[Dict[str, NotificationGroup], int, int]:
     """
     Retrieves notifications for a given author.
@@ -56,163 +177,66 @@ async def get_notifications_grouped(  # noqa: C901
         authors: List[NotificationAuthor],  # List of authors involved in the thread.
     }
     """
-    seen_alias = aliased(NotificationSeen)
-    query = select(Notification, seen_alias.viewer.label('seen')).outerjoin(
-        NotificationSeen,
-        and_(
-            NotificationSeen.viewer == author_id,
-            NotificationSeen.notification == Notification.id,
-        ),
-    )
-    if after:
-        query = query.filter(Notification.created_at > after)
-    query = query.group_by(NotificationSeen.notification, Notification.created_at)
-    groups_amount = 0
-    unread = 0
-    total = 0
-    notifications_by_thread: Dict[str, List[Notification]] = {}
-    groups_by_thread: Dict[str, NotificationGroup] = {}
-    with local_session() as session:
-        total = (
-            session.query(Notification)
-            .filter(
-                and_(
-                    Notification.action == NotificationAction.CREATE.value,
-                    Notification.created_at > after,
-                )
-            )
-            .count()
-        )
-        unread = (
-            session.query(Notification)
-            .filter(
-                and_(
-                    Notification.action == NotificationAction.CREATE.value,
-                    Notification.created_at > after,
-                    not_(Notification.seen),
-                )
-            )
-            .count()
-        )
-        notifications_result = session.execute(query)
-        for n, _seen in notifications_result:
-            thread_id = ''
-            payload = json.loads(n.payload)
-            logger.debug(f'[resolvers.schema] {n.action} {n.entity}: {payload}')
-            if n.entity == 'shout' and n.action == 'create':
-                shout: NotificationShout = payload
-                thread_id += f'{shout.id}'
-                logger.debug(f'create shout: {shout}')
-                group = groups_by_thread.get(thread_id) or NotificationGroup(
-                    id=thread_id,
-                    entity=n.entity,
-                    shout=shout,
-                    authors=shout.authors,
-                    updated_at=shout.created_at,
-                    reactions=[],
-                    action='create',
-                    seen=author_id in n.seen,
-                )
-                # store group in result
-                groups_by_thread[thread_id] = group
-                notifications = notifications_by_thread.get(thread_id, [])
-                if n not in notifications:
-                    notifications.append(n)
-                notifications_by_thread[thread_id] = notifications
-                groups_amount += 1
-            elif n.entity == NotificationEntity.REACTION.value and n.action == NotificationAction.CREATE.value:
-                reaction: NotificationReaction = payload
-                shout: NotificationShout = reaction.shout
-                thread_id += f'{reaction.shout}'
-                if not bool(reaction.reply_to) and (reaction.kind == 'LIKE' or reaction.kind == 'DISLIKE'):
-                    # TODO: making published reaction vote announce
-                    pass
-                elif reaction.kind == 'COMMENT':
-                    if reaction.reply_to:
-                        thread_id += f"{'::' + str(reaction.reply_to)}"
-                    group: NotificationGroup | None = groups_by_thread.get(thread_id)
-                    notifications: List[Notification] = notifications_by_thread.get(thread_id, [])
-                    if group and notifications:
-                        group.seen = False  # any not seen notification make it false
-                        group.shout = shout
-                        group.authors.append(reaction.created_by)
-                        if not group.reactions:
-                            group.reactions = []
-                        group.reactions.append(reaction.id)
-                        # store group in result
-                        groups_by_thread[thread_id] = group
-                        notifications = notifications_by_thread.get(thread_id, [])
-                        if n not in notifications:
-                            notifications.append(n)
-                        notifications_by_thread[thread_id] = notifications
-                        groups_amount += 1
-                    else:
-                        groups_amount += 1
-                        if groups_amount > limit:
-                            break
-                        else:
-                            # init notification group
-                            reactions = [
-                                reaction.id,
-                            ]
-                            group = NotificationGroup(
-                                id=thread_id,
-                                action=n.action,
-                                entity=n.entity,
-                                updated_at=reaction.created_at,
-                                reactions=reactions,
-                                shout=shout,
-                                authors=[
-                                    reaction.created_by,
-                                ],
-                                seen=author_id in n.seen,
-                            )
-                            # store group in result
-                            groups_by_thread[thread_id] = group
-                            notifications = notifications_by_thread.get(thread_id, [])
-                            if n not in notifications:
-                                notifications.append(n)
-                            notifications_by_thread[thread_id] = notifications
-            elif n.entity == 'follower':
-                thread_id = 'followers'
-                follower: NotificationAuthor = payload
-                group = groups_by_thread.get(thread_id) or NotificationGroup(
-                    id=thread_id,
-                    authors=[follower],
-                    updated_at=int(time.time()),
-                    shout=None,
-                    reactions=[],
-                    entity='follower',
-                    action='follow',
-                    seen=author_id in n.seen,
-                )
-                group.authors = [
-                    follower,
-                ]
-                group.updated_at = int(time.time())
-                # store group in result
-                groups_by_thread[thread_id] = group
-                notifications = notifications_by_thread.get(thread_id, [])
-                if n not in notifications:
-                    notifications.append(n)
-                notifications_by_thread[thread_id] = notifications
-                groups_amount += 1
-                if groups_amount > limit:
-                    break
-    return groups_by_thread, notifications_by_thread, unread, total
+    total, unread, notifications = query_notifications(author_id, after)
+    groups_by_thread: Dict[str, NotificationGroup] = {}
+    groups_amount = 0
+
+    for notification, seen in notifications:
+        if groups_amount >= limit:
+            break
+
+        if str(notification.entity) == "shout" and str(notification.action) == "create":
+            result = process_shout_notification(notification, seen)
+            if result:
+                thread_id, group = result
+                groups_by_thread[thread_id] = group
+                groups_amount += 1
+        elif (
+            str(notification.entity) == NotificationEntity.REACTION.value
+            and str(notification.action) == NotificationAction.CREATE.value
+        ):
+            result = process_reaction_notification(notification, seen)
+            if result:
+                thread_id, group = result
+                existing_group = groups_by_thread.get(thread_id)
+                if existing_group:
+                    existing_group.seen = False
+                    existing_group.shout = group.shout
+                    existing_group.authors.append(group.authors[0])
+                    if not existing_group.reactions:
+                        existing_group.reactions = []
+                    existing_group.reactions.extend(group.reactions or [])
+                    groups_by_thread[thread_id] = existing_group
+                else:
+                    groups_by_thread[thread_id] = group
+                    groups_amount += 1
+        elif str(notification.entity) == "follower":
+            result = process_follower_notification(notification, seen)
+            if result:
+                thread_id, group = result
+                groups_by_thread[thread_id] = group
+                groups_amount += 1
+
+    return groups_by_thread, unread, total


 @strawberry.type
 class Query:
     @strawberry.field
-    async def load_notifications(self, info, after: int, limit: int = 50, offset: int = 0) -> NotificationsResult:
-        author_id = info.context.get('author_id')
-        groups: Dict[str, NotificationGroup] = {}
+    async def load_notifications(
+        self, info, after: int, limit: int = 50, offset: int = 0
+    ) -> NotificationsResult:
+        author_id = info.context.get("author_id")
         if author_id:
-            groups, notifications, total, unread = await get_notifications_grouped(author_id, after, limit, offset)
-            notifications = sorted(groups.values(), key=lambda group: group.updated_at, reverse=True)
-        return NotificationsResult(notifications=notifications, total=0, unread=0, error=None)
+            groups, unread, total = await get_notifications_grouped(
+                author_id, after, limit
+            )
+            notifications = sorted(
+                groups.values(), key=lambda group: group.updated_at, reverse=True
+            )
+            return NotificationsResult(
+                notifications=notifications, total=total, unread=unread, error=None
+            )
+        return NotificationsResult(notifications=[], total=0, unread=0, error=None)
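
To summarize the grouping rule that this refactor splits into the three `process_*` helpers: notifications collapse into threads keyed by shout id, by `shout_id::reply_to` for comment replies, and by a single `followers` bucket. A standalone sketch of that keying (dict payloads are a simplification here; the real code parses JSON into typed payloads):

```python
def thread_key(entity: str, payload: dict) -> str:
    """Illustrative reduction of the thread-id logic above."""
    if entity == "shout":  # new publication -> one thread per shout
        return str(payload["id"])
    if entity == "reaction":  # reactions group under their shout
        key = str(payload["shout"])
        if payload.get("kind") == "COMMENT" and payload.get("reply_to"):
            key += f"::{payload['reply_to']}"  # replies nest under the comment
        return key
    return "followers"  # all follow events share a single thread


assert thread_key("shout", {"id": 42}) == "42"
assert thread_key("reaction", {"shout": 42, "kind": "COMMENT", "reply_to": 7}) == "42::7"
assert thread_key("follower", {}) == "followers"
```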


@@ -1,14 +1,15 @@
 from granian.constants import Interfaces
 from granian.server import Granian

+from settings import PORT

-if __name__ == '__main__':
-    print('[server] started')
+if __name__ == "__main__":
+    print("[server] started")
     granian_instance = Granian(
-        'main:app',
-        address='0.0.0.0',  # noqa S104
-        port=8000,
+        "main:app",
+        address="0.0.0.0",  # noqa S104
+        port=PORT,
         workers=2,
         threads=2,
         websockets=False,
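
The hunk stops before the end of the file, so the call that actually starts the server is not visible here. Assuming the file follows granian's embedded API (the `Interfaces` import above suggests an explicit interface argument), the launcher presumably ends roughly like this:

```python
from granian.constants import Interfaces
from granian.server import Granian

from settings import PORT

if __name__ == "__main__":
    print("[server] started")
    granian_instance = Granian(
        "main:app",
        address="0.0.0.0",  # noqa S104
        port=PORT,
        workers=2,
        threads=2,
        websockets=False,
        interface=Interfaces.ASGI,  # assumed: the Starlette app is served as ASGI
    )
    granian_instance.serve()  # assumed closing call; not shown in the hunk
```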


@@ -8,53 +8,62 @@ from services.db import local_session
 from settings import AUTH_URL

-logger = logging.getLogger('\t[services.auth]\t')
+logger = logging.getLogger("\t[services.auth]\t")
 logger.setLevel(logging.DEBUG)


 async def check_auth(req) -> str | None:
-    token = req.headers.get('Authorization')
-    user_id = ''
+    token = req.headers.get("Authorization")
+    user_id = ""
     if token:
-        query_name = 'validate_jwt_token'
-        operation = 'ValidateToken'
+        query_name = "validate_jwt_token"
+        operation = "ValidateToken"
         headers = {
-            'Content-Type': 'application/json',
+            "Content-Type": "application/json",
         }
         variables = {
-            'params': {
-                'token_type': 'access_token',
-                'token': token,
+            "params": {
+                "token_type": "access_token",
+                "token": token,
             }
         }
         gql = {
-            'query': f'query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}',
-            'variables': variables,
-            'operationName': operation,
+            "query": f"query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}",
+            "variables": variables,
+            "operationName": operation,
         }
         try:
             # Asynchronous HTTP request to the authentication server
             async with ClientSession() as session:
-                async with session.post(AUTH_URL, json=gql, headers=headers) as response:
-                    print(f'[services.auth] HTTP Response {response.status} {await response.text()}')
+                async with session.post(
+                    AUTH_URL, json=gql, headers=headers
+                ) as response:
+                    logger.debug(
+                        f"HTTP Response {response.status} {await response.text()}"
+                    )
                     if response.status == 200:
                         data = await response.json()
-                        errors = data.get('errors')
+                        errors = data.get("errors")
                         if errors:
-                            print(f'errors: {errors}')
+                            logger.error(f"errors: {errors}")
                         else:
-                            user_id = data.get('data', {}).get(query_name, {}).get('claims', {}).get('sub')
+                            user_id = (
+                                data.get("data", {})
+                                .get(query_name, {})
+                                .get("claims", {})
+                                .get("sub")
+                            )
                             if user_id:
-                                print(f'[services.auth] got user_id: {user_id}')
+                                logger.info(f"got user_id: {user_id}")
                                 return user_id
         except Exception as e:
             import traceback

             traceback.print_exc()
             # Handling and logging exceptions during authentication check
-            print(f'[services.auth] Error {e}')
+            logger.error(f"Error {e}")
     return None
@@ -62,14 +71,14 @@ async def check_auth(req) -> str | None:

 class LoginRequiredMiddleware(Extension):
     async def on_request_start(self):
         context = self.execution_context.context
-        req = context.get('request')
+        req = context.get("request")
         user_id = await check_auth(req)
         if user_id:
-            context['user_id'] = user_id.strip()
+            context["user_id"] = user_id.strip()
             with local_session() as session:
                 author = session.query(Author).filter(Author.user == user_id).first()
                 if author:
-                    context['author_id'] = author.id
-            context['user_id'] = user_id or None
+                    context["author_id"] = author.id
+            context["user_id"] = user_id or None
         self.execution_context.context = context
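
For a quick sense of how `check_auth` is exercised outside the middleware, here is a hypothetical smoke test; `FakeRequest` stands in for the Starlette request object (only `.headers` is read), and a reachable auth service at `AUTH_URL` is assumed:

```python
import asyncio

from services.auth import check_auth  # import path as in this repo


class FakeRequest:
    """Stub exposing only what check_auth reads."""

    def __init__(self, token: str) -> None:
        self.headers = {"Authorization": token}


async def main() -> None:
    user_id = await check_auth(FakeRequest("<jwt-from-authorizer>"))
    print("resolved user_id:", user_id)  # None if the token does not validate


if __name__ == "__main__":
    asyncio.run(main())
```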


@@ -3,8 +3,7 @@ from typing import Any, Callable, Dict, TypeVar

 # from psycopg2.errors import UniqueViolation
 from sqlalchemy import Column, Integer, create_engine
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, declarative_base
 from sqlalchemy.sql.schema import Table

 from settings import DB_URL


@@ -1,14 +1,16 @@
 from os import environ

-PORT = 80
+PORT = 8000
 DB_URL = (
-    environ.get('DATABASE_URL', environ.get('DB_URL', '')).replace('postgres://', 'postgresql://')
-    or 'postgresql://postgres@localhost:5432/discoursio'
+    environ.get("DATABASE_URL", environ.get("DB_URL", "")).replace(
+        "postgres://", "postgresql://"
+    )
+    or "postgresql://postgres@localhost:5432/discoursio"
 )
-REDIS_URL = environ.get('REDIS_URL') or 'redis://127.0.0.1'
-API_BASE = environ.get('API_BASE') or 'https://core.discours.io'
-AUTH_URL = environ.get('AUTH_URL') or 'https://auth.discours.io'
-MODE = environ.get('MODE') or 'production'
-SENTRY_DSN = environ.get('SENTRY_DSN')
-DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
+REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
+API_BASE = environ.get("API_BASE") or "http://127.0.0.1:8001"
+AUTH_URL = environ.get("AUTH_URL") or "http://127.0.0.1:8080/graphql"
+MODE = environ.get("MODE") or "production"
+SENTRY_DSN = environ.get("SENTRY_DSN")
+DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
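
The `DB_URL` expression above packs several behaviors into one statement: prefer `DATABASE_URL`, fall back to `DB_URL`, rewrite Heroku-style `postgres://` schemes to the `postgresql://` dialect name SQLAlchemy 2.x expects, and default to a local database when neither variable is set. A small self-contained check of that logic (the function name is illustrative, not from the repo):

```python
def resolve_db_url(env: dict) -> str:
    # Mirrors the settings.py expression above, parameterized for testing.
    return (
        env.get("DATABASE_URL", env.get("DB_URL", "")).replace(
            "postgres://", "postgresql://"
        )
        or "postgresql://postgres@localhost:5432/discoursio"
    )


assert resolve_db_url({}) == "postgresql://postgres@localhost:5432/discoursio"
assert resolve_db_url({"DATABASE_URL": "postgres://u@h:5432/db"}) == "postgresql://u@h:5432/db"
assert resolve_db_url({"DB_URL": "postgresql://u@h/db"}) == "postgresql://u@h/db"
```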