Compare commits

...

126 Commits

Author SHA1 Message Date
Stepan Vladovskiy
412036521d no force in dev for stagging
All checks were successful
deploy / deploy (push) Successful in 47s
2025-02-12 20:00:51 -03:00
Stepan Vladovskiy
5ad7ff8265 fix: no root added for install only dependencies
All checks were successful
deploy / deploy (push) Successful in 49s
2025-02-12 19:58:36 -03:00
Stepan Vladovskiy
0a2ca2d231 fix: changed config in pyproject
Some checks failed
deploy / deploy (push) Failing after 34s
2025-02-12 19:54:39 -03:00
Stepan Vladovskiy
e7b71aae7c fix: poetry with without dev option
Some checks failed
deploy / deploy (push) Failing after 33s
2025-02-12 19:41:09 -03:00
Stepan Vladovskiy
b003aa8083 force
Some checks failed
deploy / deploy (push) Failing after 29s
2025-02-12 19:36:58 -03:00
Stepan Vladovskiy
954acf00e5 feat: sv nginx conf sigil without map function, becasue it is moved to global nginx config
Some checks failed
deploy / deploy (push) Failing after 7s
2025-01-29 16:29:24 -03:00
f5425d3599 cl
Some checks failed
deploy / deploy (push) Failing after 5s
2024-04-19 14:36:37 +03:00
76186df2b0 fmt
All checks were successful
deploy / deploy (push) Successful in 53s
2024-04-19 14:28:57 +03:00
3c219bfa69 author-cache-removed 2024-04-19 14:28:21 +03:00
41af1a7349 fmt
All checks were successful
deploy / deploy (push) Successful in 53s
2024-04-19 11:50:50 +03:00
6a450a84c1 logger-fix-getchat-fix 2024-04-19 11:50:25 +03:00
c96e8afc45 logfix
All checks were successful
deploy / deploy (push) Successful in 1m10s
2024-04-19 11:41:02 +03:00
12c5d2677d async-fix
All checks were successful
deploy / deploy (push) Successful in 49s
2024-04-19 11:22:57 +03:00
63cf0b9fee less-code
All checks were successful
deploy / deploy (push) Successful in 48s
2024-04-19 11:01:43 +03:00
38c0a4e3ee auth-fix
All checks were successful
deploy / deploy (push) Successful in 47s
2024-04-19 10:57:54 +03:00
97aed41143 typo-fix
All checks were successful
deploy / deploy (push) Successful in 47s
2024-04-19 10:48:51 +03:00
1a730a9eab core-adapter-upgrade
Some checks failed
deploy / deploy (push) Failing after 57s
2024-04-19 10:47:16 +03:00
fe069696d3 auth-update
Some checks failed
deploy / deploy (push) Failing after 1m5s
2024-04-18 14:28:10 +03:00
f2726633cd depfix
Some checks failed
deploy / deploy (push) Failing after 58s
2024-04-18 13:50:32 +03:00
5c4f73d2ca logger+fmt+isort-2
Some checks failed
deploy / deploy (push) Failing after 1m6s
2024-04-18 13:48:29 +03:00
595fa945cf Merge branch 'main' of https://dev.discours.io/discours.io/inbox 2024-04-18 13:47:31 +03:00
12602ac57c logger+fmt+isort 2024-04-18 13:47:01 +03:00
Stepan Vladovskiy
d292be591e feat: add in CI main->v2 , dev->staging
Some checks failed
deploy / deploy (push) Failing after 6s
2024-04-11 15:36:17 -03:00
Stepan Vladovskiy
a774108b18 feat: with Playground mounted on /
All checks were successful
deploy / deploy (push) Successful in 52s
2024-04-10 10:18:52 -03:00
Stepan Vladovskiy
dbf1d8880d feat: add healtcheck to ok in starlette app
All checks were successful
deploy / deploy (push) Successful in 52s
2024-04-09 15:11:17 -03:00
2b3aa43faf glitchtip
Some checks failed
deploy / deploy (push) Failing after 14s
2024-04-09 20:19:46 +03:00
3a30f8aa62 precommit-fix
Some checks failed
deploy / deploy (push) Failing after 13s
2024-04-08 09:32:43 +03:00
dcdd7e16af update 2024-04-08 09:31:46 +03:00
c8f65ca0c9 0.3.0+sentry
Some checks failed
deploy / deploy (push) Failing after 47s
2024-04-08 09:30:57 +03:00
106f1bfbde pythonpath-patch
All checks were successful
deploy / deploy (push) Successful in 1m23s
2024-02-18 08:28:20 +03:00
601330246d default-connections2
All checks were successful
deploy / deploy (push) Successful in 1m4s
2024-02-17 23:21:04 +03:00
ba193c2aae default-connections
All checks were successful
deploy / deploy (push) Successful in 1m7s
2024-02-17 23:18:03 +03:00
23e8310fb4 init
All checks were successful
deploy / deploy (push) Successful in 57s
2024-02-17 23:00:25 +03:00
2248afe3cd less-install
All checks were successful
deploy / deploy (push) Successful in 51s
2024-02-17 14:00:46 +03:00
b1ace7d82c fix-reload
All checks were successful
deploy / deploy (push) Successful in 1m36s
2024-02-17 13:56:06 +03:00
7c53dc0c42 Merge branch 'main' of https://dev.discours.io/discours.io/inbox
Some checks failed
deploy / deploy (push) Failing after 1m56s
2024-02-17 13:31:43 +03:00
ab1e426a17 refresh 2024-02-17 13:31:05 +03:00
899cb05c1b py-builder-too 2024-02-17 11:52:26 +03:00
Stepan Vladovskii
a4c1663cc7 feat: add in CI pipe to v2
All checks were successful
deploy / deploy (push) Successful in 1m12s
2024-01-27 20:21:10 -03:00
70079c38ad .. 2024-01-25 13:00:41 +03:00
152d5a4e99 precommit-setup 2024-01-25 12:25:52 +03:00
8435c8e6b5 precommit2 2024-01-25 11:28:06 +03:00
2df2f6accd granian-test-1 2024-01-25 11:18:29 +03:00
87cd8de5ab trig-deploy 2024-01-25 11:12:19 +03:00
fce949603e launch-fix 2024-01-25 01:34:44 +03:00
e0e1e88882 port-fix 2024-01-25 01:12:01 +03:00
7c8b58d613 test-asgi 2024-01-25 01:09:11 +03:00
91025d453f redis-debug-fix 2024-01-24 15:31:46 +03:00
751d91562e inbox-debug-7 2024-01-24 15:26:16 +03:00
cccaf16817 inbox-debug-6 2024-01-24 15:00:28 +03:00
a83ef022a3 inbox-debug-3 2024-01-24 13:06:47 +03:00
6e8c084816 inbox-debug-2 2024-01-24 12:51:16 +03:00
636b5446a2 cache-authors-fix 2024-01-24 12:22:05 +03:00
bd745407bf traceback 2024-01-24 10:47:34 +03:00
cbd77122a5 load-chats-debug 2024-01-24 10:36:50 +03:00
8901ded662 logs-redis-typings-fix 2024-01-24 00:13:14 +03:00
9b03e625f4 logs-fixes-debug
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-23 23:13:49 +03:00
80fed4049a sigil-fix 2024-01-15 11:25:28 +03:00
Igor Lobanov
339b6c6457 test 2024-01-11 08:46:17 +01:00
c8d46cf863 logs-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m13s
2024-01-11 10:11:23 +03:00
74d80f1447 less-log
All checks were successful
deploy / deploy (push) Successful in 1m15s
2024-01-11 10:06:40 +03:00
2d66870443 0.2.19-fix
All checks were successful
deploy / deploy (push) Successful in 1m15s
2024-01-10 16:31:01 +03:00
e7b9d419c4 logs-fix
All checks were successful
deploy / deploy (push) Successful in 1m10s
2023-12-25 01:53:14 +03:00
ce02ce0130 cached-storage-authors
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-23 09:11:04 +03:00
e81eabd0d0 discours-default-dialogue
All checks were successful
deploy / deploy (push) Successful in 1m14s
2023-12-19 20:43:39 +03:00
9635bb8a7c cached-request-14
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-19 20:37:43 +03:00
1bacbc93ee cached-request-13
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-19 20:34:36 +03:00
4dd419bdf9 cached-request-12
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-19 20:31:26 +03:00
01e8a07ae7 cached-request-11
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-19 20:26:52 +03:00
0188140224 cached-request-10
Some checks failed
deploy / deploy (push) Failing after 59s
2023-12-19 20:20:37 +03:00
2253bcf956 cached-request-9
Some checks failed
deploy / deploy (push) Failing after 1m1s
2023-12-19 20:19:16 +03:00
2380e88168 cached-request-8
All checks were successful
deploy / deploy (push) Successful in 1m14s
2023-12-19 19:42:53 +03:00
2d588c549e cached-request-7
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-19 19:30:51 +03:00
2658cd323b cached-request-6
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-19 19:26:15 +03:00
c90e68d3ae cached-request-5
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-19 19:22:26 +03:00
fa79164ca8 cached-request-4
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-19 19:17:01 +03:00
4c119abbea cached-request-3
Some checks failed
deploy / deploy (push) Failing after 58s
2023-12-19 19:16:42 +03:00
92791efa9c cached-request-2
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-19 19:04:35 +03:00
6c7f269206 cached-request
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-19 18:58:26 +03:00
b141c26e80 sync-precache
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-19 18:31:31 +03:00
a0d111c50d precache-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-19 18:17:17 +03:00
95a237f349 precache-fix
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-19 18:13:37 +03:00
531eb1f028 precache-authors-2
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-19 17:43:17 +03:00
85a40eba83 less-logs
All checks were successful
deploy / deploy (push) Successful in 1m17s
2023-12-19 17:39:53 +03:00
afe96fc909 double-debug
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-19 17:30:27 +03:00
8c1f52f99b author-getting-fix
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-19 17:24:52 +03:00
d75f31072c author-id-fix
All checks were successful
deploy / deploy (push) Successful in 1m6s
2023-12-19 17:10:42 +03:00
e2bcffdc4c create-chat-debug-2 2023-12-19 12:46:57 +03:00
6f478a1d16 create-chat-debug 2023-12-19 12:39:36 +03:00
c3333c41a7 user-field-fix
All checks were successful
deploy / deploy (push) Successful in 1m2s
2023-12-19 11:51:36 +03:00
65eaa7b6cb less-logs
Some checks failed
deploy / deploy (push) Failing after 1m0s
2023-12-19 11:33:49 +03:00
be7bf90f0b authors-cache-fix
Some checks are pending
deploy / deploy (push) Waiting to run
2023-12-19 11:31:37 +03:00
c3a6ecd3ae precached-authors-fix
Some checks are pending
deploy / deploy (push) Waiting to run
2023-12-19 11:25:06 +03:00
b2040099a8 loadchats-fix
All checks were successful
deploy / deploy (push) Successful in 1m6s
2023-12-18 22:33:40 +03:00
9d1a4e90c9 load-recipients-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m11s
2023-12-18 21:32:49 +03:00
93c1727be3 load-recipients-fix
All checks were successful
deploy / deploy (push) Successful in 1m8s
2023-12-18 20:59:20 +03:00
83fba058ab authors-all-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-18 20:38:31 +03:00
ef0f5168e1 authors-all-fix
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-18 20:30:05 +03:00
876087c528 core-connectors-upgrade
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-18 10:24:42 +03:00
284a69085f auth-upgrade
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-18 10:17:50 +03:00
b1b7bf4dc2 context-fix
All checks were successful
deploy / deploy (push) Successful in 1m4s
2023-12-18 10:05:48 +03:00
f15f14ecd9 load-recpnts-fix
All checks were successful
deploy / deploy (push) Successful in 1m10s
2023-12-18 04:29:37 +03:00
6023cfb4c2 unread-counter-fix
All checks were successful
deploy / deploy (push) Successful in 1m14s
2023-12-18 03:15:59 +03:00
5c0621dc9b get-author-fix-9
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-18 03:09:07 +03:00
f19ff47e99 get-author-fix-8
All checks were successful
deploy / deploy (push) Successful in 1m17s
2023-12-18 03:04:25 +03:00
2db82eddfd get-author-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m2s
2023-12-18 03:01:46 +03:00
b3c01abd37 get-author-fix-6
Some checks failed
deploy / deploy (push) Failing after 5s
2023-12-18 02:58:07 +03:00
35e68097d8 get-author-fix-5
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-18 02:57:14 +03:00
15bbe8eb9d get-author-fix-4
Some checks failed
deploy / deploy (push) Failing after 5s
2023-12-18 02:56:17 +03:00
0091acd0ec get-author-fix-3
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-18 02:55:49 +03:00
eb6ccdc481 get-author-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m1s
2023-12-18 02:53:25 +03:00
f20f5adedf get-author-fix
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-18 02:45:31 +03:00
bc8ed964ec debug-auth-2
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-18 02:36:53 +03:00
3b08d5d6c7 debug-auth
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-18 02:34:10 +03:00
65a6d534c6 auth-debug
All checks were successful
deploy / deploy (push) Successful in 1m8s
2023-12-18 02:22:51 +03:00
40ac53d32d traceback-debug
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-18 02:14:02 +03:00
a11ee74fc3 print-request-auth
All checks were successful
deploy / deploy (push) Successful in 1m16s
2023-12-18 01:47:44 +03:00
a402af4590 core-connector-fix
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-18 00:54:44 +03:00
844f32f204 auth-req-fix
All checks were successful
deploy / deploy (push) Successful in 1m5s
2023-12-18 00:10:29 +03:00
3cccf97198 core-connector-fix
All checks were successful
deploy / deploy (push) Successful in 1m6s
2023-12-18 00:01:53 +03:00
bc8a07e619 autho-connector-fix
All checks were successful
deploy / deploy (push) Successful in 1m3s
2023-12-17 23:40:40 +03:00
517de93ccd sorted
All checks were successful
deploy / deploy (push) Successful in 1m11s
2023-12-17 20:13:17 +03:00
bb48a8ef11 auth-connector
All checks were successful
deploy / deploy (push) Successful in 1m15s
2023-12-14 01:08:47 +03:00
cc02711ab3 no-services-2
All checks were successful
deploy / deploy (push) Successful in 1m16s
2023-12-02 14:04:04 +03:00
ab3c271a6b no-services
Some checks failed
deploy / deploy (push) Failing after 1m13s
2023-12-02 13:59:25 +03:00
905d03b9ed feat: with sdl resolver in inbox.graphql
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-01 23:06:25 -03:00
28 changed files with 750 additions and 619 deletions

deploy workflow

@@ -14,9 +14,22 @@ jobs:
         id: repo_name
         run: echo "::set-output name=repo::$(echo ${GITHUB_REPOSITORY##*/})"
-      - name: Push to dokku
+      - name: Get Branch Name
+        id: branch_name
+        run: echo "::set-output name=branch::$(echo ${GITHUB_REF##*/})"
+      - name: Push to dokku for main branch
+        if: github.ref == 'refs/heads/main'
         uses: dokku/github-action@master
         with:
           branch: 'main'
-          git_remote_url: 'ssh://dokku@staging.discours.io:22/${{ steps.repo_name.outputs.repo }}'
+          git_remote_url: 'ssh://dokku@v2.discours.io:22/inbox'
+          ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
+      - name: Push to dokku for staging branch
+        if: github.ref == 'refs/heads/dev'
+        uses: dokku/github-action@master
+        with:
+          branch: 'main'
+          git_remote_url: 'ssh://dokku@staging.discours.io:22/inbox'
           ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}

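A side note on the workflow above: the ::set-output command it relies on was deprecated by GitHub Actions in favor of writing to the $GITHUB_OUTPUT environment file, and Gitea Actions follows the same syntax. A hedged modern equivalent of the branch step would be:

- name: Get Branch Name
  id: branch_name
  run: echo "branch=${GITHUB_REF##*/}" >> "$GITHUB_OUTPUT"
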
.gitignore

@@ -5,3 +5,4 @@ __pycache__
 .vscode
 poetry.lock
 .venv
+.ruff_cache

.pre-commit-config.yaml (new file)

@@ -0,0 +1,20 @@
+fail_fast: true
+
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: check-yaml
+      - id: check-toml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+      - id: check-added-large-files
+      - id: detect-private-key
+      - id: check-ast
+      - id: check-merge-conflict
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.5
+    hooks:
+      - id: ruff
+        args: [--fix]

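These hooks run client-side; to activate them after cloning, the standard pre-commit workflow applies:

pre-commit install          # register the git hook
pre-commit run --all-files  # one-off lint of the whole tree
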
CHANGELOG

@@ -1,11 +1,47 @@
+[0.3.2]
+- added custom logger
+- auth logix synced with core
+- added httpx
+- aiohttp and requests removed
+- core adapter loads data from redis now
+- authors cachestorage removed
+
+[0.3.1]
+- glitchtip connect
+
+[0.3.0]
+- versions updates
+- startup refactoring
+- auth update
+- pre-commit fix
+
+[0.2.21]
+- replace uvicorn with granian
+- pre-commit hook installed
+- app.json with healthchecks used
+
+[0.2.20]
+- added logger
+- typing revision
+
+[0.2.19]
+- fix: stripping user_id
+
+[0.2.18]
+- services: auth updated
+- services: core updated
+
 [0.2.17]
 - httpx -> aiohttp
 - sentry integrations

 [0.2.16]
 - resolvers: snake case queries and mutations
 - resolvers: fix auth context usage with string user_id

 [0.2.15]
 - chore: schema service removed
@@ -23,34 +59,41 @@
 - uses official redis[hiredis] async
 - datetime scalar removed

 [0.2.12]
 - sigil is back for test

 [0.2.11]
 - does not need sigil with schema stitching

 [0.2.10]
 - middlwares removed
 - orm removed
 - added core api connector
 - nginx.conf.sigil fixed

 [0.2.9]
 - starlette is back
 - auth middleware
 - create first chat with member by id = 1 if empty smembers chats_by_author/author_id

 [0.2.8]
 - sse removed to presence service
 - bugfixes
 - pydantic removed as not used

 [0.2.7]
 - search messages fix
 - context with author_id fix
 - redis pubsub new_message event announce
 - sse new_message events broadcast

 [0.2.6]
 - authors / members / users terms revision
 - auth service connection

Dockerfile

@@ -1,22 +1,12 @@
-# Use an official Python runtime as a parent image
-FROM python:slim
-# Set the working directory in the container to /app
+FROM python:alpine3.18
 WORKDIR /app
-# Add metadata to the image to describe that the container is listening on port 80
-EXPOSE 80
-# Copy the current directory contents into the container at /app
 COPY . /app
-# Install any needed packages specified in pyproject.toml
-RUN apt-get update && apt-get install -y gcc curl && \
-    curl -sSL https://install.python-poetry.org | python - && \
-    echo "export PATH=$PATH:/root/.local/bin" >> ~/.bashrc && \
-    . ~/.bashrc && \
-    poetry config virtualenvs.create false && \
-    poetry install --no-dev
-# Run server.py when the container launches
+RUN apk update && apk add --no-cache git gcc curl
+RUN curl -sSL https://install.python-poetry.org | python
+ENV PATH="${PATH}:/root/.local/bin"
+RUN poetry config virtualenvs.create false && poetry install --without dev --no-root
+EXPOSE 8000
 CMD ["python", "server.py"]

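For a local smoke test of this image (the tag name below is illustrative, not from the repo, and the container still needs a reachable Redis plus the settings the app reads at startup):

docker build -t discoursio-inbox .
docker run --rm -p 8000:8000 discoursio-inbox
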
__init__.py (new file)

@@ -0,0 +1,6 @@
+import os
+import sys
+
+# Get the path to the project root directory
+root_path = os.path.abspath(os.path.dirname(__file__))
+sys.path.append(root_path)

app.json (new file)

@@ -0,0 +1,13 @@
+{
+  "healthchecks": {
+    "web": [
+      {
+        "type": "startup",
+        "name": "web check",
+        "description": "Checking if the app responds to the GET /",
+        "path": "/",
+        "attempts": 3
+      }
+    ]
+  }
+}

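Dokku reads app.json at deploy time: with this startup healthcheck, a release only goes live once GET / answers within 3 attempts, which is what several of the failing-then-successful deploy runs in the commit list above were tuning. The check is roughly equivalent to the following, with the port assumed from the Dockerfile's EXPOSE 8000:

curl -fsS http://localhost:8000/   # -f makes a non-2xx response exit non-zero
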
inbox.graphql

@@ -1,9 +1,3 @@
-# duffok 2023-12-01
-# add sdl to _Service
-type _Service {
-  sdl: String
-}
-
 enum MessageStatus {
   NEW
   UPDATED
@@ -70,9 +64,9 @@ type Query {
   # inbox
   load_chats(limit: Int, offset: Int): ChatResult! # your chats
   load_messages_by(by: MessagesBy!, limit: Int, offset: Int): ChatResult!
-  load_recipients(limit: Int, offset: Int): ChatResult!
   search_recipients(query: String!, limit: Int, offset: Int): ChatResult!
   search_messages(by: MessagesBy!, limit: Int, offset: Int): ChatResult!
-  # _service: _Service!
 }

 type Message {

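With the GraphQL endpoint mounted at / (see the main.py diff below), the remaining queries can be exercised directly. A hedged example follows; ChatResult's exact fields are not shown in this hunk, so the selection set is an assumption based on the dicts the resolvers return:

query {
  load_chats(limit: 50, offset: 0) {
    error
    chats {
      id
      title
      unread
    }
  }
}
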
main.py

@@ -4,49 +4,38 @@ from os.path import exists

 from ariadne import load_schema_from_path, make_executable_schema
 from ariadne.asgi import GraphQL
-from sentry_sdk.integrations.aiohttp import AioHttpIntegration
-from sentry_sdk.integrations.ariadne import AriadneIntegration
-from sentry_sdk.integrations.redis import RedisIntegration
 from starlette.applications import Starlette
+from starlette.routing import Route

-from services.schema import resolvers
+from services.logger import root_logger as logger
 from services.rediscache import redis
-from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, MODE
+from services.schema import resolvers
+from services.sentry import start_sentry
+from settings import DEV_SERVER_PID_FILE_NAME, MODE

 import_module("resolvers")
-schema = make_executable_schema(load_schema_from_path("inbox.graphql"), resolvers)  # type: ignore
+schema = make_executable_schema(load_schema_from_path("inbox.graphql"), resolvers)


-async def start_up():
-    if MODE == "dev":
-        if exists(DEV_SERVER_PID_FILE_NAME):
-            await redis.connect()
-            return
-        else:
+async def start():
+    if MODE == "development":
+        if not exists(DEV_SERVER_PID_FILE_NAME):
+            # pid file management
             with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
                 f.write(str(os.getpid()))
-    else:
-        await redis.connect()
-        try:
-            import sentry_sdk
-            sentry_sdk.init(
-                SENTRY_DSN,
-                enable_tracing=True,
-                integrations=[
-                    AriadneIntegration(),
-                    RedisIntegration(),
-                    AioHttpIntegration(),
-                ],
-            )
-        except Exception as e:
-            print("[sentry] init error")
-            print(e)
+    logger.info(f"process started in {MODE} mode")


-async def shutdown():
-    await redis.disconnect()
-
-
-app = Starlette(debug=True, on_startup=[start_up], on_shutdown=[shutdown])
-app.mount("/", GraphQL(schema, debug=True))
+# main starlette app object with ariadne mounted in root
+app = Starlette(
+    on_startup=[
+        redis.connect,
+        start_sentry,
+        start,
+    ],
+    on_shutdown=[redis.disconnect],
+    debug=True,
+    routes=[
+        Route("/", GraphQL(schema, debug=True)),
+    ],
+)

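Since the new app object serves GraphQL on the root route, a minimal smoke test can reuse httpx, which is already a project dependency (the URL and port here are assumptions):

import httpx

# any GraphQL endpoint can answer a bare __typename probe
resp = httpx.post("http://localhost:8000/", json={"query": "{ __typename }"})
print(resp.status_code, resp.json())
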
models/chat.py

@@ -1,4 +1,4 @@
-from typing import TypedDict, Optional, List
+from typing import List, Optional, TypedDict

 from models.member import ChatMember
 from models.message import Message
@@ -25,7 +25,7 @@ class ChatPayload(TypedDict):
     created_by: int
     description: Optional[str]
     messages: Optional[List[Message]]
-    unread: Optional[List[int]]
+    unread: Optional[int]  # counter


 class ChatUpdate(TypedDict):

models/member.py

@@ -1,4 +1,4 @@
-from typing import TypedDict, Optional
+from typing import Optional, TypedDict


 class ChatMember(TypedDict):

models/message.py

@@ -1,4 +1,4 @@
-from typing import TypedDict, Optional
+from typing import Optional, TypedDict


 class Message(TypedDict):

nginx.conf.sigil

@@ -9,10 +9,7 @@
 {{ $cors_headers_get := "if ($request_method = 'GET') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}

 # Mapping for allowed origins
-map $http_origin $allow_origin {
-    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io)$ $http_origin;
-    default "";
-}
+# is moved in Global nginx config file /etc/nginx/nginx.conf

 # Server block setup
 {{ range $port_map := .PROXY_PORT_MAP | split " " }}
@@ -54,23 +51,6 @@ server {
     {{ $cors_headers_get }}
 }

-# Custom location block for /connect
-location /connect/ {
-    proxy_pass http://presence-8080/;
-    add_header 'Cache-Control' 'no-cache';
-    add_header 'Content-Type' 'text/event-stream';
-    add_header 'Connection' 'keep-alive';
-    proxy_set_header Upgrade $http_upgrade;
-    proxy_set_header Connection "upgrade";
-    proxy_buffering off;
-    proxy_cache off;
-    proxy_read_timeout 36000s;
-    {{ $proxy_settings }}
-    {{ $cors_headers_options }}
-    {{ $cors_headers_post }}
-    {{ $cors_headers_get }}
-}

 # Error pages
 error_page 400 401 402 403 405 406 407 408 409 410 411 412 413 414 415 416 417 418 420 422 423 424 426 428 429 431 444 449 450 451 /400-error.html;
@@ -97,7 +77,8 @@ server {
     internal;
 }

-include /home/dokku/gateway/nginx.conf.d/*.conf;
+# include /home/dokku/gateway/nginx.conf.d/*.conf;
+include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
 }
 {{ end }}

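The $allow_origin variable is still used by the CORS header blocks kept above, so the deleted map must now exist globally, per the diff's own comment. Presumably it is the same block, moved into the http { } context of /etc/nginx/nginx.conf:

map $http_origin $allow_origin {
    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io)$ $http_origin;
    default "";
}
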
pyproject.toml

@@ -1,71 +1,25 @@
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
-
 [tool.poetry]
 name = "discoursio-inbox"
-version = "0.2.15"
+version = "0.3.2"
 description = "Inbox server for discours.io"
 authors = ["Tony Rewin <anton.rewin@gmail.com>"]

 [tool.poetry.dependencies]
 python = "^3.12"
-sentry-sdk = "^1.32.0"
-redis = { extras = ["hiredis"], version = "^5.0.1" }
-ariadne = "^0.21"
-starlette = "^0.32"
-uvicorn = "^0.24"
+sentry-sdk = "^1.44.1"
+ariadne = "^0.23.0"
+starlette = "^0.37.2"
 itsdangerous = "^2.1.2"
-aiohttp = "^3.9.1"
+requests = "^2.31.0"
+granian = "^1.2.1"
+colorlog = "^6.8.2"
+httpx = "^0.27.0"
+redis = {version = "^5.0.3", extras = ["async"]}

-[tool.poetry.dev-dependencies]
-pytest = "^7.4.2"
-black = { version = "^23.9.1", python = ">=3.12" }
-ruff = { version = "^0.1.0", python = ">=3.12" }
+[tool.poetry.group.dev.dependencies]
+pre-commit = "^3.6.0"
+ruff = "^0.3.5"

-[tool.black]
-line-length = 120
-target-version = ['py312']
-include = '\.pyi?$'
-exclude = '''
-(
-  /(
-      \.eggs         # exclude a few common directories in the
-    | \.git          # root of the project
-    | \.hg
-    | \.mypy_cache
-    | \.tox
-    | \.venv
-    | _build
-    | buck-out
-    | build
-    | dist
-  )/
-  | foo.py           # also separately exclude a file named foo.py in
-                     # the root of the project
-)
-'''
-
-[tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-ensure_newline_before_comments = true
-line_length = 120
-
-[tool.ruff]
-# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
-# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
-# McCabe complexity (`C901`) by default.
-select = ["E4", "E7", "E9", "F"]
-ignore = []
-line-length = 120
-target-version = "py312"
-
-[tool.pyright]
-venvPath = "."
-venv = ".venv"
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"

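Moving dev tooling into a Poetry 1.2+ dependency group is what makes the new Dockerfile's install line valid; the same flags work locally:

poetry install --without dev --no-root   # exactly what the Dockerfile now runs
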
resolvers/__init__.py

@@ -1,12 +1,12 @@
 from resolvers.chats import create_chat, delete_chat, update_chat
-from resolvers.load import load_chats, load_messages_by, load_recipients
+from resolvers.load import load_chats, load_messages_by
 from resolvers.messages import (
     create_message,
     delete_message,
-    update_message,
     mark_as_read,
+    update_message,
 )
-from resolvers.search import search_recipients, search_messages
+from resolvers.search import search_messages, search_recipients

 __all__ = [
     # inbox
@@ -19,7 +19,6 @@ __all__ = [
     "delete_message",
     "update_message",
     "mark_as_read",
-    "load_recipients",
     "search_recipients",
     "search_messages",
 ]

resolvers/chats.py

@@ -1,12 +1,16 @@
 import json
-import uuid
+import logging
 import time
+import uuid

+from models.chat import Chat, ChatUpdate
 from services.auth import login_required
+from services.presence import notify_chat
 from services.rediscache import redis
 from services.schema import mutation
-from models.chat import Chat, ChatUpdate
-from services.presence import notify_chat
+
+logger = logging.getLogger("[resolvers.chats] ")
+logger.setLevel(logging.DEBUG)


 @mutation.field("update_chat")
@@ -20,12 +24,13 @@ async def update_chat(_, info, chat_new: ChatUpdate):
     :param chat_new: dict with chat data
     :return: Result { error chat }
     """
+    logger.info("update_chat")
     author_id = info.context["author_id"]
     chat_id = chat_new["id"]
     chat_str = await redis.execute("GET", f"chats/{chat_id}")
     if not chat_str:
         return {"error": "chat not exist"}
-    else:
+    elif isinstance(chat_str, str):
         chat: Chat = json.loads(chat_str)
         if author_id in chat["admins"]:
             chat.update(
@@ -48,24 +53,26 @@ async def update_chat(_, info, chat_new: ChatUpdate):
 @mutation.field("create_chat")
 @login_required
 async def create_chat(_, info, title="", members=None):
-    if members is None:
-        members = []
+    logger.info("create_chat")
+    members = members or []
     author_id = info.context["author_id"]
-    print("create_chat members: %r" % members)
-    if author_id:
+    chat: Chat
     if author_id not in members:
         members.append(int(author_id))

     # NOTE: private chats has no title
     # reuse private chat created before if exists
     if len(members) == 2 and title == "":
-        chatset1 = set((await redis.execute("SMEMBERS", f"chats_by_author/{members[0]}")) or [])
-        chatset2 = set((await redis.execute("SMEMBERS", f"chats_by_author/{members[1]}")) or [])
+        chatset1 = await redis.execute("SMEMBERS", f"chats_by_author/{members[0]}")
+        chatset2 = await redis.execute("SMEMBERS", f"chats_by_author/{members[1]}")
+        if isinstance(chatset1, set) and isinstance(chatset2, set):
             for c in chatset1.intersection(chatset2):
-                chat_data = await redis.execute("GET", f"chats/{c}")
-                if chat_data:
-                    chat = json.loads(chat_data)
+                chat_result = await redis.execute("GET", f"chats/{c}")
+                if chat_result:
+                    chat = json.loads(chat_result)
                     if chat["title"] == "":
-                        print("[inbox] createChat found old chat")
+                        logger.info("[inbox] createChat found old chat")
                         return {"chat": chat, "error": "existed"}

     chat_id = str(uuid.uuid4())
@@ -90,20 +97,20 @@ async def create_chat(_, info, title="", members=None):
     await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))

     return {"error": None, "chat": chat}
+    return {"error": "no chat was created"}


 @mutation.field("delete_chat")
 @login_required
 async def delete_chat(_, info, chat_id: str):
+    logger.info("delete_chat")
     author_id = info.context["author_id"]
     chat_str = await redis.execute("GET", f"chats/{chat_id}")
-    if chat_str:
+    if isinstance(chat_str, str):
         chat: Chat = json.loads(chat_str)
         if author_id in chat["admins"]:
             await redis.execute("DEL", f"chats/{chat_id}")
             await redis.execute("SREM", f"chats_by_author/{author_id}", chat_id)
             for member_id in chat["members"]:
                 await notify_chat(chat, member_id, "delete")
-    else:
         return {"error": "chat not exist"}

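A pattern worth noting across these resolver diffs: every redis.execute() result is now guarded with isinstance() before use, which implies the wrapper swallows errors and returns None instead of raising. services/rediscache.py itself is not part of this compare, so the following is only a sketch of the assumed shape, not the project's actual code:

import redis.asyncio as aredis  # redis>=4.2 ships the asyncio client


class RedisCache:
    def __init__(self, uri: str = "redis://127.0.0.1:6379"):
        self._client = aredis.Redis.from_url(uri, decode_responses=True)

    async def connect(self):
        # from_url connects lazily; a PING forces and verifies the connection
        await self._client.ping()

    async def disconnect(self):
        await self._client.aclose()

    async def execute(self, command: str, *args):
        try:
            return await self._client.execute_command(command, *args)
        except Exception:
            return None  # callers guard with isinstance(), as the diffs above do


redis = RedisCache()
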
resolvers/load.py

@@ -1,47 +1,63 @@
 import asyncio
 import json
+import logging
 from typing import Any, Dict, List, Optional, Union

+from models.chat import ChatPayload
+from resolvers.chats import create_chat
 from services.auth import login_required
-from services.core import get_my_followed, get_all_authors
+from services.core import get_author_by_id
 from services.rediscache import redis
 from services.schema import query
-from models.chat import Message, ChatPayload
-from models.member import ChatMember
-from resolvers.chats import create_chat
+
+logger = logging.getLogger("[resolvers.load] ")
+logger.setLevel(logging.DEBUG)


 async def get_unread_counter(chat_id: str, member_id: int) -> int:
     unread = await redis.execute("LLEN", f"chats/{chat_id}/unread/{member_id}")
-    return unread or 0
+    if isinstance(unread, int):
+        return unread
+    else:
+        return 0


 # NOTE: not an API handler
 async def load_messages(
     chat_id: str, limit: int = 5, offset: int = 0, ids: Optional[List[int]] = None
-) -> List[Message | None]:
+):
     """load :limit messages for :chat_id with :offset"""
+    logger.info("load_messages")
     messages = []
     try:
         message_ids = [] + (ids or [])
         if limit:
-            mids = (await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit)) or []
-            message_ids += mids
+            mids = await redis.execute(
+                "LRANGE", f"chats/{chat_id}/message_ids", offset, offset + limit
+            )
+            if isinstance(mids, list):
+                message_ids.extend(mids)
         if message_ids:
             message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
-            messages = (await redis.mget(*message_keys)) or []
-            messages = [json.loads(m) if isinstance(m, str) else m for m in messages]
+            messages = await redis.execute("MGET", *message_keys)
+            if isinstance(messages, list):
+                messages = [
+                    json.loads(m) if isinstance(m, str) else m for m in messages
+                ]
             replies = []
             for m in messages:
                 if m:
-                    reply_to = m.get("replyTo")
+                    reply_to = m.get("reply_to")
                     if reply_to:
                         reply_to = int(reply_to)
                         if reply_to not in message_ids:
                             replies.append(reply_to)
             if replies:
-                messages += await load_messages(chat_id, offset, limit, replies)
-    except Exception:
+                more_messages = await load_messages(chat_id, offset, limit, replies)
+                if isinstance(more_messages, list):
+                    messages.extend(more_messages)
+    except Exception as ex:
+        logger.error(ex)
         import traceback

         traceback.print_exc()
@@ -50,37 +66,75 @@ async def load_messages(
 @query.field("load_chats")
 @login_required
-async def load_chats(_, info, limit: int = 50, offset: int = 0) -> Dict[str, Union[List[Dict[str, Any]], None]]:
+async def load_chats(
+    _, info, limit: int = 50, offset: int = 0
+) -> Dict[str, Union[List[Dict[str, Any]], None]]:
     """load :limit chats of current user with :offset"""
+    logger.info("load_chats")
     author_id = info.context["author_id"]
-    cids = (await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")) or []
-    members_online = (await redis.execute("SMEMBERS", "authors-online")) or []
-    cids = list(cids)[offset : (offset + limit)]
     chats = []
-    lock = asyncio.Lock()
-    if len(cids) == 0:
-        print(f"[resolvers.load] no chats for user with id={author_id}")
-        r = await create_chat(None, info, members=[2])  # member with id = 2 is discours
-        print(f"[resolvers.load] created chat: {r['chat_id']}")
-        cids.append(r["chat"]["id"])
-    all_authors: List[ChatMember] = await get_all_authors()
-    authors = {a["id"]: a for a in all_authors}
-    for cid in cids:
-        async with lock:
-            chat_str = await redis.execute("GET", f"chats/{cid}")
-            print(f"[resolvers.load] redis GET by {cid}: {chat_str}")
-            if chat_str:
-                c: ChatPayload = json.loads(chat_str)
-                c["messages"] = await load_messages(cid, 5, 0)
-                c["unread"] = await get_unread_counter(cid, author_id)
-                member_ids = c["members"].copy()
-                c["members"] = []
-                for member_id in member_ids:
-                    a = authors.get(member_id)
-                    if a:
-                        a["online"] = a.get("id") in members_online
-                        c["members"].append(a)
-                chats.append(c)
+    try:
+        if author_id:
+            logger.debug(f"got author {author_id}")
+            cids = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
+            logger.debug(f"got cids {cids}")
+            members_online = (
+                await redis.execute("SMEMBERS", "authors-online")
+            ) or []  # to show online status
+            logger.debug(f"members online: {members_online}")
+            if isinstance(cids, set):
+                # TODO: add sort by chat.created_at with in-memory caching chats service
+                cids = list(cids)[offset : (offset + limit)]
+                lock = asyncio.Lock()
+                if len(cids) == 0:
+                    logger.debug(f"no chats for user with id={author_id}")
+                    r = await create_chat(
+                        None, info, members=[2]
+                    )  # member with id = 2 is discours
+                    if (
+                        isinstance(r, dict)
+                        and "chat" in r
+                        and isinstance(r["chat"], dict)
+                    ):
+                        chat_id = r["chat"].get("id")
+                        if chat_id:
+                            logger.debug(f"created chat: {chat_id}")
+                            cids.append(chat_id)
+                logger.debug(f"getting data for {len(cids)} user's chats")
+                for cid in cids:
+                    async with lock:
+                        chat_str = await redis.execute("GET", f"chats/{cid}")
+                        if isinstance(chat_str, str):
+                            logger.debug(f"redis GET by {cid}: {chat_str}")
+                            c: ChatPayload = json.loads(chat_str)
+                            c["messages"] = (await load_messages(cid, 5, 0)) or []
+                            c["unread"] = await get_unread_counter(cid, author_id)
+                            member_ids = c["members"].copy()
+                            c["members"] = []
+                            for member_id in member_ids:
+                                if isinstance(member_id, int):
+                                    a = await get_author_by_id(int(member_id))
+                                    if a:
+                                        a["online"] = a.get("id") in members_online
+                                        c["members"].append(a)
+                                    else:
+                                        logger.error(
+                                            f"cant find author by id {member_id}"
+                                        )
+                                elif (
+                                    "members" in member_id
+                                    and member_id not in c["members"]
+                                ):
+                                    c["members"].append(member_id)
+                            chats.append(c)
+                        else:
+                            logger.error(f"cant find chat by id {cid}")
+    except Exception:
+        import traceback
+
+        traceback.print_exc()
     return {"chats": chats, "error": None}
@@ -88,45 +142,33 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0) -> Dict[str, Uni
 @login_required
 async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
     """load :limit messages of :chat_id with :offset"""
+    logger.info("load_messages_by")
     author_id = info.context["author_id"]
-    user_chats = (await redis.execute("SMEMBERS", "chats_by_author/" + str(author_id))) or []
-    user_chats = [c for c in user_chats]
-    if user_chats:
-        messages = []
-        by_chat = by.get("chat")
-        if by_chat in user_chats:
-            chat = await redis.execute("GET", f"chats/{by_chat}")
-            if not chat:
-                return {"messages": [], "error": "chat not exist"}
-            # everyone's messages in filtered chat
-            messages = await load_messages(by_chat, limit, offset)
-            return {
-                "messages": sorted(
-                    [m for m in messages if m.get("created_at")],
-                    key=lambda m: m.get("created_at"),
-                ),
-                "error": None,
-            }
-
-
-@query.field("load_recipients")
-async def load_recipients(_, _info, limit=50, offset=0):
-    """load possible chat participants"""
-    onliners = (await redis.execute("SMEMBERS", "authors-online")) or []
-    r = []
-    all_authors: List[ChatMember] = await get_all_authors()
-    my_followings: List[ChatMember] = await get_my_followed()
-    if all_authors:
-        if len(my_followings) < limit:
-            my_followings = my_followings + all_authors[0 : limit - len(my_followings)]
-        for a in my_followings:
-            a["online"] = a["id"] in onliners
-            r.append(a)
-    # NOTE: maybe sort members here
-    print(f"[resolvers.load] loadRecipients found {len(r)} members")
-    return {"members": r, "error": None}
+    author_chats = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
+    try:
+        if isinstance(author_chats, set):
+            author_chats = list(author_chats)
+            messages = []
+            by_chat = by.get("chat")
+            if by_chat in author_chats:
+                chat = await redis.execute("GET", f"chats/{by_chat}")
+                if not chat:
+                    return {"messages": [], "error": "chat not exist"}
+                # everyone's messages in filtered chat
+                messages = await load_messages(by_chat, limit, offset)
+                if isinstance(messages, list):
+                    sorted_messages = [m for m in messages if m and m.get("created_at")]
+                    return {
+                        "messages": sorted(
+                            sorted_messages,
+                            key=lambda m: m.get("created_at"),
+                        ),
+                        "error": None,
+                    }
+        else:
+            return {"error": "Cannot access messages of this chat"}
+    except Exception as exc:
+        logger.error(exc)
+        import traceback
+
+        traceback.print_exc()
+        return {"error": "Cannot get messages of this chat"}

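load_chats above also swaps the one-shot get_all_authors() lookup table for per-member get_author_by_id() calls. services/core.py is outside this compare; given the changelog note "core adapter loads data from redis now", its shape is presumably close to the sketch below (the import path and key format are guesses):

import json

from services.rediscache import redis  # assumed import path


async def get_author_by_id(author_id: int):
    raw = await redis.execute("GET", f"author:id:{author_id}")  # key format is a guess
    return json.loads(raw) if isinstance(raw, str) else None
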
resolvers/messages.py

@@ -1,11 +1,15 @@
 import json
+import logging
 import time

+from models.chat import Message
 from services.auth import login_required
 from services.presence import notify_message
 from services.rediscache import redis
 from services.schema import mutation
-from models.chat import Message
+
+logger = logging.getLogger("[resolvers.messages] ")
+logger.setLevel(logging.DEBUG)


 @mutation.field("create_message")
@@ -16,20 +20,22 @@ async def create_message(_, info, chat_id: str, body: str, reply_to=None):
     # Fetch the chat data from Redis
     chat_data = await redis.execute("GET", f"chats/{chat_id}")
-    print(f"[resolvers.messages] debug chat data: {chat_data}")
+    logger.debug(f"chat data: {chat_data}")

     # If there is no chat data, return an error
     if not chat_data:
         return {"error": "chat is not exist"}
-    else:
+    elif isinstance(chat_data, str):
         # Parse the chat data from a JSON string into a dict
         chat_dict = json.loads(chat_data)
-        print(chat_dict)
+        chat_id = chat_dict["id"]

         # Get the ID for the next message
-        message_id = await redis.execute("GET", f"chats/{chat_dict['id']}/next_message_id")
+        message_id = await redis.execute(
+            "GET", f"chats/{chat_dict['id']}/next_message_id"
+        )
+        if isinstance(message_id, str) or isinstance(message_id, int):
             message_id = int(message_id) if message_id else 0
-        chat_id = chat_dict["id"]
@@ -38,6 +44,7 @@ async def create_message(_, info, chat_id: str, body: str, reply_to=None):
         "body": body,
         "created_at": int(time.time()),
         "updated_at": None,
+        "reply_to": None,
     }

     # If there is a reply, add it to the message
@@ -49,7 +56,7 @@ async def create_message(_, info, chat_id: str, body: str, reply_to=None):
     # Write the updated chat data back to Redis
     await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat_dict))

-    print(f"[inbox] creating message {new_message}")
+    logger.debug(f"creating message {new_message}")

     # Write the new message to Redis
     await redis.execute(
@@ -59,21 +66,30 @@ async def create_message(_, info, chat_id: str, body: str, reply_to=None):
     )

     # Append the new message ID to the chat's message ID list
-    await redis.execute("LPUSH", f"chats/{chat_id}/message_ids", str(message_id))
+    await redis.execute(
+        "LPUSH", f"chats/{chat_id}/message_ids", str(message_id)
+    )

     # Update the next message ID
-    await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(message_id + 1))
+    await redis.execute(
+        "SET", f"chats/{chat_id}/next_message_id", str(message_id + 1)
+    )

     # Add the new message ID to every chat member's unread list
     members = chat_dict["members"]
     for member_id in members:
-        await redis.execute("LPUSH", f"chats/{chat_dict['id']}/unread/{member_id}", str(message_id))
+        await redis.execute(
+            "LPUSH",
+            f"chats/{chat_dict['id']}/unread/{member_id}",
+            str(message_id),
+        )

     # Send a notification about the new message
     new_message["chat_id"] = chat_id
     await notify_message(new_message, "create")

     return {"message": new_message, "error": None}
+    return {"error": "cannot create message"}


 @mutation.field("update_message")
@@ -92,9 +108,7 @@ async def update_message(_, info, message):
     if message_id:
         message = await redis.execute("GET", f"chats/{chat_id}/messages/{message_id}")
-        if not message:
-            return {"error": "message not exist"}
+        if isinstance(message, str):
             message = json.loads(message)
             if message["created_by"] != author_id:
                 return {"error": "access denied"}
@@ -103,15 +117,17 @@ async def update_message(_, info, message):
             message["body"] = body
             message["updated_at"] = int(time.time())

-            await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))
+            await redis.execute(
+                "SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message)
+            )

             # Send a notification
             message["chat_id"] = chat_id
             await notify_message(message, "update")

             return {"message": message, "error": None}
-    else:
-        return {"message": message, "error": "cannot update, no message_id"}
+
+    return {"message": message, "error": "cannot update"}


 @mutation.field("delete_message")
@@ -120,23 +136,26 @@ async def delete_message(_, info, chat_id: str, message_id: int):
     author_id = info.context["author_id"]

     chat_str = await redis.execute("GET", f"chats/{chat_id}")
-    if not chat_str:
-        return {"error": "chat not exist"}
+    if isinstance(chat_str, str):
         chat = json.loads(chat_str)

-    message_data = await redis.execute("GET", f"chats/{chat_id}/messages/{str(message_id)}")
-    if not message_data:
-        return {"error": "message not exist"}
+        message_data = await redis.execute(
+            "GET", f"chats/{chat_id}/messages/{str(message_id)}"
+        )
+        if isinstance(message_data, str):
             message: Message = json.loads(message_data)
-            if message["author"] != author_id:
+            if message["created_by"] != author_id:
                 return {"error": "access denied"}

-    await redis.execute("LREM", f"chats/{chat_id}/message_ids", 0, str(message_id))
+            await redis.execute(
+                "LREM", f"chats/{chat_id}/message_ids", 0, str(message_id)
+            )
             await redis.execute("DEL", f"chats/{chat_id}/messages/{str(message_id)}")

             members = chat["members"]
             for member_id in members:
-                await redis.execute("LREM", f"chats/{chat_id}/unread/{member_id}", 0, str(message_id))
+                await redis.execute(
+                    "LREM", f"chats/{chat_id}/unread/{member_id}", 0, str(message_id)
+                )

             message["chat_id"] = chat_id
             await notify_message(message, "delete")
@@ -150,19 +169,20 @@ async def mark_as_read(_, info, chat_id: str, message_id: int):
     author_id = info.context["author_id"]

     chat_str = await redis.execute("GET", f"chats/{chat_id}")
-    if not chat_str:
-        return {"error": "chat not exist"}
+    if isinstance(chat_str, str):
         chat = json.loads(chat_str)

         members = set(chat["members"])
         if author_id not in members:
             return {"error": "access denied"}

-    await redis.execute("LREM", f"chats/{chat_id}/unread/{author_id}", 0, str(message_id))
+        await redis.execute(
+            "LREM", f"chats/{chat_id}/unread/{author_id}", 0, str(message_id)
+        )

-    message_data = await redis.execute("GET", f"chats/{chat_id}/messages/{str(message_id)}")
-    if not message_data:
-        return {"error": "message not exist"}
+        message_data = await redis.execute(
+            "GET", f"chats/{chat_id}/messages/{str(message_id)}"
+        )
+        if isinstance(message_data, str):
             message: Message = json.loads(message_data)
             await notify_message(message, "seen")

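As the hunks above show, the unread counter is just the length of a per-member Redis list: create_message LPUSHes each new message ID onto chats/<chat_id>/unread/<member_id>, mark_as_read LREMs it, and get_unread_counter in resolvers/load.py reads LLEN. In raw Redis terms, for member 12:

LPUSH chats/<chat_id>/unread/12 "5"     # create_message, new message id 5
LPUSH chats/<chat_id>/unread/12 "6"
LLEN  chats/<chat_id>/unread/12         # -> 2, the value load_chats reports
LREM  chats/<chat_id>/unread/12 0 "6"   # mark_as_read
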
resolvers/search.py

@@ -1,10 +1,9 @@
-import json
-from datetime import datetime, timezone, timedelta
-from typing import Dict, Union, List, Any
+import time
+from typing import Any, Dict, List, Union

 from resolvers.load import load_messages
 from services.auth import login_required
-from services.core import get_all_authors
+from services.core import get_author_by_id
 from services.rediscache import redis
 from services.schema import query
@@ -12,27 +11,24 @@ from services.schema import query
 @query.field("search_recipients")
 @login_required
 async def search_recipients(_, info, text: str, limit: int = 50, offset: int = 0):
-    result = []
+    result = set()
     # TODO: maybe redis scan?
     author_id = info.context["author_id"]
     existed_chats = await redis.execute("SMEMBERS", f"/chats_by_author/{author_id}")
-    authors = await get_all_authors()
-    members = {a["id"]: a for a in authors}
-    if existed_chats:
-        for chat_id in list(json.loads(existed_chats))[offset : (offset + limit)]:
-            members_ids = await redis.execute("GET", f"/chats/{chat_id}/members")
-            for member_id in members_ids:
-                author = members.get(member_id)
-                if author:
-                    if author["name"].startswith(text):
-                        if author not in result:
-                            result.append(author)
-
-    more_amount = limit - len(result)
-    if more_amount > 0:
-        result += authors[0:more_amount]
+    if isinstance(existed_chats, set):
+        chats_list = list(existed_chats)
+        for chat_id in chats_list[offset : (offset + limit)]:
+            members_ids = await redis.execute("SMEMBERS", f"/chats/{chat_id}/members")
+            if isinstance(members_ids, set):
+                for member_id in members_ids:
+                    author = await get_author_by_id(member_id)
+                    if author:
+                        if author["name"].startswith(text):
+                            result.add(author)

     return {"members": list(result), "error": None}
@@ -41,15 +37,12 @@ async def search_recipients(_, info, text: str, limit: int = 50, offset: int = 0
 async def search_messages(
     _, info, by: Dict[str, Union[str, int]], limit: int, offset: int
 ) -> Dict[str, Union[List[Dict[str, Any]], None]]:
+    messages_set = set()
     author_id = info.context["author_id"]
-    lookup_chats = set((await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")) or [])
-    messages_set = set([])
-    by_member = by.get("author")
-    body_like = by.get("body")
-    days_ago = by.get("days")
+    lookup_chats = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
+    if isinstance(lookup_chats, set):
         # pre-filter lookup chats
+        by_member = by.get("author")
         if by_member:
             lookup_chats = filter(
                 lambda ca: by_member in ca["members"],
@@ -59,21 +52,21 @@ async def search_messages(
         # load the messages from lookup chats
         for c in lookup_chats:
             chat_id = c.decode()
-            mmm = await load_messages(chat_id, limit, offset)
+            fltr = None
+            now = int(time.time())
             if by_member:
-                mmm = list(filter(lambda mx: mx["author"] == by_member, mmm))
-            if body_like:
-                mmm = list(filter(lambda mx: body_like in mx["body"], mmm))
+                fltr = lambda mx: mx and mx["created_by"] == by_member  # noqa E731
+            body_like = by.get("body") or ""
+            if isinstance(body_like, str):
+                fltr = lambda mx: mx and body_like in mx["body"]  # noqa E731
+            days_ago = int(by.get("days") or "0")
             if days_ago:
-                mmm = list(
-                    filter(
-                        lambda msg: int(datetime.now(tz=timezone.utc)) - int(msg["created_at"])
-                        < int(timedelta(days=days_ago)),
-                        mmm,
-                    )
-                )
-
-            messages_set.union(set(mmm))
-
-    messages_sorted = sorted(list(messages_set))
-    return {"messages": messages_sorted, "error": None}
+                ts = days_ago * 24 * 60 * 60
+                fltr = lambda mx: mx and now - mx["created_by"] < ts  # noqa E731
+            if fltr:
+                mmm = await load_messages(chat_id, limit, offset)
+                if isinstance(mmm, list):
+                    mmm = list(filter(fltr, mmm))
+                    messages_set |= set(mmm)
+
+    return {"messages": sorted(messages_set), "error": None}

server.py

@@ -1,59 +1,17 @@
-import sys
-
-import uvicorn
-from uvicorn.main import logger
+from granian.constants import Interfaces
+from granian.server import Granian

 from settings import PORT

-log_settings = {
-    "version": 1,
-    "disable_existing_loggers": True,
-    "formatters": {
-        "default": {
-            "()": "uvicorn.logging.DefaultFormatter",
-            "fmt": "%(levelprefix)s %(message)s",
-            "use_colors": None,
-        },
-        "access": {
-            "()": "uvicorn.logging.AccessFormatter",
-            "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
-        },
-    },
-    "handlers": {
-        "default": {
-            "formatter": "default",
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stderr",
-        },
-        "access": {
-            "formatter": "access",
-            "class": "logging.StreamHandler",
-            "stream": "ext://sys.stdout",
-        },
-    },
-    "loggers": {
-        "uvicorn": {"handlers": ["default"], "level": "INFO"},
-        "uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": True},
-        "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False},
-    },
-}
-
-local_headers = [
-    ("Access-Control-Allow-Methods", "GET, POST, OPTIONS, HEAD"),
-    ("Access-Control-Allow-Origin", "https://localhost:3000"),
-    (
-        "Access-Control-Allow-Headers",
-        "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization",
-    ),
-    ("Access-Control-Expose-Headers", "Content-Length,Content-Range"),
-    ("Access-Control-Allow-Credentials", "true"),
-]
-
-
-def exception_handler(_et, exc, _tb):
-    logger.error(..., exc_info=(type(exc), exc, exc.__traceback__))
-
-
 if __name__ == "__main__":
-    sys.excepthook = exception_handler
-    uvicorn.run("main:app", host="0.0.0.0", port=PORT, proxy_headers=True, server_header=True)
+    print("[server] starting...")
+
+    granian_instance = Granian(
+        "main:app",
+        address="0.0.0.0",  # noqa S104
+        port=PORT,
+        threads=2,
+        websockets=False,
+        interface=Interfaces.ASGI,
+    )
+    granian_instance.serve()


@@ -1,36 +1,57 @@
 from functools import wraps
 
-import aiohttp
+import httpx
+from starlette.exceptions import HTTPException
 
-from services.core import get_author
+from services.core import get_author_by_user
+from services.logger import root_logger as logger
 from settings import AUTH_URL
 
 
+async def request_data(gql, headers=None):
+    if headers is None:
+        headers = {"Content-Type": "application/json"}
+    try:
+        async with httpx.AsyncClient() as client:
+            response = await client.post(AUTH_URL, json=gql, headers=headers)
+            if response.status_code == 200:
+                data = response.json()
+                errors = data.get("errors")
+                if errors:
+                    logger.error(f"HTTP Errors: {errors}")
+                else:
+                    return data
+    except Exception as e:
+        # Handling and logging exceptions during authentication check
+        logger.error(f"request_data error: {e}")
+    return None
+
+
 async def check_auth(req):
     token = req.headers.get("Authorization")
-    headers = {"Authorization": token, "Content-Type": "application/json"}  # "Bearer " + removed
-    print(f"[services.auth] checking auth token: {token}")
-
-    query_name = "session"
-    query_type = "query"
-    operation = "GetUserId"
-
-    gql = {
-        "query": query_type + " " + operation + " { " + query_name + " { user { id } } }",
-        "operationName": operation,
-        "variables": None,
-    }
-
-    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30.0)) as session:
-        async with session.post(AUTH_URL, headers=headers, json=gql) as response:
-            print(f"[services.auth] {AUTH_URL} response: {response.status}")
-            if response.status != 200:
-                return False, None
-            r = await response.json()
-            if r:
-                user_id = r.get("data", {}).get(query_name, {}).get("user", {}).get("id", None)
-                is_authenticated = user_id is not None
-                return is_authenticated, user_id
-    return False, None
+    user_id = ""
+    user_roles = []
+    if token:
+        # Logging the authentication token
+        logger.debug(f"{token}")
+        query_name = "validate_jwt_token"
+        operation = "ValidateToken"
+        variables = {"params": {"token_type": "access_token", "token": token}}
+
+        gql = {
+            "query": f"query {operation}($params: ValidateJWTTokenInput!) {{"
+            + f"{query_name}(params: $params) {{ is_valid claims }} "
+            + "}",
+            "variables": variables,
+            "operationName": operation,
+        }
+        data = await request_data(gql)
+        if data:
+            logger.debug(data)
+            user_data = data.get("data", {}).get(query_name, {}).get("claims", {})
+            user_id = user_data.get("sub", "")
+            user_roles = user_data.get("allowed_roles", [])
+    return user_id, user_roles
 
 
 def login_required(f):
@@ -39,15 +60,31 @@ def login_required(f):
         info = args[1]
         context = info.context
         req = context.get("request")
-        is_authenticated, user_id = await check_auth(req)
-        if not is_authenticated:
-            raise Exception("You are not logged in")
-        else:
-            # Add author_id and user_id to the context
-            context["author_id"] = await get_author(user_id)
-            context["user_id"] = user_id
-            # If the user is authenticated, run the resolver
+        user_id, user_roles = await check_auth(req)
+        if user_id and isinstance(user_id, str):
+            context["user_id"] = user_id.strip()
+            author = await get_author_by_user(user_id)
+            if author and "id" in author:
+                context["author_id"] = author["id"]
             return await f(*args, **kwargs)
+        else:
+            raise HTTPException(status_code=401, detail="Unauthorized")
+
+    return decorated_function
+
+
+def auth_request(f):
+    @wraps(f)
+    async def decorated_function(*args, **kwargs):
+        req = args[0]
+        user_id, user_roles = await check_auth(req)
+        if user_id and isinstance(user_id, str):
+            user_id = user_id.strip()
+            author = await get_author_by_user(user_id)
+            if author and "id" in author:
+                req["author_id"] = author["id"]
+            return await f(*args, **kwargs)
+        else:
+            raise HTTPException(status_code=401, detail="Unauthorized")
 
     return decorated_function
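For reference, the rewritten `check_auth` expects the auth service to answer the `ValidateToken` query with a payload shaped roughly as below; the field names are taken from the code above, while the values are illustrative only:

# Assumed response shape for the validate_jwt_token query;
# "sub" becomes user_id and "allowed_roles" becomes user_roles.
example_response = {
    "data": {
        "validate_jwt_token": {
            "is_valid": True,
            "claims": {
                "sub": "user-id-123",
                "allowed_roles": ["reader", "author"],
            },
        }
    }
}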


@@ -1,89 +1,51 @@
-import aiohttp
-
-from settings import API_BASE
-from typing import List, Any
-from models.member import ChatMember
-
-headers = {"Content-Type": "application/json"}
-
-
-async def _request_endpoint(query_name, body) -> Any:
-    async with aiohttp.ClientSession() as session:
-        try:
-            async with session.post(API_BASE, headers=headers, json=body) as response:
-                print(f"[services.core] {query_name}: [{response.status}] {len(await response.text())} bytes")
-                if response.status != 200:
-                    return []
-                r = await response.json()
-                if r:
-                    return r.get("data", {}).get(query_name, {})
-                else:
-                    raise Exception("json response error")
-        except Exception:
-            import traceback
-            traceback.print_exc()
-
-
-async def get_all_authors() -> List[ChatMember]:
-    query_name = "authorsAll"
-    query_type = "query"
-    operation = "AuthorsAll"
-    query_fields = "id slug pic name"
-
-    gql = {
-        "query": query_type + " " + operation + " { " + query_name + " { " + query_fields + " } " + " }",
-        "operationName": operation,
-        "variables": None,
-    }
-
-    return _request_endpoint(query_name, gql)
-
-
-async def get_my_followed() -> List[ChatMember]:
-    query_name = "get_my_followed"
-    query_type = "query"
-    operation = "GetMyFollowed"
-    query_fields = "id slug pic name"
-
-    gql = {
-        "query": query_type + " " + operation + " { " + query_name + " { authors {" + query_fields + "} } " + " }",
-        "operationName": operation,
-        "variables": None,
-    }
-
-    async with AsyncClient() as client:
-        try:
-            response = await client.post(API_BASE, headers=headers, json=gql)
-            print(f"[services.core] {query_name}: [{response.status_code}] {len(response.text)} bytes")
-            if response.status_code != 200:
-                return []
-            r = response.json()
-            if r:
-                return r.get("data", {}).get(query_name, {}).get("authors", [])
-        except Exception:
-            import traceback
-            traceback.print_exc()
-    return []
-
-
-async def get_author(author_id: int = None, slug: str = "", user: str = ""):
-    query_name = "get_author(author_id: $author_id, slug: $slug, user: $user)"
-    query_type = "query"
-    operation = "GetAuthor($author_id: Int, $slug: String, $user: String)"
-    query_fields = "id slug pic name"
-    vars = {}
-    if author_id:
-        vars["author_id"] = author_id
-    elif slug:
-        vars["slug"] = slug
-    elif user:
-        vars["user"] = user
-
-    gql = {
-        "query": query_type + " " + operation + " { " + query_name + " { " + query_fields + "} " + " }",
-        "operationName": operation,
-        "variables": None if vars == {} else vars,
-    }
-
-    return await _request_endpoint(query_name, gql)
+import json
+import logging
+
+from services.logger import root_logger as logger
+from services.rediscache import redis
+
+logger.setLevel(logging.DEBUG)
+
+
+async def get_all_authors():
+    authors = []
+    redis_key = "user:*"
+
+    result = await redis.execute("GET", redis_key)
+    if isinstance(result, str):
+        authors = json.loads(result)
+
+    return authors
+
+
+async def get_author_by_user(user: str):
+    author = None
+    redis_key = f"user:{user}"
+
+    result = await redis.execute("GET", redis_key)
+    if isinstance(result, str):
+        author = json.loads(result)
+
+    return author
+
+
+async def get_author_by_id(author_id: int):
+    author = None
+    redis_key = f"author:{author_id}"
+
+    result = await redis.execute("GET", redis_key)
+    if isinstance(result, str):
+        author = json.loads(result)
+
+    return author
+
+
+async def get_author_followed(author_id: int):
+    authors = []
+    redis_key = f"author:{author_id}:follows-authors"
+
+    result = await redis.execute("GET", redis_key)
+    if isinstance(result, str):
+        authors = json.loads(result)
+
+    return authors
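Note that `get_all_authors` issues a plain `GET` for the key `user:*`. Redis `GET` does not expand wildcards, so as written this only matches a key literally named `user:*`. If the intent is to collect every cached author record, a `SCAN`-based variant along these lines may be closer (a sketch only; it assumes `redis.execute` forwards arguments unchanged to `execute_command` and that responses are decoded to `str`, as the `isinstance` checks above imply):

import json

from services.rediscache import redis

async def get_all_authors_scanned():
    # Hypothetical variant: enumerate matching keys with SCAN,
    # then fetch and decode each author record individually.
    authors = []
    cursor = 0
    while True:
        cursor, keys = await redis.execute("SCAN", cursor, "MATCH", "user:*")
        for key in keys:
            result = await redis.execute("GET", key)
            if isinstance(result, str):
                authors.append(json.loads(result))
        if int(cursor) == 0:
            break
    return authors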

services/logger.py (new file, 81 lines)

@@ -0,0 +1,81 @@
import logging
import colorlog
# Define the color scheme
color_scheme = {
"DEBUG": "light_black",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red,bg_white",
}
# Define secondary log colors
secondary_colors = {
"log_name": {"DEBUG": "blue"},
"asctime": {"DEBUG": "cyan"},
"process": {"DEBUG": "purple"},
"module": {"DEBUG": "light_black,bg_blue"},
"funcName": {"DEBUG": "light_white,bg_blue"}, # Add this line
}
# Define the log format string
fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s"
# Define formatting configuration
fmt_config = {
"log_colors": color_scheme,
"secondary_log_colors": secondary_colors,
"style": "%",
"reset": True,
}
class MultilineColoredFormatter(colorlog.ColoredFormatter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.log_colors = kwargs.pop("log_colors", {})
self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})
def format(self, record):
message = record.getMessage()
if "\n" in message:
lines = message.split("\n")
first_line = lines[0]
record.message = first_line
formatted_first_line = super().format(record)
formatted_lines = [formatted_first_line]
for line in lines[1:]:
formatted_lines.append(line)
return "\n".join(formatted_lines)
else:
return super().format(record)
# Create a MultilineColoredFormatter object for colorized logging
formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
# Create a stream handler for logging output
stream = logging.StreamHandler()
stream.setFormatter(formatter)
def get_colorful_logger(name="main"):
# Create and configure the logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)
return logger
# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)
ignore_logs = ["_trace", "httpx", "_client", "_trace.atrace", "aiohttp", "_client"]
for lgr in ignore_logs:
loggr = logging.getLogger(lgr)
loggr.setLevel(logging.INFO)
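Consumers elsewhere in this changeset use the module through a one-line import; a minimal example:

from services.logger import root_logger as logger

logger.debug("checking auth token")  # grey, prefixed with [module.funcName]
logger.error("failed to publish")    # red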


@@ -1,7 +1,8 @@
 import json
 
+from models.chat import ChatUpdate, Message
+from services.logger import root_logger as logger
 from services.rediscache import redis
-from models.chat import Message, ChatUpdate
 
 
 async def notify_message(message: Message, action="create"):
@@ -9,9 +10,9 @@ async def notify_message(message: Message, action="create"):
     data = {"payload": message, "action": action}
     try:
         await redis.publish(channel_name, json.dumps(data))
-        print(f"[services.presence] ok {data}")
+        logger.info(f"ok {data}")
     except Exception as e:
-        print(f"Failed to publish to channel {channel_name}: {e}")
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")
 
 
 async def notify_chat(chat: ChatUpdate, member_id: int, action="create"):
@@ -19,6 +20,6 @@ async def notify_chat(chat: ChatUpdate, member_id: int, action="create"):
     data = {"payload": chat, "action": action}
     try:
         await redis.publish(channel_name, json.dumps(data))
-        print(f"[services.presence] ok {data}")
+        logger.info(f"ok {data}")
     except Exception as e:
-        print(f"Failed to publish to channel {channel_name}: {e}")
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")


@@ -1,6 +1,12 @@
+import logging
+
 import redis.asyncio as aredis
 
+from services.logger import root_logger as logger
 from settings import REDIS_URL
 
+logger.setLevel(logging.DEBUG)
+
 
 class RedisCache:
     def __init__(self, uri=REDIS_URL):
@@ -18,12 +24,13 @@ class RedisCache:
     async def execute(self, command, *args, **kwargs):
         if self._client:
             try:
-                print("[redis] " + command + " " + " ".join(args))
+                logger.debug(f"{command} {args} {kwargs}")
                 r = await self._client.execute_command(command, *args, **kwargs)
+                logger.debug(type(r))
+                logger.debug(r)
                 return r
             except Exception as e:
-                print(f"[redis] error: {e}")
+                logger.error(e)
+        return None
 
     async def subscribe(self, *channels):
         if self._client:
@@ -45,16 +52,6 @@ class RedisCache:
             return
         await self._client.publish(channel, data)
 
-    async def lrange(self, key, start, stop):
-        if self._client:
-            print(f"[redis] LRANGE {key} {start} {stop}")
-            return await self._client.lrange(key, start, stop)
-
-    async def mget(self, key, *keys):
-        if self._client:
-            print(f"[redis] MGET {key} {keys}")
-            return await self._client.mget(key, *keys)
-
 
 redis = RedisCache()
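With the added `return None`, `execute` now fails soft, which is why callers in this changeset guard results with `isinstance` before use; a typical consumer looks like this sketch (assuming a connected client and decoded string responses):

import json

from services.rediscache import redis

async def cached_author(author_id: int):
    # execute() returns None on error or when no client is connected,
    # so guard before deserializing.
    raw = await redis.execute("GET", f"author:{author_id}")
    return json.loads(raw) if isinstance(raw, str) else None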


@@ -1,16 +1,16 @@
-from ariadne import QueryType, MutationType
+from ariadne import MutationType, QueryType
 
 query = QueryType()
 mutation = MutationType()
 
 
 # duffok was here 2023-12-1
 # This is a query resolver for Apollo Federation
-@query.field("_service")
-def resolve_service(*_):
-    # Load the full SDL from your SDL file
-    with open("inbox.graphql", "r") as file:
-        full_sdl = file.read()
-
-    return {"sdl": full_sdl}
+# @query.field("_service")
+# def resolve_service(*_):
+#     # Load the full SDL from your SDL file
+#     with open("inbox.graphql", "r") as file:
+#         full_sdl = file.read()
+#
+#     return {"sdl": full_sdl}
 
 resolvers = [query, mutation]

services/sentry.py (new file, 30 lines)

@@ -0,0 +1,30 @@
import sentry_sdk
from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
from services.logger import root_logger as logger
from settings import GLITCHTIP_DSN
def start_sentry():
# sentry monitoring
try:
sentry_sdk.init(
GLITCHTIP_DSN,
# Set traces_sample_rate to 1.0 to capture 100%
# of transactions for performance monitoring.
traces_sample_rate=1.0,
# Set profiles_sample_rate to 1.0 to profile 100%
# of sampled transactions.
# We recommend adjusting this value in production.
profiles_sample_rate=1.0,
enable_tracing=True,
integrations=[
StarletteIntegration(),
AriadneIntegration(),
RedisIntegration(),
],
)
except Exception as e:
logger.error(e)
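`start_sentry` is presumably invoked once at application startup; a minimal wiring sketch (the `main.py` entrypoint and the production-only guard are assumptions, not part of this diff):

# Hypothetical startup wiring, e.g. in main.py:
from services.sentry import start_sentry
from settings import MODE

if MODE == "production":
    start_sentry()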


@@ -1,9 +1,9 @@
 from os import environ
 
-PORT = 80
+PORT = 8000
 REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
-API_BASE = environ.get("API_BASE") or "https://v2.discours.io/"
-AUTH_URL = environ.get("AUTH_URL") or "https://v2.discours.io/"
+API_BASE = environ.get("API_BASE") or "http://127.0.0.1:8001/"
+AUTH_URL = environ.get("AUTH_URL") or "http://127.0.0.1:8080/graphql/"
 MODE = environ.get("MODE") or "production"
-SENTRY_DSN = environ.get("SENTRY_DSN")
+GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN")
 DEV_SERVER_PID_FILE_NAME = "dev-server.pid"