794 Commits

Author SHA1 Message Date
615f1fe468 topics+authors-reimplemented-cache
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2025-03-22 11:47:19 +03:00
86ddb50cb8 topics caching upgrade 2025-03-22 09:31:53 +03:00
31c32143d0 reaction-to-feature-fix
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-03-21 12:34:10 +03:00
b63c387806 jsonfix3
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-03-20 12:52:44 +03:00
dbbfd42e08 redeploy
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-03-20 12:35:55 +03:00
47e12b4452 fx2
Some checks failed
Deploy on push / deploy (push) Failing after 16s
2025-03-20 12:33:27 +03:00
e1a1b4dc7d fx
All checks were successful
Deploy on push / deploy (push) Successful in 44s
2025-03-20 12:25:18 +03:00
ca01181f37 jsonfix
All checks were successful
Deploy on push / deploy (push) Successful in 44s
2025-03-20 12:24:30 +03:00
0aff77eda6 portfix
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2025-03-20 12:13:14 +03:00
8a95aa1209 jsonload-fix
All checks were successful
Deploy on push / deploy (push) Successful in 45s
2025-03-20 12:05:58 +03:00
a4a3c35f4d lesscode
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2025-03-20 12:04:47 +03:00
edece36ecc jsonenc-fix
All checks were successful
Deploy on push / deploy (push) Successful in 12s
2025-03-20 11:59:43 +03:00
247fc98760 cachedep-fix+orjson+fmt
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2025-03-20 11:55:21 +03:00
a1781b3800 depfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2025-03-20 11:36:12 +03:00
450c73c060 nothreads
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2025-03-20 11:30:36 +03:00
3a1924279f redeploy
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2025-03-20 11:23:37 +03:00
094e7e6fe2 granian-fix
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2025-03-20 11:19:29 +03:00
ae48a18536 comment-delete-handling-patch
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2025-03-20 11:01:39 +03:00
354bda0efa drafts-fix
All checks were successful
Deploy on push / deploy (push) Successful in 59s
2025-03-13 22:21:43 +03:00
856f4ffc85 i 2025-03-09 21:01:52 +03:00
20eba36c65 create-draft-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m47s
2025-02-27 16:16:41 +03:00
8cd0c8ea4c less-logs
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-19 00:23:42 +03:00
2939cd8adc pyright-conf
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-19 00:21:51 +03:00
41d8253094 lesslogs
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-14 21:49:21 +03:00
5263d1657e 0.4.11-b
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-12 22:34:57 +03:00
1de3d163c1 0.4.11-create_draft-fix
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-12 21:59:05 +03:00
d3ed335fde main_topic-fix7-debug
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-02-12 20:04:06 +03:00
f84be7b11b main_topic-fix7
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-12 19:33:02 +03:00
b011c0fd48 main_topic-fix6
All checks were successful
Deploy on push / deploy (push) Successful in 54s
2025-02-12 19:21:21 +03:00
fe661a5008 main_topic-json-fix
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-12 02:23:51 +03:00
e97823f99c main_topic-fix4
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-12 00:55:55 +03:00
a9dd593ac8 main_topic-fix3
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-12 00:47:39 +03:00
1585e55342 main_topic-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-12 00:39:25 +03:00
52b608da99 main_topic-fix
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-12 00:31:18 +03:00
5a4f75537d debug more
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-11 23:47:54 +03:00
ce4a401c1a minor-debug
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-11 23:44:29 +03:00
7814e3d64d 0.4.10
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-11 12:40:55 +03:00
9191d83f84 usermoved
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2025-02-11 12:24:02 +03:00
5d87035885 0.4.10-a
All checks were successful
Deploy on push / deploy (push) Successful in 44s
2025-02-11 12:00:35 +03:00
25b61c6b29 simple-dockerfile
All checks were successful
Deploy on push / deploy (push) Successful in 1m41s
2025-02-10 19:10:13 +03:00
9671ef2508 author-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m22s
2025-02-10 18:38:26 +03:00
759520f024 initdb-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2025-02-10 18:15:54 +03:00
a84d8a0c7e 0.4.9-c
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2025-02-10 18:04:08 +03:00
20173f7d1c trigdeploy
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-02-10 11:30:58 +03:00
4a835bbfba 0.4.9-b
All checks were successful
Deploy on push / deploy (push) Successful in 2m38s
2025-02-09 22:26:50 +03:00
37a9a284ef 0.4.9-drafts 2025-02-09 17:18:01 +03:00
dce05342df fmt
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-04 15:27:59 +03:00
56db33d7f1 get_my_rates_comments-fix
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-02-04 02:53:01 +03:00
40b4703b1a get_cached_topic_followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-02-04 01:40:00 +03:00
747d550d80 fix-revalidation2
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-04 00:08:25 +03:00
84de0c5538 fix-revalidation
All checks were successful
Deploy on push / deploy (push) Successful in 59s
2025-02-04 00:01:54 +03:00
33ddfc6c31 after-handlers-cache
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-02-03 23:22:45 +03:00
26b862d601 more-revalidation
All checks were successful
Deploy on push / deploy (push) Successful in 1m32s
2025-02-03 23:16:50 +03:00
9fe5fea238 editor-fix
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-03 19:06:00 +03:00
0347b6f5ff logs-update-shout-5
All checks were successful
Deploy on push / deploy (push) Successful in 59s
2025-02-02 21:57:51 +03:00
ffb75e53f7 logs-update-shout-4
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-02 21:55:22 +03:00
582ba75643 logs-update-shout-3
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-02 21:49:28 +03:00
2db1da3194 logs-update-shout-2
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-02 21:45:24 +03:00
fd6b0ce5fd logs-update-shout
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2025-02-02 21:41:03 +03:00
Stepan Vladovskiy
670a477f9a debug: ok, moved map to the top layer of nginx; now this version is without map
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-01-29 14:38:33 -03:00
Stepan Vladovskiy
46945197d9 debug: with hardcoded domain testing.dscrs.site and none as the default, for understanding
All checks were successful
Deploy on push / deploy (push) Successful in 1m0s
2025-01-29 13:59:47 -03:00
Stepan Vladovskiy
4ebc64d13a fix: so, the problem may be somewhere else, because the map is working fine. We are trying to find where the overwriting issue is. Modified main.py with some extra rules. Maybe it helps
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-01-28 20:03:56 -03:00
Stepan Vladovskiy
bc9560e56e feat: safe version. Debugging gave no results, so this is the simple version. For code beauty it can be rewritten with the previous no-debug version
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-01-28 19:38:19 -03:00
Stepan Vladovskiy
38f5aab9e0 debug: not safe version. back to safe map function
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2025-01-28 19:27:24 -03:00
Stepan Vladovskiy
95f49a7ca5 debug: rewrite nginx file to use it without variables logic
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-01-28 19:23:02 -03:00
Stepan Vladovskiy
cd8f5977af debug: sv, just in case, for testing the mapping issue and trying to find the place with the filter; in this option, if the origin is dscrs.site then the allowed origin is discours.io
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2025-01-28 18:57:27 -03:00
Stepan Vladovskiy
a218d1309b debug: no force options and simple regexp logic
All checks were successful
Deploy on push / deploy (push) Successful in 59s
2025-01-28 18:24:10 -03:00
Stepan Vladovskiy
113d4807b2 feat:sv with force flag
All checks were successful
Deploy on push / deploy (push) Successful in 1m2s
2025-01-28 17:55:41 -03:00
Stepan Vladovskiy
9bc3cdbd0b debug: sv, clean up mapping testing in cors policy because it is redundant, and add a custom log for the Allow-Origin header
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
79e6402df3 debug: added a separate rule for dscrs.site in the nginx config map part 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
ec2e9444e3 debug: nginx conf sigil with custom logs with headers and backslash in dscrs.site 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
a86a2fee85 debug: nginx conf sigil file without custom log; add backslash handling for the dscrs.site domain 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
aec67b9db8 debug: layer with logs added to debug allow_orrigin missing for dscrs.site domain; fix backslash 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
0bbe1d428a debug: layer with logs added to debug allow_orrigin missing for dscrs.site domain 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
a05f0afa8b debug: layer with logs added to debug allow_orrigin missing for dscrs.site domain 2025-01-28 17:48:59 -03:00
5e2842774a media-field-workarounds
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2025-01-28 15:38:10 +03:00
e17690f27b nostat
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2025-01-26 18:16:33 +03:00
cb990b61a3 gqldata 2025-01-26 18:01:04 +03:00
cc837288bb simpler-reader-field 2025-01-26 17:59:08 +03:00
4a26e4f75b fmt 2025-01-26 17:53:16 +03:00
eee2c1a13d fieldresolver-fix 2025-01-26 17:52:45 +03:00
209d5c1a5e shout-media-field-resolver 2025-01-25 15:31:23 +03:00
4f4affaca4 cache-invalidate-fix-3 2025-01-25 15:19:19 +03:00
d59710309d cache-invalidate-fix-2 2025-01-25 11:57:10 +03:00
88525276c2 cache-invalidate-fix 2025-01-25 11:23:20 +03:00
1f4b3d3eee create-shout-fix6 2025-01-22 00:43:59 +03:00
76a4c5fb53 create-shout-fix5 2025-01-21 21:54:23 +03:00
8f6b96cb0f create-shout-fix4 2025-01-21 20:53:27 +03:00
76a707c7fd create-shout-fix3 2025-01-21 20:39:54 +03:00
ae584abb5b create-shout-fix2 2025-01-21 19:58:20 +03:00
eff8278cc3 create-shout-fix 2025-01-21 19:33:28 +03:00
8432a00691 create-shout-fix2 2025-01-21 18:28:03 +03:00
1ed185a701 create-shout-fix 2025-01-21 18:19:25 +03:00
562ce3296e published_at-revert2 2025-01-21 17:52:04 +03:00
ddc2d69e54 published_at-revert 2025-01-21 17:50:02 +03:00
f6863b32e8 published_at-fix5 2025-01-21 17:44:29 +03:00
9bf9f3d384 published_at-fix4 2025-01-21 16:40:52 +03:00
998d01c751 published_at-fix3 2025-01-21 15:57:22 +03:00
57d04ddf1c published_at-fix2 2025-01-21 13:34:20 +03:00
0ba2d2ecee published_at-fix 2025-01-21 13:11:15 +03:00
839cc84c26 stat-syntetic 2025-01-21 10:21:38 +03:00
c80c282118 prepare-topics-authors-dicts 2025-01-21 10:09:49 +03:00
5acae03c55 fmt 2025-01-21 10:09:28 +03:00
49be05d4db shout-create-fix 2025-01-18 10:57:34 +03:00
ae7580252b invcache-fix6 2025-01-16 06:49:15 +03:00
7c85f51436 invcache-fix5 2025-01-16 06:42:12 +03:00
83ec475cc8 invcache-fix4 2025-01-16 06:01:47 +03:00
c1c095a73c invcache-fix3 2025-01-16 06:00:15 +03:00
c4e84364c6 invcache-fix 2025-01-16 05:53:37 +03:00
8287b82554 invalidate-cache-fix 2025-01-16 05:46:31 +03:00
56fe8bebbe invalidate-cache-fix 2025-01-16 05:45:53 +03:00
4fffd1025f debug-update-shout-2 2025-01-16 05:42:53 +03:00
576e1ea152 debug-update-shout 2025-01-16 05:34:43 +03:00
5e1021a18e corsfix-34 2024-12-24 14:22:49 +03:00
dcbdd01f53 cors-fix-33
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2024-12-24 14:04:52 +03:00
608bf8f33a tokencheckfix
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2024-12-22 11:33:57 +03:00
48994d8bfd claims
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2024-12-22 00:34:35 +03:00
4ffcbf36d3 nobearer
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2024-12-22 00:30:04 +03:00
e539e0334f Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 55s
2024-12-22 00:24:29 +03:00
1898b3ef3f auth-debug 2024-12-22 00:22:26 +03:00
Stepan Vladovskiy
1100a1b66f debug: add dscrs.site map in cors
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2024-12-20 14:47:40 -03:00
Stepan Vladovskiy
04a0a6ddf4 debug: Sigil back to map with only discours.io domain
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2024-12-20 14:35:59 -03:00
bfbb307d6b corsfix8
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-12-17 20:26:17 +03:00
1c573f9a12 corsfix7 2024-12-17 20:17:19 +03:00
6b1533402a corsfix6 2024-12-17 20:14:01 +03:00
fdf5f795da corsfix5 2024-12-17 20:09:39 +03:00
daf5336410 corsfix4 2024-12-17 20:06:15 +03:00
0923dc61d6 corsfix3 2024-12-17 20:02:41 +03:00
4275131645 corslogs 2024-12-17 19:52:49 +03:00
c64d5971ee corsfix2 2024-12-17 19:51:00 +03:00
3968bc3910 sigilfix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-12-17 19:46:47 +03:00
99b0748129 mapfix2 2024-12-17 00:31:02 +03:00
fcaac9cc41 mapfix 2024-12-17 00:27:07 +03:00
b5c6535ee8 wh5
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-12-16 20:14:11 +03:00
cf6150b155 wh4 2024-12-16 20:10:39 +03:00
5d1bfeaa9a headercase 2024-12-16 20:07:56 +03:00
c4ffc08bae webfk 2024-12-16 20:03:00 +03:00
f73f3608c0 webhook-fix2 2024-12-16 19:50:25 +03:00
5944d9542e webhook-fix 2024-12-16 19:44:24 +03:00
2aefcd2708 corsfix 2024-12-16 19:39:31 +03:00
3af4c1ac7a issuer-port-fix 2024-12-16 19:23:45 +03:00
aff0e8b1df webhookfix 2024-12-16 19:13:16 +03:00
e4a9bfa08b authdev2 2024-12-16 19:06:47 +03:00
a41a5ad39a authdev 2024-12-16 18:57:10 +03:00
434d59a7ba nginx-fix10 2024-12-16 14:05:26 +03:00
407de622ec allow-origin-fix 2024-12-16 14:01:05 +03:00
be03e7b931 viewed-storage-update
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-12-12 02:03:19 +03:00
d02ae5bd3f fmt+debug
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-12-12 01:04:11 +03:00
87506b0478 check-inner-logix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-12-12 00:32:27 +03:00
3a819007c1 morelogs-update
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-12-12 00:29:04 +03:00
961ba9c616 warnbetter
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-12-12 00:21:51 +03:00
7b58c7537e warn-not-found
All checks were successful
Deploy on push / deploy (push) Successful in 4s
2024-12-12 00:20:43 +03:00
a1486b3bba comments-rates-fix
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 23:49:58 +03:00
f3c06e1969 mutation-fix-2 2024-12-11 23:06:55 +03:00
354d9c20a3 mutation-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-12-11 23:04:45 +03:00
fbcee18db1 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 23:02:14 +03:00
c5d21c3554 check-webhook
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 23:01:03 +03:00
4410311b80 webhook-is-mutation
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 22:54:37 +03:00
8f5ee384ff logsdebug
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-12-11 22:52:25 +03:00
bffc48e5d9 log-auth-graphql
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-12-11 22:49:08 +03:00
9cead2ab0e search-off
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 22:31:41 +03:00
444c853f54 webhook-fix
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 22:21:05 +03:00
7751b0d0f8 startup-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-12-11 22:10:48 +03:00
fe93439194 webhook-add
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 22:07:36 +03:00
6762b18135 get-author-followers-fix2 2024-12-11 21:34:43 +03:00
9439d71249 get-author-followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-12-11 21:25:03 +03:00
b8f86e5d5e last-commented-fix
All checks were successful
Deploy on push / deploy (push) Successful in 8s
2024-12-04 18:25:51 +03:00
597fd6ad55 last_commented_at
All checks were successful
Deploy on push / deploy (push) Successful in 8s
2024-12-04 17:40:45 +03:00
a71a6fcc41 saerch-fail-toler
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-22 20:32:14 +03:00
9dde136c9c search-fail-tolerance
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-22 20:23:45 +03:00
779cb9a87c following-error
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-22 20:19:56 +03:00
79f7c914d7 v0.4.7 2024-11-20 23:59:11 +03:00
a9d181db8f fixapi
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-18 23:23:20 +03:00
283ad80632 fasternomyreate
All checks were successful
Deploy on push / deploy (push) Successful in 7s
2024-11-18 22:24:54 +03:00
e9f9582110 sqlsynt2 2024-11-18 22:21:15 +03:00
3a5449df79 sqlsynt
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-18 22:19:06 +03:00
cf88c165ee nomyratestat2
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-18 22:16:42 +03:00
2fec47d363 nomyratestat
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-18 22:13:49 +03:00
6966d900fa myrates-api-minor-fix3 2024-11-18 22:10:25 +03:00
773615e201 myrates-api-minor-fix2 2024-11-18 22:05:45 +03:00
080ba76684 myrates-api-minor-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-18 22:03:11 +03:00
25f929026f commend-id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-18 13:14:32 +03:00
47a8493824 no-my-rate
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-18 11:31:19 +03:00
821a4c0df1 info-context-debug
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-14 14:11:51 +03:00
1a371b191a ..
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-14 14:00:33 +03:00
471781f942 debug-stat-wip
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-14 13:42:40 +03:00
b4eff32427 authorized-context-debug
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-14 13:33:09 +03:00
2d0ca1c7bf myrate-fix+log
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-14 13:25:33 +03:00
88812da592 myrate-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-14 13:21:32 +03:00
bffa4aa1ef unrated-fix5
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-14 01:36:15 +03:00
4adf3d5a1e unrated-fix3 2024-11-14 01:32:00 +03:00
4b111951b7 unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-14 00:55:25 +03:00
b91e4ddfd1 unrated-fix2 2024-11-14 00:29:15 +03:00
cd90e7a2d0 unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-14 00:26:12 +03:00
af2d8caebe toler-none2
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-12 18:52:48 +03:00
f32b6a6a27 toler-none
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-12 18:49:44 +03:00
8116160b4d my_rate-stat
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-12 17:56:20 +03:00
34511a8edf join-maintopic-unrated
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-03 11:32:05 +03:00
08fb1d3510 create-reaction-shout
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 22:38:40 +03:00
6d61e038e7 create-reaction-fix-4
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 22:34:20 +03:00
bcb602d3cf create-reaction-fix3 2024-11-02 19:48:43 +03:00
f4a8a653d0 create-reaction-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 19:16:52 +03:00
2c981bc972 create-reaction-fkx2
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 13:52:03 +03:00
b322219173 create-reaction-fkx
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 13:49:22 +03:00
52567557e8 debug-create-reaction
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 13:44:00 +03:00
3f1ef8dfd8 proposals-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 13:35:30 +03:00
1b43f742d3 tolerate-double-follow
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 12:33:52 +03:00
5f3f00366f tolerate-double-follow 2024-11-02 12:33:35 +03:00
a61bb6da20 unfollow-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 12:12:19 +03:00
11611fd577 following-fixes+fmt
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 12:09:24 +03:00
09a6d085fd revalidation-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:56:47 +03:00
d4548f71c7 lesslogs
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:49:30 +03:00
9b67f1aa21 notify-follower-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:42:24 +03:00
2e91f9399a revalidation-follower-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:40:02 +03:00
0eb95e238b following-debug
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:35:02 +03:00
65bd2ef9cf author-created-at-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 06:27:31 +03:00
9a6c995589 lgos
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 04:44:07 +03:00
8965395377 viewed-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 04:28:16 +03:00
38d39dd618 debug-create-reaction
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 04:24:41 +03:00
0c009495a3 async-revised
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 00:26:57 +03:00
54c59d26b9 media-item-type
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 22:57:20 +03:00
92e49c8ad9 group-by-shout
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:23:23 +03:00
493e6cf92c psql8
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:17:56 +03:00
1dcc0cf8c5 psql7
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:11:42 +03:00
d3daf2800e psql6
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 22:04:39 +03:00
d0b5c2d3f9 psql5
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:01:41 +03:00
0930e80b9b psql2 2024-11-01 21:52:25 +03:00
044d28cfe9 psql
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:49:31 +03:00
4b4234314d fields-group 2024-11-01 21:45:51 +03:00
baa8d56799 .
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:42:20 +03:00
d40728aec9 nodist4 2024-11-01 21:39:05 +03:00
c78347b6f9 nodist2 2024-11-01 21:35:33 +03:00
021765340a nodist
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:30:52 +03:00
567507c412 groupby-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:25:25 +03:00
8bf0566d72 row.stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:17:51 +03:00
0874794140 stat-dict
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:09:53 +03:00
154477e1ad logfix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:04:30 +03:00
f495953f6a media-item-type
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:03:09 +03:00
fba0f34020 nodistinct
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:28:59 +03:00
4752ef19b2 order-by-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:27:25 +03:00
3e50902f07 json-distinct-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:24:09 +03:00
a0f29eb5b8 json-builder-compat
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:11:58 +03:00
fcbbe4fcac fixed-shouts-load
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:02:46 +03:00
4ef5d172a0 results-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 17:26:45 +03:00
31bd421e22 merged-hub
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 15:06:21 +03:00
dd60d1a1c4 deployfix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 14:33:34 +03:00
1892ea666a apply-options-moved
All checks were successful
Deploy on push / deploy (push) Successful in 1m26s
2024-11-01 14:29:58 +03:00
3a5297015f rating-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-11-01 14:09:22 +03:00
8ad00f0fa5 case-whens-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m21s
2024-11-01 14:07:10 +03:00
3247a3674f feed-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m22s
2024-11-01 14:00:19 +03:00
d88f905609 reworked-feed+reader
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2024-11-01 13:50:47 +03:00
a01a3f1d7a reader-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 1m18s
2024-11-01 12:27:13 +03:00
75e7079087 reader-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-11-01 12:06:23 +03:00
7f58bf48fe row-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2024-11-01 11:57:49 +03:00
f7c41532a5 feed-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2024-11-01 11:29:41 +03:00
a105372b15 norandomtopic-onserver-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m17s
2024-11-01 11:09:16 +03:00
54e26fb863 main-topic
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-11-01 10:29:18 +03:00
600d52414e txt
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-11-01 10:04:32 +03:00
5a9a02d3a4 0.4.6 2024-11-01 09:50:19 +03:00
bcac627345 main-x-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-11-01 07:51:33 +03:00
5dd47b3cd4 maintopic-nullable
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-11-01 07:43:10 +03:00
c9328041ce main_-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 21:58:24 +03:00
ddd18f8d70 media-type
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-10-31 21:45:55 +03:00
1ccc5fb9e7 more-agile-query-shout-api
Some checks failed
Deploy on push / deploy (push) Failing after 16s
2024-10-31 21:11:54 +03:00
fc930a539b 5random
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 20:42:09 +03:00
e7b4e59b65 authors_and_topics-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 20:34:25 +03:00
e2b6ae5e81 agile-query
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 20:28:52 +03:00
827300366d unrated-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:57:09 +03:00
8c05589168 optimized-query
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:48:06 +03:00
f29eb5f35a separate-subq
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:11:41 +03:00
62370b94b3 reader-query-optimized
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:06:58 +03:00
1114c7766d get-shout-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 18:37:00 +03:00
0c83b9c401 query-shouts-simpler 2024-10-31 18:28:09 +03:00
f437119711 unrated-sort-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 17:47:07 +03:00
eaa23134de comments-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 17:27:07 +03:00
00fe5d91a7 dictify
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 15:41:18 +03:00
071022c63b _sa_instance..
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 15:37:32 +03:00
3ace2093b2 keep-json
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 15:32:13 +03:00
42e06bd2e6 jsongix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:28:41 +03:00
6dd6fd764a no-create-json
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:25:22 +03:00
21888c6d00 ismain-field
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:18:27 +03:00
2bc0ac1cff maintopicslug
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-10-31 15:14:58 +03:00
bf3fd4b39a captionfix-2
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:10:26 +03:00
7eed615991 author-captions-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:05:22 +03:00
6e56eba0c2 oneval-subq
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:50:45 +03:00
5f2f4262a5 scalar
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:48:15 +03:00
882ef0288a whensfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:29:47 +03:00
9416165699 minorfix-3
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:27:13 +03:00
c72588800f minorfi4
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:20:22 +03:00
1c6678d55d minorfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:14:54 +03:00
91e4e751d8 readerfix2 2024-10-31 14:11:59 +03:00
bc4432c057 readerfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:09:33 +03:00
38185273af get-shouts-with-stats-fix8 2024-10-31 14:04:28 +03:00
5fb7ba074c get-shouts-with-stats-fix7 2024-10-31 14:02:36 +03:00
d83be5247b get-shouts-with-stats-fix6 2024-10-31 14:00:56 +03:00
0f87ac6a00 get-shouts-with-stats-fix5 2024-10-31 13:59:18 +03:00
f61a2d07fe get-shouts-with-stats-fix4
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 13:52:32 +03:00
d48577b191 get-shouts-with-stats-fix3 2024-10-31 13:46:33 +03:00
4aec829c74 get-shouts-with-stats-fix2 2024-10-31 13:42:46 +03:00
d8496bf094 get-shouts-with-stats-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 13:39:38 +03:00
55a0474602 reader-reactionalias-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 13:25:05 +03:00
751f3de4b1 jsonify2
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 12:49:18 +03:00
5b211c349e create_shout-community-1-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-10-31 09:33:17 +03:00
a578e8160e unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m14s
2024-10-24 16:27:16 +03:00
9ac533ee73 fmt 2024-10-24 00:01:09 +03:00
d9644f901e more-toler3
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-24 00:00:04 +03:00
0a26f2986f more-toler2
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-10-23 23:59:17 +03:00
7cf3f91dac more-toler
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2024-10-23 23:57:52 +03:00
33bedbcd67 restoring-test
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-10-23 11:29:44 +03:00
8de91a8232 hgetall-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-23 11:25:56 +03:00
23514ca5a4 get_shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-23 11:22:07 +03:00
79ab0d6a4c init-create-fix
All checks were successful
Deploy on push / deploy (push) Successful in 31s
2024-10-21 20:21:31 +03:00
1476d4262d trick-import
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-21 20:19:52 +03:00
724f901bbd community-stat-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2024-10-21 16:57:03 +03:00
a4e48eb3f4 commynity-cudl
All checks were successful
Deploy on push / deploy (push) Successful in 1m5s
2024-10-21 16:42:50 +03:00
c6f160c8cf update-api-3
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-21 12:15:44 +03:00
62f2876ade queryfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-21 11:53:00 +03:00
93b7c6bf4d rolesfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2024-10-21 11:48:51 +03:00
635ff4285e communityfollower-roles
All checks were successful
Deploy on push / deploy (push) Successful in 1m3s
2024-10-21 11:29:57 +03:00
0cf963240e virtual-cols-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m6s
2024-10-21 11:08:16 +03:00
160f02e67f 0.4.5-api-update
All checks were successful
Deploy on push / deploy (push) Successful in 1m49s
2024-10-21 10:52:23 +03:00
045d2ddadf create-all-tables-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 1m35s
2024-10-15 19:52:12 +03:00
63ebf3af2d create-all-tables-fix 2024-10-15 19:50:17 +03:00
bf33cdc95c fixed-coales
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-15 11:12:09 +03:00
76aeddbde2 ignoreup 2024-10-15 10:07:44 +03:00
3b1c4475c6 readme-update 2024-10-14 19:06:30 +03:00
5966512a8f poetry-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-14 18:28:16 +03:00
8b65c87750 add-fakeredis
All checks were successful
Deploy on push / deploy (push) Successful in 1m19s
2024-10-14 13:08:43 +03:00
6f6b619c11 graphql-handler-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 12:31:55 +03:00
3188a67661 async+fmt-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 12:19:30 +03:00
4e7fb953ba try-to-fix-2 2024-10-14 12:13:18 +03:00
173c865a69 try-to-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m46s
2024-10-14 11:11:13 +03:00
d5ba8d1cde correlate-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 10:47:38 +03:00
998db09c09 shout-query-substat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 09:37:40 +03:00
78d575863d logfixes 2024-10-14 09:33:31 +03:00
503e859b5c query-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m33s
2024-10-14 09:23:11 +03:00
5dc61dc397 db-init-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2024-10-14 09:12:20 +03:00
7c86d95f5e sqlite-support
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-14 02:05:20 +03:00
5c40ab3d00 312
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-13 00:49:06 +03:00
31867d3c6c ex-support
All checks were successful
Deploy on push / deploy (push) Successful in 1m59s
2024-09-27 10:18:08 +03:00
e2b54b37dd sentry-log-detailed
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2024-08-26 21:18:33 +03:00
6a6df10825 fixd2
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-08-22 16:04:23 +03:00
15ffc9eb3e restore-authorizer-dev
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-08-22 15:55:26 +03:00
5095b0b4c0 get-with-stat-as-arg
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-08-14 18:33:11 +03:00
4c126fd859 cache-author-fiz 2024-08-14 16:30:52 +03:00
8f3fded5fe nosp
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-14 14:32:40 +03:00
96ea356c62 ms2 2024-08-12 11:16:25 +03:00
4c8f7d5958 ms
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-12 11:13:36 +03:00
c5ee827230 merged
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-12 11:00:01 +03:00
208de158bc imports sort
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 09:37:06 +03:00
d0c1f33227 nodistinct
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 08:17:40 +03:00
71db929fa4 comments-counter-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 07:44:23 +03:00
56f1506450 followers-ids-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-09 07:35:45 +03:00
fae5f6f735 get-objects
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 07:26:04 +03:00
983f25d6d3 debug-followers
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 07:22:55 +03:00
1c9f6f30d9 debug:get_cached_topic_followers
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-09 07:14:33 +03:00
4a7b305ad4 fmt
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 18:57:03 +03:00
b5deb8889a follower-stat-fix 2024-08-08 18:56:49 +03:00
218bbd54da redis-fix-3
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 18:14:49 +03:00
531e4bf32c redis-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 18:13:51 +03:00
65bbbdb2b0 is_main-fix2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 18:06:06 +03:00
13acff1708 get_cached_topic_followers-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 18:00:50 +03:00
ff9c0a0b82 redis-fixes
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 17:55:34 +03:00
69a848d6a7 redis-fix 2024-08-08 17:54:15 +03:00
6a13e3bb0f is_main-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 17:48:53 +03:00
e4266b0bab get_cached_topic_by_slug-fix
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-08 17:46:25 +03:00
5bd9c9750d parse_aggregated_string-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 17:39:37 +03:00
e46de27ba9 get-shout-fix2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 17:36:20 +03:00
7bb70c41df get-shout-fix 2024-08-08 17:36:11 +03:00
a771cd0617 reaction
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 17:33:55 +03:00
21d9b75a09 fix-string-agg 2024-08-08 16:20:45 +03:00
71015c2ca3 fix-topic-non-body
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-08 16:16:40 +03:00
ea99219283 fmt
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 16:10:45 +03:00
0533863230 minor-fixes 2024-08-08 16:10:31 +03:00
a5ec1838b1 parser-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 18:13:40 +03:00
7fb4b5bd18 follower-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:11:32 +03:00
87aa39959a tricky-followers-count
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:09:44 +03:00
8b377123e1 follower-groupby2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:06:31 +03:00
fb687d50dd follower-groupby
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:03:30 +03:00
64e0e0ce79 followers_stat
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 18:01:12 +03:00
5a6a318b60 ismain-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 17:53:59 +03:00
1ce12c0980 parse-agregated-string
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-07 17:52:23 +03:00
9c374d789e string_agg
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-07 17:45:22 +03:00
f9a91e3a66 from-clause
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 15:36:05 +03:00
c551ca2e70 nogroupby2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 15:31:13 +03:00
6a4785cdac nogroupby
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 15:10:37 +03:00
ec7b25df3c вшые 2024-08-07 15:04:17 +03:00
c601fcc2a4 alc
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 14:54:13 +03:00
1524f141b8 distinct
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 14:49:15 +03:00
50f2c9d161 subsub3 2024-08-07 14:41:22 +03:00
7712832b76 subsub2 2024-08-07 14:38:42 +03:00
a973da5bb4 subsub
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:35:50 +03:00
3fde67a87d sqltypes-text
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:29:31 +03:00
19c9ef462e CITEXT
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:26:41 +03:00
56c010975c array_agg
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 14:22:24 +03:00
572f63f12b reader-loads-move 2024-08-07 14:18:05 +03:00
a01ca30f5b stat-docs-reactions-apifix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:02:36 +03:00
6517fc9550 groupby
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:57:37 +03:00
dcd9f9e0bf json-agg-fix§2 2024-08-07 13:53:44 +03:00
26d83aba7a json-agg-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:51:35 +03:00
087f6a7157 shouts-distinc-topics-authors-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:47:10 +03:00
7e89a3471f import-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 13:37:50 +03:00
1f9b320f04 viewed-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:37:08 +03:00
eba97e967b thread-lock-fix2
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 13:30:41 +03:00
2f65a538fa thread-lock-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 13:25:48 +03:00
57d25b637d sync-viewed-stat
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 13:15:58 +03:00
9c7a62c384 selectinload2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 12:57:48 +03:00
41482bfd4b selectinload 2024-08-07 12:57:01 +03:00
d369cfe333 ident-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 12:49:25 +03:00
2082e2a6e5 discussed-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 12:48:57 +03:00
7a8f0a1c21 reader-oneloop
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 12:38:15 +03:00
3febfff1db postquery-topics-authors3 2024-08-07 12:29:51 +03:00
ad320ae83e postquery-topics-authors2 2024-08-07 12:23:56 +03:00
5609184d3b all 2024-08-07 12:22:51 +03:00
1e8d2aba0a postquery-topics-authors
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 12:18:29 +03:00
ebec80f198 precache-аix2 2024-08-07 11:56:13 +03:00
2a21decc94 precache-debug 2024-08-07 11:53:31 +03:00
520b39cb0b groupbyfix 2024-08-07 11:52:16 +03:00
1b46184781 groupbyfix 2024-08-07 11:52:07 +03:00
c1675cdf32 precache-fix2 2024-08-07 11:40:32 +03:00
c5a5e449d4 precache-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 11:38:34 +03:00
69a5dfcc45 shouts-load-optimisations
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 11:35:59 +03:00
7c48a6a1dc dict-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 10:30:51 +03:00
1af63dee81 shout-stats-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 10:22:37 +03:00
d4982017f6 refactored-starting
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 09:51:09 +03:00
60a56fd098 moved 2024-08-07 08:57:56 +03:00
1d4fa4b977 loop-fix-4 2024-08-07 08:42:59 +03:00
8b1e42de1c loop-fix-3 2024-08-07 08:35:38 +03:00
6bab1b0189 loop-fix-2 2024-08-07 08:33:02 +03:00
26fcd4ba50 loop-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 08:31:11 +03:00
c731639aa4 get-cached-topic
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 08:25:47 +03:00
b358a6f4a9 nocacheshout
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 08:22:08 +03:00
df25eaf905 query-fitness
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 07:27:56 +03:00
821c81dd9c redis-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 07:18:49 +03:00
3981fa3181 revalidation-manager
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 21:44:33 +03:00
a577b5510d cache-fix3
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-06 20:55:19 +03:00
1612778baa cache-fix3 2024-08-06 20:23:23 +03:00
4cbe78f81f cache-fix2 2024-08-06 20:20:20 +03:00
31d38c016e get_cached_author_by_user_id
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 20:05:24 +03:00
08eebd6071 cache-part2 2024-08-06 19:59:27 +03:00
c276a0eeb0 caching-wip1 2024-08-06 19:55:27 +03:00
9f91490441 trigger-fix-2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 19:45:42 +03:00
e0a44ae199 indexing2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 19:03:43 +03:00
ab388af35f indexing
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-06 19:01:50 +03:00
95977f0853 semaphore
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 18:57:35 +03:00
b823862cec caching-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 18:53:25 +03:00
522718f3a1 last-comment-revert
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-06 18:18:51 +03:00
dfd476411f nossl
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-08-06 14:44:01 +03:00
626d76f406 fmt2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-08-06 14:37:50 +03:00
c576fc0241 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-08-06 14:34:12 +03:00
385c8ce04b logging-fix 2024-08-06 14:33:52 +03:00
34c16c8cdf logging-sentry
All checks were successful
Deploy on push / deploy (push) Successful in 2m0s
2024-08-06 13:47:49 +03:00
2f4c8acaa2 reaction.likes fix
Some checks failed
Deploy on push / deploy (push) Failing after 56s
2024-07-30 05:19:16 +03:00
960a00101c load-comment-ratings
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-07-26 19:04:40 +03:00
c46dc759d7 load-shout-comments-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2024-07-26 16:56:30 +03:00
16728f1d49 group-by-asked
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-07-26 16:42:26 +03:00
4c625db853 group_by-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 2m7s
2024-07-23 17:36:26 +03:00
fce78df549 group_by-fix 2024-07-23 17:35:45 +03:00
a4411cfa34 comment-ratings
All checks were successful
Deploy on push / deploy (push) Successful in 1m31s
2024-07-22 11:32:47 +03:00
a43a44302b reactions-api-update
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-07-22 10:42:41 +03:00
451f041206 aliased-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-07-18 12:13:30 +03:00
6595d12108 unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-07-18 12:07:53 +03:00
983ad12dd3 slug-filter-author-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-07-18 09:09:48 +03:00
3ff52f944c slug-filter-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-07-18 09:05:10 +03:00
77282ade62 load-shouts-discussed-coauthored
All checks were successful
Deploy on push / deploy (push) Successful in 2m4s
2024-07-16 01:06:43 +03:00
1223c633d4 followed-by
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-07-03 15:35:12 +03:00
d55a3050fc Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-07-03 11:57:26 +03:00
62a2280a80 load-shouts-followed-fix 2024-07-03 11:57:17 +03:00
Stepan Vladovskiy
c57fca0aee test: encrease users from one ip to 10000 to see if something chnages on stress press tests
All checks were successful
Deploy on push / deploy (push) Successful in 2m2s
2024-07-03 01:40:00 -03:00
612f91a708 followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m53s
2024-06-17 14:52:09 +03:00
a25a434ea2 check-existing-on-create
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-12 13:00:35 +03:00
ac9f1d8a40 followers-empty-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-12 12:52:07 +03:00
e32baa8d8f stat-aliased-select-fix-3 2024-06-12 12:48:09 +03:00
9580282c79 stat-aliased-select-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-12 12:26:53 +03:00
c24f3bbb4a faster-response
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-11 22:46:35 +03:00
04e20b29ee author-with-stat-cache-nonblock-2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-06-11 17:51:34 +03:00
b2fdc9a453 parser-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-11 14:46:10 +03:00
8708efece2 stabfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-09 15:49:37 +03:00
51f56c0f1f issue#842-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-09 14:02:24 +03:00
e58fbe263f reaction.shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-09 09:13:21 +03:00
ea28f5346c auth-debug
All checks were successful
Deploy on push / deploy (push) Successful in 1m54s
2024-06-09 09:07:48 +03:00
4743581395 load_authors_by-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 14:16:16 +03:00
3f12bcfd39 precache-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:49:22 +03:00
10ad7089f4 precache-debug
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:47:43 +03:00
8d371e6519 log-loza
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 12:42:54 +03:00
76ee4a387c shout-link-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:37:55 +03:00
7a4c02d11d typo-fx
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:33:58 +03:00
ae861aa8b4 fix-select-by-topic
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 12:23:47 +03:00
ddc5254e5f log-response
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 11:24:26 +03:00
543b2e6b4d load_shouts_unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 11:13:54 +03:00
626e899ca3 get-cached-topic-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 11:07:49 +03:00
f5ebd0ada9 text-order-by-fix 2024-06-06 11:06:18 +03:00
afe710d955 debug-precache
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 09:56:21 +03:00
1946d5eda2 int
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 09:40:39 +03:00
3476d6e6d1 get_cached_topic_by_slug-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 09:23:45 +03:00
85f63a0e17 precache-synced-with-cache
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 08:06:10 +03:00
1cc779e17b get-author-id-by-user-id-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 23:42:25 +03:00
b04fc1ba65 get-author-id-by-user-id
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-05 23:42:09 +03:00
5afa046f18 get-author-by-user
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 23:15:19 +03:00
fbf21ae3f9 strip-more-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 23:07:29 +03:00
12439b6ef2 strip-more
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 23:03:13 +03:00
b72ef072e4 fix-cache-topic
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 22:56:48 +03:00
ee6a636e68 fix-cache-author
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-05 22:55:44 +03:00
e942fdbffa debug-precache
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 22:27:23 +03:00
13e609bcf7 fixed-redis-intfix4
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 22:20:39 +03:00
d5d5a69ab4 userid-renewal-toler
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 21:44:51 +03:00
53545605d0 fixed-redis-cache-4
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 21:40:32 +03:00
d93fa4cb4b cached-author-fi
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 21:04:48 +03:00
35ef4357fb simpler-cache-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 18:51:12 +03:00
d3fe4c4aff get_cached_author-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m6s
2024-06-05 18:48:41 +03:00
1e0d0f465a get_cached_author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 18:46:01 +03:00
6e80942beb reactions-follow-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-05 18:29:15 +03:00
67636e6d17 author-id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m21s
2024-06-05 18:18:03 +03:00
713fb4d62b 0.4.1-following-update 2024-06-05 17:45:55 +03:00
67c299939c toler-no-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 16:23:53 +03:00
1042eb6e58 less-bloat-log
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-04 12:55:12 +03:00
db2ae09ead aifix
All checks were successful
Deploy on push / deploy (push) Successful in 1m28s
2024-06-04 11:51:39 +03:00
708bdaa7f6 ruff-update 2024-06-04 09:10:52 +03:00
9c02333e2b precache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-04 09:07:46 +03:00
bfc177a811 exc-mw-connected
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-04 08:15:59 +03:00
d53256bcd7 exc-mw
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-04 08:10:57 +03:00
231de135ca search-fin
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:28:21 +03:00
5f36b7c6e2 search-with-images40
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 19:24:23 +03:00
23e46df8a9 search-with-images39
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 19:21:50 +03:00
6b8b61fa37 search-with-images38
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 19:19:30 +03:00
25964b6797 search-with-images36
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:14:18 +03:00
c0b3e90943 search-with-images35
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:09:02 +03:00
9c4ddea33d search-with-images34
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:06:26 +03:00
f41359b8c9 search-with-images33
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-06-02 18:59:00 +03:00
44b797c1de search-with-images32
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-02 18:58:24 +03:00
4933553d50 search-with-images31
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 18:52:57 +03:00
93c9fcc248 search-with-images30
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-02 18:52:34 +03:00
2365485a68 search-with-images29
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 18:48:54 +03:00
27bea7d06f search-with-images28
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-02 18:47:01 +03:00
c29838b6ee search-with-images27
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 18:44:27 +03:00
c8baa6abf9 search-with-images26
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 18:41:09 +03:00
9358a86df1 search-with-images25
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 18:39:05 +03:00
7e8757ec72 search-with-images24
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 18:36:11 +03:00
c1fe419ff9 search-with-images22
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 18:34:15 +03:00
ebf1309b48 search-with-images22
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 18:01:17 +03:00
d83b459408 search-with-images120
All checks were successful
Deploy on push / deploy (push) Successful in 1m34s
2024-06-02 17:56:24 +03:00
db8472ae06 search-with-images19
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:47:27 +03:00
9d265fa3f9 search-with-images17
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:36:34 +03:00
5169cff892 search-with-images16
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:25:09 +03:00
8f2bd30d54 search-with-images15
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:16:43 +03:00
b8266c41fc search-with-images14
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:12:34 +03:00
1a601b93eb search-with-images13
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:08:50 +03:00
1b838676e3 search-with-images12
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:07:29 +03:00
8cc9d0d4d3 search-with-images11
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:01:22 +03:00
8e77a57bc1 search-with-images10
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 16:48:11 +03:00
e74c9688c8 search-with-images9
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 16:40:47 +03:00
60d6743fcd search-with-images8
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 16:38:38 +03:00
f42d81b9fc search-with-images7
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:36:12 +03:00
774240ca73 search-with-images6
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 16:18:19 +03:00
fb2c31a81b search-with-images6
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:14:01 +03:00
eba991f4f5 search-with-images5
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:10:09 +03:00
0fdb056460 search-with-images4
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:05:59 +03:00
17da2c8359 search-with-images3
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:00:09 +03:00
0abb4d605d search-with-images2
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 15:58:14 +03:00
465d9093bd search-with-images
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 15:56:17 +03:00
67e4cacb28 scoreidcache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 15:32:02 +03:00
a3d1d1b067 saerch-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 14:11:46 +03:00
2e5919f3e6 saerch-fix
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-02 14:10:49 +03:00
9b2db3cc1d d-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-05-30 22:04:28 +03:00
9307fc97fb follower-topics-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-05-30 21:29:25 +03:00
b3a998fec2 followers-cache-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 1m27s
2024-05-30 21:13:50 +03:00
5ba7f5e3c9 followers-cache-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 21:10:16 +03:00
9212fbe6b5 followers-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 20:55:47 +03:00
8dcd985c67 cache-fix-10
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:49:37 +03:00
c9dcd6a9c9 cache-fix-9
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 20:43:18 +03:00
afef19fae3 cache-fix-8
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:26:53 +03:00
2e2dc80718 cache-fix-7
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 20:23:32 +03:00
abc5381adb cache-fix-6
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:22:10 +03:00
75dd4120ec cache-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 20:20:02 +03:00
b0637da11d cache-fix-4
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:14:26 +03:00
968935869e cache-refactored-4
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 19:42:38 +03:00
74e000c96b cache-refactored-3
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 19:29:57 +03:00
8dd885b6a8 cache-refactored2
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 19:16:50 +03:00
042cf595f7 cache-refactored
All checks were successful
Deploy on push / deploy (push) Successful in 39s
2024-05-30 19:15:11 +03:00
3712ecf8ae author:user-key
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2024-05-30 18:07:41 +03:00
d20647c825 cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 17:49:33 +03:00
98010ed1bc get-with-stat-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 15:05:06 +03:00
76d4fc675f get-with-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-30 14:45:41 +03:00
e4cc182db4 get-with-stat-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-30 14:40:04 +03:00
9ca7a42d56 get-with-stat-debug
All checks were successful
Deploy on push / deploy (push) Successful in 30s
2024-05-30 14:38:14 +03:00
570c8a97e3 shouts-stat-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-30 14:29:00 +03:00
3bde3ea5e9 shouts-stat-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-30 14:25:35 +03:00
d54e2a2f3f shouts-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-30 14:01:34 +03:00
a1ee49ba54 poestmerge
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-30 12:49:46 +03:00
e638ad81e2 fmt+follows-refactored 2024-05-30 07:12:00 +03:00
bce43096b1 postmerge-fixex
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-27 20:14:40 +03:00
19d10b6219 Merge branch 'v2' into dev
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-27 20:11:04 +03:00
a9ab2e8bb2 cached-empty-fix-4 2024-05-27 20:03:07 +03:00
9a94e5ac56 cached-empty-fix-3 2024-05-27 19:59:16 +03:00
d93311541e cached-empty-fix-2 2024-05-27 19:57:22 +03:00
01d2d90df1 cached-empty-fix 2024-05-27 19:55:56 +03:00
7b72963b24 reply-to-fix 2024-05-27 19:39:48 +03:00
c90783f461 async-fix-3 2024-05-27 19:38:34 +03:00
9d9adfbdfa async-fix-2 2024-05-27 19:36:25 +03:00
f43624ca3d async-fix 2024-05-27 19:29:51 +03:00
3f6f7f1aa0 get-followers-fix 2024-05-27 18:30:28 +03:00
da89b20e5c session-close-fix 2024-05-26 02:17:45 +03:00
c4817c1e52 logfix
All checks were successful
Deploy on push / deploy (push) Successful in 29s
2024-05-24 13:25:05 +03:00
c444895945 log-response
All checks were successful
Deploy on push / deploy (push) Successful in 1m50s
2024-05-24 13:14:19 +03:00
9791ba4b49 result-fix6
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-21 04:40:48 +03:00
6ed144327c result-fix5
All checks were successful
Deploy on push / deploy (push) Successful in 31s
2024-05-21 04:34:08 +03:00
472801199c result-fix4
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 04:28:15 +03:00
a3514e6874 result-fix3
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 04:25:40 +03:00
95b2b97dd4 result-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 04:24:58 +03:00
df934a8fd2 result-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-21 04:22:36 +03:00
d89fa283dc cache-postrefactor-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 02:56:58 +03:00
1592065a8c postfixing-reimplemented-cache
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-21 02:01:18 +03:00
4c1fbf64a2 cache-reimplement-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2024-05-21 01:40:57 +03:00
3742528e3a follows-returns
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-20 19:11:07 +03:00
232892d397 isort
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-20 16:46:05 +03:00
e0b3562e80 follow/unfollow-handling-noauthor 2024-05-20 16:23:49 +03:00
71c2e8ea13 notopicid
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-18 19:30:25 +03:00
b73cce5431 create-reaction-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-18 17:41:04 +03:00
0d618116e1 compound-fix
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-05-18 17:31:45 +03:00
b7dbaa6e73 aliased-union-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 16:16:09 +03:00
5fe51e03bb fix-get-stat
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 15:40:15 +03:00
306caf9520 logs-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-18 15:26:22 +03:00
e6f42b388a logs-fix
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-05-18 15:25:53 +03:00
fd7bd385fc queries-refactoring-2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 14:15:05 +03:00
7d97f40826 cache-when-created
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-05-18 13:57:30 +03:00
bc01dfb125 media-indexed
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 13:16:39 +03:00
5dfb890b84 no-delete-on-create
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 12:57:18 +03:00
2beb584e87 search-index-softer-check
All checks were successful
Deploy on push / deploy (push) Successful in 32s
2024-05-18 12:55:34 +03:00
1f3607b4d3 search-compare-fix
All checks were successful
Deploy on push / deploy (push) Successful in 32s
2024-05-18 12:51:41 +03:00
0051492bd3 proper-searchfields
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 12:48:43 +03:00
0f5df77d28 create-reaction-unauthorized-handling
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 12:38:46 +03:00
c80229b7b9 delete-if-wrong
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-05-18 12:11:34 +03:00
8bc7a471cd index-struct-sync
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-18 11:58:47 +03:00
91a2854537 no-remove-index-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-18 11:52:17 +03:00
3d8e484187 no-delete-index 2024-05-18 11:32:30 +03:00
be6d2454b1 search-info-2
All checks were successful
Deploy on push / deploy (push) Successful in 41s
2024-05-18 11:28:38 +03:00
4e97a22642 search-service-improve
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 11:22:13 +03:00
a749ade30b fmt
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 11:00:46 +03:00
3d90d9c81d search-cached-non-empty-only-fix 2024-05-18 11:00:01 +03:00
102eae1c98 sentry-on
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-09 08:32:25 +03:00
75cd8b9f71 get-author-ref
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-09 00:02:59 +03:00
a18ad12ff7 lesslog 2024-05-08 23:57:31 +03:00
f7fdd6fd76 sentry-off
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-08 23:48:11 +03:00
80685fd1cc follows-result-update
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-08 23:42:57 +03:00
69409f92e1 redis-set-set-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-07 21:56:07 +03:00
cfcb858bba new-profile-followers 2024-05-07 19:17:18 +03:00
8618e1eff7 followers-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-07 00:10:54 +03:00
e0a5c654d8 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-07 00:06:31 +03:00
e61db5d6e5 logs-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-07 00:03:58 +03:00
fac25ab4f4 followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-07 00:02:15 +03:00
ceeeb23c26 delete-reaction-update-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-06 22:44:30 +03:00
ce90fedacb delete-reaction-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-06 22:38:19 +03:00
0179c69b82 delete-reaction-debug 2024-05-06 22:37:38 +03:00
dac79b53ca api-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 21:18:19 +03:00
b372fd81d5 drafts-api-common
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 21:14:17 +03:00
205019ce39 handle-no-author-profile
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 21:01:10 +03:00
9c4d88c8fd handle-noauthor 2024-05-06 20:59:56 +03:00
dd2becaab2 cache_by_id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 20:41:34 +03:00
658c8c7702 followers-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 20:30:49 +03:00
809b980145 load-authors-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 20:04:50 +03:00
1185880f8e authors-all-fix
All checks were successful
Deploy on push / deploy (push) Successful in 37s
2024-05-06 20:00:26 +03:00
499ecb501d load-authors-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 19:40:51 +03:00
b3e7d24d9d shouts-counter-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 19:27:51 +03:00
78b12d4f33 followers-cache-debug-2 2024-05-06 12:46:42 +03:00
5caa2d1f8c followers-cache-debug
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-06 12:41:53 +03:00
c46f264c4b followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 12:38:39 +03:00
f6b21174bf unique-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 37s
2024-05-06 11:27:15 +03:00
d15b36a0f1 unique-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-06 11:23:14 +03:00
232cdbfad8 docker-check-logger
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 10:58:31 +03:00
55e28162fe subprocess-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 10:53:27 +03:00
49eec2de46 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 30s
2024-05-06 10:30:05 +03:00
52f5a4e813 followers-cache-fix 2024-05-06 10:29:50 +03:00
a5d99fa517 cache-follower-fix 2024-05-06 10:25:09 +03:00
Stepan Vladovskiy
2a08e6204e feat: sv - nginx sigil with /upload
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 04:20:18 -03:00
Stepan Vladovskiy
ab6dcde170 feat: sv - nginx sigil with /upload and now / on the end of proxy-pass 2024-05-06 04:18:24 -03:00
Stepan Vladovskiy
bf9e571cd8 feat: sv - nginx sigil with /upload 2024-05-06 04:07:07 -03:00
Stepan Vladovskiy
e38df1f9d5 debug: sv - nginx sigil old fasch alot of /// around all 2024-05-06 03:28:31 -03:00
Stepan Vladovskiy
449f63f540 debug: sv - back configs in nginx.sigil 2024-05-06 02:56:40 -03:00
Stepan Vladovskiy
22106ad657 debug: sv - trying different configs in nginx.sigil 2024-05-06 02:54:58 -03:00
Stepan Vladovskiy
4c274eee2e debug: /upload instead of /upload/ in sigil 2024-05-06 02:04:55 -03:00
Stepan Vladovskiy
b3caccb786 debug: sv - sigil style for uploader without / on the end of the proxy_pass 2024-05-05 23:11:31 -03:00
Stepan Vladovskiy
fc033734f5 debug: with proxy-pass in nginx to uploader
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-05 16:55:21 -03:00
Stepan Vladovskiy
2fb21847c3 debug: nginx.conf.sigil right names
Some checks failed
Deploy on push / deploy (push) Failing after 22s
2024-05-05 16:48:11 -03:00
Stepan Vladovskiy
e4d83d35eb debug: without check uploader in server.py
Some checks failed
Deploy on push / deploy (push) Failing after 22s
2024-05-05 16:44:09 -03:00
Stepan Vladovskiy
98d7c522fb debug: run check with dokku not cocker for find uploader
Some checks failed
Deploy on push / deploy (push) Failing after 32s
2024-05-05 16:41:39 -03:00
Stepan Vladovskiy
e6f88ffff0 feat: run check with dokku not cocker for find uploader
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-05-05 16:40:33 -03:00
Stepan Vladovskiy
d26f8c4903 feat: gitea workflow if uploader then stop installer
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-05-05 16:35:19 -03:00
Stepan Vladovskiy
89021ea018 feat: gitea workflow with Uploader check is runing, plus in server.py is checker too 2024-05-05 16:35:19 -03:00
0d87d3d889 unique-follows-debug
All checks were successful
Deploy on push / deploy (push) Successful in 35s
2024-05-05 21:38:59 +03:00
2b5fb704ba follow/unfollow-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-05 21:04:38 +03:00
13d144f838 cant-follow-catch-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-05 20:44:57 +03:00
ac5674d18f following-cache-anyway-found
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-05 20:17:07 +03:00
3ab42ecb72 following-cache-anyway-found 2024-05-05 20:16:45 +03:00
cfe9ac1005 follow-fi
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-05-05 19:49:07 +03:00
e50a6358a8 merged
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-05 18:46:46 +03:00
f6cb7e18d1 cache-updates-fix 2024-05-05 18:46:16 +03:00
Stepan Vladovskiy
526d2c3e4e feat: sv - in nginx client_max_body_size=100M, solution for upload large files
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-05 03:15:47 -03:00
c9205a698f typo-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-05 00:00:58 +03:00
dc791d4e7a same-rating-fox
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-04 23:48:55 +03:00
b2f7b06a93 topic caching
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-03 14:12:57 +03:00
db33410675 lesslog
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-02 09:12:47 +03:00
6c58f09402 feed-filter-fix-2 2024-05-01 05:08:54 +03:00
79f21387a5 feed-filter-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-01 05:02:35 +03:00
dc9c66c00f follow-fmr
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-05-01 04:01:21 +03:00
c68322e550 follow-fix 2024-05-01 04:00:54 +03:00
88de00706d follow-fix 2024-05-01 04:00:37 +03:00
658a2400c4 debug-4 2024-05-01 03:46:16 +03:00
12e42f2023 debug-2 2024-05-01 03:38:49 +03:00
f1bda441b4 debug-2 2024-05-01 03:35:31 +03:00
026bad95e2 debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-05-01 03:29:25 +03:00
831684922a get-my-shout-dbg 2024-05-01 03:09:54 +03:00
435e97ab04 get-my-shout-debug 2024-05-01 02:46:19 +03:00
883e98c3d3 get-my-shout-debug 2024-05-01 02:42:25 +03:00
94bf54b192 get-my-shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-01 02:38:17 +03:00
9aacb75e84 auth-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-30 16:18:50 +03:00
61c7f5a0dc drafts-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-30 14:10:01 +03:00
a7f163009e fastify-load-authors-2
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2024-04-30 12:35:51 +03:00
ab6ef76a34 fastify-load-authors
All checks were successful
Deploy on push / deploy (push) Successful in 39s
2024-04-30 12:33:41 +03:00
a992941aef logs-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-27 13:30:19 +03:00
9dc986b08c start-date-views-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-27 01:51:45 +03:00
653b18041e str-time
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-27 01:48:15 +03:00
868b2ba16a removed-search-vector
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-27 01:43:42 +03:00
2e4d70db28 viwed-fix+fmt+outerjoin-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m1s
2024-04-27 01:41:47 +03:00
89956d6240 get-shout-debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-26 11:43:22 +03:00
7f1794891c cache-follower-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-26 11:21:00 +03:00
ee24f2f1db my-shout-not-published
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-26 11:06:13 +03:00
cfed40ddd9 not-error-expired-token
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-26 07:21:03 +03:00
899016907c shouts-load-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-25 14:06:21 +03:00
54e82f99eb shouts-load-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-25 14:03:30 +03:00
605d60f126 featured-filter-fix 2024-04-25 14:01:16 +03:00
b1bd9a4829 feed-featured-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-04-25 12:19:42 +03:00
54766ffa42 feed-featured-fix 2024-04-25 12:08:20 +03:00
27d5272032 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2024-04-25 12:07:30 +03:00
e68196ce0b counters-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-25 11:47:13 +03:00
c4148254ed get-topic-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-25 11:25:39 +03:00
1e8b6b156b get-topic-fix-4 2024-04-25 11:25:23 +03:00
b1d459d7fa get-topic-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-25 11:24:16 +03:00
961d86c8f9 get-topic-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-25 11:22:18 +03:00
1b22276237 get-topic-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-25 11:20:57 +03:00
0b185c1c2d fmt
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-24 10:42:33 +03:00
5dbb0ccb12 region-cache-fix 2024-04-24 10:42:09 +03:00
e90d5aefb2 stat-resolver-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-24 10:30:32 +03:00
c1a66500e5 sort-order-reimplement-syntetic-stat
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-23 16:05:27 +03:00
54980faf49 select-from-fix
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-04-23 15:46:59 +03:00
83204d1dff left-join-fix 2024-04-23 15:27:19 +03:00
870d5b62dc isort
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-23 15:15:18 +03:00
0b4c0faa79 stat-fix 2024-04-23 15:14:59 +03:00
f64d0a09a8 color-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-23 14:41:19 +03:00
8436bc4305 separate-stat-query
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-23 14:31:34 +03:00
8e130027f0 auth-request-data-log
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-22 12:48:57 +03:00
b7d82d9cc5 refactored-author-on-login-required
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-19 18:22:07 +03:00
0ca6676474 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2024-04-18 12:34:32 +03:00
1a685e458d following-fix 2024-04-18 12:34:04 +03:00
47bc3adb69 fixes 2024-04-17 20:30:05 +03:00
372185e336 webhook-fix 2024-04-17 19:54:38 +03:00
519f5e4624 use-email-login-webhook 2024-04-17 19:20:35 +03:00
c25d7e3ab6 fmt 2024-04-17 18:32:23 +03:00
937b154c6b family-name-fix 2024-04-17 18:31:11 +03:00
Stepan Vladovskiy
994cd05d85 feat: no fore push dev to staging
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-11 16:14:26 -03:00
Stepan Vladovskiy
52280c29ea feat: fore push dev to staging
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-11 16:12:11 -03:00
Igor Lobanov
c3a482614e robo migrate script 2024-01-08 09:20:29 +01:00
Igor Lobanov
67576d0a5b only published in random topic shouts (#114) 2023-12-21 11:49:28 +01:00
Igor Lobanov
f395832d32 random topic shouts query, published date filter in random tops (#113)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-12-21 00:53:53 +01:00
Igor Lobanov
ff834987d4 unrated shouts query fix (#112)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-12-18 14:38:45 +01:00
Igor Lobanov
e23e379102 unrated shouts query update (#111)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-12-16 14:47:58 +01:00
Igor Lobanov
f5a3e273a6 unrated shouts query (#110)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-12-14 19:40:12 +01:00
Igor Lobanov
f9bc1d67ae random top articles query (#109)
* loadRandomTopShouts

* minor fixes

---------

Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-12-13 23:56:01 +01:00
Ilya Y
b63b6e7ee7 timezones fixed once again (#107)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-11-14 14:56:41 +03:00
Igor Lobanov
34e18317a2 google oauth fix 2023-11-08 19:24:38 +01:00
Igor Lobanov
a2b47dab66 google oauth fix 2023-11-08 19:19:20 +01:00
Ilya Y
0e9f0b0682 Feature/google oauth (#106)
google oauth
---------

Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-11-08 21:12:55 +03:00
Ilya Y
2679b2c873 debug code removed (#105)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-11-06 12:03:04 +03:00
Ilya Y
0da4e110c1 test article (#104)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-11-04 19:44:58 +03:00
Ilya Y
21316187e0 Fix/deploy fix (#103)
* deploy fix
---------

Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-10-31 18:48:00 +03:00
Kosta
7f22966b41 Merge pull request #102 from Discours/feature/my_feed
my feed query fixed
2023-10-31 15:54:10 +02:00
Igor Lobanov
34d04e4240 my feed query fixed 2023-10-31 14:52:58 +01:00
Kosta
d7dd79336b Merge pull request #101 from Discours/feature/lint3
configured isort, black, flake8
2023-10-30 23:40:32 +02:00
Igor Lobanov
eaca3d613d build fix 2023-10-30 22:34:59 +01:00
Igor Lobanov
756a80151a build fix 2023-10-30 22:32:04 +01:00
Igor Lobanov
4395e3a72d build fix 2023-10-30 22:09:04 +01:00
Igor Lobanov
441bcc1e90 configured isort, black, flake8 2023-10-30 22:00:55 +01:00
Kosta
17c29c7f4f Merge pull request #100 from Discours/revert-99-feature/lint
Revert "Feature/lint"
2023-10-27 00:09:00 +03:00
Kosta
b142949805 Revert "Feature/lint" 2023-10-27 00:07:35 +03:00
Kosta
05136699ee Merge pull request #99 from Discours/feature/lint
Feature/lint
2023-10-26 23:48:25 +03:00
Igor Lobanov
c2cc428abe lint 2023-10-26 22:38:31 +02:00
Igor Lobanov
1c49780cd4 lint wip 2023-10-26 20:05:32 +02:00
Igor Lobanov
54457cb9c5 lint wip 2023-10-26 19:57:17 +02:00
Igor Lobanov
2c524279f6 lint wip 2023-10-26 19:56:42 +02:00
44bd146bdf Merge pull request #97 from Discours/feature/thumbor2
Feature/thumbor2
2023-10-26 00:55:36 +03:00
Igor Lobanov
9e3306fc3d Merge remote-tracking branch 'origin/main' into feature/thumbor2 2023-10-25 23:48:51 +02:00
Igor Lobanov
3389c5ce20 https for CDN, separate dir for non-image files 2023-10-25 23:48:16 +02:00
b71210a644 Merge pull request #96 from Discours/feature/thumbor
new thumbor (assets.discours.io -> images.discours.io), new visibilit…
2023-10-25 23:50:59 +03:00
Igor Lobanov
c8a951594c new thumbor (assets.discours.io -> images.discours.io), new visibility for non published articles (authors -> community) 2023-10-25 22:38:22 +02:00
Ilya Y
da8ee9b9c3 signIn/getSession optimization (#95)
Co-authored-by: Igor Lobanov <igor.lobanov@onetwotrip.com>
2023-10-19 17:54:38 +03:00
96 changed files with 9348 additions and 3394 deletions

.cursorignore (new file, 1 line)

@@ -0,0 +1 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)

View File

@@ -29,7 +29,7 @@ jobs:
         if: github.ref == 'refs/heads/dev'
         uses: dokku/github-action@master
         with:
-          branch: 'main'
-          git_remote_url: 'ssh://dokku@staging.discours.io:22/core'
+          branch: 'dev'
+          force: true
+          git_remote_url: 'ssh://dokku@v2.discours.io:22/core'
           ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
-          git_push_flags: '--force'

View File

@@ -17,11 +17,11 @@ jobs:
       - uses: webfactory/ssh-agent@v0.8.0
         with:
-          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
+          ssh-private-key: ${{ github.action.secrets.SSH_PRIVATE_KEY }}
       - name: Push to dokku
         env:
-          HOST_KEY: ${{ secrets.HOST_KEY }}
+          HOST_KEY: ${{ github.action.secrets.HOST_KEY }}
         run: |
           echo $HOST_KEY > ~/.ssh/known_hosts
           git remote add dokku dokku@v2.discours.io:discoursio-api

.gitignore (11 lines changed)

@@ -151,3 +151,14 @@ poetry.lock
 .ruff_cache
 .jj
 .zed
+dokku_config
+*.db
+*.sqlite3
+views.json
+*.pem
+*.key
+*.crt
+*cache.json
+.cursor

View File

@@ -12,7 +12,7 @@ repos:
       - id: check-merge-conflict
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.5
+    rev: v0.4.7
     hooks:
       - id: ruff
         args: [--fix]

CHANGELOG.md (new file, 301 lines)

@@ -0,0 +1,301 @@
#### [0.4.15] - 2025-03-22
- Upgraded caching system described `docs/caching.md`
- Module `cache/memorycache.py` removed
- Enhanced caching system with backward compatibility:
- Unified cache key generation with support for existing naming patterns
- Improved Redis operation function with better error handling
- Updated precache module to use consistent Redis interface
- Integrated revalidator with the invalidation system for better performance
- Added comprehensive documentation for the caching system
- Enhanced cached_query to support template-based cache keys
- Standardized error handling across all cache operations
- Optimized cache invalidation system:
- Added targeted invalidation for individual entities (authors, topics)
- Improved revalidation manager with individual object processing
- Implemented batched processing for high-volume invalidations
- Reduced Redis operations by using precise key invalidation instead of prefix-based wipes
- Added special handling for slug changes in topics
- Unified caching system for all models:
- Implemented abstract functions `cache_data`, `get_cached_data` and `invalidate_cache_by_prefix`
- Added `cached_query` function for unified approach to query caching
- Updated resolvers `author.py` and `topic.py` to use the new caching API
- Improved logging for cache operations to simplify debugging
- Optimized Redis memory usage through key format unification
- Improved caching and sorting in Topic and Author modules:
- Added support for dictionary sorting parameters in `by` for both modules
- Optimized cache key generation for stable behavior with various parameters
- Enhanced sorting logic with direction support and arbitrary fields
- Added `by` parameter support in the API for getting topics by community
- Performance optimizations for author-related queries:
- Added SQLAlchemy-managed indexes to `Author`, `AuthorFollower`, `AuthorRating` and `AuthorBookmark` models
- Implemented persistent Redis caching for author queries without TTL (invalidated only on changes)
- Optimized author retrieval with separate endpoints:
- `get_authors_all` - returns all non-deleted authors without statistics
- `get_authors_paginated` - returns authors with statistics and pagination support
- `load_authors_by` - optimized to use caching and efficient sorting
- Improved SQL queries with optimized JOIN conditions and efficient filtering
- Added pre-aggregation of statistics (shouts count, followers count) in single efficient queries
- Implemented robust cache invalidation on author updates
- Created necessary indexes for author lookups by user ID, slug, and timestamps
#### [0.4.14] - 2025-03-21
- Significant performance improvements for topic queries:
- Added database indexes to optimize JOIN operations
- Implemented persistent Redis caching for topic queries (no TTL, invalidated only on changes)
- Optimized topic retrieval with separate endpoints for different use cases:
- `get_topics_all` - returns all topics without statistics for lightweight listing
- `get_topics_paginated` - returns topics with statistics and pagination support
- `get_topics_by_community` - adds pagination and optimized filtering by community
- Added SQLAlchemy-managed indexes directly in ORM models for automatic schema maintenance
- Created `sync_indexes()` function for automatic index synchronization during app startup
- Reduced database load by pre-aggregating statistics in optimized SQL queries
- Added robust cache invalidation on topic create/update/delete operations
- Improved query optimization with proper JOIN conditions and specific partial indexes
#### [0.4.13] - 2025-03-20
- Fixed Topic objects serialization error in cache/memorycache.py
- Improved CustomJSONEncoder to support SQLAlchemy models with dict() method
- Enhanced error handling in cache_on_arguments decorator
- Modified `load_reactions_by` to include deleted reactions when `include_deleted=true` for proper comment tree building
- Fixed featured/unfeatured logic in reaction processing:
- Dislike reactions now properly take precedence over likes
- Featured status now requires more than 4 likes from users with featured articles
- Removed unnecessary filters for deleted reactions since rating reactions are physically deleted
- Author's featured status now based on having non-deleted articles with featured_at
#### [0.4.12] - 2025-03-19
- `delete_reaction` detects comments and uses `deleted_at` update
- `check_to_unfeature` etc. update
- dogpile dep in `services/memorycache.py` optimized
#### [0.4.11] - 2025-02-12
- `create_draft` resolver requires draft_id fixed
- `create_draft` resolver defaults body and title fields to empty string
#### [0.4.9] - 2025-02-09
- `Shout.draft` field added
- `Draft` entity added
- `create_draft`, `update_draft`, `delete_draft` mutations and resolvers added
- `create_shout`, `update_shout`, `delete_shout` mutations removed from GraphQL API
- `load_drafts` resolver implemented
- `publish_` and `unpublish_` mutations and resolvers added
- `create_`, `update_`, `delete_` mutations and resolvers added for `Draft` entity
- tests with pytest for original auth, shouts, drafts
- `Dockerfile` and `pyproject.toml` removed for the simplicity: `Procfile` and `requirements.txt`
#### [0.4.8] - 2025-02-03
- `Reaction.deleted_at` filter on `update_reaction` resolver added
- `triggers` module updated with `after_shout_handler`, `after_reaction_handler` for cache revalidation
- `after_shout_handler`, `after_reaction_handler` now also handle `deleted_at` field
- `get_cached_topic_followers` fixed
- `get_my_rates_comments` fixed
#### [0.4.7]
- `get_my_rates_shouts` resolver added with:
- `shout_id` and `my_rate` fields in response
- filters by `Reaction.deleted_at.is_(None)`
- filters by `Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])`
- filters by `Reaction.reply_to.is_(None)`
- uses `local_session()` context manager
- returns empty list on errors
- SQLAlchemy syntax updated:
- `select()` statement fixed for newer versions
- `Reaction` model direct selection instead of labeled columns
- proper row access with `row[0].shout` and `row[0].kind`
- GraphQL resolver fixes:
- added root parameter `_` to match schema
- proper async/await handling with `@login_required`
- error logging added via `logger.error()`
#### [0.4.6]
- login_accepted decorator added
- `docs` added
- optimized and unified `load_shouts_*` resolvers with `LoadShoutsOptions`
- `load_shouts_bookmarked` resolver fixed
- resolvers updates:
- new resolvers group `feed`
- `load_shouts_authored_by` resolver added
- `load_shouts_with_topic` resolver added
- `load_shouts_followed` removed
- `load_shouts_random_topic` removed
- `get_topics_random` removed
- model updates:
- `ShoutsOrderBy` enum added
- `Shout.main_topic` from `ShoutTopic.main` as `Topic` type output
- `Shout.created_by` as `Author` type output
#### [0.4.5]
- `bookmark_shout` mutation resolver added
- `load_shouts_bookmarked` resolver added
- `get_communities_by_author` resolver added
- `get_communities_all` resolver fixed
- `Community` stats in orm
- `Community` CUDL resolvers added
- `Reaction` filter by `Reaction.kind`s
- `ReactionSort` enum added
- `CommunityFollowerRole` enum added
- `InviteStatus` enum added
- `Topic.parents` ids added
- `get_shout` resolver accepts slug or shout_id
#### [0.4.4]
- `followers_stat` removed for shout
- sqlite3 support added
- `rating_stat` and `commented_stat` fixes
#### [0.4.3]
- cache reimplemented
- load shouts queries unified
- `followers_stat` removed from shout
#### [0.4.2]
- reactions load resolvers separated for ratings (no stats) and comments
- reactions stats improved
- `load_comment_ratings` separate resolver
#### [0.4.1]
- follow/unfollow logic updated and unified with cache
#### [0.4.0]
- chore: version migrator synced
- feat: precache_data on start
- fix: store id list for following cache data
- fix: shouts stat filter out deleted
#### [0.3.5]
- cache isolated to services
- topics followers and authors cached
- redis stores lists of ids
#### [0.3.4]
- `load_authors_by` from cache
#### [0.3.3]
- feat: sentry integration enabled with glitchtip
- fix: reindex on update shout
- packages upgrade, isort
- separated stats queries for author and topic
- fix: feed featured filter
- fts search removed
#### [0.3.2]
- redis cache for what author follows
- redis cache for followers
- graphql add query: get topic followers
#### [0.3.1]
- enabling sentry
- long query log report added
- editor fixes
- authors links cannot be updated by `update_shout` anymore
#### [0.3.0]
- `Shout.featured_at` timestamp of the frontpage featuring event
- added proposal accepting logics
- schema modulized
- Shout.visibility removed
#### [0.2.22]
- added precommit hook
- fmt
- granian asgi
#### [0.2.21]
- fix: rating logix
- fix: `load_top_random_shouts`
- resolvers: `add_stat_*` refactored
- services: use google analytics
- services: minor fixes search
#### [0.2.20]
- services: ackee removed
- services: following manager fixed
- services: import views.json
#### [0.2.19]
- fix: adding `author` role
- fix: stripping `user_id` in auth connector
#### [0.2.18]
- schema: added `Shout.seo` string field
- resolvers: added `/new-author` webhook resolver
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is `.author`
- resolvers: `get_authors_all` and `load_authors_by`
- services: auth connector upgraded
#### [0.2.17]
- schema: enum types workaround, `ReactionKind`, `InviteStatus`, `ShoutVisibility`
- schema: `Shout.created_by`, `Shout.updated_by`
- schema: `Shout.authors` can be empty
- resolvers: optimized `reacted_shouts_updates` query
#### [0.2.16]
- resolvers: collab inviting logics
- resolvers: queries and mutations revision and renaming
- resolvers: `delete_topic(slug)` implemented
- resolvers: added `get_shout_followers`
- resolvers: `load_shouts_by` filters implemented
- orm: invite entity
- schema: `Reaction.range` -> `Reaction.quote`
- filters: `time_ago` -> `after`
- httpx -> aiohttp
#### [0.2.15]
- schema: `Shout.created_by` removed
- schema: `Shout.mainTopic` removed
- services: cached elasticsearch connector
- services: auth is using `user_id` from authorizer
- resolvers: `notify_*` usage fixes
- resolvers: `getAuthor` now accepts slug, `user_id` or `author_id`
- resolvers: login_required usage fixes
#### [0.2.14]
- schema: some fixes from migrator
- schema: `.days` -> `.time_ago`
- schema: `excludeLayout` + `layout` in filters -> `layouts`
- services: db access simpler, no contextmanager
- services: removed Base.create() method
- services: rediscache updated
- resolvers: get_reacted_shouts_updates as followedReactions query
#### [0.2.13]
- services: db context manager
- services: `ViewedStorage` fixes
- services: views are not stored in core db anymore
- schema: snake case in model fields names
- schema: no DateTime scalar
- resolvers: `get_my_feed` comments filter reactions body.is_not('')
- resolvers: `get_my_feed` query fix
- resolvers: `LoadReactionsBy.days` -> `LoadReactionsBy.time_ago`
- resolvers: `LoadShoutsBy.days` -> `LoadShoutsBy.time_ago`
#### [0.2.12]
- `Author.userpic` -> `Author.pic`
- `CommunityFollower.role` is string now
- `Author.user` is string now
#### [0.2.11]
- redis interface updated
- `viewed` interface updated
- `presence` interface updated
- notify on create, update, delete for reaction and shout
- notify on follow / unfollow author
- use pyproject
- devmode fixed
#### [0.2.10]
- community resolvers connected
#### [0.2.9]
- starlette is back, aiohttp removed
- aioredis replaced with aredis
#### [0.2.8]
- refactored
#### [0.2.7]
- `loadFollowedReactions` now with `
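
The [0.4.15] entry above describes a unified caching layer built around a `cached_query` helper: a Redis key is produced from a template plus the query parameters, the cached value is returned when present, and otherwise the query runs and its result is stored without a TTL (invalidation happens only when the underlying entity changes). The exact signatures live in the repository's cache module and are not part of this diff; the sketch below is a minimal illustration of that pattern, assuming a `redis.asyncio` client and `orjson` serialization.

```python
# Minimal sketch of the cached_query pattern from CHANGELOG [0.4.15].
# Helper names and signatures are assumptions; the project's cache module may differ.
import orjson
from redis.asyncio import Redis

redis = Redis.from_url("redis://localhost:6379")


async def cached_query(key_template: str, fetch_fn, **params):
    """Return the cached result for a templated key, or compute and cache it."""
    key = key_template.format(**params)           # e.g. "topic:{slug}" -> "topic:culture"
    cached = await redis.get(key)
    if cached is not None:
        return orjson.loads(cached)
    result = await fetch_fn(**params)             # hit the database only on a cache miss
    await redis.set(key, orjson.dumps(result))    # no TTL: invalidated only on changes
    return result


async def invalidate_cache_by_prefix(prefix: str) -> None:
    """Drop all keys under a prefix; targeted per-entity deletes are preferred."""
    async for key in redis.scan_iter(f"{prefix}:*"):
        await redis.delete(key)
```

In the changelog's terms, `cache_data` and `get_cached_data` would be thin wrappers over the same `set`/`get` calls, and the targeted invalidation path deletes exact per-author or per-topic keys instead of scanning a whole prefix.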
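
The [0.4.7] entry above also spells out the filters used by the `get_my_rates_shouts` resolver (non-deleted reactions, LIKE/DISLIKE kinds only, no replies, `local_session()` context manager, empty list on errors). A hedged sketch of that shape follows; the ORM import path and the `created_by`/`shout` column names are assumptions drawn from the changelog wording, not from code shown in this diff.

```python
# Sketch of the get_my_rates_shouts resolver described in CHANGELOG [0.4.7].
# Only the filters are quoted from the changelog; module paths and some columns are assumed.
import logging

from sqlalchemy import select

from auth.authenticate import login_required
from orm.reaction import Reaction, ReactionKind  # assumed import path
from services.db import local_session

logger = logging.getLogger(__name__)


@login_required
async def get_my_rates_shouts(_, info, shouts: list[int]):
    author_id = info.context["request"].auth.user_id
    try:
        with local_session() as session:
            stmt = select(Reaction).where(
                Reaction.created_by == author_id,  # assumed: restrict to the current author
                Reaction.shout.in_(shouts),
                Reaction.deleted_at.is_(None),
                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                Reaction.reply_to.is_(None),
            )
            rows = session.execute(stmt).all()
            # row[0] is the Reaction instance, as noted in the changelog
            return [{"shout_id": row[0].shout, "my_rate": row[0].kind} for row in rows]
    except Exception as exc:
        logger.error(f"get_my_rates_shouts failed: {exc}")
        return []
```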

View File

@@ -1,145 +0,0 @@
[0.3.3]
- feat: sentry integration enabled with glitchtip
- fix: reindex on update shout
- packages upgrade, isort
[0.3.2]
- redis cache for what author follows
- redis cache for followers
- graphql add query: get topic followers
[0.3.1]
- enabling sentry
- long query log report added
- editor fixes
- authors links cannot be updated by update_shout anymore
[0.3.0]
- Shout.featured_at timestamp of the frontpage featuring event
- added proposal accepting logics
- schema modulized
- Shout.visibility removed
[0.2.22]
- added precommit hook
- fmt
- granian asgi
[0.2.21]
- fix: rating logix
- fix: load_top_random_shouts
- resolvers: add_stat_* refactored
- services: use google analytics
- services: minor fixes search
[0.2.20]
- services: ackee removed
- services: following manager fixed
- services: import views.json
[0.2.19]
- fix: adding 'author' role
- fix: stripping user_id in auth connector
[0.2.18]
- schema: added Shout.seo string field
- resolvers: added /new-author webhook resolver
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is .author
- resolvers: get_authors_all and load_authors_by
- services: auth connector upgraded
[0.2.17]
- schema: enum types workaround, ReactionKind, InviteStatus, ShoutVisibility
- schema: Shout.created_by, Shout.updated_by
- schema: Shout.authors can be empty
- resovlers: optimized reacted shouts updates query
[0.2.16]
- resolvers: collab inviting logics
- resolvers: queries and mutations revision and renaming
- resolvers: delete_topic(slug) implemented
- resolvers: added get_shout_followers
- resolvers: load_shouts_by filters implemented
- orm: invite entity
- schema: Reaction.range -> Reaction.quote
- filters: time_ago -> after
- httpx -> aiohttp
[0.2.15]
- schema: Shout.created_by removed
- schema: Shout.mainTopic removed
- services: cached elasticsearch connector
- services: auth is using user_id from authorizer
- resolvers: notify_* usage fixes
- resolvers: getAuthor now accepts slug, user_id or author_id
- resolvers: login_required usage fixes
[0.2.14]
- schema: some fixes from migrator
- schema: .days -> .time_ago
- schema: excludeLayout + layout in filters -> layouts
- services: db access simpler, no contextmanager
- services: removed Base.create() method
- services: rediscache updated
- resolvers: get_reacted_shouts_updates as followedReactions query
[0.2.13]
- services: db context manager
- services: ViewedStorage fixes
- services: views are not stored in core db anymore
- schema: snake case in model fields names
- schema: no DateTime scalar
- resolvers: get_my_feed comments filter reactions body.is_not('')
- resolvers: get_my_feed query fix
- resolvers: LoadReactionsBy.days -> LoadReactionsBy.time_ago
- resolvers: LoadShoutsBy.days -> LoadShoutsBy.time_ago
[0.2.12]
- Author.userpic -> Author.pic
- CommunityAuthor.role is string now
- Author.user is string now
[0.2.11]
- redis interface updated
- viewed interface updated
- presence interface updated
- notify on create, update, delete for reaction and shout
- notify on follow / unfollow author
- use pyproject
- devmode fixed
[0.2.10]
- community resolvers connected
[0.2.9]
- starlette is back, aiohttp removed
- aioredis replaced with aredis
[0.2.8]
- refactored
[0.2.7]
- loadFollowedReactions now with login_required
- notifier service api draft
- added shout visibility kind in schema
- community isolated from author in orm
[0.2.6]
- redis connection pool
- auth context fixes
- communities orm, resolvers, schema
[0.2.5]
- restructured
- all users have their profiles as authors in core
- gittask, inbox and auth logics removed
- settings moved to base and now smaller
- new outside auth schema
- removed gittask, auth, inbox, migration

View File

@@ -1,25 +1,18 @@
-FROM python:alpine
-# Update package lists and install necessary dependencies
-RUN apk update && \
-    apk add --no-cache build-base icu-data-full curl python3-dev musl-dev && \
-    curl -sSL https://install.python-poetry.org | python
-# Set working directory
+FROM python:slim
+RUN apt-get update && apt-get install -y \
+    postgresql-client \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
 WORKDIR /app
-# Copy only the pyproject.toml file initially
-COPY pyproject.toml /app/
-# Install poetry and dependencies
-RUN pip install poetry && \
-    poetry config virtualenvs.create false && \
-    poetry install --no-root --only main
-# Copy the rest of the files
-COPY . /app
-# Expose the port
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+COPY . .
 EXPOSE 8000
-CMD ["python", "server.py"]
+CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"]

README.md (114 lines changed)

@@ -1,56 +1,102 @@
-## Tech stack
-
-- sqlalchemy
-- redis
-- ariadne
-- starlette
-- granian
-
-## Local development
-
-Prepare the dependencies
-
-osx:
-```
-brew install redis nginx postgres
-brew services start redis
-```
-
-debian/ubuntu:
-```
-apt install redis nginx
-```
-
-Then start postgres, redis and our API server:
-
-```shell
-mkdir .venv
-python3.12 -m venv .venv
-poetry env use .venv/bin/python3.12
-poetry update
-poetry run server.py
-```
-
-## Connected services
-
-Inter-server communication uses separate logic; the `services/*` folder contains adapters for the database, `redis`, caching, and GraphQL request clients.
-
-### auth.py
-
-Set the `WEBHOOK_SECRET` environment variable to accept requests at `/new-author` from the [authorization server](https://dev.discours.io/devstack/authorizer). The event is expected when a new user is created. For authorized queries and mutations the frontend adds an authorization token to the request in the `Authorization` header.
-
-### viewed.py
-
-Set the `GOOGLE_KEYFILE_PATH` and `GOOGLE_PROPERTY_ID` environment variables to fetch data from [Google Analytics](https://developers.google.com/analytics?hl=ru).
-
-### search.py
-
-Returns cached results of user search queries from ElasticSearch with a `score`, merged with database queries; requested through the GraphQL API `load_shouts_search`. Requires `ELASTIC_HOST`, `ELASTIC_PORT`, `ELASTIC_USER` and `ELASTIC_PASSWORD`.
-
-### notify.py
-
-Sends notifications over Redis PubSub channels, following the data structure owned by the [notification service](https://dev.discours.io/discours.io/notifier).
-
-### unread.py
-
-The unread message counter is obtained via a Redis query against the data of the [messaging service](https://dev.discours.io/discours.io/inbox).
+# GraphQL API Backend
+
+Backend service providing GraphQL API for content management system with reactions, ratings and comments.
+
+## Core Features
+
+### Shouts (Posts)
+- CRUD operations via GraphQL mutations
+- Rich filtering and sorting options
+- Support for multiple authors and topics
+- Rating system with likes/dislikes
+- Comments and nested replies
+- Bookmarks and following
+
+### Reactions System
+- `ReactionKind` types: LIKE, DISLIKE, COMMENT
+- Rating calculation for shouts and comments
+- User-specific reaction tracking
+- Reaction stats and aggregations
+- Nested comments support
+
+### Authors & Topics
+- Author profiles with stats
+- Topic categorization and hierarchy
+- Following system for authors/topics
+- Activity tracking and stats
+- Community features
+
+## Tech Stack
+
+- **(Python)[https://www.python.org/]** 3.12+
+- **GraphQL** with [Ariadne](https://ariadnegraphql.org/)
+- **(SQLAlchemy)[https://docs.sqlalchemy.org/en/20/orm/]**
+- **(PostgreSQL)[https://www.postgresql.org/]/(SQLite)[https://www.sqlite.org/]** support
+- **(Starlette)[https://www.starlette.io/]** for ASGI server
+- **(Redis)[https://redis.io/]** for caching
+
+## Development
+
+### Prepare environment:
+
+```shell
+mkdir .venv
+python3.12 -m venv venv
+source venv/bin/activate
+```
+
+### Run server
+
+First, certificates are required to run the server.
+
+```shell
+mkcert -install
+mkcert localhost
+```
+
+Then, run the server:
+
+```shell
+python server.py dev
+```
+
+### Useful Commands
+
+```shell
+# Linting and import sorting
+ruff check . --fix --select I
+
+# Code formatting
+ruff format . --line-length=120
+
+# Run tests
+pytest
+
+# Type checking
+mypy .
+```
+
+### Code Style
+
+We use:
+- Ruff for linting and import sorting
+- Line length: 120 characters
+- Python type hints
+- Docstrings for public methods
+
+### GraphQL Development
+
+Test queries in GraphQL Playground at `http://localhost:8000`:
+
+```graphql
+# Example query
+query GetShout($slug: String) {
+  get_shout(slug: $slug) {
+    id
+    title
+    main_author {
+      name
+    }
+  }
+}
+```
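
The Playground query above can also be sent programmatically. Below is a small Python sketch using only the standard library; it assumes the GraphQL endpoint is served at the same URL as the Playground (`http://localhost:8000`), and the slug is illustrative.

```python
# Post the README's GetShout example query to the local GraphQL endpoint.
# Assumption: the endpoint shares the Playground URL; the slug below is made up.
import json
import urllib.request

QUERY = """
query GetShout($slug: String) {
  get_shout(slug: $slug) {
    id
    title
    main_author {
      name
    }
  }
}
"""


def get_shout(slug: str) -> dict:
    payload = json.dumps({"query": QUERY, "variables": {"slug": slug}}).encode()
    request = urllib.request.Request(
        "http://localhost:8000",
        data=payload,
        headers={"Content-Type": "application/json"},
        # Authorized queries and mutations would also carry the session token
        # in the Authorization header, as described in the auth.py section of the previous README.
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())


if __name__ == "__main__":
    print(get_shout("example-slug"))
```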

alembic/env.py (new file, 76 lines)

@@ -0,0 +1,76 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context
from services.db import Base
from settings import DB_URL
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# override DB_URL
config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
target_metadata = [Base.metadata]
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
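The environment above only wires `Base.metadata` and `DB_URL` into Alembic; migrations themselves are driven by the Alembic CLI or its Python API. A hedged sketch, assuming a standard `alembic.ini` in the repository root:

```python
from alembic import command
from alembic.config import Config

# Sketch: apply all migrations, then autogenerate a new revision from the ORM metadata.
# The alembic.ini path is an assumption; adjust it to wherever the config actually lives.
cfg = Config("alembic.ini")
command.upgrade(cfg, "head")
command.revision(cfg, message="describe your change", autogenerate=True)
```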

96
auth/authenticate.py Normal file
View File

@@ -0,0 +1,96 @@
from functools import wraps
from typing import Optional, Tuple
from graphql.type import GraphQLResolveInfo
from sqlalchemy.orm import exc, joinedload
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection
from auth.credentials import AuthCredentials, AuthUser
from auth.exceptions import OperationNotAllowed
from auth.tokenstorage import SessionToken
from auth.usermodel import Role, User
from services.db import local_session
from settings import SESSION_TOKEN_HEADER
class JWTAuthenticate(AuthenticationBackend):
async def authenticate(self, request: HTTPConnection) -> Optional[Tuple[AuthCredentials, AuthUser]]:
if SESSION_TOKEN_HEADER not in request.headers:
return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")
token = request.headers.get(SESSION_TOKEN_HEADER)
if not token:
print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(user_id=None, username="")
if len(token.split(".")) > 1:
payload = await SessionToken.verify(token)
with local_session() as session:
try:
user = (
session.query(User)
.options(
joinedload(User.roles).options(joinedload(Role.permissions)),
joinedload(User.ratings),
)
.filter(User.id == payload.user_id)
.one()
)
scopes = {} # TODO: integrate await user.get_permission()
return (
AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
AuthUser(user_id=user.id, username=""),
)
except exc.NoResultFound:
pass
return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(user_id=None, username="")
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
auth: AuthCredentials = info.context["request"].auth
if not auth or not auth.logged_in:
return {"error": "Please login first"}
return await func(parent, info, *args, **kwargs)
return wrap
def permission_required(resource, operation, func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
print("[auth.authenticate] permission_required for %r with info %r" % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
raise OperationNotAllowed(auth.error_message or "Please login")
# TODO: add actual check permission logix here
return await func(parent, info, *args, **kwargs)
return wrap
def login_accepted(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
auth: AuthCredentials = info.context["request"].auth
# Если есть авторизация, добавляем данные автора в контекст
if auth and auth.logged_in:
info.context["author"] = auth.author
info.context["user_id"] = auth.author.get("id")
else:
# Очищаем данные автора из контекста если авторизация отсутствует
info.context["author"] = None
info.context["user_id"] = None
return await func(parent, info, *args, **kwargs)
return wrap
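A short sketch of how the decorators above are meant to be applied to resolvers. The field names (`updateProfile`, `loadShout`) are hypothetical; `mutation` and `query` come from `services.schema`, as in the real resolvers.

```python
from auth.authenticate import login_accepted, login_required
from services.schema import mutation, query

@mutation.field("updateProfile")  # hypothetical field, for illustration only
@login_required
async def update_profile(_, info, profile: dict):
    # login_required guarantees auth.logged_in here
    auth = info.context["request"].auth
    return {"user_id": auth.user_id, "profile": profile}

@query.field("loadShout")  # hypothetical field, for illustration only
@login_accepted
async def load_shout(_, info, slug: str):
    # login_accepted fills author/user_id in the context, or sets them to None
    viewer_id = info.context.get("user_id")
    return {"slug": slug, "viewer": viewer_id}
```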

43
auth/credentials.py Normal file
View File

@@ -0,0 +1,43 @@
from typing import List, Optional, Text
from pydantic import BaseModel
# from base.exceptions import Unauthorized
class Permission(BaseModel):
name: Text
class AuthCredentials(BaseModel):
user_id: Optional[int] = None
scopes: Optional[dict] = {}
logged_in: bool = False
error_message: str = ""
@property
def is_admin(self):
# TODO: check admin logix
return True
async def permissions(self) -> List[Permission]:
if self.user_id is None:
# raise Unauthorized("Please login first")
return {"error": "Please login first"}
else:
# TODO: implement permissions logix
print(self.user_id)
return NotImplemented
class AuthUser(BaseModel):
user_id: Optional[int]
username: Optional[str]
@property
def is_authenticated(self) -> bool:
return self.user_id is not None
# @property
# def display_id(self) -> int:
# return self.user_id

30
auth/email.py Normal file
View File

@@ -0,0 +1,30 @@
import requests
from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or "discours.io")
noreply = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN or "discours.io")
lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"}
async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
try:
to = "%s <%s>" % (user.name, user.email)
if lang not in ["ru", "en"]:
lang = "ru"
subject = lang_subject.get(lang, lang_subject["en"])
template = template + "_" + lang
payload = {
"from": noreply,
"to": to,
"subject": subject,
"template": template,
"h:X-Mailgun-Variables": '{ "token": "%s" }' % token,
}
print("[auth.email] payload: %r" % payload)
# debug
# print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload)
response.raise_for_status()
except Exception as e:
print(e)

38
auth/exceptions.py Normal file
View File

@@ -0,0 +1,38 @@
from graphql.error import GraphQLError
# TODO: remove traceback from logs for defined exceptions
class BaseHttpException(GraphQLError):
code = 500
message = "500 Server error"
class ExpiredToken(BaseHttpException):
code = 401
message = "401 Expired Token"
class InvalidToken(BaseHttpException):
code = 401
message = "401 Invalid Token"
class Unauthorized(BaseHttpException):
code = 401
message = "401 Unauthorized"
class ObjectNotExist(BaseHttpException):
code = 404
message = "404 Object Does Not Exist"
class OperationNotAllowed(BaseHttpException):
code = 403
message = "403 Operation Is Not Allowed"
class InvalidPassword(BaseHttpException):
code = 403
message = "403 Invalid Password"

97
auth/identity.py Normal file
View File

@@ -0,0 +1,97 @@
from binascii import hexlify
from hashlib import sha256
from passlib.hash import bcrypt
from auth.exceptions import ExpiredToken, InvalidToken
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from orm.user import User
# from base.exceptions import InvalidPassword, InvalidToken
from services.db import local_session
class Password:
@staticmethod
def _to_bytes(data: str) -> bytes:
return bytes(data.encode())
@classmethod
def _get_sha256(cls, password: str) -> bytes:
bytes_password = cls._to_bytes(password)
return hexlify(sha256(bytes_password).digest())
@staticmethod
def encode(password: str) -> str:
password_sha256 = Password._get_sha256(password)
return bcrypt.using(rounds=10).hash(password_sha256)
@staticmethod
def verify(password: str, hashed: str) -> bool:
"""
Verify that password hash is equal to specified hash. Hash format:
$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
\__/\/ \____________________/\_____________________________/ # noqa: W605
| | Salt Hash
| Cost
Version
More info: https://passlib.readthedocs.io/en/stable/lib/passlib.hash.bcrypt.html
:param password: clear text password
:param hashed: hash of the password
:return: True if clear text password matches specified hash
"""
hashed_bytes = Password._to_bytes(hashed)
password_sha256 = Password._get_sha256(password)
return bcrypt.verify(password_sha256, hashed_bytes)
class Identity:
@staticmethod
def password(orm_user: User, password: str) -> User:
user = User(**orm_user.dict())
if not user.password:
# raise InvalidPassword("User password is empty")
return {"error": "User password is empty"}
if not Password.verify(password, user.password):
# raise InvalidPassword("Wrong user password")
return {"error": "Wrong user password"}
return user
@staticmethod
def oauth(inp) -> User:
with local_session() as session:
user = session.query(User).filter(User.email == inp["email"]).first()
if not user:
user = User.create(**inp, emailConfirmed=True)
session.commit()
return user
@staticmethod
async def onetime(token: str) -> User:
try:
print("[auth.identity] using one time token")
payload = JWTCodec.decode(token)
if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
# raise InvalidToken("Login token has expired, please login again")
return {"error": "Token has expired"}
except ExpiredToken:
# raise InvalidToken("Login token has expired, please try again")
return {"error": "Token has expired"}
except InvalidToken:
# raise InvalidToken("token format error") from e
return {"error": "Token format error"}
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
# raise Exception("user not exist")
return {"error": "User does not exist"}
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
return user
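A minimal sketch of the password flow implemented above: hash once at registration, verify on login. `Identity.password()` then repeats the same check against the ORM user.

```python
from auth.identity import Password

hashed = Password.encode("s3cret-Passw0rd!")   # bcrypt over a sha256 pre-hash
assert Password.verify("s3cret-Passw0rd!", hashed)
assert not Password.verify("wrong-password", hashed)
```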

60
auth/jwtcodec.py Normal file
View File

@@ -0,0 +1,60 @@
from datetime import datetime, timezone
import jwt
from pydantic import BaseModel
from auth.exceptions import ExpiredToken, InvalidToken
from settings import JWT_ALGORITHM, JWT_SECRET_KEY
class TokenPayload(BaseModel):
user_id: str
username: str
exp: datetime
iat: datetime
iss: str
class JWTCodec:
@staticmethod
def encode(user, exp: datetime) -> str:
payload = {
"user_id": user.id,
"username": user.email or user.phone,
"exp": exp,
"iat": datetime.now(tz=timezone.utc),
"iss": "discours",
}
try:
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
except Exception as e:
print("[auth.jwtcodec] JWT encode error %r" % e)
@staticmethod
def decode(token: str, verify_exp: bool = True):
r = None
payload = None
try:
payload = jwt.decode(
token,
key=JWT_SECRET_KEY,
options={
"verify_exp": verify_exp,
# "verify_signature": False
},
algorithms=[JWT_ALGORITHM],
issuer="discours",
)
r = TokenPayload(**payload)
# print('[auth.jwtcodec] debug token %r' % r)
return r
except jwt.InvalidIssuedAtError:
print("[auth.jwtcodec] invalid issued at: %r" % payload)
raise ExpiredToken("check token issued time")
except jwt.ExpiredSignatureError:
print("[auth.jwtcodec] expired signature %r" % payload)
raise ExpiredToken("check token lifetime")
except jwt.InvalidTokenError:
# InvalidSignatureError is a subclass of InvalidTokenError, so both cases are handled here
raise InvalidToken("token is not valid")
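A quick round-trip sketch for `JWTCodec`. The stand-in user object only carries the attributes `encode()` actually reads (`id`, `email`, `phone`); `TokenPayload` declares `user_id` as a string, so the example uses one.

```python
from datetime import datetime, timedelta, timezone
from types import SimpleNamespace

from auth.jwtcodec import JWTCodec

user = SimpleNamespace(id="1", email="user@discours.io", phone=None)
exp = datetime.now(tz=timezone.utc) + timedelta(hours=1)

token = JWTCodec.encode(user, exp)
payload = JWTCodec.decode(token)
print(payload.user_id, payload.username, payload.exp)
```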

98
auth/oauth.py Normal file
View File

@@ -0,0 +1,98 @@
from authlib.integrations.starlette_client import OAuth
from starlette.responses import RedirectResponse
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
from settings import FRONTEND_URL, OAUTH_CLIENTS
oauth = OAuth()
oauth.register(
name="facebook",
client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
access_token_params=None,
authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
authorize_params=None,
api_base_url="https://graph.facebook.com/",
client_kwargs={"scope": "public_profile email"},
)
oauth.register(
name="github",
client_id=OAUTH_CLIENTS["GITHUB"]["id"],
client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
access_token_url="https://github.com/login/oauth/access_token",
access_token_params=None,
authorize_url="https://github.com/login/oauth/authorize",
authorize_params=None,
api_base_url="https://api.github.com/",
client_kwargs={"scope": "user:email"},
)
oauth.register(
name="google",
client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={"scope": "openid email profile"},
authorize_state="test",
)
async def google_profile(client, request, token):
userinfo = token["userinfo"]
profile = {"name": userinfo["name"], "email": userinfo["email"], "id": userinfo["sub"]}
if userinfo["picture"]:
userpic = userinfo["picture"].replace("=s96", "=s600")
profile["userpic"] = userpic
return profile
async def facebook_profile(client, request, token):
profile = await client.get("me?fields=name,id,email", token=token)
return profile.json()
async def github_profile(client, request, token):
profile = await client.get("user", token=token)
return profile.json()
profile_callbacks = {
"google": google_profile,
"facebook": facebook_profile,
"github": github_profile,
}
async def oauth_login(request):
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
redirect_uri = "https://v2.discours.io/oauth-authorize"
return await client.authorize_redirect(request, redirect_uri)
async def oauth_authorize(request):
provider = request.session["provider"]
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
get_profile = profile_callbacks[provider]
profile = await get_profile(client, request, token)
user_oauth_info = "%s:%s" % (provider, profile["id"])
user_input = {
"oauth": user_oauth_info,
"email": profile["email"],
"username": profile["name"],
"userpic": profile["userpic"],
}
user = Identity.oauth(user_input)
session_token = await TokenStorage.create_session(user)
response = RedirectResponse(url=FRONTEND_URL + "/confirm")
response.set_cookie("token", session_token)
return response
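The module above only defines the handlers; they still need to be mounted as routes. A sketch of the wiring, assuming Starlette with `SessionMiddleware` (the handlers read `request.session` and `request.path_params["provider"]`); the login path and secret key are placeholders.

```python
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route

from auth.oauth import oauth_authorize, oauth_login

routes = [
    Route("/oauth/{provider}", oauth_login),     # provider: google | github | facebook
    Route("/oauth-authorize", oauth_authorize),  # matches the redirect_uri used above
]
app = Starlette(routes=routes, middleware=[Middleware(SessionMiddleware, secret_key="change-me")])
```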

215
auth/resolvers.py Normal file
View File

@@ -0,0 +1,215 @@
# -*- coding: utf-8 -*-
import re
from datetime import datetime, timezone
from urllib.parse import quote_plus
from graphql.type import GraphQLResolveInfo
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.exceptions import InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from orm import Role, User
from services.db import local_session
from services.schema import mutation, query
from settings import SESSION_TOKEN_HEADER
@mutation.field("getSession")
@login_required
async def get_current_user(_, info):
auth: AuthCredentials = info.context["request"].auth
token = info.context["request"].headers.get(SESSION_TOKEN_HEADER)
with local_session() as session:
user = session.query(User).where(User.id == auth.user_id).one()
user.lastSeen = datetime.now(tz=timezone.utc)
session.commit()
return {"token": token, "user": user}
@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
"""confirm owning email address"""
try:
print("[resolvers.auth] confirm email by token")
payload = JWTCodec.decode(token)
user_id = payload.user_id
await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
with local_session() as session:
user = session.query(User).where(User.id == user_id).first()
session_token = await TokenStorage.create_session(user)
user.emailConfirmed = True
user.lastSeen = datetime.now(tz=timezone.utc)
session.add(user)
session.commit()
return {"token": session_token, "user": user}
except InvalidToken as e:
raise InvalidToken(e.message)
except Exception as e:
print(e) # FIXME: debug only
return {"error": "email is not confirmed"}
def create_user(user_dict):
user = User(**user_dict)
with local_session() as session:
user.roles.append(session.query(Role).first())
session.add(user)
session.commit()
return user
def replace_translit(src):
ruchars = "абвгдеёжзийклмнопрстуфхцчшщъыьэюя."
enchars = [
"a",
"b",
"v",
"g",
"d",
"e",
"yo",
"zh",
"z",
"i",
"y",
"k",
"l",
"m",
"n",
"o",
"p",
"r",
"s",
"t",
"u",
"f",
"h",
"c",
"ch",
"sh",
"sch",
"",
"y",
"'",
"e",
"yu",
"ya",
"-",
]
# str.maketrans needs a dict here: values like "zh" are multi-character replacements
return src.translate(str.maketrans(dict(zip(ruchars, enchars))))
def generate_unique_slug(src):
print("[resolvers.auth] generating slug from: " + src)
slug = replace_translit(src.lower())
slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
if slug != src:
print("[resolvers.auth] translited name: " + slug)
c = 1
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
while user:
user = session.query(User).where(User.slug == slug).first()
slug = slug + "-" + str(c)
c += 1
if not user:
unique_slug = slug
print("[resolvers.auth] " + unique_slug)
return quote_plus(unique_slug.replace("'", "")).replace("+", "-")
@mutation.field("registerUser")
async def register_by_email(_, _info, email: str, password: str = "", name: str = ""):
"""Create a new user account."""
email = email.lower()
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if user:
raise Unauthorized("User already exist")
else:
slug = generate_unique_slug(name)
user = session.query(User).where(User.slug == slug).first()
if user:
slug = generate_unique_slug(email.split("@")[0])
user_dict = {
"email": email,
"username": email, # will be used to store phone number or some messenger network id
"name": name,
"slug": slug,
}
if password:
user_dict["password"] = Password.encode(password)
user = create_user(user_dict)
user = await auth_send_link(_, _info, email)
return {"user": user}
@mutation.field("sendLink")
async def auth_send_link(_, _info, email, lang="ru", template="email_confirmation"):
"""Send a link with a confirmation code to the given email address."""
email = email.lower()
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if not user:
raise ObjectNotExist("User not found")
else:
token = await TokenStorage.create_onetime(user)
await send_auth_email(user, token, lang, template)
return user
@query.field("signIn")
async def login(_, info, email: str, password: str = "", lang: str = "ru"):
email = email.lower()
with local_session() as session:
orm_user = session.query(User).filter(User.email == email).first()
if orm_user is None:
print(f"[auth] {email}: email not found")
# return {"error": "email not found"}
raise ObjectNotExist("User not found") # contains webserver status
if not password:
print(f"[auth] send confirm link to {email}")
token = await TokenStorage.create_onetime(orm_user)
await send_auth_email(orm_user, token, lang)
# FIXME: not an error, warning
return {"error": "no password, email link was sent"}
else:
# sign in using password
if not orm_user.emailConfirmed:
# not an error, warns users
return {"error": "please, confirm email"}
else:
try:
user = Identity.password(orm_user, password)
session_token = await TokenStorage.create_session(user)
print(f"[auth] user {email} authorized")
return {"token": session_token, "user": user}
except InvalidPassword:
print(f"[auth] {email}: invalid password")
raise InvalidPassword("invalid password") # contains webserver status
# return {"error": "invalid password"}
@query.field("signOut")
@login_required
async def sign_out(_, info: GraphQLResolveInfo):
token = info.context["request"].headers.get(SESSION_TOKEN_HEADER, "")
status = await TokenStorage.revoke(token)
return status
@query.field("isEmailUsed")
async def is_email_used(_, _info, email):
email = email.lower()
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
return user is not None

73
auth/tokenstorage.py Normal file
View File

@@ -0,0 +1,73 @@
from datetime import datetime, timedelta, timezone
from auth.jwtcodec import JWTCodec
from auth.validations import AuthInput
from services.redis import redis
from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN
async def save(token_key, life_span, auto_delete=True):
await redis.execute("SET", token_key, "True")
if auto_delete:
expire_at = (datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)).timestamp()
await redis.execute("EXPIREAT", token_key, int(expire_at))
class SessionToken:
@classmethod
async def verify(cls, token: str):
"""
Rules for a token to be valid.
- token format is legal
- token exists in redis database
- token is not expired
"""
try:
return JWTCodec.decode(token)
except Exception as e:
raise e
@classmethod
async def get(cls, payload, token):
return await TokenStorage.get(f"{payload.user_id}-{payload.username}-{token}")
class TokenStorage:
@staticmethod
async def get(token_key):
print("[tokenstorage.get] " + token_key)
# 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
return await redis.execute("GET", token_key)
@staticmethod
async def create_onetime(user: AuthInput) -> str:
life_span = ONETIME_TOKEN_LIFE_SPAN
exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
one_time_token = JWTCodec.encode(user, exp)
await save(f"{user.id}-{user.username}-{one_time_token}", life_span)
return one_time_token
@staticmethod
async def create_session(user: AuthInput) -> str:
life_span = SESSION_TOKEN_LIFE_SPAN
exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
session_token = JWTCodec.encode(user, exp)
await save(f"{user.id}-{user.username}-{session_token}", life_span)
return session_token
@staticmethod
async def revoke(token: str) -> bool:
payload = None
try:
print("[auth.tokenstorage] revoke token")
payload = JWTCodec.decode(token)
except: # noqa
pass
else:
await redis.execute("DEL", f"{payload.user_id}-{payload.username}-{token}")
return True
@staticmethod
async def revoke_all(user: AuthInput):
tokens = await redis.execute("KEYS", f"{user.id}-*")
await redis.execute("DEL", *tokens)
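A sketch of the session lifecycle built on the helpers above: create a token, verify it, then revoke it. Requires a reachable Redis instance and the JWT settings; the user object is a stand-in carrying only the attributes the codecs read.

```python
import asyncio
from types import SimpleNamespace

from auth.tokenstorage import SessionToken, TokenStorage

async def demo():
    user = SimpleNamespace(id="1", username="user@discours.io", email="user@discours.io", phone=None)
    token = await TokenStorage.create_session(user)
    payload = await SessionToken.verify(token)            # decodes and validates the JWT
    print(payload.user_id, await SessionToken.get(payload, token))
    await TokenStorage.revoke(token)

asyncio.run(demo())
```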

119
auth/usermodel.py Normal file
View File

@@ -0,0 +1,119 @@
import time
from sqlalchemy import (
JSON,
Boolean,
Column,
DateTime,
ForeignKey,
Integer,
String,
func,
)
from sqlalchemy.orm import relationship
from services.db import Base
class Permission(Base):
__tablename__ = "permission"
id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
resource = Column(String, nullable=False)
operation = Column(String, nullable=False)
class Role(Base):
__tablename__ = "role"
id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
name = Column(String, nullable=False)
permissions = relationship(Permission)
class AuthorizerUser(Base):
__tablename__ = "authorizer_users"
id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
key = Column(String)
email = Column(String, unique=True)
email_verified_at = Column(Integer)
family_name = Column(String)
gender = Column(String)
given_name = Column(String)
is_multi_factor_auth_enabled = Column(Boolean)
middle_name = Column(String)
nickname = Column(String)
password = Column(String)
phone_number = Column(String, unique=True)
phone_number_verified_at = Column(Integer)
# preferred_username = Column(String, nullable=False)
picture = Column(String)
revoked_timestamp = Column(Integer)
roles = Column(String, default="author,reader")
signup_methods = Column(String, default="magic_link_login")
created_at = Column(Integer, default=lambda: int(time.time()))
updated_at = Column(Integer, default=lambda: int(time.time()))
class UserRating(Base):
__tablename__ = "user_rating"
id = None
rater: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
value: Column = Column(Integer)
@staticmethod
def init_table():
pass
class UserRole(Base):
__tablename__ = "user_role"
id = None
user = Column(ForeignKey("user.id"), primary_key=True, index=True)
role = Column(ForeignKey("role.id"), primary_key=True, index=True)
class User(Base):
__tablename__ = "user"
default_user = None
email = Column(String, unique=True, nullable=False, comment="Email")
username = Column(String, nullable=False, comment="Login")
password = Column(String, nullable=True, comment="Password")
bio = Column(String, nullable=True, comment="Bio") # status description
about = Column(String, nullable=True, comment="About") # long and formatted
userpic = Column(String, nullable=True, comment="Userpic")
name = Column(String, nullable=True, comment="Display name")
slug = Column(String, unique=True, comment="User's slug")
links = Column(JSON, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
oid = Column(String, nullable=True)
muted = Column(Boolean, default=False)
confirmed = Column(Boolean, default=False)
created_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at")
updated_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Updated at")
last_seen = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Was online at")
deleted_at = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")
ratings = relationship(UserRating, foreign_keys=UserRating.user)
roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
def get_permission(self):
scope = {}
for role in self.roles:
for p in role.permissions:
if p.resource not in scope:
scope[p.resource] = set()
scope[p.resource].add(p.operation)
print(scope)
return scope
# if __name__ == "__main__":
# print(User.get_permission(user_id=1))

116
auth/validations.py Normal file
View File

@@ -0,0 +1,116 @@
import re
from datetime import datetime
from typing import Dict, List, Optional, Union
from pydantic import BaseModel, Field, field_validator
# RFC 5322 compliant email regex pattern
EMAIL_PATTERN = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"
class AuthInput(BaseModel):
"""Base model for authentication input validation"""
user_id: str = Field(description="Unique user identifier")
username: str = Field(min_length=2, max_length=50)
token: str = Field(min_length=32)
@field_validator("user_id")
@classmethod
def validate_user_id(cls, v: str) -> str:
if not v.strip():
raise ValueError("user_id cannot be empty")
return v
class UserRegistrationInput(BaseModel):
"""Validation model for user registration"""
email: str = Field(max_length=254) # Max email length per RFC 5321
password: str = Field(min_length=8, max_length=100)
name: str = Field(min_length=2, max_length=50)
@field_validator("email")
@classmethod
def validate_email(cls, v: str) -> str:
"""Validate email format"""
if not re.match(EMAIL_PATTERN, v):
raise ValueError("Invalid email format")
return v.lower()
@field_validator("password")
@classmethod
def validate_password_strength(cls, v: str) -> str:
"""Validate password meets security requirements"""
if not any(c.isupper() for c in v):
raise ValueError("Password must contain at least one uppercase letter")
if not any(c.islower() for c in v):
raise ValueError("Password must contain at least one lowercase letter")
if not any(c.isdigit() for c in v):
raise ValueError("Password must contain at least one number")
if not any(c in "!@#$%^&*()_+-=[]{}|;:,.<>?" for c in v):
raise ValueError("Password must contain at least one special character")
return v
class UserLoginInput(BaseModel):
"""Validation model for user login"""
email: str = Field(max_length=254)
password: str = Field(min_length=8, max_length=100)
@field_validator("email")
@classmethod
def validate_email(cls, v: str) -> str:
if not re.match(EMAIL_PATTERN, v):
raise ValueError("Invalid email format")
return v.lower()
class TokenPayload(BaseModel):
"""Validation model for JWT token payload"""
user_id: str
username: str
exp: datetime
iat: datetime
scopes: Optional[List[str]] = []
class OAuthInput(BaseModel):
"""Validation model for OAuth input"""
provider: str = Field(pattern="^(google|github|facebook)$")
code: str
redirect_uri: Optional[str] = None
@field_validator("provider")
@classmethod
def validate_provider(cls, v: str) -> str:
valid_providers = ["google", "github", "facebook"]
if v.lower() not in valid_providers:
raise ValueError(f"Provider must be one of: {', '.join(valid_providers)}")
return v.lower()
class AuthResponse(BaseModel):
"""Validation model for authentication responses"""
success: bool
token: Optional[str] = None
error: Optional[str] = None
user: Optional[Dict[str, Union[str, int, bool]]] = None
@field_validator("error")
@classmethod
def validate_error_if_not_success(cls, v: Optional[str], info) -> Optional[str]:
if not info.data.get("success") and not v:
raise ValueError("Error message required when success is False")
return v
@field_validator("token")
@classmethod
def validate_token_if_success(cls, v: Optional[str], info) -> Optional[str]:
if info.data.get("success") and not v:
raise ValueError("Token required when success is True")
return v
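A short sketch of how the registration model behaves: the email is normalized to lower case, and weak passwords are rejected with a `ValidationError`.

```python
from pydantic import ValidationError

from auth.validations import UserRegistrationInput

try:
    data = UserRegistrationInput(email="New.User@Example.com", password="Str0ng!password", name="New User")
    print(data.email)  # "new.user@example.com"
except ValidationError as e:
    print(e.errors())

try:
    UserRegistrationInput(email="new.user@example.com", password="weakpass", name="New User")
except ValidationError as e:
    print("rejected:", e.errors()[0]["msg"])
```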

627
cache/cache.py vendored Normal file
View File

@@ -0,0 +1,627 @@
"""
Caching system for the Discours platform
----------------------------------------
This module provides a comprehensive caching solution with these key components:
1. KEY NAMING CONVENTIONS:
- Entity-based keys: "entity:property:value" (e.g., "author:id:123")
- Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0")
- Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123")
2. CORE FUNCTIONS:
- cached_query(): High-level function for retrieving cached data or executing queries
3. ENTITY-SPECIFIC FUNCTIONS:
- cache_author(), cache_topic(): Cache entity data
- get_cached_author(), get_cached_topic(): Retrieve entity data from cache
- invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix
4. CACHE INVALIDATION STRATEGY:
- Direct invalidation via invalidate_* functions for immediate changes
- Delayed invalidation via revalidation_manager for background processing
- Event-based triggers for automatic cache updates (see triggers.py)
To maintain consistency with the existing codebase, this module preserves
the original key naming patterns while providing a more structured approach
for new cache operations.
"""
import asyncio
import json
from typing import Any, Dict, List, Optional, Union
import orjson
from sqlalchemy import and_, join, select
from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger
DEFAULT_FOLLOWS = {
"topics": [],
"authors": [],
"shouts": [],
"communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}
CACHE_TTL = 300 # 5 minutes
# Key templates for common entity types
# These are used throughout the codebase and should be maintained for compatibility
CACHE_KEYS = {
"TOPIC_ID": "topic:id:{}",
"TOPIC_SLUG": "topic:slug:{}",
"TOPIC_AUTHORS": "topic:authors:{}",
"TOPIC_FOLLOWERS": "topic:followers:{}",
"TOPIC_SHOUTS": "topic_shouts_{}",
"AUTHOR_ID": "author:id:{}",
"AUTHOR_USER": "author:user:{}",
"SHOUTS": "shouts:{}",
}
# Cache topic data
async def cache_topic(topic: dict):
payload = json.dumps(topic, cls=CustomJSONEncoder)
await asyncio.gather(
redis.execute("SET", f"topic:id:{topic['id']}", payload),
redis.execute("SET", f"topic:slug:{topic['slug']}", payload),
)
# Cache author data
async def cache_author(author: dict):
payload = json.dumps(author, cls=CustomJSONEncoder)
await asyncio.gather(
redis.execute("SET", f"author:user:{author['user'].strip()}", str(author["id"])),
redis.execute("SET", f"author:id:{author['id']}", payload),
)
# Cache follows data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
key = f"author:follows-{entity_type}s:{follower_id}"
follows_str = await redis.execute("GET", key)
follows = orjson.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type]
if is_insert:
if entity_id not in follows:
follows.append(entity_id)
else:
follows = [eid for eid in follows if eid != entity_id]
await redis.execute("SET", key, json.dumps(follows, cls=CustomJSONEncoder))
await update_follower_stat(follower_id, entity_type, len(follows))
# Update follower statistics
async def update_follower_stat(follower_id, entity_type, count):
follower_key = f"author:id:{follower_id}"
follower_str = await redis.execute("GET", follower_key)
follower = orjson.loads(follower_str) if follower_str else None
if follower:
follower["stat"] = {f"{entity_type}s": count}
await cache_author(follower)
# Get author from cache
async def get_cached_author(author_id: int, get_with_stat):
author_key = f"author:id:{author_id}"
result = await redis.execute("GET", author_key)
if result:
return orjson.loads(result)
# Load from database if not found in cache
q = select(Author).where(Author.id == author_id)
authors = get_with_stat(q)
if authors:
author = authors[0]
await cache_author(author.dict())
return author.dict()
return None
# Function to get cached topic
async def get_cached_topic(topic_id: int):
"""
Fetch topic data from cache or database by id.
Args:
topic_id (int): The identifier for the topic.
Returns:
dict: Topic data or None if not found.
"""
topic_key = f"topic:id:{topic_id}"
cached_topic = await redis.execute("GET", topic_key)
if cached_topic:
return orjson.loads(cached_topic)
# If not in cache, fetch from the database
with local_session() as session:
topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
if topic:
topic_dict = topic.dict()
await redis.execute("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
return topic_dict
return None
# Get topic by slug from cache
async def get_cached_topic_by_slug(slug: str, get_with_stat):
topic_key = f"topic:slug:{slug}"
result = await redis.execute("GET", topic_key)
if result:
return orjson.loads(result)
# Load from database if not found in cache
topic_query = select(Topic).where(Topic.slug == slug)
topics = get_with_stat(topic_query)
if topics:
topic_dict = topics[0].dict()
await cache_topic(topic_dict)
return topic_dict
return None
# Get list of authors by ID from cache
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
# Fetch all author data concurrently
keys = [f"author:id:{author_id}" for author_id in author_ids]
results = await asyncio.gather(*(redis.execute("GET", key) for key in keys))
authors = [orjson.loads(result) if result else None for result in results]
# Load missing authors from database and cache
missing_indices = [index for index, author in enumerate(authors) if author is None]
if missing_indices:
missing_ids = [author_ids[index] for index in missing_indices]
with local_session() as session:
query = select(Author).where(Author.id.in_(missing_ids))
missing_authors = session.execute(query).scalars().all()
await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
for index, author in zip(missing_indices, missing_authors):
authors[index] = author.dict()
return authors
async def get_cached_topic_followers(topic_id: int):
"""
Получает подписчиков темы по ID, используя кеш Redis.
Args:
topic_id: ID темы
Returns:
List[dict]: Список подписчиков с их данными
"""
try:
cache_key = CACHE_KEYS["TOPIC_FOLLOWERS"].format(topic_id)
cached = await redis.execute("GET", cache_key)
if cached:
followers_ids = orjson.loads(cached)
logger.debug(f"Found {len(followers_ids)} cached followers for topic #{topic_id}")
return await get_cached_authors_by_ids(followers_ids)
with local_session() as session:
followers_ids = [
f[0]
for f in session.query(Author.id)
.join(TopicFollower, TopicFollower.follower == Author.id)
.filter(TopicFollower.topic == topic_id)
.all()
]
await redis.execute("SETEX", cache_key, CACHE_TTL, orjson.dumps(followers_ids))
followers = await get_cached_authors_by_ids(followers_ids)
logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}")
return followers
except Exception as e:
logger.error(f"Error getting followers for topic #{topic_id}: {str(e)}")
return []
# Get cached author followers
async def get_cached_author_followers(author_id: int):
# Check cache for data
cached = await redis.execute("GET", f"author:followers:{author_id}")
if cached:
followers_ids = orjson.loads(cached)
followers = await get_cached_authors_by_ids(followers_ids)
logger.debug(f"Cached followers for author #{author_id}: {len(followers)}")
return followers
# Query database if cache is empty
with local_session() as session:
followers_ids = [
f[0]
for f in session.query(Author.id)
.join(AuthorFollower, AuthorFollower.follower == Author.id)
.filter(AuthorFollower.author == author_id, Author.id != author_id)
.all()
]
await redis.execute("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids))
followers = await get_cached_authors_by_ids(followers_ids)
return followers
# Get cached follower authors
async def get_cached_follower_authors(author_id: int):
# Attempt to retrieve authors from cache
cached = await redis.execute("GET", f"author:follows-authors:{author_id}")
if cached:
authors_ids = orjson.loads(cached)
else:
# Query authors from database
with local_session() as session:
authors_ids = [
a[0]
for a in session.execute(
select(Author.id)
.select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
.where(AuthorFollower.follower == author_id)
).all()
]
await redis.execute("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids))
authors = await get_cached_authors_by_ids(authors_ids)
return authors
# Get cached follower topics
async def get_cached_follower_topics(author_id: int):
# Attempt to retrieve topics from cache
cached = await redis.execute("GET", f"author:follows-topics:{author_id}")
if cached:
topics_ids = orjson.loads(cached)
else:
# Load topics from database and cache them
with local_session() as session:
topics_ids = [
t[0]
for t in session.query(Topic.id)
.join(TopicFollower, TopicFollower.topic == Topic.id)
.where(TopicFollower.follower == author_id)
.all()
]
await redis.execute("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids))
topics = []
for topic_id in topics_ids:
topic_str = await redis.execute("GET", f"topic:id:{topic_id}")
if topic_str:
topic = orjson.loads(topic_str)
if topic and topic not in topics:
topics.append(topic)
logger.debug(f"Cached topics for author#{author_id}: {len(topics)}")
return topics
# Get author by user ID from cache
async def get_cached_author_by_user_id(user_id: str, get_with_stat):
"""
Retrieve author information by user_id, checking the cache first, then the database.
Args:
user_id (str): The user identifier for which to retrieve the author.
Returns:
dict: Dictionary with author data or None if not found.
"""
# Attempt to find author ID by user_id in Redis cache
author_id = await redis.execute("GET", f"author:user:{user_id.strip()}")
if author_id:
# If ID is found, get full author data by ID
author_data = await redis.execute("GET", f"author:id:{author_id}")
if author_data:
return orjson.loads(author_data)
# If data is not found in cache, query the database
author_query = select(Author).where(Author.user == user_id)
authors = get_with_stat(author_query)
if authors:
# Cache the retrieved author data
author = authors[0]
author_dict = author.dict()
await asyncio.gather(
redis.execute("SET", f"author:user:{user_id.strip()}", str(author.id)),
redis.execute("SET", f"author:id:{author.id}", orjson.dumps(author_dict)),
)
return author_dict
# Return None if author is not found
return None
# Get cached topic authors
async def get_cached_topic_authors(topic_id: int):
"""
Retrieve a list of authors for a given topic, using cache or database.
Args:
topic_id (int): The identifier of the topic for which to retrieve authors.
Returns:
List[dict]: A list of dictionaries containing author data.
"""
# Attempt to get a list of author IDs from cache
rkey = f"topic:authors:{topic_id}"
cached_authors_ids = await redis.execute("GET", rkey)
if cached_authors_ids:
authors_ids = orjson.loads(cached_authors_ids)
else:
# If cache is empty, get data from the database
with local_session() as session:
query = (
select(ShoutAuthor.author)
.select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.where(and_(ShoutTopic.topic == topic_id, Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
authors_ids = [author_id for (author_id,) in session.execute(query).all()]
# Cache the retrieved author IDs
await redis.execute("SET", rkey, orjson.dumps(authors_ids))
# Retrieve full author details from cached IDs
if authors_ids:
authors = await get_cached_authors_by_ids(authors_ids)
logger.debug(f"Topic#{topic_id} authors fetched and cached: {len(authors)} authors found.")
return authors
return []
async def invalidate_shouts_cache(cache_keys: List[str]):
"""
Инвалидирует кэш выборок публикаций по переданным ключам.
"""
for key in cache_keys:
try:
# Формируем полный ключ кэша
cache_key = f"shouts:{key}"
# Удаляем основной кэш
await redis.execute("DEL", cache_key)
logger.debug(f"Invalidated cache key: {cache_key}")
# Добавляем ключ в список инвалидированных с TTL
await redis.execute("SETEX", f"{cache_key}:invalidated", CACHE_TTL, "1")
# Если это кэш темы, инвалидируем также связанные ключи
if key.startswith("topic_"):
topic_id = key.split("_")[1]
related_keys = [
f"topic:id:{topic_id}",
f"topic:authors:{topic_id}",
f"topic:followers:{topic_id}",
f"topic:stats:{topic_id}",
]
for related_key in related_keys:
await redis.execute("DEL", related_key)
logger.debug(f"Invalidated related key: {related_key}")
except Exception as e:
logger.error(f"Error invalidating cache key {key}: {e}")
async def cache_topic_shouts(topic_id: int, shouts: List[dict]):
"""Кэширует список публикаций для темы"""
key = f"topic_shouts_{topic_id}"
payload = json.dumps(shouts, cls=CustomJSONEncoder)
await redis.execute("SETEX", key, CACHE_TTL, payload)
async def get_cached_topic_shouts(topic_id: int) -> List[dict]:
"""Получает кэшированный список публикаций для темы"""
key = f"topic_shouts_{topic_id}"
cached = await redis.execute("GET", key)
if cached:
return orjson.loads(cached)
return None
async def cache_related_entities(shout: Shout):
"""
Кэширует все связанные с публикацией сущности (авторов и темы)
"""
tasks = []
for author in shout.authors:
tasks.append(cache_by_id(Author, author.id, cache_author))
for topic in shout.topics:
tasks.append(cache_by_id(Topic, topic.id, cache_topic))
await asyncio.gather(*tasks)
async def invalidate_shout_related_cache(shout: Shout, author_id: int):
"""
Инвалидирует весь кэш, связанный с публикацией и её связями
Args:
shout: Объект публикации
author_id: ID автора
"""
cache_keys = {
"feed", # основная лента
f"author_{author_id}", # публикации автора
"random_top", # случайные топовые
"unrated", # неоцененные
"recent", # последние
"coauthored", # совместные
}
# Добавляем ключи авторов
cache_keys.update(f"author_{a.id}" for a in shout.authors)
cache_keys.update(f"authored_{a.id}" for a in shout.authors)
# Добавляем ключи тем
cache_keys.update(f"topic_{t.id}" for t in shout.topics)
cache_keys.update(f"topic_shouts_{t.id}" for t in shout.topics)
await invalidate_shouts_cache(list(cache_keys))
# Function removed - direct Redis calls used throughout the module instead
async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_method):
"""
Универсальная функция получения кэшированной сущности
Args:
entity_type: 'author' или 'topic'
entity_id: ID сущности
get_method: метод получения из БД
cache_method: метод кэширования
"""
key = f"{entity_type}:id:{entity_id}"
cached = await redis.execute("GET", key)
if cached:
return orjson.loads(cached)
entity = await get_method(entity_id)
if entity:
await cache_method(entity)
return entity
return None
async def cache_by_id(entity, entity_id: int, cache_method):
"""
Кэширует сущность по ID, используя указанный метод кэширования
Args:
entity: класс сущности (Author/Topic)
entity_id: ID сущности
cache_method: функция кэширования
"""
from resolvers.stat import get_with_stat
caching_query = select(entity).filter(entity.id == entity_id)
result = get_with_stat(caching_query)
if not result or not result[0]:
logger.warning(f"{entity.__name__} with id {entity_id} not found")
return
x = result[0]
d = x.dict()
await cache_method(d)
return d
# Универсальная функция для сохранения данных в кеш
async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None:
"""
Сохраняет данные в кеш по указанному ключу.
Args:
key: Ключ кеша
data: Данные для сохранения
ttl: Время жизни кеша в секундах (None - бессрочно)
"""
try:
payload = json.dumps(data, cls=CustomJSONEncoder)
if ttl:
await redis.execute("SETEX", key, ttl, payload)
else:
await redis.execute("SET", key, payload)
logger.debug(f"Данные сохранены в кеш по ключу {key}")
except Exception as e:
logger.error(f"Ошибка при сохранении данных в кеш: {e}")
# Универсальная функция для получения данных из кеша
async def get_cached_data(key: str) -> Optional[Any]:
"""
Получает данные из кеша по указанному ключу.
Args:
key: Ключ кеша
Returns:
Any: Данные из кеша или None, если данных нет
"""
try:
cached_data = await redis.execute("GET", key)
if cached_data:
logger.debug(f"Данные получены из кеша по ключу {key}")
return orjson.loads(cached_data)
return None
except Exception as e:
logger.error(f"Ошибка при получении данных из кеша: {e}")
return None
# Универсальная функция для инвалидации кеша по префиксу
async def invalidate_cache_by_prefix(prefix: str) -> None:
"""
Инвалидирует все ключи кеша с указанным префиксом.
Args:
prefix: Префикс ключей кеша для инвалидации
"""
try:
keys = await redis.execute("KEYS", f"{prefix}:*")
if keys:
await redis.execute("DEL", *keys)
logger.debug(f"Удалено {len(keys)} ключей кеша с префиксом {prefix}")
except Exception as e:
logger.error(f"Ошибка при инвалидации кеша: {e}")
# Универсальная функция для получения и кеширования данных
async def cached_query(
cache_key: str,
query_func: callable,
ttl: Optional[int] = None,
force_refresh: bool = False,
use_key_format: bool = True,
**query_params,
) -> Any:
"""
Gets data from cache or executes query and saves result to cache.
Supports existing key formats for compatibility.
Args:
cache_key: Cache key or key template from CACHE_KEYS
query_func: Function to execute the query
ttl: Cache TTL in seconds (None - indefinite)
force_refresh: Force cache refresh
use_key_format: Whether to check if cache_key matches a key template in CACHE_KEYS
**query_params: Parameters to pass to the query function
Returns:
Any: Data from cache or query result
"""
# Check if cache_key matches a pattern in CACHE_KEYS
actual_key = cache_key
if use_key_format and "{}" in cache_key:
# Look for a template match in CACHE_KEYS
for key_name, key_format in CACHE_KEYS.items():
if cache_key == key_format:
# We have a match, now look for the id or value to format with
for param_name, param_value in query_params.items():
if param_name in ["id", "slug", "user", "topic_id", "author_id"]:
actual_key = cache_key.format(param_value)
break
# If not forcing refresh, try to get data from cache
if not force_refresh:
cached_result = await get_cached_data(actual_key)
if cached_result is not None:
return cached_result
# If data not in cache or refresh required, execute query
try:
result = await query_func(**query_params)
if result is not None:
# Save result to cache
await cache_data(actual_key, result, ttl)
return result
except Exception as e:
logger.error(f"Error executing query for caching: {e}")
# In case of error, return data from cache if not forcing refresh
if not force_refresh:
return await get_cached_data(actual_key)
raise
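A usage sketch for `cached_query`: the query function and its parameter are hypothetical, the key template comes from `CACHE_KEYS`, and a running Redis instance is assumed.

```python
import asyncio

from cache.cache import CACHE_KEYS, cached_query

async def fetch_topic_shouts(topic_id: int):
    # stand-in for an expensive database query
    return [{"id": 1, "topic_id": topic_id}]

async def demo():
    shouts = await cached_query(
        CACHE_KEYS["TOPIC_SHOUTS"],   # "topic_shouts_{}" is formatted with topic_id
        fetch_topic_shouts,
        ttl=300,
        topic_id=123,
    )
    print(shouts)

asyncio.run(demo())
```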

133
cache/precache.py vendored Normal file
View File

@@ -0,0 +1,133 @@
import asyncio
import json
from sqlalchemy import and_, join, select
from cache.cache import cache_author, cache_topic
from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger
# Предварительное кеширование подписчиков автора
async def precache_authors_followers(author_id, session):
authors_followers = set()
followers_query = select(AuthorFollower.follower).where(AuthorFollower.author == author_id)
result = session.execute(followers_query)
authors_followers.update(row[0] for row in result if row[0])
followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder)
await redis.execute("SET", f"author:followers:{author_id}", followers_payload)
# Предварительное кеширование подписок автора
async def precache_authors_follows(author_id, session):
follows_topics_query = select(TopicFollower.topic).where(TopicFollower.follower == author_id)
follows_authors_query = select(AuthorFollower.author).where(AuthorFollower.follower == author_id)
follows_shouts_query = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == author_id)
follows_topics = {row[0] for row in session.execute(follows_topics_query) if row[0]}
follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}
follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]}
topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder)
authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder)
shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder)
await asyncio.gather(
redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload),
redis.execute("SET", f"author:follows-authors:{author_id}", authors_payload),
redis.execute("SET", f"author:follows-shouts:{author_id}", shouts_payload),
)
# Предварительное кеширование авторов тем
async def precache_topics_authors(topic_id: int, session):
topic_authors_query = (
select(ShoutAuthor.author)
.select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.filter(
and_(
ShoutTopic.topic == topic_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
)
topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]}
authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder)
await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload)
# Предварительное кеширование подписчиков тем
async def precache_topics_followers(topic_id: int, session):
followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id)
topic_followers = {row[0] for row in session.execute(followers_query) if row[0]}
followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder)
await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload)
async def precache_data():
logger.info("precaching...")
try:
key = "authorizer_env"
# cache reset
value = await redis.execute("HGETALL", key)
await redis.execute("FLUSHDB")
logger.info("redis: FLUSHDB")
# Преобразуем словарь в список аргументов для HSET
if value:
# Если значение - словарь, преобразуем его в плоский список для HSET
if isinstance(value, dict):
flattened = []
for field, val in value.items():
flattened.extend([field, val])
await redis.execute("HSET", key, *flattened)
else:
# Предполагаем, что значение уже содержит список
await redis.execute("HSET", key, *value)
logger.info(f"redis hash '{key}' was restored")
with local_session() as session:
# topics
q = select(Topic).where(Topic.community == 1)
topics = get_with_stat(q)
for topic in topics:
topic_dict = topic.dict() if hasattr(topic, "dict") else topic
await cache_topic(topic_dict)
await asyncio.gather(
precache_topics_followers(topic_dict["id"], session),
precache_topics_authors(topic_dict["id"], session),
)
logger.info(f"{len(topics)} topics and their followings precached")
# authors
authors = get_with_stat(select(Author).where(Author.user.is_not(None)))
logger.info(f"{len(authors)} authors found in database")
for author in authors:
if isinstance(author, Author):
profile = author.dict()
author_id = profile.get("id")
user_id = profile.get("user", "").strip()
if author_id and user_id:
await cache_author(profile)
await asyncio.gather(
precache_authors_followers(author_id, session), precache_authors_follows(author_id, session)
)
else:
logger.error(f"fail caching {author}")
logger.info(f"{len(authors)} authors and their followings precached")
except Exception as exc:
import traceback
traceback.print_exc()
logger.error(f"Error in precache_data: {exc}")
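`precache_data()` is meant to run once at startup, before traffic reaches the resolvers. A minimal sketch, assuming Redis and the database are configured through the usual settings:

```python
import asyncio

from cache.precache import precache_data

# Warm the Redis cache before serving requests; normally scheduled by the app entrypoint.
asyncio.run(precache_data())
```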

157
cache/revalidator.py vendored Normal file
View File

@@ -0,0 +1,157 @@
import asyncio
from cache.cache import (
cache_author,
cache_topic,
get_cached_author,
get_cached_topic,
invalidate_cache_by_prefix,
)
from resolvers.stat import get_with_stat
from services.redis import redis
from utils.logger import root_logger as logger
CACHE_REVALIDATION_INTERVAL = 300 # 5 minutes
class CacheRevalidationManager:
def __init__(self, interval=CACHE_REVALIDATION_INTERVAL):
"""Инициализация менеджера с заданным интервалом проверки (в секундах)."""
self.interval = interval
self.items_to_revalidate = {"authors": set(), "topics": set(), "shouts": set(), "reactions": set()}
self.lock = asyncio.Lock()
self.running = True
self.MAX_BATCH_SIZE = 10 # Максимальное количество элементов для поштучной обработки
async def start(self):
"""Запуск фонового воркера для ревалидации кэша."""
self.task = asyncio.create_task(self.revalidate_cache())
async def revalidate_cache(self):
"""Циклическая проверка и ревалидация кэша каждые self.interval секунд."""
try:
while self.running:
await asyncio.sleep(self.interval)
await self.process_revalidation()
except asyncio.CancelledError:
logger.info("Revalidation worker was stopped.")
except Exception as e:
logger.error(f"An error occurred in the revalidation worker: {e}")
async def process_revalidation(self):
"""Обновление кэша для всех сущностей, требующих ревалидации."""
async with self.lock:
# Ревалидация кэша авторов
if self.items_to_revalidate["authors"]:
logger.debug(f"Revalidating {len(self.items_to_revalidate['authors'])} authors")
for author_id in self.items_to_revalidate["authors"]:
if author_id == "all":
await invalidate_cache_by_prefix("authors")
break
author = await get_cached_author(author_id, get_with_stat)
if author:
await cache_author(author)
self.items_to_revalidate["authors"].clear()
# Ревалидация кэша тем
if self.items_to_revalidate["topics"]:
logger.debug(f"Revalidating {len(self.items_to_revalidate['topics'])} topics")
for topic_id in self.items_to_revalidate["topics"]:
if topic_id == "all":
await invalidate_cache_by_prefix("topics")
break
topic = await get_cached_topic(topic_id)
if topic:
await cache_topic(topic)
self.items_to_revalidate["topics"].clear()
# Ревалидация шаутов (публикаций)
if self.items_to_revalidate["shouts"]:
shouts_count = len(self.items_to_revalidate["shouts"])
logger.debug(f"Revalidating {shouts_count} shouts")
# Проверяем наличие специального флага 'all'
if "all" in self.items_to_revalidate["shouts"]:
await invalidate_cache_by_prefix("shouts")
# Если элементов много, но не 'all', используем специфический подход
elif shouts_count > self.MAX_BATCH_SIZE:
# Инвалидируем только collections keys, которые затрагивают много сущностей
collection_keys = await redis.execute("KEYS", "shouts:*")
if collection_keys:
await redis.execute("DEL", *collection_keys)
logger.debug(f"Удалено {len(collection_keys)} коллекционных ключей шаутов")
# Обновляем кеш каждого конкретного шаута
for shout_id in self.items_to_revalidate["shouts"]:
if shout_id != "all":
# Точечная инвалидация для каждого shout_id
specific_keys = [f"shout:id:{shout_id}"]
for key in specific_keys:
await redis.execute("DEL", key)
logger.debug(f"Удален ключ кеша {key}")
else:
# Если элементов немного, обрабатываем каждый
for shout_id in self.items_to_revalidate["shouts"]:
if shout_id != "all":
# Точечная инвалидация для каждого shout_id
specific_keys = [f"shout:id:{shout_id}"]
for key in specific_keys:
await redis.execute("DEL", key)
logger.debug(f"Удален ключ кеша {key}")
self.items_to_revalidate["shouts"].clear()
# Аналогично для реакций - точечная инвалидация
if self.items_to_revalidate["reactions"]:
reactions_count = len(self.items_to_revalidate["reactions"])
logger.debug(f"Revalidating {reactions_count} reactions")
if "all" in self.items_to_revalidate["reactions"]:
await invalidate_cache_by_prefix("reactions")
elif reactions_count > self.MAX_BATCH_SIZE:
# Инвалидируем только collections keys для реакций
collection_keys = await redis.execute("KEYS", "reactions:*")
if collection_keys:
await redis.execute("DEL", *collection_keys)
logger.debug(f"Удалено {len(collection_keys)} коллекционных ключей реакций")
# Точечная инвалидация для каждой реакции
for reaction_id in self.items_to_revalidate["reactions"]:
if reaction_id != "all":
specific_keys = [f"reaction:id:{reaction_id}"]
for key in specific_keys:
await redis.execute("DEL", key)
logger.debug(f"Удален ключ кеша {key}")
else:
# Точечная инвалидация для каждой реакции
for reaction_id in self.items_to_revalidate["reactions"]:
if reaction_id != "all":
specific_keys = [f"reaction:id:{reaction_id}"]
for key in specific_keys:
await redis.execute("DEL", key)
logger.debug(f"Удален ключ кеша {key}")
self.items_to_revalidate["reactions"].clear()
def mark_for_revalidation(self, entity_id, entity_type):
"""Отметить сущность для ревалидации."""
if entity_id and entity_type:
self.items_to_revalidate[entity_type].add(entity_id)
def invalidate_all(self, entity_type):
"""Пометить для инвалидации все элементы указанного типа."""
logger.debug(f"Marking all {entity_type} for invalidation")
# Особый флаг для полной инвалидации
self.items_to_revalidate[entity_type].add("all")
async def stop(self):
"""Остановка фонового воркера."""
self.running = False
if hasattr(self, "task"):
self.task.cancel()
try:
await self.task
except asyncio.CancelledError:
pass
revalidation_manager = CacheRevalidationManager()

cache/triggers.py

@@ -0,0 +1,131 @@
from sqlalchemy import event

from cache.revalidator import revalidation_manager
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger


def mark_for_revalidation(entity, *args):
    """Mark an entity for revalidation."""
    entity_type = (
        "authors"
        if isinstance(entity, Author)
        else "topics"
        if isinstance(entity, Topic)
        else "reactions"
        if isinstance(entity, Reaction)
        else "shouts"
        if isinstance(entity, Shout)
        else None
    )
    if entity_type:
        revalidation_manager.mark_for_revalidation(entity.id, entity_type)


def after_follower_handler(mapper, connection, target, is_delete=False):
    """Handle insertion, update, or deletion of a follower record."""
    entity_type = None
    if isinstance(target, AuthorFollower):
        entity_type = "authors"
    elif isinstance(target, TopicFollower):
        entity_type = "topics"
    elif isinstance(target, ShoutReactionsFollower):
        entity_type = "shouts"

    if entity_type:
        revalidation_manager.mark_for_revalidation(
            target.author if entity_type == "authors" else target.topic, entity_type
        )
        if not is_delete:
            revalidation_manager.mark_for_revalidation(target.follower, "authors")


def after_shout_handler(mapper, connection, target):
    """Handle changes to a publication's status."""
    if not isinstance(target, Shout):
        return

    # Check for a publication status change
    # was_published = target.published_at is not None and target.deleted_at is None

    # Always refresh counters for authors and topics on any change to the post
    for author in target.authors:
        revalidation_manager.mark_for_revalidation(author.id, "authors")
    for topic in target.topics:
        revalidation_manager.mark_for_revalidation(topic.id, "topics")

    # Refresh the post itself
    revalidation_manager.mark_for_revalidation(target.id, "shouts")


def after_reaction_handler(mapper, connection, target):
    """Handle reactions (comments in particular)."""
    if not isinstance(target, Reaction):
        return

    # Check whether this is a comment
    is_comment = target.kind == ReactionKind.COMMENT.value

    # Get the related post
    shout_id = target.shout if isinstance(target.shout, int) else target.shout.id
    if not shout_id:
        return

    # Refresh counters for the comment's author
    if target.created_by:
        revalidation_manager.mark_for_revalidation(target.created_by, "authors")

    # Refresh counters for the post
    revalidation_manager.mark_for_revalidation(shout_id, "shouts")

    if is_comment:
        # For comments, also refresh the post's authors and topics
        with local_session() as session:
            shout = (
                session.query(Shout)
                .filter(Shout.id == shout_id, Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
                .first()
            )
            if shout:
                for author in shout.authors:
                    revalidation_manager.mark_for_revalidation(author.id, "authors")
                for topic in shout.topics:
                    revalidation_manager.mark_for_revalidation(topic.id, "topics")


def events_register():
    """Register event handlers for all entities."""
    event.listen(ShoutAuthor, "after_insert", mark_for_revalidation)
    event.listen(ShoutAuthor, "after_update", mark_for_revalidation)
    event.listen(ShoutAuthor, "after_delete", mark_for_revalidation)

    event.listen(AuthorFollower, "after_insert", after_follower_handler)
    event.listen(AuthorFollower, "after_update", after_follower_handler)
    event.listen(AuthorFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(TopicFollower, "after_insert", after_follower_handler)
    event.listen(TopicFollower, "after_update", after_follower_handler)
    event.listen(TopicFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(ShoutReactionsFollower, "after_insert", after_follower_handler)
    event.listen(ShoutReactionsFollower, "after_update", after_follower_handler)
    event.listen(ShoutReactionsFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(Reaction, "after_update", mark_for_revalidation)
    event.listen(Author, "after_update", mark_for_revalidation)
    event.listen(Topic, "after_update", mark_for_revalidation)
    event.listen(Shout, "after_update", after_shout_handler)
    event.listen(Shout, "after_delete", after_shout_handler)

    event.listen(Reaction, "after_insert", after_reaction_handler)
    event.listen(Reaction, "after_update", after_reaction_handler)
    event.listen(Reaction, "after_delete", after_reaction_handler)

    logger.info("Event handlers registered successfully.")

docs/caching.md

@@ -0,0 +1,279 @@
# Discours Caching System

## Overview

The Discours caching system is a comprehensive solution for improving platform performance. It uses Redis to store frequently requested data and reduce load on the primary database.

Caching is implemented as a multi-level system made up of several modules:

- `cache.py` - the core module with caching functions
- `revalidator.py` - the asynchronous cache revalidation manager
- `triggers.py` - SQLAlchemy event triggers for automatic revalidation
- `precache.py` - pre-caching of data at application startup

## Key Components

### 1. Cache Key Formats

The system supports several key formats for compatibility and convenience:

- **Entity keys**: `entity:property:value` (e.g. `author:id:123`)
- **Collection keys**: `entity:collection:params` (e.g. `authors:stats:limit=10:offset=0`)
- **Special keys**: kept for backward compatibility (e.g. `topic_shouts_123`)

All standard key formats are stored in the `CACHE_KEYS` dictionary:

```python
CACHE_KEYS = {
    "TOPIC_ID": "topic:id:{}",
    "TOPIC_SLUG": "topic:slug:{}",
    "AUTHOR_ID": "author:id:{}",
    # and others...
}
```

### 2. Core Caching Functions

#### Key structure

Instead of generating keys through helper functions, the system follows strict key-building conventions:

1. **Keys for individual entities** follow the pattern:
   ```
   entity:property:value
   ```
   For example:
   - `topic:id:123` - the topic with ID 123
   - `author:slug:john-doe` - the author with the slug "john-doe"
   - `shout:id:456` - the publication with ID 456

2. **Keys for collections** follow the pattern:
   ```
   entity:collection[:filter1=value1:filter2=value2:...]
   ```
   For example:
   - `topics:all:basic` - the basic list of all topics
   - `authors:stats:limit=10:offset=0:sort=name` - a sorted, paginated list of authors
   - `shouts:feed:limit=20:community=1` - a publication feed filtered by community

3. **Special key formats** for backward compatibility:
   ```
   entity_action_id
   ```
   For example:
   - `topic_shouts_123` - publications for the topic with ID 123

In every module, developers must build keys explicitly according to these conventions, which keeps caching uniform and predictable.

#### Reading and writing cached data

```python
async def cache_data(key, data, ttl=None)
async def get_cached_data(key)
```

These functions provide a generic interface for storing and retrieving data from the cache. They use Redis directly via `redis.execute()` calls.

#### High-level query caching

```python
async def cached_query(cache_key, query_func, ttl=None, force_refresh=False, **query_params)
```

The `cached_query` function combines reading from the cache with running the query when the data is missing. It is the main function to use in resolvers for caching query results.

### 3. Entity Caching

Dedicated functions exist for the main entity types:

```python
async def cache_topic(topic: dict)
async def cache_author(author: dict)
async def get_cached_topic(topic_id: int)
async def get_cached_author(author_id: int, get_with_stat)
```

These functions simplify work with the most frequently used data types and provide a uniform caching approach for them.
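A minimal usage sketch of these helpers (the `cache.cache` import path is assumed here; `get_with_stat` is whatever stats-loading callable the resolver already has):

```python
# Sketch: refresh an author entry, then read it back through the cache.
# The import path is an assumption; get_with_stat is supplied by the caller.
from cache.cache import cache_author, get_cached_author


async def refresh_author(author_dict: dict, get_with_stat):
    await cache_author(author_dict)  # writes author:id:{id} (and related keys)
    # Subsequent reads hit Redis first and fall back to the database
    return await get_cached_author(author_dict["id"], get_with_stat)
```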
### 4. Working with Relations

The following functions handle relations between entities:

```python
async def cache_follows(follower_id, entity_type, entity_id, is_insert=True)
async def get_cached_topic_followers(topic_id)
async def get_cached_author_followers(author_id)
async def get_cached_follower_topics(author_id)
```

They make it possible to cache and retrieve information about follows and the relations between authors, topics, and publications efficiently.
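A sketch of how a follow event might update these caches (the import path and the exact `entity_type` literal are assumptions):

```python
# Sketch: record a topic follow and re-read the topic's followers from cache.
from cache.cache import cache_follows, get_cached_topic_followers


async def on_topic_followed(follower_id: int, topic_id: int):
    await cache_follows(follower_id, "topic", topic_id, is_insert=True)
    return await get_cached_topic_followers(topic_id)
```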
## Cache Invalidation

### 1. Direct Invalidation

The system supports two kinds of cache invalidation:

#### 1.1. Invalidation by prefix

```python
async def invalidate_cache_by_prefix(prefix)
```

Invalidates every cache key that starts with the given prefix. Used in resolvers to drop a whole group of caches after bulk changes.

#### 1.2. Targeted invalidation

```python
async def invalidate_authors_cache(author_id=None)
async def invalidate_topics_cache(topic_id=None)
```

These functions invalidate the cache for a single entity only, which reduces load on Redis and avoids dropping cached data unnecessarily. If no entity ID is given, they fall back to prefix invalidation.

Examples of targeted invalidation:

```python
# Invalidate the cache only for the author with ID 123
await invalidate_authors_cache(123)

# Invalidate the cache only for the topic with ID 456
await invalidate_topics_cache(456)
```

### 2. Deferred Invalidation

The `revalidator.py` module implements deferred cache invalidation through the `CacheRevalidationManager` class:

```python
class CacheRevalidationManager:
    # ...
    async def process_revalidation(self):
        # ...
    def mark_for_revalidation(self, entity_id, entity_type):
        # ...
```

The revalidation manager runs as an asynchronous background process that periodically (every 5 minutes by default) checks whether any entities need revalidation.

Implementation details:

- Authors and topics are revalidated one record at a time
- Shouts and reactions are processed in batches, with a threshold of 10 items
- Once the threshold is reached, the system switches to collection invalidation instead of per-item processing
- The special `all` flag triggers full invalidation of every record of that type (see the sketch below)
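A short sketch of how callers queue work for the manager, using the module-level instance from `cache/revalidator.py`:

```python
from cache.revalidator import revalidation_manager


def on_topic_saved(topic_id: int):
    # A single entity: picked up on the next background pass
    revalidation_manager.mark_for_revalidation(topic_id, "topics")


def on_bulk_import_finished():
    # A whole type: adds the special "all" flag and triggers prefix invalidation
    revalidation_manager.invalidate_all("shouts")
```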
### 3. Automatic Invalidation via Triggers

The `triggers.py` module registers SQLAlchemy event handlers that automatically mark entities for revalidation whenever data changes in the database:

```python
def events_register():
    event.listen(Author, "after_update", mark_for_revalidation)
    event.listen(Topic, "after_update", mark_for_revalidation)
    # and others...
```

The triggers:

- React to insert, update, and delete events
- Mark the affected entities for deferred revalidation
- Take entity relations into account (e.g. changing a topic also refreshes related shouts)

## Pre-caching

The `precache.py` module pre-caches frequently used data at application startup:

```python
async def precache_data():
    # ...
```

This function runs when the application starts and fills the cache with data that users will request most often.
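The function is launched together with the other services in the application lifespan; a trimmed sketch of that wiring (the full version lives in `main.py`):

```python
import asyncio

from cache.precache import precache_data
from services.redis import redis


async def lifespan(_app):
    # Warm the cache before the app starts serving traffic
    await asyncio.gather(redis.connect(), precache_data())
    yield
```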
## Usage Examples

### Simple caching of a query result

```python
async def get_topics_with_stats(limit=10, offset=0, by="title"):
    # Build the cache key according to the convention
    cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}"

    cached_data = await get_cached_data(cache_key)
    if cached_data:
        return cached_data

    # Query the database
    result = ...  # data-loading logic

    await cache_data(cache_key, result, ttl=300)
    return result
```

### Using the generic cached_query function

```python
async def get_topics_with_stats(limit=10, offset=0, by="title"):
    async def fetch_data(limit, offset, by):
        # Data-loading logic
        return result

    # Build the cache key according to the convention
    cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}"

    return await cached_query(
        cache_key,
        fetch_data,
        ttl=300,
        limit=limit,
        offset=offset,
        by=by
    )
```

### Targeted cache invalidation when data changes

```python
async def update_topic(topic_id, new_data):
    # Update the data in the database
    # ...

    # Targeted invalidation of the cache for the changed topic only
    await invalidate_topics_cache(topic_id)

    return updated_topic
```

## Debugging and Monitoring

The caching system uses the logger to trace its operations:

```python
logger.debug(f"Данные получены из кеша по ключу {key}")
logger.debug(f"Удалено {len(keys)} ключей кеша с префиксом {prefix}")
logger.error(f"Ошибка при инвалидации кеша: {e}")
```

This makes it possible to monitor how the cache behaves and to spot problems early.

## Usage Recommendations

1. **Follow the key-building conventions** - this is critical for a consistent and predictable cache.
2. **Do not invent your own key formats** - use the existing patterns to keep keys uniform.
3. **Do not forget invalidation** - always invalidate the cache when data changes.
4. **Prefer targeted invalidation** over prefix invalidation to reduce load on Redis.
5. **Set reasonable TTLs** - use different TTL values depending on how often the data changes.
6. **Do not cache large volumes of data** - cache only what actually improves performance.

## Implementation Details

- **Serialization**: `orjson` is used for efficient serialization and deserialization.
- **Date and time formatting**: `CustomJSONEncoder` handles dates correctly.
- **Asynchrony**: all cache operations run asynchronously to minimize the impact on API performance.
- **Direct Redis access**: every operation goes through direct `redis.execute()` calls with error handling.
- **Batch processing**: bulk operations use a threshold beyond which optimized strategies apply.

## Known Limitations

1. **Data consistency** - the system does not guarantee strict consistency between the cache and the database.
2. **Memory** - the amount of cached data must be monitored to avoid Redis memory problems.
3. **Redis performance** - under a heavy volume of cache operations Redis itself can become a bottleneck.

docs/features.md

@@ -0,0 +1,37 @@
## Publication Views

- Google Analytics integration for tracking publication views
- Counting unique users and total view counts
- Statistics are refreshed automatically when publication data is requested

## Multi-domain Authorization

- Authorization support for multiple domains
- Automatic detection of the authorization server
- Correct CORS handling for all supported domains

## Caching System

- Redis is the primary caching backend
- The cache_on_arguments decorator supports both sync and async functions
- Automatic JSON serialization/deserialization using CustomJSONEncoder
- Fallback pickle serialization for complex objects
- Unique cache keys generated from the function signature and the passed arguments
- Configurable cache lifetime (TTL)
- Manual cache invalidation for specific functions and arguments (see the sketch below)
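A hypothetical usage sketch of the decorator described above; the import path and the `ttl` keyword are assumptions, not the actual API:

```python
from services.cache import cache_on_arguments  # assumed location


@cache_on_arguments(ttl=300)  # ttl parameter name is an assumption
async def get_topic_stats(topic_id: int) -> dict:
    # The decorator derives the Redis key from the function signature and
    # its arguments and serializes the result with CustomJSONEncoder.
    return await load_topic_stats_from_db(topic_id)  # placeholder loader
```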
## Webhooks

- Automatic registration of a webhook for the user.login event
- Prevention of duplicate webhooks
- Automatic cleanup of stale webhooks
- Webhook authorization via WEBHOOK_SECRET
- Error handling for webhook operations
- Endpoint determined dynamically from the environment

## CORS Configuration

- Supported methods: GET, POST, OPTIONS
- Credentials support enabled
- Allowed headers: Authorization, Content-Type, X-Requested-With, DNT, Cache-Control
- Preflight responses cached for 20 days (1728000 seconds)

docs/follower.md

@@ -0,0 +1,94 @@
# Following System
## Overview
The system supports following these entity types:
- Authors
- Topics
- Communities
- Shouts (Posts)
## GraphQL API
### Mutations
#### follow
Follow an entity (author/topic/community/shout).
**Parameters:**
- `what: String!` - Entity type (`AUTHOR`, `TOPIC`, `COMMUNITY`, `SHOUT`)
- `slug: String` - Entity slug
- `entity_id: Int` - Optional entity ID
**Returns:**
```typescript
{
authors?: Author[] // For AUTHOR type
topics?: Topic[] // For TOPIC type
communities?: Community[] // For COMMUNITY type
shouts?: Shout[] // For SHOUT type
error?: String // Error message if any
}
```
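A sketch of calling this mutation from Python; the endpoint URL and auth header format are placeholders, not part of this spec:

```python
import httpx

FOLLOW_MUTATION = """
mutation Follow($what: String!, $slug: String) {
  follow(what: $what, slug: $slug) {
    error
    topics { id slug }
  }
}
"""


async def follow_topic(slug: str, token: str) -> dict:
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            "https://example.org/graphql",  # placeholder endpoint
            json={"query": FOLLOW_MUTATION, "variables": {"what": "TOPIC", "slug": slug}},
            headers={"Authorization": token},  # placeholder auth scheme
        )
        return resp.json()["data"]["follow"]
```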
#### unfollow
Unfollow an entity.
**Parameters:** Same as `follow`
**Returns:** Same as `follow`
### Queries
#### get_shout_followers
Get list of users who reacted to a shout.
**Parameters:**
- `slug: String` - Shout slug
- `shout_id: Int` - Optional shout ID
**Returns:**
```typescript
Author[] // List of authors who reacted
```
## Caching System
### Supported Entity Types
- Authors: `cache_author`, `get_cached_follower_authors`
- Topics: `cache_topic`, `get_cached_follower_topics`
- Communities: No cache
- Shouts: No cache
### Cache Flow
1. On follow/unfollow:
- Update entity in cache
- Update follower's following list
2. Cache is updated before notifications
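A sketch of that ordering for an author follow (the cache helpers are the ones named above; the notification call is a placeholder):

```python
async def handle_follow_author(follower: dict, author: dict):
    # 1. Update the followed entity and the follower's following list in cache
    await cache_author(author)
    await cache_follows(follower["id"], "author", author["id"], is_insert=True)
    # 2. Only then emit the "follow" notification
    await notify_author_follower(follower, author["id"], "follow")  # placeholder
```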
## Notifications
- Sent when author is followed/unfollowed
- Contains:
- Follower info
- Author ID
- Action type ("follow"/"unfollow")
## Error Handling
- Unauthorized access check
- Entity existence validation
- Duplicate follow prevention
- Full error logging
- Transaction safety with `local_session()`
## Database Schema
### Follower Tables
- `AuthorFollower`
- `TopicFollower`
- `CommunityFollower`
- `ShoutReactionsFollower`
Each table contains:
- `follower` - ID of following user
- `{entity_type}` - ID of followed entity

docs/load_shouts.md

@@ -0,0 +1,80 @@
# Publication Loading System

## Implementation Notes

### Base query

- Automatically loads the main author
- Adds the publication's main topic
- Supports a flexible filtering system
- Optimizes queries based on the requested fields

### Statistics

- Like/dislike counts
- Number of comments
- Date of the last reaction
- Statistics are loaded only when the `stat` field is requested

### Performance optimizations

- Lazy loading of related data
- Results cached for 5 minutes
- Batch loading of authors and topics
- Subqueries for complex selections

## Feed Types

### Random top posts (load_shouts_random_top)

**Advantages:**
- Diverse content
- Fast selection from the cached pool of top posts
- Configurable pool size

**Limitations:**
- Refreshed once every 5 minutes
- Maximum pool size: 100 posts
- Only likes/dislikes are counted (comments are ignored)

### Unrated posts (load_shouts_unrated)

**Advantages:**
- Helps surface new content
- Evens out the distribution of ratings
- Random output order

**Limitations:**
- Only posts with fewer than 3 reactions
- Comments are not counted
- No sorting by rating

### Bookmarks (load_shouts_bookmarked)

**Advantages:**
- Personalized selection
- Quick access to saved posts
- Supports all filters

**Limitations:**
- Requires authorization
- Limit on the number of bookmarks
- Caching disabled

## Important Details

### Pagination

- Default page size: 10
- Maximum page size: 100
- Cursor-based pagination supported

### Caching

- TTL: 5 minutes
- Invalidated when a post changes
- Separate cache for each sort type (key sketch below)
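For illustration, a per-sort cache key could follow the `entity:collection:params` convention from docs/caching.md (the exact key layout here is a sketch, not the production format):

```python
def feed_cache_key(sort: str, limit: int, offset: int) -> str:
    # e.g. feed_cache_key("rating", 10, 0) -> "shouts:feed:sort=rating:limit=10:offset=0"
    return f"shouts:feed:sort={sort}:limit={limit}:offset={offset}"
```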
### Sorting

- By rating (likes minus dislikes)
- By number of comments
- By date of the last reaction
- By publication date (default)

### Security

- Access rights checks
- Deleted content is filtered out
- Protection against SQL injection
- Input validation

docs/rating.md

@@ -0,0 +1,82 @@
# Rating System
## GraphQL Resolvers
### Queries
#### get_my_rates_shouts
Get user's reactions (LIKE/DISLIKE) for specified posts.
**Parameters:**
- `shouts: [Int!]!` - array of shout IDs
**Returns:**
```typescript
[{
shout_id: Int
my_rate: ReactionKind // LIKE or DISLIKE
}]
```
#### get_my_rates_comments
Get user's reactions (LIKE/DISLIKE) for specified comments.
**Parameters:**
- `comments: [Int!]!` - array of comment IDs
**Returns:**
```typescript
[{
comment_id: Int
my_rate: ReactionKind // LIKE or DISLIKE
}]
```
### Mutations
#### rate_author
Rate another author (karma system).
**Parameters:**
- `rated_slug: String!` - author's slug
- `value: Int!` - rating value (positive/negative)
## Rating Calculation
### Author Rating Components
#### Shouts Rating
- Calculated from LIKE/DISLIKE reactions on author's posts
- Each LIKE: +1
- Each DISLIKE: -1
- Excludes deleted reactions
- Excludes comment reactions
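A sketch of this rule as a SQLAlchemy aggregate (not the actual `count_author_shouts_rating` implementation; attributing posts via `Shout.created_by` is an assumption):

```python
from sqlalchemy import case, func, select

from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout


def author_shouts_rating_stmt(author_id: int):
    rating = func.sum(
        case(
            (Reaction.kind == ReactionKind.LIKE.value, 1),
            (Reaction.kind == ReactionKind.DISLIKE.value, -1),
            else_=0,
        )
    )
    return (
        select(rating)
        .join(Shout, Shout.id == Reaction.shout)
        .where(
            Shout.created_by == author_id,
            Reaction.reply_to.is_(None),    # exclude reactions left on comments
            Reaction.deleted_at.is_(None),  # exclude deleted reactions
        )
    )
```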
#### Comments Rating
- Calculated from LIKE/DISLIKE reactions on author's comments
- Each LIKE: +1
- Each DISLIKE: -1
- Only counts reactions left on COMMENT-type reactions
- Excludes deleted reactions
#### Legacy Karma
- Based on direct author ratings via `rate_author` mutation
- Stored in `AuthorRating` table
- Each positive rating: +1
- Each negative rating: -1
### Helper Functions
- `count_author_comments_rating()` - Calculate comment rating
- `count_author_shouts_rating()` - Calculate posts rating
- `get_author_rating_old()` - Get legacy karma rating
- `get_author_rating_shouts()` - Get posts rating (optimized)
- `get_author_rating_comments()` - Get comments rating (optimized)
- `add_author_rating_columns()` - Add rating columns to author query
## Notes
- All ratings exclude deleted content
- Reactions are unique per user/content
- Rating calculations are optimized with SQLAlchemy
- System supports both direct author rating and content-based rating

main.py

@@ -1,45 +1,95 @@
import asyncio
import os
import sys
from importlib import import_module
from os.path import exists

from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import Route

from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import create_all_tables, resolvers
from services.search import search_service
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint, create_webhook_endpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE

import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)


async def start():
    if MODE == "development":
        if not exists(DEV_SERVER_PID_FILE_NAME):
            # pid file management
            with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
                f.write(str(os.getpid()))
    print(f"[main] process started in {MODE} mode")


async def lifespan(_app):
    try:
        create_all_tables()
        await asyncio.gather(
            redis.connect(),
            precache_data(),
            ViewedStorage.init(),
            create_webhook_endpoint(),
            search_service.info(),
            start(),
            revalidation_manager.start(),
        )
        yield
    finally:
        tasks = [redis.disconnect(), ViewedStorage.stop(), revalidation_manager.stop()]
        await asyncio.gather(*tasks, return_exceptions=True)


# Create the GraphQL application instance
graphql_app = GraphQL(schema, debug=True)


# Wrap the GraphQL handler for better error handling
async def graphql_handler(request: Request):
    if request.method not in ["GET", "POST"]:
        return JSONResponse({"error": "Method Not Allowed"}, status_code=405)
    try:
        result = await graphql_app.handle_request(request)
        if isinstance(result, Response):
            return result
        return JSONResponse(result)
    except asyncio.CancelledError:
        return JSONResponse({"error": "Request cancelled"}, status_code=499)
    except Exception as e:
        print(f"GraphQL error: {str(e)}")
        return JSONResponse({"error": str(e)}, status_code=500)


# Register the route in Starlette
app = Starlette(
    routes=[
        Route("/", graphql_handler, methods=["GET", "POST"]),
        Route("/new-author", WebhookEndpoint),
    ],
    lifespan=lifespan,
    debug=True,
)
app.add_middleware(ExceptionHandlerMiddleware)
if "dev" in sys.argv:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["https://localhost:3000"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )


@@ -1,18 +1,14 @@
log_format custom '$remote_addr - $remote_user [$time_local] "$request" '
                  'origin=$http_origin allow_origin=$allow_origin status=$status '
                  '"$http_referer" "$http_user_agent"';

{{ $proxy_settings := "proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection $http_connection; proxy_set_header Host $http_host; proxy_set_header X-Request-Start $msec;" }}
{{ $gzip_settings := "gzip on; gzip_min_length 1100; gzip_buffers 4 32k; gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/x-javascript application/json application/xml application/rss+xml font/truetype application/x-font-ttf font/opentype application/vnd.ms-fontobject image/svg+xml; gzip_vary on; gzip_comp_level 6;" }}

map $http_origin $allow_origin {
    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io)$ $http_origin;
    default "";
}

proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=my_cache:10m max_size=1g
    inactive=60m use_temp_path=off;

limit_conn_zone $binary_remote_addr zone=addr:10m;
limit_req_zone $binary_remote_addr zone=req_zone:10m rate=20r/s;

{{ range $port_map := .PROXY_PORT_MAP | split " " }}
{{ $port_map_list := $port_map | split ":" }}
@@ -25,14 +21,15 @@ server {
    listen [::]:{{ $listen_port }};
    listen {{ $listen_port }};
    server_name {{ $.NOSSL_SERVER_NAME }};
    access_log /var/log/nginx/{{ $.APP }}-access.log custom;
    error_log /var/log/nginx/{{ $.APP }}-error.log;
    client_max_body_size 100M;
{{ else if eq $scheme "https" }}
    listen [::]:{{ $listen_port }} ssl http2;
    listen {{ $listen_port }} ssl http2;
    server_name {{ $.NOSSL_SERVER_NAME }};
    access_log /var/log/nginx/{{ $.APP }}-access.log custom;
    error_log /var/log/nginx/{{ $.APP }}-error.log;
    ssl_certificate {{ $.APP_SSL_PATH }}/server.crt;
    ssl_certificate_key {{ $.APP_SSL_PATH }}/server.key;
@@ -42,32 +39,63 @@ server {
    keepalive_timeout 70;
    keepalive_requests 500;
    proxy_read_timeout 3600;
    limit_conn addr 10000;
{{ end }}

    location / {
        proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
        {{ $proxy_settings }}
        {{ $gzip_settings }}

        # Handle CORS for OPTIONS method
        if ($request_method = 'OPTIONS') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS';
            add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
            add_header 'Access-Control-Max-Age' 1728000;
            add_header 'Content-Type' 'text/plain; charset=utf-8';
            add_header 'Content-Length' 0;
            return 204;
        }

        # Handle CORS for POST method
        if ($request_method = 'POST') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
        }

        # Handle CORS for GET method
        if ($request_method = 'GET') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
        }

        proxy_cache my_cache;
        proxy_cache_revalidate on;
        proxy_cache_min_uses 2;
        proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
        proxy_cache_background_update on;
        proxy_cache_lock on;

        # Connections and request limits increase (bad for DDos)
        limit_req zone=req_zone burst=10 nodelay;
    }

    location ~* \.(jpg|jpeg|png|gif|ico|css|js)$ {
        proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
        expires 30d;
        add_header Cache-Control "public, no-transform";
    }

    location ~* \.(mp3|wav|ogg|flac|aac|aif|webm)$ {
        proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
        if ($request_method = 'GET') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;


@@ -1,46 +1,137 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String

from services.db import Base

# from sqlalchemy_utils import TSVectorType


class AuthorRating(Base):
    """
    A rating given to an author by another author.

    Attributes:
        rater (int): ID of the rating author
        author (int): ID of the rated author
        plus (bool): positive/negative rating
    """

    __tablename__ = "author_rating"

    id = None  # type: ignore
    rater = Column(ForeignKey("author.id"), primary_key=True)
    author = Column(ForeignKey("author.id"), primary_key=True)
    plus = Column(Boolean)

    # Define indexes
    __table_args__ = (
        # Index for quickly finding all ratings of a given author
        Index("idx_author_rating_author", "author"),
        # Index for quickly finding all ratings left by a given author
        Index("idx_author_rating_rater", "rater"),
    )


class AuthorFollower(Base):
    """
    A subscription of one author to another.

    Attributes:
        follower (int): follower ID
        author (int): ID of the followed author
        created_at (int): subscription creation time
        auto (bool): automatic subscription flag
    """

    __tablename__ = "author_follower"

    id = None  # type: ignore
    follower = Column(ForeignKey("author.id"), primary_key=True)
    author = Column(ForeignKey("author.id"), primary_key=True)
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    auto = Column(Boolean, nullable=False, default=False)

    # Define indexes
    __table_args__ = (
        # Index for quickly finding an author's followers
        Index("idx_author_follower_author", "author"),
        # Index for quickly finding all authors a given author follows
        Index("idx_author_follower_follower", "follower"),
    )


class AuthorBookmark(Base):
    """
    An author's bookmark of a publication.

    Attributes:
        author (int): author ID
        shout (int): publication ID
    """

    __tablename__ = "author_bookmark"

    id = None  # type: ignore
    author = Column(ForeignKey("author.id"), primary_key=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True)

    # Define indexes
    __table_args__ = (
        # Index for quickly finding all bookmarks of an author
        Index("idx_author_bookmark_author", "author"),
        # Index for quickly finding all authors who bookmarked a publication
        Index("idx_author_bookmark_shout", "shout"),
    )


class Author(Base):
    """
    Author model.

    Attributes:
        user (str): user identifier in the authorization system
        name (str): display name
        slug (str): unique string identifier
        bio (str): short bio/status
        about (str): full description
        pic (str): profile picture URL
        links (dict): social network and website links
        created_at (int): profile creation time
        last_seen (int): last visit time
        updated_at (int): last update time
        deleted_at (int): deletion time (if the profile is deleted)
    """

    __tablename__ = "author"

    user = Column(String)  # unbounded link with authorizer's User type

    name = Column(String, nullable=True, comment="Display name")
    slug = Column(String, unique=True, comment="Author's slug")
    bio = Column(String, nullable=True, comment="Bio")  # status description
    about = Column(String, nullable=True, comment="About")  # long and formatted
    pic = Column(String, nullable=True, comment="Picture")
    links = Column(JSON, nullable=True, comment="Links")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    deleted_at = Column(Integer, nullable=True, comment="Deleted at")

    # search_vector = Column(
    #     TSVectorType("name", "slug", "bio", "about", regconfig="pg_catalog.russian")
    # )

    # Define indexes
    __table_args__ = (
        # Index for fast lookup by slug
        Index("idx_author_slug", "slug"),
        # Index for fast lookup by user identifier
        Index("idx_author_user", "user"),
        # Index for filtering non-deleted authors
        Index("idx_author_deleted_at", "deleted_at", postgresql_where=deleted_at.is_(None)),
        # Index for sorting by creation time (new authors)
        Index("idx_author_created_at", "created_at"),
        # Index for sorting by last seen time
        Index("idx_author_last_seen", "last_seen"),
    )


@@ -6,20 +6,20 @@ from services.db import Base
class ShoutCollection(Base):
    __tablename__ = "shout_collection"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True)
    collection = Column(ForeignKey("collection.id"), primary_key=True)


class Collection(Base):
    __tablename__ = "collection"

    slug = Column(String, unique=True)
    title = Column(String, nullable=False, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    created_at = Column(Integer, default=lambda: int(time.time()))
    created_by = Column(ForeignKey("author.id"), comment="Created By")
    published_at = Column(Integer, default=lambda: int(time.time()))


@@ -1,29 +1,106 @@
import enum
import time

from sqlalchemy import Column, ForeignKey, Integer, String, Text, distinct, func
from sqlalchemy.ext.hybrid import hybrid_property

from orm.author import Author
from services.db import Base


class CommunityRole(enum.Enum):
    READER = "reader"  # can read and comment
    AUTHOR = "author"  # + can vote and invite collaborators
    ARTIST = "artist"  # + can be credited as featured artist
    EXPERT = "expert"  # + can add proof or disproof to shouts, can manage topics
    EDITOR = "editor"  # + can manage topics, comments and community settings

    @classmethod
    def as_string_array(cls, roles):
        return [role.value for role in roles]


class CommunityFollower(Base):
    __tablename__ = "community_author"

    author = Column(ForeignKey("author.id"), primary_key=True)
    community = Column(ForeignKey("community.id"), primary_key=True)
    joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    roles = Column(Text, nullable=True, comment="Roles (comma-separated)")

    def set_roles(self, roles):
        self.roles = CommunityRole.as_string_array(roles)

    def get_roles(self):
        return [CommunityRole(role) for role in self.roles]


class Community(Base):
    __tablename__ = "community"

    name = Column(String, nullable=False)
    slug = Column(String, nullable=False, unique=True)
    desc = Column(String, nullable=False, default="")
    pic = Column(String, nullable=False, default="")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    created_by = Column(ForeignKey("author.id"), nullable=False)

    @hybrid_property
    def stat(self):
        return CommunityStats(self)

    @property
    def role_list(self):
        return self.roles.split(",") if self.roles else []

    @role_list.setter
    def role_list(self, value):
        self.roles = ",".join(value) if value else None


class CommunityStats:
    def __init__(self, community):
        self.community = community

    @property
    def shouts(self):
        from orm.shout import Shout

        return self.community.session.query(func.count(Shout.id)).filter(Shout.community == self.community.id).scalar()

    @property
    def followers(self):
        return (
            self.community.session.query(func.count(CommunityFollower.author))
            .filter(CommunityFollower.community == self.community.id)
            .scalar()
        )

    @property
    def authors(self):
        from orm.shout import Shout

        # author has a shout with community id and its featured_at is not null
        return (
            self.community.session.query(func.count(distinct(Author.id)))
            .join(Shout)
            .filter(Shout.community == self.community.id, Shout.featured_at.is_not(None), Author.id.in_(Shout.authors))
            .scalar()
        )


class CommunityAuthor(Base):
    __tablename__ = "community_author"

    id = Column(Integer, primary_key=True)
    community_id = Column(Integer, ForeignKey("community.id"))
    author_id = Column(Integer, ForeignKey("author.id"))
    roles = Column(Text, nullable=True, comment="Roles (comma-separated)")

    @property
    def role_list(self):
        return self.roles.split(",") if self.roles else []

    @role_list.setter
    def role_list(self, value):
        self.roles = ",".join(value) if value else None

orm/draft.py

@@ -0,0 +1,55 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from orm.author import Author
from orm.topic import Topic
from services.db import Base


class DraftTopic(Base):
    __tablename__ = "draft_topic"

    id = None  # type: ignore
    shout = Column(ForeignKey("draft.id"), primary_key=True, index=True)
    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
    main = Column(Boolean, nullable=True)


class DraftAuthor(Base):
    __tablename__ = "draft_author"

    id = None  # type: ignore
    shout = Column(ForeignKey("draft.id"), primary_key=True, index=True)
    author = Column(ForeignKey("author.id"), primary_key=True, index=True)
    caption = Column(String, nullable=True, default="")


class Draft(Base):
    __tablename__ = "draft"

    # required
    created_at: int = Column(Integer, nullable=False, default=lambda: int(time.time()))
    created_by: int = Column(ForeignKey("author.id"), nullable=False)

    # optional
    layout: str = Column(String, nullable=True, default="article")
    slug: str = Column(String, unique=True)
    title: str = Column(String, nullable=True)
    subtitle: str | None = Column(String, nullable=True)
    lead: str | None = Column(String, nullable=True)
    description: str | None = Column(String, nullable=True)
    body: str = Column(String, nullable=False, comment="Body")
    media: dict | None = Column(JSON, nullable=True)
    cover: str | None = Column(String, nullable=True, comment="Cover image url")
    cover_caption: str | None = Column(String, nullable=True, comment="Cover image alt caption")
    lang: str = Column(String, nullable=False, default="ru", comment="Language")
    seo: str | None = Column(String, nullable=True)  # JSON

    # auto
    updated_at: int | None = Column(Integer, nullable=True, index=True)
    deleted_at: int | None = Column(Integer, nullable=True, index=True)
    updated_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    deleted_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    authors = relationship(Author, secondary="draft_author")
    topics = relationship(Topic, secondary="draft_topic")


@@ -1,4 +1,4 @@
import enum

from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import relationship
@@ -6,20 +6,30 @@ from sqlalchemy.orm import relationship
from services.db import Base


class InviteStatus(enum.Enum):
    PENDING = "PENDING"
    ACCEPTED = "ACCEPTED"
    REJECTED = "REJECTED"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class Invite(Base):
    __tablename__ = "invite"

    inviter_id = Column(ForeignKey("author.id"), primary_key=True)
    author_id = Column(ForeignKey("author.id"), primary_key=True)
    shout_id = Column(ForeignKey("shout.id"), primary_key=True)
    status = Column(String, default=InviteStatus.PENDING.value)

    inviter = relationship("Author", foreign_keys=[inviter_id])
    author = relationship("Author", foreign_keys=[author_id])
    shout = relationship("Shout")

    def set_status(self, status: InviteStatus):
        self.status = status.value

    def get_status(self) -> InviteStatus:
        return InviteStatus.from_string(self.status)


@@ -1,51 +1,63 @@
import enum
import time

from sqlalchemy import JSON, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from orm.author import Author
from services.db import Base


class NotificationEntity(enum.Enum):
    REACTION = "reaction"
    SHOUT = "shout"
    FOLLOWER = "follower"
    COMMUNITY = "community"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class NotificationAction(enum.Enum):
    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
    SEEN = "seen"
    FOLLOW = "follow"
    UNFOLLOW = "unfollow"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class NotificationSeen(Base):
    __tablename__ = "notification_seen"

    viewer = Column(ForeignKey("author.id"), primary_key=True)
    notification = Column(ForeignKey("notification.id"), primary_key=True)


class Notification(Base):
    __tablename__ = "notification"

    id = Column(Integer, primary_key=True, autoincrement=True)
    created_at = Column(Integer, server_default=str(int(time.time())))
    entity = Column(String, nullable=False)
    action = Column(String, nullable=False)
    payload = Column(JSON, nullable=True)

    seen = relationship(Author, secondary="notification_seen")

    def set_entity(self, entity: NotificationEntity):
        self.entity = entity.value

    def get_entity(self) -> NotificationEntity:
        return NotificationEntity.from_string(self.entity)

    def set_action(self, action: NotificationAction):
        self.action = action.value

    def get_action(self) -> NotificationAction:
        return NotificationAction.from_string(self.action)


@@ -1,6 +1,5 @@
from orm.reaction import ReactionKind

PROPOSAL_REACTIONS = [
    ReactionKind.ACCEPT.value,
    ReactionKind.REJECT.value,
@@ -10,15 +9,9 @@ PROPOSAL_REACTIONS = [
    ReactionKind.PROPOSE.value,
]

PROOF_REACTIONS = [ReactionKind.PROOF.value, ReactionKind.DISPROOF.value]

RATING_REACTIONS = [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]


def is_negative(x):


@@ -10,36 +10,36 @@ class ReactionKind(Enumeration):
    # TYPE = <reaction index>  # rating diff

    # editor mode
    AGREE = "AGREE"  # +1
    DISAGREE = "DISAGREE"  # -1
    ASK = "ASK"  # +0
    PROPOSE = "PROPOSE"  # +0
    ACCEPT = "ACCEPT"  # +1
    REJECT = "REJECT"  # -1

    # expert mode
    PROOF = "PROOF"  # +1
    DISPROOF = "DISPROOF"  # -1

    # public feed
    QUOTE = "QUOTE"  # +0 TODO: use to bookmark in collection
    COMMENT = "COMMENT"  # +0
    LIKE = "LIKE"  # +1
    DISLIKE = "DISLIKE"  # -1


class Reaction(Base):
    __tablename__ = "reaction"

    body = Column(String, default="", comment="Reaction Body")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()), index=True)
    updated_at = Column(Integer, nullable=True, comment="Updated at", index=True)
    deleted_at = Column(Integer, nullable=True, comment="Deleted at", index=True)
    deleted_by = Column(ForeignKey("author.id"), nullable=True)
    reply_to = Column(ForeignKey("reaction.id"), nullable=True)
    quote = Column(String, nullable=True, comment="Original quoted text")
    shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
    created_by = Column(ForeignKey("author.id"), nullable=False)
    kind = Column(String, nullable=False, index=True)
    oid = Column(String)


@@ -1,83 +1,127 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String
from sqlalchemy.orm import relationship

from orm.author import Author
from orm.reaction import Reaction
from orm.topic import Topic
from services.db import Base


class ShoutTopic(Base):
    """
    Link between a publication and a topic.

    Attributes:
        shout (int): publication ID
        topic (int): topic ID
        main (bool): whether this is the main topic
    """

    __tablename__ = "shout_topic"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
    main = Column(Boolean, nullable=True)

    # Additional indexes
    __table_args__ = (
        # Composite index optimized for queries that look up publications by topic
        Index("idx_shout_topic_topic_shout", "topic", "shout"),
    )


class ShoutReactionsFollower(Base):
    __tablename__ = "shout_reactions_followers"

    id = None  # type: ignore
    follower = Column(ForeignKey("author.id"), primary_key=True, index=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    auto = Column(Boolean, nullable=False, default=False)
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    deleted_at = Column(Integer, nullable=True)


class ShoutAuthor(Base):
    """
    Link between a publication and an author.

    Attributes:
        shout (int): publication ID
        author (int): author ID
        caption (str): author's caption
    """

    __tablename__ = "shout_author"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    author = Column(ForeignKey("author.id"), primary_key=True, index=True)
    caption = Column(String, nullable=True, default="")

    # Additional indexes
    __table_args__ = (
        # Index optimized for queries that look up publications by author
        Index("idx_shout_author_author_shout", "author", "shout"),
    )


class Shout(Base):
    """
    A publication in the system.
    """

    __tablename__ = "shout"

    created_at: int = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at: int | None = Column(Integer, nullable=True, index=True)
    published_at: int | None = Column(Integer, nullable=True, index=True)
    featured_at: int | None = Column(Integer, nullable=True, index=True)
    deleted_at: int | None = Column(Integer, nullable=True, index=True)

    created_by: int = Column(ForeignKey("author.id"), nullable=False)
    updated_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    deleted_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    community: int = Column(ForeignKey("community.id"), nullable=False)

    body: str = Column(String, nullable=False, comment="Body")
    slug: str = Column(String, unique=True)
    cover: str | None = Column(String, nullable=True, comment="Cover image url")
    cover_caption: str | None = Column(String, nullable=True, comment="Cover image alt caption")
    lead: str | None = Column(String, nullable=True)
    description: str | None = Column(String, nullable=True)
    title: str = Column(String, nullable=False)
    subtitle: str | None = Column(String, nullable=True)
    layout: str = Column(String, nullable=False, default="article")
    media: dict | None = Column(JSON, nullable=True)

    authors = relationship(Author, secondary="shout_author")
    topics = relationship(Topic, secondary="shout_topic")
    reactions = relationship(Reaction)

    lang: str = Column(String, nullable=False, default="ru", comment="Language")
    version_of: int | None = Column(ForeignKey("shout.id"), nullable=True)
    oid: str | None = Column(String, nullable=True)
    seo: str | None = Column(String, nullable=True)  # JSON
    draft: int | None = Column(ForeignKey("draft.id"), nullable=True)

    # Indexes
    __table_args__ = (
        # Partial index for quickly finding non-deleted publications
        Index("idx_shout_deleted_at", "deleted_at", postgresql_where=deleted_at.is_(None)),
        # Index for fast filtering by community
        Index("idx_shout_community", "community"),
        # Index for fast lookup by slug
        Index("idx_shout_slug", "slug"),
        # Composite partial index for filtering published, non-deleted publications
        Index(
            "idx_shout_published_deleted",
            "published_at",
            "deleted_at",
            postgresql_where=published_at.is_not(None) & deleted_at.is_(None),
        ),
    )
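
The composite partial index idx_shout_published_deleted above targets the common "published and not deleted" filter. A minimal sketch of the kind of feed query it is meant to serve (illustrative only, not taken from this changeset):

# Illustrative only: a feed-style query whose WHERE clause matches the
# idx_shout_published_deleted partial index defined above.
from sqlalchemy import select

from orm.shout import Shout

recent_published = (
    select(Shout)
    .where(Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
    .order_by(Shout.published_at.desc())
    .limit(20)
)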


@@ -1,26 +1,66 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String

from services.db import Base


class TopicFollower(Base):
    """
    Link between a topic and its follower.

    Attributes:
        follower (int): follower ID
        topic (int): topic ID
        created_at (int): when the link was created
        auto (bool): automatic subscription
    """

    __tablename__ = "topic_followers"

    id = None  # type: ignore
    follower = Column(Integer, ForeignKey("author.id"), primary_key=True)
    topic = Column(Integer, ForeignKey("topic.id"), primary_key=True)
    created_at = Column(Integer, nullable=False, default=int(time.time()))
    auto = Column(Boolean, nullable=False, default=False)

    # Indexes
    __table_args__ = (
        # Index for quickly finding all followers of a topic
        Index("idx_topic_followers_topic", "topic"),
        # Index for quickly finding all topics an author follows
        Index("idx_topic_followers_follower", "follower"),
    )


class Topic(Base):
    """
    Topic (theme) of publications.

    Attributes:
        slug (str): unique string identifier of the topic
        title (str): topic title
        body (str): topic description
        pic (str): topic picture URL
        community (int): community ID
        oid (str): legacy ID
        parent_ids (list): IDs of parent topics
    """

    __tablename__ = "topic"

    slug = Column(String, unique=True)
    title = Column(String, nullable=False, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    community = Column(ForeignKey("community.id"), default=1)
    oid = Column(String, nullable=True, comment="Old ID")
    parent_ids = Column(JSON, nullable=True, comment="Parent Topic IDs")

    # Indexes
    __table_args__ = (
        # Index for fast lookup by slug
        Index("idx_topic_slug", "slug"),
        # Index for fast lookup by community
        Index("idx_topic_community", "community"),
    )


@@ -1,30 +0,0 @@
import time
from sqlalchemy import Boolean, Column, Integer, String
from services.db import Base
class User(Base):
__tablename__ = 'authorizer_users'
id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
key = Column(String)
email = Column(String, unique=True)
email_verified_at = Column(Integer)
family_name = Column(String)
gender = Column(String)
given_name = Column(String)
is_multi_factor_auth_enabled = Column(Boolean)
middle_name = Column(String)
nickname = Column(String)
password = Column(String)
phone_number = Column(String, unique=True)
phone_number_verified_at = Column(Integer)
# preferred_username = Column(String, nullable=False)
picture = Column(String)
revoked_timestamp = Column(Integer)
roles = Column(String, default='author, reader')
signup_methods = Column(String, default='magic_link_login')
created_at = Column(Integer, default=lambda: int(time.time()))
updated_at = Column(Integer, default=lambda: int(time.time()))


@@ -1,33 +0,0 @@
[tool.poetry]
name = "core"
version = "0.3.3"
description = "core module for discours.io"
authors = ["discoursio devteam"]
license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.12"
SQLAlchemy = "^2.0.29"
psycopg2-binary = "^2.9.9"
redis = {extras = ["hiredis"], version = "^5.0.1"}
sentry-sdk = {version = "^1.44.1", extras = ["starlette", "ariadne", "sqlalchemy"]}
starlette = "^0.37.2"
gql = "^3.5.0"
ariadne = "^0.23.0"
pre-commit = "^3.7.0"
granian = "^1.2.1"
google-analytics-data = "^0.18.7"
opensearch-py = "^2.5.0"
httpx = "^0.27.0"
dogpile-cache = "^1.3.1"
colorlog = "^6.8.2"
sqlalchemy-searchable = "^2.1.0"
[tool.poetry.group.dev.dependencies]
ruff = "^0.3.5"
isort = "^5.13.2"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

requirements.dev.txt Normal file

@@ -0,0 +1,6 @@
fakeredis
pytest
pytest-asyncio
pytest-cov
mypy
ruff

requirements.txt Normal file

@@ -0,0 +1,17 @@
# own auth
bcrypt
authlib
passlib
opensearch-py
google-analytics-data
colorlog
psycopg2-binary
httpx
redis[hiredis]
sentry-sdk[starlette,sqlalchemy]
starlette
gql
ariadne
granian
orjson
pydantic


@@ -1,75 +1,128 @@
from cache.triggers import events_register
from resolvers.author import (  # search_authors,
    get_author,
    get_author_followers,
    get_author_follows,
    get_author_follows_authors,
    get_author_follows_topics,
    get_author_id,
    get_authors_all,
    load_authors_by,
    update_author,
)
from resolvers.community import get_communities_all, get_community
from resolvers.draft import (
    create_draft,
    delete_draft,
    load_drafts,
    publish_draft,
    unpublish_draft,
    update_draft,
)
from resolvers.feed import (
    load_shouts_coauthored,
    load_shouts_discussed,
    load_shouts_feed,
    load_shouts_followed_by,
)
from resolvers.follower import follow, get_shout_followers, unfollow
from resolvers.notifier import (
    load_notifications,
    notification_mark_seen,
    notifications_seen_after,
    notifications_seen_thread,
)
from resolvers.rating import get_my_rates_comments, get_my_rates_shouts, rate_author
from resolvers.reaction import (
    create_reaction,
    delete_reaction,
    load_comment_ratings,
    load_reactions_by,
    load_shout_comments,
    load_shout_ratings,
    update_reaction,
)
from resolvers.reader import (
    get_shout,
    load_shouts_by,
    load_shouts_random_top,
    load_shouts_search,
    load_shouts_unrated,
)
from resolvers.topic import (
    get_topic,
    get_topic_authors,
    get_topic_followers,
    get_topics_all,
    get_topics_by_author,
    get_topics_by_community,
)

events_register()

__all__ = [
    # author
    "get_author",
    "get_author_id",
    "get_author_followers",
    "get_author_follows",
    "get_author_follows_topics",
    "get_author_follows_authors",
    "get_authors_all",
    "load_authors_by",
    "update_author",
    ## "search_authors",
    # community
    "get_community",
    "get_communities_all",
    # topic
    "get_topic",
    "get_topics_all",
    "get_topics_by_community",
    "get_topics_by_author",
    "get_topic_followers",
    "get_topic_authors",
    # reader
    "get_shout",
    "load_shouts_by",
    "load_shouts_random_top",
    "load_shouts_search",
    "load_shouts_unrated",
    # feed
    "load_shouts_feed",
    "load_shouts_coauthored",
    "load_shouts_discussed",
    "load_shouts_with_topic",
    "load_shouts_followed_by",
    "load_shouts_authored_by",
    # follower
    "follow",
    "unfollow",
    "get_shout_followers",
    # reaction
    "create_reaction",
    "update_reaction",
    "delete_reaction",
    "load_reactions_by",
    "load_shout_comments",
    "load_shout_ratings",
    "load_comment_ratings",
    # notifier
    "load_notifications",
    "notifications_seen_thread",
    "notifications_seen_after",
    "notification_mark_seen",
    # rating
    "rate_author",
    "get_my_rates_comments",
    "get_my_rates_shouts",
    # draft
    "load_drafts",
    "create_draft",
    "update_draft",
    "delete_draft",
    "publish_draft",
    "publish_shout",
    "unpublish_shout",
    "unpublish_draft",
]
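
Importing the resolvers package is what wires these functions into the schema: the @query.field and @mutation.field decorators from services.schema register each field, and events_register() from cache.triggers is invoked once at import time. A small self-contained sketch of the same binding pattern in plain Ariadne (illustrative only; the SDL and resolver below are not from this repository):

# Illustrative sketch of Ariadne field binding; type_defs and the resolver are made up.
from ariadne import QueryType, make_executable_schema

type_defs = """
type Author { id: Int slug: String }
type Query { get_authors_all: [Author] }
"""

query = QueryType()


@query.field("get_authors_all")
async def resolve_get_authors_all(_, _info):
    return []  # a real resolver would hit the database or the cache


schema = make_executable_schema(type_defs, query)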


@@ -1,30 +1,203 @@
import asyncio
import time
from typing import Optional

from sqlalchemy import select, text

from cache.cache import (
    cache_author,
    cached_query,
    get_cached_author,
    get_cached_author_by_user_id,
    get_cached_author_followers,
    get_cached_follower_authors,
    get_cached_follower_topics,
    invalidate_cache_by_prefix,
)
from orm.author import Author
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.redis import redis
from services.schema import mutation, query
from utils.logger import root_logger as logger

DEFAULT_COMMUNITIES = [1]


# Helper to fetch all authors without stats
async def get_all_authors():
    """
    Fetches all authors without stats.
    Used when the full list of authors is needed without any extra information.

    Returns:
        list: list of all authors without stats
    """
    cache_key = "authors:all:basic"

    # Fetch all authors from the DB
    async def fetch_all_authors():
        logger.debug("Fetching the full list of authors from the DB and caching the result")
        with local_session() as session:
            # Query basic author information
            authors_query = select(Author).where(Author.deleted_at.is_(None))
            authors = session.execute(authors_query).scalars().all()
            # Convert authors to dicts
            return [author.dict() for author in authors]

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_all_authors)


# Helper to fetch authors with stats, paginated
async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None):
    """
    Fetches authors with stats, paginated.

    Args:
        limit: maximum number of authors to return
        offset: pagination offset
        by: optional sorting parameter (new/active)

    Returns:
        list: list of authors with their stats
    """
    # Build the cache key
    cache_key = f"authors:stats:limit={limit}:offset={offset}"

    # Fetch authors from the DB
    async def fetch_authors_with_stats():
        logger.debug(f"Querying authors with stats: limit={limit}, offset={offset}, by={by}")
        with local_session() as session:
            # Base query for authors
            base_query = select(Author).where(Author.deleted_at.is_(None))

            # Apply sorting
            if by:
                if isinstance(by, dict):
                    # Handle a dict of sorting parameters
                    from sqlalchemy import asc, desc

                    for field, direction in by.items():
                        column = getattr(Author, field, None)
                        if column:
                            if direction.lower() == "desc":
                                base_query = base_query.order_by(desc(column))
                            else:
                                base_query = base_query.order_by(column)
                elif by == "new":
                    base_query = base_query.order_by(desc(Author.created_at))
                elif by == "active":
                    base_query = base_query.order_by(desc(Author.last_seen))
                else:
                    # Sort by creation time by default
                    base_query = base_query.order_by(desc(Author.created_at))
            else:
                base_query = base_query.order_by(desc(Author.created_at))

            # Apply limit and offset
            base_query = base_query.limit(limit).offset(offset)

            # Fetch the authors
            authors = session.execute(base_query).scalars().all()
            author_ids = [author.id for author in authors]
            if not author_ids:
                return []

            # Optimized query for per-author publication stats
            shouts_stats_query = f"""
                SELECT sa.author, COUNT(DISTINCT s.id) as shouts_count
                FROM shout_author sa
                JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
                WHERE sa.author IN ({",".join(map(str, author_ids))})
                GROUP BY sa.author
            """
            shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}

            # Query for per-author follower stats
            followers_stats_query = f"""
                SELECT author, COUNT(DISTINCT follower) as followers_count
                FROM author_follower
                WHERE author IN ({",".join(map(str, author_ids))})
                GROUP BY author
            """
            followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}

            # Build the result with stats attached
            result = []
            for author in authors:
                author_dict = author.dict()
                author_dict["stat"] = {
                    "shouts": shouts_stats.get(author.id, 0),
                    "followers": followers_stats.get(author.id, 0),
                }
                result.append(author_dict)
                # Cache each author separately for reuse by other functions
                await cache_author(author_dict)
            return result

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_authors_with_stats)


# Invalidate author caches
async def invalidate_authors_cache(author_id=None):
    """
    Invalidates author caches when data changes.

    Args:
        author_id: optional author ID for targeted invalidation.
                   If not provided, all author caches are invalidated.
    """
    if author_id:
        # Targeted invalidation for a specific author
        logger.debug(f"Invalidating cache for author #{author_id}")
        specific_keys = [
            f"author:id:{author_id}",
            f"author:followers:{author_id}",
            f"author:follows-authors:{author_id}",
            f"author:follows-topics:{author_id}",
            f"author:follows-shouts:{author_id}",
        ]

        # Get the author's user_id, if any
        with local_session() as session:
            author = session.query(Author).filter(Author.id == author_id).first()
            if author and author.user:
                specific_keys.append(f"author:user:{author.user.strip()}")

        # Delete the specific keys
        for key in specific_keys:
            try:
                await redis.execute("DEL", key)
                logger.debug(f"Deleted cache key {key}")
            except Exception as e:
                logger.error(f"Error deleting key {key}: {e}")

        # Also find and delete collection keys containing this author's data
        collection_keys = await redis.execute("KEYS", "authors:stats:*")
        if collection_keys:
            await redis.execute("DEL", *collection_keys)
            logger.debug(f"Deleted {len(collection_keys)} author collection keys")
    else:
        # Full invalidation of all author caches
        logger.debug("Full invalidation of author caches")
        await invalidate_cache_by_prefix("authors")
@mutation.field("update_author")
@login_required @login_required
async def update_author(_, info, profile): async def update_author(_, info, profile):
user_id = info.context.get('user_id') user_id = info.context.get("user_id")
if not user_id: if not user_id:
return {'error': 'unauthorized', 'author': None} return {"error": "unauthorized", "author": None}
try: try:
with local_session() as session: with local_session() as session:
author = session.query(Author).where(Author.user == user_id).first() author = session.query(Author).where(Author.user == user_id).first()
@@ -32,297 +205,196 @@ async def update_author(_, info, profile):
Author.update(author, profile) Author.update(author, profile)
session.add(author) session.add(author)
session.commit() session.commit()
return {'error': None, 'author': author} author_query = select(Author).where(Author.user == user_id)
result = get_with_stat(author_query)
if result:
author_with_stat = result[0]
if isinstance(author_with_stat, Author):
author_dict = author_with_stat.dict()
# await cache_author(author_dict)
asyncio.create_task(cache_author(author_dict))
return {"error": None, "author": author}
except Exception as exc: except Exception as exc:
import traceback import traceback
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
return {'error': exc, 'author': None} return {"error": exc, "author": None}
@query.field('get_authors_all') @query.field("get_authors_all")
def get_authors_all(_, _info): async def get_authors_all(_, _info):
with local_session() as session: """
authors = session.query(Author).all() Получает список всех авторов без статистики.
return authors
Returns:
list: Список всех авторов
"""
return await get_all_authors()
@query.field('get_author') @query.field("get_authors_paginated")
async def get_author(_, _info, slug='', author_id=0): async def get_authors_paginated(_, _info, limit=50, offset=0, by=None):
author_query = '' """
author = None Получает список авторов с пагинацией и статистикой.
Args:
limit: Максимальное количество возвращаемых авторов
offset: Смещение для пагинации
by: Параметр сортировки (new/active)
Returns:
list: Список авторов с их статистикой
"""
return await get_authors_with_stats(limit, offset, by)
@query.field("get_author")
async def get_author(_, _info, slug="", author_id=0):
author_dict = None author_dict = None
try: try:
# lookup for cached author author_id = get_author_id_from(slug=slug, user="", author_id=author_id)
author_query = select(Author).filter(or_(Author.slug == slug, Author.id == author_id)) if not author_id:
[found_author] = local_session().execute(author_query).first() raise ValueError("cant find")
logger.debug(found_author) author_dict = await get_cached_author(int(author_id), get_with_stat)
if found_author:
logger.debug(f'found author id: {found_author.id}')
author_id = found_author.id if found_author.id else author_id
if author_id:
cached_result = await redis.execute('GET', f'author:{author_id}')
author_dict = json.loads(cached_result) if cached_result else None
# update stat from db if not author_dict or not author_dict.get("stat"):
if not author_dict or not author_dict.get('stat'): # update stat from db
author_query = select(Author).filter(Author.id == author_id)
result = get_with_stat(author_query) result = get_with_stat(author_query)
if not result: if result:
raise ValueError('Author not found') author_with_stat = result[0]
[author] = result if isinstance(author_with_stat, Author):
# use found author author_dict = author_with_stat.dict()
if isinstance(author, Author): # await cache_author(author_dict)
logger.debug(f'update @{author.slug} with id {author.id}') asyncio.create_task(cache_author(author_dict))
author_dict = author.dict()
await cache_author(author_dict)
except ValueError: except ValueError:
pass pass
except Exception as exc: except Exception as exc:
import traceback import traceback
logger.error(f'{exc}:\n{traceback.format_exc()}')
logger.error(f"{exc}:\n{traceback.format_exc()}")
return author_dict return author_dict
async def get_author_by_user_id(user_id: str): @query.field("get_author_id")
logger.info(f'getting author id for {user_id}') async def get_author_id(_, _info, user: str):
redis_key = f'user:{user_id}' user_id = user.strip()
logger.info(f"getting author id for {user_id}")
author = None author = None
try: try:
res = await redis.execute('GET', redis_key) author = await get_cached_author_by_user_id(user_id, get_with_stat)
if isinstance(res, str): if author:
author = json.loads(res) return author
author_id = author.get('id')
author_slug = author.get('slug')
if author_id:
logger.debug(f'got author @{author_slug} #{author_id} cached')
return author
author_query = select(Author).filter(Author.user == user_id) author_query = select(Author).filter(Author.user == user_id)
result = get_with_stat(author_query) result = get_with_stat(author_query)
if result: if result:
[author] = result author_with_stat = result[0]
await cache_author(author.dict()) if isinstance(author_with_stat, Author):
author_dict = author_with_stat.dict()
# await cache_author(author_dict)
asyncio.create_task(cache_author(author_dict))
return author_with_stat
except Exception as exc: except Exception as exc:
import traceback logger.error(f"Error getting author: {exc}")
return None
traceback.print_exc()
logger.error(exc)
return author
@query.field('get_author_id') @query.field("load_authors_by")
async def get_author_id(_, _info, user: str): async def load_authors_by(_, _info, by, limit, offset):
return await get_author_by_user_id(user) """
Загружает авторов по заданному критерию с пагинацией.
Args:
by: Критерий сортировки авторов (new/active)
limit: Максимальное количество возвращаемых авторов
offset: Смещение для пагинации
Returns:
list: Список авторов с учетом критерия
"""
# Используем оптимизированную функцию для получения авторов
return await get_authors_with_stats(limit, offset, by)
@query.field('load_authors_by') def get_author_id_from(slug="", user=None, author_id=None):
def load_authors_by(_, _info, by, limit, offset):
cache_key = f'{json.dumps(by)}_{limit}_{offset}'
@cache_region.cache_on_arguments(cache_key)
def _load_authors_by():
logger.debug(f'loading authors by {by}')
q = select(Author)
if by.get('slug'):
q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
elif by.get('name'):
q = q.filter(Author.name.ilike(f"%{by['name']}%"))
elif by.get('topic'):
q = (
q.join(ShoutAuthor)
.join(ShoutTopic)
.join(Topic)
.where(Topic.slug == str(by['topic']))
)
if by.get('last_seen'): # in unix time
before = int(time.time()) - by['last_seen']
q = q.filter(Author.last_seen > before)
elif by.get('created_at'): # in unix time
before = int(time.time()) - by['created_at']
q = q.filter(Author.created_at > before)
order = by.get('order')
if order in ['likes', 'shouts', 'followers']:
q = q.order_by(desc(text(f'{order}_stat')))
# q = q.distinct()
q = q.limit(limit).offset(offset)
authors = get_with_stat(q)
return authors
return _load_authors_by()
@query.field('get_author_follows')
async def get_author_follows(_, _info, slug='', user=None, author_id=0):
try: try:
author_query = select(Author) author_id = None
if user:
author_query = author_query.filter(Author.user == user)
elif slug:
author_query = author_query.filter(Author.slug == slug)
elif author_id:
author_query = author_query.filter(Author.id == author_id)
else:
raise ValueError('One of slug, user, or author_id must be provided')
[result] = local_session().execute(author_query)
if len(result) > 0:
# logger.debug(result)
[author] = result
# logger.debug(author)
if author and isinstance(author, Author):
# logger.debug(author.dict())
author_id = author.id
rkey = f'author:{author_id}:follows-authors'
logger.debug(f'getting {author_id} follows authors')
cached = await redis.execute('GET', rkey)
authors = []
if not cached:
authors = author_follows_authors(author_id)
prepared = [author.dict() for author in authors]
await redis.execute('SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder))
elif isinstance(cached, str):
authors = json.loads(cached)
rkey = f'author:{author_id}:follows-topics'
cached = await redis.execute('GET', rkey)
topics = []
if cached and isinstance(cached, str):
topics = json.loads(cached)
if not cached:
topics = author_follows_topics(author_id)
prepared = [topic.dict() for topic in topics]
await redis.execute(
'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
)
return {
'topics': topics,
'authors': authors,
'communities': [
{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
],
}
except Exception:
import traceback
traceback.print_exc()
return {'error': 'Author not found'}
@query.field('get_author_follows_topics')
async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None):
with local_session() as session:
if user or slug:
author_id_result = (
session.query(Author.id)
.filter(or_(Author.user == user, Author.slug == slug))
.first()
)
author_id = author_id_result[0] if author_id_result else None
if not author_id:
raise ValueError('Author not found')
logger.debug(f'getting {author_id} follows topics')
rkey = f'author:{author_id}:follows-topics'
cached = await redis.execute('GET', rkey)
topics = []
if isinstance(cached, str):
topics = json.loads(cached)
if not cached:
topics = author_follows_topics(author_id)
prepared = [topic.dict() for topic in topics]
await redis.execute(
'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
)
return topics
@query.field('get_author_follows_authors')
async def get_author_follows_authors(_, _info, slug='', user=None, author_id=None):
with local_session() as session:
if user or slug:
author_id_result = (
session.query(Author.id)
.filter(or_(Author.user == user, Author.slug == slug))
.first()
)
author_id = author_id_result[0] if author_id_result else None
if author_id: if author_id:
logger.debug(f'getting {author_id} follows authors') return author_id
rkey = f'author:{author_id}:follows-authors' with local_session() as session:
cached = await redis.execute('GET', rkey)
authors = []
if isinstance(cached, str):
authors = json.loads(cached)
if not authors:
authors = author_follows_authors(author_id)
prepared = [author.dict() for author in authors]
await redis.execute(
'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
)
return authors
else:
raise ValueError('Author not found')
def create_author(user_id: str, slug: str, name: str = ''):
with local_session() as session:
try:
author = None author = None
if user_id: if slug:
author = session.query(Author).filter(Author.user == user_id).first()
elif slug:
author = session.query(Author).filter(Author.slug == slug).first() author = session.query(Author).filter(Author.slug == slug).first()
if not author: if author:
new_author = Author(user=user_id, slug=slug, name=name) author_id = author.id
session.add(new_author) return author_id
session.commit() if user:
logger.info(f'author created by webhook {new_author.dict()}') author = session.query(Author).filter(Author.user == user).first()
except Exception as exc: if author:
logger.debug(exc) author_id = author.id
@query.field('get_author_followers')
async def get_author_followers(_, _info, slug: str):
logger.debug(f'getting followers for @{slug}')
try:
author_alias = aliased(Author)
author_query = select(author_alias).filter(author_alias.slug == slug)
result = local_session().execute(author_query).first()
if result:
[author] = result
author_id = author.id
cached = await redis.execute('GET', f'author:{author_id}:followers')
if not cached:
author_follower_alias = aliased(AuthorFollower, name='af')
q = select(Author).join(
author_follower_alias,
and_(
author_follower_alias.author == author_id,
author_follower_alias.follower == Author.id,
),
)
results = get_with_stat(q)
if isinstance(results, list):
for follower in results:
await cache_follower(follower, author)
logger.debug(f'@{slug} cache updated with {len(results)} followers')
return results
else:
logger.debug(f'@{slug} got followers cached')
if isinstance(cached, str):
return json.loads(cached)
except Exception as exc: except Exception as exc:
import traceback
logger.error(exc) logger.error(exc)
logger.error(traceback.format_exc()) return author_id
@query.field("get_author_follows")
async def get_author_follows(_, _info, slug="", user=None, author_id=0):
logger.debug(f"getting follows for @{slug}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return {}
followed_authors = await get_cached_follower_authors(author_id)
followed_topics = await get_cached_follower_topics(author_id)
# TODO: Get followed communities too
return {
"authors": followed_authors,
"topics": followed_topics,
"communities": DEFAULT_COMMUNITIES,
"shouts": [],
}
@query.field("get_author_follows_topics")
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
logger.debug(f"getting followed topics for @{slug}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return [] return []
followed_topics = await get_cached_follower_topics(author_id)
return followed_topics
@query.field('search_authors') @query.field("get_author_follows_authors")
async def search_authors(_, _info, what: str): async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None):
q = search(select(Author), what) logger.debug(f"getting followed authors for @{slug}")
return get_with_stat(q) author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return []
followed_authors = await get_cached_follower_authors(author_id)
return followed_authors
def create_author(user_id: str, slug: str, name: str = ""):
author = Author()
author.user = user_id # Связь с user_id из системы авторизации
author.slug = slug # Идентификатор из системы авторизации
author.created_at = author.updated_at = int(time.time())
author.name = name or slug # если не указано
with local_session() as session:
session.add(author)
session.commit()
return author
@query.field("get_author_followers")
async def get_author_followers(_, _info, slug: str = "", user: str = "", author_id: int = 0):
logger.debug(f"getting followers for author @{slug} or ID:{author_id}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return []
followers = await get_cached_author_followers(author_id)
return followers
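
get_all_authors and get_authors_with_stats above delegate caching to cached_query from cache.cache, which is not part of this changeset. A minimal sketch of what such a helper could look like, assuming a Redis-backed JSON cache with a fixed TTL (the TTL and the SET arguments are assumptions, not the project's actual implementation):

# Assumed sketch of a cached_query-style helper; only redis.execute("GET"/"DEL"/"KEYS")
# usage is visible in this diff, so the "SET ... EX" form and the TTL below are guesses.
import orjson

from services.redis import redis

CACHE_TTL = 300  # assumed TTL in seconds


async def cached_query(cache_key: str, fetch_fn, ttl: int = CACHE_TTL):
    """Return the cached JSON value for cache_key, or compute, cache and return it."""
    cached = await redis.execute("GET", cache_key)
    if cached:
        return orjson.loads(cached)
    result = await fetch_fn()
    await redis.execute("SET", cache_key, orjson.dumps(result), "EX", ttl)
    return result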

resolvers/bookmark.py Normal file

@@ -0,0 +1,83 @@
from operator import and_
from graphql import GraphQLError
from sqlalchemy import delete, insert
from orm.author import AuthorBookmark
from orm.shout import Shout
from resolvers.feed import apply_options
from resolvers.reader import get_shouts_with_links, query_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.schema import mutation, query
@query.field("load_shouts_bookmarked")
@login_required
def load_shouts_bookmarked(_, info, options):
"""
Load bookmarked shouts for the authenticated user.
Args:
        options (dict): Pagination and filtering options, including limit and offset.
Returns:
list: List of bookmarked shouts.
"""
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
raise GraphQLError("User not authenticated")
q = query_with_stat(info)
q = q.join(AuthorBookmark)
q = q.filter(
and_(
Shout.id == AuthorBookmark.shout,
AuthorBookmark.author == author_id,
)
)
q, limit, offset = apply_options(q, options, author_id)
return get_shouts_with_links(info, q, limit, offset)
@mutation.field("toggle_bookmark_shout")
def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
"""
Toggle bookmark status for a specific shout.
Args:
slug (str): Unique identifier of the shout.
Returns:
CommonResult: Result of the operation with bookmark status.
"""
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
raise GraphQLError("User not authenticated")
with local_session() as db:
shout = db.query(Shout).filter(Shout.slug == slug).first()
if not shout:
raise GraphQLError("Shout not found")
existing_bookmark = (
db.query(AuthorBookmark)
.filter(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
.first()
)
if existing_bookmark:
db.execute(
delete(AuthorBookmark).where(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
)
result = False
else:
db.execute(insert(AuthorBookmark).values(author=author_id, shout=shout.id))
result = True
db.commit()
return result


@@ -6,78 +6,73 @@ from services.db import local_session
from services.schema import mutation


@mutation.field("accept_invite")
@login_required
async def accept_invite(_, info, invite_id: int):
    info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if author_id:
        author_id = int(author_id)
        # Check if the user exists
        with local_session() as session:
            # Check if the invite exists
            invite = session.query(Invite).filter(Invite.id == invite_id).first()
            if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
                # Add the user to the shout authors
                shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
                if shout:
                    if author_id not in shout.authors:
                        author = session.query(Author).filter(Author.id == author_id).first()
                        if author:
                            shout.authors.append(author)
                            session.add(shout)
                            session.delete(invite)
                            session.commit()
                    return {"success": True, "message": "Invite accepted"}
                else:
                    return {"error": "Shout not found"}
            else:
                return {"error": "Invalid invite or already accepted/rejected"}
    else:
        return {"error": "Unauthorized"}


@mutation.field("reject_invite")
@login_required
async def reject_invite(_, info, invite_id: int):
    info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if author_id:
        # Check if the user exists
        with local_session() as session:
            author_id = int(author_id)
            # Check if the invite exists
            invite = session.query(Invite).filter(Invite.id == invite_id).first()
            if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
                # Delete the invite
                session.delete(invite)
                session.commit()
                return {"success": True, "message": "Invite rejected"}
            else:
                return {"error": "Invalid invite or already accepted/rejected"}
    return {"error": "User not found"}


@mutation.field("create_invite")
@login_required
async def create_invite(_, info, slug: str = "", author_id: int = 0):
    user_id = info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if author_id:
        # Check if the inviter is the owner of the shout
        with local_session() as session:
            shout = session.query(Shout).filter(Shout.slug == slug).first()
            inviter = session.query(Author).filter(Author.user == user_id).first()
            if inviter and shout and shout.authors and inviter.id is shout.created_by:
                # Check if an invite already exists
                existing_invite = (
                    session.query(Invite)
@@ -90,7 +85,7 @@ async def create_invite(_, info, slug: str = '', author_id: int = 0):
                    .first()
                )
                if existing_invite:
                    return {"error": "Invite already sent"}

                # Create a new invite
                new_invite = Invite(
@@ -102,52 +97,51 @@ async def create_invite(_, info, slug: str = '', author_id: int = 0):
                session.add(new_invite)
                session.commit()

                return {"error": None, "invite": new_invite}
            else:
                return {"error": "Invalid author"}
    else:
        return {"error": "Access denied"}


@mutation.field("remove_author")
@login_required
async def remove_author(_, info, slug: str = "", author_id: int = 0):
    user_id = info.context["user_id"]
    with local_session() as session:
        author = session.query(Author).filter(Author.user == user_id).first()
        if author:
            shout = session.query(Shout).filter(Shout.slug == slug).first()
            # NOTE: owner should be first in a list
            if shout and author.id is shout.created_by:
                shout.authors = [author for author in shout.authors if author.id != author_id]
                session.commit()
                return {}
    return {"error": "Access denied"}


@mutation.field("remove_invite")
@login_required
async def remove_invite(_, info, invite_id: int):
    info.context["user_id"]
    author_dict = info.context["author"]
    author_id = author_dict.get("id")
    if isinstance(author_id, int):
        # Check if the user exists
        with local_session() as session:
            # Check if the invite exists
            invite = session.query(Invite).filter(Invite.id == invite_id).first()
            if isinstance(invite, Invite):
                shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
                if shout and shout.deleted_at is None and invite:
                    if invite.inviter_id is author_id or author_id == shout.created_by:
                        if invite.status is InviteStatus.PENDING.value:
                            # Delete the invite
                            session.delete(invite)
                            session.commit()
                            return {}
                    else:
                        return {"error": "Invalid invite or already accepted/rejected"}
            else:
                return {"error": "Author not found"}


@@ -1,89 +1,97 @@
from orm.author import Author
from orm.community import Community, CommunityFollower
from services.db import local_session
from services.schema import mutation, query


@query.field("get_communities_all")
async def get_communities_all(_, _info):
    return local_session().query(Community).all()


@query.field("get_community")
async def get_community(_, _info, slug: str):
    q = local_session().query(Community).where(Community.slug == slug)
    return q.first()


@query.field("get_communities_by_author")
async def get_communities_by_author(_, _info, slug="", user="", author_id=0):
with local_session() as session:
q = session.query(Community).join(CommunityFollower)
if slug:
author_id = session.query(Author).where(Author.slug == slug).first().id
q = q.where(CommunityFollower.author == author_id)
if user:
author_id = session.query(Author).where(Author.user == user).first().id
q = q.where(CommunityFollower.author == author_id)
if author_id:
q = q.where(CommunityFollower.author == author_id)
return q.all()
return []
@mutation.field("join_community")
async def join_community(_, info, slug: str):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
community = session.query(Community).where(Community.slug == slug).first()
if not community:
return {"ok": False, "error": "Community not found"}
session.add(CommunityFollower(community=community.id, author=author_id))
session.commit()
return {"ok": True}
@mutation.field("leave_community")
async def leave_community(_, info, slug: str):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
session.query(CommunityFollower).where(
CommunityFollower.author == author_id, CommunityFollower.community == slug
).delete()
session.commit()
return {"ok": True}
@mutation.field("create_community")
async def create_community(_, info, community_data):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
session.add(Community(author=author_id, **community_data))
session.commit()
return {"ok": True}
@mutation.field("update_community")
async def update_community(_, info, community_data):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
slug = community_data.get("slug")
if slug:
with local_session() as session:
try:
session.query(Community).where(Community.created_by == author_id, Community.slug == slug).update(
community_data
)
session.commit()
except Exception as e:
return {"ok": False, "error": str(e)}
return {"ok": True}
return {"ok": False, "error": "Please, set community slug in input"}
@mutation.field("delete_community")
async def delete_community(_, info, slug: str):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
try:
session.query(Community).where(Community.slug == slug, Community.created_by == author_id).delete()
session.commit()
return {"ok": True}
except Exception as e:
return {"ok": False, "error": str(e)}

resolvers/draft.py Normal file

@@ -0,0 +1,352 @@
import time
from operator import or_
from sqlalchemy.sql import and_
from cache.cache import (
cache_author,
cache_by_id,
cache_topic,
invalidate_shout_related_cache,
invalidate_shouts_cache,
)
from orm.author import Author
from orm.draft import Draft
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.auth import login_required
from services.db import local_session
from services.notify import notify_shout
from services.schema import mutation, query
from services.search import search_service
from utils.logger import root_logger as logger
def create_shout_from_draft(session, draft, author_id):
    # Create a new publication
shout = Shout(
body=draft.body,
slug=draft.slug,
cover=draft.cover,
cover_caption=draft.cover_caption,
lead=draft.lead,
description=draft.description,
title=draft.title,
subtitle=draft.subtitle,
layout=draft.layout,
media=draft.media,
lang=draft.lang,
seo=draft.seo,
created_by=author_id,
community=draft.community,
draft=draft.id,
deleted_at=None,
)
return shout
@query.field("load_drafts")
@login_required
async def load_drafts(_, info):
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not user_id or not author_id:
return {"error": "User ID and author ID are required"}
with local_session() as session:
drafts = (
session.query(Draft)
.filter(or_(Draft.authors.any(Author.id == author_id), Draft.created_by == author_id))
.all()
)
return {"drafts": drafts}
@mutation.field("create_draft")
@login_required
async def create_draft(_, info, draft_input):
"""Create a new draft.
Args:
info: GraphQL context
draft_input (dict): Draft data including optional fields:
            - title (str, required) - draft title
            - body (str, required) - draft body text
- slug (str)
- etc.
Returns:
dict: Contains either:
- draft: The created draft object
- error: Error message if creation failed
Example:
>>> async def test_create():
... context = {'user_id': '123', 'author': {'id': 1}}
... info = type('Info', (), {'context': context})()
... result = await create_draft(None, info, {'title': 'Test'})
... assert result.get('error') is None
... assert result['draft'].title == 'Test'
... return result
"""
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not user_id or not author_id:
return {"error": "Author ID is required"}
    # Check required fields
    if "body" not in draft_input or not draft_input["body"]:
        draft_input["body"] = ""  # empty string instead of NULL
    if "title" not in draft_input or not draft_input["title"]:
        draft_input["title"] = ""  # empty string instead of NULL
try:
with local_session() as session:
# Remove id from input if present since it's auto-generated
if "id" in draft_input:
del draft_input["id"]
            # Set the creation time
draft_input["created_at"] = int(time.time())
draft = Draft(created_by=author_id, **draft_input)
session.add(draft)
session.commit()
return {"draft": draft}
except Exception as e:
logger.error(f"Failed to create draft: {e}", exc_info=True)
return {"error": f"Failed to create draft: {str(e)}"}
@mutation.field("update_draft")
@login_required
async def update_draft(_, info, draft_id: int, draft_input):
"""Обновляет черновик публикации.
Args:
draft_id: ID черновика для обновления
draft_input: Данные для обновления черновика
Returns:
dict: Обновленный черновик или сообщение об ошибке
"""
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not user_id or not author_id:
return {"error": "Author ID are required"}
with local_session() as session:
draft = session.query(Draft).filter(Draft.id == draft_id).first()
if not draft:
return {"error": "Draft not found"}
Draft.update(draft, draft_input)
draft.updated_at = int(time.time())
session.commit()
return {"draft": draft}
@mutation.field("delete_draft")
@login_required
async def delete_draft(_, info, draft_id: int):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
draft = session.query(Draft).filter(Draft.id == draft_id).first()
if not draft:
return {"error": "Draft not found"}
if author_id != draft.created_by and draft.authors.filter(Author.id == author_id).count() == 0:
return {"error": "You are not allowed to delete this draft"}
session.delete(draft)
session.commit()
return {"draft": draft}
@mutation.field("publish_draft")
@login_required
async def publish_draft(_, info, draft_id: int):
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not user_id or not author_id:
return {"error": "User ID and author ID are required"}
with local_session() as session:
draft = session.query(Draft).filter(Draft.id == draft_id).first()
if not draft:
return {"error": "Draft not found"}
shout = create_shout_from_draft(session, draft, author_id)
session.add(shout)
session.commit()
return {"shout": shout, "draft": draft}
@mutation.field("unpublish_draft")
@login_required
async def unpublish_draft(_, info, draft_id: int):
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not user_id or not author_id:
return {"error": "User ID and author ID are required"}
with local_session() as session:
draft = session.query(Draft).filter(Draft.id == draft_id).first()
if not draft:
return {"error": "Draft not found"}
shout = session.query(Shout).filter(Shout.draft == draft.id).first()
if shout:
shout.published_at = None
session.commit()
return {"shout": shout, "draft": draft}
return {"error": "Failed to unpublish draft"}
@mutation.field("publish_shout")
@login_required
async def publish_shout(_, info, shout_id: int):
"""Publish draft as a shout or update existing shout.
Args:
        shout_id: ID of an existing publication, or 0 for a new one
        draft: draft object (optional)
"""
user_id = info.context.get("user_id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
now = int(time.time())
if not user_id or not author_id:
return {"error": "User ID and author ID are required"}
try:
with local_session() as session:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "Shout not found"}
was_published = shout.published_at is not None
draft = session.query(Draft).where(Draft.id == shout.draft).first()
if not draft:
return {"error": "Draft not found"}
# Sync the existing shout with its draft data
shout.draft = draft.id
shout.created_by = author_id
shout.title = draft.title
shout.subtitle = draft.subtitle
shout.body = draft.body
shout.cover = draft.cover
shout.cover_caption = draft.cover_caption
shout.lead = draft.lead
shout.description = draft.description
shout.layout = draft.layout
shout.media = draft.media
shout.lang = draft.lang
shout.seo = draft.seo
draft.updated_at = now
shout.updated_at = now
# Set published_at only if the shout was not previously published
if not was_published:
shout.published_at = now
# Ensure the author is linked to the shout
if (
not session.query(ShoutAuthor)
.filter(and_(ShoutAuthor.shout == shout.id, ShoutAuthor.author == author_id))
.first()
):
sa = ShoutAuthor(shout=shout.id, author=author_id)
session.add(sa)
# Link topics from the draft
if draft.topics:
for topic in draft.topics:
st = ShoutTopic(
topic=topic.id, shout=shout.id, main=topic.main if hasattr(topic, "main") else False
)
session.add(st)
session.add(shout)
session.add(draft)
session.flush()
# Invalidate caches only for a new publication or a re-publication
if not was_published:
cache_keys = ["feed", f"author_{author_id}", "random_top", "unrated"]
# Add cache keys for the shout's topics
for topic in shout.topics:
cache_keys.append(f"topic_{topic.id}")
cache_keys.append(f"topic_shouts_{topic.id}")
await cache_by_id(Topic, topic.id, cache_topic)
# Invalidate aggregated shout caches
await invalidate_shouts_cache(cache_keys)
await invalidate_shout_related_cache(shout, author_id)
# Refresh author caches
for author in shout.authors:
await cache_by_id(Author, author.id, cache_author)
# Notify subscribers about the publication
await notify_shout(shout.dict(), "published")
# Update the search index
search_service.index(shout)
else:
# For already-published shouts, just send an update notification
await notify_shout(shout.dict(), "update")
session.commit()
return {"shout": shout}
except Exception as e:
logger.error(f"Failed to publish shout: {e}", exc_info=True)
if "session" in locals():
session.rollback()
return {"error": f"Failed to publish shout: {str(e)}"}
@mutation.field("unpublish_shout")
@login_required
async def unpublish_shout(_, info, shout_id: int):
"""Unpublish a shout.
Args:
shout_id: The ID of the shout to unpublish
Returns:
dict: The unpublished shout or an error message
"""
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
return {"error": "Author ID is required"}
shout = None
with local_session() as session:
try:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "Shout not found"}
shout.published_at = None
session.commit()
await invalidate_shout_related_cache(shout, author_id)
await invalidate_shouts_cache()
except Exception:
session.rollback()
return {"error": "Failed to unpublish shout"}
return {"shout": shout}


@@ -1,368 +1,675 @@
 import time
+import orjson
 from sqlalchemy import and_, desc, select
 from sqlalchemy.orm import joinedload
 from sqlalchemy.sql.functions import coalesce
+from cache.cache import (
+    cache_author,
+    cache_topic,
+    invalidate_shout_related_cache,
+    invalidate_shouts_cache,
+)
 from orm.author import Author
-from orm.rating import is_negative, is_positive
-from orm.reaction import Reaction, ReactionKind
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
 from orm.topic import Topic
-from resolvers.follower import reactions_follow, reactions_unfollow
+from resolvers.follower import follow, unfollow
+from resolvers.stat import get_with_stat
 from services.auth import login_required
 from services.db import local_session
-from services.diff import apply_diff, get_diff
-from services.logger import root_logger as logger
 from services.notify import notify_shout
-from services.schema import mutation, query
+from services.schema import query
 from services.search import search_service
+from utils.logger import root_logger as logger
async def cache_by_id(entity, entity_id: int, cache_method):
"""Cache an entity by its ID using the provided cache method.
Args:
entity: The SQLAlchemy model class to query
entity_id (int): The ID of the entity to cache
cache_method: The caching function to use
Returns:
dict: The cached entity data if successful, None if entity not found
Example:
>>> async def test_cache():
... author = await cache_by_id(Author, 1, cache_author)
... assert author['id'] == 1
... assert 'name' in author
... return author
"""
caching_query = select(entity).filter(entity.id == entity_id)
result = get_with_stat(caching_query)
if not result or not result[0]:
logger.warning(f"{entity.__name__} with id {entity_id} not found")
return
x = result[0]
d = x.dict() # convert object to dictionary
cache_method(d)
return d
@query.field("get_my_shout")
@login_required
async def get_my_shout(_, info, shout_id: int):
"""Get a shout by ID if the requesting user has permission to view it.
DEPRECATED: use `load_drafts` instead
Args:
info: GraphQL resolver info containing context
shout_id (int): ID of the shout to retrieve
Returns:
dict: Contains either:
- error (str): Error message if retrieval failed
- shout (Shout): The requested shout if found and accessible
Permissions:
User must be:
- The shout creator
- Listed as an author
- Have editor role
Example:
>>> async def test_get_my_shout():
... context = {'user_id': '123', 'author': {'id': 1}, 'roles': []}
... info = type('Info', (), {'context': context})()
... result = await get_my_shout(None, info, 1)
... assert result['error'] is None
... assert result['shout'].id == 1
... return result
"""
user_id = info.context.get("user_id", "")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
roles = info.context.get("roles", [])
shout = None
if not user_id or not author_id:
return {"error": "unauthorized", "shout": None}
with local_session() as session: with local_session() as session:
user_id = info.context.get('user_id', '')
if not user_id:
return {'error': 'unauthorized', 'shout': None}
shout = ( shout = (
session.query(Shout) session.query(Shout)
.filter(Shout.id == shout_id) .filter(Shout.id == shout_id)
.options(joinedload(Shout.authors), joinedload(Shout.topics)) .options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(and_(Shout.deleted_at.is_(None), Shout.published_at.is_(None))) .filter(Shout.deleted_at.is_(None))
.first() .first()
) )
if not shout: if not shout:
return {'error': 'no shout found', 'shout': None} return {"error": "no shout found", "shout": None}
if not bool(shout.published_at):
author = session.query(Author).filter(Author.user == user_id).first() # Преобразуем media JSON в список объектов MediaItem
if not author: if hasattr(shout, "media") and shout.media:
return {'error': 'no author found', 'shout': None} if isinstance(shout.media, str):
roles = info.context.get('roles', []) try:
if 'editor' not in roles and not filter( shout.media = orjson.loads(shout.media)
lambda x: x.id == author.id, [x for x in shout.authors] except Exception as e:
): logger.error(f"Error parsing shout media: {e}")
return {'error': 'forbidden', 'shout': None} shout.media = []
return {'error': None, 'shout': shout} if not isinstance(shout.media, list):
shout.media = [shout.media] if shout.media else []
else:
shout.media = []
logger.debug(f"got {len(shout.authors)} shout authors, created by {shout.created_by}")
is_editor = "editor" in roles
logger.debug(f"viewer is{'' if is_editor else ' not'} editor")
is_creator = author_id == shout.created_by
logger.debug(f"viewer is{'' if is_creator else ' not'} creator")
is_author = bool(list(filter(lambda x: x.id == int(author_id), [x for x in shout.authors])))
logger.debug(f"viewer is{'' if is_creator else ' not'} author")
can_edit = is_editor or is_author or is_creator
if not can_edit:
return {"error": "forbidden", "shout": None}
logger.debug("got shout editor with data")
return {"error": None, "shout": shout}
@query.field("get_shouts_drafts")
@login_required
async def get_shouts_drafts(_, info):
user_id = info.context.get('user_id') # user_id = info.context.get("user_id")
author_dict = info.context.get("author")
if not author_dict:
return {"error": "author profile was not found"}
author_id = author_dict.get("id")
shouts = [] shouts = []
with local_session() as session: with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first() if author_id:
if author:
q = ( q = (
select(Shout) select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics)) .options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(and_(Shout.deleted_at.is_(None), Shout.created_by == author.id)) .filter(and_(Shout.deleted_at.is_(None), Shout.created_by == int(author_id)))
.filter(Shout.published_at.is_(None)) .filter(Shout.published_at.is_(None))
.order_by(desc(coalesce(Shout.updated_at, Shout.created_at))) .order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
.group_by(Shout.id) .group_by(Shout.id)
) )
shouts = [shout for [shout] in session.execute(q).unique()] shouts = [shout for [shout] in session.execute(q).unique()]
return shouts return {"shouts": shouts}
@mutation.field('create_shout') # @mutation.field("create_shout")
@login_required # @login_required
async def create_shout(_, info, inp): async def create_shout(_, info, inp):
user_id = info.context.get('user_id') logger.info(f"Starting create_shout with input: {inp}")
if user_id: user_id = info.context.get("user_id")
with local_session() as session: author_dict = info.context.get("author")
author = session.query(Author).filter(Author.user == user_id).first() logger.debug(f"Context user_id: {user_id}, author: {author_dict}")
if isinstance(author, Author):
if not author_dict:
logger.error("Author profile not found in context")
return {"error": "author profile was not found"}
author_id = author_dict.get("id")
if user_id and author_id:
try:
with local_session() as session:
author_id = int(author_id)
current_time = int(time.time()) current_time = int(time.time())
slug = inp.get('slug') or f'draft-{current_time}' slug = inp.get("slug") or f"draft-{current_time}"
shout_dict = {
'title': inp.get('title', ''), logger.info(f"Creating shout with input: {inp}")
'subtitle': inp.get('subtitle', ''), # Создаем публикацию без topics
'lead': inp.get('lead', ''), new_shout = Shout(
'description': inp.get('description', ''), slug=slug,
'body': inp.get('body', ''), body=inp.get("body", ""),
'layout': inp.get('layout', 'article'), layout=inp.get("layout", "article"),
'created_by': author.id, title=inp.get("title", ""),
'authors': [], created_by=author_id,
'slug': slug, created_at=current_time,
'topics': inp.get('topics', []), community=1,
'published_at': None,
'created_at': current_time, # Set created_at as Unix timestamp
}
same_slug_shout = (
session.query(Shout)
.filter(Shout.slug == shout_dict.get('slug'))
.first()
) )
# Проверяем уникальность slug
logger.debug(f"Checking for existing slug: {slug}")
same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
c = 1 c = 1
while same_slug_shout is not None: while same_slug_shout is not None:
same_slug_shout = ( logger.debug(f"Found duplicate slug, trying iteration {c}")
session.query(Shout) new_shout.slug = f"{slug}-{c}"
.filter(Shout.slug == shout_dict.get('slug')) same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
.first()
)
c += 1 c += 1
shout_dict['slug'] += f'-{c}'
new_shout = Shout(**shout_dict)
session.add(new_shout)
session.commit()
# NOTE: requesting new shout back
shout = session.query(Shout).where(Shout.slug == slug).first()
if shout:
sa = ShoutAuthor(shout=shout.id, author=author.id)
session.add(sa)
topics = (
session.query(Topic)
.filter(Topic.slug.in_(inp.get('topics', [])))
.all()
)
for topic in topics:
t = ShoutTopic(topic=topic.id, shout=shout.id)
session.add(t)
try:
logger.info("Creating new shout object")
session.add(new_shout)
session.commit() session.commit()
logger.info(f"Created shout with ID: {new_shout.id}")
except Exception as e:
logger.error(f"Error creating shout object: {e}", exc_info=True)
return {"error": f"Database error: {str(e)}"}
reactions_follow(author.id, shout.id, True) # Связываем с автором
try:
logger.debug(f"Linking author {author_id} to shout {new_shout.id}")
sa = ShoutAuthor(shout=new_shout.id, author=author_id)
session.add(sa)
except Exception as e:
logger.error(f"Error linking author: {e}", exc_info=True)
return {"error": f"Error linking author: {str(e)}"}
# notifier # Связываем с темами
# await notify_shout(shout_dict, 'create')
return {'shout': shout} input_topics = inp.get("topics", [])
if input_topics:
try:
logger.debug(f"Linking topics: {[t.slug for t in input_topics]}")
main_topic = inp.get("main_topic")
for topic in input_topics:
st = ShoutTopic(
topic=topic.id,
shout=new_shout.id,
main=(topic.slug == main_topic) if main_topic else False,
)
session.add(st)
logger.debug(f"Added topic {topic.slug} {'(main)' if st.main else ''}")
except Exception as e:
logger.error(f"Error linking topics: {e}", exc_info=True)
return {"error": f"Error linking topics: {str(e)}"}
return {'error': 'cant create shout' if user_id else 'unauthorized'} try:
session.commit()
logger.info("Final commit successful")
except Exception as e:
logger.error(f"Error in final commit: {e}", exc_info=True)
return {"error": f"Error in final commit: {str(e)}"}
# Получаем созданную публикацию
shout = session.query(Shout).filter(Shout.id == new_shout.id).first()
# Подписываем автора
try:
logger.debug("Following created shout")
await follow(None, info, "shout", shout.slug)
except Exception as e:
logger.warning(f"Error following shout: {e}", exc_info=True)
logger.info(f"Successfully created shout {shout.id}")
return {"shout": shout}
except Exception as e:
logger.error(f"Unexpected error in create_shout: {e}", exc_info=True)
return {"error": f"Unexpected error: {str(e)}"}
error_msg = "cant create shout" if user_id else "unauthorized"
logger.error(f"Create shout failed: {error_msg}")
return {"error": error_msg}
def patch_main_topic(session, main_topic, shout): def patch_main_topic(session, main_topic_slug, shout):
"""Update the main topic for a shout."""
logger.info(f"Starting patch_main_topic for shout#{shout.id} with slug '{main_topic_slug}'")
logger.debug(f"Current shout topics: {[(t.topic.slug, t.main) for t in shout.topics]}")
with session.begin(): with session.begin():
shout = ( # Получаем текущий главный топик
session.query(Shout) old_main = (
.options(joinedload(Shout.topics)) session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
.filter(Shout.id == shout.id)
.first()
) )
if not shout: if old_main:
logger.info(f"Found current main topic: {old_main.topic.slug}")
else:
logger.info("No current main topic found")
# Находим новый главный топик
main_topic = session.query(Topic).filter(Topic.slug == main_topic_slug).first()
if not main_topic:
logger.error(f"Main topic with slug '{main_topic_slug}' not found")
return return
old_main_topic = (
logger.info(f"Found new main topic: {main_topic.slug} (id={main_topic.id})")
# Находим связь с новым главным топиком
new_main = (
session.query(ShoutTopic) session.query(ShoutTopic)
.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))) .filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id))
.first() .first()
) )
logger.debug(f"Found new main topic relation: {new_main is not None}")
main_topic = session.query(Topic).filter(Topic.slug == main_topic).first() if old_main and new_main and old_main is not new_main:
logger.info(f"Updating main topic flags: {old_main.topic.slug} -> {new_main.topic.slug}")
old_main.main = False
session.add(old_main)
if main_topic: new_main.main = True
new_main_topic = ( session.add(new_main)
session.query(ShoutTopic)
.filter(
and_(
ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id
)
)
.first()
)
if ( session.flush()
old_main_topic logger.info(f"Main topic updated for shout#{shout.id}")
and new_main_topic else:
and old_main_topic is not new_main_topic logger.warning(f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})")
):
ShoutTopic.update(old_main_topic, {'main': False})
session.add(old_main_topic)
ShoutTopic.update(new_main_topic, {'main': True})
session.add(new_main_topic)
def patch_topics(session, shout, topics_input): def patch_topics(session, shout, topics_input):
new_topics_to_link = [ """Update the topics associated with a shout.
Topic(**new_topic) for new_topic in topics_input if new_topic['id'] < 0
] Args:
session: SQLAlchemy session
shout (Shout): The shout to update
topics_input (list): List of topic dicts with fields:
- id (int): Topic ID (<0 for new topics)
- slug (str): Topic slug
- title (str): Topic title (for new topics)
Side Effects:
- Creates new topics if needed
- Updates shout-topic associations
- Refreshes shout object with new topics
Example:
>>> def test_patch_topics():
... topics = [
... {'id': -1, 'slug': 'new-topic', 'title': 'New Topic'},
... {'id': 1, 'slug': 'existing-topic'}
... ]
... with local_session() as session:
... shout = session.query(Shout).first()
... patch_topics(session, shout, topics)
... assert len(shout.topics) == 2
... assert any(t.slug == 'new-topic' for t in shout.topics)
... return shout.topics
"""
logger.info(f"Starting patch_topics for shout#{shout.id}")
logger.info(f"Received topics_input: {topics_input}")
# Создаем новые топики если есть
new_topics_to_link = [Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0]
if new_topics_to_link: if new_topics_to_link:
logger.info(f"Creating new topics: {[t.dict() for t in new_topics_to_link]}")
session.add_all(new_topics_to_link) session.add_all(new_topics_to_link)
session.commit() session.flush()
for new_topic_to_link in new_topics_to_link: # Получаем текущие связи
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=new_topic_to_link.id) current_links = session.query(ShoutTopic).filter(ShoutTopic.shout == shout.id).all()
session.add(created_unlinked_topic) logger.info(f"Current topic links: {[{t.topic: t.main} for t in current_links]}")
existing_topics_input = [ # Удаляем старые связи
topic_input for topic_input in topics_input if topic_input.get('id', 0) > 0 if current_links:
] logger.info(f"Removing old topic links for shout#{shout.id}")
existing_topic_to_link_ids = [ for link in current_links:
existing_topic_input['id'] session.delete(link)
for existing_topic_input in existing_topics_input session.flush()
if existing_topic_input['id'] not in [topic.id for topic in shout.topics]
]
for existing_topic_to_link_id in existing_topic_to_link_ids: # Создаем новые связи
created_unlinked_topic = ShoutTopic( for topic_input in topics_input:
shout=shout.id, topic=existing_topic_to_link_id topic_id = topic_input["id"]
) if topic_id < 0:
session.add(created_unlinked_topic) topic = next(t for t in new_topics_to_link if t.slug == topic_input["slug"])
topic_id = topic.id
topic_to_unlink_ids = [ logger.info(f"Creating new topic link: shout#{shout.id} -> topic#{topic_id}")
topic.id new_link = ShoutTopic(shout=shout.id, topic=topic_id, main=False)
for topic in shout.topics session.add(new_link)
if topic.id not in [topic_input['id'] for topic_input in existing_topics_input]
]
session.query(ShoutTopic).filter( session.flush()
and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids)) # Обновляем связи в объекте шаута
).delete(synchronize_session=False) session.refresh(shout)
logger.info(f"Successfully updated topics for shout#{shout.id}")
logger.info(f"Final shout topics: {[t.dict() for t in shout.topics]}")
@mutation.field('update_shout') # @mutation.field("update_shout")
@login_required # @login_required
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False): async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
user_id = info.context.get('user_id') logger.info(f"Starting update_shout with id={shout_id}, publish={publish}")
roles = info.context.get('roles', []) logger.debug(f"Full shout_input: {shout_input}")
user_id = info.context.get("user_id")
roles = info.context.get("roles", [])
author_dict = info.context.get("author")
if not author_dict:
logger.error("Author profile not found")
return {"error": "author profile was not found"}
author_id = author_dict.get("id")
shout_input = shout_input or {} shout_input = shout_input or {}
current_time = int(time.time()) current_time = int(time.time())
shout_id = shout_id or shout_input.get('id', shout_id) shout_id = shout_id or shout_input.get("id", shout_id)
slug = shout_input.get('slug') slug = shout_input.get("slug")
if not user_id: if not user_id:
return {'error': 'unauthorized'} logger.error("Unauthorized update attempt")
return {"error": "unauthorized"}
try: try:
with local_session() as session: with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first() if author_id:
if author: logger.info(f"Processing update for shout#{shout_id} by author #{author_id}")
logger.info(f'author for shout#{shout_id} detected {author.dict()}') shout_by_id = (
shout_by_id = session.query(Shout).filter(Shout.id == shout_id).first() session.query(Shout)
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
.filter(Shout.id == shout_id)
.first()
)
if not shout_by_id: if not shout_by_id:
return {'error': 'shout not found'} logger.error(f"shout#{shout_id} not found")
return {"error": "shout not found"}
logger.info(f"Found shout#{shout_id}")
# Логируем текущие топики
current_topics = (
[{"id": t.id, "slug": t.slug, "title": t.title} for t in shout_by_id.topics]
if shout_by_id.topics
else []
)
logger.info(f"Current topics for shout#{shout_id}: {current_topics}")
if slug != shout_by_id.slug: if slug != shout_by_id.slug:
same_slug_shout = ( same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
session.query(Shout).filter(Shout.slug == slug).first()
)
c = 1 c = 1
while same_slug_shout is not None: while same_slug_shout is not None:
c += 1 c += 1
slug = f'{slug}-{c}' slug = f"{slug}-{c}"
same_slug_shout = ( same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
session.query(Shout).filter(Shout.slug == slug).first() shout_input["slug"] = slug
) logger.info(f"shout#{shout_id} slug patched")
shout_input['slug'] = slug
if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
logger.info(f"Author #{author_id} has permission to edit shout#{shout_id}")
if (
filter(
lambda x: x.id == author.id, [x for x in shout_by_id.authors]
)
or 'editor' in roles
):
# topics patch # topics patch
topics_input = shout_input.get('topics') topics_input = shout_input.get("topics")
if topics_input: if topics_input:
patch_topics(session, shout_by_id, topics_input) logger.info(f"Received topics_input for shout#{shout_id}: {topics_input}")
del shout_input['topics'] try:
patch_topics(session, shout_by_id, topics_input)
logger.info(f"Successfully patched topics for shout#{shout_id}")
# Обновляем связи в сессии после patch_topics
session.refresh(shout_by_id)
except Exception as e:
logger.error(f"Error patching topics: {e}", exc_info=True)
return {"error": f"Failed to update topics: {str(e)}"}
del shout_input["topics"]
for tpc in topics_input:
await cache_by_id(Topic, tpc["id"], cache_topic)
else:
logger.warning(f"No topics_input received for shout#{shout_id}")
# main topic # main topic
main_topic = shout_input.get('main_topic') main_topic = shout_input.get("main_topic")
if main_topic: if main_topic:
logger.info(f"Updating main topic for shout#{shout_id} to {main_topic}")
patch_main_topic(session, main_topic, shout_by_id) patch_main_topic(session, main_topic, shout_by_id)
shout_input['updated_at'] = current_time shout_input["updated_at"] = current_time
shout_input['published_at'] = current_time if publish else None if publish:
logger.info(f"Publishing shout#{shout_id}")
shout_input["published_at"] = current_time
# Проверяем наличие связи с автором
logger.info(f"Checking author link for shout#{shout_id} and author#{author_id}")
author_link = (
session.query(ShoutAuthor)
.filter(and_(ShoutAuthor.shout == shout_id, ShoutAuthor.author == author_id))
.first()
)
if not author_link:
logger.info(f"Adding missing author link for shout#{shout_id}")
sa = ShoutAuthor(shout=shout_id, author=author_id)
session.add(sa)
session.flush()
logger.info("Author link added successfully")
else:
logger.info("Author link already exists")
# Логируем финальное состояние перед сохранением
logger.info(f"Final shout_input for update: {shout_input}")
Shout.update(shout_by_id, shout_input) Shout.update(shout_by_id, shout_input)
session.add(shout_by_id) session.add(shout_by_id)
session.commit()
shout_dict = shout_by_id.dict() try:
session.commit()
# Обновляем объект после коммита чтобы получить все связи
session.refresh(shout_by_id)
logger.info(f"Successfully committed updates for shout#{shout_id}")
except Exception as e:
logger.error(f"Commit failed: {e}", exc_info=True)
return {"error": f"Failed to save changes: {str(e)}"}
# После обновления проверяем топики
updated_topics = (
[{"id": t.id, "slug": t.slug, "title": t.title} for t in shout_by_id.topics]
if shout_by_id.topics
else []
)
logger.info(f"Updated topics for shout#{shout_id}: {updated_topics}")
# Инвалидация кэша после обновления
try:
logger.info("Invalidating cache after shout update")
cache_keys = [
"feed", # лента
f"author_{author_id}", # публикации автора
"random_top", # случайные топовые
"unrated", # неоцененные
]
# Добавляем ключи для тем публикации
for topic in shout_by_id.topics:
cache_keys.append(f"topic_{topic.id}")
cache_keys.append(f"topic_shouts_{topic.id}")
await invalidate_shouts_cache(cache_keys)
await invalidate_shout_related_cache(shout_by_id, author_id)
# Обновляем кэш тем и авторов
for topic in shout_by_id.topics:
await cache_by_id(Topic, topic.id, cache_topic)
for author in shout_by_id.authors:
await cache_author(author.dict())
logger.info("Cache invalidated successfully")
except Exception as cache_error:
logger.warning(f"Cache invalidation error: {cache_error}", exc_info=True)
if not publish: if not publish:
await notify_shout(shout_dict, 'update') await notify_shout(shout_by_id.dict(), "update")
else: else:
await notify_shout(shout_dict, 'published') await notify_shout(shout_by_id.dict(), "published")
# search service indexing # search service indexing
search_service.index(shout_by_id) search_service.index(shout_by_id)
for a in shout_by_id.authors:
await cache_by_id(Author, a.id, cache_author)
logger.info(f"shout#{shout_id} updated")
# Получаем полные данные шаута со связями
shout_with_relations = (
session.query(Shout)
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
.filter(Shout.id == shout_id)
.first()
)
return {'shout': shout_dict, 'error': None} # Создаем словарь с базовыми полями
shout_dict = shout_with_relations.dict()
# Явно добавляем связанные данные
shout_dict["topics"] = (
[
{"id": topic.id, "slug": topic.slug, "title": topic.title}
for topic in shout_with_relations.topics
]
if shout_with_relations.topics
else []
)
# Add main_topic to the shout dictionary
shout_dict["main_topic"] = get_main_topic(shout_with_relations.topics)
shout_dict["authors"] = (
[
{"id": author.id, "name": author.name, "slug": author.slug}
for author in shout_with_relations.authors
]
if shout_with_relations.authors
else []
)
logger.info(f"Final shout data with relations: {shout_dict}")
logger.debug(
f"Loaded topics details: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in shout_with_relations.topics]}"
)
return {"shout": shout_dict, "error": None}
else: else:
return {'error': 'access denied', 'shout': None} logger.warning(f"Access denied: author #{author_id} cannot edit shout#{shout_id}")
return {"error": "access denied", "shout": None}
except Exception as exc: except Exception as exc:
import traceback logger.error(f"Unexpected error in update_shout: {exc}", exc_info=True)
logger.error(f"Failed input data: {shout_input}")
return {"error": "cant update shout"}
traceback.print_exc() return {"error": "cant update shout"}
logger.error(exc)
logger.error(f' cannot update with data: {shout_input}')
return {'error': 'cant update shout'}
@mutation.field('delete_shout') # @mutation.field("delete_shout")
@login_required # @login_required
async def delete_shout(_, info, shout_id: int): async def delete_shout(_, info, shout_id: int):
user_id = info.context.get('user_id') user_id = info.context.get("user_id")
roles = info.context.get('roles') roles = info.context.get("roles", [])
if user_id: author_dict = info.context.get("author")
if not author_dict:
return {"error": "author profile was not found"}
author_id = author_dict.get("id")
if user_id and author_id:
author_id = int(author_id)
with local_session() as session: with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first() shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout: if not isinstance(shout, Shout):
return {'error': 'invalid shout id'} return {"error": "invalid shout id"}
if author and shout:
# NOTE: only owner and editor can mark the shout as deleted
if shout.created_by == author.id or 'editor' in roles:
for author_id in shout.authors:
reactions_unfollow(author_id, shout_id)
shout_dict = shout.dict()
shout_dict['deleted_at'] = int(time.time())
Shout.update(shout, shout_dict)
session.add(shout)
session.commit()
await notify_shout(shout_dict, 'delete')
return {'error': None}
else:
return {'error': 'access denied'}
def handle_proposing(session, r, shout):
if is_positive(r.kind):
replied_reaction = (
session.query(Reaction)
.filter(Reaction.id == r.reply_to, Reaction.shout == r.shout)
.first()
)
if (
replied_reaction
and replied_reaction.kind is ReactionKind.PROPOSE.value
and replied_reaction.quote
):
# patch all the proposals' quotes
proposals = (
session.query(Reaction)
.filter(
and_(
Reaction.shout == r.shout,
Reaction.kind == ReactionKind.PROPOSE.value,
)
)
.all()
)
for proposal in proposals:
if proposal.quote:
proposal_diff = get_diff(shout.body, proposal.quote)
proposal_dict = proposal.dict()
proposal_dict['quote'] = apply_diff(
replied_reaction.quote, proposal_diff
)
Reaction.update(proposal, proposal_dict)
session.add(proposal)
# patch shout's body
shout_dict = shout.dict() shout_dict = shout.dict()
shout_dict['body'] = replied_reaction.quote # NOTE: only owner and editor can mark the shout as deleted
Shout.update(shout, shout_dict) if shout_dict["created_by"] == author_id or "editor" in roles:
session.add(shout) shout_dict["deleted_at"] = int(time.time())
session.commit() Shout.update(shout, shout_dict)
session.add(shout)
session.commit()
if is_negative(r.kind): for author in shout.authors:
# TODO: rejection logic await cache_by_id(Author, author.id, cache_author)
pass info.context["author"] = author.dict()
info.context["user_id"] = author.user
unfollow(None, info, "shout", shout.slug)
for topic in shout.topics:
await cache_by_id(Topic, topic.id, cache_topic)
await notify_shout(shout_dict, "delete")
return {"error": None}
else:
return {"error": "access denied"}
def get_main_topic(topics):
"""Get the main topic from a list of ShoutTopic objects."""
logger.info(f"Starting get_main_topic with {len(topics) if topics else 0} topics")
logger.debug(
f"Topics data: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in topics] if topics else []}"
)
if not topics:
logger.warning("No topics provided to get_main_topic")
return {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}
# Find first main topic in original order
main_topic_rel = next((st for st in topics if st.main), None)
logger.debug(
f"Found main topic relation: {main_topic_rel.topic.slug if main_topic_rel and main_topic_rel.topic else None}"
)
if main_topic_rel and main_topic_rel.topic:
result = {
"slug": main_topic_rel.topic.slug,
"title": main_topic_rel.topic.title,
"id": main_topic_rel.topic.id,
"is_main": True,
}
logger.info(f"Returning main topic: {result}")
return result
# If no main found but topics exist, return first
if topics and topics[0].topic:
logger.info(f"No main topic found, using first topic: {topics[0].topic.slug}")
result = {
"slug": topics[0].topic.slug,
"title": topics[0].topic.title,
"id": topics[0].topic.id,
"is_main": True,
}
return result
logger.warning("No valid topics found, returning default")
return {"slug": "notopic", "title": "no topic", "id": 0, "is_main": True}

resolvers/feed.py (new file, 198 lines)

@@ -0,0 +1,198 @@
from typing import List
from sqlalchemy import and_, select
from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.reader import (
apply_options,
get_shouts_with_links,
has_field,
query_with_stat,
)
from services.auth import login_required
from services.db import local_session
from services.schema import query
from utils.logger import root_logger as logger
@query.field("load_shouts_coauthored")
@login_required
async def load_shouts_coauthored(_, info, options):
"""
Загрузка публикаций, написанных в соавторстве с пользователем.
:param info: Информаци о контексте GraphQL.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций в соавтостве.
"""
author_id = info.context.get("author", {}).get("id")
if not author_id:
return []
q = query_with_stat(info)
q = q.filter(Shout.authors.any(id=author_id))
q, limit, offset = apply_options(q, options)
return get_shouts_with_links(info, q, limit, offset=offset)
@query.field("load_shouts_discussed")
@login_required
async def load_shouts_discussed(_, info, options):
"""
Загрузка публикаций, которые обсуждались пользователем.
:param info: Информация о контексте GraphQL.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций, обсужденых пользователем.
"""
author_id = info.context.get("author", {}).get("id")
if not author_id:
return []
q = query_with_stat(info)
options["filters"]["commented"] = True
q, limit, offset = apply_options(q, options, author_id)
return get_shouts_with_links(info, q, limit, offset=offset)
def shouts_by_follower(info, follower_id: int, options):
"""
Загружает публикации, на которые подписан автор.
- по авторам
- по темам
- по реакциям
:param info: Информация о контексте GraphQL.
:param follower_id: Идентификатор автора.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций.
"""
q = query_with_stat(info)
reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == follower_id)
reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == follower_id)
reader_followed_shouts = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == follower_id)
followed_subquery = (
select(Shout.id)
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.join(ShoutTopic, ShoutTopic.shout == Shout.id)
.where(
ShoutAuthor.author.in_(reader_followed_authors)
| ShoutTopic.topic.in_(reader_followed_topics)
| Shout.id.in_(reader_followed_shouts)
)
.scalar_subquery()
)
q = q.filter(Shout.id.in_(followed_subquery))
q, limit, offset = apply_options(q, options)
shouts = get_shouts_with_links(info, q, limit, offset=offset)
return shouts
@query.field("load_shouts_followed_by")
async def load_shouts_followed_by(_, info, slug: str, options) -> List[Shout]:
"""
Загружает публикации, на которые подписан автор по slug.
:param info: Информация о контексте GraphQL.
:param slug: Slug автора.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций.
"""
with local_session() as session:
author = session.query(Author).filter(Author.slug == slug).first()
if author:
follower_id = author.dict()["id"]
shouts = shouts_by_follower(info, follower_id, options)
return shouts
return []
@query.field("load_shouts_feed")
@login_required
async def load_shouts_feed(_, info, options) -> List[Shout]:
"""
Загружает публикации, на которые подписан авторизованный пользователь.
:param info: Информация о контексте GraphQL.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций.
"""
author_id = info.context.get("author", {}).get("id")
return shouts_by_follower(info, author_id, options) if author_id else []
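# A plausible options payload for the feed resolvers above. Only the "filters"
# keys ("published", "topic", "commented") appear in this file; limit/offset
# handling lives in resolvers.reader.apply_options, so those key names are an
# assumption.
example_options = {
    "filters": {"published": True, "topic": "culture"},
    "limit": 20,
    "offset": 0,
}
# shouts = await load_shouts_feed(None, info, example_options)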
@query.field("load_shouts_authored_by")
async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
"""
Загружает публикации, написанные автором по slug.
:param info: Информация о контексте GraphQL.
:param slug: Slug автора.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций.
"""
with local_session() as session:
author = session.query(Author).filter(Author.slug == slug).first()
if author:
try:
author_id: int = author.dict()["id"]
q = (
query_with_stat(info)
if has_field(info, "stat")
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
q = q.filter(Shout.authors.any(id=author_id))
q, limit, offset = apply_options(q, options, author_id)
shouts = get_shouts_with_links(info, q, limit, offset=offset)
return shouts
except Exception as error:
logger.debug(error)
return []
@query.field("load_shouts_with_topic")
async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
"""
Загружает публикации, связанные с темой по slug.
:param info: Информация о контексте GraphQL.
:param slug: Slug темы.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций.
"""
with local_session() as session:
topic = session.query(Topic).filter(Topic.slug == slug).first()
if topic:
try:
topic_id: int = topic.dict()["id"]
q = (
query_with_stat(info)
if has_field(info, "stat")
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
q = q.filter(Shout.topics.any(id=topic_id))
q, limit, offset = apply_options(q, options)
shouts = get_shouts_with_links(info, q, limit, offset=offset)
return shouts
except Exception as error:
logger.debug(error)
return []
def apply_filters(q, filters):
"""
Применяет фильтры к запросу
"""
logger.info(f"Applying filters: {filters}")
if filters.get("published"):
q = q.filter(Shout.published_at.is_not(None))
logger.info("Added published filter")
if filters.get("topic"):
topic_slug = filters["topic"]
q = q.join(ShoutTopic).join(Topic).filter(Topic.slug == topic_slug)
logger.info(f"Added topic filter: {topic_slug}")
return q
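# Sketch of composing apply_filters with a plain select; "published" and
# "topic" are the only filter keys this helper understands.
def example_published_topic_query(topic_slug: str):
    q = select(Shout).filter(Shout.deleted_at.is_(None))
    return apply_filters(q, {"published": True, "topic": topic_slug})
# the result joins ShoutTopic/Topic and keeps only published shouts with that slug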


@@ -1,298 +1,222 @@
-import json
-import time
 from typing import List
-from psycopg2.errors import UniqueViolation
+from graphql import GraphQLError
-from sqlalchemy import or_, select
+from sqlalchemy import select
 from sqlalchemy.sql import and_
+from cache.cache import (
+    cache_author,
+    cache_topic,
+    get_cached_follower_authors,
+    get_cached_follower_topics,
+)
 from orm.author import Author, AuthorFollower
-from orm.community import Community
+from orm.community import Community, CommunityFollower
 from orm.reaction import Reaction
 from orm.shout import Shout, ShoutReactionsFollower
 from orm.topic import Topic, TopicFollower
-from resolvers.stat import author_follows_authors, author_follows_topics, get_with_stat
+from resolvers.stat import get_with_stat
 from services.auth import login_required
-from services.cache import DEFAULT_FOLLOWS
 from services.db import local_session
-from services.logger import root_logger as logger
 from services.notify import notify_follower
-from services.rediscache import redis
 from services.schema import mutation, query
+from utils.logger import root_logger as logger
@mutation.field('follow') @mutation.field("follow")
@login_required @login_required
async def follow(_, info, what, slug): async def follow(_, info, what, slug="", entity_id=0):
follows = [] logger.debug("Начало выполнения функции 'follow'")
error = None user_id = info.context.get("user_id")
user_id = info.context.get('user_id') follower_dict = info.context.get("author")
if not user_id: logger.debug(f"follower: {follower_dict}")
return {'error': 'unauthorized'}
follower = local_session().query(Author).filter(Author.user == user_id).first() if not user_id or not follower_dict:
if not follower: return GraphQLError("unauthorized")
return {'error': 'cant find follower'}
if what == 'AUTHOR':
error = author_follow(follower.id, slug)
if not error:
author = local_session().query(Author).where(Author.slug == slug).first()
if author:
await notify_follower(follower.dict(), author.id, 'follow')
elif what == 'TOPIC': follower_id = follower_dict.get("id")
error = topic_follow(follower.id, slug) logger.debug(f"follower_id: {follower_id}")
elif what == 'COMMUNITY': entity_classes = {
# FIXME: when more communities "AUTHOR": (Author, AuthorFollower, get_cached_follower_authors, cache_author),
follows = local_session().execute(select(Community)) "TOPIC": (Topic, TopicFollower, get_cached_follower_topics, cache_topic),
"COMMUNITY": (Community, CommunityFollower, None, None), # Нет методов кэша для сообщества
"SHOUT": (Shout, ShoutReactionsFollower, None, None), # Нет методов кэша для shout
}
elif what == 'SHOUT': if what not in entity_classes:
error = reactions_follow(follower.id, slug) logger.error(f"Неверный тип для следования: {what}")
return {"error": "invalid follow type"}
if error: entity_class, follower_class, get_cached_follows_method, cache_method = entity_classes[what]
return {'error': error} entity_type = what.lower()
entity_dict = None
entity = what.lower()
follows_str = await redis.execute('GET', f'author:{follower.id}:follows-{entity}s')
if follows_str:
follows = json.loads(follows_str)
return { f'{entity}s': follows }
@mutation.field('unfollow')
@login_required
async def unfollow(_, info, what, slug):
follows = []
error = None
user_id = info.context.get('user_id')
if not user_id:
return {'error': 'unauthorized'}
follower = local_session().query(Author).filter(Author.user == user_id).first()
if not follower:
return {'error': 'follower profile is not found'}
if what == 'AUTHOR':
error = author_unfollow(follower.id, slug)
# NOTE: after triggers should update cached stats
if not error:
logger.info(f'@{follower.slug} unfollowed @{slug}')
author = local_session().query(Author).where(Author.slug == slug).first()
if author:
await notify_follower(follower.dict(), author.id, 'unfollow')
elif what == 'TOPIC':
error = topic_unfollow(follower.id, slug)
elif what == 'COMMUNITY':
follows = local_session().execute(select(Community))
elif what == 'SHOUT':
error = reactions_unfollow(follower.id, slug)
entity = what.lower()
follows_str = await redis.execute('GET', f'author:{follower.id}:follows-{entity}s')
if follows_str:
follows = json.loads(follows_str)
return {'error': error, f'{entity}s': follows}
async def get_follows_by_user_id(user_id: str):
if not user_id:
return {'error': 'unauthorized'}
author = await redis.execute('GET', f'user:{user_id}')
if isinstance(author, str):
author = json.loads(author)
if not author:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
return {'error': 'cant find author'}
author = author.dict()
last_seen = author.get('last_seen', 0) if isinstance(author, dict) else 0
follows = DEFAULT_FOLLOWS
day_old = int(time.time()) - last_seen > 24 * 60 * 60
if day_old:
author_id = json.loads(str(author)).get('id')
if author_id:
topics = author_follows_topics(author_id)
authors = author_follows_authors(author_id)
follows = {
'topics': topics,
'authors': authors,
'communities': [
{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
],
}
else:
logger.debug(f'getting follows for {user_id} from redis')
res = await redis.execute('GET', f'user:{user_id}:follows')
if isinstance(res, str):
follows = json.loads(res)
return follows
def topic_follow(follower_id, slug):
try: try:
logger.debug("Попытка получить сущность из базы данных")
with local_session() as session: with local_session() as session:
topic = session.query(Topic).where(Topic.slug == slug).one() entity_query = select(entity_class).filter(entity_class.slug == slug)
_following = TopicFollower(topic=topic.id, follower=follower_id) entities = get_with_stat(entity_query)
return None [entity] = entities
except UniqueViolation as error: if not entity:
logger.warn(error) logger.warning(f"{what.lower()} не найден по slug: {slug}")
return 'already followed' return {"error": f"{what.lower()} not found"}
if not entity_id and entity:
entity_id = entity.id
entity_dict = entity.dict()
logger.debug(f"entity_id: {entity_id}, entity_dict: {entity_dict}")
if entity_id:
logger.debug("Проверка существующей подписки")
with local_session() as session:
existing_sub = (
session.query(follower_class)
.filter(follower_class.follower == follower_id, getattr(follower_class, entity_type) == entity_id)
.first()
)
if existing_sub:
logger.info(f"Пользователь {follower_id} уже подписан на {what.lower()} с ID {entity_id}")
else:
logger.debug("Добавление новой записи в базу данных")
sub = follower_class(follower=follower_id, **{entity_type: entity_id})
logger.debug(f"Создан объект подписки: {sub}")
session.add(sub)
session.commit()
logger.info(f"Пользователь {follower_id} подписался на {what.lower()} с ID {entity_id}")
follows = None
if cache_method:
logger.debug("Обновление кэша")
await cache_method(entity_dict)
if get_cached_follows_method:
logger.debug("Получение подписок из кэша")
existing_follows = await get_cached_follows_method(follower_id)
follows = [*existing_follows, entity_dict] if not existing_sub else existing_follows
logger.debug("Обновлен список подписок")
if what == "AUTHOR" and not existing_sub:
logger.debug("Отправка уведомления автору о подписке")
await notify_follower(follower=follower_dict, author_id=entity_id, action="follow")
except Exception as exc: except Exception as exc:
logger.error(exc) logger.exception("Произошла ошибка в функции 'follow'")
return exc return {"error": str(exc)}
return {f"{what.lower()}s": follows}
def topic_unfollow(follower_id, slug): @mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug="", entity_id=0):
logger.debug("Начало выполнения функции 'unfollow'")
user_id = info.context.get("user_id")
follower_dict = info.context.get("author")
logger.debug(f"follower: {follower_dict}")
if not user_id or not follower_dict:
logger.warning("Неавторизованный доступ при попытке отписаться")
return {"error": "unauthorized"}
follower_id = follower_dict.get("id")
logger.debug(f"follower_id: {follower_id}")
entity_classes = {
"AUTHOR": (Author, AuthorFollower, get_cached_follower_authors, cache_author),
"TOPIC": (Topic, TopicFollower, get_cached_follower_topics, cache_topic),
"COMMUNITY": (Community, CommunityFollower, None, None), # Нет методов кэша для сообщества
"SHOUT": (Shout, ShoutReactionsFollower, None, None), # Нет методов кэша для shout
}
if what not in entity_classes:
logger.error(f"Неверный тип для отписки: {what}")
return {"error": "invalid unfollow type"}
entity_class, follower_class, get_cached_follows_method, cache_method = entity_classes[what]
entity_type = what.lower()
follows = []
error = None
try: try:
logger.debug("Попытка получить сущность из базы данных")
with local_session() as session: with local_session() as session:
entity = session.query(entity_class).filter(entity_class.slug == slug).first()
logger.debug(f"Полученная сущность: {entity}")
if not entity:
logger.warning(f"{what.lower()} не найден по slug: {slug}")
return {"error": f"{what.lower()} not found"}
if entity and not entity_id:
entity_id = entity.id
logger.debug(f"entity_id: {entity_id}")
sub = ( sub = (
session.query(TopicFollower) session.query(follower_class)
.join(Topic) .filter(
.filter(and_(TopicFollower.follower == follower_id, Topic.slug == slug)) and_(
getattr(follower_class, "follower") == follower_id,
getattr(follower_class, entity_type) == entity_id,
)
)
.first() .first()
) )
logger.debug(f"Найдена подписка для удаления: {sub}")
if sub: if sub:
session.delete(sub) session.delete(sub)
session.commit() session.commit()
return None logger.info(f"Пользователь {follower_id} отписался от {what.lower()} с ID {entity_id}")
except UniqueViolation as error:
logger.warn(error)
return 'already unfollowed'
except Exception as ex:
logger.debug(ex)
return ex
if cache_method:
logger.debug("Обновление кэша после отписки")
await cache_method(entity.dict())
if get_cached_follows_method:
logger.debug("Получение подписок из кэша")
existing_follows = await get_cached_follows_method(follower_id)
follows = filter(lambda x: x["id"] != entity_id, existing_follows)
logger.debug("Обновлен список подписок")
def reactions_follow(author_id, shout_id, auto=False): if what == "AUTHOR":
try: logger.debug("Отправка уведомления автору об отписке")
with local_session() as session: await notify_follower(follower=follower_dict, author_id=entity_id, action="unfollow")
shout = session.query(Shout).where(Shout.id == shout_id).one() else:
return {"error": "following was not found", f"{entity_type}s": follows}
following = (
session.query(ShoutReactionsFollower)
.where(
and_(
ShoutReactionsFollower.follower == author_id,
ShoutReactionsFollower.shout == shout.id,
)
)
.first()
)
if not following:
following = ShoutReactionsFollower(
follower=author_id, shout=shout.id, auto=auto
)
session.add(following)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already followed'
except Exception as exc: except Exception as exc:
return exc logger.exception("Произошла ошибка в функции 'unfollow'")
def reactions_unfollow(author_id, shout_id: int):
try:
with local_session() as session:
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
session.query(ShoutReactionsFollower)
.where(
and_(
ShoutReactionsFollower.follower == author_id,
ShoutReactionsFollower.shout == shout.id,
)
)
.first()
)
if following:
session.delete(following)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already unfollowed'
except Exception as ex:
import traceback import traceback
traceback.print_exc() traceback.print_exc()
return ex return {"error": str(exc)}
# logger.debug(f"Функция 'unfollow' завершена успешно с результатом: {entity_type}s={follows}, error={error}")
return {f"{entity_type}s": follows, "error": error}
# for mutation.field("follow") @query.field("get_shout_followers")
def author_follow(follower_id, slug): def get_shout_followers(_, _info, slug: str = "", shout_id: int | None = None) -> List[Author]:
try: logger.debug("Начало выполнения функции 'get_shout_followers'")
with local_session() as session:
author = session.query(Author).where(Author.slug == slug).one()
af = AuthorFollower(follower=follower_id, author=author.id)
session.add(af)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already followed'
except Exception as exc:
import traceback
traceback.print_exc()
return exc
# for mutation.field("unfollow")
def author_unfollow(follower_id, slug):
try:
with local_session() as session:
flw = (
session.query(AuthorFollower)
.join(Author, Author.id == AuthorFollower.author)
.filter(
and_(AuthorFollower.follower == follower_id, Author.slug == slug)
)
.first()
)
if flw:
session.delete(flw)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already unfollowed'
except Exception as exc:
return exc
@query.field('get_topic_followers')
async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author]:
q = select(Author)
q = (
q.join(TopicFollower, TopicFollower.follower == Author.id)
.join(Topic, Topic.id == TopicFollower.topic)
.filter(or_(Topic.slug == slug, Topic.id == topic_id))
)
return get_with_stat(q)
@query.field('get_shout_followers')
def get_shout_followers(
_, _info, slug: str = '', shout_id: int | None = None
) -> List[Author]:
followers = [] followers = []
with local_session() as session: try:
shout = None with local_session() as session:
if slug: shout = None
shout = session.query(Shout).filter(Shout.slug == slug).first() if slug:
elif shout_id: shout = session.query(Shout).filter(Shout.slug == slug).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first() logger.debug(f"Найден shout по slug: {slug} -> {shout}")
if shout: elif shout_id:
reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all() shout = session.query(Shout).filter(Shout.id == shout_id).first()
for r in reactions: logger.debug(f"Найден shout по ID: {shout_id} -> {shout}")
followers.append(r.created_by)
if shout:
reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all()
logger.debug(f"Полученные реакции для shout ID {shout.id}: {reactions}")
for r in reactions:
followers.append(r.created_by)
logger.debug(f"Добавлен follower: {r.created_by}")
except Exception as _exc:
import traceback
traceback.print_exc()
logger.exception("Произошла ошибка в функции 'get_shout_followers'")
return []
# logger.debug(f"Функция 'get_shout_followers' завершена с {len(followers)} подписчиками")
return followers return followers


@@ -1,27 +1,29 @@
-import json
 import time
 from typing import List, Tuple
+import orjson
 from sqlalchemy import and_, select
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm import aliased
 from sqlalchemy.sql import not_
 from orm.author import Author
-from orm.notification import (Notification, NotificationAction,
-    NotificationEntity, NotificationSeen)
+from orm.notification import (
+    Notification,
+    NotificationAction,
+    NotificationEntity,
+    NotificationSeen,
+)
 from orm.shout import Shout
 from services.auth import login_required
 from services.db import local_session
-from services.logger import root_logger as logger
 from services.schema import mutation, query
+from utils.logger import root_logger as logger
def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
notification_seen_alias = aliased(NotificationSeen)
q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
NotificationSeen,
and_(
NotificationSeen.viewer == author_id,
@@ -64,25 +66,21 @@ def query_notifications(
return total, unread, notifications
def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
reactions = reactions or []
authors = authors or []
return {
"thread": thread,
"authors": authors,
"updated_at": int(time.time()),
"shout": shout,
"reactions": reactions,
"entity": entity,
"action": action,
}
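# Example of the grouped payload this helper produces for a follower event
# (values illustrative).
_follower_group = group_notification(
    thread="followers",
    authors=[{"id": 42, "slug": "new-follower"}],
    entity="follower",
    action="follow",
)
# -> {'thread': 'followers', 'authors': [{'id': 42, ...}], 'updated_at': <unix ts>,
#     'shout': None, 'reactions': [], 'entity': 'follower', 'action': 'follow'}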
def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0):
"""
Retrieves notifications for a given author.
@@ -117,13 +115,13 @@ def get_notifications_grouped(
if (groups_amount + offset) >= limit: if (groups_amount + offset) >= limit:
break break
payload = json.loads(str(notification.payload)) payload = orjson.loads(str(notification.payload))
if str(notification.entity) == NotificationEntity.SHOUT.value: if str(notification.entity) == NotificationEntity.SHOUT.value:
shout = payload shout = payload
shout_id = shout.get('id') shout_id = shout.get("id")
author_id = shout.get('created_by') author_id = shout.get("created_by")
thread_id = f'shout-{shout_id}' thread_id = f"shout-{shout_id}"
with local_session() as session: with local_session() as session:
author = session.query(Author).filter(Author.id == author_id).first() author = session.query(Author).filter(Author.id == author_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first() shout = session.query(Shout).filter(Shout.id == shout_id).first()
@@ -142,31 +140,27 @@ def get_notifications_grouped(
elif str(notification.entity) == NotificationEntity.REACTION.value: elif str(notification.entity) == NotificationEntity.REACTION.value:
reaction = payload reaction = payload
if not isinstance(shout, dict): if not isinstance(reaction, dict):
raise ValueError('reaction data is not consistent') raise ValueError("reaction data is not consistent")
shout_id = shout.get('shout') shout_id = reaction.get("shout")
author_id = shout.get('created_by', 0) author_id = reaction.get("created_by", 0)
if shout_id and author_id: if shout_id and author_id:
with local_session() as session: with local_session() as session:
author = ( author = session.query(Author).filter(Author.id == author_id).first()
session.query(Author).filter(Author.id == author_id).first()
)
shout = session.query(Shout).filter(Shout.id == shout_id).first() shout = session.query(Shout).filter(Shout.id == shout_id).first()
if shout and author: if shout and author:
author = author.dict() author = author.dict()
shout = shout.dict() shout = shout.dict()
reply_id = reaction.get('reply_to') reply_id = reaction.get("reply_to")
thread_id = f'shout-{shout_id}' thread_id = f"shout-{shout_id}"
if reply_id and reaction.get('kind', '').lower() == 'comment': if reply_id and reaction.get("kind", "").lower() == "comment":
thread_id += f'{reply_id}' thread_id += f"{reply_id}"
existing_group = groups_by_thread.get(thread_id) existing_group = groups_by_thread.get(thread_id)
if existing_group: if existing_group:
existing_group['seen'] = False existing_group["seen"] = False
existing_group['authors'].append(author_id) existing_group["authors"].append(author_id)
existing_group['reactions'] = ( existing_group["reactions"] = existing_group["reactions"] or []
existing_group['reactions'] or [] existing_group["reactions"].append(reaction)
)
existing_group['reactions'].append(reaction)
groups_by_thread[thread_id] = existing_group groups_by_thread[thread_id] = existing_group
else: else:
group = group_notification( group = group_notification(
@@ -181,18 +175,18 @@ def get_notifications_grouped(
groups_by_thread[thread_id] = group groups_by_thread[thread_id] = group
groups_amount += 1 groups_amount += 1
elif str(notification.entity) == 'follower': elif str(notification.entity) == "follower":
thread_id = 'followers' thread_id = "followers"
follower = json.loads(payload) follower = orjson.loads(payload)
group = groups_by_thread.get(thread_id) group = groups_by_thread.get(thread_id)
if group: if group:
if str(notification.action) == 'follow': if str(notification.action) == "follow":
group['authors'].append(follower) group["authors"].append(follower)
elif str(notification.action) == 'unfollow': elif str(notification.action) == "unfollow":
follower_id = follower.get('id') follower_id = follower.get("id")
for author in group['authors']: for author in group["authors"]:
if author.get('id') == follower_id: if author.get("id") == follower_id:
group['authors'].remove(author) group["authors"].remove(author)
break break
else: else:
group = group_notification( group = group_notification(
@@ -206,10 +200,11 @@ def get_notifications_grouped(
return groups_by_thread, unread, total return groups_by_thread, unread, total
@query.field('load_notifications') @query.field("load_notifications")
@login_required @login_required
async def load_notifications(_, info, after: int, limit: int = 50, offset=0): async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
author_id = info.context.get('author_id') author_dict = info.context.get("author")
author_id = author_dict.get("id")
error = None error = None
total = 0 total = 0
unread = 0 unread = 0
@@ -217,24 +212,22 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
try: try:
if author_id: if author_id:
groups, unread, total = get_notifications_grouped(author_id, after, limit) groups, unread, total = get_notifications_grouped(author_id, after, limit)
notifications = sorted( notifications = sorted(groups.values(), key=lambda group: group.updated_at, reverse=True)
groups.values(), key=lambda group: group.updated_at, reverse=True
)
except Exception as e: except Exception as e:
error = e error = e
logger.error(e) logger.error(e)
return { return {
'notifications': notifications, "notifications": notifications,
'total': total, "total": total,
'unread': unread, "unread": unread,
'error': error, "error": error,
} }
@mutation.field('notification_mark_seen') @mutation.field("notification_mark_seen")
@login_required @login_required
async def notification_mark_seen(_, info, notification_id: int): async def notification_mark_seen(_, info, notification_id: int):
author_id = info.context.get('author_id') author_id = info.context.get("author", {}).get("id")
if author_id: if author_id:
with local_session() as session: with local_session() as session:
try: try:
@@ -243,25 +236,21 @@ async def notification_mark_seen(_, info, notification_id: int):
session.commit() session.commit()
except SQLAlchemyError as e: except SQLAlchemyError as e:
session.rollback() session.rollback()
logger.error(f'seen mutation failed: {e}') logger.error(f"seen mutation failed: {e}")
return {'error': 'cant mark as read'} return {"error": "cant mark as read"}
return {'error': None} return {"error": None}
@mutation.field('notifications_seen_after') @mutation.field("notifications_seen_after")
@login_required @login_required
async def notifications_seen_after(_, info, after: int): async def notifications_seen_after(_, info, after: int):
# TODO: use latest loaded notification_id as input offset parameter # TODO: use latest loaded notification_id as input offset parameter
error = None error = None
try: try:
author_id = info.context.get('author_id') author_id = info.context.get("author", {}).get("id")
if author_id: if author_id:
with local_session() as session: with local_session() as session:
nnn = ( nnn = session.query(Notification).filter(and_(Notification.created_at > after)).all()
session.query(Notification)
.filter(and_(Notification.created_at > after))
.all()
)
for n in nnn: for n in nnn:
try: try:
ns = NotificationSeen(notification=n.id, viewer=author_id) ns = NotificationSeen(notification=n.id, viewer=author_id)
@@ -271,24 +260,24 @@ async def notifications_seen_after(_, info, after: int):
session.rollback() session.rollback()
except Exception as e: except Exception as e:
print(e) print(e)
error = 'cant mark as read' error = "cant mark as read"
return {'error': error} return {"error": error}
@mutation.field('notifications_seen_thread') @mutation.field("notifications_seen_thread")
@login_required @login_required
async def notifications_seen_thread(_, info, thread: str, after: int): async def notifications_seen_thread(_, info, thread: str, after: int):
error = None error = None
author_id = info.context.get('author_id') author_id = info.context.get("author", {}).get("id")
if author_id: if author_id:
[shout_id, reply_to_id] = thread.split(':') [shout_id, reply_to_id] = thread.split(":")
with local_session() as session: with local_session() as session:
# TODO: handle new follower and new shout notifications # TODO: handle new follower and new shout notifications
new_reaction_notifications = ( new_reaction_notifications = (
session.query(Notification) session.query(Notification)
.filter( .filter(
Notification.action == 'create', Notification.action == "create",
Notification.entity == 'reaction', Notification.entity == "reaction",
Notification.created_at > after, Notification.created_at > after,
) )
.all() .all()
@@ -296,24 +285,24 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
removed_reaction_notifications = ( removed_reaction_notifications = (
session.query(Notification) session.query(Notification)
.filter( .filter(
Notification.action == 'delete', Notification.action == "delete",
Notification.entity == 'reaction', Notification.entity == "reaction",
Notification.created_at > after, Notification.created_at > after,
) )
.all() .all()
) )
exclude = set() exclude = set()
for nr in removed_reaction_notifications: for nr in removed_reaction_notifications:
reaction = json.loads(str(nr.payload)) reaction = orjson.loads(str(nr.payload))
reaction_id = reaction.get('id') reaction_id = reaction.get("id")
exclude.add(reaction_id) exclude.add(reaction_id)
for n in new_reaction_notifications: for n in new_reaction_notifications:
reaction = json.loads(str(n.payload)) reaction = orjson.loads(str(n.payload))
reaction_id = reaction.get('id') reaction_id = reaction.get("id")
if ( if (
reaction_id not in exclude reaction_id not in exclude
and reaction.get('shout') == shout_id and reaction.get("shout") == shout_id
and reaction.get('reply_to') == reply_to_id and reaction.get("reply_to") == reply_to_id
): ):
try: try:
ns = NotificationSeen(notification=n.id, viewer=author_id) ns = NotificationSeen(notification=n.id, viewer=author_id)
@@ -323,5 +312,5 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
logger.warn(e) logger.warn(e)
session.rollback() session.rollback()
else: else:
error = 'You are not logged in' error = "You are not logged in"
return {'error': error} return {"error": error}
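The grouping logic above keys notifications by a thread id: "shout-<id>", with the replied-to reaction id appended for comment replies, plus a single literal "followers" thread. A small standalone sketch of the resulting structure, using sample payload values and assuming orjson is installed as in the resolvers above:

import time

import orjson


def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
    # same dict shape as the resolver helper above
    return {
        "thread": thread,
        "authors": authors or [],
        "updated_at": int(time.time()),
        "shout": shout,
        "reactions": reactions or [],
        "entity": entity,
        "action": action,
    }


groups_by_thread = {}
# a "create reaction" notification replying to reaction 2 on shout 1 lands in thread "shout-12"
thread_id = f"shout-{1}" + f"{2}"
groups_by_thread[thread_id] = group_notification(
    thread_id, authors=[5], shout={"id": 1}, reactions=[{"id": 7, "kind": "COMMENT"}], entity="reaction", action="create"
)
print(orjson.dumps(groups_by_thread).decode())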

resolvers/proposals.py (new file)

@@ -0,0 +1,49 @@
from sqlalchemy import and_

from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from services.db import local_session
from utils.diff import apply_diff, get_diff


def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
    with local_session() as session:
        if is_positive(kind):
            replied_reaction = (
                session.query(Reaction).filter(Reaction.id == reply_to, Reaction.shout == shout_id).first()
            )

            if replied_reaction and replied_reaction.kind is ReactionKind.PROPOSE.value and replied_reaction.quote:
                # patch all the proposals' quotes
                proposals = (
                    session.query(Reaction)
                    .filter(
                        and_(
                            Reaction.shout == shout_id,
                            Reaction.kind == ReactionKind.PROPOSE.value,
                        )
                    )
                    .all()
                )

                # patch shout's body
                shout = session.query(Shout).filter(Shout.id == shout_id).first()
                body = replied_reaction.quote
                Shout.update(shout, {"body": body})  # pass a field dict, not a set literal
                session.add(shout)
                session.commit()

                # реакция содержит цитату -> обновляются все предложения
                # (proposals) для соответствующего Shout.
                for proposal in proposals:
                    if proposal.quote:
                        proposal_diff = get_diff(shout.body, proposal.quote)
                        proposal_dict = proposal.dict()
                        proposal_dict["quote"] = apply_diff(replied_reaction.quote, proposal_diff)
                        Reaction.update(proposal, proposal_dict)
                        session.add(proposal)

        if is_negative(kind):
            # TODO: rejection logic
            pass
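handle_proposing defers the text work to utils.diff (get_diff / apply_diff), which are imported above but not shown in this excerpt. As a rough standalone illustration of the underlying idea, computing a diff between the published body and a proposal's quote before reapplying it, here is a difflib-based sketch; the real helpers' names and exact semantics are assumptions here.

import difflib

shout_body = "The quick brown fox jumps over the lazy dog"
proposal_quote = "The quick red fox jumps over the lazy dog"

# word-level unified diff between the published body and one proposal's quote
diff = list(difflib.unified_diff(shout_body.split(), proposal_quote.split(), lineterm=""))
print("\n".join(diff))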


@@ -0,0 +1,25 @@
{
"include": [
"."
],
"exclude": [
"**/node_modules",
"**/__pycache__",
"**/.*"
],
"defineConstant": {
"DEBUG": true
},
"venvPath": ".",
"venv": ".venv",
"pythonVersion": "3.11",
"typeCheckingMode": "strict",
"reportMissingImports": true,
"reportMissingTypeStubs": false,
"reportUnknownMemberType": false,
"reportUnknownParameterType": false,
"reportUnknownVariableType": false,
"reportUnknownArgumentType": false,
"reportPrivateUsage": false,
"reportUntypedFunctionDecorator": false
}


@@ -6,23 +6,107 @@ from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
from utils.logger import root_logger as logger


@query.field("get_my_rates_comments")
@login_required
async def get_my_rates_comments(_, info, comments: list[int]) -> list[dict]:
    """
    Получение реакций пользователя на комментарии

    Args:
        info: Контекст запроса
        comments: Список ID комментариев

    Returns:
        list[dict]: Список словарей с реакциями пользователя на комментарии
        Каждый словарь содержит:
        - comment_id: ID комментария
        - my_rate: Тип реакции (LIKE/DISLIKE)
    """
    author_dict = info.context.get("author") if info.context else None
    author_id = author_dict.get("id") if author_dict else None
    if not author_id:
        return []  # Возвращаем пустой список вместо словаря с ошибкой

    # Подзапрос для реакций текущего пользователя
    rated_query = (
        select(Reaction.id.label("comment_id"), Reaction.kind.label("my_rate"))
        .where(
            and_(
                Reaction.reply_to.in_(comments),
                Reaction.created_by == author_id,
                Reaction.deleted_at.is_(None),
                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
            )
        )
        .order_by(Reaction.shout, Reaction.created_at.desc())
        .distinct(Reaction.shout)
    )

    with local_session() as session:
        comments_result = session.execute(rated_query).all()
        return [{"comment_id": row.comment_id, "my_rate": row.my_rate} for row in comments_result]


@query.field("get_my_rates_shouts")
@login_required
async def get_my_rates_shouts(_, info, shouts):
    """
    Получение реакций пользователя на публикации
    """
    author_dict = info.context.get("author") if info.context else None
    author_id = author_dict.get("id") if author_dict else None

    if not author_id:
        return []

    with local_session() as session:
        try:
            stmt = (
                select(Reaction)
                .where(
                    and_(
                        Reaction.shout.in_(shouts),
                        Reaction.reply_to.is_(None),
                        Reaction.created_by == author_id,
                        Reaction.deleted_at.is_(None),
                        Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                    )
                )
                .order_by(Reaction.shout, Reaction.created_at.desc())
                .distinct(Reaction.shout)
            )
            result = session.execute(stmt).all()
            return [
                {
                    "shout_id": row[0].shout,  # Получаем shout_id из объекта Reaction
                    "my_rate": row[0].kind,  # Получаем kind (my_rate) из объекта Reaction
                }
                for row in result
            ]
        except Exception as e:
            logger.error(f"Error in get_my_rates_shouts: {e}")
            return []


@mutation.field("rate_author")
@login_required
async def rate_author(_, info, rated_slug, value):
    info.context["user_id"]
    rater_id = info.context.get("author", {}).get("id")
    with local_session() as session:
        rater_id = int(rater_id)
        rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
        if rater_id and rated_author:
            rating: AuthorRating = (
                session.query(AuthorRating)
                .filter(
                    and_(
                        AuthorRating.rater == rater_id,
                        AuthorRating.author == rated_author.id,
                    )
                )

@@ -35,13 +119,11 @@ async def rate_author(_, info, rated_slug, value):
                return {}
            else:
                try:
                    rating = AuthorRating(rater=rater_id, author=rated_author.id, plus=value > 0)
                    session.add(rating)
                    session.commit()
                except Exception as err:
                    return {"error": err}
    return {}

@@ -105,9 +187,7 @@ def count_author_shouts_rating(session, author_id) -> int:
def get_author_rating_old(session, author: Author):
    likes_count = (
        session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
    )
    dislikes_count = (
        session.query(AuthorRating)

@@ -129,7 +209,7 @@ def get_author_rating_shouts(session, author: Author) -> int:
                )
            ),
            0,
        ).label("shouts_rating")
    )
    .select_from(Reaction)
    .outerjoin(Shout, Shout.authors.any(id=author.id))

@@ -159,7 +239,7 @@ def get_author_rating_comments(session, author: Author) -> int:
                )
            ),
            0,
        ).label("shouts_rating")
    )
    .select_from(Reaction)
    .outerjoin(

@@ -167,9 +247,7 @@ def get_author_rating_comments(session, author: Author) -> int:
            and_(
                replied_comment.kind == ReactionKind.COMMENT.value,
                replied_comment.created_by == author.id,
                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                Reaction.reply_to == replied_comment.id,
                Reaction.deleted_at.is_(None),
            ),

@@ -184,9 +262,7 @@ def add_author_rating_columns(q, group_list):
    # old karma
    q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
    q = q.add_columns(func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating"))

    # by shouts rating
    shout_reaction = aliased(Reaction)

@@ -200,8 +276,9 @@ def add_author_rating_columns(q, group_list):
                        (shout_reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            ).label("shouts_rating"),
        )
        .select_from(shout_reaction)
        .outerjoin(Shout, Shout.authors.any(id=Author.id))

@@ -234,8 +311,9 @@ def add_author_rating_columns(q, group_list):
                        (reaction_2.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            ).label("comments_rating"),
        )
        .select_from(reaction_2)
        .outerjoin(

@@ -243,9 +321,7 @@ def add_author_rating_columns(q, group_list):
            and_(
                replied_comment.kind == ReactionKind.COMMENT.value,
                replied_comment.created_by == Author.id,
                reaction_2.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                reaction_2.reply_to == replied_comment.id,
                reaction_2.deleted_at.is_(None),
            ),
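Both get_my_rates_* resolvers use the same trick: order by (shout, created_at desc) and de-duplicate on shout, which keeps only the newest LIKE/DISLIKE per publication (DISTINCT ON semantics on PostgreSQL). A pure-Python sketch of that selection with made-up rows:

# rows: (shout_id, created_at, kind); the newest reaction should win per shout
rows = [
    (1, 100, "LIKE"),
    (1, 250, "DISLIKE"),
    (2, 90, "LIKE"),
]
latest_per_shout = {}
for shout_id, created_at, kind in sorted(rows, key=lambda r: r[1], reverse=True):
    latest_per_shout.setdefault(shout_id, {"shout_id": shout_id, "my_rate": kind})
print(list(latest_per_shout.values()))  # [{'shout_id': 1, 'my_rate': 'DISLIKE'}, {'shout_id': 2, 'my_rate': 'LIKE'}]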

File diff suppressed because it is too large


@@ -1,77 +1,316 @@
import orjson
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, nulls_last, text
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import asc, case, desc, func, select

from orm.author import Author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.db import json_array_builder, json_builder, local_session
from services.schema import query
from services.search import search_text
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger
def apply_options(q, options, reactions_created_by=0):
    """
    Применяет опции фильтрации и сортировки
    [опционально] выбирая те публикации, на которые есть реакции/комментарии от указанного автора

    :param q: Исходный запрос.
    :param options: Опции фильтрации и сортировки.
    :param reactions_created_by: Идентификатор автора.
    :return: Запрос с примененными опциями.
    """
    filters = options.get("filters")
    if isinstance(filters, dict):
        q = apply_filters(q, filters)
    if reactions_created_by:
        q = q.join(Reaction, Reaction.shout == Shout.id)
        q = q.filter(Reaction.created_by == reactions_created_by)
        if "commented" in filters:
            q = q.filter(Reaction.body.is_not(None))
    q = apply_sorting(q, options)
    limit = options.get("limit", 10)
    offset = options.get("offset", 0)
    return q, limit, offset
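apply_options expects a single options dict whose keys are consumed by apply_filters and apply_sorting below. A sample payload with illustrative values:

options = {
    "filters": {
        "layouts": ["audio", "video"],
        "featured": True,
        "author": "discours",
        "topic": "culture",
        "after": 1234567,  # unixtime
    },
    "order_by": "comments_count",  # or "rating" / "last_commented_at"
    "order_by_desc": True,
    "limit": 50,
    "offset": 0,
}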
def has_field(info, fieldname: str) -> bool:
"""
Проверяет, запрошено ли поле :fieldname: в GraphQL запросе
:param info: Информация о контексте GraphQL
:param fieldname: Имя запрашиваемого поля
:return: True, если поле запрошено, False в противном случае
"""
field_node = info.field_nodes[0]
for selection in field_node.selection_set.selections:
if hasattr(selection, "name") and selection.name.value == fieldname:
return True
return False
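has_field above only touches info.field_nodes[0].selection_set.selections, so its behaviour can be exercised with a toy stand-in for GraphQLResolveInfo (the real object comes from graphql-core):

from types import SimpleNamespace


def has_field(info, fieldname: str) -> bool:
    field_node = info.field_nodes[0]
    for selection in field_node.selection_set.selections:
        if hasattr(selection, "name") and selection.name.value == fieldname:
            return True
    return False


# toy resolve info requesting only "stat" and "authors"
selections = [SimpleNamespace(name=SimpleNamespace(value=v)) for v in ("stat", "authors")]
info = SimpleNamespace(field_nodes=[SimpleNamespace(selection_set=SimpleNamespace(selections=selections))])
print(has_field(info, "stat"), has_field(info, "main_topic"))  # True False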
def query_with_stat(info):
"""
:param info: Информация о контексте GraphQL - для получения id авторизованного пользователя
:return: Запрос с подзапросами статистики.
Добавляет подзапрос статистики
"""
q = select(Shout).filter(
and_(
Shout.published_at.is_not(None), # Проверяем published_at
Shout.deleted_at.is_(None), # Проверяем deleted_at
)
) )
# Главный автор
main_author = aliased(Author)
q = q.join(main_author, main_author.id == Shout.created_by)
q = q.add_columns(
json_builder(
"id",
main_author.id,
"name",
main_author.name,
"slug",
main_author.slug,
"pic",
main_author.pic,
"created_at",
main_author.created_at,
).label("main_author")
)
def filter_my(info, session, q): if has_field(info, "main_topic"):
reader_id = None main_topic_join = aliased(ShoutTopic)
user_id = None main_topic = aliased(Topic)
if isinstance(info.context, dict): q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
user_id = info.context.get('user_id') q = q.join(main_topic, main_topic.id == main_topic_join.topic)
if user_id: q = q.add_columns(
reader = session.query(Author).filter(Author.user == user_id).first() json_builder(
if reader: "id", main_topic.id, "title", main_topic.title, "slug", main_topic.slug, "is_main", main_topic_join.main
reader_followed_authors = select(AuthorFollower.author).where( ).label("main_topic")
AuthorFollower.follower == reader.id )
)
reader_followed_topics = select(TopicFollower.topic).where(
TopicFollower.follower == reader.id
)
subquery = ( if has_field(info, "authors"):
select(Shout.id) authors_subquery = (
.where(Shout.id == ShoutAuthor.shout) select(
.where(Shout.id == ShoutTopic.shout) ShoutAuthor.shout,
.where( json_array_builder(
(ShoutAuthor.author.in_(reader_followed_authors)) json_builder(
| (ShoutTopic.topic.in_(reader_followed_topics)) "id",
Author.id,
"name",
Author.name,
"slug",
Author.slug,
"pic",
Author.pic,
"caption",
ShoutAuthor.caption,
"created_at",
Author.created_at,
)
).label("authors"),
)
.outerjoin(Author, ShoutAuthor.author == Author.id)
.where(ShoutAuthor.shout == Shout.id)
.group_by(ShoutAuthor.shout)
.subquery()
)
q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
q = q.add_columns(authors_subquery.c.authors)
if has_field(info, "topics"):
topics_subquery = (
select(
ShoutTopic.shout,
json_array_builder(
json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
).label("topics"),
)
.outerjoin(Topic, ShoutTopic.topic == Topic.id)
.where(ShoutTopic.shout == Shout.id)
.group_by(ShoutTopic.shout)
.subquery()
)
q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
q = q.add_columns(topics_subquery.c.topics)
if has_field(info, "stat"):
stats_subquery = (
select(
Reaction.shout,
func.count(func.distinct(Reaction.id))
.filter(Reaction.kind == ReactionKind.COMMENT.value)
.label("comments_count"),
func.sum(
case(
(Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
) )
.filter(Reaction.reply_to.is_(None))
.label("rating"),
func.max(Reaction.created_at)
.filter(Reaction.kind == ReactionKind.COMMENT.value)
.label("last_commented_at"),
) )
q = q.filter(Shout.id.in_(subquery)) .where(Reaction.deleted_at.is_(None))
reader_id = reader.id .group_by(Reaction.shout)
return q, reader_id .subquery()
)
q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
q = q.add_columns(
json_builder(
"comments_count",
func.coalesce(stats_subquery.c.comments_count, 0),
"rating",
func.coalesce(stats_subquery.c.rating, 0),
"last_commented_at",
func.coalesce(stats_subquery.c.last_commented_at, 0),
).label("stat")
)
return q
def apply_filters(q, filters, author_id=None): def get_shouts_with_links(info, q, limit=20, offset=0):
"""
получение публикаций с применением пагинации
"""
shouts = []
try:
# logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
q = q.limit(limit).offset(offset)
with local_session() as session:
shouts_result = session.execute(q).all()
# logger.info(f"Got {len(shouts_result) if shouts_result else 0} shouts from query")
if not shouts_result:
logger.warning("No shouts found in query result")
return []
for idx, row in enumerate(shouts_result):
try:
shout = None
if hasattr(row, "Shout"):
shout = row.Shout
# logger.debug(f"Processing shout#{shout.id} at index {idx}")
if shout:
shout_id = int(f"{shout.id}")
shout_dict = shout.dict()
if has_field(info, "created_by") and shout_dict.get("created_by"):
main_author_id = shout_dict.get("created_by")
a = session.query(Author).filter(Author.id == main_author_id).first()
shout_dict["created_by"] = {
"id": main_author_id,
"name": a.name,
"slug": a.slug,
"pic": a.pic,
}
if has_field(info, "stat"):
stat = {}
if isinstance(row.stat, str):
stat = orjson.loads(row.stat)
elif isinstance(row.stat, dict):
stat = row.stat
viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
shout_dict["stat"] = {**stat, "viewed": viewed, "commented": stat.get("comments_count", 0)}
# Обработка main_topic и topics
topics = None
if has_field(info, "topics") and hasattr(row, "topics"):
topics = orjson.loads(row.topics) if isinstance(row.topics, str) else row.topics
# logger.debug(f"Shout#{shout_id} topics: {topics}")
shout_dict["topics"] = topics
if has_field(info, "main_topic"):
main_topic = None
if hasattr(row, "main_topic"):
# logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}")
main_topic = (
orjson.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
)
# logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}")
if not main_topic and topics and len(topics) > 0:
# logger.info(f"No main_topic found for shout#{shout_id}, using first topic from list")
main_topic = {
"id": topics[0]["id"],
"title": topics[0]["title"],
"slug": topics[0]["slug"],
"is_main": True,
}
elif not main_topic:
logger.warning(f"No main_topic and no topics found for shout#{shout_id}")
main_topic = {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}
shout_dict["main_topic"] = main_topic
# logger.debug(f"Final main_topic for shout#{shout_id}: {main_topic}")
if has_field(info, "authors") and hasattr(row, "authors"):
shout_dict["authors"] = (
orjson.loads(row.authors) if isinstance(row.authors, str) else row.authors
)
if has_field(info, "media") and shout.media:
# Обработка поля media
media_data = shout.media
if isinstance(media_data, str):
try:
media_data = orjson.loads(media_data)
except orjson.JSONDecodeError:
media_data = []
shout_dict["media"] = [media_data] if isinstance(media_data, dict) else media_data
shouts.append(shout_dict)
except Exception as row_error:
logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
continue
except Exception as e:
logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
raise
finally:
logger.info(f"Returning {len(shouts)} shouts from get_shouts_with_links")
return shouts
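The stat JSON coming back from query_with_stat only carries comments_count, rating and last_commented_at; viewed is mixed in from ViewedStorage and commented is aliased from comments_count when the dict is assembled. A standalone sketch of that merge with sample values (assumes orjson is installed):

import orjson

row_stat = orjson.loads(b'{"comments_count": 4, "rating": 7, "last_commented_at": 1710000000}')
viewed = 123  # stand-in for ViewedStorage.get_shout(shout_id=...) or 0
shout_stat = {**row_stat, "viewed": viewed, "commented": row_stat.get("comments_count", 0)}
print(shout_stat)  # {'comments_count': 4, 'rating': 7, 'last_commented_at': 1710000000, 'viewed': 123, 'commented': 4}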
def apply_filters(q, filters):
"""
Применение общих фильтров к запросу.
:param q: Исходный запрос.
:param filters: Словарь фильтров.
:return: Запрос с примененными фильтрами.
"""
if isinstance(filters, dict): if isinstance(filters, dict):
if filters.get('reacted'): if "featured" in filters:
q.join(Reaction, Reaction.created_by == author_id) featured_filter = filters.get("featured")
if featured_filter:
by_featured = filters.get('featured') q = q.filter(Shout.featured_at.is_not(None))
if by_featured: else:
q = q.filter(Shout.featured_at.is_not(None)) q = q.filter(Shout.featured_at.is_(None))
by_layouts = filters.get('layouts') by_layouts = filters.get("layouts")
if by_layouts: if by_layouts and isinstance(by_layouts, list):
q = q.filter(Shout.layout.in_(by_layouts)) q = q.filter(Shout.layout.in_(by_layouts))
by_author = filters.get('author') by_author = filters.get("author")
if by_author: if by_author:
q = q.filter(Shout.authors.any(slug=by_author)) q = q.filter(Shout.authors.any(slug=by_author))
by_topic = filters.get('topic') by_topic = filters.get("topic")
if by_topic: if by_topic:
q = q.filter(Shout.topics.any(slug=by_topic)) q = q.filter(Shout.topics.any(slug=by_topic))
by_after = filters.get('after') by_after = filters.get("after")
if by_after: if by_after:
ts = int(by_after) ts = int(by_after)
q = q.filter(Shout.created_at > ts) q = q.filter(Shout.created_at > ts)
@@ -79,333 +318,176 @@ def apply_filters(q, filters, author_id=None):
return q return q
@query.field('get_shout') @query.field("get_shout")
async def get_shout(_, info, slug: str): async def get_shout(_, info: GraphQLResolveInfo, slug="", shout_id=0):
with local_session() as session:
q = query_shouts()
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
q = q.filter(Shout.slug == slug)
q = q.group_by(Shout.id)
results = session.execute(q).first()
if results:
[
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] = results
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment,
}
for author_caption in (
session.query(ShoutAuthor)
.join(Shout)
.where(
and_(
Shout.slug == slug,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
):
for author in shout.authors:
if author.id == author_caption.author:
author.caption = author_caption.caption
main_topic = (
session.query(Topic.slug)
.join(
ShoutTopic,
and_(
ShoutTopic.topic == Topic.id,
ShoutTopic.shout == shout.id,
ShoutTopic.main.is_(True),
),
)
.first()
)
if main_topic:
shout.main_topic = main_topic[0]
return shout
@query.field('load_shouts_by')
async def load_shouts_by(_, _info, options):
""" """
:param options: { Получение публикации по slug или id.
filters: {
layouts: ['audio', 'video', ..],
reacted: True,
featured: True, // filter featured-only
author: 'discours',
topic: 'culture',
after: 1234567 // unixtime
}
offset: 0
limit: 50
order_by: 'created_at' | 'commented' | 'likes_stat'
order_by_desc: true
} :param _: Корневой объект запроса (не используется)
:return: Shout[] :param info: Информация о контексте GraphQL
:param slug: Уникальный идентификатор публикации
:param shout_id: ID публикации
:return: Данные публикации с включенной статистикой
""" """
try:
# Получаем базовый запрос с подзапросами статистики
q = query_with_stat(info)
# base # Применяем фильтр по slug или id
q = query_shouts() if slug:
q = q.where(Shout.slug == slug)
elif shout_id:
q = q.where(Shout.id == shout_id)
else:
return None
# stats # Получаем результат через get_shouts_with_stats с limit=1
aliased_reaction = aliased(Reaction) shouts = get_shouts_with_links(info, q, limit=1)
q = add_reaction_stat_columns(q, aliased_reaction)
# filters # Возвращаем первую (и единственную) публикацию, если она найдена
filters = options.get('filters', {}) return shouts[0] if shouts else None
q = apply_filters(q, filters)
# group except Exception as exc:
q = q.group_by(Shout.id) logger.error(f"Error in get_shout: {exc}", exc_info=True)
return None
# order
order_by = Shout.featured_at if filters.get('featured') else Shout.published_at
order_str = options.get('order_by')
if order_str in ['likes', 'shouts', 'followers', 'comments', 'last_comment']:
q = q.order_by(desc(text(f'{order_str}_stat')))
query_order_by = (
desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
)
q = q.order_by(nulls_last(query_order_by))
# limit offset
offset = options.get('offset', 0)
limit = options.get('limit', 10)
q = q.limit(limit).offset(offset)
shouts = []
with local_session() as session:
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in session.execute(q).unique():
main_topic = (
session.query(Topic.slug)
.join(
ShoutTopic,
and_(
ShoutTopic.topic == Topic.id,
ShoutTopic.shout == shout.id,
ShoutTopic.main.is_(True),
),
)
.first()
)
if main_topic:
shout.main_topic = main_topic[0]
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat) - int(dislikes_stat),
'last_comment': last_comment,
}
shouts.append(shout)
return shouts
@query.field('load_shouts_feed') def apply_sorting(q, options):
@login_required """
async def load_shouts_feed(_, info, options): Применение сортировки с сохранением порядка
shouts = [] """
with local_session() as session: order_str = options.get("order_by")
q = query_shouts() if order_str in ["rating", "comments_count", "last_commented_at"]:
query_order_by = desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
aliased_reaction = aliased(Reaction) q = q.distinct(text(order_str), Shout.id).order_by( # DISTINCT ON включает поле сортировки
q = add_reaction_stat_columns(q, aliased_reaction) nulls_last(query_order_by), Shout.id
# filters
filters = options.get('filters', {})
if filters:
q, reader_id = filter_my(info, session, q)
q = apply_filters(q, filters, reader_id)
# sort order
order_by = options.get(
'order_by',
Shout.featured_at if filters.get('featured') else Shout.published_at,
) )
else:
q = q.distinct(Shout.published_at, Shout.id).order_by(Shout.published_at.desc(), Shout.id)
query_order_by = ( return q
desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
)
# pagination
offset = options.get('offset', 0)
limit = options.get('limit', 10)
q = (
q.group_by(Shout.id)
.order_by(nulls_last(query_order_by))
.limit(limit)
.offset(offset)
)
# print(q.compile(compile_kwargs={"literal_binds": True}))
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in session.execute(q).unique():
main_topic = (
session.query(Topic.slug)
.join(
ShoutTopic,
and_(
ShoutTopic.topic == Topic.id,
ShoutTopic.shout == shout.id,
ShoutTopic.main.is_(True),
),
)
.first()
)
if main_topic:
shout.main_topic = main_topic[0]
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': likes_stat - dislikes_stat,
'last_comment': last_comment,
}
shouts.append(shout)
return shouts
@query.field('load_shouts_search') @query.field("load_shouts_by")
async def load_shouts_search(_, _info, text, limit=50, offset=0): async def load_shouts_by(_, info: GraphQLResolveInfo, options):
"""
Загрузка публикаций с фильтрацией, сортировкой и пагинацией.
:param _: Корневой объект запроса (не используется)
:param info: Информация о контексте GraphQL
:param options: Опции фильтрации и сортировки
:return: Список публикаций, удовлетворяющих критериям
"""
# Базовый запрос со статистикой
q = query_with_stat(info)
# Применяем остальные опции фильтрации
q, limit, offset = apply_options(q, options)
# Передача сформированного запроса в метод получения публикаций с учетом сортировки и пагинации
return get_shouts_with_links(info, q, limit, offset)
@query.field("load_shouts_search")
async def load_shouts_search(_, info, text, options):
"""
Поиск публикаций по тексту.
:param _: Корневой объект запроса (не используется)
:param info: Информация о контексте GraphQL
:param text: Строка поиска.
:param options: Опции фильтрации и сортировки.
:return: Список публикаций, найденных по тексту.
"""
limit = options.get("limit", 10)
offset = options.get("offset", 0)
if isinstance(text, str) and len(text) > 2: if isinstance(text, str) and len(text) > 2:
results = await search_text(text, limit, offset) results = await search_text(text, limit, offset)
logger.debug(results) scores = {}
return results hits_ids = []
for sr in results:
shout_id = sr.get("id")
if shout_id:
shout_id = str(shout_id)
scores[shout_id] = sr.get("score")
hits_ids.append(shout_id)
q = (
query_with_stat(info)
if has_field(info, "stat")
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
q = q.filter(Shout.id.in_(hits_ids))
q = apply_filters(q, options)
q = apply_sorting(q, options)
shouts = get_shouts_with_links(info, q, limit, offset)
for shout in shouts:
shout.score = scores[f"{shout.id}"]
shouts.sort(key=lambda x: x.score, reverse=True)
return shouts
    return []
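load_shouts_search keeps the relevance score keyed by the stringified shout id, attaches it to each loaded shout and re-sorts by it. The bookkeeping in isolation looks roughly like this (sample hits; the real scores come from search_text):

results = [{"id": 10, "score": 0.4}, {"id": 7, "score": 0.9}]  # sample search hits
scores = {str(sr["id"]): sr["score"] for sr in results if sr.get("id")}
hits_ids = list(scores.keys())

shouts = [{"id": 7}, {"id": 10}]  # stand-ins for loaded shout dicts
for shout in shouts:
    shout["score"] = scores[f"{shout['id']}"]
shouts.sort(key=lambda x: x["score"], reverse=True)
print(shouts)  # [{'id': 7, 'score': 0.9}, {'id': 10, 'score': 0.4}]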
@query.field('load_shouts_unrated') @query.field("load_shouts_unrated")
@login_required async def load_shouts_unrated(_, info, options):
async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0): """
q = query_shouts() Загрузка публикаций с менее чем 3 реакциями типа LIKE/DISLIKE
q = (
q.outerjoin( :param _: Корневой объект запроса (не используется)
Reaction, :param info: Информация о контексте GraphQL
and_( :param options: Опции фильтрации и сортировки.
Reaction.shout == Shout.id, :return: Список публикаций.
Reaction.replyTo.is_(None), """
Reaction.kind.in_( rated_shouts = (
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value] select(Reaction.shout)
),
),
)
.outerjoin(Author, Author.user == bindparam('user_id'))
.where( .where(
and_( and_(
Shout.deleted_at.is_(None), Reaction.deleted_at.is_(None), Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])
Shout.layout.is_not(None),
or_(Author.id.is_(None), Reaction.created_by != Author.id),
) )
) )
.group_by(Reaction.shout)
.having(func.count("*") >= 3)
.scalar_subquery()
) )
# 3 or fewer votes is 0, 1, 2 or 3 votes (null, reaction id1, reaction id2, reaction id3) q = select(Shout).where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
q = q.having(func.count(distinct(Reaction.id)) <= 4) q = q.join(Author, Author.id == Shout.created_by)
q = q.add_columns(
aliased_reaction = aliased(Reaction) json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label("main_author")
q = add_reaction_stat_columns(q, aliased_reaction)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset)
user_id = info.context.get('user_id') if isinstance(info.context, dict) else None
if user_id:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
return await get_shouts_from_query(q, author.id)
else:
return await get_shouts_from_query(q)
async def get_shouts_from_query(q, author_id=None):
shouts = []
with local_session() as session:
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in session.execute(q, {'author_id': author_id}).unique():
shouts.append(shout)
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout_slug=shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment,
}
return shouts
@query.field('load_shouts_random_top')
async def load_shouts_random_top(_, _info, options):
"""
:param _
:param _info: GraphQLInfoContext
:param options: {
filters: {
layouts: ['music']
after: 13245678
}
random_limit: 100
limit: 50
offset: 0
}
:return: Shout[]
"""
aliased_reaction = aliased(Reaction)
subquery = (
select(Shout.id)
.outerjoin(aliased_reaction)
.where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
) )
q = q.join(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
q = q.join(Topic, Topic.id == ShoutTopic.topic)
q = q.add_columns(json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic"))
q = q.where(Shout.id.not_in(rated_shouts))
q = q.order_by(func.random())
subquery = apply_filters(subquery, options.get('filters', {})) limit = options.get("limit", 5)
offset = options.get("offset", 0)
return get_shouts_with_links(info, q, limit, offset)
@query.field("load_shouts_random_top")
async def load_shouts_random_top(_, info, options):
"""
Загрузка случайных публикаций, упорядоченных по топовым реакциям.
:param _info: Информация о контексте GraphQL.
:param options: Опции фильтрации и сортировки.
:return: Список случайных публикаций.
"""
aliased_reaction = aliased(Reaction)
subquery = select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))
filters = options.get("filters")
if isinstance(filters, dict):
subquery = apply_filters(subquery, filters)
subquery = subquery.group_by(Shout.id).order_by( subquery = subquery.group_by(Shout.id).order_by(
desc( desc(
func.sum( func.sum(
case( case(
# do not count comments' reactions # не учитывать реакции на комментарии
(aliased_reaction.reply_to.is_not(None), 0), (aliased_reaction.reply_to.is_not(None), 0),
(aliased_reaction.kind == ReactionKind.LIKE.value, 1), (aliased_reaction.kind == ReactionKind.LIKE.value, 1),
(aliased_reaction.kind == ReactionKind.DISLIKE.value, -1), (aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
@@ -415,58 +497,10 @@ async def load_shouts_random_top(_, _info, options):
) )
) )
random_limit = options.get('random_limit', 100) random_limit = options.get("random_limit", 100)
if random_limit: subquery = subquery.limit(random_limit)
subquery = subquery.limit(random_limit) q = query_with_stat(info)
q = q.filter(Shout.id.in_(subquery))
q = ( q = q.order_by(func.random())
select(Shout) limit = options.get("limit", 10)
.options(joinedload(Shout.authors), joinedload(Shout.topics)) return get_shouts_with_links(info, q, limit)
.where(Shout.id.in_(subquery))
)
q = add_reaction_stat_columns(q, aliased_reaction)
limit = options.get('limit', 10)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
shouts = await get_shouts_from_query(q)
return shouts
@query.field('load_shouts_random_topic')
async def load_shouts_random_topic(_, info, limit: int = 10):
[topic] = get_topics_random(None, None, 1)
if topic:
shouts = fetch_shouts_by_topic(topic, limit)
if shouts:
return {'topic': topic, 'shouts': shouts}
return {
'error': 'failed to get random topic after few retries',
'shouts': [],
'topic': {},
}
def fetch_shouts_by_topic(topic, limit):
q = (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(
and_(
Shout.deleted_at.is_(None),
Shout.featured_at.is_not(None),
Shout.topics.any(slug=topic.slug),
)
)
)
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
q = q.group_by(Shout.id).order_by(desc(Shout.created_at)).limit(limit)
shouts = get_shouts_from_query(q)
return shouts


@@ -1,45 +1,167 @@
import asyncio

from sqlalchemy import and_, distinct, func, join, select
from sqlalchemy.orm import aliased

from cache.cache import cache_author
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger
def add_topic_stat_columns(q): def add_topic_stat_columns(q):
aliased_shout_topic = aliased(ShoutTopic) """
aliased_authors = aliased(ShoutAuthor) Добавляет статистические колонки к запросу тем.
aliased_followers = aliased(TopicFollower)
aliased_shout = aliased(Shout)
# shouts :param q: SQL-запрос для получения тем.
q = q.outerjoin(aliased_shout_topic, aliased_shout_topic.topic == Topic.id) :return: Запрос с добавленными колонками статистики.
q = q.add_columns(func.count(distinct(aliased_shout_topic.shout)).label('shouts_stat')) """
# Создаем алиасы для предотвращения конфликтов имен
aliased_shout = aliased(ShoutTopic)
# authors # Создаем новый объект запроса для тем
q = q.outerjoin(aliased_shout, and_( new_q = select(Topic)
aliased_shout.id == aliased_shout_topic.shout,
aliased_shout.published_at.is_not(None),
aliased_shout.deleted_at.is_(None)
))
q = q.outerjoin(aliased_authors, aliased_shout.authors.any(id=aliased_authors.id))
q = q.add_columns(func.count(distinct(aliased_authors.author)).label('authors_stat'))
# followers # Применяем необходимые фильтры и добавляем колонки статистики
q = q.outerjoin(aliased_followers, aliased_followers.topic == Topic.id) new_q = (
q = q.add_columns( new_q.join(
func.count(distinct(aliased_followers.follower)).label('followers_stat') aliased_shout,
aliased_shout.topic == Topic.id,
)
.join(
Shout,
and_(
aliased_shout.shout == Shout.id,
Shout.deleted_at.is_(None),
),
)
.add_columns(
func.count(distinct(aliased_shout.shout)).label("shouts_stat")
) # Подсчет уникальных публикаций для темы
) )
# comments aliased_follower = aliased(TopicFollower)
# Добавляем количество подписчиков темы
new_q = new_q.outerjoin(aliased_follower, aliased_follower.topic == Topic.id).add_columns(
func.count(distinct(aliased_follower.follower)).label("followers_stat")
)
# Группировка по идентификатору темы
new_q = new_q.group_by(Topic.id)
return new_q
def add_author_stat_columns(q):
"""
Добавляет статистические колонки к запросу авторов.
:param q: SQL-запрос для получения авторов.
:return: Запрос с добавленными колонками статистики.
"""
# Подзапрос для подсчета публикаций
shouts_subq = (
select(func.count(distinct(Shout.id)))
.select_from(ShoutAuthor)
.join(Shout, and_(Shout.id == ShoutAuthor.shout, Shout.deleted_at.is_(None)))
.where(ShoutAuthor.author == Author.id)
.scalar_subquery()
)
# Подзапрос для подсчета подписчиков
followers_subq = (
select(func.count(distinct(AuthorFollower.follower)))
.where(AuthorFollower.author == Author.id)
.scalar_subquery()
)
# Основной запрос
q = (
q.select_from(Author)
.add_columns(shouts_subq.label("shouts_stat"), followers_subq.label("followers_stat"))
.group_by(Author.id)
)
return q
def get_topic_shouts_stat(topic_id: int) -> int:
"""
Получает количество опубликованных постов для темы
"""
q = (
select(func.count(distinct(ShoutTopic.shout)))
.select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
.filter(
and_(
ShoutTopic.topic == topic_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
def get_topic_authors_stat(topic_id: int) -> int:
"""
Получает количество уникальных авторов для указанной темы.
:param topic_id: Идентификатор темы.
:return: Количество уникальных авторов, связанных с темой.
"""
count_query = (
select(func.count(distinct(ShoutAuthor.author)))
.select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.filter(
and_(
ShoutTopic.topic == topic_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
)
# Выполнение запроса и получение результата
with local_session() as session:
result = session.execute(count_query).first()
return result[0] if result else 0
def get_topic_followers_stat(topic_id: int) -> int:
"""
Получает количество подписчиков для указанной темы.
:param topic_id: Идентификатор темы.
:return: Количество уникальных подписчиков темы.
"""
aliased_followers = aliased(TopicFollower)
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.topic == topic_id)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
def get_topic_comments_stat(topic_id: int) -> int:
"""
Получает количество комментариев для всех публикаций в указанной теме.
:param topic_id: Идентификатор темы.
:return: Общее количество комментариев к публикациям темы.
"""
# Подзапрос для получения количества комментариев для каждой публикации
sub_comments = ( sub_comments = (
select( select(
Shout.id.label('shout_id'), Shout.id.label("shout_id"),
func.coalesce(func.count(Reaction.id)).label('comments_count') func.coalesce(func.count(Reaction.id), 0).label("comments_count"),
) )
.join(ShoutTopic, ShoutTopic.shout == Shout.id) .join(ShoutTopic, ShoutTopic.shout == Shout.id)
.join(Topic, ShoutTopic.topic == Topic.id) .join(Topic, ShoutTopic.topic == Topic.id)
@@ -54,41 +176,77 @@ def add_topic_stat_columns(q):
.group_by(Shout.id) .group_by(Shout.id)
.subquery() .subquery()
) )
q = q.outerjoin(sub_comments, aliased_shout_topic.shout == sub_comments.c.shout_id) # Запрос для суммирования количества комментариев по теме
q = q.add_columns(func.coalesce(sub_comments.c.comments_count, 0).label('comments_stat')) q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
group_list = [Topic.id, sub_comments.c.comments_count] with local_session() as session:
result = session.execute(q).first()
q = q.group_by(*group_list) return result[0] if result else 0
logger.debug(q)
return q
def add_author_stat_columns(q): def get_author_shouts_stat(author_id: int) -> int:
"""
Получает количество опубликованных постов для автора
"""
aliased_shout_author = aliased(ShoutAuthor) aliased_shout_author = aliased(ShoutAuthor)
aliased_authors = aliased(AuthorFollower) aliased_shout = aliased(Shout)
aliased_followers = aliased(AuthorFollower)
q = q.outerjoin(aliased_shout_author, aliased_shout_author.author == Author.id) q = (
q = q.add_columns( select(func.count(distinct(aliased_shout.id)))
func.count(distinct(aliased_shout_author.shout)).label('shouts_stat') .select_from(aliased_shout)
) .join(aliased_shout_author, aliased_shout.id == aliased_shout_author.shout)
.filter(
q = q.outerjoin(aliased_authors, aliased_authors.follower == Author.id) and_(
q = q.add_columns( aliased_shout_author.author == author_id,
func.count(distinct(aliased_authors.author)).label('authors_stat') aliased_shout.published_at.is_not(None),
) aliased_shout.deleted_at.is_(None), # Добавляем проверку на удаление
)
q = q.outerjoin(aliased_followers, aliased_followers.author == Author.id)
q = q.add_columns(
func.count(distinct(aliased_followers.follower)).label('followers_stat')
)
# Create a subquery for comments count
sub_comments = (
select(
Author.id, func.coalesce(func.count(Reaction.id)).label('comments_count')
) )
)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
def get_author_authors_stat(author_id: int) -> int:
"""
Получает количество авторов, на которых подписан указанный автор.
:param author_id: Идентификатор автора.
:return: Количество уникальных авторов, на которых подписан автор.
"""
aliased_authors = aliased(AuthorFollower)
q = select(func.count(distinct(aliased_authors.author))).filter(
and_(
aliased_authors.follower == author_id,
aliased_authors.author != author_id,
)
)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
def get_author_followers_stat(author_id: int) -> int:
"""
Получает количество подписчиков для указанного автора.
:param author_id: Идентификатор автора.
:return: Количество уникальных подписчиков автора.
"""
aliased_followers = aliased(AuthorFollower)
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
def get_author_comments_stat(author_id):
q = (
select(func.coalesce(func.count(Reaction.id), 0).label("comments_count"))
.select_from(Author)
.outerjoin( .outerjoin(
Reaction, Reaction,
and_( and_(
@@ -97,68 +255,98 @@ def add_author_stat_columns(q):
Reaction.deleted_at.is_(None), Reaction.deleted_at.is_(None),
), ),
) )
.where(Author.id == author_id)
.group_by(Author.id) .group_by(Author.id)
.subquery()
) )
q = q.outerjoin(sub_comments, Author.id == sub_comments.c.id) with local_session() as session:
q = q.add_columns(sub_comments.c.comments_count) result = session.execute(q).first()
group_list = [Author.id, sub_comments.c.comments_count] return result.comments_count if result else 0
q = q.group_by(*group_list)
return q
def get_with_stat(q): def get_with_stat(q):
"""
Выполняет запрос с добавлением статистики.
:param q: SQL-запрос для выполнения.
:return: Список объектов с добавленной статистикой.
"""
records = [] records = []
try: try:
is_author = f'{q}'.lower().startswith('select author')
is_topic = f'{q}'.lower().startswith('select topic')
if is_author:
q = add_author_stat_columns(q)
elif is_topic:
q = add_topic_stat_columns(q)
with local_session() as session: with local_session() as session:
# Определяем, является ли запрос запросом авторов
author_prefixes = ("select author", "select * from author")
is_author = f"{q}".lower().startswith(author_prefixes)
# Добавляем колонки статистики в запрос
q = add_author_stat_columns(q) if is_author else add_topic_stat_columns(q)
# Выполняем запрос
result = session.execute(q) result = session.execute(q)
for cols in result: for cols in result:
entity = cols[0] entity = cols[0]
stat = dict() stat = dict()
stat['shouts'] = cols[1] stat["shouts"] = cols[1] # Статистика по публикациям
stat['authors'] = cols[2] stat["followers"] = cols[2] # Статистика по подписчикам
stat['followers'] = cols[3]
if is_author: if is_author:
stat['comments'] = cols[4] stat["authors"] = get_author_authors_stat(entity.id) # Статистика по подпискам на авторов
stat["comments"] = get_author_comments_stat(entity.id) # Статистика по комментариям
else:
stat["authors"] = get_topic_authors_stat(entity.id) # Статистика по авторам темы
entity.stat = stat entity.stat = stat
records.append(entity) records.append(entity)
except Exception as exc: except Exception as exc:
import traceback import traceback
logger.error(exc, traceback.format_exc())
raise Exception(exc) logger.debug(q)
traceback.print_exc()
logger.error(exc, exc_info=True)
return records return records
def author_follows_authors(author_id: int):
    """
    Get the list of authors the given author follows.

    :param author_id: author identifier.
    :return: list of authors with attached stats.
    """
    af = aliased(AuthorFollower, name="af")
    author_follows_authors_query = (
        select(Author).select_from(join(Author, af, Author.id == af.author)).where(af.follower == author_id)
    )
    return get_with_stat(author_follows_authors_query)


def author_follows_topics(author_id: int):
    """
    Get the list of topics the given author follows.

    :param author_id: author identifier.
    :return: list of topics with attached stats.
    """
    author_follows_topics_query = (
        select(Topic)
        .select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
        .where(TopicFollower.follower == author_id)
    )
    return get_with_stat(author_follows_topics_query)
def update_author_stat(author_id: int):
    """
    Recompute the statistics for the given author and store them in the cache.

    :param author_id: author identifier.
    """
    author_query = select(Author).where(Author.id == author_id)
    try:
        result = get_with_stat(author_query)
        if result:
            author_with_stat = result[0]
            if isinstance(author_with_stat, Author):
                author_dict = author_with_stat.dict()
                # Cache the author data asynchronously
                asyncio.create_task(cache_author(author_dict))
    except Exception as exc:
        logger.error(exc, exc_info=True)

View File

@@ -1,119 +1,348 @@
from sqlalchemy import desc, select, text

from cache.cache import (
    cache_topic,
    cached_query,
    get_cached_topic_authors,
    get_cached_topic_by_slug,
    get_cached_topic_followers,
    invalidate_cache_by_prefix,
)
from orm.author import Author
from orm.topic import Topic
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.redis import redis
from services.schema import mutation, query
from utils.logger import root_logger as logger
# Helper for fetching all topics without stats
async def get_all_topics():
    """
    Get every topic without statistics.
    Used when the full topic list is needed without any extra information.

    Returns:
        list: all topics without stats
    """
    cache_key = "topics:all:basic"

    # Fetch all topics from the database
    async def fetch_all_topics():
        logger.debug("Fetching the full topic list from the DB and caching the result")
        with local_session() as session:
            # Query the basic topic information
            topics_query = select(Topic)
            topics = session.execute(topics_query).scalars().all()
            # Convert topics to dicts
            return [topic.dict() for topic in topics]

    # Use the shared query-caching helper
    return await cached_query(cache_key, fetch_all_topics)
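Note: cached_query is imported from cache.cache and is not shown in this changeset; the sketch below is only a guess at its shape (key handling, TTL and serialization are assumptions).

# Hypothetical sketch of a redis-backed query cache; the real cache.cache.cached_query may differ.
import orjson
from services.redis import redis

async def cached_query(cache_key: str, fetch_func, ttl: int = 300):
    cached = await redis.execute("GET", cache_key)
    if cached:
        return orjson.loads(cached)  # cache hit
    result = await fetch_func()  # cache miss: run the real query
    await redis.execute("SET", cache_key, orjson.dumps(result), "EX", ttl)
    return result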
# Helper for fetching topics with stats, paginated
async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None):
    """
    Get topics with statistics, paginated.

    Args:
        limit: maximum number of topics to return
        offset: pagination offset
        community_id: optional community id to filter by
        by: optional sorting parameter

    Returns:
        list: topics with their statistics
    """
    # Build the cache key
    cache_key = f"topics:stats:limit={limit}:offset={offset}:community_id={community_id}"
# Функция для получения тем из БД
async def fetch_topics_with_stats():
logger.debug(f"Выполняем запрос на получение тем со статистикой: limit={limit}, offset={offset}")
with local_session() as session:
# Базовый запрос для получения тем
base_query = select(Topic)
# Добавляем фильтр по сообществу, если указан
if community_id:
base_query = base_query.where(Topic.community == community_id)
# Применяем сортировку на основе параметра by
if by:
if isinstance(by, dict):
# Обработка словаря параметров сортировки
for field, direction in by.items():
column = getattr(Topic, field, None)
if column:
if direction.lower() == "desc":
base_query = base_query.order_by(desc(column))
else:
base_query = base_query.order_by(column)
elif by == "popular":
# Сортировка по популярности (количеству публикаций)
# Примечание: это требует дополнительного запроса или подзапроса
base_query = base_query.order_by(
desc(Topic.id)
) # Временно, нужно заменить на proper implementation
else:
# По умолчанию сортируем по ID в обратном порядке
base_query = base_query.order_by(desc(Topic.id))
else:
# По умолчанию сортируем по ID в обратном порядке
base_query = base_query.order_by(desc(Topic.id))
# Применяем лимит и смещение
base_query = base_query.limit(limit).offset(offset)
# Получаем темы
topics = session.execute(base_query).scalars().all()
topic_ids = [topic.id for topic in topics]
if not topic_ids:
return []
# Запрос на получение статистики по публикациям для выбранных тем
shouts_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL
WHERE st.topic IN ({",".join(map(str, topic_ids))})
GROUP BY st.topic
"""
shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}
# Запрос на получение статистики по подписчикам для выбранных тем
followers_stats_query = f"""
SELECT topic, COUNT(DISTINCT follower) as followers_count
FROM topic_followers
WHERE topic IN ({",".join(map(str, topic_ids))})
GROUP BY topic
"""
followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
# Формируем результат с добавлением статистики
result = []
for topic in topics:
topic_dict = topic.dict()
topic_dict["stat"] = {
"shouts": shouts_stats.get(topic.id, 0),
"followers": followers_stats.get(topic.id, 0),
}
result.append(topic_dict)
# Кешируем каждую тему отдельно для использования в других функциях
await cache_topic(topic_dict)
return result
# Используем универсальную функцию для кеширования запросов
return await cached_query(cache_key, fetch_topics_with_stats)
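The by argument accepts either a dict of field/direction pairs or the string "popular"; a couple of illustrative calls from an async context (values are examples only):

topics = await get_topics_with_stats(limit=20, offset=0, by={"title": "asc"})
popular = await get_topics_with_stats(limit=20, by="popular")  # currently falls back to ordering by id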
# Topic cache invalidation
async def invalidate_topics_cache(topic_id=None):
    """
    Invalidate topic caches when data changes.

    Args:
        topic_id: optional topic id for targeted invalidation.
                  If omitted, all topic caches are invalidated.
    """
if topic_id:
# Точечная инвалидация конкретной темы
logger.debug(f"Инвалидация кеша для темы #{topic_id}")
specific_keys = [
f"topic:id:{topic_id}",
f"topic:authors:{topic_id}",
f"topic:followers:{topic_id}",
f"topic_shouts_{topic_id}",
]
# Получаем slug темы, если есть
with local_session() as session:
topic = session.query(Topic).filter(Topic.id == topic_id).first()
if topic and topic.slug:
specific_keys.append(f"topic:slug:{topic.slug}")
# Удаляем конкретные ключи
for key in specific_keys:
try:
await redis.execute("DEL", key)
logger.debug(f"Удален ключ кеша {key}")
except Exception as e:
logger.error(f"Ошибка при удалении ключа {key}: {e}")
# Также ищем и удаляем ключи коллекций, содержащих данные об этой теме
collection_keys = await redis.execute("KEYS", "topics:stats:*")
if collection_keys:
await redis.execute("DEL", *collection_keys)
logger.debug(f"Удалено {len(collection_keys)} коллекционных ключей тем")
else:
# Общая инвалидация всех кешей тем
logger.debug("Полная инвалидация кеша тем")
await invalidate_cache_by_prefix("topics")
# Запрос на получение всех тем
@query.field("get_topics_all")
async def get_topics_all(_, _info):
"""
Получает список всех тем без статистики.
Returns:
list: Список всех тем
"""
return await get_all_topics()
# Запрос на получение тем с пагинацией и статистикой
@query.field("get_topics_paginated")
async def get_topics_paginated(_, _info, limit=100, offset=0, by=None):
"""
Получает список тем с пагинацией и статистикой.
Args:
limit: Максимальное количество возвращаемых тем
offset: Смещение для пагинации
by: Опциональные параметры сортировки
Returns:
list: Список тем с их статистикой
"""
return await get_topics_with_stats(limit, offset, None, by)
# Запрос на получение тем по сообществу
@query.field("get_topics_by_community")
async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0, by=None):
"""
Получает список тем, принадлежащих указанному сообществу с пагинацией и статистикой.
Args:
community_id: ID сообщества
limit: Максимальное количество возвращаемых тем
offset: Смещение для пагинации
by: Опциональные параметры сортировки
Returns:
list: Список тем с их статистикой
"""
return await get_topics_with_stats(limit, offset, community_id, by)
# Query: topics by author
@query.field("get_topics_by_author")
async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
    topics_by_author_query = select(Topic)
    if author_id:
        topics_by_author_query = topics_by_author_query.join(Author).where(Author.id == author_id)
    elif slug:
        topics_by_author_query = topics_by_author_query.join(Author).where(Author.slug == slug)
    elif user:
        topics_by_author_query = topics_by_author_query.join(Author).where(Author.user == user)
    return get_with_stat(topics_by_author_query)


# Query: a single topic by its slug
@query.field("get_topic")
async def get_topic(_, _info, slug: str):
    topic = await get_cached_topic_by_slug(slug, get_with_stat)
    if topic:
        return topic
# Mutation: create a new topic
@mutation.field("create_topic")
@login_required
async def create_topic(_, _info, topic_input):
    with local_session() as session:
        # TODO: check the user's permission to create a topic for the given community
        # and that the actor is allowed to create it
        new_topic = Topic(**topic_input)
        session.add(new_topic)
        session.commit()

        # Invalidate the cache for all topics
        await invalidate_topics_cache()

        return {"topic": new_topic}


# Mutation: update a topic
@mutation.field("update_topic")
@login_required
async def update_topic(_, _info, topic_input):
    slug = topic_input["slug"]
    with local_session() as session:
        topic = session.query(Topic).filter(Topic.slug == slug).first()
        if not topic:
            return {"error": "topic not found"}
        else:
            old_slug = topic.slug
            Topic.update(topic, topic_input)
            session.add(topic)
            session.commit()

            # Invalidate the cache for this specific topic only
            await invalidate_topics_cache(topic.id)

            # If the slug changed, drop the old cache key
            if old_slug != topic.slug:
                await redis.execute("DEL", f"topic:slug:{old_slug}")
                logger.debug(f"Removed cache key for the old slug: {old_slug}")

            return {"topic": topic}
# Mutation: delete a topic
@mutation.field("delete_topic")
@login_required
async def delete_topic(_, info, slug: str):
    user_id = info.context["user_id"]
    with local_session() as session:
        t: Topic = session.query(Topic).filter(Topic.slug == slug).first()
        if not t:
            return {"error": "invalid topic slug"}
        author = session.query(Author).filter(Author.user == user_id).first()
        if author:
            if t.created_by != author.id:
                return {"error": "access denied"}

            session.delete(t)
            session.commit()

            # Invalidate both the topic collections and this topic's own keys
            await invalidate_topics_cache()
            await redis.execute("DEL", f"topic:slug:{slug}")
            await redis.execute("DEL", f"topic:id:{t.id}")

            return {}
        return {"error": "access denied"}
# Query: topic followers
@query.field("get_topic_followers")
async def get_topic_followers(_, _info, slug: str):
    logger.debug(f"getting followers for @{slug}")
    topic = await get_cached_topic_by_slug(slug, get_with_stat)
    topic_id = topic.id if isinstance(topic, Topic) else topic.get("id")
    followers = await get_cached_topic_followers(topic_id)
    return followers


# Query: topic authors
@query.field("get_topic_authors")
async def get_topic_authors(_, _info, slug: str):
    logger.debug(f"getting authors for @{slug}")
    topic = await get_cached_topic_by_slug(slug, get_with_stat)
    topic_id = topic.id if isinstance(topic, Topic) else topic.get("id")
    authors = await get_cached_topic_authors(topic_id)
    return authors

View File

@@ -13,6 +13,12 @@ enum ReactionSort {
    dislike
}

enum ShoutsOrderBy {
    last_commented_at
    rating
    comments_count
}

enum ReactionKind {
    # collabs

View File

@@ -1,15 +1,47 @@
input MediaItemInput {
    url: String
    title: String
    body: String
    source: String
    pic: String
    date: String
    genre: String
    artist: String
    lyrics: String
}

input AuthorInput {
    id: Int!
    slug: String
}

input TopicInput {
    id: Int
    slug: String!
    title: String
    body: String
    pic: String
}

input DraftInput {
    id: Int
    # no created_at, updated_at, deleted_at, updated_by, deleted_by
    layout: String
    shout_id: Int # Changed from shout: Shout
    author_ids: [Int!] # Changed from authors: [Author]
    topic_ids: [Int!] # Changed from topics: [Topic]
    main_topic_id: Int # Changed from main_topic: Topic
    media: [MediaItemInput] # Changed to use MediaItemInput
    lead: String
    description: String
    subtitle: String
    lang: String
    seo: String
    body: String
    title: String
    slug: String
    cover: String
    cover_caption: String
}
input ProfileInput {
@@ -21,14 +53,6 @@ input ProfileInput {
    about: String
}
input ReactionInput {
    id: Int
    kind: ReactionKind!
@@ -54,17 +78,16 @@ input LoadShoutsFilters {
    author: String
    layouts: [String]
    featured: Boolean
    reacted: Boolean # requires auth, used in load_shouts_feed
    after: Int
}

input LoadShoutsOptions {
    filters: LoadShoutsFilters
    limit: Int!
    random_limit: Int
    offset: Int
    order_by: ShoutsOrderBy
    order_by_desc: Boolean
}
@@ -72,15 +95,23 @@ input ReactionBy {
    shout: String
    shouts: [String]
    search: String
    kinds: [ReactionKind]
    reply_to: Int # filter
    topic: String
    created_by: Int
    author: String
    after: Int
    sort: ReactionSort # sort
}

input NotificationSeenInput {
    notifications: [Int]
    thread: Int
}
input CommunityInput {
slug: String
name: String
desc: String
pic: String
}
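For reference, a client-side sketch of a create_draft request using the new DraftInput; all field values below are invented for illustration.

# Illustrative GraphQL payload for the create_draft mutation
create_draft_payload = {
    "query": "mutation CreateDraft($draft_input: DraftInput!) { create_draft(draft_input: $draft_input) { error draft { id slug } } }",
    "variables": {
        "draft_input": {
            "title": "Example title",
            "body": "<p>Example body</p>",
            "topic_ids": [1, 2],
            "main_topic_id": 1,
            "media": [{"url": "https://example.com/cover.jpg", "title": "cover"}],
        }
    },
    "operationName": "CreateDraft",
}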

View File

@@ -3,18 +3,23 @@ type Mutation {
    rate_author(rated_slug: String!, value: Int!): CommonResult!
    update_author(profile: ProfileInput!): CommonResult!

    # draft
    create_draft(draft_input: DraftInput!): CommonResult!
    update_draft(draft_id: Int!, draft_input: DraftInput!): CommonResult!
    delete_draft(draft_id: Int!): CommonResult!

    # publication
    publish_shout(shout_id: Int!): CommonResult!
    publish_draft(draft_id: Int!): CommonResult!
    unpublish_draft(draft_id: Int!): CommonResult!
    unpublish_shout(shout_id: Int!): CommonResult!

    # follower
    follow(what: FollowingEntity!, slug: String!): AuthorFollowsResult!
    unfollow(what: FollowingEntity!, slug: String!): AuthorFollowsResult!

    # topic
    create_topic(topic_input: TopicInput!): CommonResult!
    update_topic(topic_input: TopicInput!): CommonResult!
    delete_topic(slug: String!): CommonResult!

    # reaction
@@ -29,8 +34,18 @@ type Mutation {
    accept_invite(invite_id: Int!): CommonResult!
    reject_invite(invite_id: Int!): CommonResult!

    # bookmark
    toggle_bookmark_shout(slug: String!): CommonResult!

    # notifier
    notification_mark_seen(notification_id: Int!, seen: Boolean): CommonResult!
    notifications_seen_after(after: Int!, seen: Boolean): CommonResult!
    notifications_seen_thread(thread_id: String!, seen: Boolean): CommonResult!

    # community
    join_community(slug: String!): CommonResult!
    leave_community(slug: String!): CommonResult!
    create_community(community_input: CommunityInput!): CommonResult!
    update_community(community_input: CommunityInput!): CommonResult!
    delete_community(slug: String!): CommonResult!
}

View File

@@ -4,41 +4,58 @@ type Query {
    get_author_id(user: String!): Author
    get_authors_all: [Author]
    load_authors_by(by: AuthorsBy!, limit: Int, offset: Int): [Author]
    # search_authors(what: String!): [Author]

    # community
    get_community: Community
    get_communities_all: [Community]
    get_communities_by_author(slug: String, user: String, author_id: Int): [Community]

    # follower
    get_shout_followers(slug: String, shout_id: Int): [Author]
    get_topic_followers(slug: String): [Author]
    get_topic_authors(slug: String): [Author]
    get_author_followers(slug: String, user: String, author_id: Int): [Author]
    get_author_follows(slug: String, user: String, author_id: Int): CommonResult!
    get_author_follows_topics(slug: String, user: String, author_id: Int): [Topic]
    get_author_follows_authors(slug: String, user: String, author_id: Int): [Author]

    # reaction
    load_reactions_by(by: ReactionBy!, limit: Int, offset: Int): [Reaction]
    load_shout_comments(shout: Int!, limit: Int, offset: Int): [Reaction]
    load_shout_ratings(shout: Int!, limit: Int, offset: Int): [Reaction]
    load_comment_ratings(comment: Int!, limit: Int, offset: Int): [Reaction]

    # reader
    get_shout(slug: String, shout_id: Int): Shout
    load_shouts_by(options: LoadShoutsOptions): [Shout]
    load_shouts_search(text: String!, options: LoadShoutsOptions): [SearchResult]
    load_shouts_bookmarked(options: LoadShoutsOptions): [Shout]

    # rating
    get_my_rates_shouts(shouts: [Int!]!): [MyRateShout]
    get_my_rates_comments(comments: [Int!]!): [MyRateComment]

    # public feeds
    load_shouts_with_topic(slug: String, options: LoadShoutsOptions): [Shout] # topic feed
    load_shouts_random_top(options: LoadShoutsOptions): [Shout] # random order, fixed filter, limit offset can be used
    load_shouts_authored_by(slug: String, options: LoadShoutsOptions): [Shout] # author feed
    load_shouts_followed_by(slug: String, options: LoadShoutsOptions): [Shout] # another author feed

    # my feeds
    load_shouts_feed(options: LoadShoutsOptions): [Shout]
    load_shouts_unrated(options: LoadShoutsOptions): [Shout]
    load_shouts_coauthored(options: LoadShoutsOptions): [Shout]
    load_shouts_discussed(options: LoadShoutsOptions): [Shout]

    # editor
    get_my_shout(shout_id: Int!): CommonResult!
    get_shouts_drafts: CommonResult!
    load_drafts: CommonResult!

    # topic
    get_topic(slug: String!): Topic
    get_topics_all: [Topic]
    get_topics_by_author(slug: String, user: String, author_id: Int): [Topic]
    get_topics_by_community(slug: String, community_id: Int): [Topic]

View File

@@ -1,5 +1,6 @@
type AuthorStat {
    shouts: Int
    topics: Int
    authors: Int
    followers: Int
    rating: Int
@@ -56,26 +57,41 @@ type Reaction {
    # old_thread: String
}
type MediaItem {
url: String
title: String
body: String
source: String # image
pic: String
# audio specific properties
date: String
genre: String
artist: String
lyrics: String
}
type Shout {
    id: Int!
    slug: String!
    body: String!
    lead: String
    description: String
    title: String!
    subtitle: String
    lang: String
    cover: String
    cover_caption: String
    layout: String!
    community: Community!
    main_topic: Topic
    created_by: Author!
    topics: [Topic]
    authors: [Author]
    updated_by: Author
    deleted_by: Author
    created_at: Int!
    updated_at: Int
@@ -84,19 +100,51 @@ type Shout {
    deleted_at: Int
    version_of: Shout # TODO: use version_of somewhere
    draft: Draft
    media: [MediaItem]
    stat: Stat
    score: Float
}
type Draft {
id: Int!
created_at: Int!
created_by: Author!
layout: String
slug: String
title: String
subtitle: String
lead: String
description: String
body: String
media: [MediaItem]
cover: String
cover_caption: String
lang: String
seo: String
# auto
updated_at: Int
deleted_at: Int
updated_by: Author
deleted_by: Author
authors: [Author]
topics: [Topic]
}
type Stat {
    rating: Int
    commented: Int
    viewed: Int
    last_commented_at: Int
}

type CommunityStat {
    shouts: Int!
    followers: Int!
    authors: Int!
}
type Community {
@@ -107,6 +155,7 @@ type Community {
    pic: String!
    created_at: Int!
    created_by: Author!
    stat: CommunityStat
}
type Collection {
@@ -135,12 +184,15 @@ type Topic {
    pic: String
    stat: TopicStat
    oid: String
    is_main: Boolean
}
# output type
type CommonResult {
    error: String
    drafts: [Draft]
    draft: Draft
    slugs: [String]
    shout: Shout
    shouts: [Shout]
@@ -158,7 +210,7 @@ type SearchResult {
    slug: String!
    title: String!
    cover: String
    main_topic: Topic
    created_at: Int
    authors: [Author]
    topics: [Topic]
@@ -176,7 +228,6 @@ type Invite {
type AuthorFollowsResult {
    topics: [Topic]
    authors: [Author]
    communities: [Community]
    error: String
}
@@ -211,3 +262,15 @@ type NotificationsResult {
    total: Int!
    error: String
}
type MyRateShout {
shout_id: Int!
my_rate: ReactionKind
}
type MyRateComment {
shout_id: Int
comment_id: Int!
my_rate: ReactionKind
}

View File

@@ -1,18 +0,0 @@
from granian.constants import Interfaces
from granian.server import Granian
from services.logger import root_logger as logger
from settings import PORT
if __name__ == '__main__':
logger.info('started')
granian_instance = Granian(
'main:app',
address='0.0.0.0', # noqa S104
port=PORT,
threads=4,
websockets=False,
interface=Interfaces.ASGI,
)
granian_instance.serve()

View File

@@ -1,109 +1,183 @@
from functools import wraps

from cache.cache import get_cached_author_by_user_id
from resolvers.stat import get_with_stat
from services.schema import request_graphql_data
from settings import ADMIN_SECRET, AUTH_URL
from utils.logger import root_logger as logger

# Allowed request headers
ALLOWED_HEADERS = ["Authorization", "Content-Type"]
async def check_auth(req):
    """
    Validate the user's authorization.

    Checks the authorization token passed in the request headers and
    returns the user identifier and the user's roles.

    Parameters:
    - req: the incoming GraphQL request carrying the Authorization header.

    Returns:
    - user_id: str - user identifier.
    - user_roles: list[str] - list of user roles.
    """
    token = req.headers.get("Authorization")
    host = req.headers.get("host", "")
    logger.debug(f"check_auth: host={host}")
    auth_url = AUTH_URL
    if ".dscrs.site" in host or "localhost" in host:
        auth_url = "https://auth.dscrs.site/graphql"
    user_id = ""
    user_roles = []
    if token:
        # Strip the "Bearer " prefix from the token if present
        if token.startswith("Bearer "):
            token = token.split("Bearer ")[-1].strip()
        # Logging the authentication token
        logger.debug(f"TOKEN: {token}")
        query_name = "validate_jwt_token"
        operation = "ValidateToken"
        variables = {"params": {"token_type": "access_token", "token": token}}

        # Only the headers required for the GraphQL request
        headers = {"Content-Type": "application/json"}

        gql = {
            "query": f"query {operation}($params: ValidateJWTTokenInput!)"
            + "{"
            + f"{query_name}(params: $params) {{ is_valid claims }} "
            + "}",
            "variables": variables,
            "operationName": operation,
        }
        data = await request_graphql_data(gql, url=auth_url, headers=headers)
        if data:
            logger.debug(f"Auth response: {data}")
            validation_result = data.get("data", {}).get(query_name, {})
            logger.debug(f"Validation result: {validation_result}")
            is_valid = validation_result.get("is_valid", False)
            if not is_valid:
                logger.error(f"Token validation failed: {validation_result}")
                return "", []
            user_data = validation_result.get("claims", {})
            logger.debug(f"User claims: {user_data}")
            user_id = user_data.get("sub", "")
            user_roles = user_data.get("allowed_roles", [])
    return user_id, user_roles
async def add_user_role(user_id):
    """
    Grant roles to a user.

    Adds the "author" and "reader" roles to the given user
    in the authorization service.

    Parameters:
    - user_id: str - the user to grant the roles to.

    Returns:
    - user_id: str - the user identifier if the operation succeeded.
    """
    logger.info(f"add author role for user_id: {user_id}")
    query_name = "_update_user"
    operation = "UpdateUserRoles"
    headers = {
        "Content-Type": "application/json",
        "x-authorizer-admin-secret": ADMIN_SECRET,
    }
    variables = {"params": {"roles": "author, reader", "id": user_id}}
    gql = {
        "query": f"mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}",
        "variables": variables,
        "operationName": operation,
    }
    data = await request_graphql_data(gql, headers=headers)
    if data:
        user_id = data.get("data", {}).get(query_name, {}).get("id")
        return user_id
def login_required(f):
    """
    Decorator that requires an authenticated user.

    Verifies that the user is authorized and adds the user's
    information to the resolver context.

    Parameters:
    - f: the function to decorate.

    Returns:
    - the wrapped function with the authorization check applied.
    """

    @wraps(f)
    async def decorated_function(*args, **kwargs):
        info = args[1]
        req = info.context.get("request")
        user_id, user_roles = await check_auth(req)
        if user_id and user_roles:
            logger.info(f" got {user_id} roles: {user_roles}")
            info.context["user_id"] = user_id.strip()
            info.context["roles"] = user_roles
            author = await get_cached_author_by_user_id(user_id, get_with_stat)
            if not author:
                logger.error(f"author profile not found for user {user_id}")
            info.context["author"] = author
        return await f(*args, **kwargs)

    return decorated_function
def login_accepted(f):
    """
    Decorator that adds authorization data to the context when available,
    without blocking access for unauthenticated users.

    Parameters:
    - f: the function to decorate.

    Returns:
    - the wrapped function with the optional authorization check applied.
    """

    @wraps(f)
    async def decorated_function(*args, **kwargs):
        info = args[1]
        req = info.context.get("request")

        logger.debug("login_accepted: checking user authorization.")
        user_id, user_roles = await check_auth(req)
        logger.debug(f"login_accepted: user_id={user_id}, user_roles={user_roles}")

        if user_id and user_roles:
            logger.info(f"login_accepted: user authorized: {user_id} with roles {user_roles}")
            info.context["user_id"] = user_id.strip()
            info.context["roles"] = user_roles

            # Try to load the author profile
            author = await get_cached_author_by_user_id(user_id, get_with_stat)
            if author:
                logger.debug(f"login_accepted: found author profile: {author}")
                # `author` is expected to be an object with an `id` attribute
                info.context["author"] = author.dict()
            else:
                logger.error(
                    f"login_accepted: author profile not found for user {user_id}. Falling back to basic data."
                )
        else:
            logger.debug("login_accepted: user is not authorized. Clearing the context.")
            info.context["user_id"] = None
            info.context["roles"] = None
            info.context["author"] = None

        return await f(*args, **kwargs)

    return decorated_function
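A hedged sketch of a resolver consuming the context populated by these decorators; the field name my_drafts is made up for illustration and assumes query from services.schema.

# Illustrative resolver using the decorator above
@query.field("my_drafts")
@login_required
async def my_drafts(_, info):
    author = info.context.get("author")  # set by login_required
    user_id = info.context.get("user_id")
    logger.debug(f"loading drafts for user {user_id}")
    return {"error": None, "drafts": []} if author else {"error": "unauthorized"}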

View File

@@ -1,113 +0,0 @@
import json
from orm.author import Author
from orm.topic import Topic
from services.encoders import CustomJSONEncoder
from services.rediscache import redis
DEFAULT_FOLLOWS = {
'topics': [],
'authors': [],
'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}],
}
async def cache_author(author: dict):
payload = json.dumps(author, cls=CustomJSONEncoder)
await redis.execute('SET', f'user:{author.get("user")}', payload)
await redis.execute('SET', f'author:{author.get("id")}', payload)
# update stat all field for followers' caches in <authors> list
followers_str = await redis.execute('GET', f'author:{author.get("id")}:followers')
followers = []
if followers_str:
followers = json.loads(followers_str)
if isinstance(followers, list):
for follower in followers:
follower_follows_authors = []
follower_follows_authors_str = await redis.execute('GET', f'author:{author.get("id")}:follows-authors')
if follower_follows_authors_str:
follower_follows_authors = json.loads(follower_follows_authors_str)
c = 0
for old_author in follower_follows_authors:
if int(old_author.get('id')) == int(author.get('id', 0)):
follower_follows_authors[c] = author
break # exit the loop since we found and updated the author
c += 1
else:
# author not found in the list, so add the new author with the updated stat field
follower_follows_authors.append(author)
# update stat field for all authors' caches in <followers> list
follows_str = await redis.execute('GET', f'author:{author.get("id")}:follows-authors')
follows_authors = []
if follows_str:
follows_authors = json.loads(follows_str)
if isinstance(follows_authors, list):
for followed_author in follows_authors:
followed_author_followers = []
followed_author_followers_str = await redis.execute('GET', f'author:{author.get("id")}:followers')
if followed_author_followers_str:
followed_author_followers = json.loads(followed_author_followers_str)
c = 0
for old_follower in followed_author_followers:
if int(old_follower.get('id')) == int(author.get('id', 0)):
followed_author_followers[c] = author
break # exit the loop since we found and updated the author
c += 1
else:
# author not found in the list, so add the new author with the updated stat field
followed_author_followers.append(author)
async def cache_follows(follower: Author, entity_type: str, entity, is_insert=True):
# prepare
follows = []
redis_key = f'author:{follower.id}:follows-{entity_type}s'
follows_str = await redis.execute('GET', redis_key)
if isinstance(follows_str, str):
follows = json.loads(follows_str)
if is_insert:
follows.append(entity)
else:
entity_id = entity.get('id')
if not entity_id:
raise Exception('wrong entity')
# Remove the entity from follows
follows = [e for e in follows if e['id'] != entity_id]
# update follows cache
updated_data = [t.dict() if isinstance(t, Topic) else t for t in follows]
payload = json.dumps(updated_data, cls=CustomJSONEncoder)
await redis.execute('SET', redis_key, payload)
# update follower's stats everywhere
author_str = await redis.execute('GET', f'author:{follower.id}')
if author_str:
author = json.loads(author_str)
author['stat'][f'{entity_type}s'] = len(updated_data)
await cache_author(author)
return follows
async def cache_follower(follower: Author, author: Author, is_insert=True):
redis_key = f'author:{author.id}:followers'
followers_str = await redis.execute('GET', redis_key)
followers = []
if isinstance(followers_str, str):
followers = json.loads(followers_str)
if is_insert:
# Remove the entity from followers
followers = [e for e in followers if e['id'] != author.id]
else:
followers.append(follower)
updated_followers = [f.dict() if isinstance(f, Author) else f for f in followers]
payload = json.dumps(updated_followers, cls=CustomJSONEncoder)
await redis.execute('SET', redis_key, payload)
author_str = await redis.execute('GET', f'author:{follower.id}')
if author_str:
author = json.loads(author_str)
author['stat']['followers'] = len(updated_followers)
await cache_author(author)
return followers

services/common_result.py Normal file
View File

@@ -0,0 +1,24 @@
from dataclasses import dataclass
from typing import List, Optional
from orm.author import Author
from orm.community import Community
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic
@dataclass
class CommonResult:
error: Optional[str] = None
slugs: Optional[List[str]] = None
shout: Optional[Shout] = None
shouts: Optional[List[Shout]] = None
author: Optional[Author] = None
authors: Optional[List[Author]] = None
reaction: Optional[Reaction] = None
reactions: Optional[List[Reaction]] = None
topic: Optional[Topic] = None
topics: Optional[List[Topic]] = None
community: Optional[Community] = None
communities: Optional[List[Community]] = None
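A small illustrative use of the dataclass; field values are examples.

# Building results for an error case and a success case
from services.common_result import CommonResult

not_found = CommonResult(error="topic not found")
listed = CommonResult(slugs=["culture", "tech"])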

View File

@@ -1,31 +1,141 @@
import math
import time
import traceback
import warnings
from typing import Any, Callable, Dict, TypeVar

import orjson
import sqlalchemy
from sqlalchemy import (
    JSON,
    Column,
    Engine,
    Index,
    Integer,
    create_engine,
    event,
    exc,
    func,
    inspect,
    text,
)
from sqlalchemy.orm import Session, configure_mappers, declarative_base
from sqlalchemy.sql.schema import Table

from settings import DB_URL
from utils.logger import root_logger as logger

if DB_URL.startswith("postgres"):
    engine = create_engine(
        DB_URL,
        echo=False,
        pool_size=10,
        max_overflow=20,
        pool_timeout=30,  # How long to wait for a free connection
        pool_recycle=1800,  # Connection lifetime
        pool_pre_ping=True,  # Check connections before handing them out
        connect_args={
            "sslmode": "disable",
            "connect_timeout": 40,  # Connection timeout
        },
    )
else:
    engine = create_engine(DB_URL, echo=False, connect_args={"check_same_thread": False})

inspector = inspect(engine)
configure_mappers()
T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
FILTERED_FIELDS = ["_sa_instance_state", "search_vector"]
def create_table_if_not_exists(engine, table):
inspector = inspect(engine)
if table and not inspector.has_table(table.__tablename__):
table.__table__.create(engine)
logger.info(f"Table '{table.__tablename__}' created.")
else:
logger.info(f"Table '{table.__tablename__}' ok.")
def sync_indexes():
"""
Синхронизирует индексы в БД с индексами, определенными в моделях SQLAlchemy.
Создает недостающие индексы, если они определены в моделях, но отсутствуют в БД.
Использует pg_catalog для PostgreSQL для получения списка существующих индексов.
"""
if not DB_URL.startswith("postgres"):
logger.warning("Функция sync_indexes поддерживается только для PostgreSQL.")
return
logger.info("Начинаем синхронизацию индексов в базе данных...")
# Получаем все существующие индексы в БД
with local_session() as session:
existing_indexes_query = text("""
SELECT
t.relname AS table_name,
i.relname AS index_name
FROM
pg_catalog.pg_class i
JOIN
pg_catalog.pg_index ix ON ix.indexrelid = i.oid
JOIN
pg_catalog.pg_class t ON t.oid = ix.indrelid
JOIN
pg_catalog.pg_namespace n ON n.oid = i.relnamespace
WHERE
i.relkind = 'i'
AND n.nspname = 'public'
AND t.relkind = 'r'
ORDER BY
t.relname, i.relname;
""")
existing_indexes = {row[1].lower() for row in session.execute(existing_indexes_query)}
logger.debug(f"Найдено {len(existing_indexes)} существующих индексов в БД")
# Проверяем каждую модель и её индексы
for _model_name, model_class in REGISTRY.items():
if hasattr(model_class, "__table__") and hasattr(model_class, "__table_args__"):
table_args = model_class.__table_args__
# Если table_args - это кортеж, ищем в нём объекты Index
if isinstance(table_args, tuple):
for arg in table_args:
if isinstance(arg, Index):
index_name = arg.name.lower()
# Проверяем, существует ли индекс в БД
if index_name not in existing_indexes:
logger.info(
f"Создаем отсутствующий индекс {index_name} для таблицы {model_class.__tablename__}"
)
# Создаем индекс если он отсутствует
try:
arg.create(engine)
logger.info(f"Индекс {index_name} успешно создан")
except Exception as e:
logger.error(f"Ошибка при создании индекса {index_name}: {e}")
else:
logger.debug(f"Индекс {index_name} уже существует")
# Анализируем таблицы для оптимизации запросов
for model_name, model_class in REGISTRY.items():
if hasattr(model_class, "__tablename__"):
try:
session.execute(text(f"ANALYZE {model_class.__tablename__}"))
logger.debug(f"Таблица {model_class.__tablename__} проанализирована")
except Exception as e:
logger.error(f"Ошибка при анализе таблицы {model_class.__tablename__}: {e}")
logger.info("Синхронизация индексов завершена.")
# noinspection PyUnusedLocal
def local_session(src=""):
    return Session(bind=engine, expire_on_commit=False)
@@ -36,7 +146,7 @@ class Base(declarative_base()):
    __init__: Callable
    __allow_unmapped__ = True
    __abstract__ = True
    __table_args__ = {"extend_existing": True}

    id = Column(Integer, primary_key=True)
@@ -44,25 +154,26 @@ class Base(declarative_base()):
        REGISTRY[cls.__name__] = cls

    def dict(self) -> Dict[str, Any]:
        column_names = filter(lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys())
        data = {}
        try:
            for column_name in column_names:
                value = getattr(self, column_name)
                # Check if the value is JSON and decode it if necessary
                if isinstance(value, (str, bytes)) and isinstance(self.__table__.columns[column_name].type, JSON):
                    try:
                        data[column_name] = orjson.loads(value)
                    except (TypeError, orjson.JSONDecodeError) as e:
                        logger.error(f"Error decoding JSON for column '{column_name}': {e}")
                        data[column_name] = value
                else:
                    data[column_name] = value
            # Add synthetic field .stat if it exists
            if hasattr(self, "stat"):
                data["stat"] = self.stat
        except Exception as e:
            logger.error(f"Error occurred while converting object to dictionary: {e}")
        return data
    def update(self, values: Dict[str, Any]) -> None:
        for key, value in values.items():
@@ -70,42 +181,81 @@ class Base(declarative_base()):
            setattr(self, key, value)


# make_searchable(Base.metadata)
# Base.metadata.create_all(bind=engine)


# Print a full traceback when a warning is raised
def warning_with_traceback(message: Warning | str, category, filename: str, lineno: int, file=None, line=None):
    tb = traceback.format_stack()
    tb_str = "".join(tb)
    return f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}"


# Use the traceback formatter for SQLAlchemy warnings
warnings.showwarning = warning_with_traceback
warnings.simplefilter("always", exc.SAWarning)


# Extract the SQL statement from the execution context
def get_statement_from_context(context):
    query = ""
    compiled = context.compiled
    if compiled:
        compiled_statement = compiled.string
        compiled_parameters = compiled.params
        if compiled_statement:
            if compiled_parameters:
                try:
                    # Safely interpolate the parameters
                    query = compiled_statement % compiled_parameters
                except Exception as e:
                    logger.error(f"Error formatting query: {e}")
            else:
                query = compiled_statement
    if query:
        query = query.replace("\n", " ").replace("  ", " ").replace("  ", " ").strip()
    return query


# Event handler fired before a query is executed
@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    conn.query_start_time = time.time()
    conn.cursor_id = id(cursor)  # Track the specific cursor


# Event handler fired after a query is executed
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    if hasattr(conn, "cursor_id") and conn.cursor_id == id(cursor):
        query = get_statement_from_context(context)
        if query:
            elapsed = time.time() - conn.query_start_time
            if elapsed > 1:
                query_end = query[-16:]
                query = query.split(query_end)[0] + query_end
                logger.debug(query)
                elapsed_n = math.floor(elapsed)
                logger.debug("*" * elapsed_n)
                logger.debug(f"{elapsed:.3f} s")
        del conn.cursor_id  # Drop the cursor id once the query has completed


def get_json_builder():
    """
    Return the JSON-building functions appropriate for the current database driver.
    """
    dialect = engine.dialect.name
    json_cast = lambda x: x  # noqa: E731
    if dialect.startswith("postgres"):
        json_cast = lambda x: func.cast(x, sqlalchemy.Text)  # noqa: E731
        return func.json_build_object, func.json_agg, json_cast
    elif dialect.startswith("sqlite") or dialect.startswith("mysql"):
        return func.json_object, func.json_group_array, json_cast
    else:
        raise NotImplementedError(f"JSON builder not implemented for dialect {dialect}")


# Used throughout the codebase
json_builder, json_array_builder, json_cast = get_json_builder()
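A hedged sketch of how the returned builders might be used in a query; the table and column choices are illustrative.

# Aggregating authors into a JSON array with the dialect-appropriate builders
from sqlalchemy import select
from orm.author import Author
from services.db import json_array_builder, json_builder, local_session

def authors_as_json():
    q = select(json_array_builder(json_builder("id", Author.id, "slug", Author.slug)).label("authors"))
    with local_session() as session:
        return session.execute(q).scalar()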

View File

@@ -1,9 +0,0 @@
import json
from decimal import Decimal
class CustomJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Decimal):
return str(obj)
return super().default(obj)

services/exception.py Normal file
View File

@@ -0,0 +1,17 @@
import logging
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import JSONResponse
logger = logging.getLogger("exception")
logging.basicConfig(level=logging.DEBUG)
class ExceptionHandlerMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request, call_next):
try:
response = await call_next(request)
return response
except Exception as exc:
logger.exception(exc)
return JSONResponse({"detail": "An error occurred. Please try again later."}, status_code=500)

View File

@@ -1,81 +0,0 @@
import logging
import colorlog
# Define the color scheme
color_scheme = {
'DEBUG': 'light_black',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
# Define secondary log colors
secondary_colors = {
'log_name': {'DEBUG': 'blue'},
'asctime': {'DEBUG': 'cyan'},
'process': {'DEBUG': 'purple'},
'module': {'DEBUG': 'light_black,bg_blue'},
'funcName': {'DEBUG': 'light_white,bg_blue'}, # Add this line
}
# Define the log format string
fmt_string = '%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s'
# Define formatting configuration
fmt_config = {
'log_colors': color_scheme,
'secondary_log_colors': secondary_colors,
'style': '%',
'reset': True,
}
class MultilineColoredFormatter(colorlog.ColoredFormatter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.log_colors = kwargs.pop('log_colors', {})
self.secondary_log_colors = kwargs.pop('secondary_log_colors', {})
def format(self, record):
message = record.getMessage()
if '\n' in message:
lines = message.split('\n')
first_line = lines[0]
record.message = first_line
formatted_first_line = super().format(record)
formatted_lines = [formatted_first_line]
for line in lines[1:]:
formatted_lines.append(line)
return '\n'.join(formatted_lines)
else:
return super().format(record)
# Create a MultilineColoredFormatter object for colorized logging
formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
# Create a stream handler for logging output
stream = logging.StreamHandler()
stream.setFormatter(formatter)
def get_colorful_logger(name='main'):
# Create and configure the logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)
return logger
# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)
ignore_logs = ['_trace', 'httpx', '_client', '_trace.atrace', 'aiohttp', '_client']
for lgr in ignore_logs:
loggr = logging.getLogger(lgr)
loggr.setLevel(logging.INFO)

View File

@@ -1,11 +0,0 @@
from dogpile.cache import make_region
from settings import REDIS_URL
# Создание региона кэша с TTL
cache_region = make_region()
cache_region.configure(
'dogpile.cache.redis',
arguments={'url': f'{REDIS_URL}/1'},
expiration_time=3600, # Cache expiration time in seconds
)

View File

@@ -1,9 +1,10 @@
import orjson

from orm.notification import Notification
from services.db import local_session
from services.redis import redis
from utils.logger import root_logger as logger


def save_notification(action: str, entity: str, payload):
    with local_session() as session:
@@ -12,44 +13,43 @@ def save_notification(action: str, entity: str, payload):
session.commit() session.commit()
async def notify_reaction(reaction, action: str = 'create'): async def notify_reaction(reaction, action: str = "create"):
channel_name = 'reaction' channel_name = "reaction"
data = {'payload': reaction, 'action': action} data = {"payload": reaction, "action": action}
try: try:
save_notification(action, channel_name, data.get('payload')) save_notification(action, channel_name, data.get("payload"))
await redis.publish(channel_name, json.dumps(data)) await redis.publish(channel_name, orjson.dumps(data))
except Exception as e: except Exception as e:
logger.error(f'Failed to publish to channel {channel_name}: {e}') logger.error(f"Failed to publish to channel {channel_name}: {e}")
async def notify_shout(shout, action: str = 'update'): async def notify_shout(shout, action: str = "update"):
channel_name = 'shout' channel_name = "shout"
data = {'payload': shout, 'action': action} data = {"payload": shout, "action": action}
try: try:
save_notification(action, channel_name, data.get('payload')) save_notification(action, channel_name, data.get("payload"))
await redis.publish(channel_name, json.dumps(data)) await redis.publish(channel_name, orjson.dumps(data))
except Exception as e: except Exception as e:
logger.error(f'Failed to publish to channel {channel_name}: {e}') logger.error(f"Failed to publish to channel {channel_name}: {e}")
async def notify_follower(follower: dict, author_id: int, action: str = 'follow'): async def notify_follower(follower: dict, author_id: int, action: str = "follow"):
channel_name = f'follower:{author_id}' channel_name = f"follower:{author_id}"
try: try:
# Simplify dictionary before publishing # Simplify dictionary before publishing
simplified_follower = {k: follower[k] for k in ['id', 'name', 'slug', 'pic']} simplified_follower = {k: follower[k] for k in ["id", "name", "slug", "pic"]}
data = {'payload': simplified_follower, 'action': action} data = {"payload": simplified_follower, "action": action}
# save in channel # save in channel
save_notification(action, channel_name, data.get('payload')) save_notification(action, channel_name, data.get("payload"))
# Convert data to JSON string # Convert data to JSON string
json_data = json.dumps(data) json_data = orjson.dumps(data)
# Ensure the data is not empty before publishing # Ensure the data is not empty before publishing
if json_data: if json_data:
# Use the 'await' keyword when publishing # Use the 'await' keyword when publishing
await redis.publish(channel_name, json_data) await redis.publish(channel_name, json_data)
except Exception as e: except Exception as e:
# Log the error and re-raise it # Log the error and re-raise it
logger.error(f'Failed to publish to channel {channel_name}: {e}') logger.error(f"Failed to publish to channel {channel_name}: {e}")
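One behavioral note on the json-to-orjson switch above: orjson.dumps() returns bytes, not str, so subscribers that previously called json.loads() on a text payload should move to orjson.loads(), which accepts both. A hedged subscriber-side sketch (the channel name and payload shape come from notify_reaction(); the listener function itself is illustrative):

import orjson
from redis.asyncio import Redis
from settings import REDIS_URL

async def listen_reactions():
    client = Redis.from_url(REDIS_URL)          # raw bytes payloads are fine, orjson decodes them directly
    pubsub = client.pubsub()
    await pubsub.subscribe("reaction")          # channel published by notify_reaction() above
    async for message in pubsub.listen():
        if message["type"] == "message":
            data = orjson.loads(message["data"])
            print(data["action"], data["payload"])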

services/pretopic.py (new file, 170 lines)

@@ -0,0 +1,170 @@
import concurrent.futures
from typing import Dict, List, Tuple
from txtai.embeddings import Embeddings
from services.logger import root_logger as logger
class TopicClassifier:
def __init__(self, shouts_by_topic: Dict[str, str], publications: List[Dict[str, str]]):
"""
Инициализация классификатора тем и поиска публикаций.
Args:
shouts_by_topic: Словарь {тема: текст_всех_публикаций}
publications: Список публикаций с полями 'id', 'title', 'text'
"""
self.shouts_by_topic = shouts_by_topic
self.topics = list(shouts_by_topic.keys())
self.publications = publications
self.topic_embeddings = None # Для классификации тем
self.search_embeddings = None # Для поиска публикаций
self._initialization_future = None
self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
def initialize(self) -> None:
"""
Асинхронная инициализация векторных представлений.
"""
if self._initialization_future is None:
self._initialization_future = self._executor.submit(self._prepare_embeddings)
logger.info("Векторизация текстов начата в фоновом режиме...")
def _prepare_embeddings(self) -> None:
"""
Подготавливает векторные представления для тем и поиска.
"""
logger.info("Начинается подготовка векторных представлений...")
# Модель для русского языка
# TODO: model local caching
model_path = "sentence-transformers/paraphrase-multilingual-mpnet-base-v2"
# Инициализируем embeddings для классификации тем
self.topic_embeddings = Embeddings(path=model_path)
topic_documents = [(topic, text) for topic, text in self.shouts_by_topic.items()]
self.topic_embeddings.index(topic_documents)
# Инициализируем embeddings для поиска публикаций
self.search_embeddings = Embeddings(path=model_path)
search_documents = [(str(pub["id"]), f"{pub['title']} {pub['text']}") for pub in self.publications]
self.search_embeddings.index(search_documents)
logger.info("Подготовка векторных представлений завершена.")
def predict_topic(self, text: str) -> Tuple[float, str]:
"""
Предсказывает тему для заданного текста из известного набора тем.
Args:
text: Текст для классификации
Returns:
Tuple[float, str]: (уверенность, тема)
"""
if not self.is_ready():
logger.error("Векторные представления не готовы. Вызовите initialize() и дождитесь завершения.")
return 0.0, "unknown"
try:
# Ищем наиболее похожую тему
results = self.topic_embeddings.search(text, 1)
if not results:
return 0.0, "unknown"
score, topic = results[0]
return float(score), topic
except Exception as e:
logger.error(f"Ошибка при определении темы: {str(e)}")
return 0.0, "unknown"
def search_similar(self, query: str, limit: int = 5) -> List[Dict[str, any]]:
"""
Ищет публикации похожие на поисковый запрос.
Args:
query: Поисковый запрос
limit: Максимальное количество результатов
Returns:
List[Dict]: Список найденных публикаций с оценкой релевантности
"""
if not self.is_ready():
logger.error("Векторные представления не готовы. Вызовите initialize() и дождитесь завершения.")
return []
try:
# Ищем похожие публикации
results = self.search_embeddings.search(query, limit)
# Формируем результаты
found_publications = []
for score, pub_id in results:
# Находим публикацию по id
publication = next((pub for pub in self.publications if str(pub["id"]) == pub_id), None)
if publication:
found_publications.append({**publication, "relevance": float(score)})
return found_publications
except Exception as e:
logger.error(f"Ошибка при поиске публикаций: {str(e)}")
return []
def is_ready(self) -> bool:
"""
Проверяет, готовы ли векторные представления.
"""
return self.topic_embeddings is not None and self.search_embeddings is not None
def wait_until_ready(self) -> None:
"""
Ожидает завершения подготовки векторных представлений.
"""
if self._initialization_future:
self._initialization_future.result()
def __del__(self):
"""
Очистка ресурсов при удалении объекта.
"""
if self._executor:
self._executor.shutdown(wait=False)
# Пример использования:
"""
shouts_by_topic = {
"Спорт": "... большой текст со всеми спортивными публикациями ...",
"Технологии": "... большой текст со всеми технологическими публикациями ...",
"Политика": "... большой текст со всеми политическими публикациями ..."
}
publications = [
{
'id': 1,
'title': 'Новый процессор AMD',
'text': 'Компания AMD представила новый процессор...'
},
{
'id': 2,
'title': 'Футбольный матч',
'text': 'Вчера состоялся решающий матч...'
}
]
# Создание классификатора
classifier = TopicClassifier(shouts_by_topic, publications)
classifier.initialize()
classifier.wait_until_ready()
# Определение темы текста
text = "Новый процессор показал высокую производительность"
score, topic = classifier.predict_topic(text)
print(f"Тема: {topic} (уверенность: {score:.4f})")
# Поиск похожих публикаций
query = "процессор AMD производительность"
similar_publications = classifier.search_similar(query, limit=3)
for pub in similar_publications:
print(f"\nНайдена публикация (релевантность: {pub['relevance']:.4f}):")
print(f"Заголовок: {pub['title']}")
print(f"Текст: {pub['text'][:100]}...")
"""

View File

@@ -1,38 +1,41 @@
 import logging
-import redis.asyncio as aredis
+from redis.asyncio import Redis
 from settings import REDIS_URL
 # Set redis logging level to suppress DEBUG messages
-logger = logging.getLogger('redis')
+logger = logging.getLogger("redis")
 logger.setLevel(logging.WARNING)
-class RedisCache:
+class RedisService:
     def __init__(self, uri=REDIS_URL):
         self._uri: str = uri
         self.pubsub_channels = []
         self._client = None
     async def connect(self):
-        self._client = aredis.Redis.from_url(self._uri, decode_responses=True)
+        if self._uri:
+            self._client = await Redis.from_url(self._uri, decode_responses=True)
+            logger.info("Redis connection was established.")
     async def disconnect(self):
-        if self._client:
+        if isinstance(self._client, Redis):
             await self._client.close()
+            logger.info("Redis connection was closed.")
     async def execute(self, command, *args, **kwargs):
         if self._client:
             try:
-                logger.debug(f'{command} {args} {kwargs}')
+                logger.debug(f"{command}")  # {args[0]}") # {args} {kwargs}")
                 for arg in args:
                     if isinstance(arg, dict):
-                        if arg.get('_sa_instance_state'):
-                            del arg['_sa_instance_state']
+                        if arg.get("_sa_instance_state"):
+                            del arg["_sa_instance_state"]
                 r = await self._client.execute_command(command, *args, **kwargs)
-                logger.debug(type(r))
-                logger.debug(r)
+                # logger.debug(type(r))
+                # logger.debug(r)
                 return r
             except Exception as e:
                 logger.error(e)
@@ -57,7 +60,22 @@ class RedisCache:
             return
         await self._client.publish(channel, data)
+    async def set(self, key, data, ex=None):
+        # Prepare the command arguments
+        args = [key, data]
+        # If an expiration time is provided, add it to the arguments
+        if ex is not None:
+            args.append("EX")
+            args.append(ex)
+        # Execute the command with the provided arguments
+        await self.execute("set", *args)
+    async def get(self, key):
+        return await self.execute("get", key)
-redis = RedisCache()
+redis = RedisService()
-__all__ = ['redis']
+__all__ = ["redis"]

View File

@@ -1,5 +1,87 @@
from asyncio.log import logger
import httpx
from ariadne import MutationType, QueryType
from services.db import create_table_if_not_exists, local_session
from settings import AUTH_URL
query = QueryType()
mutation = MutationType()
resolvers = [query, mutation]
async def request_graphql_data(gql, url=AUTH_URL, headers=None):
"""
Выполняет GraphQL запрос к указанному URL
:param gql: GraphQL запрос
:param url: URL для запроса, по умолчанию AUTH_URL
:param headers: Заголовки запроса
:return: Результат запроса или None в случае ошибки
"""
if not url:
return None
if headers is None:
headers = {"Content-Type": "application/json"}
try:
async with httpx.AsyncClient() as client:
response = await client.post(url, json=gql, headers=headers)
if response.status_code == 200:
data = response.json()
errors = data.get("errors")
if errors:
logger.error(f"{url} response: {data}")
else:
return data
else:
logger.error(f"{url}: {response.status_code} {response.text}")
except Exception as _e:
import traceback
logger.error(f"request_graphql_data error: {traceback.format_exc()}")
return None
def create_all_tables():
"""Create all database tables in the correct order."""
from orm import author, community, draft, notification, reaction, shout, topic
# Порядок важен - сначала таблицы без внешних ключей, затем зависимые таблицы
models_in_order = [
# user.User, # Базовая таблица auth
author.Author, # Базовая таблица
community.Community, # Базовая таблица
topic.Topic, # Базовая таблица
# Связи для базовых таблиц
author.AuthorFollower, # Зависит от Author
community.CommunityFollower, # Зависит от Community
topic.TopicFollower, # Зависит от Topic
# Черновики (теперь без зависимости от Shout)
draft.Draft, # Зависит только от Author
draft.DraftAuthor, # Зависит от Draft и Author
draft.DraftTopic, # Зависит от Draft и Topic
# Основные таблицы контента
shout.Shout, # Зависит от Author и Draft
shout.ShoutAuthor, # Зависит от Shout и Author
shout.ShoutTopic, # Зависит от Shout и Topic
# Реакции
reaction.Reaction, # Зависит от Author и Shout
shout.ShoutReactionsFollower, # Зависит от Shout и Reaction
# Дополнительные таблицы
author.AuthorRating, # Зависит от Author
notification.Notification, # Зависит от Author
notification.NotificationSeen, # Зависит от Notification
# collection.Collection,
# collection.ShoutCollection,
# invite.Invite
]
with local_session() as session:
for model in models_in_order:
try:
create_table_if_not_exists(session.get_bind(), model)
# logger.info(f"Created or verified table: {model.__tablename__}")
except Exception as e:
logger.error(f"Error creating table {model.__tablename__}: {e}")
raise
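For illustration, request_graphql_data() above can be driven the same way services/webhook.py further down this changeset drives it; the sketch below reuses that GetWebhooks query, and only the wrapper function name is invented:

from services.schema import request_graphql_data
from settings import ADMIN_SECRET

async def list_auth_webhooks():
    headers = {"Content-Type": "application/json", "X-Authorizer-Admin-Secret": ADMIN_SECRET}
    gql = {
        "query": "query GetWebhooks($params: PaginatedInput!) { _webhooks(params: $params) { webhooks { id event_name endpoint } } }",
        "variables": {"params": {}},
        "operationName": "GetWebhooks",
    }
    result = await request_graphql_data(gql, headers=headers)
    return (result or {}).get("data", {}).get("_webhooks", {}).get("webhooks", [])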

View File

@@ -1,161 +1,219 @@
import asyncio import asyncio
import json import json
import logging
import os import os
import orjson
from opensearchpy import OpenSearch from opensearchpy import OpenSearch
from services.encoders import CustomJSONEncoder from services.redis import redis
from services.logger import root_logger as logger from utils.encoders import CustomJSONEncoder
from services.rediscache import redis
ELASTIC_HOST = os.environ.get('ELASTIC_HOST', '').replace('https://', '') # Set redis logging level to suppress DEBUG messages
ELASTIC_USER = os.environ.get('ELASTIC_USER', '') logger = logging.getLogger("search")
ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '') logger.setLevel(logging.WARNING)
ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200)
ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else '' ELASTIC_HOST = os.environ.get("ELASTIC_HOST", "").replace("https://", "")
ELASTIC_USER = os.environ.get("ELASTIC_USER", "")
ELASTIC_PASSWORD = os.environ.get("ELASTIC_PASSWORD", "")
ELASTIC_PORT = os.environ.get("ELASTIC_PORT", 9200)
ELASTIC_URL = os.environ.get( ELASTIC_URL = os.environ.get(
'ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}' "ELASTIC_URL",
f"https://{ELASTIC_USER}:{ELASTIC_PASSWORD}@{ELASTIC_HOST}:{ELASTIC_PORT}",
) )
REDIS_TTL = 86400 # 1 day in seconds REDIS_TTL = 86400 # 1 день в секундах
index_settings = { index_settings = {
'settings': { "settings": {
'index': {'number_of_shards': 1, 'auto_expand_replicas': '0-all'}, "index": {"number_of_shards": 1, "auto_expand_replicas": "0-all"},
'analysis': { "analysis": {
'analyzer': { "analyzer": {
'ru': { "ru": {
'tokenizer': 'standard', "tokenizer": "standard",
'filter': ['lowercase', 'ru_stop', 'ru_stemmer'], "filter": ["lowercase", "ru_stop", "ru_stemmer"],
} }
}, },
'filter': { "filter": {
'ru_stemmer': {'type': 'stemmer', 'language': 'russian'}, "ru_stemmer": {"type": "stemmer", "language": "russian"},
'ru_stop': {'type': 'stop', 'stopwords': '_russian_'}, "ru_stop": {"type": "stop", "stopwords": "_russian_"},
}, },
}, },
}, },
'mappings': { "mappings": {
'properties': { "properties": {
'body': {'type': 'text', 'analyzer': 'ru'}, "body": {"type": "text", "analyzer": "ru"},
'title': {'type': 'text', 'analyzer': 'ru'}, "title": {"type": "text", "analyzer": "ru"},
'subtitle': {'type': 'text', 'analyzer': 'ru'}, "subtitle": {"type": "text", "analyzer": "ru"},
'lead': {'type': 'text', 'analyzer': 'ru'}, "lead": {"type": "text", "analyzer": "ru"},
# 'author': {'type': 'text'}, "media": {"type": "text", "analyzer": "ru"},
} }
}, },
} }
expected_mapping = index_settings['mappings'] expected_mapping = index_settings["mappings"]
# Create an event loop # Создание цикла событий
search_loop = asyncio.get_event_loop() search_loop = asyncio.get_event_loop()
# В начале файла добавим флаг
SEARCH_ENABLED = bool(os.environ.get("ELASTIC_HOST", ""))
def get_indices_stats():
indices_stats = search_service.client.cat.indices(format="json")
for index_info in indices_stats:
index_name = index_info["index"]
if not index_name.startswith("."):
index_health = index_info["health"]
index_status = index_info["status"]
pri_shards = index_info["pri"]
rep_shards = index_info["rep"]
docs_count = index_info["docs.count"]
docs_deleted = index_info["docs.deleted"]
store_size = index_info["store.size"]
pri_store_size = index_info["pri.store.size"]
logger.info(f"Index: {index_name}")
logger.info(f"Health: {index_health}")
logger.info(f"Status: {index_status}")
logger.info(f"Primary Shards: {pri_shards}")
logger.info(f"Replica Shards: {rep_shards}")
logger.info(f"Documents Count: {docs_count}")
logger.info(f"Deleted Documents: {docs_deleted}")
logger.info(f"Store Size: {store_size}")
logger.info(f"Primary Store Size: {pri_store_size}")
class SearchService: class SearchService:
def __init__(self, index_name='search_index'): def __init__(self, index_name="search_index"):
logger.info("Инициализируем поиск...")
self.index_name = index_name self.index_name = index_name
self.client = None self.client = None
self.lock = asyncio.Lock() # Create an asyncio lock self.lock = asyncio.Lock()
# Only initialize the instance if it's not already initialized # Инициализация клиента OpenSearch только если поиск включен
if ELASTIC_HOST: if SEARCH_ENABLED:
try: try:
self.client = OpenSearch( self.client = OpenSearch(
hosts=[{'host': ELASTIC_HOST, 'port': ELASTIC_PORT}], hosts=[{"host": ELASTIC_HOST, "port": ELASTIC_PORT}],
http_compress=True, http_compress=True,
http_auth=(ELASTIC_USER, ELASTIC_PASSWORD), http_auth=(ELASTIC_USER, ELASTIC_PASSWORD),
use_ssl=True, use_ssl=True,
verify_certs=False, verify_certs=False,
ssl_assert_hostname=False, ssl_assert_hostname=False,
ssl_show_warn=False, ssl_show_warn=False,
# ca_certs = ca_certs_path
) )
logger.info(' Клиент OpenSearch.org подключен') logger.info("Клиент OpenSearch.org подключен")
# Create a task and run it in the event loop
search_loop.create_task(self.check_index()) search_loop.create_task(self.check_index())
except Exception as exc: except Exception as exc:
logger.error(f' {exc}') logger.warning(f"Поиск отключен из-за ошибки подключения: {exc}")
self.client = None self.client = None
def info(self):
if isinstance(self.client, OpenSearch):
logger.info(' Поиск подключен') # : {self.client.info()}')
else: else:
logger.info(' * Задайте переменные среды для подключения к серверу поиска') logger.info("Поиск отключен (ELASTIC_HOST не установлен)")
async def info(self):
if not SEARCH_ENABLED:
return {"status": "disabled"}
try:
return get_indices_stats()
except Exception as e:
logger.error(f"Failed to get search info: {e}")
return {"status": "error", "message": str(e)}
def delete_index(self): def delete_index(self):
if self.client: if self.client:
logger.debug(f' Удаляем индекс {self.index_name}') logger.warning(f"[!!!] Удаляем индекс {self.index_name}")
self.client.indices.delete(index=self.index_name, ignore_unavailable=True) self.client.indices.delete(index=self.index_name, ignore_unavailable=True)
def create_index(self): def create_index(self):
if self.client: if self.client:
logger.debug(f'Создается индекс: {self.index_name}') logger.info(f"Создается индекс: {self.index_name}")
self.delete_index()
self.client.indices.create(index=self.index_name, body=index_settings) self.client.indices.create(index=self.index_name, body=index_settings)
logger.debug(f'Индекс {self.index_name} создан') logger.info(f"Индекс {self.index_name} создан")
async def check_index(self): async def check_index(self):
if self.client: if self.client:
logger.debug(f' Проверяем индекс {self.index_name}...') logger.info(f"Проверяем индекс {self.index_name}...")
if not self.client.indices.exists(index=self.index_name): if not self.client.indices.exists(index=self.index_name):
self.create_index() self.create_index()
self.client.indices.put_mapping( self.client.indices.put_mapping(index=self.index_name, body=expected_mapping)
index=self.index_name, body=expected_mapping
)
else: else:
logger.info(f'найден существующий индекс {self.index_name}') logger.info(f"Найден существующий индекс {self.index_name}")
# Check if the mapping is correct, and recreate the index if needed # Проверка и обновление структуры индекса, если необходимо
result = self.client.indices.get_mapping(index=self.index_name) result = self.client.indices.get_mapping(index=self.index_name)
if isinstance(result, str): if isinstance(result, str):
result = json.loads(result) result = orjson.loads(result)
if isinstance(result, dict): if isinstance(result, dict):
mapping = result.get('mapping') mapping = result.get(self.index_name, {}).get("mappings")
if mapping and mapping != expected_mapping: logger.info(f"Найдена структура индексации: {mapping['properties'].keys()}")
logger.debug(f' найдена структура индексации: {mapping}') expected_keys = expected_mapping["properties"].keys()
logger.warn( if mapping and mapping["properties"].keys() != expected_keys:
' требуется другая структура индексации, переиндексация' logger.info(f"Ожидаемая структура индексации: {expected_mapping}")
) logger.warning("[!!!] Требуется переиндексация всех данных")
await self.recreate_index() self.delete_index()
self.client = None
async def recreate_index(self): else:
if self.client: logger.error("клиент не инициализован, невозможно проверить индекс")
async with self.lock:
self.client.indices.delete(
index=self.index_name, ignore_unavailable=True
)
await self.check_index()
def index(self, shout): def index(self, shout):
if self.client: if not SEARCH_ENABLED:
id_ = str(shout.id) return
logger.debug(f' Индексируем пост {id_}')
asyncio.create_task(self.perform_index(shout))
async def perform_index(self, shout):
if self.client: if self.client:
self.client.index(index=self.index_name, id=str(shout.id), body=shout.dict()) logger.info(f"Индексируем пост {shout.id}")
index_body = {
"body": shout.body,
"title": shout.title,
"subtitle": shout.subtitle,
"lead": shout.lead,
"media": shout.media,
}
asyncio.create_task(self.perform_index(shout, index_body))
async def perform_index(self, shout, index_body):
if self.client:
try:
await asyncio.wait_for(
self.client.index(index=self.index_name, id=str(shout.id), body=index_body), timeout=40.0
)
except asyncio.TimeoutError:
logger.error(f"Indexing timeout for shout {shout.id}")
except Exception as e:
logger.error(f"Indexing error for shout {shout.id}: {e}")
async def search(self, text, limit, offset): async def search(self, text, limit, offset):
logger.debug(f' Ищем: {text}') if not SEARCH_ENABLED:
search_body = {'query': {'match': {'_all': text}}} return []
logger.info(f"Ищем: {text} {offset}+{limit}")
search_body = {
"query": {"multi_match": {"query": text, "fields": ["title", "lead", "subtitle", "body", "media"]}}
}
if self.client: if self.client:
search_response = self.client.search( search_response = self.client.search(
index=self.index_name, body=search_body, size=limit, from_=offset index=self.index_name,
body=search_body,
size=limit,
from_=offset,
_source=False,
_source_excludes=["title", "body", "subtitle", "media", "lead", "_index"],
) )
hits = search_response['hits']['hits'] hits = search_response["hits"]["hits"]
results = [{"id": hit["_id"], "score": hit["_score"]} for hit in hits]
results = [{**hit['_source'], 'score': hit['_score']} for hit in hits] # если результаты не пустые
if results:
# Use Redis as cache with TTL # Кэширование в Redis с TTL
redis_key = f'search:{text}' redis_key = f"search:{text}:{offset}+{limit}"
await redis.execute( await redis.execute(
'SETEX', "SETEX",
redis_key, redis_key,
REDIS_TTL, REDIS_TTL,
json.dumps(results, cls=CustomJSONEncoder), json.dumps(results, cls=CustomJSONEncoder),
) )
return results
return [] return []
@@ -165,6 +223,10 @@ search_service = SearchService()
async def search_text(text: str, limit: int = 50, offset: int = 0): async def search_text(text: str, limit: int = 50, offset: int = 0):
payload = [] payload = []
if search_service.client: if search_service.client:
# Use OpenSearchService.search_post method # Использование метода search_post из OpenSearchService
payload = await search_service.search(text, limit, offset) payload = await search_service.search(text, limit, offset)
return payload return payload
# Проверить что URL корректный
OPENSEARCH_URL = os.getenv("OPENSEARCH_URL", "rc1a-3n5pi3bhuj9gieel.mdb.yandexcloud.net")
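The rewritten search() above caches results in Redis with SETEX, but nothing in this hunk reads that cache back; a hedged read-through sketch (the key format is copied from search(), while the helper name and module path are assumptions):

import orjson
from services.redis import redis
from services.search import search_text

async def cached_search(text: str, limit: int = 50, offset: int = 0):
    redis_key = f"search:{text}:{offset}+{limit}"     # same key that search() passes to SETEX
    cached = await redis.get(redis_key)
    if cached:
        return orjson.loads(cached)
    return await search_text(text, limit, offset)     # miss: query OpenSearch, which repopulates the cache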

View File

@@ -1,3 +1,5 @@
+import logging
 import sentry_sdk
 from sentry_sdk.integrations.ariadne import AriadneIntegration
 from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
@@ -5,26 +7,24 @@ from sentry_sdk.integrations.starlette import StarletteIntegration
 from settings import GLITCHTIP_DSN
+logger = logging.getLogger(__name__)
+# Настройка логирования для отправки логов в Sentry
+sentry_logging_handler = sentry_sdk.integrations.logging.SentryHandler(level=logging.WARNING)
+logger.addHandler(sentry_logging_handler)
+logger.setLevel(logging.DEBUG)  # Более подробное логирование
 def start_sentry():
-    # sentry monitoring
     try:
+        logger.info("[services.sentry] Sentry init started...")
         sentry_sdk.init(
-            GLITCHTIP_DSN,
-            # Set traces_sample_rate to 1.0 to capture 100%
-            # of transactions for performance monitoring.
-            traces_sample_rate=1.0,
-            # Set profiles_sample_rate to 1.0 to profile 100%
-            # of sampled transactions.
-            # We recommend adjusting this value in production.
-            profiles_sample_rate=1.0,
+            dsn=GLITCHTIP_DSN,
+            traces_sample_rate=1.0,  # Захват 100% транзакций
+            profiles_sample_rate=1.0,  # Профилирование 100% транзакций
             enable_tracing=True,
-            integrations=[
-                StarletteIntegration(),
-                AriadneIntegration(),
-                SqlalchemyIntegration(),
-            ],
+            integrations=[StarletteIntegration(), AriadneIntegration(), SqlalchemyIntegration()],
+            send_default_pii=True,  # Отправка информации о пользователе (PII)
         )
-    except Exception as e:
-        print('[services.sentry] init error')
-        print(e)
+        logger.info("[services.sentry] Sentry initialized successfully.")
+    except Exception as _e:
+        logger.warning("[services.sentry] Failed to initialize Sentry", exc_info=True)

View File

@@ -1,153 +0,0 @@
import asyncio
import json
from sqlalchemy import event, select
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutAuthor
from orm.topic import TopicFollower, Topic
from resolvers.stat import get_with_stat
from services.encoders import CustomJSONEncoder
from services.rediscache import redis
from services.logger import root_logger as logger
from services.cache import cache_author, cache_follows, cache_follower
DEFAULT_FOLLOWS = {
'topics': [],
'authors': [],
'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}],
}
async def handle_author_follower_change(author_id: int, follower_id: int, is_insert: bool):
logger.info(author_id)
author_query = select(Author).select_from(Author).filter(Author.id == author_id)
[author] = get_with_stat(author_query)
follower_query = select(Author).select_from(Author).filter(Author.id == follower_id)
[follower] = get_with_stat(follower_query)
if follower and author:
await cache_author(author.dict())
await cache_author(follower.dict())
await cache_follows(follower, 'author', author.dict(), is_insert)
await cache_follower(follower, author, is_insert)
async def handle_topic_follower_change(topic_id: int, follower_id: int, is_insert: bool):
logger.info(topic_id)
topic_query = select(Topic).filter(Topic.id == topic_id)
[topic] = get_with_stat(topic_query)
follower_query = select(Author).filter(Author.id == follower_id)
[follower] = get_with_stat(follower_query)
if follower and topic:
await cache_author(follower.dict())
await redis.execute('SET', f'topic:{topic.id}', json.dumps(topic.dict(), cls=CustomJSONEncoder))
await cache_follows(follower, 'topic', topic.dict(), is_insert)
# handle_author_follow and handle_topic_follow -> cache_author, cache_follows, cache_followers
def after_shout_update(_mapper, _connection, shout: Shout):
logger.info('after shout update')
# Main query to get authors associated with the shout through ShoutAuthor
authors_query = (
select(Author)
.select_from(ShoutAuthor) # Select from ShoutAuthor
.join(Author, Author.id == ShoutAuthor.author) # Join with Author
.filter(ShoutAuthor.shout == shout.id) # Filter by shout.id
)
for author_with_stat in get_with_stat(authors_query):
asyncio.create_task(cache_author(author_with_stat.dict()))
def after_reaction_update(mapper, connection, reaction: Reaction):
logger.info('after reaction update')
try:
author_subquery = select(Author).where(Author.id == reaction.created_by)
replied_author_subquery = (
select(Author)
.join(Reaction, Author.id == Reaction.created_by)
.where(Reaction.id == reaction.reply_to)
)
author_query = (
select(author_subquery.subquery())
.select_from(author_subquery.subquery())
.union(
select(replied_author_subquery.subquery()).select_from(
replied_author_subquery.subquery()
)
)
)
for author_with_stat in get_with_stat(author_query):
asyncio.create_task(cache_author(author_with_stat.dict()))
shout = connection.execute(
select(Shout).select_from(Shout).where(Shout.id == reaction.shout)
).first()
if shout:
after_shout_update(mapper, connection, shout)
except Exception as exc:
logger.error(exc)
import traceback
traceback.print_exc()
def after_author_update(_mapper, _connection, author: Author):
logger.info('after author update')
q = select(Author).where(Author.id == author.id)
result = get_with_stat(q)
if result:
[author_with_stat] = result
asyncio.create_task(cache_author(author_with_stat.dict()))
def after_topic_follower_insert(_mapper, _connection, target: TopicFollower):
logger.info(target)
asyncio.create_task(
handle_topic_follower_change(target.topic, target.follower, True)
)
def after_topic_follower_delete(_mapper, _connection, target: TopicFollower):
logger.info(target)
asyncio.create_task(
handle_topic_follower_change(target.topic, target.follower, False)
)
def after_author_follower_insert(_mapper, _connection, target: AuthorFollower):
logger.info(target)
asyncio.create_task(
handle_author_follower_change(target.author, target.follower, True)
)
def after_author_follower_delete(_mapper, _connection, target: AuthorFollower):
logger.info(target)
asyncio.create_task(
handle_author_follower_change(target.author, target.follower, False)
)
def events_register():
event.listen(Shout, 'after_insert', after_shout_update)
event.listen(Shout, 'after_update', after_shout_update)
event.listen(Reaction, 'after_insert', after_reaction_update)
event.listen(Reaction, 'after_update', after_reaction_update)
event.listen(Author, 'after_insert', after_author_update)
event.listen(Author, 'after_update', after_author_update)
event.listen(AuthorFollower, 'after_insert', after_author_follower_insert)
event.listen(AuthorFollower, 'after_delete', after_author_follower_delete)
event.listen(TopicFollower, 'after_insert', after_topic_follower_insert)
event.listen(TopicFollower, 'after_delete', after_topic_follower_delete)
logger.info('cache events were registered!')

View File

@@ -1,24 +0,0 @@
import json
from services.rediscache import redis
async def get_unread_counter(chat_id: str, author_id: int) -> int:
r = await redis.execute('LLEN', f'chats/{chat_id}/unread/{author_id}')
if isinstance(r, str):
return int(r)
elif isinstance(r, int):
return r
else:
return 0
async def get_total_unread_counter(author_id: int) -> int:
chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
s = 0
if isinstance(chats_set, str):
chats_set = json.loads(chats_set)
if isinstance(chats_set, list):
for chat_id in chats_set:
s += await get_unread_counter(chat_id, author_id)
return s

View File

@@ -1,28 +1,35 @@
import asyncio import asyncio
import json
import os import os
import time import time
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from typing import Dict from typing import Dict
import orjson
# ga # ga
from google.analytics.data_v1beta import BetaAnalyticsDataClient from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import (DateRange, Dimension, Metric, from google.analytics.data_v1beta.types import (
RunReportRequest) DateRange,
Dimension,
Metric,
RunReportRequest,
)
from google.analytics.data_v1beta.types import Filter as GAFilter
from orm.author import Author from orm.author import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic from orm.topic import Topic
from services.db import local_session from services.db import local_session
from services.logger import root_logger as logger from utils.logger import root_logger as logger
GOOGLE_KEYFILE_PATH = os.environ.get('GOOGLE_KEYFILE_PATH', '/dump/google-service.json') GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")
GOOGLE_PROPERTY_ID = os.environ.get('GOOGLE_PROPERTY_ID', '') GOOGLE_PROPERTY_ID = os.environ.get("GOOGLE_PROPERTY_ID", "")
VIEWS_FILEPATH = '/dump/views.json' VIEWS_FILEPATH = "/dump/views.json"
class ViewedStorage: class ViewedStorage:
lock = asyncio.Lock() lock = asyncio.Lock()
precounted_by_slug = {}
views_by_shout = {} views_by_shout = {}
shouts_by_topic = {} shouts_by_topic = {}
shouts_by_author = {} shouts_by_author = {}
@@ -30,8 +37,8 @@ class ViewedStorage:
period = 60 * 60 # каждый час period = 60 * 60 # каждый час
analytics_client: BetaAnalyticsDataClient | None = None analytics_client: BetaAnalyticsDataClient | None = None
auth_result = None auth_result = None
disabled = False running = False
start_date = int(time.time()) start_date = datetime.now().strftime("%Y-%m-%d")
@staticmethod @staticmethod
async def init(): async def init():
@@ -41,67 +48,66 @@ class ViewedStorage:
# Загрузка предварительно подсчитанных просмотров из файла JSON # Загрузка предварительно подсчитанных просмотров из файла JSON
self.load_precounted_views() self.load_precounted_views()
os.environ.setdefault('GOOGLE_APPLICATION_CREDENTIALS', GOOGLE_KEYFILE_PATH) os.environ.setdefault("GOOGLE_APPLICATION_CREDENTIALS", GOOGLE_KEYFILE_PATH)
if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH): if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH):
# Using a default constructor instructs the client to use the credentials # Using a default constructor instructs the client to use the credentials
# specified in GOOGLE_APPLICATION_CREDENTIALS environment variable. # specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
self.analytics_client = BetaAnalyticsDataClient() self.analytics_client = BetaAnalyticsDataClient()
logger.info(' * Клиент Google Analytics успешно авторизован') logger.info(" * Google Analytics credentials accepted")
# Запуск фоновой задачи # Запуск фоновой задачи
_task = asyncio.create_task(self.worker()) _task = asyncio.create_task(self.worker())
else: else:
logger.info(' * Пожалуйста, добавьте ключевой файл Google Analytics') logger.warning(" * please, add Google Analytics credentials file")
self.disabled = True self.running = False
@staticmethod @staticmethod
def load_precounted_views(): def load_precounted_views():
"""Загрузка предварительно подсчитанных просмотров из файла JSON""" """Загрузка предварительно подсчитанных просмотров из файла JSON"""
self = ViewedStorage self = ViewedStorage
viewfile_path = VIEWS_FILEPATH
if not os.path.exists(viewfile_path):
viewfile_path = os.path.join(os.path.curdir, "views.json")
if not os.path.exists(viewfile_path):
logger.warning(" * views.json not found")
return
logger.info(f" * loading views from {viewfile_path}")
try: try:
if os.path.exists(VIEWS_FILEPATH): start_date_int = os.path.getmtime(viewfile_path)
self.file_modification_timestamp = os.path.getmtime(VIEWS_FILEPATH) start_date_str = datetime.fromtimestamp(start_date_int).strftime("%Y-%m-%d")
self.start_date = datetime.fromtimestamp( self.start_date = start_date_str
self.file_modification_timestamp now_date = datetime.now().strftime("%Y-%m-%d")
).strftime('%Y-%m-%d')
now_date = datetime.now().strftime('%Y-%m-%d')
if now_date == self.start_date: if now_date == self.start_date:
logger.info(' * Данные актуализованы!') logger.info(" * views data is up to date!")
else:
logger.warn(
f' * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}'
)
with open(VIEWS_FILEPATH, 'r') as file:
precounted_views = json.load(file)
self.views_by_shout.update(precounted_views)
logger.info(
f' * {len(precounted_views)} публикаций с просмотрами успешно загружены.'
)
else: else:
logger.info(' * Файл просмотров не найден.') logger.warn(f" * {viewfile_path} is too old: {self.start_date}")
with open(viewfile_path, "r") as file:
precounted_views = orjson.loads(file.read())
self.precounted_by_slug.update(precounted_views)
logger.info(f" * {len(precounted_views)} shouts with views was loaded.")
except Exception as e: except Exception as e:
logger.error(f'Ошибка загрузки предварительно подсчитанных просмотров: {e}') logger.error(f"precounted views loading error: {e}")
# noinspection PyTypeChecker # noinspection PyTypeChecker
@staticmethod @staticmethod
async def update_pages(): async def update_pages():
"""Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров""" """Запрос всех страниц от Google Analytics, отсортрованных по количеству просмотров"""
self = ViewedStorage self = ViewedStorage
logger.info(' ⎧ Обновление данных просмотров от Google Analytics ---') logger.info(" ⎧ views update from Google Analytics ---")
if not self.disabled: if self.running:
try: try:
start = time.time() start = time.time()
async with self.lock: async with self.lock:
if self.analytics_client: if self.analytics_client:
request = RunReportRequest( request = RunReportRequest(
property=f'properties/{GOOGLE_PROPERTY_ID}', property=f"properties/{GOOGLE_PROPERTY_ID}",
dimensions=[Dimension(name='pagePath')], dimensions=[Dimension(name="pagePath")],
metrics=[Metric(name='screenPageViews')], metrics=[Metric(name="screenPageViews")],
date_ranges=[ date_ranges=[DateRange(start_date=self.start_date, end_date="today")],
DateRange(start_date=self.start_date, end_date='today')
],
) )
response = self.analytics_client.run_report(request) response = self.analytics_client.run_report(request)
if response and isinstance(response.rows, list): if response and isinstance(response.rows, list):
@@ -114,115 +120,152 @@ class ViewedStorage:
# Извлечение путей страниц из ответа Google Analytics # Извлечение путей страниц из ответа Google Analytics
if isinstance(row.dimension_values, list): if isinstance(row.dimension_values, list):
page_path = row.dimension_values[0].value page_path = row.dimension_values[0].value
slug = page_path.split('discours.io/')[-1] slug = page_path.split("discours.io/")[-1]
views_count = int(row.metric_values[0].value) fresh_views = int(row.metric_values[0].value)
# Обновление данных в хранилище # Обновление данных в хранилище
self.views_by_shout[slug] = self.views_by_shout.get( self.views_by_shout[slug] = self.views_by_shout.get(slug, 0)
slug, 0 self.views_by_shout[slug] += fresh_views
)
self.views_by_shout[slug] += views_count
self.update_topics(slug) self.update_topics(slug)
# Запись путей страниц для логирования # Запись путей страниц для логирования
slugs.add(slug) slugs.add(slug)
logger.info(f' ⎪ Собрано страниц: {len(slugs)} ') logger.info(f" ⎪ collected pages: {len(slugs)} ")
end = time.time() end = time.time()
logger.info(' ⎪ Обновление страниц заняло %fs ' % (end - start)) logger.info(" ⎪ views update time: %fs " % (end - start))
except Exception as error: except Exception as error:
logger.error(error) logger.error(error)
self.running = False
@staticmethod @staticmethod
async def get_shout(shout_slug) -> int: def get_shout(shout_slug="", shout_id=0) -> int:
"""Получение метрики просмотров shout по slug""" """Получение метрики просмотров shout по slug или id."""
self = ViewedStorage self = ViewedStorage
async with self.lock: fresh_views = self.views_by_shout.get(shout_slug, 0)
return self.views_by_shout.get(shout_slug, 0) precounted_views = self.precounted_by_slug.get(shout_slug, 0)
return fresh_views + precounted_views
@staticmethod @staticmethod
async def get_shout_media(shout_slug) -> Dict[str, int]: def get_shout_media(shout_slug) -> Dict[str, int]:
"""Получение метрики воспроизведения shout по slug""" """Получение метрики воспроизведения shout по slug."""
self = ViewedStorage self = ViewedStorage
async with self.lock:
return self.views_by_shout.get(shout_slug, 0) # TODO: get media plays from Google Analytics
return self.views_by_shout.get(shout_slug, 0)
@staticmethod @staticmethod
async def get_topic(topic_slug) -> int: def get_topic(topic_slug) -> int:
"""Получение суммарного значения просмотров темы""" """Получение суммарного значения просмотров темы."""
self = ViewedStorage self = ViewedStorage
topic_views = 0 return sum(self.views_by_shout.get(shout_slug, 0) for shout_slug in self.shouts_by_topic.get(topic_slug, []))
async with self.lock:
for shout_slug in self.shouts_by_topic.get(topic_slug, []):
topic_views += self.views_by_shout.get(shout_slug, 0)
return topic_views
@staticmethod @staticmethod
async def get_author(author_slug) -> int: def get_author(author_slug) -> int:
"""Получение суммарного значения просмотров автора""" """Получение суммарного значения просмотров автора."""
self = ViewedStorage self = ViewedStorage
author_views = 0 return sum(self.views_by_shout.get(shout_slug, 0) for shout_slug in self.shouts_by_author.get(author_slug, []))
async with self.lock:
for shout_slug in self.shouts_by_author.get(author_slug, []):
author_views += self.views_by_shout.get(shout_slug, 0)
return author_views
@staticmethod @staticmethod
def update_topics(shout_slug): def update_topics(shout_slug):
"""Обновление счетчиков темы по slug shout""" """Обновление счетчиков темы по slug shout"""
self = ViewedStorage self = ViewedStorage
with local_session() as session: with local_session() as session:
# Определение вспомогательной функции для избежания повторения кода # Определение вспомогательной функции для избежания повторения кода
def update_groups(dictionary, key, value): def update_groups(dictionary, key, value):
dictionary[key] = list(set(dictionary.get(key, []) + [value])) dictionary[key] = list(set(dictionary.get(key, []) + [value]))
# Обновление тем и авторов с использованием вспомогательной функции # Обновление тем и авторов с использованием вспомогательной функции
for [_shout_topic, topic] in ( for [_st, topic] in (
session.query(ShoutTopic, Topic) session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all()
.join(Topic)
.join(Shout)
.where(Shout.slug == shout_slug)
.all()
): ):
update_groups(self.shouts_by_topic, topic.slug, shout_slug) update_groups(self.shouts_by_topic, topic.slug, shout_slug)
for [_shout_topic, author] in ( for [_st, author] in (
session.query(ShoutAuthor, Author) session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all()
.join(Author)
.join(Shout)
.where(Shout.slug == shout_slug)
.all()
): ):
update_groups(self.shouts_by_author, author.slug, shout_slug) update_groups(self.shouts_by_author, author.slug, shout_slug)
@staticmethod
async def stop():
"""Остановка фоновой задачи"""
self = ViewedStorage
async with self.lock:
self.running = False
logger.info("ViewedStorage worker was stopped.")
@staticmethod @staticmethod
async def worker(): async def worker():
"""Асинхронная задача обновления""" """Асинхронная задача обновления"""
failed = 0 failed = 0
self = ViewedStorage self = ViewedStorage
if self.disabled:
return
while True: while self.running:
try: try:
await self.update_pages() await self.update_pages()
failed = 0 failed = 0
except Exception as exc: except Exception as exc:
failed += 1 failed += 1
logger.debug(exc) logger.debug(exc)
logger.info(' - Обновление не удалось #%d, ожидание 10 секунд' % failed) logger.info(" - update failed #%d, wait 10 secs" % failed)
if failed > 3: if failed > 3:
logger.info(' - Больше не пытаемся обновить') logger.info(" - views update failed, not trying anymore")
self.running = False
break break
if failed == 0: if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period) when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat()) t = format(when.astimezone().isoformat())
logger.info( logger.info(" ⎩ next update: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]))
' ⎩ Следующее обновление: %s'
% (t.split('T')[0] + ' ' + t.split('T')[1].split('.')[0])
)
await asyncio.sleep(self.period) await asyncio.sleep(self.period)
else: else:
await asyncio.sleep(10) await asyncio.sleep(10)
logger.info(' - Попытка снова обновить данные') logger.info(" - try to update views again")
@staticmethod
async def update_slug_views(slug: str) -> int:
"""
Получает fresh статистику просмотров для указанного slug.
Args:
slug: Идентификатор страницы
Returns:
int: Количество просмотров
"""
self = ViewedStorage
if not self.analytics_client:
logger.warning("Google Analytics client not initialized")
return 0
try:
# Создаем фильтр для точного совпадения конца URL
request = RunReportRequest(
property=f"properties/{GOOGLE_PROPERTY_ID}",
date_ranges=[DateRange(start_date=self.start_date, end_date="today")],
dimensions=[Dimension(name="pagePath")],
dimension_filter=GAFilter(
field_name="pagePath",
string_filter=GAFilter.StringFilter(
value=f".*/{slug}$", # Используем регулярное выражение для точного совпадения конца URL
match_type=GAFilter.StringFilter.MatchType.FULL_REGEXP,
case_sensitive=False, # Включаем чувствительность к регистру для точности
),
),
metrics=[Metric(name="screenPageViews")],
)
response = self.analytics_client.run_report(request)
if not response.rows:
return 0
views = int(response.rows[0].metric_values[0].value)
# Кэшируем результат
self.views_by_shout[slug] = views
return views
except Exception as e:
logger.error(f"Google Analytics API Error: {e}")
return 0
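A small sketch of how a resolver might read the storage above now that get_shout() is synchronous: it sums fresh Google Analytics counts with the precounted views loaded from views.json, and update_slug_views() can force a per-slug refresh (the module path and the wrapper function are assumptions):

from services.viewed import ViewedStorage

async def shout_stat(slug: str) -> dict:
    views = ViewedStorage.get_shout(shout_slug=slug)        # precounted + fresh, no await needed anymore
    if views == 0:
        views = await ViewedStorage.update_slug_views(slug) # ask Google Analytics for this slug directly
    return {"slug": slug, "viewed": views}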

View File

@@ -1,13 +1,124 @@
import asyncio
import os
import re
from asyncio.log import logger
from sqlalchemy import select
from starlette.endpoints import HTTPEndpoint
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from cache.cache import cache_author
from orm.author import Author
from resolvers.stat import get_with_stat
from services.db import local_session
from services.schema import request_graphql_data
from settings import ADMIN_SECRET, WEBHOOK_SECRET
async def check_webhook_existence():
"""
Проверяет существование вебхука для user.login события
Returns:
tuple: (bool, str, str) - существует ли вебхук, его id и endpoint если существует
"""
logger.info("check_webhook_existence called")
if not ADMIN_SECRET:
logger.error("ADMIN_SECRET is not set")
return False, None, None
headers = {"Content-Type": "application/json", "X-Authorizer-Admin-Secret": ADMIN_SECRET}
operation = "GetWebhooks"
query_name = "_webhooks"
variables = {"params": {}}
# https://docs.authorizer.dev/core/graphql-api#_webhooks
gql = {
"query": f"query {operation}($params: PaginatedInput!)"
+ "{"
+ f"{query_name}(params: $params) {{ webhooks {{ id event_name endpoint }} }} "
+ "}",
"variables": variables,
"operationName": operation,
}
result = await request_graphql_data(gql, headers=headers)
if result:
webhooks = result.get("data", {}).get(query_name, {}).get("webhooks", [])
logger.info(webhooks)
for webhook in webhooks:
if webhook["event_name"].startswith("user.login"):
return True, webhook["id"], webhook["endpoint"]
return False, None, None
async def create_webhook_endpoint():
"""
Создает вебхук для user.login события.
Если существует старый вебхук - удаляет его и создает новый.
"""
logger.info("create_webhook_endpoint called")
headers = {"Content-Type": "application/json", "X-Authorizer-Admin-Secret": ADMIN_SECRET}
exists, webhook_id, current_endpoint = await check_webhook_existence()
# Определяем endpoint в зависимости от окружения
host = os.environ.get("HOST", "core.dscrs.site")
endpoint = f"https://{host}/new-author"
if exists:
# Если вебхук существует, но с другим endpoint или с модифицированным именем
if current_endpoint != endpoint or webhook_id:
# https://docs.authorizer.dev/core/graphql-api#_delete_webhook
operation = "DeleteWebhook"
query_name = "_delete_webhook"
variables = {"params": {"id": webhook_id}} # Изменено с id на webhook_id
gql = {
"query": f"mutation {operation}($params: WebhookRequest!)"
+ "{"
+ f"{query_name}(params: $params) {{ message }} "
+ "}",
"variables": variables,
"operationName": operation,
}
try:
await request_graphql_data(gql, headers=headers)
exists = False
except Exception as e:
logger.error(f"Failed to delete webhook: {e}")
# Продолжаем выполнение даже при ошибке удаления
exists = False
else:
logger.info(f"Webhook already exists and configured correctly: {webhook_id}")
return
if not exists:
# https://docs.authorizer.dev/core/graphql-api#_add_webhook
operation = "AddWebhook"
query_name = "_add_webhook"
variables = {
"params": {
"event_name": "user.login",
"endpoint": endpoint,
"enabled": True,
"headers": {"Authorization": WEBHOOK_SECRET},
}
}
gql = {
"query": f"mutation {operation}($params: AddWebhookRequest!)"
+ "{"
+ f"{query_name}(params: $params) {{ message }} "
+ "}",
"variables": variables,
"operationName": operation,
}
try:
result = await request_graphql_data(gql, headers=headers)
logger.info(result)
except Exception as e:
logger.error(f"Failed to create webhook: {e}")
class WebhookEndpoint(HTTPEndpoint): class WebhookEndpoint(HTTPEndpoint):
@@ -15,50 +126,50 @@ class WebhookEndpoint(HTTPEndpoint):
try: try:
data = await request.json() data = await request.json()
if not data: if not data:
raise HTTPException(status_code=400, detail='Request body is empty') raise HTTPException(status_code=400, detail="Request body is empty")
auth = request.headers.get('Authorization') auth = request.headers.get("Authorization")
if not auth or auth != os.environ.get('WEBHOOK_SECRET'): if not auth or auth != os.environ.get("WEBHOOK_SECRET"):
raise HTTPException( raise HTTPException(status_code=401, detail="Invalid Authorization header")
status_code=401, detail='Invalid Authorization header'
)
# logger.debug(data) # logger.debug(data)
user = data.get('user') user = data.get("user")
if not isinstance(user, dict): if not isinstance(user, dict):
raise HTTPException( raise HTTPException(status_code=400, detail="User data is not a dictionary")
status_code=400, detail='User data is not a dictionary' #
)
user_id: str = user.get('id')
name: str = ( name: str = (
f"{user.get('given_name', user.get('slug'))} {user.get('middle_name', '')}" f"{user.get('given_name', user.get('slug'))} {user.get('middle_name', '')}"
+ "{user.get('family_name', '')}".strip() + f"{user.get('family_name', '')}".strip()
) or 'Аноним' ) or "Аноним"
email: str = user.get('email', '') user_id: str = user.get("id", "")
pic: str = user.get('picture', '') email: str = user.get("email", "")
pic: str = user.get("picture", "")
if user_id:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
# If the author does not exist, create a new one
slug: str = email.split("@")[0].replace(".", "-").lower()
slug: str = re.sub("[^0-9a-z]+", "-", slug)
while True:
author = session.query(Author).filter(Author.slug == slug).first()
if not author:
break
slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"
author = Author(user=user_id, slug=slug, name=name, pic=pic)
session.add(author)
session.commit()
author_query = select(Author).filter(Author.user == user_id)
result = get_with_stat(author_query)
if result:
author_with_stat = result[0]
author_dict = author_with_stat.dict()
# await cache_author(author_with_stat)
asyncio.create_task(cache_author(author_dict))
with local_session() as session: return JSONResponse({"status": "success"})
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
# If the author does not exist, create a new one
slug: str = email.split('@')[0].replace('.', '-').lower()
slug: str = re.sub('[^0-9a-z]+', '-', slug)
while True:
author = (
session.query(Author).filter(Author.slug == slug).first()
)
if not author:
break
slug = f'{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}'
author = Author(user=user_id, slug=slug, name=name, pic=pic)
session.add(author)
session.commit()
return JSONResponse({'status': 'success'})
except HTTPException as e: except HTTPException as e:
return JSONResponse( return JSONResponse({"status": "error", "message": str(e.detail)}, status_code=e.status_code)
{'status': 'error', 'message': str(e.detail)}, status_code=e.status_code
)
except Exception as e: except Exception as e:
import traceback import traceback
traceback.print_exc() traceback.print_exc()
return JSONResponse({'status': 'error', 'message': str(e)}, status_code=500) return JSONResponse({"status": "error", "message": str(e)}, status_code=500)
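For reference, a hedged sketch of exercising the rewritten handler above: Authorizer's user.login webhook posts a JSON body with a user object to the /new-author endpoint registered by create_webhook_endpoint(), authenticated by the shared WEBHOOK_SECRET header (the field names are taken from the handler; the test harness and sample values are assumptions):

from starlette.testclient import TestClient
from main import app
from settings import WEBHOOK_SECRET

def simulate_user_login_webhook():
    client = TestClient(app)
    payload = {
        "user": {
            "id": "authorizer-user-id",
            "email": "new.author@example.com",
            "given_name": "New",
            "family_name": "Author",
            "picture": "https://example.com/pic.png",
        }
    }
    # The handler compares this header with the WEBHOOK_SECRET environment variable
    response = client.post("/new-author", json=payload, headers={"Authorization": WEBHOOK_SECRET})
    assert response.json()["status"] == "success"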

View File

@@ -1,17 +1,29 @@
 import sys
 from os import environ
-PORT = 8000
-DB_URL = (
-    environ.get('DATABASE_URL', '').replace('postgres://', 'postgresql://')
-    or environ.get('DB_URL', '').replace('postgres://', 'postgresql://')
-    or 'postgresql://postgres@localhost:5432/discoursio'
-)
-REDIS_URL = environ.get('REDIS_URL') or 'redis://127.0.0.1'
-API_BASE = environ.get('API_BASE') or ''
-AUTH_URL = environ.get('AUTH_URL') or ''
-GLITCHTIP_DSN = environ.get('GLITCHTIP_DSN')
-DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
-MODE = 'development' if 'dev' in sys.argv else 'production'
-ADMIN_SECRET = environ.get('AUTH_SECRET') or 'nothing'
+MODE = "development" if "dev" in sys.argv else "production"
+DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
+PORT = environ.get("PORT") or 8000
+# storages
+DB_URL = (
+    environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
+    or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
+    or "sqlite:///discoursio.db"
+)
+REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
+# debug
+GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN")
+# authorizer.dev
+AUTH_URL = environ.get("AUTH_URL") or "https://auth.discours.io/graphql"
+ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing"
+WEBHOOK_SECRET = environ.get("WEBHOOK_SECRET") or "nothing-else"
+# own auth
+ONETIME_TOKEN_LIFE_SPAN = 60 * 60 * 24 * 3  # 3 days
+SESSION_TOKEN_LIFE_SPAN = 60 * 60 * 24 * 30  # 30 days
+JWT_ALGORITHM = "HS256"
+JWT_SECRET_KEY = environ.get("JWT_SECRET") or "nothing-else-jwt-secret-matters"

tests/conftest.py (new file, 60 lines)

@@ -0,0 +1,60 @@
import asyncio
import os
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from starlette.testclient import TestClient
from main import app
from services.db import Base
from services.redis import redis
# Use SQLite for testing
TEST_DB_URL = "sqlite:///test.db"
@pytest.fixture(scope="session")
def event_loop():
"""Create an instance of the default event loop for the test session."""
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
loop.close()
@pytest.fixture(scope="session")
def test_engine():
"""Create a test database engine."""
engine = create_engine(TEST_DB_URL)
Base.metadata.create_all(engine)
yield engine
Base.metadata.drop_all(engine)
os.remove("test.db")
@pytest.fixture
def db_session(test_engine):
"""Create a new database session for a test."""
connection = test_engine.connect()
transaction = connection.begin()
session = Session(bind=connection)
yield session
session.close()
transaction.rollback()
connection.close()
@pytest.fixture
async def redis_client():
"""Create a test Redis client."""
await redis.connect()
yield redis
await redis.disconnect()
@pytest.fixture
def test_client():
"""Create a TestClient instance."""
return TestClient(app)

tests/test_drafts.py (new file, 94 lines)

@@ -0,0 +1,94 @@
import pytest
from orm.author import Author
from orm.shout import Shout
@pytest.fixture
def test_author(db_session):
"""Create a test author."""
author = Author(name="Test Author", slug="test-author", user="test-user-id")
db_session.add(author)
db_session.commit()
return author
@pytest.fixture
def test_shout(db_session):
"""Create test shout with required fields."""
author = Author(name="Test Author", slug="test-author", user="test-user-id")
db_session.add(author)
db_session.flush()
shout = Shout(
title="Test Shout",
slug="test-shout",
created_by=author.id, # Обязательное поле
body="Test body",
layout="article",
lang="ru",
)
db_session.add(shout)
db_session.commit()
return shout
@pytest.mark.asyncio
async def test_create_shout(test_client, db_session, test_author):
"""Test creating a new shout."""
response = test_client.post(
"/",
json={
"query": """
mutation CreateDraft($draft_input: DraftInput!) {
create_draft(draft_input: $draft_input) {
error
draft {
id
title
body
}
}
}
""",
"variables": {
"input": {
"title": "Test Shout",
"body": "This is a test shout",
}
},
},
)
assert response.status_code == 200
data = response.json()
assert "errors" not in data
assert data["data"]["create_draft"]["draft"]["title"] == "Test Shout"
@pytest.mark.asyncio
async def test_load_drafts(test_client, db_session):
"""Test retrieving a shout."""
response = test_client.post(
"/",
json={
"query": """
query {
load_drafts {
error
drafts {
id
title
body
}
}
}
""",
"variables": {"slug": "test-shout"},
},
)
assert response.status_code == 200
data = response.json()
assert "errors" not in data
assert data["data"]["load_drafts"]["drafts"] == []

tests/test_reactions.py (new file, 64 lines)

@@ -0,0 +1,64 @@
from datetime import datetime
import pytest
from orm.author import Author
from orm.reaction import ReactionKind
from orm.shout import Shout
@pytest.fixture
def test_setup(db_session):
"""Set up test data."""
now = int(datetime.now().timestamp())
author = Author(name="Test Author", slug="test-author", user="test-user-id")
db_session.add(author)
db_session.flush()
shout = Shout(
title="Test Shout",
slug="test-shout",
created_by=author.id,
body="This is a test shout",
layout="article",
lang="ru",
community=1,
created_at=now,
updated_at=now,
)
db_session.add_all([author, shout])
db_session.commit()
return {"author": author, "shout": shout}
@pytest.mark.asyncio
async def test_create_reaction(test_client, db_session, test_setup):
"""Test creating a reaction on a shout."""
response = test_client.post(
"/",
json={
"query": """
mutation CreateReaction($reaction: ReactionInput!) {
create_reaction(reaction: $reaction) {
error
reaction {
id
kind
body
created_by {
name
}
}
}
}
""",
"variables": {
"reaction": {"shout": test_setup["shout"].id, "kind": ReactionKind.LIKE.value, "body": "Great post!"}
},
},
)
assert response.status_code == 200
data = response.json()
assert "error" not in data
assert data["data"]["create_reaction"]["reaction"]["kind"] == ReactionKind.LIKE.value

tests/test_shouts.py

@@ -0,0 +1,85 @@
from datetime import datetime
import pytest
from orm.author import Author
from orm.shout import Shout
@pytest.fixture
def test_shout(db_session):
"""Create test shout with required fields."""
author = Author(name="Test Author", slug="test-author", user="test-user-id")
db_session.add(author)
db_session.flush()
now = int(datetime.now().timestamp())
shout = Shout(
title="Test Shout",
slug="test-shout",
created_by=author.id,
body="Test body",
layout="article",
lang="ru",
community=1,
created_at=now,
updated_at=now,
)
db_session.add(shout)
db_session.commit()
return shout
@pytest.mark.asyncio
async def test_get_shout(test_client, db_session):
"""Test retrieving a shout."""
# Create the author
author = Author(name="Test Author", slug="test-author", user="test-user-id")
db_session.add(author)
db_session.flush()
now = int(datetime.now().timestamp())
# Create a shout with all required fields
shout = Shout(
title="Test Shout",
body="This is a test shout",
slug="test-shout",
created_by=author.id,
layout="article",
lang="ru",
community=1,
created_at=now,
updated_at=now,
)
db_session.add(shout)
db_session.commit()
response = test_client.post(
"/",
json={
"query": """
query GetShout($slug: String!) {
get_shout(slug: $slug) {
id
title
body
created_at
updated_at
created_by {
id
name
slug
}
}
}
""",
"variables": {"slug": "test-shout"},
},
)
data = response.json()
assert response.status_code == 200
assert "errors" not in data
assert data["data"]["get_shout"]["title"] == "Test Shout"

tests/test_validations.py

@@ -0,0 +1,70 @@
from datetime import datetime, timedelta
import pytest
from pydantic import ValidationError
from auth.validations import (
AuthInput,
AuthResponse,
TokenPayload,
UserRegistrationInput,
)
class TestAuthValidations:
def test_auth_input(self):
"""Test basic auth input validation"""
# Valid case
auth = AuthInput(user_id="123", username="testuser", token="1234567890abcdef1234567890abcdef")
assert auth.user_id == "123"
assert auth.username == "testuser"
# Invalid cases
with pytest.raises(ValidationError):
AuthInput(user_id="", username="test", token="x" * 32)
with pytest.raises(ValidationError):
AuthInput(user_id="123", username="t", token="x" * 32)
def test_user_registration(self):
"""Test user registration validation"""
# Valid case
user = UserRegistrationInput(email="test@example.com", password="SecurePass123!", name="Test User")
assert user.email == "test@example.com"
assert user.name == "Test User"
# Test email validation
with pytest.raises(ValidationError) as exc:
UserRegistrationInput(email="invalid-email", password="SecurePass123!", name="Test")
assert "Invalid email format" in str(exc.value)
# Test password validation
with pytest.raises(ValidationError) as exc:
UserRegistrationInput(email="test@example.com", password="weak", name="Test")
assert "String should have at least 8 characters" in str(exc.value)
def test_token_payload(self):
"""Test token payload validation"""
now = datetime.utcnow()
exp = now + timedelta(hours=1)
payload = TokenPayload(user_id="123", username="testuser", exp=exp, iat=now)
assert payload.user_id == "123"
assert payload.username == "testuser"
assert payload.scopes == [] # Default empty list
def test_auth_response(self):
"""Test auth response validation"""
# Success case
success_resp = AuthResponse(success=True, token="valid_token", user={"id": "123", "name": "Test"})
assert success_resp.success is True
assert success_resp.token == "valid_token"
# Error case
error_resp = AuthResponse(success=False, error="Invalid credentials")
assert error_resp.success is False
assert error_resp.error == "Invalid credentials"
# Invalid case: the required token field is missing while success=True
with pytest.raises(ValidationError):
AuthResponse(success=True, user={"id": "123", "name": "Test"})


@@ -29,19 +29,19 @@ def apply_diff(original, diff):
         The modified string.
     """
     result = []
-    pattern = re.compile(r'^(\+|-) ')
+    pattern = re.compile(r"^(\+|-) ")
     for line in diff:
         match = pattern.match(line)
         if match:
             op = match.group(1)
             content = line[2:]
-            if op == '+':
+            if op == "+":
                 result.append(content)
-            elif op == '-':
+            elif op == "-":
                 # Ignore deleted lines
                 pass
         else:
             result.append(line)
-    return ' '.join(result)
+    return " ".join(result)
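
The hunk above only switches single quotes to double quotes (a formatting pass); the logic of apply_diff is unchanged. A minimal usage sketch, assuming the function is importable from a utils module (the file name is not shown in this hunk, so the import path is a guess):

# Hypothetical import path -- the hunk does not name the file.
from utils.diff import apply_diff

diff_lines = [
    "the",        # unprefixed lines are kept as-is
    "- quick",    # "- " lines are dropped
    "+ slow",     # "+ " lines are inserted as their content
    "brown fox",
]

# Kept tokens are re-joined with single spaces.
print(apply_diff("the quick brown fox", diff_lines))  # -> "the slow brown fox"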

utils/encoders.py

@@ -0,0 +1,28 @@
from decimal import Decimal
from json import JSONEncoder
class CustomJSONEncoder(JSONEncoder):
"""
Extended JSON encoder that also supports serializing SQLAlchemy-style objects.
Examples:
>>> import json
>>> from decimal import Decimal
>>> from orm.topic import Topic
>>> json.dumps(Decimal("10.50"), cls=CustomJSONEncoder)
'"10.50"'
>>> topic = Topic(id=1, slug="test")
>>> json.dumps(topic, cls=CustomJSONEncoder)
'{"id": 1, "slug": "test", ...}'
"""
def default(self, obj):
if isinstance(obj, Decimal):
return str(obj)
# Check whether the object exposes a dict() method (as SQLAlchemy models do)
if hasattr(obj, "dict") and callable(obj.dict):
return obj.dict()
return super().default(obj)
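
A standalone usage sketch without the ORM dependency from the docstring; the Invoice class here is a hypothetical stand-in for any object that exposes a dict() method:

import json
from decimal import Decimal

from utils.encoders import CustomJSONEncoder

class Invoice:
    """Hypothetical model-like object exposing dict(), as the SQLAlchemy models do."""
    def dict(self):
        return {"id": 1, "total": Decimal("10.50")}

# Decimal values become strings; objects with dict() are serialized via that method.
print(json.dumps({"invoice": Invoice()}, cls=CustomJSONEncoder))
# -> {"invoice": {"id": 1, "total": "10.50"}}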

utils/logger.py

@@ -0,0 +1,111 @@
import logging
from pathlib import Path
import colorlog
_lib_path = Path(__file__).parents[1]
_leng_path = len(_lib_path.as_posix())
def filter(record: logging.LogRecord):
# Define `package` attribute with the relative path.
record.package = record.pathname[_leng_path + 1 :].replace(".py", "")
record.emoji = (
"🔍"
if record.levelno == logging.DEBUG
else ""
if record.levelno == logging.INFO
else "🚧"
if record.levelno == logging.WARNING
else ""
if record.levelno == logging.ERROR
else "🧨"
if record.levelno == logging.CRITICAL
else ""
)
return record
# Define the color scheme
color_scheme = {
"DEBUG": "light_black",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red,bg_white",
}
# Define secondary log colors
secondary_colors = {
"log_name": {"DEBUG": "blue"},
"asctime": {"DEBUG": "cyan"},
"process": {"DEBUG": "purple"},
"module": {"DEBUG": "light_black,bg_blue"},
"funcName": {"DEBUG": "light_white,bg_blue"}, # Add this line
}
# Define the log format string
fmt_string = "%(emoji)s%(log_color)s%(package)s.%(funcName)s%(reset)s %(white)s%(message)s"
# Define formatting configuration
fmt_config = {
"log_colors": color_scheme,
"secondary_log_colors": secondary_colors,
"style": "%",
"reset": True,
}
class MultilineColoredFormatter(colorlog.ColoredFormatter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.log_colors = kwargs.pop("log_colors", {})
self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})
def format(self, record):
# Add default emoji if not present
if not hasattr(record, "emoji"):
record = filter(record)
message = record.getMessage()
if "\n" in message:
lines = message.split("\n")
first_line = lines[0]
record.message = first_line
formatted_first_line = super().format(record)
formatted_lines = [formatted_first_line]
for line in lines[1:]:
formatted_lines.append(line)
return "\n".join(formatted_lines)
else:
return super().format(record)
# Create a MultilineColoredFormatter object for colorized logging
formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
# Create a stream handler for logging output
stream = logging.StreamHandler()
stream.setFormatter(formatter)
def get_colorful_logger(name="main"):
# Create and configure the logger
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)
logger.addFilter(filter)
return logger
# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)
root_logger.addFilter(filter)
ignore_logs = ["_trace", "httpx", "_client", "atrace", "aiohttp"]
for lgr in ignore_logs:
loggr = logging.getLogger(lgr)
loggr.setLevel(logging.INFO)
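
A minimal usage sketch of the module (logger name and messages are arbitrary):

from utils.logger import get_colorful_logger

# Records come out prefixed with a level emoji and the package-relative path,
# e.g. something like "🔍utils/logger.format ..." for DEBUG output.
log = get_colorful_logger(__name__)

log.debug("loading shout by slug=%s", "test-shout")
log.warning("cache miss for topic %d", 1)

Since the root logger is given the same stream handler, records from named loggers may be emitted twice unless propagation is disabled on them.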