1151 Commits

Author SHA1 Message (deploy status) Date
9911a9410d .. (Deploy on push: failing after 7s) 2024-04-10 16:09:03 +03:00
25868ec27b logger-fix (Deploy on push: failing after 6s) 2024-04-09 22:37:58 +03:00
25a65d09d6 tolerate (Deploy on push: failing after 6s) 2024-04-09 22:35:11 +03:00
cd99041bcc add-author-stat-fix (Deploy on push: failing after 6s) 2024-04-09 22:32:15 +03:00
1110f7d8ec any-id-fix-2 (Deploy on push: failing after 7s) 2024-04-09 22:24:47 +03:00
e0df7e7436 any-id-fix (Deploy on push: failing after 6s) 2024-04-09 22:09:26 +03:00
44647bbf39 author-stat-fix (Deploy on push: failing after 5s) 2024-04-09 22:06:00 +03:00
103fcfd045 trace-fix (Deploy on push: failing after 6s) 2024-04-09 22:02:26 +03:00
3f2c00a1df get-author-fix-3 (Deploy on push: failing after 6s) 2024-04-09 21:53:35 +03:00
3cc680754b get-author-fix-3 (Deploy on push: failing after 7s) 2024-04-09 21:51:24 +03:00
d7db2689c8 get-author-fix (Deploy on push: failing after 7s) 2024-04-09 21:46:21 +03:00
23288d1f91 query-debug-3 (Deploy on push: failing after 6s) 2024-04-09 21:39:59 +03:00
1b00086148 query-debug-2 (Deploy on push: failing after 6s) 2024-04-09 21:15:38 +03:00
0501b0f38e outerjoin-fix (Deploy on push: failing after 6s) 2024-04-09 21:08:47 +03:00
6703e3d093 authors-stat-fix 2024-04-09 21:06:34 +03:00
10c24fe400 topic-stat-fix 2024-04-09 21:05:24 +03:00
489c3c3232 any-fix-4 2024-04-09 20:59:03 +03:00
64f473e037 any-fix-3 2024-04-09 20:56:47 +03:00
202c8461f5 any-fix-2 2024-04-09 20:55:35 +03:00
cf64090ac3 any-fix (Deploy on push: failing after 6s) 2024-04-09 20:54:00 +03:00
f22b37cc91 has-fix (Deploy on push: failing after 6s) 2024-04-09 20:51:32 +03:00
e9fa53aff9 glitchtip (Deploy on push: failing after 6s) 2024-04-09 19:50:27 +03:00
d3262accc5 shout-topic-comments (Deploy on push: failing after 7s) 2024-04-09 19:48:02 +03:00
142a5f09af .. (Deploy on push: failing after 6s) 2024-04-09 19:40:44 +03:00
c6a4f04779 topic-stat-fix (Deploy on push: failing after 6s) 2024-04-09 19:38:02 +03:00
4fe15d1440 topic-stat-join-fix (Deploy on push: successful in 23s) 2024-04-09 18:06:29 +03:00
e529ecbe41 params-fix-2 2024-04-09 17:57:22 +03:00
7be4642f5d params-fix (Deploy on push: successful in 35s) 2024-04-09 17:55:07 +03:00
3fd94dc0fa notification-check (Deploy on push: successful in 34s) 2024-04-09 17:51:23 +03:00
9e6f81606b import-fix (Deploy on push: successful in 26s) 2024-04-09 16:59:41 +03:00
2bf456b343 reactions-cache-update (Deploy on push: successful in 34s) 2024-04-09 16:43:06 +03:00
1769b0925b follow-if-liked-fix (Deploy on push: successful in 25s) 2024-04-09 14:03:50 +03:00
5e8c1ac30b tolerate-notifier-fails 2024-04-09 13:46:27 +03:00
6e17b89f26 author-get-fix (Deploy on push: successful in 24s) 2024-04-09 13:41:30 +03:00
739b7b40d6 follower-id-fix (Deploy on push: successful in 25s) 2024-04-09 13:38:44 +03:00
b3eda4a0e1 result-fix (Deploy on push: successful in 23s) 2024-04-09 13:32:11 +03:00
dd0c5d15fd fix2 (Deploy on push: successful in 25s) 2024-04-09 13:30:48 +03:00
e587ed05df found-author-fix 2024-04-09 13:30:02 +03:00
5bbfd2249f topic-cache-fix (Deploy on push: successful in 24s) 2024-04-09 11:30:20 +03:00
d3ae078b20 refactored-cache-following (Deploy on push: successful in 36s) 2024-04-09 11:17:32 +03:00
b802bb029a cache-upgrade (Deploy on push: successful in 22s) 2024-04-08 21:33:47 +03:00
d1cd69eb2a async-fig (Deploy on push: successful in 23s) 2024-04-08 12:42:45 +03:00
c301256751 precommit (Deploy on push: successful in 49s) 2024-04-08 10:38:58 +03:00
df15e63dde reindex-fix (Deploy on push: successful in 23s) 2024-04-08 10:23:54 +03:00
aa1693cc16 sentry-init-fix (Deploy on push: successful in 23s) 2024-04-08 09:17:05 +03:00
Stepan Vladovskiy 8aa133aab1 feat: nginx with limit_conn_zone 10m change place (Deploy on push: successful in 24s) 2024-04-07 14:31:38 -03:00
Stepan Vladovskiy acaea73a38 feat: with limit_conn_zone 10m (Deploy on push: failing after 20s) 2024-04-07 14:29:21 -03:00
Stepan Vladovskiy f4c43f7c00 feat: events worker_connections in global nginx.conf (Deploy on push: failing after 20s) 2024-04-07 14:25:56 -03:00
Stepan Vladovskiy 7c19291ba9 feat: nginx worker events config in Dockerfile (Deploy on push: failing after 9s) 2024-04-07 13:45:59 -03:00
Stepan Vladovskiy 0da9c87f5a feat: nginx with cach, keepalive, proxy_read, users_from_one_ip, workers (Deploy on push: failing after 21s) 2024-04-07 13:39:37 -03:00
Stepan Vladovskiy c9369e3c08 feat: simple glitchtip setup, without all (Deploy on push: successful in 22s) 2024-04-03 01:21:19 -03:00
Stepan Vladovskiy 4166f8e695 feat: make all like in docs (Deploy on push: successful in 23s) 2024-04-01 17:44:18 -03:00
Stepan Vladovskiy c8776df610 debug: with glitchtip amd middleware in main.py (Deploy on push: successful in 22s) 2024-04-01 00:25:14 -03:00
Stepan Vladovskiy deb8da2363 feat: with glitchtip amd middleware in main.py (Deploy on push: successful in 38s) 2024-04-01 00:11:48 -03:00
Stepan Vladovskiy 1970b197a5 feat: with glitchtip in main.py (Deploy on push: successful in 34s) 2024-04-01 00:01:38 -03:00
232f41b905 isolate-ratings (Deploy on push: successful in 23s) 2024-03-29 14:44:44 +03:00
c159490413 rating-fix-9 (Deploy on push: successful in 21s) 2024-03-29 03:03:37 +03:00
dd840b63ca rating-fix-8 (Deploy on push: successful in 22s) 2024-03-29 02:56:25 +03:00
d06b8eaa4e rating-fix-7 (Deploy on push: successful in 22s) 2024-03-29 02:50:38 +03:00
d529daea25 rating-fix-6 (Deploy on push: successful in 23s) 2024-03-29 02:45:23 +03:00
489e6b39a9 rating-fix-5 (Deploy on push: successful in 21s) 2024-03-29 02:40:08 +03:00
943b52e067 rating-fix-4 (Deploy on push: successful in 22s) 2024-03-29 02:37:26 +03:00
99895d1b94 rating-fix-3 (Deploy on push: successful in 22s) 2024-03-29 02:31:59 +03:00
3f68e25230 rating-fix-2 (Deploy on push: successful in 25s) 2024-03-29 02:29:16 +03:00
9cc0c5b011 rating-fix (Deploy on push: successful in 23s) 2024-03-29 02:15:38 +03:00
a4dd56ee44 comments-rating-fix-3 (Deploy on push: successful in 23s) 2024-03-29 01:49:30 +03:00
53c067ff80 comments-rating-fix-2 (Deploy on push: successful in 23s) 2024-03-29 01:34:50 +03:00
cc8f08588c comments-rating-fix (Deploy on push: successful in 24s) 2024-03-29 00:36:19 +03:00
b8f08c3411 comments-rating (Deploy on push: successful in 24s) 2024-03-29 00:29:28 +03:00
8f532b0023 author-stat-fix-9 2024-03-28 23:59:53 +03:00
4b5c101f2f author-stat-fix-8 2024-03-28 23:59:26 +03:00
f8f3a32556 author-stat-fix-7 2024-03-28 23:39:12 +03:00
8ff0e6786b author-stat-fix-6 (Deploy on push: successful in 24s) 2024-03-28 23:33:56 +03:00
e9c852d23d author-stat-fix-5 (Deploy on push: successful in 23s) 2024-03-28 23:26:45 +03:00
feede764bf author-stat-fix-4 (Deploy on push: successful in 23s) 2024-03-28 23:19:07 +03:00
e426a2b087 author-stat-fix-3 (Deploy on push: successful in 22s) 2024-03-28 22:53:02 +03:00
284250770e author-stat-fix (Deploy on push: successful in 22s) 2024-03-28 22:51:09 +03:00
d74a6dedaa comments-count-fix (Deploy on push: successful in 22s) 2024-03-28 22:41:48 +03:00
0a767a14b6 author-rating-4 (Deploy on push: successful in 24s) 2024-03-28 22:31:33 +03:00
2f4019ca6f author-rating-3 (Deploy on push: successful in 23s) 2024-03-28 22:29:51 +03:00
b023773cc6 author-rating-2 (Deploy on push: successful in 24s) 2024-03-28 22:26:46 +03:00
34e12975fe get-author-stat-debug (Deploy on push: successful in 24s) 2024-03-28 22:10:01 +03:00
c9605cf918 get (Deploy on push: successful in 22s) 2024-03-28 20:45:26 +03:00
ea16de3f1a rating-fix (Deploy on push: successful in 23s) 2024-03-28 20:45:03 +03:00
d6bf3e1602 rating-stat-fix (Deploy on push: successful in 23s) 2024-03-28 20:42:22 +03:00
029e6af161 debloat-get-author (Deploy on push: successful in 23s) 2024-03-28 20:36:35 +03:00
5c41312b1d with-stat-cached-fix-3 (Deploy on push: successful in 21s) 2024-03-28 19:51:09 +03:00
495b296508 with-stat-cached-fix-2 (Deploy on push: successful in 23s) 2024-03-28 19:45:21 +03:00
1eeff25b4d with-stat-cached-fix (Deploy on push: successful in 21s) 2024-03-28 19:40:54 +03:00
1f012ae5c9 revalidate-stat (Deploy on push: successful in 21s) 2024-03-28 19:39:10 +03:00
77440388d3 author-refactored (Deploy on push: successful in 23s) 2024-03-28 19:36:27 +03:00
736877d50e cached-stat-fix (Deploy on push: successful in 22s) 2024-03-28 19:22:47 +03:00
0f57bea256 renew-stat 2024-03-28 19:21:57 +03:00
9647ec9708 scalar (Deploy on push: successful in 23s) 2024-03-28 19:16:47 +03:00
a4957ef0ad stat-fix (Deploy on push: successful in 23s) 2024-03-28 19:14:39 +03:00
2d538a292a refactored-get-author-4 (Deploy on push: successful in 23s) 2024-03-28 19:11:26 +03:00
9d8831d7ed refactored-get-author-3 (Deploy on push: successful in 23s) 2024-03-28 19:08:55 +03:00
8826af02b5 refactored-get-author (Deploy on push: successful in 24s) 2024-03-28 19:05:27 +03:00
e103b283cb dblog-debug5 (Deploy on push: successful in 22s) 2024-03-28 16:37:04 +03:00
9a12cbcdde dblog-debug3 (Deploy on push: successful in 36s) 2024-03-28 16:34:27 +03:00
6bc4fe42c4 dblog-debug3 (Deploy on push: successful in 22s) 2024-03-28 16:30:04 +03:00
556857fc28 dblog-debug2 (Deploy on push: successful in 22s) 2024-03-28 16:28:17 +03:00
23fb4227ad dblog-debug (Deploy on push: successful in 22s) 2024-03-28 16:25:51 +03:00
057b43730e dblog-fix-2 (Deploy on push: successful in 22s) 2024-03-28 16:17:34 +03:00
bb0412bb5c dblog-fix (Deploy on push: successful in 22s) 2024-03-28 16:05:28 +03:00
e9be761420 dblog-fox (Deploy on push: successful in 22s) 2024-03-28 16:01:48 +03:00
9bda7cef95 fmt (Deploy on push: successful in 22s) 2024-03-28 15:56:32 +03:00
7f913050ee author-follows-result-type-debug (Deploy on push: successful in 21s) 2024-03-28 15:48:58 +03:00
73c3d47f1b author-follows-result-type-debug (Deploy on push: successful in 22s) 2024-03-28 15:43:41 +03:00
72b9bb407d compact-author-ratings (Deploy on push: successful in 22s) 2024-03-28 15:38:14 +03:00
1eb3d54dd0 author-follows-result-type- (Deploy on push: successful in 22s) 2024-03-28 15:11:08 +03:00
e7149e905a author-follows-result-type (Deploy on push: successful in 23s) 2024-03-28 15:04:46 +03:00
2ee87c975a get_author-follows-fixed 2024-03-28 14:58:47 +03:00
cf6230e8d6 get_author-follows-fixed (Deploy on push: successful in 22s) 2024-03-28 14:57:21 +03:00
054077c99e get_author-follows-debug-3 (Deploy on push: successful in 22s) 2024-03-28 14:56:08 +03:00
3d28370362 get_author-follows-debug-2 (Deploy on push: successful in 22s) 2024-03-28 14:38:03 +03:00
6c9fd23e67 get_author-follows-debгп (Deploy on push: successful in 21s) 2024-03-28 14:13:18 +03:00
95c54ff0c4 get_author-follows-debug (Deploy on push: successful in 22s) 2024-03-28 14:09:11 +03:00
e2faec5893 scalar-fix (Deploy on push: successful in 23s) 2024-03-28 14:05:46 +03:00
6f016f236d caching-fixes 2024-03-28 14:05:06 +03:00
7907e5bc4f get_author_follows-fix (Deploy on push: successful in 23s) 2024-03-28 13:37:28 +03:00
65fd4df5ef get_author_follows-fix 2024-03-28 13:33:41 +03:00
235b908766 logger-fix (Deploy on push: successful in 1m17s) 2024-03-26 11:57:00 +03:00
3eacc142f2 unrated-fix (Deploy on push: successful in 23s) 2024-03-25 21:07:32 +03:00
9eb2ad21d0 filters-fix (Deploy on push: successful in 21s) 2024-03-25 20:41:28 +03:00
f03a6d0efe filter-my-fix (Deploy on push: successful in 21s) 2024-03-25 20:38:46 +03:00
e9611fc8c1 feed-filters-fix (Deploy on push: successful in 21s) 2024-03-25 20:28:58 +03:00
337fa82fb4 last-comment-fix (Deploy on push: successful in 23s) 2024-03-25 19:50:23 +03:00
d92d280595 typo-fix (Deploy on push: successful in 23s) 2024-03-25 15:31:16 +03:00
fab57469d3 random-top-shouts (Deploy on push: successful in 46s) 2024-03-25 15:03:03 +03:00
4daf746976 views-independant 2024-03-19 16:24:25 +03:00
e97ffacd23 update-after-debug-2 2024-03-18 15:01:43 +03:00
c346481ade update-after-debug 2024-03-18 15:01:10 +03:00
818b4ccae9 debug-get-with-stat 2024-03-14 10:21:04 +03:00
837763ed64 get-author-fix-2 2024-03-14 09:59:38 +03:00
ab36dfe233 debug-get-author 2024-03-14 09:55:14 +03:00
64b1498215 authorid-fix (Deploy on push: successful in 25s) 2024-03-14 01:35:09 +03:00
ff7c5df8de trigdeploy (Deploy on push: successful in 25s) 2024-03-13 23:02:41 +03:00
3231e42428 query-fix 2024-03-13 15:53:40 +03:00
324f069844 following-error-fix (Deploy on push: successful in 25s) 2024-03-13 15:35:49 +03:00
1dd34d5818 following-error 2024-03-13 15:34:17 +03:00
4c0f3087db nginx-ports (Deploy on push: failing after 19s) 2024-03-13 12:50:40 +03:00
13bff800f0 author-id-faster (Deploy on push: failing after 20s) 2024-03-13 12:44:08 +03:00
13e2a4b7ba log-color-fix-4 (Deploy on push: failing after 20s) 2024-03-12 18:27:58 +03:00
9a15cda218 log-color-fix-3 2024-03-12 18:24:44 +03:00
695c9a97eb log-color-fix 2024-03-12 18:17:28 +03:00
b6691b1b7b logger-fix 2024-03-12 18:14:34 +03:00
4667168636 logs-fox 2024-03-12 17:48:34 +03:00
9c7c5fb8d2 multiline-logger-fix 2024-03-12 17:40:55 +03:00
e99acd591a cached-all 2024-03-12 17:26:52 +03:00
a3303837d5 cached-load-fix-2 2024-03-12 17:00:20 +03:00
567f41c0c3 cached-load-fix 2024-03-12 16:50:14 +03:00
23547546cb cached-authors-fix 2024-03-12 16:46:18 +03:00
0b8776a87f topics-fix 2024-03-12 16:23:01 +03:00
358cc86197 debug-topics 2024-03-12 16:21:28 +03:00
6064f0326a dogpiled-cache-authors 2024-03-12 16:18:07 +03:00
625836afee authorsby-not-cached 2024-03-12 16:07:21 +03:00
3e57ef5948 views-log-fix 2024-03-12 15:57:46 +03:00
9b7aa57a18 cache-reform 2024-03-12 15:50:57 +03:00
d1a510b093 use-cached-following 2024-03-12 15:28:20 +03:00
26a527473f use-cached-authors 2024-03-12 15:26:36 +03:00
d5a9a18c04 dict-fix 2024-03-12 15:05:45 +03:00
480485c20a circular-fix 2024-03-12 15:01:45 +03:00
37319c2091 cache-events-fix 2024-03-12 14:59:36 +03:00
91ffcb85df typechecker-column-fix 2024-03-12 10:52:32 +03:00
04f7231fe9 refactored-2 2024-03-12 10:36:34 +03:00
a7944f5176 refactored 2024-03-12 10:35:33 +03:00
0e1df1e7ca followers-update-fix 2024-03-12 08:00:42 +03:00
059dd0f9b4 remove-follow-debug 2024-03-11 17:07:37 +03:00
78dbde6273 dict-fix 2024-03-11 16:58:31 +03:00
e6f5cfcb8d return-error-on-follow 2024-03-11 16:52:16 +03:00
ebf08ea2ed clean 2024-03-11 16:17:52 +03:00
c6e045d5ee follows-brushed 2024-03-11 16:12:28 +03:00
4bc469ab04 dbeug-follow-3 2024-03-11 15:50:44 +03:00
11f3cdeb7c dbeug-follow-2 2024-03-11 15:41:24 +03:00
9944277908 tuple-fix 2024-03-11 15:21:34 +03:00
8b5a50b7ae author-id-fix 2024-03-11 15:19:10 +03:00
b45ad1082d author-id-fix 2024-03-11 15:18:51 +03:00
10f8faccdd follows-return 2024-03-11 15:15:28 +03:00
4898e43f57 follow-unfollow-2 2024-03-11 15:13:46 +03:00
df55b68a5a follow-unfollow 2024-03-11 14:49:42 +03:00
23be0da876 search-log 2024-03-11 13:47:12 +03:00
e50bbcdb7c debug-unfollow 2024-03-11 13:44:48 +03:00
b3196f6dcb reaction-after-fix 2024-03-11 13:41:15 +03:00
ebbd1d729e reaction-after-debug 2024-03-11 13:39:12 +03:00
e6cd0ecadc unfollow-fix-2 2024-03-11 13:37:35 +03:00
1572c77882 remove-logs 2024-03-11 12:43:37 +03:00
bda2b7b59a unfollow-debug 2024-03-11 12:41:00 +03:00
7234eb9519 less-log 2024-03-11 12:37:34 +03:00
b18ba16aab update_follows_for_author-call-fix 2024-03-11 12:25:08 +03:00
b58406866c set_follows_authors_cache-fix 2024-03-11 12:22:28 +03:00
9933545383 debug-get_author_follows 2024-03-11 12:20:50 +03:00
1c7729a5b9 query-fixes 2024-03-11 12:10:14 +03:00
e23c49b6c6 redis-fix 2024-03-11 12:07:19 +03:00
5f7087b0df follow-fix 2024-03-11 12:03:41 +03:00
1162c62a9b logger-auth 2024-03-11 11:59:20 +03:00
6243c27390 handle-no-userid 2024-03-11 11:56:14 +03:00
bf1068d070 follow-unfollow-fix 2024-03-11 11:33:39 +03:00
20cc14adc6 debug-follow 2024-03-11 11:16:12 +03:00
94be60304e refactored-get-my-shout-topics (Deploy on push: failing after 20s) 2024-03-07 14:46:03 +03:00
0182b501fe refactored-get-my-shout-2 2024-03-07 14:44:07 +03:00
0d111bda47 refactored-get-my-shout 2024-03-07 14:42:48 +03:00
6f3ed3704a get-my-shout-api-fix 2024-03-07 14:29:45 +03:00
61088320c9 patch-main-topic-fix 2024-03-07 11:55:23 +03:00
e378cbd442 rm-reaction-fix 2024-03-07 10:18:05 +03:00
c84aae40d3 rm-reaction-debug 2024-03-07 08:13:19 +03:00
e4e681a9ab logs-with-params 2024-03-06 22:18:32 +03:00
5c7b28de90 custom-encoder-fix-3 2024-03-06 22:05:17 +03:00
7a5cbf7438 custom-encoder-fix-2 2024-03-06 22:00:37 +03:00
2b89ab7c78 custom-encoder-fix 2024-03-06 21:57:04 +03:00
4aa4303a59 groupby-fix-2 2024-03-06 15:17:46 +03:00
b13d57ca17 groupby-fix 2024-03-06 15:16:29 +03:00
54eeb5b549 subquery-fix-2 2024-03-06 15:13:05 +03:00
83f12202a8 subquery-fix 2024-03-06 15:11:01 +03:00
045217c011 shout-id-fix 2024-03-06 15:08:20 +03:00
30f5b09a51 typo-fix 2024-03-06 14:27:30 +03:00
7199539a28 reaction-cudl-log 2024-03-06 14:09:21 +03:00
2c1bfaf0fe topics-comments-stat 2024-03-06 13:43:30 +03:00
70c5233305 oauth-name-patch 2024-03-06 13:04:23 +03:00
b82a4bb2fa add_author_stat-fix-2 2024-03-06 12:34:17 +03:00
9f881c0641 add_author_stat-fix+fmt 2024-03-06 12:25:55 +03:00
70589a35da cosmetics 2024-03-06 12:15:26 +03:00
6e046a677c less-cond 2024-03-06 12:09:46 +03:00
c55f696bf3 typo-fix 2024-03-06 12:07:40 +03:00
8bbbe2b0c7 delete-reaction-fix (Deploy on push: failing after 18s) 2024-03-06 12:03:26 +03:00
cb535cffea forbidden-fix (Deploy on push: failing after 20s) 2024-03-06 10:44:08 +03:00
b09ea39668 get-my-shout-resolver 2024-03-05 20:12:17 +03:00
5d8c46e76c drafts-resolver-1 (Deploy on push: failing after 19s) 2024-03-05 18:53:18 +03:00
b5727b1b85 update-shout-fix-10 2024-03-05 18:17:48 +03:00
13f6c43df2 update-shout-fix-9 2024-03-05 18:15:21 +03:00
f378925a16 update-shout-fix-8 2024-03-05 18:13:39 +03:00
f68778e529 update-shout-fix-7 2024-03-05 18:10:58 +03:00
fa76d6c7b4 update-shout-fix-6 2024-03-05 18:04:47 +03:00
ee7c464065 update-shout-fix-5 2024-03-05 18:01:47 +03:00
78c7a41c46 update-shout-fix-4 2024-03-05 18:01:29 +03:00
5943f9bf81 update-shout-fix-3 2024-03-05 17:59:00 +03:00
7c75c2accc update-shout-fix-2 2024-03-05 17:53:49 +03:00
12a9880815 update-shout-fix (Deploy on push: failing after 19s) 2024-03-05 16:59:55 +03:00
130942d9dd decor-order-fix 2024-03-05 14:56:19 +03:00
005889c470 less-scope-exception-5 2024-03-05 14:50:50 +03:00
16c425fd5e less-scope-exception-4 2024-03-05 14:45:53 +03:00
cc3e7b982b less-scope-exception- 2024-03-05 14:44:19 +03:00
3e96366887 less-scope-exception-2 2024-03-05 14:39:47 +03:00
c8b55d0d5b less-scope-exception 2024-03-05 14:38:04 +03:00
1099f8a185 401-ex 2024-03-05 12:50:01 +03:00
8a449bbe7a get-shout-access (Deploy on push: failing after 19s) 2024-03-05 12:00:45 +03:00
ef25ebc7bc result-fix-2 (Deploy on push: failing after 19s) 2024-03-04 21:08:01 +03:00
2f4747a5de result-fix 2024-03-04 20:34:11 +03:00
e4915dcd7d debug-logs (Deploy on push: failing after 20s) 2024-03-04 20:25:47 +03:00
b62f40d549 webhook-fix (Deploy on push: failing after 19s) 2024-03-04 20:24:17 +03:00
21bcda1e3b webhook-fix 2024-03-04 19:08:21 +03:00
5ff28ce31b schema-update 2024-03-04 15:48:04 +03:00
36fefd93be offset-entity-fix (Deploy on push: failing after 18s) 2024-03-04 15:47:17 +03:00
abfe9f6e0e notifier-fixes (Deploy on push: failing after 18s) 2024-03-04 13:43:02 +03:00
88ca5a1362 notifier-schema-fix (Deploy on push: failing after 19s) 2024-03-04 10:59:14 +03:00
3016a75332 notifier-integration (Deploy on push: failing after 19s) 2024-03-04 10:35:33 +03:00
ad0dc98bc9 webhook-fix 2024-03-03 16:59:15 +03:00
ab7d677a20 long-queries-only 2024-03-01 12:20:06 +03:00
da0a709ce7 sqlalchemy-warn-fix 2024-03-01 12:18:06 +03:00
ef36e38007 groupby-fix 2024-03-01 10:32:18 +03:00
3a04a69d24 typofix+topic-stat 2024-03-01 09:59:19 +03:00
c41ae4ba98 comments-stat-subquery 2024-03-01 09:56:36 +03:00
b0136fd9bc follows-return (Deploy on push: failing after 20s) 2024-03-01 00:51:49 +03:00
bdf78bb45d comments-order-hotfix-2 (Deploy on push: failing after 20s) 2024-02-29 20:00:35 +03:00
bd905021ae coalesce-desc-sord 2024-02-29 15:52:36 +03:00
978595c246 drafts-ordered 2024-02-29 15:50:26 +03:00
dfbfa9335c get-author-slug-fix 2024-02-29 15:47:32 +03:00
1a563420d3 reaction-sort-type 2024-02-29 15:39:55 +03:00
4d992f1b60 aliased-revert 2024-02-29 15:21:46 +03:00
cc16163673 order-field-fix-2 2024-02-29 15:19:53 +03:00
395120ad7a order-field-fix 2024-02-29 15:17:42 +03:00
acb804f78c sa-warns-back 2024-02-29 15:15:04 +03:00
0437052280 comments-sort-order-fix 2024-02-29 15:10:59 +03:00
fc3bb52431 reindex-fix-6 2024-02-29 14:56:50 +03:00
cb85e24a11 recreate-fixed-4 2024-02-29 14:48:08 +03:00
c8acf6a9ac recreate-fixed-2 (Deploy on push: failing after 18s) 2024-02-29 14:41:32 +03:00
8de765ed50 recreate-fixed 2024-02-29 14:28:51 +03:00
7ad9b7919a search-logs (Deploy on push: failing after 19s) 2024-02-29 14:24:53 +03:00
5df82704b3 indexing-fix- 2024-02-29 14:17:10 +03:00
2b530131e5 indexing-fix-5 2024-02-29 14:12:35 +03:00
67d1a3ae5c indexing-fix-4 2024-02-29 14:11:48 +03:00
ca3065f741 indexing-fix-3 2024-02-29 14:09:50 +03:00
f07fd646d3 indexing-fix (Deploy on push: failing after 18s) 2024-02-29 14:04:24 +03:00
0ea4e596d2 indexing 2024-02-29 13:55:44 +03:00
14c2750d92 search-thread-2 2024-02-29 13:49:34 +03:00
b4f86526a2 search-thread 2024-02-29 13:48:20 +03:00
24cbba0746 search-reindex-fix (Deploy on push: failing after 19s) 2024-02-29 13:43:41 +03:00
e656920f7b search-reindex-fix 2024-02-29 13:43:30 +03:00
435279735b viewes-service-refactor (Deploy on push: failing after 19s) 2024-02-29 13:18:17 +03:00
9f30f251d6 update-shout-fix 2024-02-29 13:14:14 +03:00
d28024a69b logs-fix 2024-02-29 13:04:25 +03:00
cfb0ba910f redeploy 2024-02-29 12:14:45 +03:00
62b90d73a7 views-logs-fix 2024-02-29 11:48:18 +03:00
aaa39e0a0d no-cursor-events (Deploy on push: successful in 1m31s) 2024-02-29 11:29:28 +03:00
5bec25fc23 less-logs (Deploy on push: failing after 5s) 2024-02-29 11:12:54 +03:00
a3c94a9ab7 load-authors-by-fix (Deploy on push: failing after 5s) 2024-02-29 11:00:41 +03:00
5e8b7cfe98 followers-cache-fixes 2024-02-29 10:42:17 +03:00
977b86a3c6 fix-followers-save 2024-02-29 10:39:07 +03:00
5e400a7618 redis-keys-renamed 2024-02-29 10:34:22 +03:00
10248ffd8c debug-followers-cache 2024-02-29 10:31:49 +03:00
f774c54cc2 followers-cached (Deploy on push: failing after 5s) 2024-02-29 10:23:08 +03:00
caf45f3d42 .dict-fxt (Deploy on push: failing after 6s) 2024-02-29 10:02:29 +03:00
ad5b4a81c3 get-author-debug (Deploy on push: failing after 5s) 2024-02-29 09:48:41 +03:00
ceecef6a7a return-none (Deploy on push: failing after 5s) 2024-02-29 09:44:04 +03:00
b26da8f316 search-debug (Deploy on push: failing after 6s) 2024-02-29 09:34:40 +03:00
f52c13e082 staging-deploy-test (Deploy on push: failing after 6s) 2024-02-29 07:56:23 +03:00
31320c9972 revert-2-queries-less-price (Deploy on push: successful in 25s) 2024-02-28 19:24:05 +03:00
b99ed1a7d1 groupby-fix (Deploy on push: successful in 25s) 2024-02-28 19:14:57 +03:00
6c0b43bd14 random-topic-shouts-patch-2 (Deploy on push: successful in 25s) 2024-02-28 18:20:58 +03:00
7a3ce4a982 .c (Deploy on push: successful in 24s) 2024-02-28 18:15:19 +03:00
ac1fc151ab random-topic-shouts-patch (Deploy on push: successful in 25s) 2024-02-28 18:11:51 +03:00
129c4bccf4 get-followers-scalar-fix (Deploy on push: successful in 28s) 2024-02-27 17:03:21 +03:00
a993741cf2 get-followers-fix-3 (Deploy on push: successful in 24s) 2024-02-27 16:56:00 +03:00
04d918749f get-followers-fix-2 (Deploy on push: successful in 25s) 2024-02-27 16:52:11 +03:00
fa7b05a86e get-author-followers-fix (Deploy on push: successful in 24s) 2024-02-27 16:42:26 +03:00
eadae7f639 logger-improved-2 (Deploy on push: successful in 24s) 2024-02-27 16:41:09 +03:00
4c328370c2 logger-improved (Deploy on push: successful in 29s) 2024-02-27 16:33:25 +03:00
eb295549fb update-tolerate 2024-02-27 16:28:54 +03:00
2e68128dfc cache-refactored (Deploy on push: successful in 24s) 2024-02-27 15:40:53 +03:00
564a8c10b7 cache-author-with-stat (Deploy on push: successful in 24s) 2024-02-27 14:53:13 +03:00
8d058b4902 delete-shout-tolerate (Deploy on push: successful in 25s) 2024-02-27 14:29:28 +03:00
52f46555a7 auth-fix (Deploy on push: successful in 23s) 2024-02-27 14:16:54 +03:00
fc0e3b5541 authlogs2 (Deploy on push: successful in 26s) 2024-02-27 14:06:00 +03:00
def6921215 authlogs (Deploy on push: successful in 25s) 2024-02-27 13:56:21 +03:00
a962435898 root-auth-logs-3 (Deploy on push: successful in 26s) 2024-02-27 13:55:11 +03:00
7434c47755 root-auth-logs-2 2024-02-27 13:54:47 +03:00
401c058f32 root-auth-logs (Deploy on push: successful in 26s) 2024-02-27 13:49:06 +03:00
9f49cde0d7 notuple (Deploy on push: successful in 30s) 2024-02-27 13:40:56 +03:00
03568ecea0 login-required-async-fix (Deploy on push: successful in 26s) 2024-02-27 13:21:50 +03:00
4ee4c3595a async-create-shout-fix (Deploy on push: successful in 24s) 2024-02-27 13:07:14 +03:00
82e129a589 less-fields-author-serlect-after-reaction 2024-02-27 12:58:24 +03:00
193332f6d8 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev (Deploy on push: successful in 25s) 2024-02-27 12:49:17 +03:00
cbd8ba6b68 authors-subquery-json-fix 2024-02-27 12:47:42 +03:00
Stepan Vladovskiy 145c5cdbc2 feat: Cors with mp3 and clean up basura (Deploy on push: successful in 25s) 2024-02-27 06:05:01 -03:00
ef2f8dca82 compound-select-fix-2 (Deploy on push: successful in 23s) 2024-02-27 11:22:48 +03:00
a5636af259 compound-select-fix (Deploy on push: successful in 25s) 2024-02-27 11:19:46 +03:00
8914dfc8b0 select_from-author-2 (Deploy on push: successful in 25s) 2024-02-27 11:09:04 +03:00
23b7fe7af9 select_from-author (Deploy on push: successful in 25s) 2024-02-27 11:07:24 +03:00
1214dc03d9 less-logs (Deploy on push: successful in 25s) 2024-02-27 10:53:53 +03:00
fc6b8d3a08 debug-cached-authpr (Deploy on push: successful in 25s) 2024-02-27 10:41:36 +03:00
3efcfef537 sort-fix-2 (Deploy on push: successful in 25s) 2024-02-26 20:26:57 +03:00
be27e7306c sort-fix (Deploy on push: successful in 25s) 2024-02-26 20:07:42 +03:00
02b504cc4f no-distinct (Deploy on push: successful in 30s) 2024-02-26 20:05:07 +03:00
02b2aad813 no-comments-stat (Deploy on push: successful in 25s) 2024-02-26 19:50:54 +03:00
2ae3f2875f comments_stat-0 (Deploy on push: successful in 25s) 2024-02-26 19:44:13 +03:00
fbee450bde comments_stat (Deploy on push: successful in 25s) 2024-02-26 19:38:22 +03:00
248620622a reactions-distinct (Deploy on push: successful in 26s) 2024-02-26 18:16:52 +03:00
172b3af6df no-distinct-fix (Deploy on push: successful in 24s) 2024-02-26 18:12:09 +03:00
c905666591 json-as-dict (Deploy on push: successful in 24s) 2024-02-26 18:04:34 +03:00
72aa96a99f dict-patch (Deploy on push: successful in 26s) 2024-02-26 18:00:55 +03:00
431b14bf5b orderby-fix (Deploy on push: successful in 25s) 2024-02-26 16:04:39 +03:00
3c0a1cf592 less-redis-log (Deploy on push: successful in 25s) 2024-02-26 15:56:13 +03:00
851a661c6f distinct-reactions (Deploy on push: successful in 25s) 2024-02-26 15:46:30 +03:00
fec363063d distinct (Deploy on push: successful in 29s) 2024-02-26 15:21:12 +03:00
ced8c9f75c error-handle-create-shout-2 (Deploy on push: successful in 25s) 2024-02-26 12:52:22 +03:00
4a57866c3d error-handle-create-shout (Deploy on push: successful in 24s) 2024-02-26 12:22:55 +03:00
a93fa7fb18 async-login-requiered (Deploy on push: successful in 23s) 2024-02-26 12:14:08 +03:00
2257c3375a nodict (Deploy on push: successful in 23s) 2024-02-26 11:57:18 +03:00
ecbeb5b85e shout-author-create-fix (Deploy on push: successful in 25s) 2024-02-26 11:52:57 +03:00
33a59a4acc after-shouts-update-fix (Deploy on push: successful in 33s) 2024-02-26 11:47:52 +03:00
886ca8c0ff setex-fix (Deploy on push: successful in 23s) 2024-02-26 05:52:08 +03:00
ebbbe05237 get-author-fix-6 (Deploy on push: successful in 23s) 2024-02-26 05:43:35 +03:00
8fb161470f preparing-cache-data (Deploy on push: successful in 23s) 2024-02-26 05:36:18 +03:00
28d2227c39 get-author-fix-5 (Deploy on push: successful in 24s) 2024-02-26 05:23:18 +03:00
8b8a284e59 more-caching (Deploy on push: successful in 24s) 2024-02-26 05:06:27 +03:00
732bd2b098 caching-follows (Deploy on push: successful in 23s) 2024-02-26 04:58:27 +03:00
f40eff2822 events-fix (Deploy on push: successful in 23s) 2024-02-26 04:46:23 +03:00
eab1700b0d get-author-fix-3 (Deploy on push: successful in 23s) 2024-02-26 04:22:06 +03:00
a00c68068f follows-cache-fix (Deploy on push: successful in 23s) 2024-02-26 03:49:56 +03:00
5478ff45e7 get-author-fix-3 (Deploy on push: successful in 23s) 2024-02-26 02:07:46 +03:00
8635fd9c08 comments-stat-off-2 (Deploy on push: successful in 24s) 2024-02-26 01:24:32 +03:00
90a6e23e61 comments-stat-off (Deploy on push: successful in 25s) 2024-02-26 01:10:15 +03:00
152730526f get-author-fix (Deploy on push: successful in 24s) 2024-02-26 01:06:10 +03:00
f12d2fc560 get-author-fix (Deploy on push: successful in 25s) 2024-02-26 01:03:11 +03:00
a7f14ee473 author.stat.comments (Deploy on push: successful in 25s) 2024-02-26 00:29:14 +03:00
5ca072dfaa events-trigger-query-fix (Deploy on push: successful in 25s) 2024-02-26 00:06:37 +03:00
b02b8276a6 get-author-fix (Deploy on push: successful in 25s) 2024-02-25 22:45:36 +03:00
8be96daae4 cache-update-fix (Deploy on push: successful in 24s) 2024-02-25 21:47:14 +03:00
fc774adb9f search-authors-fix (Deploy on push: successful in 25s) 2024-02-25 21:43:30 +03:00
8b3cfebc47 Merge remote-tracking branch 'origin/dev' into dev 2024-02-25 21:27:17 +03:00
f596a9bf2c update-author_cache 2024-02-25 21:27:07 +03:00
7a89bb2783 update-author_cache (Deploy on push: successful in 1m26s) 2024-02-25 21:16:34 +03:00
314c54969b sa-warning-fix (Deploy on push: failing after 1m4s) 2024-02-25 20:58:48 +03:00
c7fe7f458c aliased-author-fix (Deploy on push: successful in 2m12s) 2024-02-25 19:44:33 +03:00
9ea10ba5c1 dockerfile-revert (Deploy on push: successful in 1m46s) 2024-02-25 19:32:36 +03:00
695c5efbc8 dockerfile-update-4 (Deploy on push: failing after 2m39s) 2024-02-25 19:29:32 +03:00
feea5845a8 dockerfile-update-3 (Deploy on push: failing after 28s) 2024-02-25 19:27:41 +03:00
3b5a6973ef dockerfile-fix (Deploy on push: failing after 8s) 2024-02-25 19:08:20 +03:00
b12db9af0e faster-get-author (Deploy on push: failing after 8s) 2024-02-25 19:02:15 +03:00
1e922e3161 create-all-fix (Deploy on push: failing after 35s) 2024-02-25 18:36:08 +03:00
a760d253b3 configure-mappers-call-fix-3 (Deploy on push: failing after 7s) 2024-02-25 18:26:23 +03:00
b5240d9508 configure-mappers-call-fix-2 (Deploy on push: successful in 2m10s) 2024-02-25 18:19:12 +03:00
4dbd593cba configure-mappers-call-fix (Deploy on push: failing after 35s) 2024-02-25 18:08:02 +03:00
309ac2d929 desc-order-fix 2024-02-25 17:58:09 +03:00
2e635abe5e sql-text-fix-order (Deploy on push: successful in 1m56s) 2024-02-25 17:49:15 +03:00
26c12b2aad order-by-text-fix (Deploy on push: successful in 1m39s) 2024-02-25 17:39:38 +03:00
ad1bb4af19 search-pg-catalog 2024-02-25 16:46:27 +03:00
2222f6fc19 searchable (Deploy on push: successful in 4m21s) 2024-02-25 16:43:04 +03:00
4b83f5d0f5 sql-text-fix (Deploy on push: successful in 3m35s) 2024-02-25 16:15:07 +03:00
857a3648a3 pgtrgm-add (Deploy on push: successful in 1m49s) 2024-02-25 16:12:08 +03:00
a4745df71b sql-text-fix (Deploy on push: successful in 2m1s) 2024-02-25 16:04:15 +03:00
8b15ef9429 fmt (Deploy on push: failing after 7s) 2024-02-25 16:00:50 +03:00
07a9e7ef56 engine-exec-2 (Deploy on push: failing after 6s) 2024-02-25 15:56:28 +03:00
146d49be5b table-name-fix-2 (Deploy on push: failing after 1m43s) 2024-02-25 15:47:28 +03:00
ccc5c98a14 typo-fix (Deploy on push: successful in 1m43s) 2024-02-25 15:33:07 +03:00
a149091e3c search-authors-fmt (Deploy on push: successful in 1m25s) 2024-02-25 15:22:48 +03:00
9aabfacf84 little-redis-cache (Deploy on push: successful in 1m25s) 2024-02-25 14:58:16 +03:00
9c6a349cc7 re-alias-author (Deploy on push: successful in 1m8s) 2024-02-25 14:41:04 +03:00
fc58208bdd more-logs (Deploy on push: successful in 1m29s) 2024-02-25 14:39:26 +03:00
60e7cd03b7 logs (Deploy on push: successful in 2m23s) 2024-02-25 14:26:44 +03:00
5d8638867d no-ratings-check (Deploy on push: successful in 1m30s) 2024-02-25 14:08:09 +03:00
fc0e4bb2df aliased-fix (Deploy on push: successful in 1m45s) 2024-02-25 13:54:28 +03:00
c863dda81b ratings-subquery-fix-2 (Deploy on push: successful in 1m35s) 2024-02-25 13:45:33 +03:00
8d47c02511 ratings-subquery-fix 2024-02-25 13:43:12 +03:00
c216161ece one-joined-query (Deploy on push: successful in 1m32s) 2024-02-25 13:29:57 +03:00
eb4a4fef61 import-fix-3 (Deploy on push: successful in 1m19s) 2024-02-25 12:10:09 +03:00
7370c8ca2d import-fix-2 (Deploy on push: successful in 1m30s) 2024-02-25 12:06:41 +03:00
42313184b0 import-fix (Deploy on push: successful in 3m59s) 2024-02-25 11:35:06 +03:00
efa6ac7d60 get-author-followers-fix (Deploy on push: successful in 2m10s) 2024-02-25 11:27:08 +03:00
b2357e0afb debug-stat (Deploy on push: successful in 1m21s) 2024-02-25 09:48:16 +03:00
d58bbe3499 load-authors-by-rating (Deploy on push: successful in 1m46s) 2024-02-25 09:31:06 +03:00
40305ad35d fix-sawarning (Deploy on push: successful in 1m40s) 2024-02-25 00:42:22 +03:00
3097c33e44 full-traceback-on-sawarning (Deploy on push: successful in 2m11s) 2024-02-25 00:06:54 +03:00
6f11652320 fix-int (Deploy on push: successful in 1m37s) 2024-02-24 21:56:09 +03:00
f5b3cd8f97 debug-query-follows (Deploy on push: successful in 1m24s) 2024-02-24 21:52:16 +03:00
eaaace4d28 fmt (Deploy on push: successful in 3m45s) 2024-02-24 21:45:38 +03:00
12137eccda minor-fixes (Deploy on push: successful in 1m48s) 2024-02-24 21:30:19 +03:00
d7c9622ffa int-id-fix (Deploy on push: successful in 5m45s) 2024-02-24 21:15:11 +03:00
5e72a08e0f circular-fix-2 (Deploy on push: successful in 3m49s) 2024-02-24 20:42:19 +03:00
a3244fc74b circular-fix (Deploy on push: failing after 6s) 2024-02-24 19:53:47 +03:00
f1444cbe10 stat-fn-moved (Deploy on push: successful in 1m59s) 2024-02-24 19:23:53 +03:00
3e58164ae8 ratings-true (Deploy on push: successful in 1m42s) 2024-02-24 19:12:35 +03:00
003fa1bbac types fixes 2024-02-24 13:22:35 +03:00
0ca83cc91e cache authors by id 2024-02-24 09:26:31 +03:00
02a7b64449 unauthorized-fix (Deploy on push: successful in 1m52s) 2024-02-24 00:00:46 +03:00
dae2c7b689 select-from-fix (Deploy on push: successful in 1m42s) 2024-02-23 23:42:49 +03:00
11ea8b7efb fieldname-fix (Deploy on push: successful in 2m2s) 2024-02-23 23:26:12 +03:00
1edf93f7ce follows-stat-fix (Deploy on push: successful in 1m43s) 2024-02-23 23:15:16 +03:00
8b9ac594cd query-fixed (Deploy on push: successful in 2m36s) 2024-02-23 22:43:50 +03:00
fbbc408df6 clean (Deploy on push: successful in 1m27s) 2024-02-23 22:24:48 +03:00
f16f345040 topics-with-stat-fix (Deploy on push: successful in 1m23s) 2024-02-23 22:14:08 +03:00
2f81a5cf12 coalesce (Deploy on push: successful in 1m37s) 2024-02-23 21:34:02 +03:00
586672b279 fieldnames-fix (Deploy on push: successful in 1m48s) 2024-02-23 21:27:38 +03:00
f04e20426f topic-stat-query-fix (Deploy on push: successful in 1m19s) 2024-02-23 21:22:55 +03:00
a05072fd71 separated-follows (Deploy on push: successful in 1m43s) 2024-02-23 21:10:11 +03:00
3bc7946ab3 stat-fix (Deploy on push: successful in 1m32s) 2024-02-23 20:25:52 +03:00
e80b3ac770 fmt+refactor (Deploy on push: successful in 24s) 2024-02-23 19:35:40 +03:00
14947225a6 same-shout-on-update-fix (Deploy to core: successful in 1m39s) 2024-02-23 15:38:13 +03:00
2e2eba68a2 db-adapter-fixes (Deploy to core: successful in 1m7s) 2024-02-23 15:02:14 +03:00
32bc750071 revert-auth-nocache (Deploy to core: successful in 1m35s) 2024-02-23 14:53:14 +03:00
a0f75c0505 stat-fix-2 (Deploy to core: successful in 1m43s) 2024-02-23 14:43:14 +03:00
5b34cab6bc add-columns-stat (Deploy to core: successful in 23s) 2024-02-23 14:40:38 +03:00
cc80c92ad3 stat-fix (Deploy to core: successful in 3m59s) 2024-02-23 14:34:43 +03:00
a55fa8d2ff trace-more (Deploy to core: successful in 1m30s) 2024-02-23 14:23:13 +03:00
9999c362d4 auth-cache-fix (Deploy to core: successful in 1m58s) 2024-02-23 14:19:54 +03:00
64012344cb alias-fix-2 (Deploy to core: successful in 1m25s) 2024-02-23 14:09:12 +03:00
6e0da78658 alias-fix (Deploy to core: successful in 2m7s) 2024-02-23 14:05:46 +03:00
14e2828e2d aliased-more (Deploy to core: successful in 2m1s) 2024-02-23 13:52:31 +03:00
595e4ba87d nosearchinfo (Deploy to core: successful in 4m8s) 2024-02-23 13:40:40 +03:00
72aa21c9cd get-topic-fix (Deploy to core: successful in 1m15s) 2024-02-23 13:34:31 +03:00
17f79e1622 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev (Deploy to core: successful in 5m51s) 2024-02-23 10:20:13 +03:00
ec08e85e8f select-from-fix 2024-02-23 10:14:58 +03:00
6ed09d5851 reverte 2024-02-23 04:08:29 +03:00
f8b4b0b96f gixing-fix (Deploy to core: successful in 1m7s) 2024-02-23 03:59:28 +03:00
ef7f2d7b92 aliasing (Deploy to core: successful in 1m25s) 2024-02-23 03:28:46 +03:00
8d97463c1d join-clause-groupby-fixes (Deploy to core: successful in 1m6s) 2024-02-23 03:03:34 +03:00
60c7ab5fe4 separate-getter-follows (Deploy to core: successful in 3m44s) 2024-02-23 02:53:19 +03:00
392cfb19bd separate-getter (Deploy to core: successful in 1m53s) 2024-02-23 02:49:34 +03:00
3d34c6c540 stat-refactored (Deploy to core: successful in 1m42s) 2024-02-23 02:08:43 +03:00
b0e2551e9b groupby-fix (Deploy to core: successful in 1m23s) 2024-02-23 00:03:12 +03:00
54f7dd9c1f select-from (Deploy to core: successful in 1m19s) 2024-02-22 23:53:28 +03:00
d69f29bda3 move-author-fix (Deploy to core: successful in 1m26s) 2024-02-22 23:32:26 +03:00
f8dafda86b no-select-from-fix 2024-02-22 23:13:29 +03:00
96b698f7ff select-from-fix-aliased (Deploy to core: successful in 1m53s) 2024-02-22 23:13:00 +03:00
a877e1a7b8 select-from-fix (Deploy to core: successful in 1m44s) 2024-02-22 23:07:08 +03:00
00b7aab220 debug-auth (Deploy to core: successful in 2m0s) 2024-02-22 23:01:13 +03:00
5303aef4f0 alias-fix (Deploy to core: failing after 1m9s) 2024-02-22 22:56:58 +03:00
078e8ab7d1 aliased (Deploy to core: successful in 2m1s) 2024-02-22 21:22:22 +03:00
ebf342c73b webhook-fix (Deploy to core: successful in 1m28s) 2024-02-22 21:18:20 +03:00
ce736e2624 session-commit-fix (Deploy to core: failing after 1m15s) 2024-02-22 21:10:43 +03:00
88a0d58751 update-last-seen-aware (Deploy to core: successful in 1m39s) 2024-02-22 13:20:14 +03:00
4a1ee2ac80 add-topic-stats (Deploy to core: failing after 6s) 2024-02-22 13:12:34 +03:00
a5416143df query-follows-fix (Deploy to core: cancelled) 2024-02-22 13:12:01 +03:00
d9abea9840 get-user-followsx (Deploy to core: successful in 1m25s) 2024-02-22 13:07:09 +03:00
0f038ac6d7 caching-author-fix (Deploy to core: failing after 1m10s) 2024-02-22 13:01:38 +03:00
187c14d6b0 slug-patch-on-create (Deploy to core: successful in 1m29s) 2024-02-22 12:23:46 +03:00
8d06f59702 port-fix (Deploy to core: failing after 6s) 2024-02-21 23:14:06 +03:00
750f00c6ac 1sec-delay (Deploy to core: cancelled) 2024-02-21 23:12:47 +03:00
aed1885278 row-adapt (Deploy to core: successful in 2m39s) 2024-02-21 22:29:27 +03:00
1796d0c82d small-fix (Deploy to core: failing after 6m38s) 2024-02-21 22:20:17 +03:00
fc3f859602 profiling-less (Deploy to core: failing after 6s) 2024-02-21 22:16:29 +03:00
d50064a97e query-fix7 (Deploy to core: cancelled) 2024-02-21 22:12:31 +03:00
332be3f12b query-fix6 (Deploy to core: successful in 4m16s) 2024-02-21 22:03:57 +03:00
da33ae92a9 query-fix5 (Deploy to core: successful in 2m40s) 2024-02-21 21:53:11 +03:00
f49fb2d01d db-profiling-simple (Deploy to core: successful in 4m19s) 2024-02-21 21:47:00 +03:00
296721d2b1 fix-queru-more-2 (Deploy to core: successful in 2m35s) 2024-02-21 21:33:27 +03:00
5f4e30866f fix-queru-more (Deploy to core: successful in 2m31s) 2024-02-21 21:25:23 +03:00
1c04125921 noworker-5 (Deploy to core: successful in 3m11s) 2024-02-21 21:04:57 +03:00
3db2efdf79 noworker-3 (Deploy to core: successful in 4m47s) 2024-02-21 20:50:26 +03:00
cb64cd66da noworker (Deploy to core: successful in 2m51s) 2024-02-21 20:46:29 +03:00
9b2d1c96ba fix (Deploy to core: failing after 1m10s) 2024-02-21 20:38:12 +03:00
1f0d5ae8e8 batch-load-fix (Deploy to core: failing after 16m25s) 2024-02-21 20:12:47 +03:00
784f790b83 stats-follows (Deploy to core: successful in 3m20s) 2024-02-21 19:48:33 +03:00
1eac614e35 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev 2024-02-21 19:48:04 +03:00
214af0cf51 fmt 2024-02-21 19:45:53 +03:00
823e59ea74 fmt (Deploy to core: failing after 15m33s) 2024-02-21 19:14:58 +03:00
88cd6e1060 dict-query-fix (Deploy to core: cancelled) 2024-02-21 19:12:24 +03:00
5b8347ee54 query-fix-2 2024-02-21 19:11:49 +03:00
2e07219732 initial-delay 2024-02-21 19:06:39 +03:00
59c46172c4 almost-dict (Deploy to core: waiting to run) 2024-02-21 19:03:49 +03:00
2e3d85b43d select-fix (Deploy to core: waiting to run) 2024-02-21 18:55:21 +03:00
b7cbef01a3 dictify (Deploy to core: successful in 2m31s) 2024-02-21 18:51:37 +03:00
3f361b1af7 sqlfix (Deploy to core: waiting to run) 2024-02-21 18:38:15 +03:00
3ae706d6db healhchecks-warn-out (Deploy to core: successful in 1m55s) 2024-02-21 18:33:42 +03:00
960cdf30da batch-render-follows (Deploy to core: waiting to run) 2024-02-21 18:26:18 +03:00
ab31d0d296 query_follows-fix (Deploy to core: waiting to run) 2024-02-21 18:13:43 +03:00
67fa44b062 redis-save-fi (Deploy to core: failing after 1m45s) 2024-02-21 18:07:02 +03:00
74e639737e profiling-fix-2 2024-02-21 18:03:02 +03:00
be9f62eb76 profiling-db (Deploy to core: failing after 2m5s) 2024-02-21 17:55:54 +03:00
e69046a1f8 cache-fixed (Deploy to core: failing after 15m39s) 2024-02-21 17:37:58 +03:00
63f5a708b7 update-redis-api (Deploy to core: failing after 1m27s) 2024-02-21 16:06:24 +03:00
33330fb052 logger-restore (Deploy to core: successful in 1m41s) 2024-02-21 14:23:42 +03:00
a40eb878be async-events-fix (Deploy to core: failing after 1m35s) 2024-02-21 14:21:04 +03:00
9da452c2f0 follower-resolver-fix (Deploy to core: failing after 1m30s) 2024-02-21 13:59:17 +03:00
3b867ded20 redis-hset-fix (Deploy to core: successful in 1m25s) 2024-02-21 13:51:07 +03:00
2cfcab744e fmt (Deploy to core: successful in 1m55s) 2024-02-21 13:47:33 +03:00
f75eb13971 less-log 2024-02-21 13:45:33 +03:00
9118ae9268 logger-query-id (Deploy to core: successful in 1m13s) 2024-02-21 13:44:36 +03:00
4ca884f257 debug-get-author-but-userid (Deploy to core: successful in 1m42s) 2024-02-21 13:27:00 +03:00
9c14f4b4d3 logger-fix (Deploy to core: successful in 1m51s) 2024-02-21 13:22:46 +03:00
fb48bee8df get_author_by_user_id-fix (Deploy to core: failing after 6s) 2024-02-21 13:16:39 +03:00
ba436de055 lesslog (Deploy to core: cancelled) 2024-02-21 13:13:43 +03:00
253ee11bb9 logger-timing-logix-fix (Deploy to core: successful in 1m27s) 2024-02-21 13:09:40 +03:00
731f9a45df logger-timing-logix-fix (Deploy to core: successful in 1m41s) 2024-02-21 13:02:49 +03:00
73f020ae5d fix-circular (Deploy to core: successful in 1m59s) 2024-02-21 12:34:12 +03:00
762857ffbe trigger-fix (Deploy to core: failing after 1m59s) 2024-02-21 12:22:55 +03:00
8f6416a73c trigger-get-author-fixes (Deploy to core: failing after 3m32s) 2024-02-21 12:10:30 +03:00
4cde1c14b4 handle-shouts-paginating (Deploy to core: failing after 1m33s) 2024-02-21 11:59:47 +03:00
ee577c75fd graphql-schema-update (Deploy to core: failing after 1m46s) 2024-02-21 11:52:57 +03:00
9eee73acf3 shouts-follows (Deploy to core: successful in 5m54s) 2024-02-21 11:35:13 +03:00
7cf702eb98 fmt (Deploy to core: successful in 2m0s) 2024-02-21 10:27:16 +03:00
4f26812340 appdata-triggers (Deploy to core: successful in 1m16s) 2024-02-20 21:57:39 +03:00
66f1c654cf format-multiline-log-fix (Deploy to core: successful in 1m8s) 2024-02-20 19:45:55 +03:00
abc752c629 format-multiline-log-fix (Deploy to core: successful in 1m10s) 2024-02-20 19:42:14 +03:00
333340056e logger-3301 (Deploy to core: successful in 1m25s) 2024-02-20 19:37:20 +03:00
3c03688544 logger-3000 (Deploy to core: failing after 1m9s) 2024-02-20 19:33:24 +03:00
b59a8ef323 root-logger (Deploy to core: successful in 1m27s) 2024-02-20 19:23:38 +03:00
183755e637 one-logger (Deploy to core: successful in 1m24s) 2024-02-20 19:19:46 +03:00
822815fdac logger-3 (Deploy to core: successful in 1m10s) 2024-02-20 19:01:50 +03:00
9f10a23345 typo-fix (Deploy to core: successful in 1m24s) 2024-02-20 18:46:30 +03:00
86754c341d log (Deploy to core: failing after 2m7s) 2024-02-20 18:42:14 +03:00
20e9add575 log (Deploy to core: failing after 3m51s) 2024-02-20 18:37:53 +03:00
b5cdface63 logger-fix (Deploy to core: failing after 1m25s) 2024-02-20 18:28:38 +03:00
dd2301343f loggerfix (Deploy to core: successful in 1m25s) 2024-02-20 18:22:54 +03:00
f7d0d10d50 debug-auth 2024-02-20 18:20:57 +03:00
e85c179d93 muiltilinelog
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-20 18:16:17 +03:00
d8a4481aab logger-fix
All checks were successful
Deploy to core / deploy (push) Successful in 3m17s
2024-02-20 18:10:36 +03:00
cbb4533855 depfix
All checks were successful
Deploy to core / deploy (push) Successful in 1m32s
2024-02-20 18:04:59 +03:00
40e52b4d71 nosentry
Some checks failed
Deploy to core / deploy (push) Failing after 1m56s
2024-02-20 17:54:43 +03:00
f283ea048b logs-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m28s
2024-02-20 17:49:21 +03:00
0febd91b25 logs-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m22s
2024-02-20 17:46:33 +03:00
0e701020bb lesslog
All checks were successful
Deploy to core / deploy (push) Successful in 3m34s
2024-02-20 17:27:30 +03:00
0d1b73878e debug-auth 2024-02-20 17:22:55 +03:00
5af3dcb132 typo-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m26s
2024-02-20 12:58:16 +03:00
8b08e23801 fixmodel
All checks were successful
Deploy to core / deploy (push) Successful in 1m27s
2024-02-20 12:53:15 +03:00
6377bc3d64 revert
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-20 12:40:22 +03:00
811086de83 simpler-author-model
All checks were successful
Deploy to core / deploy (push) Successful in 1m22s
2024-02-20 12:04:45 +03:00
a00fe8b8ef orm-update2
All checks were successful
Deploy to core / deploy (push) Successful in 1m28s
2024-02-20 11:53:55 +03:00
d590884dca change-index
All checks were successful
Deploy to core / deploy (push) Successful in 3m51s
2024-02-20 11:47:37 +03:00
da9ccbd0cc ratings-model-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m29s
2024-02-20 10:52:30 +03:00
69984788fa no-unique-index
All checks were successful
Deploy to core / deploy (push) Successful in 1m33s
2024-02-19 17:22:38 +03:00
981a4c4fce buildsystemver-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m10s
2024-02-19 16:29:05 +03:00
67d6d7134a buildsystemver-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-19 16:23:24 +03:00
2d75593cc2 realname-core
All checks were successful
Deploy to core / deploy (push) Successful in 1m57s
2024-02-19 16:18:35 +03:00
e483ea9329 no-searchclient-info
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-19 16:07:52 +03:00
09887bc516 handle-exception
All checks were successful
Deploy to core / deploy (push) Successful in 1m23s
2024-02-19 15:51:09 +03:00
74233e96ff auth-cached-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m31s
2024-02-19 15:31:51 +03:00
e5edc97ab1 auth-cached-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m52s
2024-02-19 15:18:25 +03:00
75edee4fe9 we-all-made-of-stars
All checks were successful
Deploy to core / deploy (push) Successful in 1m21s
2024-02-19 14:54:13 +03:00
37230a8392 ruff-up 2024-02-19 14:46:45 +03:00
6d3c0ee39e isort+authfix
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-02-19 14:45:55 +03:00
b89060f15f model-index-slug
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-19 13:25:47 +03:00
8193bd0178 all-cached-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m30s
2024-02-19 13:16:44 +03:00
1fa97908b2 auth-cached-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-19 12:56:58 +03:00
a39db6991c depfix
All checks were successful
Deploy to core / deploy (push) Successful in 2m18s
2024-02-19 12:49:33 +03:00
add5f6df63 cache-jwt-validation
Some checks failed
Deploy to core / deploy (push) Failing after 28s
2024-02-19 12:40:26 +03:00
cf8934c605 schema-main 2024-02-19 11:58:31 +03:00
680242f1e3 schema-main 2024-02-19 11:58:02 +03:00
0301d8041d schema-move-test
All checks were successful
Deploy to core / deploy (push) Successful in 1m54s
2024-02-19 11:20:13 +03:00
2464b91f9b fix-env
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-02-19 11:16:48 +03:00
ddf203a869 healthcheck
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-19 11:15:53 +03:00
b01bf77d8e fix-pjs
Some checks failed
Deploy to core / deploy (push) Failing after 1m18s
2024-02-19 11:13:05 +03:00
22466e65e2 4threads-1worker
Some checks failed
Deploy to core / deploy (push) Failing after 8s
2024-02-19 11:12:00 +03:00
e4036c8a79 no-aiohttp
Some checks failed
Deploy to core / deploy (push) Failing after 9s
2024-02-19 11:11:13 +03:00
5772db6a36 query-time-log
Some checks failed
Deploy to core / deploy (push) Failing after 10s
2024-02-19 11:10:12 +03:00
f01dde845c fixrating
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-02-19 10:33:15 +03:00
e6720ccaaf restore-struct
Some checks failed
Deploy to core / deploy (push) Failing after 7s
2024-02-19 10:14:14 +03:00
7b8e9fbea6 sql-profiling
Some checks failed
Deploy to core / deploy (push) Failing after 1m24s
2024-02-19 10:06:46 +03:00
aa55e952aa dockerfile fix 2024-02-19 09:56:23 +03:00
f74358be76 rating in orm
Some checks failed
Deploy to core / deploy (push) Failing after 3m55s
2024-02-19 09:50:15 +03:00
ca22ac9b13 dev-deploy
All checks were successful
Deploy to core / deploy (push) Successful in 1m21s
2024-02-19 09:40:30 +03:00
1092b8a2ca ml
All checks were successful
Deploy to core / deploy (push) Successful in 6s
2024-02-19 09:38:18 +03:00
a1ed480567 shout-id
All checks were successful
Deploy to core / deploy (push) Successful in 6s
2024-02-17 21:55:50 +03:00
f3df37a41b update-reaction-fix-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-17 21:44:22 +03:00
c6df11dc7d update-reaction-fix-2
Some checks failed
Deploy to core / deploy (push) Failing after 1m9s
2024-02-17 21:04:01 +03:00
47ecf4bd1a dockerfile-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m52s
2024-02-17 13:25:24 +03:00
93d536bdba dockerfile-fix
Some checks failed
Deploy to core / deploy (push) Failing after 30s
2024-02-17 13:21:52 +03:00
8a4e4ce6d5 linter-update
Some checks failed
Deploy to core / deploy (push) Failing after 50s
2024-02-17 13:18:54 +03:00
92246bc9d1 create-update-shout-fix
Some checks failed
Deploy to core / deploy (push) Failing after 46s
2024-02-17 09:35:11 +03:00
6ef2c47e11 id-optional-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m58s
2024-02-16 19:59:12 +03:00
0a74ed0f63 update-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-16 19:46:57 +03:00
7aaa9e8d8b sentry-enable
Some checks failed
Deploy to core / deploy (push) Failing after 46s
2024-02-16 12:44:19 +03:00
9a2d7b6f11 fmt
All checks were successful
Deploy to core / deploy (push) Successful in 1m53s
2024-02-16 12:40:41 +03:00
994469c2e3 cleaner-main
All checks were successful
Deploy to core / deploy (push) Successful in 1m51s
2024-02-16 12:34:39 +03:00
79ec5a1841 log-fix
All checks were successful
Deploy to core / deploy (push) Successful in 2m25s
2024-02-16 12:16:00 +03:00
233c71385f more-instance-check
All checks were successful
Deploy to core / deploy (push) Successful in 2m8s
2024-02-15 18:17:18 +03:00
e9ed01e797 postprocess-query-for-order-4
All checks were successful
Deploy to core / deploy (push) Successful in 2m29s
2024-02-14 12:07:55 +03:00
2e60fd2cc7 postprocess-query-for-order-3
All checks were successful
Deploy to core / deploy (push) Successful in 2m9s
2024-02-14 12:05:19 +03:00
9b174d94c6 postprocess-query-for-order-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-02-14 10:51:43 +03:00
3488282c14 postprocess-query-for-order
All checks were successful
Deploy to core / deploy (push) Successful in 2m27s
2024-02-14 10:47:54 +03:00
c732ec8136 reply-to-empty-fix
Some checks failed
Deploy to core / deploy (push) Failing after 50s
2024-02-07 19:50:01 +03:00
180dab1c06 filter-rating-only
All checks were successful
Deploy to core / deploy (push) Successful in 1m58s
2024-02-07 18:39:55 +03:00
85931d04ba delete-reaction-fix
Some checks failed
Deploy to core / deploy (push) Failing after 2m11s
2024-02-07 16:41:17 +03:00
7746d1992f fmt
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-05 12:47:26 +03:00
77dddedae6 no-notify-on-entity-create
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-05 10:08:11 +03:00
23468e4b3e debug-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-03 20:13:51 +03:00
e7a1697f11 get-my-followings-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m34s
2024-02-03 20:08:22 +03:00
e4846f8abb readme-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-02-03 18:40:25 +03:00
33193b2345 update_profile-
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-03 17:44:28 +03:00
2008345e69 common-result-type
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-03 17:35:57 +03:00
d3b2eddf58 return-type-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-02-03 17:31:00 +03:00
18521f3fc5 schema-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m33s
2024-02-03 17:18:20 +03:00
1b4315fcce bye-following-manageer
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-02-03 17:00:48 +03:00
53ceac108f full-preload
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-02-03 16:17:00 +03:00
066770febc logs
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-03 12:51:52 +03:00
83390912e9 following-manager-upgrade
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-03 12:48:36 +03:00
7f04eba208 comment-filter-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-03 12:10:38 +03:00
dea03ffa4c load-reactions-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-03 01:39:57 +03:00
d6151c00c8 update-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m49s
2024-02-02 23:59:42 +03:00
b0e981ece4 update-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-02-02 23:49:12 +03:00
7cd7447796 revied
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-02 23:38:42 +03:00
8cc7e21338 revised
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-02 23:38:16 +03:00
6d3bd13218 check-twice
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-02 23:16:04 +03:00
516945ddec publish-fix
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-02-02 21:04:21 +03:00
410d426ea5 indexing-serializer-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-02 20:54:17 +03:00
1be8eeb810 typo-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-02 19:57:34 +03:00
61528e5269 visibility-no-need
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-02-02 19:36:30 +03:00
e3ee65f79a unfeature-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-02 19:29:26 +03:00
fa2b0eeffa name
All checks were successful
Deploy to core / deploy (push) Successful in 29s
2024-02-02 16:00:57 +03:00
d1f4b05e8d name
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-02 15:59:56 +03:00
7a3830653e fmt
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-02 15:59:22 +03:00
08b69e5d0a packaging-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-02 15:16:53 +03:00
c00361b2ec featured-id-patch
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-02-02 15:05:20 +03:00
bd5f910f8c delete-shout-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-31 22:47:30 +03:00
fbbe6b0751 following-set-fix-4
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-31 18:23:00 +03:00
a6d604f233 following-set-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-31 18:18:36 +03:00
5a810fa126 following-fix-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-31 17:48:36 +03:00
77907c73e0 following-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-31 17:45:02 +03:00
ff30960608 following-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-31 17:11:53 +03:00
1fb37f8aa0 create-reaction-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-31 03:09:58 +03:00
75cff9dbed create-reaction-fox
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-01-31 02:46:52 +03:00
880e295b45 unique-reactions
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-31 01:53:54 +03:00
fceb3b61c7 logs-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-01-30 14:00:53 +03:00
e28f03d7db author-shouts-counter-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m49s
2024-01-30 11:58:17 +03:00
e4d7284681 reacted-stat-restore
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 15:20:28 +03:00
325927739e info-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-29 13:02:14 +03:00
774a5ee596 reorg-code
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-01-29 11:09:10 +03:00
b975e174ca lesslog
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-01-29 11:04:50 +03:00
98b379c8e1 lock-more-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-29 11:01:04 +03:00
133067d09a await-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-01-29 10:48:36 +03:00
e6f12e9106 lesslog
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-29 10:47:31 +03:00
e6366d15f6 debug-search-results
All checks were successful
Deploy to core / deploy (push) Successful in 1m45s
2024-01-29 10:37:21 +03:00
ae9e025959 cache-success-only
All checks were successful
Deploy to core / deploy (push) Successful in 1m46s
2024-01-29 09:45:00 +03:00
2f2fa346ed bloatcodeless
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 07:04:37 +03:00
b9d602eedf not-error-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-01-29 06:56:34 +03:00
9f9ea93526 release-lock-fix
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-01-29 06:52:51 +03:00
520b43ee48 bypass-fix
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-29 06:51:26 +03:00
d595a18de4 logs-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-01-29 06:48:11 +03:00
f164fd66d4 index-restruct-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-29 06:45:07 +03:00
5002e85177 index-restruct
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-01-29 06:42:02 +03:00
56bf5b2874 simpler-disabled
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-29 06:18:36 +03:00
8a88a98b53 ignore-unavial-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-01-29 06:09:40 +03:00
4b9382c47d stability-2
Some checks failed
Deploy to core / deploy (push) Failing after 1m33s
2024-01-29 06:03:37 +03:00
cf23d343d1 stability-fail
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:56:28 +03:00
9e18697cac disabling
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:37:10 +03:00
b574673f00 search-indicies
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:26:49 +03:00
62018534fd index-name-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-01-29 05:20:24 +03:00
f86d2f0cd6 readme-fix 2024-01-29 05:17:47 +03:00
4a6863c474 check-if-exists
All checks were successful
Deploy to core / deploy (push) Successful in 1m33s
2024-01-29 05:13:37 +03:00
f38ee9239f tolerate-error
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:07:30 +03:00
ff3ccc6174 opensearch-client-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m33s
2024-01-29 05:03:20 +03:00
69eb41fc8d opensearch-client
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-01-29 05:00:54 +03:00
6c398fc593 disabled-logix
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-01-29 04:47:53 +03:00
258bb4e779 logs-fox
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-01-29 04:43:02 +03:00
e1a27b55cd inner-search-3
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-29 04:41:46 +03:00
2663d1cbc5 allow-selfsigned
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 04:21:28 +03:00
8ff1949170 inner-search-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-01-29 04:09:54 +03:00
2c2932caeb inner-search
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-29 03:27:30 +03:00
35f7a35f27 scored-subquery-fix-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 01:57:34 +03:00
1066b85e1b scored-subquery-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-29 01:25:47 +03:00
982d424e1b merged
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-29 00:43:13 +03:00
f749ac7999 scored-subquery-fix 2024-01-29 00:42:03 +03:00
Stepan Vladovskii
84078c7cfe feat: no force any more for CI deploy from Gitea
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-28 18:37:47 -03:00
86f2c51f5a virtual-score-column-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-29 00:31:48 +03:00
18fc08f6c8 virtual-score-column-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-01-29 00:28:04 +03:00
b92431e802 search-simpler-query-fix-6
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-28 23:57:34 +03:00
01b9091310 search-simpler-query-fix-5
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-28 23:52:58 +03:00
77114c66ec search-simpler-query-fix-4
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-28 23:46:01 +03:00
30a281a693 search-simpler-query-fix-2-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m47s
2024-01-28 23:42:35 +03:00
c061e5cdb3 search-simpler-query-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-28 23:27:40 +03:00
5e4ef40b21 search-simpler-query-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m45s
2024-01-28 23:21:02 +03:00
00a672f96e slug-string-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-28 18:56:06 +03:00
263ceac5a3 found-keys-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-28 18:51:12 +03:00
c90b0bd994 ga-metric-fieldname-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-01-28 18:33:04 +03:00
ef9fbe7c88 trig-ga
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-28 18:17:34 +03:00
4bd7e7d0a1 creds-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m59s
2024-01-28 16:38:17 +03:00
7f203bf900 deps-fix
Some checks failed
Deploy to core / deploy (push) Failing after 2m13s
2024-01-28 16:33:45 +03:00
ebdfdb2613 ga4-data-api-usage
Some checks failed
Deploy to core / deploy (push) Failing after 2m16s
2024-01-28 16:26:40 +03:00
bba87bbf1d daterange-fix
All checks were successful
Deploy to core / deploy (push) Successful in 2m44s
2024-01-28 15:54:38 +03:00
bd004f6fce no-view-id
All checks were successful
Deploy to core / deploy (push) Successful in 3m21s
2024-01-28 15:40:44 +03:00
753a77ae72 daterange-format-fix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-01-28 14:28:03 +03:00
37b6776bdb viewed-service-fix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-01-28 14:20:22 +03:00
38645d063a logs-fix 2024-01-28 12:03:41 +03:00
08845152d1 Merge branch 'feature/core' of v2.discours.io:core into feature/core 2024-01-28 11:42:40 +03:00
dd2ef55f04 Merge branch 'feature/core' of v2.discours.io:core into feature/core
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-28 11:40:00 +03:00
a98284522b Merge branch 'feature/core' of v2.discours.io:core into feature/core 2024-01-28 10:03:51 +03:00
8a0da7381b Merge branch 'feature/core' of https://dev.discours.io/discours.io/core into feature/core 2024-01-28 10:01:28 +03:00
Stepan Vladovskii
bed2f89964 debug: main.py with import sentry-sdk
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-27 22:11:39 -03:00
Stepan Vladovskii
0eef9b3061 debug: main.py with import sentry-sdk
Some checks failed
Deploy to core / deploy (push) Failing after 1m29s
2024-01-27 22:02:22 -03:00
Stepan Vladovskii
d7a3c840ea feat: gitea runner push branch feature/core to v2.discours.io/core feauter/core
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-01-27 21:55:12 -03:00
Stepan Vladovskii
2c9155cd54 feat: gitea runner branch feature/core to v2.discours.io/core
All checks were successful
Deploy to core / deploy (push) Successful in 6s
2024-01-27 21:50:13 -03:00
Stepan Vladovskii
405337da27 feat: add Sentry Reddis perfomance monitoring 2024-01-27 21:45:24 -03:00
to
f73c2094d9 Update README.md 2024-01-27 08:48:03 +00:00
to
7235d2acc4 Update README.md 2024-01-27 08:45:29 +00:00
to
db33c625db Update README.md 2024-01-27 08:36:03 +00:00
7e4aa83b8e joined-search-fix 2024-01-26 18:28:02 +03:00
6116254d9f search-fix-3 2024-01-26 18:19:10 +03:00
90f164521b search-fix-2 2024-01-26 18:09:25 +03:00
24da021a62 search-fix-2 2024-01-26 17:58:01 +03:00
e7e9089b7c query-fix 2024-01-26 13:28:49 +03:00
59dec8cad6 query-fix 2024-01-26 04:24:47 +03:00
1b80d596cb search-fix-2 2024-01-26 04:05:25 +03:00
3f703ad357 add-granian 2024-01-25 22:58:35 +03:00
e2f2976572 portfix 2024-01-25 22:55:00 +03:00
f3acf878aa Merge branch 'feature/core' of https://dev.discours.io/discours.io/core into feature/core 2024-01-25 22:47:40 +03:00
4a5f1d634a granian+precommit 2024-01-25 22:41:27 +03:00
ad3fd32a6e precommit-3 2024-01-25 11:05:28 +03:00
623e532533 precommit-installed 2024-01-25 11:04:00 +03:00
9aea7b02fb precommit 2024-01-25 11:02:31 +03:00
Stepan Vladovskii
1db943acc0 debug: deploy in branch main of core dokku app 2024-01-24 22:47:36 -03:00
Stepan Vladovskii
ebbbcc97f2 feat: yess, it was deploy on staging
All checks were successful
Deploy to core / deploy (push) Successful in 5s
2024-01-24 21:07:05 -03:00
Stepan Vladovskii
e8d85d9914 debug: simplify main.yml for actions
All checks were successful
Deploy to core / deploy (push) Successful in 1m56s
2024-01-24 20:43:10 -03:00
Stepan Vladovskii
2d73a5b874 debug: simplify main.yml for actions 2024-01-24 20:42:51 -03:00
Stepan Vladovskii
1883f0d733 debug: simplify main.yml for actions
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-01-24 20:39:26 -03:00
Stepan Vladovskii
3332088b21 debug: actions without yml strange contex 2024-01-24 19:14:41 -03:00
Stepan Vladovskii
284f91b851 feat: change workflow for use branch feature/core in app core 2024-01-24 19:04:36 -03:00
ccbbc04051 .. 2024-01-24 18:19:26 +03:00
7fe026cb41 Merge branch 'feature/core' of https://dev.discours.io/discours.io/core into feature/core
All checks were successful
deploy / deploy (push) Successful in 1m55s
2024-01-24 15:36:46 +03:00
8c33955d5c redis-service-fix 2024-01-24 15:36:34 +03:00
Stepan Vladovskii
ac31a96a89 feat: migrate CI to v2 strange update of yml
All checks were successful
deploy / deploy (push) Successful in 1m48s
2024-01-23 23:31:49 -03:00
Stepan Vladovskii
0923070111 feat: migrate CI to v2
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-23 22:42:38 -03:00
06699a000a delete-reaction-schema-fix 2024-01-23 22:52:40 +03:00
f5f5cea184 load-shouts-feed 2024-01-23 22:20:43 +03:00
92dd45d278 auth-uncache 2024-01-23 22:12:22 +03:00
86e142292f cache-fix 2024-01-23 21:59:46 +03:00
c41fe8b6c9 cached-auth 2024-01-23 21:34:51 +03:00
987eb8c078 visibility-fix 2024-01-23 19:51:26 +03:00
3a6c805bcf rating-fix 2024-01-23 18:07:37 +03:00
e2e85376f0 no-return-reaction-fix 2024-01-23 17:14:43 +03:00
3f65652a5f 0.2.21-ga 2024-01-23 16:04:38 +03:00
954e6dabb7 no-rating-stat 2024-01-23 11:50:58 +03:00
d6dc374b01 community-stats-fix 2024-01-23 05:03:23 +03:00
ce5077a529 reacted_shouts_updates-fix 2024-01-23 04:58:45 +03:00
43f0c517b3 load-random-top-fix 2024-01-23 04:34:48 +03:00
e0395b0ab6 unread-fixes 2024-01-23 04:03:15 +03:00
6f5b5c364a self-regulation-logix-fix 2024-01-23 03:12:59 +03:00
8f846b6f7a refactored 2024-01-23 03:06:48 +03:00
c6088c5705 notifier-call-fix 2024-01-23 02:47:23 +03:00
f4e8f29fdd following-fix-2 2024-01-23 02:41:37 +03:00
5548d6d1f7 following-fix 2024-01-23 02:37:18 +03:00
6c5ce12b7e wrap-order-fix 2024-01-23 02:28:54 +03:00
bb2edd13e9 follow-debug 2024-01-23 02:23:31 +03:00
adbcec2511 reaction-kind-fix 2024-01-23 02:09:42 +03:00
0a38ae8e7e rating-fix 2024-01-23 02:08:59 +03:00
438baeb1a2 reaction-api-upgrade 2024-01-23 01:57:25 +03:00
4cb70d951a rating-sum-fix 2024-01-23 01:51:38 +03:00
9782cf402e create-reaction-fix-7 2024-01-23 01:21:01 +03:00
257ff43eaa create-reaction-debug-6 2024-01-23 01:11:34 +03:00
31f2414064 create-reaction-debug-4 2024-01-23 00:51:50 +03:00
3e6354afed craete-reaction-fix 2024-01-23 00:36:52 +03:00
8eb36f0cc3 create-reaction-debug-2 2024-01-23 00:27:57 +03:00
6be7ada9a1 create-reaction-revision 2024-01-22 23:54:02 +03:00
ad45cd4b10 minor-fixes
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-22 22:21:41 +03:00
0ebea28cce schema-upgrade
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-22 21:38:38 +03:00
a3688ba29a viewed-by-author-by-topic-feat
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-22 21:20:17 +03:00
f67ef7dd05 create-shout-fix 2024-01-22 19:57:48 +03:00
ff6637a51e precounted-views-import 2024-01-22 19:17:39 +03:00
f08a00e3c2 imports-fix 2024-01-22 18:48:58 +03:00
cdb54dbbe0 schema-path-fix 2024-01-22 18:45:35 +03:00
9bd458c47c add
Some checks failed
deploy / deploy (push) Failing after 6s
2024-01-22 18:42:45 +03:00
7b5330625b get-my-followed-fix-2 2024-01-18 15:30:53 +03:00
4320c9674c get-my-followed-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-18 15:12:40 +03:00
9812b308b3 load_shouts_random_top-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-18 14:45:47 +03:00
a43eaee8e0 ackee-load-fix
Some checks failed
deploy / deploy (push) Failing after 8s
2024-01-13 15:57:35 +03:00
033a8b6534 viewed-service-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-13 15:44:56 +03:00
8f690af6ef from-topic-follower-fix
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-13 11:49:12 +03:00
8050a7e828 reactions-by-fix+reacted-shouts-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-13 11:15:45 +03:00
d561deeb73 v2-deploy
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-13 11:03:35 +03:00
9c804bc873 get-my-followed-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2024-01-13 11:01:59 +03:00
28f1f1cc57 reactions-sort-groupby-fix
All checks were successful
deploy / deploy (push) Successful in 1m44s
2024-01-13 10:27:45 +03:00
3a0683137d reactions-order-fix 2024-01-13 09:59:56 +03:00
10be35c78c dokku-conf 2024-01-11 20:23:02 +03:00
bd31c0afc5 no-presence-sigil 2024-01-11 20:02:39 +03:00
d9e1fb5161 no-gateway-sigil 2024-01-11 19:52:10 +03:00
3175fbd4a4 start-fix
All checks were successful
deploy / deploy (push) Successful in 1m36s
2024-01-10 16:36:42 +03:00
1b2b060b23 0.2.19-fixes
Some checks failed
deploy / deploy (push) Failing after 1m35s
2024-01-10 16:29:49 +03:00
14dc1c761a fix-get-author-i
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-29 02:31:44 +03:00
aa9ffd3053 ratings-update
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-28 01:37:54 +03:00
0ba38ac700 author-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-28 01:09:38 +03:00
9968fb27f4 author-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-28 01:05:52 +03:00
6207f7d3ed author-rating-fix
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-28 00:30:18 +03:00
da3e7e55fd logs-gic
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-25 10:48:50 +03:00
48b8209e23 search-query-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-25 06:16:40 +03:00
c4c7ce0ad4 search-query-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-25 05:04:53 +03:00
5492887a10 search-query-fix-6
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-25 05:01:49 +03:00
ec70549e48 search-query-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-25 04:56:30 +03:00
c76e1625f3 search-query-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-25 04:52:40 +03:00
d528da9b4a search-query-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-25 04:45:21 +03:00
f4f1b3bb45 search-query-fix
Some checks failed
deploy / deploy (push) Failing after 22s
2023-12-25 04:35:21 +03:00
15fbc56d78 search-results-fix
Some checks failed
deploy / deploy (push) Failing after 1m24s
2023-12-25 04:27:02 +03:00
a4b0fd1a46 add-role-feature
Some checks failed
deploy / deploy (push) Failing after 1m23s
2023-12-25 01:42:39 +03:00
2547bd111b logs-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-25 01:13:17 +03:00
935a12945d case-fix
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-25 01:08:31 +03:00
0ea9f45854 load-random-topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-25 01:06:27 +03:00
c236768c07 trig
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-25 00:02:54 +03:00
88d33f96b0 commented-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-24 21:38:16 +03:00
f9abe421aa commented-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-24 20:46:50 +03:00
8c67438d01 commented-outerjoin-fix
Some checks failed
deploy / deploy (push) Failing after 5s
2023-12-24 18:34:06 +03:00
392712c604 sqlalchemy-debug
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-24 17:25:57 +03:00
8856bfc978 resolvers-fix
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-23 22:00:22 +03:00
bf2c5b67e3 cache-fix
Some checks failed
deploy / deploy (push) Failing after 9s
2023-12-23 08:40:41 +03:00
8e28e3d86d model-fix
All checks were successful
deploy / deploy (push) Successful in 1m22s
2023-12-22 21:25:21 +03:00
4fb581de2d random-topic-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m23s
2023-12-22 21:22:23 +03:00
d9d2e5e954 random-topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-22 21:15:26 +03:00
d65687afb3 unrated-fi
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-22 21:12:42 +03:00
d3ea567797 postmerge
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-22 21:08:37 +03:00
4e769332b7 viewed-fix
All checks were successful
deploy / deploy (push) Successful in 1m22s
2023-12-22 12:09:24 +03:00
b502c581f7 search-result-schema-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-19 15:42:46 +03:00
56cdd4e0f9 search-result-schema-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m19s
2023-12-19 15:32:34 +03:00
d14f0c2f95 search-result-schema-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m21s
2023-12-19 15:28:55 +03:00
5aa8258f16 search-result-schema-fix
All checks were successful
deploy / deploy (push) Successful in 1m21s
2023-12-19 15:18:58 +03:00
71000aad35 search-debug
All checks were successful
deploy / deploy (push) Successful in 1m22s
2023-12-19 15:03:27 +03:00
f52db8f9e5 get-authors-all
All checks were successful
deploy / deploy (push) Successful in 2m3s
2023-12-19 11:09:50 +03:00
8e8952dd46 last-seen-mark-remove
All checks were successful
deploy / deploy (push) Successful in 1m21s
2023-12-18 18:37:39 +03:00
8830908307 auth-connector-less
All checks were successful
deploy / deploy (push) Successful in 1m20s
2023-12-18 10:12:17 +03:00
64b571fccd schema-fix
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-18 03:55:12 +03:00
a2ab5e8473 update-last-seen-author
All checks were successful
deploy / deploy (push) Successful in 1m24s
2023-12-18 01:20:00 +03:00
a6c5243c06 viewed-service-fixes
All checks were successful
deploy / deploy (push) Successful in 1m23s
2023-12-17 23:30:20 +03:00
2c6b872acb following-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:30:28 +03:00
5bac172cce less-logs-auth
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:27:26 +03:00
49fe665d4d following-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-17 15:22:07 +03:00
5cccaf43f7 following-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:15:08 +03:00
ea5b9e5b09 following-fix
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:07:53 +03:00
a79f3cd5ec community-author-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-17 09:23:15 +03:00
af4c1efd1c less-logs
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-17 09:20:33 +03:00
312900cec1 community-author-fix
All checks were successful
deploy / deploy (push) Successful in 1m41s
2023-12-17 09:17:23 +03:00
edf20466d6 formatting
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-17 08:40:05 +03:00
509f4409ff upgraded-resolvers-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-17 08:28:34 +03:00
bb0a218eb7 new-resolvers
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 08:16:08 +03:00
81173f989a version-upgrade-0.2.18
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 08:08:35 +03:00
4697b44504 import-fix
All checks were successful
deploy / deploy (push) Successful in 1m55s
2023-12-17 07:59:16 +03:00
cd0ba88462 comminity-author-link-name-fix
Some checks failed
deploy / deploy (push) Failing after 1m22s
2023-12-16 20:03:00 +03:00
d0ce4dd3d3 webhook-name-fix
Some checks failed
deploy / deploy (push) Failing after 1m24s
2023-12-16 19:59:43 +03:00
692dd9cfe0 resolvers-updates
Some checks failed
deploy / deploy (push) Failing after 1m30s
2023-12-16 18:24:30 +03:00
bf7bc03e50 webhook-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-15 19:27:23 +03:00
642c4eeb9d debug-webhook
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-15 18:46:53 +03:00
7e16ee97fa webhook-debug
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-15 18:28:44 +03:00
a8ee8cde0b author-hook-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-15 17:37:32 +03:00
f9afe3d9dd author-hook-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-15 17:25:21 +03:00
1ca23cc159 author-debug
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-15 16:59:03 +03:00
50016c0ba7 auth-debug
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-15 16:55:12 +03:00
db7aee730f debug-get-author
All checks were successful
deploy / deploy (push) Successful in 1m41s
2023-12-15 16:48:47 +03:00
68978fa1c0 json-fix
All checks were successful
deploy / deploy (push) Successful in 32s
2023-12-14 03:06:35 +03:00
ab9be5ef14 encode-try
All checks were successful
deploy / deploy (push) Successful in 1m42s
2023-12-14 00:57:32 +03:00
2f13943781 fix-operation
All checks were successful
deploy / deploy (push) Successful in 1m40s
2023-12-14 00:53:37 +03:00
afb65d396b operation-name-fix
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-14 00:47:02 +03:00
b36a655090 logs-fix
All checks were successful
deploy / deploy (push) Successful in 1m36s
2023-12-14 00:17:20 +03:00
8fb2764bc1 debug-gql
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-14 00:10:34 +03:00
2518e0357b dep-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-13 23:54:38 +03:00
2fb48d76b6 dep-fix
All checks were successful
deploy / deploy (push) Successful in 1m39s
2023-12-13 23:48:42 +03:00
510402032d auth-connector-fix-3
Some checks failed
deploy / deploy (push) Failing after 1m28s
2023-12-13 23:42:52 +03:00
f51d7539eb auth-connector-fix-2
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-13 23:42:19 +03:00
99349dcad6 auth-connector-fix
Some checks failed
deploy / deploy (push) Failing after 1m31s
2023-12-13 23:39:25 +03:00
c97bd9c784 debug-get-author-2
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-13 22:59:21 +03:00
c68900babf debug-response-3
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-13 21:33:23 +03:00
d1447d3c05 debug-response
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-13 20:49:26 +03:00
fa0e815f13 gql-fix
All checks were successful
deploy / deploy (push) Successful in 1m36s
2023-12-13 20:42:00 +03:00
a86739ed1b debug-response
All checks were successful
deploy / deploy (push) Successful in 1m36s
2023-12-13 20:13:57 +03:00
29c02158b7 debug-authors-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-13 16:32:02 +03:00
4bd5109034 debug-authors
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-13 16:27:51 +03:00
359cfb1b75 validate-jwt
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-13 16:20:06 +03:00
a72dd5675e authorizer-full-vars-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-12 11:25:34 +03:00
d27a6897cc authorizer-full-vars-fix
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-12 11:19:22 +03:00
74ca120879 authorizer-connector-debug
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-12 10:30:32 +03:00
954c3740cd authorizer-connector-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m42s
2023-12-12 08:00:46 +03:00
3b7b47599c authorizer-connector-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-11 23:06:51 +03:00
2f3ceae8c2 authorizer-connector-fix-6
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-11 22:56:59 +03:00
27612186de authorizer-connector-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-11 22:50:13 +03:00
54acfe2b89 authorizer-connector-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-11 22:39:58 +03:00
ccfeb89e66 authorizer-connector-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-11 22:36:46 +03:00
7937fb89d4 authorizer-connector-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-11 22:12:18 +03:00
7d0268ec52 authorizer-connector-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-11 22:10:45 +03:00
2184fcf1f9 reaction-order-fix
All checks were successful
deploy / deploy (push) Successful in 1m48s
2023-12-11 17:57:34 +03:00
159c151ae7 reactions-sort-order-fix
Some checks failed
deploy / deploy (push) Failing after 1m22s
2023-12-10 01:47:22 +03:00
de63f313a5 paginated-authors
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-09 22:02:04 +03:00
275a1f9a08 pop-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 21:21:38 +03:00
1f6f722eef pop-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-09 21:15:30 +03:00
b992a73698 pop-fix
All checks were successful
deploy / deploy (push) Successful in 1m55s
2023-12-09 21:03:53 +03:00
d37f68869c alchemy-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 20:15:57 +03:00
0b69b0b856 import-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 20:12:04 +03:00
3acedcc7d6 main_topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 19:45:02 +03:00
724e9bd5a0 add-main_topic
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-09 19:22:47 +03:00
c1adaf3ed6 schema-fix-author-fix
All checks were successful
deploy / deploy (push) Successful in 1m42s
2023-12-07 21:29:25 +03:00
bb55cfaefe get-shout-fix 2023-12-03 01:42:16 +03:00
b93d91528b search-resolver-fix 2023-12-03 01:22:16 +03:00
4f857e1425 revert-fix 2023-12-03 01:14:36 +03:00
748e3c6db3 join-cond-2 2023-12-03 00:39:06 +03:00
e2271e38e1 join-cond 2023-12-03 00:36:22 +03:00
a6df648af1 stat-fix-8 2023-12-03 00:29:57 +03:00
a3294de4dc stat-fix-7 2023-12-02 23:46:01 +03:00
89c453fedc stat-fix-6 2023-12-02 23:44:36 +03:00
ebe034a527 stat-fix-5 2023-12-02 23:38:28 +03:00
2e3e79f51e stat-fix-4 2023-12-02 23:35:06 +03:00
fcdaabd10d stat-fix-3 2023-12-02 23:30:06 +03:00
807f6ba5b1 stat-fix-2 2023-12-02 23:23:16 +03:00
16bbe995b7 stat-fix 2023-12-02 23:17:26 +03:00
6c607732a8 all-authors-fix-2 2023-12-02 22:45:41 +03:00
1cdf286594 all-authors-fix 2023-12-02 22:33:00 +03:00
fc3745f07e groupby-fix-2 2023-12-02 22:17:09 +03:00
a8b8637057 groupby-fix 2023-12-02 22:13:47 +03:00
34940178ad resolvers-fix 2023-12-02 09:25:08 +03:00
5fe27f9c0c .. 2023-12-01 13:00:10 +03:00
c049f882f3 joinedload-fix-5
All checks were successful
deploy / deploy (push) Successful in 5s
2023-11-30 20:08:56 +03:00
dbab772e62 joinedload-fix-2 2023-11-30 19:41:53 +03:00
e82ca2e385 joinedload-fix 2023-11-30 19:37:53 +03:00
f1ccef7919 no-debug 2023-11-30 16:07:30 +03:00
5f0a8f3b10 replyto-fix 2023-11-30 15:12:12 +03:00
95507ffa48 topicstat-fix 2023-11-30 15:07:08 +03:00
ecf0727631 joined-createdby-fix 2023-11-30 14:04:55 +03:00
e2f2dff755 topics-sql-debug 2023-11-30 13:30:50 +03:00
919aaa951f string-enum-fix 2023-11-30 11:40:27 +03:00
1362eaa125 createdby-fix 2023-11-30 11:27:06 +03:00
685988c219 createdby 2023-11-30 11:04:03 +03:00
2d3f7a51b4 enum-fix 2023-11-30 10:38:41 +03:00
537d588853 stats-fix 2023-11-30 00:21:22 +03:00
f57719d182 author-stats 2023-11-29 23:53:26 +03:00
ece918ac2c plus-fix 2023-11-29 23:22:39 +03:00
a0ee3a1be9 less-classes 2023-11-29 21:11:05 +03:00
dc80255fc7 schema-fix 2023-11-29 15:14:21 +03:00
28853c3a4b published-filter 2023-11-29 15:11:05 +03:00
4a1d7280fc schema-fix 2023-11-29 15:01:51 +03:00
ecaa4ffbc5 param-fox 2023-11-29 14:28:08 +03:00
3454766063 reaction-fix 2023-11-29 14:24:59 +03:00
cd955ecf8a createdat-fix 2023-11-29 14:16:09 +03:00
a950f57efc groupby-createdby 2023-11-29 13:56:26 +03:00
cdb9d31fa4 query-fix 2023-11-29 13:50:20 +03:00
6bac6b737e isnot-fix 2023-11-29 13:44:40 +03:00
af761f916f reactions-filters-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-29 12:59:00 +03:00
f930822d8a filters-fix-2
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 12:33:33 +03:00
64e8c8afd7 filters-fix 2023-11-29 12:29:09 +03:00
44b7a3da98 visibility-fix-2
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 12:19:01 +03:00
0920af7e77 visibility-filter-fix
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 12:16:37 +03:00
fe4e37663e pyrafixes
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 11:00:00 +03:00
63eb952655 aiohttp-try 2023-11-29 10:23:41 +03:00
36ab83d02f shoutauthor-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-29 09:32:24 +03:00
cefc77e8e4 sentry-add
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 09:14:23 +03:00
4b77cea690 sentry-integrations 2023-11-29 07:48:31 +03:00
4ca9491824 routes-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-29 00:19:33 +03:00
6cd2fc0f80 typed-endpoint
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 00:13:46 +03:00
aaf4c0b876 trig-ci
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-28 23:13:42 +03:00
269c0e449f webhook-fix
All checks were successful
deploy / deploy (push) Successful in 9s
2023-11-28 22:13:53 +03:00
0c2af2bdf4 new-author-webhook-endpoint
All checks were successful
deploy / deploy (push) Successful in 2m14s
2023-11-28 22:07:53 +03:00
a241a098b9 create-invite-fix
All checks were successful
deploy / deploy (push) Successful in 2m20s
2023-11-28 15:56:32 +03:00
01d7dadd78 load-shouts-filters
All checks were successful
deploy / deploy (push) Successful in 2m17s
2023-11-28 14:17:21 +03:00
168a7079f6 schema-fix
All checks were successful
deploy / deploy (push) Successful in 2m16s
2023-11-28 13:59:36 +03:00
a21efb99df author-invitee-fix
All checks were successful
deploy / deploy (push) Successful in 2m8s
2023-11-28 13:55:05 +03:00
0240005ed1 invite-feature
All checks were successful
deploy / deploy (push) Successful in 2m10s
2023-11-28 13:46:06 +03:00
13ba5ebaed shout-followers
All checks were successful
deploy / deploy (push) Successful in 2m20s
2023-11-28 12:11:45 +03:00
20f7c22441 0.2.16-resolvers-revision
All checks were successful
deploy / deploy (push) Successful in 2m22s
2023-11-28 10:53:48 +03:00
3cf86d9e6e isnot-fix
All checks were successful
deploy / deploy (push) Successful in 2m11s
2023-11-28 08:56:57 +03:00
14ae7fbcc9 resolvers-fix
All checks were successful
deploy / deploy (push) Successful in 2m14s
2023-11-27 21:18:52 +03:00
5f8ec549df emptybody-fix
All checks were successful
deploy / deploy (push) Successful in 2m12s
2023-11-27 21:03:59 +03:00
3b0aedf959 loadshouts-fix
All checks were successful
deploy / deploy (push) Successful in 2m10s
2023-11-27 20:35:26 +03:00
53a0f2e328 iffix
All checks were successful
deploy / deploy (push) Successful in 2m20s
2023-11-27 19:15:34 +03:00
caa2dbfdf3 reaction-model-fix
All checks were successful
deploy / deploy (push) Successful in 2m19s
2023-11-27 19:03:47 +03:00
909ddbd79d pyright-fix
All checks were successful
deploy / deploy (push) Successful in 2m16s
2023-11-27 11:12:42 +03:00
fe60d625e5 notest 2023-11-24 05:21:31 +03:00
4e7250acef logs-fix
All checks were successful
deploy / deploy (push) Successful in 2m5s
2023-11-24 04:53:30 +03:00
167eed436d my-subs-fix
All checks were successful
deploy / deploy (push) Successful in 2m6s
2023-11-24 04:13:55 +03:00
7257f52aeb query-schema-fix
All checks were successful
deploy / deploy (push) Successful in 2m2s
2023-11-24 02:10:13 +03:00
a63cf24812 0.2.15
Some checks failed
deploy / deploy (push) Failing after 1m58s
2023-11-24 02:00:28 +03:00
c150d28447 schema-fix 2023-11-23 23:30:00 +03:00
7d5dc8b8cd nochecks
All checks were successful
deploy / deploy (push) Successful in 2m11s
2023-11-23 01:19:50 +03:00
3ab5d53439 curl-fix
Some checks failed
deploy / deploy (push) Failing after 22s
2023-11-23 00:19:15 +03:00
4b85b602c2 community-fix-2
Some checks failed
deploy / deploy (push) Failing after 1m50s
2023-11-23 00:12:23 +03:00
bdae67804e community-fix
Some checks failed
deploy / deploy (push) Failing after 1m56s
2023-11-23 00:05:04 +03:00
af5746c5d8 imports-fix
Some checks failed
deploy / deploy (push) Failing after 2m1s
2023-11-22 21:23:15 +03:00
3379376016 binary-back-dburl-fix
Some checks failed
deploy / deploy (push) Failing after 1m53s
2023-11-22 21:06:45 +03:00
998340baf8 psycopg2-ix
Some checks failed
deploy / deploy (push) Failing after 1m36s
2023-11-22 21:04:51 +03:00
9ee850ddb7 import-fix
Some checks failed
deploy / deploy (push) Failing after 25s
2023-11-22 20:56:25 +03:00
db76ba3733 0.2.14
Some checks failed
deploy / deploy (push) Failing after 2m1s
2023-11-22 19:38:39 +03:00
e2082b48d3 orm-fix
Some checks failed
deploy / deploy (push) Failing after 1m46s
2023-11-04 12:43:08 +03:00
435d1e4505 new-version-0-2-13
Some checks failed
deploy / deploy (push) Failing after 1m54s
2023-11-03 13:10:22 +03:00
1f5e5472c9 refactoring
Some checks failed
deploy / deploy (push) Failing after 1m56s
2023-10-25 21:33:53 +03:00
20e1fa989a shout-community-fix
Some checks failed
deploy / deploy (push) Failing after 2m18s
2023-10-25 20:25:53 +03:00
04dedaa3a3 updates-fixes
Some checks failed
deploy / deploy (push) Failing after 2m0s
2023-10-25 20:02:01 +03:00
46e684b28d core-update
Some checks failed
deploy / deploy (push) Failing after 2m0s
2023-10-25 19:55:30 +03:00
e151034bab fix-imports
Some checks failed
deploy / deploy (push) Failing after 1m43s
2023-10-23 17:51:13 +03:00
bf241a8fbd merged-isolated-core
Some checks failed
deploy / deploy (push) Failing after 1m46s
2023-10-23 17:47:11 +03:00
b675188013 upd
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-10-19 17:42:42 +03:00
fa7a04077a feat: /connect/
All checks were successful
deploy / deploy (push) Successful in 28s
2023-10-18 07:33:36 -03:00
24be18abf1 feat: /connect/=
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-18 07:30:22 -03:00
83b5c2c139 feat: presence-8080
Some checks failed
deploy / deploy (push) Failing after 5s
2023-10-18 07:27:06 -03:00
9e84d6ea37 feat: presence-8080
Some checks are pending
deploy / deploy (push) Waiting to run
2023-10-18 07:25:34 -03:00
4da963f9c8 Noop commit to sync with server changes
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-18 07:18:34 -03:00
c1d6a2d4e3 feat: add wildcat to /connect for handle Token
Some checks failed
deploy / deploy (push) Failing after 5s
2023-10-18 07:13:10 -03:00
518bc4020b feat: add wildcat to /connect for handle Token
Some checks are pending
deploy / deploy (push) Waiting to run
2023-10-18 07:11:08 -03:00
e13cdd7298 feat: add wildcat to /connect for handle Token
All checks were successful
deploy / deploy (push) Successful in 28s
2023-10-18 06:57:40 -03:00
4fec0ca7fb fix-follow-author-notification
All checks were successful
deploy / deploy (push) Successful in 30s
2023-10-16 22:24:10 +03:00
b13d532da2 postmerge
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-16 20:45:01 +03:00
b03ac825b6 unread-fix3
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-16 19:13:39 +03:00
49423ffb93 unread-fix-2
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-16 19:00:18 +03:00
faa97d27c2 unread-fix
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-16 18:56:03 +03:00
6e0cb18909 cleanup-notifications
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-16 18:30:54 +03:00
066bf72547 cleanup-orm
Some checks failed
deploy / deploy (push) Failing after 23s
2023-10-16 18:28:43 +03:00
bc08ece4c3 user User for awhile, filter follower fields
Some checks failed
deploy / deploy (push) Failing after 23s
2023-10-16 18:25:15 +03:00
562a919fca post-merge
Some checks failed
deploy / deploy (push) Failing after 22s
2023-10-16 18:21:05 +03:00
51ad266b62 Merge branch 'main' of https://github.com/Discours/discours-backend into feature/refactoring-services 2023-10-16 18:19:06 +03:00
15ef976538 using presence service 2023-10-16 18:18:29 +03:00
823b3c56c1 presence service interface fix 2023-10-16 17:51:08 +03:00
34e6a03a89 following manager does not manage chats 2023-10-16 17:50:40 +03:00
0c75902a64 fix-unread 2023-10-16 17:50:05 +03:00
582a21408e feat:test
All checks were successful
deploy / deploy (push) Successful in 1m40s
2023-10-16 09:14:37 -03:00
9a7852e17c feat: add to CI/CD piplin
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-15 15:33:11 -03:00
cbd4c41d32 feat: add to CI/CD piplin
Some checks failed
deploy / deploy (push) Failing after 22s
2023-10-15 15:32:06 -03:00
fd304768b7 feat: add to CI/CD piplin
Some checks failed
deploy / deploy (push) Failing after 5s
2023-10-15 15:27:44 -03:00
fe078809d6 feat: add to CI/CD piplin 2023-10-15 15:26:48 -03:00
36d36defd8 debug: sigil / after proxy connect 2023-10-15 15:26:48 -03:00
6047a3b259 unread-counter-fix-2 2023-10-13 15:20:06 +03:00
f86da630e8 redis-debug-fix 2023-10-13 15:17:44 +03:00
7348e5d9fe unread-counter-fix2 2023-10-13 15:13:01 +03:00
f5da6d450b unread-counter-fix 2023-10-13 15:10:56 +03:00
882ff39f28 redis-debug 2023-10-13 15:01:35 +03:00
7cd5929df2 token-type-tolerance 2023-10-13 14:47:31 +03:00
e9f68c8fb1 token-type-tolerance 2023-10-13 14:45:24 +03:00
792d60453a new-query-fix2 2023-10-13 14:35:10 +03:00
e648091a3c new-query-fix 2023-10-13 14:32:55 +03:00
1b7aa6aa0a some-more-queries-fix-3 2023-10-13 14:07:13 +03:00
d881f9da27 some-more-queries-fix-2 2023-10-13 14:02:44 +03:00
3f1aff2d0f some-more-queries-fix 2023-10-13 14:00:30 +03:00
d4dbf5c0ae some-more-queries 2023-10-13 13:59:24 +03:00
fed154c7f1 fix-redis 2023-10-13 13:48:17 +03:00
c1abace1c0 few-more-resolvers-fix-2 2023-10-13 13:46:34 +03:00
31824cccc9 few-more-resolvers-fix 2023-10-13 13:45:27 +03:00
85a9077792 few-more-resolvers 2023-10-13 13:41:47 +03:00
bbd8f61408 redis update 2023-10-13 13:13:45 +03:00
82618bf7f3 merged 2023-10-11 23:00:15 +03:00
9720b9f26b Merge branch 'feature/refactoring-services' of https://dev.discours.io/discours.io/backend into feature/refactoring-services 2023-10-11 22:59:13 +03:00
2c15852e9b fix-str 2023-10-11 22:59:05 +03:00
e39450d33b fix: sigil proxy for /connect 2023-10-11 10:49:52 -03:00
df2f097e11 fix: sigil proxy for /connect 2023-10-11 10:42:18 -03:00
a14c70e8c7 unmerge 2023-10-11 15:56:28 +03:00
9c651a6d72 debug 2023-10-11 15:41:04 +03:00
09d77bb1d1 merge-fix-7 2023-10-11 13:07:49 +03:00
eca3de7579 merge-fix-6 2023-10-11 13:02:17 +03:00
2fafe8b618 merged-fix-5 2023-10-11 12:26:08 +03:00
62020bd668 merged-fix-4 2023-10-11 12:23:09 +03:00
f1bdd7a0f8 merged-fix-3 2023-10-11 12:20:58 +03:00
6e63be30e0 merged-fix-2 2023-10-11 12:00:36 +03:00
d89235e82a merged-fix 2023-10-11 11:57:58 +03:00
6252671b85 merged 2023-10-11 11:56:46 +03:00
0e8b39bed6 Merge branch 'main' of dev.discours.io:discoursio-api into feature/refactoring-services 2023-10-11 10:28:04 +03:00
d50a510d52 fix-profile 2023-10-11 08:36:40 +03:00
e1245d1f46 feat: sigil with logs and reguest methods 2023-10-10 09:13:25 -03:00
fbeaac5cad feat: sigil with logs and reguest methods 2023-10-10 09:13:25 -03:00
d6913d6ff5 feat: sigil with logs and reguest methods 2023-10-10 07:52:43 -03:00
93b86eab86 feat: sigil with logs and reguest methods 2023-10-10 07:48:33 -03:00
0eed70c102 port=8080 2023-10-10 01:09:15 +03:00
14fa314e2a fix-load 2023-10-10 00:34:51 +03:00
ad97aa2227 fix-slug-raise-error 2023-10-10 00:29:22 +03:00
57aa4caa84 started-log 2023-10-10 00:22:16 +03:00
0bd44d1fab new-sigi 2023-10-09 23:47:18 +03:00
177a47ba7c _Service-redeploy3 2023-10-06 12:57:08 +03:00
32b00d5065 merged 2023-10-06 12:51:48 +03:00
01be3ac95e schema-sdl-serv 2023-10-06 12:51:07 +03:00
d1366d0b88 feat: @read about keys 2023-10-06 06:14:24 -03:00
fada9a289a feat: right schema in schema.py 2023-10-06 06:05:01 -03:00
6d56e8b3a7 feat: right schema in schema.py 2023-10-06 06:02:11 -03:00
c5ea08f939 feat: add to SDL full Query Mutation schema 2023-10-06 05:47:41 -03:00
d9f47183c8 feat: add in schema.py resolver fro _server 2023-10-06 03:51:23 -03:00
ilya-bkv
6ddfc11a91 getAuthor add stat 2023-10-06 03:39:09 -03:00
2697ec4fcd _Service-redeploy2 2023-10-06 06:39:01 +03:00
e244549a1d _Service 2023-10-06 06:29:52 +03:00
150449a0cf port=80 2023-10-06 05:33:51 +03:00
aa5709c695 fix-reqs2 2023-10-06 03:56:27 +03:00
8a3aa1dae6 fix-reqs 2023-10-06 03:55:43 +03:00
12f65bd8fa fix-poetry-deps 2023-10-06 03:33:48 +03:00
bab6990c87 fix-poetryenv 2023-10-06 03:31:45 +03:00
34f9139742 fix-dockerfile 2023-10-06 03:24:40 +03:00
b64d9d5014 poetry-rty 2023-10-06 03:22:37 +03:00
12416c1b83 path-fix 2023-10-06 02:03:36 +03:00
b2e196d261 forked-ariadne 2023-10-06 02:01:18 +03:00
0e8e8f4d04 git+ssh 2023-10-06 01:49:34 +03:00
8de2eb385b async-fix 2023-10-06 01:45:32 +03:00
12c43dbf32 fix-sync 2023-10-06 01:22:58 +03:00
d34597e349 debug-stat 2023-10-06 01:15:23 +03:00
78a3354d5f logs-fix 2023-10-06 01:12:34 +03:00
720d8a4a68 no-sigil-here2 2023-10-06 01:02:51 +03:00
ffa3fbb252 no-sigil-here 2023-10-06 01:02:14 +03:00
400fff4ef0 schema-no-subs2 2023-10-06 00:42:34 +03:00
4f0377c57d schema-no-subs 2023-10-06 00:34:08 +03:00
7761ccf2d5 schema-path-fix 2023-10-06 00:22:54 +03:00
4de1e64ba2 schema-fix 2023-10-06 00:20:02 +03:00
bbc5dc441d requests-transport 2023-10-06 00:17:24 +03:00
120208a621 rollback-requests 2023-10-06 00:10:46 +03:00
8524d0f843 edge 2023-10-06 00:05:15 +03:00
d26d444975 deps-workaround2 2023-10-06 00:02:25 +03:00
e0bd938a6e deps-workaround 2023-10-05 23:57:04 +03:00
aed91c6375 deps... 2023-10-05 23:55:23 +03:00
34f3098a0d import-fix6 2023-10-05 23:50:14 +03:00
c57f3857a6 import-fix4 2023-10-05 23:47:51 +03:00
c665c0056c import-fix4 2023-10-05 23:45:21 +03:00
d30b4c7d2b import-fix3 2023-10-05 23:42:48 +03:00
f468ccca93 import-fix2 2023-10-05 23:34:02 +03:00
d5b0aaba9b import-fix 2023-10-05 23:31:21 +03:00
da5bbc79b4 deps... 2023-10-05 23:25:52 +03:00
3c936e7860 deps... 2023-10-05 23:22:11 +03:00
46044a0f98 migration-removed 2023-10-05 23:18:55 +03:00
5fedd007c7 git-dep3 2023-10-05 23:18:06 +03:00
3d659caa6e git-dep2 2023-10-05 23:05:09 +03:00
9d2cd9f21f git-dep 2023-10-05 23:04:09 +03:00
f068869727 git-install 2023-10-05 23:01:25 +03:00
45d187786b fix-imports 2023-10-05 22:59:50 +03:00
f6e3320e18 async-mail 2023-10-05 22:47:02 +03:00
9537814718 deps-fixes 2023-10-05 22:38:35 +03:00
458823b894 dockerfile-fix 2023-10-05 22:19:20 +03:00
b8e6f7bb5a requests-removed+fixes 2023-10-05 22:18:05 +03:00
fbc85f6c2d aioredis-removed 2023-10-05 22:00:24 +03:00
deac939ed8 restructured,inbox-removed 2023-10-05 21:46:18 +03:00
6dfec6714a Merge branch 'main' of https://github.com/Discours/discours-backend 2023-10-05 20:22:48 +03:00
2c72189055 lintbump 2023-09-28 15:51:28 +03:00
131 changed files with 5599 additions and 10956 deletions

.flake8

@@ -1,6 +0,0 @@
[flake8]
ignore = E203,W504,W191,W503
exclude = .git,__pycache__,orm/rbac.py
max-complexity = 10
max-line-length = 108
indent-string = ' '

.gitea/workflows/main.yml

@@ -0,0 +1,34 @@
name: 'Deploy on push'
on: [push]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Cloning repo
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Get Repo Name
id: repo_name
run: echo "::set-output name=repo::$(echo ${GITHUB_REPOSITORY##*/})"
- name: Get Branch Name
id: branch_name
run: echo "::set-output name=branch::$(echo ${GITHUB_REF##*/})"
- name: Push to dokku for main branch
if: github.ref == 'refs/heads/main'
uses: dokku/github-action@master
with:
branch: 'main'
git_remote_url: 'ssh://dokku@v2.discours.io:22/discoursio-api'
ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
- name: Push to dokku for dev branch
if: github.ref == 'refs/heads/dev'
uses: dokku/github-action@master
with:
branch: 'dev'
git_remote_url: 'ssh://dokku@staging.discours.io:22/core'
ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}

.gitignore

@@ -147,3 +147,7 @@ migration/content/**/*.md
*.csv
dev-server.pid
backups/
poetry.lock
.ruff_cache
.jj
.zed

.pre-commit-config.yaml

@@ -1,44 +1,18 @@
exclude: |
(?x)(
^tests/unit_tests/resource|
_grpc.py|
_pb2.py
)
default_language_version:
python: python3.8
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-case-conflict
- id: check-docstring-first
- id: check-json
- id: check-merge-conflict
- id: check-toml
- id: check-yaml
- id: check-toml
- id: end-of-file-fixer
- id: trailing-whitespace
- id: check-added-large-files
- id: detect-private-key
- id: check-ast
- id: check-merge-conflict
- repo: https://github.com/timothycrosley/isort
rev: 5.5.3
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.5
hooks:
- id: isort
- repo: https://github.com/ambv/black
rev: 20.8b1
hooks:
- id: black
args:
- --line-length=100
- --skip-string-normalization
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.3
hooks:
- id: flake8
args:
- --max-line-length=100
- --disable=protected-access
- id: ruff
args: [--fix]

CHANGELOG.txt

@@ -0,0 +1,145 @@
[0.3.3]
- feat: sentry integration enabled with glitchtip
- fix: reindex on update shout
- packages upgrade, isort
[0.3.2]
- redis cache for what author follows
- redis cache for followers
- graphql add query: get topic followers
[0.3.1]
- enabling sentry
- long query log report added
- editor fixes
- authors links cannot be updated by update_shout anymore
[0.3.0]
- Shout.featured_at timestamp of the frontpage featuring event
- added proposal accepting logics
- schema modulized
- Shout.visibility removed
[0.2.22]
- added precommit hook
- fmt
- granian asgi
[0.2.21]
- fix: rating logic
- fix: load_top_random_shouts
- resolvers: add_stat_* refactored
- services: use google analytics
- services: minor fixes search
[0.2.20]
- services: ackee removed
- services: following manager fixed
- services: import views.json
[0.2.19]
- fix: adding 'author' role
- fix: stripping user_id in auth connector
[0.2.18]
- schema: added Shout.seo string field
- resolvers: added /new-author webhook resolver
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is .author
- resolvers: get_authors_all and load_authors_by
- services: auth connector upgraded
[0.2.17]
- schema: enum types workaround, ReactionKind, InviteStatus, ShoutVisibility
- schema: Shout.created_by, Shout.updated_by
- schema: Shout.authors can be empty
- resolvers: optimized reacted shouts updates query
[0.2.16]
- resolvers: collab inviting logics
- resolvers: queries and mutations revision and renaming
- resolvers: delete_topic(slug) implemented
- resolvers: added get_shout_followers
- resolvers: load_shouts_by filters implemented
- orm: invite entity
- schema: Reaction.range -> Reaction.quote
- filters: time_ago -> after
- httpx -> aiohttp
[0.2.15]
- schema: Shout.created_by removed
- schema: Shout.mainTopic removed
- services: cached elasticsearch connector
- services: auth is using user_id from authorizer
- resolvers: notify_* usage fixes
- resolvers: getAuthor now accepts slug, user_id or author_id
- resolvers: login_required usage fixes
[0.2.14]
- schema: some fixes from migrator
- schema: .days -> .time_ago
- schema: excludeLayout + layout in filters -> layouts
- services: db access simpler, no contextmanager
- services: removed Base.create() method
- services: rediscache updated
- resolvers: get_reacted_shouts_updates as followedReactions query
[0.2.13]
- services: db context manager
- services: ViewedStorage fixes
- services: views are not stored in core db anymore
- schema: snake case in model fields names
- schema: no DateTime scalar
- resolvers: get_my_feed comments filter reactions body.is_not('')
- resolvers: get_my_feed query fix
- resolvers: LoadReactionsBy.days -> LoadReactionsBy.time_ago
- resolvers: LoadShoutsBy.days -> LoadShoutsBy.time_ago
[0.2.12]
- Author.userpic -> Author.pic
- CommunityAuthor.role is string now
- Author.user is string now
[0.2.11]
- redis interface updated
- viewed interface updated
- presence interface updated
- notify on create, update, delete for reaction and shout
- notify on follow / unfollow author
- use pyproject
- devmode fixed
[0.2.10]
- community resolvers connected
[0.2.9]
- starlette is back, aiohttp removed
- aioredis replaced with aredis
[0.2.8]
- refactored
[0.2.7]
- loadFollowedReactions now with login_required
- notifier service api draft
- added shout visibility kind in schema
- community isolated from author in orm
[0.2.6]
- redis connection pool
- auth context fixes
- communities orm, resolvers, schema
[0.2.5]
- restructured
- all users have their profiles as authors in core
- gittask, inbox and auth logics removed
- settings moved to base and now smaller
- new outside auth schema
- removed gittask, auth, inbox, migration

CHECKS

@@ -1,5 +0,0 @@
WAIT=10
TIMEOUT=10
ATTEMPTS=10
/

Dockerfile

@@ -1,9 +1,25 @@
FROM python:3.10
FROM python:alpine
EXPOSE 8080
ADD nginx.conf.sigil ./
RUN /usr/local/bin/python -m pip install --upgrade pip
WORKDIR /usr/src/app
COPY requirements.txt ./
RUN pip install -r requirements.txt
COPY . .
# Update package lists and install necessary dependencies
RUN apk update && \
apk add --no-cache build-base icu-data-full curl python3-dev musl-dev && \
curl -sSL https://install.python-poetry.org | python
# Set working directory
WORKDIR /app
# Copy only the pyproject.toml file initially
COPY pyproject.toml /app/
# Install poetry and dependencies
RUN pip install poetry && \
poetry config virtualenvs.create false && \
poetry install --no-root --only main
# Copy the rest of the files
COPY . /app
# Expose the port
EXPOSE 8000
CMD ["python", "server.py"]

Procfile

@@ -1,2 +0,0 @@
web: python server.py

README.md

@@ -1,45 +1,56 @@
# discoursio-api
## Tech stack
- sqlalchemy
- redis
- ariadne
- starlette
- uvicorn
- granian
# Local development
## Local development
Install deps first
Prepare the dependencies
on osx
osx:
```
brew install redis nginx postgres
brew services start redis
```
on debian/ubuntu
debian/ubuntu:
```
apt install redis nginx
```
First, install Postgres. Then you'll need some data, so migrate it:
```
createdb discoursio
python server.py migrate
Then start postgres, redis, and our API server:
```shell
mkdir .venv
python3.12 -m venv .venv
poetry env use .venv/bin/python3.12
poetry update
poetry run server.py
```
## Connected services
Then run nginx, redis and API server
```
redis-server
pip install -r requirements.txt
python3 server.py dev
```
Inter-server communication is handled by separate service logic: the `services/*` folder contains adapters for the database and `redis`, caching, and clients for GraphQL requests.
# How to do an authorized request
### auth.py
Put the header 'Authorization' with token from signIn query or registerUser mutation.
Set the `WEBHOOK_SECRET` environment variable to accept requests at `/new-author` from the [authorization server](https://dev.discours.io/devstack/authorizer); the event is expected when a new user is created. For authorized queries and mutations, the frontend adds an authorization token to the request in the `Authorization` header.
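A minimal client-side sketch of such an authorized request, assuming the public GraphQL endpoint and a previously obtained session token (the query shape below is illustrative, not taken from the schema):
```python
import requests  # hypothetical client-side example; not a dependency of this repo

API_URL = "https://v2.discours.io"  # assumption: public GraphQL endpoint
TOKEN = "<session token from the authorizer>"  # assumption: obtained on sign-in

# The backend reads the token from the Authorization header.
response = requests.post(
    API_URL,
    json={"query": "{ load_shouts_unrated(limit: 10) { id slug } }"},
    headers={"Authorization": TOKEN},
    timeout=10,
)
print(response.json())
```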
# How to debug Ackee
### viewed.py
Set ACKEE_TOKEN var
Set the `GOOGLE_KEYFILE_PATH` and `GOOGLE_PROPERTY_ID` environment variables to fetch data from [Google Analytics](https://developers.google.com/analytics?hl=ru).
### search.py
Returns the results of user search queries in cached form from ElasticSearch with a `score`, merged with results of database queries; requested through the GraphQL API query `load_shouts_search`. Requires `ELASTIC_HOST`, `ELASTIC_PORT`, `ELASTIC_USER` and `ELASTIC_PASSWORD` to be set.
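A hedged sketch of calling `load_shouts_search` through the same endpoint; the argument and field names are assumptions inferred from the description above, not verified against the schema:
```python
import requests  # illustrative client only

SEARCH_QUERY = """
query Search($text: String!) {
  load_shouts_search(text: $text) { slug title score }
}
"""  # assumption: argument and field names

response = requests.post(
    "https://v2.discours.io",  # assumption: same endpoint as above
    json={"query": SEARCH_QUERY, "variables": {"text": "дискурс"}},
    timeout=10,
)
print(response.json())
```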
### notify.py
Sends notifications over Redis PubSub channels, following the data structure maintained by the [notification service](https://dev.discours.io/discours.io/notifier).
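A minimal sketch of what publishing such an event could look like with the `redis.execute(...)` facade used across this codebase (`services/rediscache.py`); the channel name and payload shape are assumptions, since the notifier service owns the real contract:
```python
import json

from services.rediscache import redis  # the facade main.py also imports

async def publish_follow_event(follower_id: int, author_id: int) -> None:
    # Assumption: per-author channels and a {"kind": ...} payload.
    payload = json.dumps({"kind": "follow", "follower_id": follower_id})
    await redis.execute("PUBLISH", f"author:{author_id}", payload)
```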
### unread.py
The unread message counter is obtained via a Redis query against the data of the [messaging service](https://dev.discours.io/discours.io/inbox).
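And a hedged sketch of reading the counter through the same facade; the key layout is an assumption, since the inbox service owns the actual keys:
```python
from services.rediscache import redis  # same facade as above

async def get_unread_counter(chat_id: str, author_id: int) -> int:
    # Assumption: keys look like "chats/{chat_id}/unread/{author_id}".
    value = await redis.execute("GET", f"chats/{chat_id}/unread/{author_id}")
    return int(value) if value else 0
```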

__init__.py

@@ -0,0 +1,6 @@
import os
import sys
# Get the path to the project's root directory
root_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(root_path)


@@ -1,75 +0,0 @@
import re
import nltk
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
from pymystem3 import Mystem
from string import punctuation
from transformers import BertTokenizer
nltk.download("stopwords")
def get_clear_text(text):
soup = BeautifulSoup(text, 'html.parser')
# extract the plain text from the HTML document without tags
clear_text = ''
for tag in soup.find_all():
clear_text += tag.string or ''
clear_text = re.sub(pattern='[\u202F\u00A0\n]+', repl=' ', string=clear_text)
# only words
clear_text = re.sub(pattern='[^A-ZА-ЯЁ -]', repl='', string=clear_text, flags=re.IGNORECASE)
clear_text = re.sub(pattern='\s+', repl=' ', string=clear_text)
clear_text = clear_text.lower()
mystem = Mystem()
russian_stopwords = stopwords.words("russian")
tokens = mystem.lemmatize(clear_text)
tokens = [token for token in tokens if token not in russian_stopwords \
and token != " " \
and token.strip() not in punctuation]
clear_text = " ".join(tokens)
return clear_text
# if __name__ == '__main__':
#
# # initialize the tokenizer with the pre-trained BERT model and vocabulary
# tokenizer = BertTokenizer.from_pretrained('bert-base-multilingual-cased')
#
# # split each text into smaller segments of maximum length 512
# max_length = 512
# segmented_texts = []
# for text in [clear_text1, clear_text2]:
# segmented_text = []
# for i in range(0, len(text), max_length):
# segment = text[i:i+max_length]
# segmented_text.append(segment)
# segmented_texts.append(segmented_text)
#
# # tokenize each segment using the BERT tokenizer
# tokenized_texts = []
# for segmented_text in segmented_texts:
# tokenized_text = []
# for segment in segmented_text:
# segment_tokens = tokenizer.tokenize(segment)
# segment_tokens = ['[CLS]'] + segment_tokens + ['[SEP]']
# tokenized_text.append(segment_tokens)
# tokenized_texts.append(tokenized_text)
#
# input_ids = []
# for tokenized_text in tokenized_texts:
# input_id = []
# for segment_tokens in tokenized_text:
# segment_id = tokenizer.convert_tokens_to_ids(segment_tokens)
# input_id.append(segment_id)
# input_ids.append(input_id)
#
# print(input_ids)

alembic.ini

@@ -1,110 +0,0 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = %(DB_URL)
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/README

@@ -1,3 +0,0 @@
Generic single-database configuration.
https://alembic.sqlalchemy.org/en/latest/tutorial.html

alembic/env.py

@@ -1,80 +0,0 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from settings import DB_URL
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# override DB_URL
config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
from base.orm import Base
target_metadata = [Base.metadata]
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

alembic/script.py.mako

@@ -1,26 +0,0 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

alembic/versions/fe943b098418_init_alembic.py

@@ -1,26 +0,0 @@
"""init alembic
Revision ID: fe943b098418
Revises:
Create Date: 2023-08-19 01:37:57.031933
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'fe943b098418'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

app.json

@@ -0,0 +1,15 @@
{
"healthchecks": {
"web": [
{
"type": "startup",
"name": "web check",
"description": "Checking if the app responds to the GET /",
"path": "/",
"attempts": 3,
"warn": true,
"initialDelay": 1
}
]
}
}

auth/authenticate.py

@@ -1,91 +0,0 @@
from functools import wraps
from typing import Optional, Tuple
from graphql.type import GraphQLResolveInfo
from sqlalchemy.orm import joinedload, exc
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection
from auth.credentials import AuthCredentials, AuthUser
from base.orm import local_session
from orm.user import User, Role
from settings import SESSION_TOKEN_HEADER
from auth.tokenstorage import SessionToken
from base.exceptions import OperationNotAllowed
class JWTAuthenticate(AuthenticationBackend):
async def authenticate(
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
if SESSION_TOKEN_HEADER not in request.headers:
return AuthCredentials(scopes={}), AuthUser(user_id=None, username='')
token = request.headers.get(SESSION_TOKEN_HEADER)
if not token:
print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
user_id=None, username=''
)
if len(token.split('.')) > 1:
payload = await SessionToken.verify(token)
with local_session() as session:
try:
user = (
session.query(User).options(
joinedload(User.roles).options(joinedload(Role.permissions)),
joinedload(User.ratings)
).filter(
User.id == payload.user_id
).one()
)
scopes = {} # TODO: integrate await user.get_permission()
return (
AuthCredentials(
user_id=payload.user_id,
scopes=scopes,
logged_in=True
),
AuthUser(user_id=user.id, username=''),
)
except exc.NoResultFound:
pass
return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser(user_id=None, username='')
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
# print('[auth.authenticate] login required for %r with info %r' % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
# print(auth)
if not auth or not auth.logged_in:
# raise Unauthorized(auth.error_message or "Please login")
return {
"error": "Please login first"
}
return await func(parent, info, *args, **kwargs)
return wrap
def permission_required(resource, operation, func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
print('[auth.authenticate] permission_required for %r with info %r' % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
raise OperationNotAllowed(auth.error_message or "Please login")
# TODO: add actual permission check logic here
return await func(parent, info, *args, **kwargs)
return wrap

auth/credentials.py

@@ -1,45 +0,0 @@
from typing import List, Optional, Text
from pydantic import BaseModel
# from base.exceptions import Unauthorized
class Permission(BaseModel):
name: Text
class AuthCredentials(BaseModel):
user_id: Optional[int] = None
scopes: Optional[dict] = {}
logged_in: bool = False
error_message: str = ""
@property
def is_admin(self):
# TODO: check admin logic
return True
async def permissions(self) -> List[Permission]:
if self.user_id is None:
# raise Unauthorized("Please login first")
return {
"error": "Please login first"
}
else:
# TODO: implement permissions logic
print(self.user_id)
raise NotImplementedError()
class AuthUser(BaseModel):
user_id: Optional[int]
username: Optional[str]
@property
def is_authenticated(self) -> bool:
return self.user_id is not None
@property
def display_id(self) -> int:
return self.user_id

auth/email.py

@@ -1,37 +0,0 @@
import requests
from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or 'discours.io')
noreply = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN or 'discours.io')
lang_subject = {
"ru": "Подтверждение почты",
"en": "Confirm email"
}
async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
try:
to = "%s <%s>" % (user.name, user.email)
if lang not in ['ru', 'en']:
lang = 'ru'
subject = lang_subject.get(lang, lang_subject["en"])
template = template + "_" + lang
payload = {
"from": noreply,
"to": to,
"subject": subject,
"template": template,
"h:X-Mailgun-Variables": "{ \"token\": \"%s\" }" % token
}
print('[auth.email] payload: %r' % payload)
# debug
# print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
response = requests.post(
api_url,
auth=("api", MAILGUN_API_KEY),
data=payload
)
response.raise_for_status()
except Exception as e:
print(e)

auth/identity.py

@@ -1,117 +0,0 @@
from binascii import hexlify
from hashlib import sha256
from jwt import DecodeError, ExpiredSignatureError
from passlib.hash import bcrypt
from sqlalchemy import or_
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User
from validations.auth import AuthInput
class Password:
@staticmethod
def _to_bytes(data: str) -> bytes:
return bytes(data.encode())
@classmethod
def _get_sha256(cls, password: str) -> bytes:
bytes_password = cls._to_bytes(password)
return hexlify(sha256(bytes_password).digest())
@staticmethod
def encode(password: str) -> str:
password_sha256 = Password._get_sha256(password)
return bcrypt.using(rounds=10).hash(password_sha256)
@staticmethod
def verify(password: str, hashed: str) -> bool:
"""
Verify that password hash is equal to specified hash. Hash format:
$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
\__/\/ \____________________/\_____________________________/
| | Salt Hash
| Cost
Version
More info: https://passlib.readthedocs.io/en/stable/lib/passlib.hash.bcrypt.html
:param password: clear text password
:param hashed: hash of the password
:return: True if clear text password matches specified hash
"""
hashed_bytes = Password._to_bytes(hashed)
password_sha256 = Password._get_sha256(password)
return bcrypt.verify(password_sha256, hashed_bytes)
class Identity:
@staticmethod
def password(orm_user: User, password: str) -> User:
user = User(**orm_user.dict())
if not user.password:
# raise InvalidPassword("User password is empty")
return {
"error": "User password is empty"
}
if not Password.verify(password, user.password):
# raise InvalidPassword("Wrong user password")
return {
"error": "Wrong user password"
}
return user
@staticmethod
def oauth(inp: AuthInput) -> User:
with local_session() as session:
user = (
session.query(User)
.filter(or_(User.oauth == inp["oauth"], User.email == inp["email"]))
.first()
)
if not user:
user = User.create(**inp)
if not user.oauth:
user.oauth = inp["oauth"]
session.commit()
user = User(**user.dict())
return user
@staticmethod
async def onetime(token: str) -> User:
try:
print('[auth.identity] using one time token')
payload = JWTCodec.decode(token)
if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
# raise InvalidToken("Login token has expired, please login again")
return {
"error": "Token has expired"
}
except ExpiredSignatureError:
# raise InvalidToken("Login token has expired, please try again")
return {
"error": "Token has expired"
}
except DecodeError:
# raise InvalidToken("token format error") from e
return {
"error": "Token format error"
}
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
# raise Exception("user not exist")
return {
"error": "User does not exist"
}
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
return user

auth/jwtcodec.py

@@ -1,50 +0,0 @@
from datetime import datetime, timezone
import jwt
from base.exceptions import ExpiredToken, InvalidToken
from validations.auth import TokenPayload, AuthInput
from settings import JWT_ALGORITHM, JWT_SECRET_KEY
class JWTCodec:
@staticmethod
def encode(user: AuthInput, exp: datetime) -> str:
payload = {
"user_id": user.id,
"username": user.email or user.phone,
"exp": exp,
"iat": datetime.now(tz=timezone.utc),
"iss": "discours"
}
try:
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
except Exception as e:
print('[auth.jwtcodec] JWT encode error %r' % e)
@staticmethod
def decode(token: str, verify_exp: bool = True) -> TokenPayload:
r = None
payload = None
try:
payload = jwt.decode(
token,
key=JWT_SECRET_KEY,
options={
"verify_exp": verify_exp,
# "verify_signature": False
},
algorithms=[JWT_ALGORITHM],
issuer="discours"
)
r = TokenPayload(**payload)
# print('[auth.jwtcodec] debug token %r' % r)
return r
except jwt.InvalidIssuedAtError:
print('[auth.jwtcodec] invalid issued at: %r' % payload)
raise ExpiredToken('check token issued time')
except jwt.ExpiredSignatureError:
print('[auth.jwtcodec] expired signature %r' % payload)
raise ExpiredToken('check token lifetime')
except jwt.InvalidTokenError:
raise InvalidToken('token is not valid')
except jwt.InvalidSignatureError:
raise InvalidToken('token is not valid')

auth/oauth.py

@@ -1,89 +0,0 @@
from authlib.integrations.starlette_client import OAuth
from starlette.responses import RedirectResponse
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
from settings import OAUTH_CLIENTS, FRONTEND_URL
oauth = OAuth()
oauth.register(
name="facebook",
client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
access_token_params=None,
authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
authorize_params=None,
api_base_url="https://graph.facebook.com/",
client_kwargs={"scope": "public_profile email"},
)
oauth.register(
name="github",
client_id=OAUTH_CLIENTS["GITHUB"]["id"],
client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
access_token_url="https://github.com/login/oauth/access_token",
access_token_params=None,
authorize_url="https://github.com/login/oauth/authorize",
authorize_params=None,
api_base_url="https://api.github.com/",
client_kwargs={"scope": "user:email"},
)
oauth.register(
name="google",
client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={"scope": "openid email profile"},
)
async def google_profile(client, request, token):
profile = await client.parse_id_token(request, token)
profile["id"] = profile["sub"]
return profile
async def facebook_profile(client, request, token):
profile = await client.get("me?fields=name,id,email", token=token)
return profile.json()
async def github_profile(client, request, token):
profile = await client.get("user", token=token)
return profile.json()
profile_callbacks = {
"google": google_profile,
"facebook": facebook_profile,
"github": github_profile,
}
async def oauth_login(request):
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
redirect_uri = "https://v2.discours.io/oauth-authorize"
return await client.authorize_redirect(request, redirect_uri)
async def oauth_authorize(request):
provider = request.session["provider"]
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
get_profile = profile_callbacks[provider]
profile = await get_profile(client, request, token)
user_oauth_info = "%s:%s" % (provider, profile["id"])
user_input = {
"oauth": user_oauth_info,
"email": profile["email"],
"username": profile["name"],
}
user = Identity.oauth(user_input)
session_token = await TokenStorage.create_session(user)
response = RedirectResponse(url=FRONTEND_URL + "/confirm")
response.set_cookie("token", session_token)
return response

auth/tokenstorage.py

@@ -1,73 +0,0 @@
from datetime import datetime, timedelta, timezone
from auth.jwtcodec import JWTCodec
from validations.auth import AuthInput
from base.redis import redis
from settings import SESSION_TOKEN_LIFE_SPAN, ONETIME_TOKEN_LIFE_SPAN
async def save(token_key, life_span, auto_delete=True):
await redis.execute("SET", token_key, "True")
if auto_delete:
expire_at = (datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)).timestamp()
await redis.execute("EXPIREAT", token_key, int(expire_at))
class SessionToken:
@classmethod
async def verify(cls, token: str):
"""
Rules for a token to be valid.
- token format is legal
- token exists in redis database
- token is not expired
"""
try:
return JWTCodec.decode(token)
except Exception as e:
raise e
@classmethod
async def get(cls, payload, token):
return await TokenStorage.get(f"{payload.user_id}-{payload.username}-{token}")
class TokenStorage:
@staticmethod
async def get(token_key):
print('[tokenstorage.get] ' + token_key)
# 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
return await redis.execute("GET", token_key)
@staticmethod
async def create_onetime(user: AuthInput) -> str:
life_span = ONETIME_TOKEN_LIFE_SPAN
exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
one_time_token = JWTCodec.encode(user, exp)
await save(f"{user.id}-{user.username}-{one_time_token}", life_span)
return one_time_token
@staticmethod
async def create_session(user: AuthInput) -> str:
life_span = SESSION_TOKEN_LIFE_SPAN
exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
session_token = JWTCodec.encode(user, exp)
await save(f"{user.id}-{user.username}-{session_token}", life_span)
return session_token
@staticmethod
async def revoke(token: str) -> bool:
payload = None
try:
print("[auth.tokenstorage] revoke token")
payload = JWTCodec.decode(token)
except: # noqa
pass
else:
await redis.execute("DEL", f"{payload.user_id}-{payload.username}-{token}")
return True
@staticmethod
async def revoke_all(user: AuthInput):
tokens = await redis.execute("KEYS", f"{user.id}-*")
await redis.execute("DEL", *tokens)

base/exceptions.py

@@ -1,38 +0,0 @@
from graphql.error import GraphQLError
# TODO: remove traceback from logs for defined exceptions
class BaseHttpException(GraphQLError):
code = 500
message = "500 Server error"
class ExpiredToken(BaseHttpException):
code = 401
message = "401 Expired Token"
class InvalidToken(BaseHttpException):
code = 401
message = "401 Invalid Token"
class Unauthorized(BaseHttpException):
code = 401
message = "401 Unauthorized"
class ObjectNotExist(BaseHttpException):
code = 404
message = "404 Object Does Not Exist"
class OperationNotAllowed(BaseHttpException):
code = 403
message = "403 Operation Is Not Allowed"
class InvalidPassword(BaseHttpException):
code = 403
message = "403 Invalid Password"

base/orm.py

@@ -1,56 +0,0 @@
from typing import TypeVar, Any, Dict, Generic, Callable
from sqlalchemy import create_engine, Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from settings import DB_URL
engine = create_engine(
DB_URL, echo=False, pool_size=10, max_overflow=20
)
T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
def local_session():
return Session(bind=engine, expire_on_commit=False)
class Base(declarative_base()):
__table__: Table
__tablename__: str
__new__: Callable
__init__: Callable
__allow_unmapped__ = True
__abstract__ = True
__table_args__ = {"extend_existing": True}
id = Column(Integer, primary_key=True)
def __init_subclass__(cls, **kwargs):
REGISTRY[cls.__name__] = cls
@classmethod
def create(cls: Generic[T], **kwargs) -> Generic[T]:
instance = cls(**kwargs)
return instance.save()
def save(self) -> Generic[T]:
with local_session() as session:
session.add(self)
session.commit()
return self
def update(self, input):
column_names = self.__table__.columns.keys()
for (name, value) in input.items():
if name in column_names:
setattr(self, name, value)
def dict(self) -> Dict[str, Any]:
column_names = self.__table__.columns.keys()
return {c: getattr(self, c) for c in column_names}

base/redis.py

@@ -1,44 +0,0 @@
from aioredis import from_url
from asyncio import sleep
from settings import REDIS_URL
class RedisCache:
def __init__(self, uri=REDIS_URL):
self._uri: str = uri
self._instance = None
async def connect(self):
if self._instance is not None:
return
self._instance = await from_url(self._uri, encoding="utf-8")
# print(self._instance)
async def disconnect(self):
if self._instance is None:
return
await self._instance.close()
# await self._instance.wait_closed() # deprecated
self._instance = None
async def execute(self, command, *args, **kwargs):
while not self._instance:
await sleep(1)
try:
# print("[redis] " + command + ' ' + ' '.join(args))
return await self._instance.execute_command(command, *args, **kwargs)
except Exception:
pass
async def lrange(self, key, start, stop):
# print(f"[redis] LRANGE {key} {start} {stop}")
return await self._instance.lrange(key, start, stop)
async def mget(self, key, *keys):
# print(f"[redis] MGET {key} {keys}")
return await self._instance.mget(key, *keys)
redis = RedisCache()
__all__ = ["redis"]

base/resolvers.py

@@ -1,13 +0,0 @@
from ariadne import MutationType, QueryType, ScalarType
datetime_scalar = ScalarType("DateTime")
@datetime_scalar.serializer
def serialize_datetime(value):
return value.isoformat()
query = QueryType()
mutation = MutationType()
resolvers = [query, mutation, datetime_scalar]

lint.sh

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
set -e
find . -name "*.py[co]" -o -name __pycache__ -exec rm -rf {} +
#rm -rf .mypy_cache
echo "> isort"
isort --gitignore --settings-file=setup.cfg .
echo "> brunette"
brunette --config=setup.cfg .
echo "> flake8"
flake8 --config=setup.cfg .
echo "> mypy"
mypy --config-file=setup.cfg .
echo "> prettyjson"
python3 -m scripts.prettyjson

main.py

@@ -1,99 +1,45 @@
import asyncio
import os
from importlib import import_module
from os.path import exists
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
from orm import init_tables
from auth.authenticate import JWTAuthenticate
from auth.oauth import oauth_login, oauth_authorize
from base.redis import redis
from base.resolvers import resolvers
from resolvers.auth import confirm_email_handler
from resolvers.upload import upload_handler
from services.main import storages_init
from services.notifications.notification_service import notification_service
from services.stat.viewed import ViewedStorage
# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
from services.notifications.sse import sse_subscribe_handler
from services.rediscache import redis
from services.schema import resolvers
from services.sentry import start_sentry
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore
middleware = [
Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
Middleware(SessionMiddleware, secret_key=SESSION_SECRET_KEY),
]
import_module('resolvers')
schema = make_executable_schema(load_schema_from_path('schema/'), resolvers)
async def start_up():
init_tables()
await redis.connect()
await storages_init()
views_stat_task = asyncio.create_task(ViewedStorage().worker())
print(views_stat_task)
# git_task = asyncio.create_task(GitTask.git_task_worker())
# print(git_task)
notification_service_task = asyncio.create_task(notification_service.worker())
print(notification_service_task)
try:
import sentry_sdk
sentry_sdk.init(SENTRY_DSN)
except Exception as e:
print('[sentry] init error')
print(e)
async def dev_start_up():
if exists(DEV_SERVER_PID_FILE_NAME):
await redis.connect()
return
else:
async def start():
if MODE == 'development':
if not exists(DEV_SERVER_PID_FILE_NAME):
# pid file management
with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
f.write(str(os.getpid()))
await start_up()
print(f'[main] process started in {MODE} mode')
async def shutdown():
await redis.disconnect()
routes = [
# Route("/messages", endpoint=sse_messages),
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth-authorize", endpoint=oauth_authorize),
Route("/confirm/{token}", endpoint=confirm_email_handler),
Route("/upload", endpoint=upload_handler, methods=['POST']),
Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]
# main starlette app object with ariadne mounted in root
app = Starlette(
on_startup=[start_up],
on_shutdown=[shutdown],
middleware=middleware,
routes=routes,
)
app.mount("/", GraphQL(
schema
))
dev_app = Starlette(
routes=[
Route('/', GraphQL(schema, debug=True)),
Route('/new-author', WebhookEndpoint),
],
on_startup=[
redis.connect,
ViewedStorage.init,
# search_service.info,
start_sentry,
start,
],
on_shutdown=[redis.disconnect],
debug=True,
on_startup=[dev_start_up],
on_shutdown=[shutdown],
middleware=middleware,
routes=routes,
)
dev_app.mount("/", GraphQL(
schema,
debug=True
))


@@ -1,19 +0,0 @@
database_name="discoursio"
echo "DATABASE MIGRATION STARTED"
echo "Dropping database $database_name"
dropdb $database_name --force
if [ $? -ne 0 ]; then { echo "Failed to drop database, aborting." ; exit 1; } fi
echo "Database $database_name dropped"
echo "Creating database $database_name"
createdb $database_name
if [ $? -ne 0 ]; then { echo "Failed to create database, aborting." ; exit 1; } fi
echo "Database $database_name successfully created"
echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'


@@ -1,292 +0,0 @@
""" cmd managed migration """
import asyncio
import gc
import json
import sys
from datetime import datetime, timezone
import bs4
from migration.export import export_mdx
from migration.tables.comments import migrate as migrateComment
from migration.tables.comments import migrate_2stage as migrateComment_2stage
from migration.tables.content_items import get_shout_slug
from migration.tables.content_items import migrate as migrateShout
from migration.tables.remarks import migrate as migrateRemark
from migration.tables.topics import migrate as migrateTopic
from migration.tables.users import migrate as migrateUser, post_migrate as users_post_migrate
from migration.tables.users import migrate_2stage as migrateUser_2stage
from orm import init_tables
from orm.reaction import Reaction
TODAY = datetime.strftime(datetime.now(tz=timezone.utc), "%Y%m%d")
OLD_DATE = "2016-03-05 22:22:00.350000"
async def users_handle(storage):
"""migrating users first"""
counter = 0
id_map = {}
print("[migration] migrating %d users" % (len(storage["users"]["data"])))
for entry in storage["users"]["data"]:
oid = entry["_id"]
user = migrateUser(entry)
storage["users"]["by_oid"][oid] = user # full
del user["password"]
del user["emailConfirmed"]
del user["username"]
del user["email"]
storage["users"]["by_slug"][user["slug"]] = user # public
id_map[user["oid"]] = user["slug"]
counter += 1
ce = 0
for entry in storage["users"]["data"]:
ce += migrateUser_2stage(entry, id_map)
users_post_migrate()
async def topics_handle(storage):
"""topics from categories and tags"""
counter = 0
for t in storage["topics"]["tags"] + storage["topics"]["cats"]:
if t["slug"] in storage["replacements"]:
t["slug"] = storage["replacements"][t["slug"]]
topic = migrateTopic(t)
storage["topics"]["by_oid"][t["_id"]] = topic
storage["topics"]["by_slug"][t["slug"]] = topic
counter += 1
else:
print("[migration] topic " + t["slug"] + " ignored")
for oldslug, newslug in storage["replacements"].items():
if oldslug != newslug and oldslug in storage["topics"]["by_slug"]:
oid = storage["topics"]["by_slug"][oldslug]["_id"]
del storage["topics"]["by_slug"][oldslug]
storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
print("[migration] " + str(counter) + " topics migrated")
print(
"[migration] "
+ str(len(storage["topics"]["by_oid"].values()))
+ " topics by oid"
)
print(
"[migration] "
+ str(len(storage["topics"]["by_slug"].values()))
+ " topics by slug"
)
async def shouts_handle(storage, args):
"""migrating content items one by one"""
counter = 0
discours_author = 0
anonymous_author = 0
pub_counter = 0
ignored = 0
topics_dataset_bodies = []
topics_dataset_tlist = []
for entry in storage["shouts"]["data"]:
gc.collect()
# slug
slug = get_shout_slug(entry)
# single slug mode
if "-" in args and slug not in args:
continue
# migrate
shout_dict = await migrateShout(entry, storage)
if shout_dict:
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
storage["shouts"]["by_slug"][shout_dict["slug"]] = shout_dict
# shouts.topics
if not shout_dict["topics"]:
print("[migration] no topics!")
# with author
author = shout_dict["authors"][0]
if author["slug"] == "discours":
discours_author += 1
if author["slug"] == "anonymous":
anonymous_author += 1
# print('[migration] ' + shout['slug'] + ' with author ' + author)
if entry.get("published"):
if "mdx" in args:
export_mdx(shout_dict)
pub_counter += 1
# print main counter
counter += 1
print('[migration] shouts_handle %d: %s @%s' % (
(counter + 1), shout_dict["slug"], author["slug"]
))
b = bs4.BeautifulSoup(shout_dict["body"], "html.parser")
texts = [shout_dict["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", "")]
texts = texts + b.findAll(text=True)
topics_dataset_bodies.append(" ".join([x.strip().lower() for x in texts]))
topics_dataset_tlist.append(shout_dict["topics"])
else:
ignored += 1
# np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=',
# ', fmt='%s')
print("[migration] " + str(counter) + " content items were migrated")
print("[migration] " + str(pub_counter) + " have been published")
print("[migration] " + str(discours_author) + " authored by @discours")
print("[migration] " + str(anonymous_author) + " authored by @anonymous")
async def remarks_handle(storage):
print("[migration] comments")
c = 0
for entry_remark in storage["remarks"]["data"]:
remark = await migrateRemark(entry_remark, storage)
c += 1
print("[migration] " + str(c) + " remarks migrated")
async def comments_handle(storage):
print("[migration] comments")
id_map = {}
ignored_counter = 0
missed_shouts = {}
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
if type(reaction) == str:
missed_shouts[reaction] = oldcomment
elif type(reaction) == Reaction:
reaction = reaction.dict()
rid = reaction["id"]
oid = reaction["oid"]
id_map[oid] = rid
else:
ignored_counter += 1
for reaction in storage["reactions"]["data"]:
migrateComment_2stage(reaction, id_map)
print("[migration] " + str(len(id_map)) + " comments migrated")
print("[migration] " + str(ignored_counter) + " comments ignored")
print("[migration] " + str(len(missed_shouts.keys())) + " commented shouts missed")
missed_counter = 0
for missed in missed_shouts.values():
missed_counter += len(missed)
print("[migration] " + str(missed_counter) + " comments dropped")
async def all_handle(storage, args):
print("[migration] handle everything")
await users_handle(storage)
await topics_handle(storage)
print("[migration] users and topics are migrated")
await shouts_handle(storage, args)
# print("[migration] remarks...")
# await remarks_handle(storage)
print("[migration] migrating comments")
await comments_handle(storage)
# export_email_subscriptions()
print("[migration] done!")
def data_load():
storage = {
"content_items": {
"by_oid": {},
"by_slug": {},
},
"shouts": {"by_oid": {}, "by_slug": {}, "data": []},
"reactions": {"by_oid": {}, "by_slug": {}, "by_content": {}, "data": []},
"topics": {
"by_oid": {},
"by_slug": {},
"cats": [],
"tags": [],
},
"remarks": {"data": []},
"users": {"by_oid": {}, "by_slug": {}, "data": []},
"replacements": json.loads(open("migration/tables/replacements.json").read()),
}
try:
users_data = json.loads(open("migration/data/users.json").read())
print("[migration.load] " + str(len(users_data)) + " users ")
tags_data = json.loads(open("migration/data/tags.json").read())
storage["topics"]["tags"] = tags_data
print("[migration.load] " + str(len(tags_data)) + " tags ")
cats_data = json.loads(
open("migration/data/content_item_categories.json").read()
)
storage["topics"]["cats"] = cats_data
print("[migration.load] " + str(len(cats_data)) + " cats ")
comments_data = json.loads(open("migration/data/comments.json").read())
storage["reactions"]["data"] = comments_data
print("[migration.load] " + str(len(comments_data)) + " comments ")
content_data = json.loads(open("migration/data/content_items.json").read())
storage["shouts"]["data"] = content_data
print("[migration.load] " + str(len(content_data)) + " content items ")
remarks_data = json.loads(open("migration/data/remarks.json").read())
storage["remarks"]["data"] = remarks_data
print("[migration.load] " + str(len(remarks_data)) + " remarks data ")
# fill out storage
for x in users_data:
storage["users"]["by_oid"][x["_id"]] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
print(
"[migration.load] "
+ str(len(storage["users"]["by_oid"].keys()))
+ " users by oid"
)
for x in tags_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
for x in cats_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["topics"]["by_slug"].keys()))
+ " topics by slug"
)
for item in content_data:
slug = get_shout_slug(item)
storage["content_items"]["by_slug"][slug] = item
storage["content_items"]["by_oid"][item["_id"]] = item
print("[migration.load] " + str(len(content_data)) + " content items")
for x in comments_data:
storage["reactions"]["by_oid"][x["_id"]] = x
cid = x["contentItem"]
storage["reactions"]["by_content"][cid] = x
ci = storage["content_items"]["by_oid"].get(cid, {})
if "slug" in ci:
storage["reactions"]["by_slug"][ci["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["reactions"]["by_content"].keys()))
+ " with comments"
)
storage["users"]["data"] = users_data
storage["topics"]["tags"] = tags_data
storage["topics"]["cats"] = cats_data
storage["shouts"]["data"] = content_data
storage["reactions"]["data"] = comments_data
except Exception as e:
raise e
return storage
async def handling_migration():
init_tables()
await all_handle(data_load(), sys.argv)
def process():
loop = asyncio.get_event_loop()
loop.run_until_complete(handling_migration())
if __name__ == "__main__":
process()

migration/bson2json.py

@@ -1,32 +0,0 @@
import json
import os
import bson
import gc
from .utils import DateTimeEncoder
def json_tables():
print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
data = {
"content_items": [],
"content_item_categories": [],
"tags": [],
"email_subscriptions": [],
"users": [],
"comments": [],
"remarks": []
}
for table in data.keys():
print('[migration] bson2json for ' + table)
gc.collect()
lc = []
bs = open("dump/discours/" + table + ".bson", "rb").read()
base = 0
while base < len(bs):
base, d = bson.decode_document(bs, base)
lc.append(d)
data[table] = lc
open(os.getcwd() + "/migration/data/" + table + ".json", "w").write(
json.dumps(lc, cls=DateTimeEncoder)
)

migration/export.py

@@ -1,159 +0,0 @@
import json
import os
from datetime import datetime, timezone
import frontmatter
from .extract import extract_html, extract_media
from .utils import DateTimeEncoder
OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
parentDir = "/".join(os.getcwd().split("/")[:-1])
contentDir = parentDir + "/discoursio-web/content/"
ts = datetime.now(tz=timezone.utc)
def get_metadata(r):
authors = []
for a in r["authors"]:
authors.append(
{ # a short version for public listings
"slug": a.slug or "discours",
"name": a.name or "Дискурс",
"userpic": a.userpic or "https://discours.io/static/img/discours.png",
}
)
metadata = {}
metadata["title"] = r.get("title", "").replace("{", "(").replace("}", ")")
metadata["authors"] = authors
metadata["createdAt"] = r.get("createdAt", ts)
metadata["layout"] = r["layout"]
metadata["topics"] = [topic for topic in r["topics"]]
metadata["topics"].sort()
if r.get("cover", False):
metadata["cover"] = r.get("cover")
return metadata
def export_mdx(r):
# print('[export] mdx %s' % r['slug'])
content = ""
metadata = get_metadata(r)
content = frontmatter.dumps(frontmatter.Post(r["body"], **metadata))
ext = "mdx"
filepath = contentDir + r["slug"]
bc = bytes(content, "utf-8").decode("utf-8", "ignore")
open(filepath + "." + ext, "w").write(bc)
def export_body(shout, storage):
entry = storage["content_items"]["by_oid"][shout["oid"]]
if entry:
body = extract_html(entry)
media = extract_media(entry)
shout["body"] = body # prepare_html_body(entry) # prepare_md_body(entry)
shout["media"] = media
export_mdx(shout)
print("[export] html for %s" % shout["slug"])
open(contentDir + shout["slug"] + ".html", "w").write(body)
else:
raise Exception("no content_items entry found")
def export_slug(slug, storage):
shout = storage["shouts"]["by_slug"][slug]
shout = storage["shouts"]["by_slug"].get(slug)
assert shout, "[export] no shout found by slug: %s " % slug
author = shout["authors"][0]
assert author, "[export] no author error"
export_body(shout, storage)
def export_email_subscriptions():
email_subscriptions_data = json.loads(
open("migration/data/email_subscriptions.json").read()
)
for data in email_subscriptions_data:
# TODO: migrate to mailgun list manually
# migrate_email_subscription(data)
pass
print(
"[migration] "
+ str(len(email_subscriptions_data))
+ " email subscriptions exported"
)
def export_shouts(storage):
# update what was just migrated or load json again
if len(storage["users"]["by_slugs"].keys()) == 0:
storage["users"]["by_slugs"] = json.loads(
open(EXPORT_DEST + "authors.json").read()
)
print(
"[migration] "
+ str(len(storage["users"]["by_slugs"].keys()))
+ " exported authors "
)
if len(storage["shouts"]["by_slugs"].keys()) == 0:
storage["shouts"]["by_slugs"] = json.loads(
open(EXPORT_DEST + "articles.json").read()
)
print(
"[migration] "
+ str(len(storage["shouts"]["by_slugs"].keys()))
+ " exported articles "
)
for slug in storage["shouts"]["by_slugs"].keys():
export_slug(slug, storage)
def export_json(
export_articles={}, export_authors={}, export_topics={}, export_comments={}
):
open(EXPORT_DEST + "authors.json", "w").write(
json.dumps(
export_authors,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_authors.items())) + " authors exported")
open(EXPORT_DEST + "topics.json", "w").write(
json.dumps(
export_topics,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_topics.keys())) + " topics exported")
open(EXPORT_DEST + "articles.json", "w").write(
json.dumps(
export_articles,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_articles.items())) + " articles exported")
open(EXPORT_DEST + "comments.json", "w").write(
json.dumps(
export_comments,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print(
"[migration] "
+ str(len(export_comments.items()))
+ " exported articles with comments"
)
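
export_mdx() above leans on python-frontmatter to serialize metadata plus body into one .mdx file. A tiny usage sketch of the same two calls, with illustrative sample values:

import frontmatter

# record shaped like the dicts export_mdx() consumes (values are made up)
post = frontmatter.Post(
    "Article body...",
    title="Example",
    authors=[{"slug": "discours", "name": "Дискурс", "userpic": ""}],
    layout="article",
)
print(frontmatter.dumps(post))  # YAML frontmatter block, then the body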

View File

@@ -1,434 +0,0 @@
import base64
import os
import re
import uuid
from bs4 import BeautifulSoup
from base.orm import local_session
from orm.reaction import Reaction, ReactionKind
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
)
s3 = "https://discours-io.s3.amazonaws.com/"
cdn = "https://assets.discours.io"
def replace_tooltips(body):
# change if you prefer regexp
newbody = body
matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
for match in matches:
newbody = newbody.replace(
match.group(1), '<Tooltip text="' + match.group(2) + '" />'
) # accumulate replacements instead of resetting from body each pass
if len(matches) > 0:
print("[extract] found %d tooltips" % len(matches))
return newbody
def extract_footnotes(body, shout_dict):
parts = body.split("&&&")
lll = len(parts)
newparts = list(parts)
placed = False
if lll & 1:
if lll > 1:
i = 1
print("[extract] found %d footnotes in body" % (lll - 1))
for part in parts[1:]:
if i & 1:
placed = True
if 'a class="footnote-url" href=' in part:
print("[extract] footnote: " + part)
fn = 'a class="footnote-url" href="'
extracted_link = part.split(fn, 1)[1].split('"', 1)[0]
extracted_body = part.split(fn, 1)[1].split('>', 1)[1].split('</a>', 1)[0]
print("[extract] footnote link: " + extracted_link)
with local_session() as session:
Reaction.create(**{
"shout": shout_dict['id'],
"kind": ReactionKind.FOOTNOTE,
"body": extracted_body,
"range": str(body.index(fn + extracted_link) - len('<')) + ':' + str(body.index(extracted_body) + len('</a>'))
})
newparts[i] = "<a href='#'></a>"
else:
newparts[i] = part
i += 1
return ("".join(newparts), placed)
def place_tooltips(body):
parts = body.split("&&&")
lll = len(parts)
newparts = list(parts)
placed = False
if lll & 1:
if lll > 1:
i = 1
print("[extract] found %d tooltips" % (lll - 1))
for part in parts[1:]:
if i & 1:
placed = True
if 'a class="footnote-url" href=' in part:
print("[extract] footnote: " + part)
fn = 'a class="footnote-url" href="'
link = part.split(fn, 1)[1].split('"', 1)[0]
extracted_part = (
part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
)
newparts[i] = (
"<Tooltip"
+ (' link="' + link + '" ' if link else "")
+ ">"
+ extracted_part
+ "</Tooltip>"
)
else:
newparts[i] = "<Tooltip>%s</Tooltip>" % part
# print('[extract] ' + newparts[i])
else:
# print('[extract] ' + part[:10] + '..')
newparts[i] = part
i += 1
return ("".join(newparts), placed)
IMG_REGEX = r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}="
IMG_REGEX += r"|[A-Za-z\d+\/]{2}==)))\)"
parentDir = "/".join(os.getcwd().split("/")[:-1])
public = parentDir + "/discoursio-web/public"
cache = {}
def reextract_images(body, oid):
# change if you prefer regexp
matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
i = 0
for match in matches:
print("[extract] image " + match.group(1))
ext = match.group(3)
name = oid + str(i)
link = public + "/upload/image-" + name + "." + ext
img = match.group(4)
title = match.group(1) # NOTE: this is not the title
if img not in cache:
content = base64.b64decode(img + "==")
print(str(len(img)) + " image bytes written")
open("../" + link, "wb").write(content)
cache[img] = name
i += 1
else:
print("[extract] image cached " + cache[img])
body = body.replace(
match.group(0), "![" + title + "](" + cdn + link + ")"
) # reassign the result and target the matched text, not the match object's repr
return body
IMAGES = {
"data:image/png": "png",
"data:image/jpg": "jpg",
"data:image/jpeg": "jpg",
}
b64 = ";base64,"
def extract_imageparts(bodyparts, prefix):
# recursive loop
newparts = list(bodyparts)
for current in bodyparts:
i = bodyparts.index(current)
for mime in IMAGES.keys():
if mime == current[-len(mime) :] and (i + 1 < len(bodyparts)):
print("[extract] " + mime)
next = bodyparts[i + 1]
ext = IMAGES[mime]
b64end = next.index(")")
b64encoded = next[:b64end]
name = prefix + "-" + str(len(cache))
link = "/upload/image-" + name + "." + ext
print("[extract] name: " + name)
print("[extract] link: " + link)
print("[extract] %d bytes" % len(b64encoded))
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + "==")
open(public + link, "wb").write(content)
print(
"[extract] "
+ str(len(content))
+ " image bytes written"
)
cache[b64encoded] = name
except Exception:
raise Exception("[extract] error decoding image %r" % b64encoded)
else:
print("[extract] cached link " + cache[b64encoded])
name = cache[b64encoded]
link = cdn + "/upload/image-" + name + "." + ext
newparts[i] = (
current[: -len(mime)]
+ current[-len(mime) :]
+ link
+ next[-b64end:]
)
newparts[i + 1] = next[:-b64end]
break
return (
extract_imageparts(
newparts[i] + newparts[i + 1] + b64.join(bodyparts[(i + 2) :]), prefix
)
if len(bodyparts) > (i + 1)
else "".join(newparts)
)
def extract_dataimages(parts, prefix):
newparts = list(parts)
for part in parts:
i = parts.index(part)
if part.endswith("]("):
[ext, rest] = parts[i + 1].split(b64)
name = prefix + "-" + str(len(cache))
if ext == "/jpeg":
ext = "jpg"
else:
ext = ext.replace("/", "")
link = "/upload/image-" + name + "." + ext
print("[extract] filename: " + link)
b64end = rest.find(")")
if b64end != -1:
b64encoded = rest[:b64end]
print("[extract] %d text bytes" % len(b64encoded))
# write if not cached
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + "==")
open(public + link, "wb").write(content)
print("[extract] " + str(len(content)) + " image bytes")
cache[b64encoded] = name
except Exception:
raise Exception("[extract] error decoding image %r" % b64encoded)
else:
print("[extract] 0 image bytes, cached for " + cache[b64encoded])
name = cache[b64encoded]
# update link with CDN
link = cdn + "/upload/image-" + name + "." + ext
# patch newparts
newparts[i + 1] = link + rest[b64end:]
else:
raise Exception("cannot find the end of base64 encoded string")
else:
print("[extract] dataimage skipping part " + str(i))
continue
return "".join(newparts)
di = "data:image"
def extract_md_images(body, prefix):
newbody = ""
body = (
body.replace("\n! [](" + di, "\n ![](" + di)
.replace("\n[](" + di, "\n![](" + di)
.replace(" [](" + di, " ![](" + di)
)
parts = body.split(di)
if len(parts) > 1:
newbody = extract_dataimages(parts, prefix)
else:
newbody = body
return newbody
def cleanup_md(body):
newbody = (
body.replace("<", "")
.replace(">", "")
.replace("{", "(")
.replace("}", ")")
.replace("\u2026", "...")
.replace(" __ ", " ")
.replace("_ _", " ")
.replace("****", "")
.replace("\u00a0", " ")
.replace("\u02c6", "^")
.replace("\ufeff", "")
.replace("\u200b", "")
.replace("\u200c", "")
) # .replace('\u2212', '-')
return newbody
def extract_md(body, shout_dict = None):
newbody = body
if newbody:
newbody = cleanup_md(newbody)
if not newbody:
raise Exception("cleanup error")
if shout_dict:
uid = shout_dict['id'] or uuid.uuid4()
newbody = extract_md_images(newbody, uid)
if not newbody:
raise Exception("extract_images error")
newbody, placed = extract_footnotes(newbody, shout_dict)
if not newbody:
raise Exception("extract_footnotes error")
return newbody
def extract_media(entry):
''' normalized media extraction method '''
# media [ { title pic url body } ]}
kind = entry.get("type")
if not kind:
print(entry)
raise Exception("shout no layout")
media = []
for m in entry.get("media") or []:
# title
title = m.get("title", "").replace("\n", " ").replace("&nbsp;", " ")
artist = m.get("performer") or m.get("artist")
if artist:
title = artist + " - " + title
# pic
url = m.get("fileUrl") or m.get("url", "")
pic = ""
if m.get("thumborId"):
pic = cdn + "/unsafe/1600x/" + m["thumborId"]
# url
if not url:
if kind == "Image":
url = pic
elif "youtubeId" in m:
url = "https://youtube.com/?watch=" + m["youtubeId"]
elif "vimeoId" in m:
url = "https://vimeo.com/" + m["vimeoId"]
# body
body = m.get("body") or m.get("literatureBody") or ""
media.append({
"url": url,
"pic": pic,
"title": title,
"body": body
})
return media
def prepare_html_body(entry):
# body modifications
body = ""
kind = entry.get("type")
addon = ""
if kind == "Video":
addon = ""
for m in entry.get("media") or []:
if "youtubeId" in m:
addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
addon += m["youtubeId"]
addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
elif "vimeoId" in m:
addon += '<iframe src="https://player.vimeo.com/video/'
addon += m["vimeoId"]
addon += '" width="420" height="345" frameborder="0" allow="autoplay; fullscreen"'
addon += " allowfullscreen></iframe>"
else:
print("[extract] media is not supported")
print(m)
body += addon
elif kind == "Music":
addon = ""
for m in entry.get("media") or []:
artist = m.get("performer")
trackname = ""
if artist:
trackname += artist + " - "
if "title" in m:
trackname += m.get("title", "")
addon += "<figure><figcaption>"
addon += trackname
addon += '</figcaption><audio controls src="'
addon += m.get("fileUrl", "")
addon += '"></audio></figure>'
body += addon
body = extract_html(entry) # NOTE: this overwrites the media addon markup assembled above
# if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
return body
def cleanup_html(body: str) -> str:
new_body = body
regex_remove = [
r"style=\"width:\s*\d+px;height:\s*\d+px;\"",
r"style=\"width:\s*\d+px;\"",
r"style=\"color: #000000;\"",
r"style=\"float: none;\"",
r"style=\"background: white;\"",
r"class=\"Apple-interchange-newline\"",
r"class=\"MsoNormalCxSpMiddle\"",
r"class=\"MsoNormal\"",
r"lang=\"EN-US\"",
r"id=\"docs-internal-guid-[\w-]+\"",
r"<p>\s*</p>",
r"<span></span>",
r"<i>\s*</i>",
r"<b>\s*</b>",
r"<h1>\s*</h1>",
r"<h2>\s*</h2>",
r"<h3>\s*</h3>",
r"<h4>\s*</h4>",
r"<div>\s*</div>",
]
regex_replace = {
r"<br>\s*</p>": "</p>"
}
changed = True
while changed:
# we need several iterations to clean nested tags this way
changed = False
new_body_iteration = new_body
for regex in regex_remove:
new_body = re.sub(regex, "", new_body)
for regex, replace in regex_replace.items():
new_body = re.sub(regex, replace, new_body)
if new_body_iteration != new_body:
changed = True
return new_body
def extract_html(entry, shout_id = None, cleanup=False):
body_orig = (entry.get("body") or "").replace('\\(', '(').replace('\\)', ')')
if cleanup:
# we do that before bs parsing to catch the invalid html
body_clean = cleanup_html(body_orig)
if body_clean != body_orig:
print(f"[migration] html cleaned for slug {entry.get('slug', None)}")
body_orig = body_clean
if shout_id:
extract_footnotes(body_orig, shout_id)
body_html = str(BeautifulSoup(body_orig, features="html.parser"))
if cleanup:
# we do that after bs parsing because it can add dummy tags
body_clean_html = cleanup_html(body_html)
if body_clean_html != body_html:
print(f"[migration] html cleaned after bs4 for slug {entry.get('slug', None)}")
body_html = body_clean_html
return body_html
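
place_tooltips() above relies on the "&&&" convention: splitting the body must yield an odd number of parts, with every even-indexed gap being tooltip text. A small illustration, assuming this module imports as migration.extract like elsewhere in the tree:

from migration.extract import place_tooltips

body = "before &&&a footnote&&& after"
newbody, placed = place_tooltips(body)
# newbody == "before <Tooltip>a footnote</Tooltip> after", placed == True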

File diff suppressed because it is too large

View File

@@ -1,3 +0,0 @@
from .cli import main
main()

View File

@@ -1,323 +0,0 @@
import argparse
import sys
from . import HTML2Text, __version__, config
# noinspection DuplicatedCode
def main() -> None:
baseurl = ""
class bcolors:
HEADER = "\033[95m"
OKBLUE = "\033[94m"
OKGREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
ENDC = "\033[0m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
p = argparse.ArgumentParser()
p.add_argument(
"--default-image-alt",
dest="default_image_alt",
default=config.DEFAULT_IMAGE_ALT,
help="The default alt string for images with missing ones",
)
p.add_argument(
"--pad-tables",
dest="pad_tables",
action="store_true",
default=config.PAD_TABLES,
help="pad the cells to equal column width in tables",
)
p.add_argument(
"--no-wrap-links",
dest="wrap_links",
action="store_false",
default=config.WRAP_LINKS,
help="don't wrap links during conversion",
)
p.add_argument(
"--wrap-list-items",
dest="wrap_list_items",
action="store_true",
default=config.WRAP_LIST_ITEMS,
help="wrap list items during conversion",
)
p.add_argument(
"--wrap-tables",
dest="wrap_tables",
action="store_true",
default=config.WRAP_TABLES,
help="wrap tables",
)
p.add_argument(
"--ignore-emphasis",
dest="ignore_emphasis",
action="store_true",
default=config.IGNORE_EMPHASIS,
help="don't include any formatting for emphasis",
)
p.add_argument(
"--reference-links",
dest="inline_links",
action="store_false",
default=config.INLINE_LINKS,
help="use reference style links instead of inline links",
)
p.add_argument(
"--ignore-links",
dest="ignore_links",
action="store_true",
default=config.IGNORE_ANCHORS,
help="don't include any formatting for links",
)
p.add_argument(
"--ignore-mailto-links",
action="store_true",
dest="ignore_mailto_links",
default=config.IGNORE_MAILTO_LINKS,
help="don't include mailto: links",
)
p.add_argument(
"--protect-links",
dest="protect_links",
action="store_true",
default=config.PROTECT_LINKS,
help="protect links from line breaks surrounding them with angle brackets",
)
p.add_argument(
"--ignore-images",
dest="ignore_images",
action="store_true",
default=config.IGNORE_IMAGES,
help="don't include any formatting for images",
)
p.add_argument(
"--images-as-html",
dest="images_as_html",
action="store_true",
default=config.IMAGES_AS_HTML,
help=(
"Always write image tags as raw html; preserves `height`, `width` and "
"`alt` if possible."
),
)
p.add_argument(
"--images-to-alt",
dest="images_to_alt",
action="store_true",
default=config.IMAGES_TO_ALT,
help="Discard image data, only keep alt text",
)
p.add_argument(
"--images-with-size",
dest="images_with_size",
action="store_true",
default=config.IMAGES_WITH_SIZE,
help=(
"Write image tags with height and width attrs as raw html to retain "
"dimensions"
),
)
p.add_argument(
"-g",
"--google-doc",
action="store_true",
dest="google_doc",
default=False,
help="convert an html-exported Google Document",
)
p.add_argument(
"-d",
"--dash-unordered-list",
action="store_true",
dest="ul_style_dash",
default=False,
help="use a dash rather than a star for unordered list items",
)
p.add_argument(
"-e",
"--asterisk-emphasis",
action="store_true",
dest="em_style_asterisk",
default=False,
help="use an asterisk rather than an underscore for emphasized text",
)
p.add_argument(
"-b",
"--body-width",
dest="body_width",
type=int,
default=config.BODY_WIDTH,
help="number of characters per output line, 0 for no wrap",
)
p.add_argument(
"-i",
"--google-list-indent",
dest="list_indent",
type=int,
default=config.GOOGLE_LIST_INDENT,
help="number of pixels Google indents nested lists",
)
p.add_argument(
"-s",
"--hide-strikethrough",
action="store_true",
dest="hide_strikethrough",
default=False,
help="hide strike-through text. only relevant when -g is " "specified as well",
)
p.add_argument(
"--escape-all",
action="store_true",
dest="escape_snob",
default=False,
help=(
"Escape all special characters. Output is less readable, but avoids "
"corner case formatting issues."
),
)
p.add_argument(
"--bypass-tables",
action="store_true",
dest="bypass_tables",
default=config.BYPASS_TABLES,
help="Format tables in HTML rather than Markdown syntax.",
)
p.add_argument(
"--ignore-tables",
action="store_true",
dest="ignore_tables",
default=config.IGNORE_TABLES,
help="Ignore table-related tags (table, th, td, tr) " "while keeping rows.",
)
p.add_argument(
"--single-line-break",
action="store_true",
dest="single_line_break",
default=config.SINGLE_LINE_BREAK,
help=(
"Use a single line break after a block element rather than two line "
"breaks. NOTE: Requires --body-width=0"
),
)
p.add_argument(
"--unicode-snob",
action="store_true",
dest="unicode_snob",
default=config.UNICODE_SNOB,
help="Use unicode throughout document",
)
p.add_argument(
"--no-automatic-links",
action="store_false",
dest="use_automatic_links",
default=config.USE_AUTOMATIC_LINKS,
help="Do not use automatic links wherever applicable",
)
p.add_argument(
"--no-skip-internal-links",
action="store_false",
dest="skip_internal_links",
default=config.SKIP_INTERNAL_LINKS,
help="Do not skip internal links",
)
p.add_argument(
"--links-after-para",
action="store_true",
dest="links_each_paragraph",
default=config.LINKS_EACH_PARAGRAPH,
help="Put links after each paragraph instead of document",
)
p.add_argument(
"--mark-code",
action="store_true",
dest="mark_code",
default=config.MARK_CODE,
help="Mark program code blocks with [code]...[/code]",
)
p.add_argument(
"--decode-errors",
dest="decode_errors",
default=config.DECODE_ERRORS,
help=(
"What to do in case of decode errors. 'ignore', 'strict' and 'replace' are "
"acceptable values"
),
)
p.add_argument(
"--open-quote",
dest="open_quote",
default=config.OPEN_QUOTE,
help="The character used to open quotes",
)
p.add_argument(
"--close-quote",
dest="close_quote",
default=config.CLOSE_QUOTE,
help="The character used to close quotes",
)
p.add_argument(
"--version", action="version", version=".".join(map(str, __version__))
)
p.add_argument("filename", nargs="?")
p.add_argument("encoding", nargs="?", default="utf-8")
args = p.parse_args()
if args.filename and args.filename != "-":
with open(args.filename, "rb") as fp:
data = fp.read()
else:
data = sys.stdin.buffer.read()
try:
html = data.decode(args.encoding, args.decode_errors)
except UnicodeDecodeError as err:
warning = bcolors.WARNING + "Warning:" + bcolors.ENDC
warning += " Use the " + bcolors.OKGREEN
warning += "--decode-errors=ignore" + bcolors.ENDC + " flag."
print(warning)
raise err
h = HTML2Text(baseurl=baseurl)
# handle options
if args.ul_style_dash:
h.ul_item_mark = "-"
if args.em_style_asterisk:
h.emphasis_mark = "*"
h.strong_mark = "__"
h.body_width = args.body_width
h.google_list_indent = args.list_indent
h.ignore_emphasis = args.ignore_emphasis
h.ignore_links = args.ignore_links
h.ignore_mailto_links = args.ignore_mailto_links
h.protect_links = args.protect_links
h.ignore_images = args.ignore_images
h.images_as_html = args.images_as_html
h.images_to_alt = args.images_to_alt
h.images_with_size = args.images_with_size
h.google_doc = args.google_doc
h.hide_strikethrough = args.hide_strikethrough
h.escape_snob = args.escape_snob
h.bypass_tables = args.bypass_tables
h.ignore_tables = args.ignore_tables
h.single_line_break = args.single_line_break
h.inline_links = args.inline_links
h.unicode_snob = args.unicode_snob
h.use_automatic_links = args.use_automatic_links
h.skip_internal_links = args.skip_internal_links
h.links_each_paragraph = args.links_each_paragraph
h.mark_code = args.mark_code
h.wrap_links = args.wrap_links
h.wrap_list_items = args.wrap_list_items
h.wrap_tables = args.wrap_tables
h.pad_tables = args.pad_tables
h.default_image_alt = args.default_image_alt
h.open_quote = args.open_quote
h.close_quote = args.close_quote
sys.stdout.write(h.handle(html))
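
The same conversion the CLI performs can be driven programmatically; a minimal sketch, assuming the package root exports HTML2Text as in the import at the top of this file (migration.html2text, per the rest of the tree):

from migration.html2text import HTML2Text

h = HTML2Text(baseurl="")
h.body_width = 0        # mirrors --body-width=0
h.ignore_images = True  # mirrors --ignore-images
print(h.handle("<p>Hello, <a href='https://discours.io'>world</a></p>"))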

View File

@@ -1,164 +0,0 @@
import re
# Use Unicode characters instead of their ascii pseudo-replacements
UNICODE_SNOB = True
# Marker to use for marking tables for padding post processing
TABLE_MARKER_FOR_PAD = "special_marker_for_table_padding"
# Escape all special characters. Output is less readable, but avoids
# corner case formatting issues.
ESCAPE_SNOB = True
# Put the links after each paragraph instead of at the end.
LINKS_EACH_PARAGRAPH = False
# Wrap long lines at position. 0 for no wrapping.
BODY_WIDTH = 0
# Don't show internal links (href="#local-anchor") -- corresponding link
# targets won't be visible in the plain text file anyway.
SKIP_INTERNAL_LINKS = False
# Use inline, rather than reference, formatting for images and links
INLINE_LINKS = True
# Protect links from line breaks surrounding them with angle brackets (in
# addition to their square brackets)
PROTECT_LINKS = True
WRAP_LINKS = True
# Wrap list items.
WRAP_LIST_ITEMS = False
# Wrap tables
WRAP_TABLES = False
# Number of pixels Google indents nested lists
GOOGLE_LIST_INDENT = 36
# Values Google and others may use to indicate bold text
BOLD_TEXT_STYLE_VALUES = ("bold", "700", "800", "900")
IGNORE_ANCHORS = False
IGNORE_MAILTO_LINKS = False
IGNORE_IMAGES = False
IMAGES_AS_HTML = False
IMAGES_TO_ALT = False
IMAGES_WITH_SIZE = False
IGNORE_EMPHASIS = False
MARK_CODE = True
DECODE_ERRORS = "strict"
DEFAULT_IMAGE_ALT = ""
PAD_TABLES = True
# Convert links with same href and text to <href> format
# if they are absolute links
USE_AUTOMATIC_LINKS = True
# For checking space-only lines on line 771
RE_SPACE = re.compile(r"\s\+")
RE_ORDERED_LIST_MATCHER = re.compile(r"\d+\.\s")
RE_UNORDERED_LIST_MATCHER = re.compile(r"[-\*\+]\s")
RE_MD_CHARS_MATCHER = re.compile(r"([\\\[\]\(\)])")
RE_MD_CHARS_MATCHER_ALL = re.compile(r"([`\*_{}\[\]\(\)#!])")
# to find links in the text
RE_LINK = re.compile(r"(\[.*?\] ?\(.*?\))|(\[.*?\]:.*?)")
# to find table separators
RE_TABLE = re.compile(r" \| ")
RE_MD_DOT_MATCHER = re.compile(
r"""
^ # start of line
(\s*\d+) # optional whitespace and a number
(\.) # dot
(?=\s) # lookahead assert whitespace
""",
re.MULTILINE | re.VERBOSE,
)
RE_MD_PLUS_MATCHER = re.compile(
r"""
^
(\s*)
(\+)
(?=\s)
""",
flags=re.MULTILINE | re.VERBOSE,
)
RE_MD_DASH_MATCHER = re.compile(
r"""
^
(\s*)
(-)
(?=\s|\-) # followed by whitespace (bullet list, or spaced out hr)
# or another dash (header or hr)
""",
flags=re.MULTILINE | re.VERBOSE,
)
RE_SLASH_CHARS = r"\`*_{}[]()#+-.!"
RE_MD_BACKSLASH_MATCHER = re.compile(
r"""
(\\) # match one slash
(?=[%s]) # followed by a char that requires escaping
"""
% re.escape(RE_SLASH_CHARS),
flags=re.VERBOSE,
)
UNIFIABLE = {
"rsquo": "'",
"lsquo": "'",
"rdquo": '"',
"ldquo": '"',
"copy": "(C)",
"mdash": "--",
"nbsp": " ",
"rarr": "->",
"larr": "<-",
"middot": "*",
"ndash": "-",
"oelig": "oe",
"aelig": "ae",
"agrave": "a",
"aacute": "a",
"acirc": "a",
"atilde": "a",
"auml": "a",
"aring": "a",
"egrave": "e",
"eacute": "e",
"ecirc": "e",
"euml": "e",
"igrave": "i",
"iacute": "i",
"icirc": "i",
"iuml": "i",
"ograve": "o",
"oacute": "o",
"ocirc": "o",
"otilde": "o",
"ouml": "o",
"ugrave": "u",
"uacute": "u",
"ucirc": "u",
"uuml": "u",
"lrm": "",
"rlm": "",
}
# Format tables in HTML rather than Markdown syntax
BYPASS_TABLES = False
# Ignore table-related tags (table, th, td, tr) while keeping rows
IGNORE_TABLES = False
# Use a single line break after a block element rather than two line breaks.
# NOTE: Requires body width setting to be 0.
SINGLE_LINE_BREAK = False
# Use double quotation marks when converting the <q> tag.
OPEN_QUOTE = '"'
CLOSE_QUOTE = '"'
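
The RE_MD_* patterns above feed escape_md_section() in utils.py; a quick self-contained illustration of what the dot matcher escapes so "1." at line start is not parsed as an ordered list:

import re

RE_MD_DOT_MATCHER = re.compile(
    r"""
    ^           # start of line
    (\s*\d+)    # optional whitespace and a number
    (\.)        # dot
    (?=\s)      # lookahead assert whitespace
    """,
    re.MULTILINE | re.VERBOSE,
)
print(RE_MD_DOT_MATCHER.sub(r"\1\\\2", "1. not a list"))  # -> 1\. not a list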

View File

@@ -1,18 +0,0 @@
from typing import Dict, Optional
class AnchorElement:
__slots__ = ["attrs", "count", "outcount"]
def __init__(self, attrs: Dict[str, Optional[str]], count: int, outcount: int):
self.attrs = attrs
self.count = count
self.outcount = outcount
class ListElement:
__slots__ = ["name", "num"]
def __init__(self, name: str, num: int):
self.name = name
self.num = num

View File

@@ -1,3 +0,0 @@
class OutCallback:
def __call__(self, s: str) -> None:
...

View File

@@ -1,287 +0,0 @@
import html.entities
from typing import Dict, List, Optional
from . import config
unifiable_n = {
html.entities.name2codepoint[k]: v
for k, v in config.UNIFIABLE.items()
if k != "nbsp"
}
def hn(tag: str) -> int:
if tag[0] == "h" and len(tag) == 2:
n = tag[1]
if "0" < n <= "9":
return int(n)
return 0
def dumb_property_dict(style: str) -> Dict[str, str]:
"""
:returns: A hash of css attributes
"""
return {
x.strip().lower(): y.strip().lower()
for x, y in [z.split(":", 1) for z in style.split(";") if ":" in z]
}
def dumb_css_parser(data: str) -> Dict[str, Dict[str, str]]:
"""
:type data: str
:returns: A hash of css selectors, each of which contains a hash of
css attributes.
:rtype: dict
"""
# remove @import sentences
data += ";"
importIndex = data.find("@import")
while importIndex != -1:
data = data[0:importIndex] + data[data.find(";", importIndex) + 1 :]
importIndex = data.find("@import")
# parse the css. reverted from dictionary comprehension in order to
# support older pythons
pairs = [x.split("{") for x in data.split("}") if "{" in x.strip()]
try:
elements = {a.strip(): dumb_property_dict(b) for a, b in pairs}
except ValueError:
elements = {} # not that important
return elements
def element_style(
attrs: Dict[str, Optional[str]],
style_def: Dict[str, Dict[str, str]],
parent_style: Dict[str, str],
) -> Dict[str, str]:
"""
:type attrs: dict
:type style_def: dict
:type parent_style: dict
:returns: A hash of the 'final' style attributes of the element
:rtype: dict
"""
style = parent_style.copy()
if attrs.get("class"):
for css_class in attrs["class"].split():
css_style = style_def.get("." + css_class, {})
style.update(css_style)
if attrs.get("style"):
immediate_style = dumb_property_dict(attrs["style"])
style.update(immediate_style)
return style
def google_list_style(style: Dict[str, str]) -> str:
"""
Finds out whether this is an ordered or unordered list
:type style: dict
:rtype: str
"""
if "list-style-type" in style:
list_style = style["list-style-type"]
if list_style in ["disc", "circle", "square", "none"]:
return "ul"
return "ol"
def google_has_height(style: Dict[str, str]) -> bool:
"""
Check if the style of the element has the 'height' attribute
explicitly defined
:type style: dict
:rtype: bool
"""
return "height" in style
def google_text_emphasis(style: Dict[str, str]) -> List[str]:
"""
:type style: dict
:returns: A list of all emphasis modifiers of the element
:rtype: list
"""
emphasis = []
if "text-decoration" in style:
emphasis.append(style["text-decoration"])
if "font-style" in style:
emphasis.append(style["font-style"])
if "font-weight" in style:
emphasis.append(style["font-weight"])
return emphasis
def google_fixed_width_font(style: Dict[str, str]) -> bool:
"""
Check if the css of the current element defines a fixed width font
:type style: dict
:rtype: bool
"""
font_family = ""
if "font-family" in style:
font_family = style["font-family"]
return "courier new" == font_family or "consolas" == font_family
def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
"""
Extract numbering from list element attributes
:type attrs: dict
:rtype: int or None
"""
if attrs.get("start"):
try:
return int(attrs["start"]) - 1
except ValueError:
pass
return 0
def skipwrap(
para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool
) -> bool:
# If it appears to contain a link
# don't wrap
if not wrap_links and config.RE_LINK.search(para):
return True
# If the text begins with four spaces or one tab, it's a code block;
# don't wrap
if para[0:4] == "    " or para[0:1] == "\t":
return True
# If the text begins with only two "--", possibly preceded by
# whitespace, that's an emdash; so wrap.
stripped = para.lstrip()
if stripped[0:2] == "--" and len(stripped) > 2 and stripped[2] != "-":
return False
# I'm not sure what this is for; I thought it was to detect lists,
# but there's a <br>-inside-<span> case in one of the tests that
# also depends upon it.
if stripped[0:1] in ("-", "*") and not stripped[0:2] == "**":
return not wrap_list_items
# If text contains a pipe character it is likely a table
if not wrap_tables and config.RE_TABLE.search(para):
return True
# If the text begins with a single -, *, or +, followed by a space,
# or an integer, followed by a ., followed by a space (in either
# case optionally proceeded by whitespace), it's a list; don't wrap.
return bool(
config.RE_ORDERED_LIST_MATCHER.match(stripped)
or config.RE_UNORDERED_LIST_MATCHER.match(stripped)
)
def escape_md(text: str) -> str:
"""
Escapes markdown-sensitive characters within other markdown
constructs.
"""
return config.RE_MD_CHARS_MATCHER.sub(r"\\\1", text)
def escape_md_section(text: str, snob: bool = False) -> str:
"""
Escapes markdown-sensitive characters across whole document sections.
"""
text = config.RE_MD_BACKSLASH_MATCHER.sub(r"\\\1", text)
if snob:
text = config.RE_MD_CHARS_MATCHER_ALL.sub(r"\\\1", text)
text = config.RE_MD_DOT_MATCHER.sub(r"\1\\\2", text)
text = config.RE_MD_PLUS_MATCHER.sub(r"\1\\\2", text)
text = config.RE_MD_DASH_MATCHER.sub(r"\1\\\2", text)
return text
def reformat_table(lines: List[str], right_margin: int) -> List[str]:
"""
Given the lines of a table
padds the cells and returns the new lines
"""
# find the maximum width of the columns
max_width = [len(x.rstrip()) + right_margin for x in lines[0].split("|")]
max_cols = len(max_width)
for line in lines:
cols = [x.rstrip() for x in line.split("|")]
num_cols = len(cols)
# don't drop any data if colspan attributes result in unequal lengths
if num_cols < max_cols:
cols += [""] * (max_cols - num_cols)
elif max_cols < num_cols:
max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
max_cols = num_cols
max_width = [
max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)
]
# reformat
new_lines = []
for line in lines:
cols = [x.rstrip() for x in line.split("|")]
if set(line.strip()) == set("-|"):
filler = "-"
new_cols = [
x.rstrip() + (filler * (M - len(x.rstrip())))
for x, M in zip(cols, max_width)
]
new_lines.append("|-" + "|".join(new_cols) + "|")
else:
filler = " "
new_cols = [
x.rstrip() + (filler * (M - len(x.rstrip())))
for x, M in zip(cols, max_width)
]
new_lines.append("| " + "|".join(new_cols) + "|")
return new_lines
def pad_tables_in_text(text: str, right_margin: int = 1) -> str:
"""
Provide padding for tables in the text
"""
lines = text.split("\n")
table_buffer = [] # type: List[str]
table_started = False
new_lines = []
for line in lines:
# Toggle table started
if config.TABLE_MARKER_FOR_PAD in line:
table_started = not table_started
if not table_started:
table = reformat_table(table_buffer, right_margin)
new_lines.extend(table)
table_buffer = []
new_lines.append("")
continue
# Process lines
if table_started:
table_buffer.append(line)
else:
new_lines.append(line)
return "\n".join(new_lines)
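
pad_tables_in_text() expects each table to be bracketed by TABLE_MARKER_FOR_PAD lines; a small usage sketch, assuming the package imports as migration.html2text as elsewhere in this migration tree:

from migration.html2text import config
from migration.html2text.utils import pad_tables_in_text

marker = config.TABLE_MARKER_FOR_PAD
text = "\n".join([marker, "a|bb", "-|-", "ccc|d", marker])
print(pad_tables_in_text(text))
# cells in every row come back padded to equal column widths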

View File

@@ -1 +0,0 @@
__all__ = ["users", "topics", "content_items", "comments"]

View File

@@ -1,211 +0,0 @@
from datetime import datetime, timezone
from dateutil.parser import parse as date_parse
from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
from orm.shout import ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User
from orm.shout import Shout
ts = datetime.now(tz=timezone.utc)
def auto_followers(session, topics, reaction_dict):
# creating shout's reactions following for reaction author
following1 = session.query(
ShoutReactionsFollower
).where(
ShoutReactionsFollower.follower == reaction_dict["createdBy"]
).filter(
ShoutReactionsFollower.shout == reaction_dict["shout"]
).first()
if not following1:
following1 = ShoutReactionsFollower.create(
follower=reaction_dict["createdBy"],
shout=reaction_dict["shout"],
auto=True
)
session.add(following1)
# creating topics followings for reaction author
for t in topics:
tf = session.query(
TopicFollower
).where(
TopicFollower.follower == reaction_dict["createdBy"]
).filter(
TopicFollower.topic == t['id']
).first()
if not tf:
topic_following = TopicFollower.create(
follower=reaction_dict["createdBy"],
topic=t['id'],
auto=True
)
session.add(topic_following)
def migrate_ratings(session, entry, reaction_dict):
for comment_rating_old in entry.get("ratings", []):
rater = (
session.query(User)
.filter(User.oid == comment_rating_old["createdBy"])
.first()
)
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction_dict["id"],
"kind": ReactionKind.LIKE
if comment_rating_old["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id if rater else 1,
}
cts = comment_rating_old.get("createdAt")
if cts:
re_reaction_dict["createdAt"] = date_parse(cts)
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
following2 = session.query(
ShoutReactionsFollower
).where(
ShoutReactionsFollower.follower == re_reaction_dict['createdBy']
).filter(
ShoutReactionsFollower.shout == rr.shout
).first()
if not following2:
following2 = ShoutReactionsFollower.create(
follower=re_reaction_dict['createdBy'],
shout=rr.shout,
auto=True
)
session.add(following2)
session.add(rr)
except Exception as e:
print("[migration] comment rating error: %r" % re_reaction_dict)
raise e
session.commit()
async def migrate(entry, storage):
"""
{
"_id": "hdtwS8fSyFLxXCgSC",
"body": "<p>",
"contentItem": "mnK8KsJHPRi8DrybQ",
"createdBy": "bMFPuyNg6qAD2mhXe",
"thread": "01/",
"createdAt": "2016-04-19 04:33:53+00:00",
"ratings": [
{ "createdBy": "AqmRukvRiExNpAe8C", "value": 1 },
{ "createdBy": "YdE76Wth3yqymKEu5", "value": 1 }
],
"rating": 2,
"updatedAt": "2020-05-27 19:22:57.091000+00:00",
"updatedBy": "0"
}
->
type Reaction {
id: Int!
shout: Shout!
createdAt: DateTime!
createdBy: User!
updatedAt: DateTime
deletedAt: DateTime
deletedBy: User
range: String # full / 0:2340
kind: ReactionKind!
body: String
replyTo: Reaction
stat: Stat
old_id: String
old_thread: String
}
"""
old_ts = entry.get("createdAt")
reaction_dict = {
"createdAt": (ts if not old_ts else date_parse(old_ts)),
"body": html2text(entry.get("body", "")),
"oid": entry["_id"],
}
shout_oid = entry.get("contentItem")
if shout_oid not in storage["shouts"]["by_oid"]:
if len(storage["shouts"]["by_oid"]) > 0:
return shout_oid
else:
print("[migration] no shouts migrated yet")
raise Exception("no shouts migrated yet")
else:
stage = "started"
reaction = None
with local_session() as session:
author = session.query(User).filter(User.oid == entry["createdBy"]).first()
old_shout = storage["shouts"]["by_oid"].get(shout_oid)
if not old_shout:
raise Exception("no old shout in storage")
else:
stage = "author and old id found"
try:
shout = session.query(
Shout
).where(Shout.slug == old_shout["slug"]).one()
if shout:
reaction_dict["shout"] = shout.id
reaction_dict["createdBy"] = author.id if author else 1
reaction_dict["kind"] = ReactionKind.COMMENT
# creating reaction from old comment
reaction = Reaction.create(**reaction_dict)
session.add(reaction)
# session.commit()
stage = "new reaction committed"
reaction_dict = reaction.dict()
topics = [t.dict() for t in shout.topics]
auto_followers(session, topics, reaction_dict)
migrate_ratings(session, entry, reaction_dict)
return reaction
except Exception as e:
print(e)
print(reaction)
raise Exception(stage)
return
def migrate_2stage(old_comment, idmap):
if old_comment.get('body'):
new_id = idmap.get(old_comment.get('oid')) or idmap.get(old_comment.get('_id'))
if new_id:
new_replyto_id = None
old_replyto_id = old_comment.get("replyTo")
if old_replyto_id:
new_replyto_id = int(idmap.get(old_replyto_id, "0"))
with local_session() as session:
comment = session.query(Reaction).where(Reaction.id == new_id).first()
try:
if new_replyto_id:
new_reply = session.query(Reaction).where(Reaction.id == new_replyto_id).first()
if not new_reply:
print(new_replyto_id)
raise Exception("cannot find reply by id!")
comment.replyTo = new_reply.id
session.add(comment)
srf = session.query(ShoutReactionsFollower).where(
ShoutReactionsFollower.shout == comment.shout
).filter(
ShoutReactionsFollower.follower == comment.createdBy
).first()
if not srf:
srf = ShoutReactionsFollower.create(
shout=comment.shout, follower=comment.createdBy, auto=True
)
session.add(srf)
session.commit()
except Exception:
raise Exception("cannot find a comment by oldid")
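
migrate() above creates each Reaction on the first pass, and migrate_2stage() rewires replyTo once every comment has a new id. A hedged driver sketch of that two-pass flow (the real driver presumably lives in the suppressed diff above; comments_data and the idmap shape are illustrative):

idmap = {}

async def migrate_comments(comments_data, storage):
    # first pass: create reactions, remember new ids by old oid
    for entry in comments_data:
        reaction = await migrate(entry, storage)
        if reaction:
            idmap[entry["_id"]] = reaction.id
    # second pass: resolve replyTo references now that all ids exist
    for entry in comments_data:
        migrate_2stage(entry, idmap)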

View File

@@ -1,399 +0,0 @@
from datetime import datetime, timezone
import json
from dateutil.parser import parse as date_parse
from sqlalchemy.exc import IntegrityError
from transliterate import translit
from base.orm import local_session
from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower
from orm.user import User
from orm.topic import TopicFollower, Topic
from services.stat.viewed import ViewedStorage
import re
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now(tz=timezone.utc)
type2layout = {
"Article": "article",
"Literature": "literature",
"Music": "music",
"Video": "video",
"Image": "image",
}
anondict = {"slug": "anonymous", "id": 1, "name": "Аноним"}
discours = {"slug": "discours", "id": 2, "name": "Дискурс"}
def get_shout_slug(entry):
slug = entry.get("slug", "")
if not slug:
for friend in entry.get("friendlySlugs", []):
slug = friend.get("slug", "")
if slug:
break
slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
return slug
def create_author_from_app(app):
user = None
userdata = None
# check if email is used
if app['email']:
with local_session() as session:
user = session.query(User).where(User.email == app['email']).first()
if not user:
# print('[migration] app %r' % app)
name = app.get('name')
if name:
slug = translit(name, "ru", reversed=True).lower()
slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
print('[migration] created slug %s' % slug)
# check if slug is used
if slug:
user = session.query(User).where(User.slug == slug).first()
# get slug from email
if user:
slug = app['email'].split('@')[0]
user = session.query(User).where(User.slug == slug).first()
# one more try
if user:
slug += '-author'
user = session.query(User).where(User.slug == slug).first()
# create user with application data
if not user:
userdata = {
"username": app["email"],
"email": app["email"],
"name": app.get("name", ""),
"emailConfirmed": False,
"slug": slug,
"createdAt": ts,
"lastSeen": ts,
}
# print('[migration] userdata %r' % userdata)
user = User.create(**userdata)
session.add(user)
session.commit()
userdata['id'] = user.id
userdata = user.dict()
return userdata
else:
raise Exception("app is not ok", app)
async def create_shout(shout_dict):
s = Shout.create(**shout_dict)
author = s.authors[0]
with local_session() as session:
srf = session.query(ShoutReactionsFollower).where(
ShoutReactionsFollower.shout == s.id
).filter(
ShoutReactionsFollower.follower == author.id
).first()
if not srf:
srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
session.add(srf)
session.commit()
return s
async def get_user(entry, storage):
app = entry.get("application")
userdata = None
user_oid = None
if app:
userdata = create_author_from_app(app)
else:
user_oid = entry.get("createdBy")
if user_oid == "0":
userdata = discours
elif user_oid:
userdata = storage["users"]["by_oid"].get(user_oid)
if not userdata:
print('no userdata by oid, anonymous')
userdata = anondict
print(app)
# cleanup slug
if userdata:
slug = userdata.get("slug", "")
if slug:
slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
userdata["slug"] = slug
else:
userdata = anondict
user = await process_user(userdata, storage, user_oid)
return user, user_oid
async def migrate(entry, storage):
author, user_oid = await get_user(entry, storage)
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
"authors": [author, ],
"slug": get_shout_slug(entry),
"cover": (
"https://assets.discours.io/unsafe/1600x/" +
entry["thumborId"] if entry.get("thumborId") else entry.get("image", {}).get("url")
),
"visibility": "public" if entry.get("published") else "authors",
"publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
"deletedAt": date_parse(entry.get("deletedAt")) if entry.get("deletedAt") else None,
"createdAt": date_parse(entry.get("createdAt", OLD_DATE)),
"updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
"createdBy": author.id,
"topics": await add_topics_follower(entry, storage, author),
"body": extract_html(entry, cleanup=True)
}
# main topic patch
r['mainTopic'] = r['topics'][0]
# published author auto-confirm
if entry.get("published"):
with local_session() as session:
# update user.emailConfirmed if published
author.emailConfirmed = True
session.add(author)
session.commit()
# media
media = extract_media(entry)
r["media"] = json.dumps(media, ensure_ascii=True) if media else None
# ----------------------------------- copy
shout_dict = r.copy()
del shout_dict["topics"]
try:
# save shout to db
shout_dict["oid"] = entry.get("_id", "")
shout = await create_shout(shout_dict)
except IntegrityError as e:
print('[migration] create_shout integrity error', e)
shout = await resolve_create_shout(shout_dict)
except Exception as e:
raise Exception(e)
# update data
shout_dict = shout.dict()
shout_dict["authors"] = [author.dict(), ]
# shout topics aftermath
shout_dict["topics"] = await topics_aftermath(r, storage)
# content_item ratings to reactions
await content_ratings_to_reactions(entry, shout_dict["slug"])
# shout views
await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours')
# del shout_dict['ratings']
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
storage["shouts"]["by_slug"][shout_dict["slug"]] = shout_dict
return shout_dict
async def add_topics_follower(entry, storage, user):
topics = set([])
category = entry.get("category")
topics_by_oid = storage["topics"]["by_oid"]
oids = [category, ] + entry.get("tags", [])
for toid in oids:
tslug = topics_by_oid.get(toid, {}).get("slug")
if tslug:
topics.add(tslug)
ttt = list(topics)
# add author as TopicFollower
with local_session() as session:
for tpcslug in topics:
try:
tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
if tpc:
tf = session.query(
TopicFollower
).where(
TopicFollower.follower == user.id
).filter(
TopicFollower.topic == tpc.id
).first()
if not tf:
tf = TopicFollower.create(
topic=tpc.id,
follower=user.id,
auto=True
)
session.add(tf)
session.commit()
except IntegrityError:
print('[migration.shout] hidden by topic ' + tpc.slug)
# main topic
maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
if maintopic in ttt:
ttt.remove(maintopic)
ttt.insert(0, maintopic)
return ttt
async def process_user(userdata, storage, oid):
with local_session() as session:
uid = userdata.get("id") # anonymous as
if not uid:
print(userdata)
print("has no id field, set it @anonymous")
userdata = anondict
uid = 1
user = session.query(User).filter(User.id == uid).first()
if not user:
try:
slug = userdata["slug"].lower().strip()
slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
userdata["slug"] = slug
user = User.create(**userdata)
session.add(user)
session.commit()
except IntegrityError:
print(f"[migration] user creating with slug {userdata['slug']}")
print("[migration] from userdata")
print(userdata)
raise Exception("[migration] cannot create user in content_items.get_user()")
if user.id == 946:
print("[migration] ***************** ALPINA")
if user.id == 2:
print("[migration] +++++++++++++++++ DISCOURS")
userdata["id"] = user.id
userdata["createdAt"] = user.createdAt
storage["users"]["by_slug"][userdata["slug"]] = userdata
storage["users"]["by_oid"][oid] = userdata
if not user:
raise Exception("could not get a user")
return user
async def resolve_create_shout(shout_dict):
with local_session() as session:
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
if s.createdAt != shout_dict['createdAt']:
# create new with different slug
shout_dict["slug"] += '-' + shout_dict["layout"]
try:
await create_shout(shout_dict)
except IntegrityError as e:
print(e)
bump = True
else:
# update old
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
print(
"[migration] shout already exists, but differs in %s"
% key
)
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
bump = True
if bump:
s.update(shout_dict)
else:
print("[migration] something went wrong with shout: \n%r" % shout_dict)
raise Exception("could not resolve shout %r" % shout_dict.get("slug"))
session.commit()
return s
async def topics_aftermath(entry, storage):
r = []
for tpc in filter(lambda x: bool(x), entry["topics"]):
oldslug = tpc
newslug = storage["replacements"].get(oldslug, oldslug)
if newslug:
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == entry["slug"]).first()
new_topic = session.query(Topic).where(Topic.slug == newslug).first()
shout_topic_old = (
session.query(ShoutTopic)
.join(Shout)
.join(Topic)
.filter(Shout.slug == entry["slug"])
.filter(Topic.slug == oldslug)
.first()
)
if shout_topic_old:
shout_topic_old.update({"topic": new_topic.id})
else:
shout_topic_new = (
session.query(ShoutTopic)
.join(Shout)
.join(Topic)
.filter(Shout.slug == entry["slug"])
.filter(Topic.slug == newslug)
.first()
)
if not shout_topic_new:
try:
ShoutTopic.create(
**{"shout": shout.id, "topic": new_topic.id}
)
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
if newslug not in r:
r.append(newslug)
else:
print("[migration] ignored topic slug: \n%r" % tpc)
# raise Exception
return r
async def content_ratings_to_reactions(entry, slug):
try:
with local_session() as session:
for content_rating in entry.get("ratings", []):
rater = (
session.query(User)
.filter(User.oid == content_rating["createdBy"])
.first()
) or User.default_user
shout = session.query(Shout).where(Shout.slug == slug).first()
cts = content_rating.get("createdAt")
reaction_dict = {
"createdAt": date_parse(cts) if cts else None,
"kind": ReactionKind.LIKE
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id,
"shout": shout.id
}
reaction = (
session.query(Reaction)
.filter(Reaction.shout == reaction_dict["shout"])
.filter(Reaction.createdBy == reaction_dict["createdBy"])
.filter(Reaction.kind == reaction_dict["kind"])
.first()
)
if reaction:
k = ReactionKind.AGREE if content_rating["value"] > 0 else ReactionKind.DISAGREE
reaction_dict["kind"] = k
reaction.update(reaction_dict)
session.add(reaction)
else:
rea = Reaction.create(**reaction_dict)
session.add(rea)
# shout_dict['ratings'].append(reaction_dict)
session.commit()
except Exception:
print("[migration] content_item.ratings error: \n%r" % content_rating)
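
create_author_from_app() and process_user() above derive slugs by transliterating Cyrillic names and squashing everything outside [0-9a-zA-Z] to dashes. A tiny illustration of that path with a made-up name:

import re
from transliterate import translit

name = "Имя Автора"
slug = translit(name, "ru", reversed=True).lower()
slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
print(slug)  # -> "imya-avtora"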

View File

@@ -1,42 +0,0 @@
from base.orm import local_session
from migration.extract import extract_md
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
def migrate(entry, storage):
post_oid = entry['contentItem']
print(post_oid)
shout_dict = storage['shouts']['by_oid'].get(post_oid)
if shout_dict:
print(shout_dict['body'])
remark = {
"shout": shout_dict['id'],
"body": extract_md(
html2text(entry['body']),
shout_dict
),
"kind": ReactionKind.REMARK
}
if entry.get('textBefore'):
remark['range'] = str(
shout_dict['body']
.index(
entry['textBefore'] or ''
)
) + ':' + str(
shout_dict['body']
.index(
entry['textAfter'] or ''
) + len(
entry['textAfter'] or ''
)
)
with local_session() as session:
rmrk = Reaction.create(**remark)
session.commit()
rmrk_dict = rmrk.dict() # the ORM instance itself is not subscriptable
rmrk_dict.pop("_sa_instance_state", None)
return rmrk_dict
return
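
The 'range' field built above encodes "start:end" character offsets of the remark target inside the shout body. A worked illustration of the same arithmetic with illustrative strings:

body = "an example shout body with a remark target inside"
text_before = "remark"
text_after = "target"

start = body.index(text_before)
end = body.index(text_after) + len(text_after)
print(str(start) + ':' + str(end))  # -> "29:42"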

View File

@@ -1,828 +0,0 @@
{
"207": "207",
"1990-e": "90s",
"2000-e": "2000s",
"90-e": "90s",
"Georgia": "georgia",
"Japan": "japan",
"Sweden": "sweden",
"abstraktsiya": "abstract",
"absurdism": "absurdism",
"acclimatization": "acclimatisation",
"activism": "activism",
"adolf-gitler": "adolf-hitler",
"afrika": "africa",
"agata-kristi": "agatha-christie",
"agressivnoe-povedenie": "agression",
"agressiya": "agression",
"aktsii": "actions",
"aktsionizm": "actionism",
"alber-kamyu": "albert-kamus",
"albomy": "albums",
"aleksandr-griboedov": "aleksander-griboedov",
"aleksandr-pushkin": "aleksander-pushkin",
"aleksandr-solzhenitsyn": "aleksander-solzhenitsyn",
"aleksandr-vvedenskiy": "aleksander-vvedensky",
"aleksey-navalnyy": "alexey-navalny",
"alfavit": "alphabet",
"alkogol": "alcohol",
"alternativa": "alternative",
"alternative": "alternative",
"alternativnaya-istoriya": "alternative-history",
"amerika": "america",
"anarhizm": "anarchism",
"anatoliy-mariengof": "anatoly-mariengof",
"ancient-russia": "ancient-russia",
"andegraund": "underground",
"andrey-platonov": "andrey-platonov",
"andrey-rodionov": "andrey-rodionov",
"andrey-tarkovskiy": "andrey-tarkovsky",
"angliyskie-istorii": "english-stories",
"angliyskiy-yazyk": "english-langugae",
"ango": "ango",
"animation": "animation",
"animatsiya": "animation",
"anime": "anime",
"anri-volohonskiy": "anri-volohonsky",
"antifashizm": "anti-faschism",
"antiquity": "antiquity",
"antiutopiya": "dystopia",
"anton-dolin": "anton-dolin",
"antropology": "antropology",
"antropotsen": "antropocenus",
"architecture": "architecture",
"arheologiya": "archeology",
"arhetipy": "archetypes",
"arhiv": "archive",
"aristokraty": "aristocracy",
"aristotel": "aristotle",
"arktika": "arctic",
"armiya": "army",
"armiya-1": "army",
"art": "art",
"art-is": "art-is",
"artists": "artists",
"ateizm": "atheism",
"audio-poetry": "audio-poetry",
"audiopoeziya": "audio-poetry",
"audiospektakl": "audio-spectacles",
"auktsyon": "auktsyon",
"avangard": "avantgarde",
"avtofikshn": "autofiction",
"avtorskaya-pesnya": "bardsongs",
"azbuka-immigratsii": "immigration-basics",
"aziatskiy-kinematograf": "asian-cinema",
"b-movie": "b-movie",
"bannye-chteniya": "sauna-reading",
"bardsongs": "bardsongs",
"bdsm": "bdsm",
"beecake": "beecake",
"belarus": "belarus",
"belgiya": "belgium",
"bertold-breht": "berttold-brecht",
"bezumie": "madness",
"biography": "biography",
"biologiya": "biology",
"bipolyarnoe-rasstroystvo": "bipolar-disorder",
"bitniki": "beatnics",
"biznes": "business",
"blizhniy-vostok": "middle-east",
"blizost": "closeness",
"blocked-in-russia": "blocked-in-russia",
"blokada": "blockade",
"bob-dilan": "bob-dylan",
"bog": "god",
"bol": "pain",
"bolotnoe-delo": "bolotnaya-case",
"books": "books",
"boris-eltsin": "boris-eltsin",
"boris-godunov": "boris-godunov",
"boris-grebenschikov": "boris-grebenschikov",
"boris-nemtsov": "boris-nemtsov",
"boris-pasternak": "boris-pasternak",
"brak": "marriage",
"bret-iston-ellis": "bret-iston-ellis",
"buddizm": "buddhism",
"bullying": "bullying",
"bunt": "riot",
"burning-man": "burning-man",
"bytie": "being",
"byurokratiya": "bureaucracy",
"capitalism": "capitalism",
"censored-in-russia": "censored-in-russia",
"ch-rno-beloe": "black-and-white",
"ch-rnyy-yumor": "black-humour",
"chapters": "chapters",
"charity": "charity",
"chayldfri": "childfree",
"chechenskaya-voyna": "chechen-war",
"chechnya": "chechnya",
"chelovek": "male",
"chernobyl": "chernobyl",
"chernyy-yumor": "black-humour",
"children": "children",
"china": "china",
"chinovniki": "bureaucracy",
"chukotka": "chukotka",
"chuma": "plague",
"church": "church",
"cinema": "cinema",
"city": "city",
"civil-position": "civil-position",
"clips": "clips",
"collage": "collage",
"comics": "comics",
"conspiracy-theory": "conspiracy-theory",
"contemporary-art": "contemporary-art",
"contemporary-poetry": "poetry",
"contemporary-prose": "prose",
"coronavirus": "coronavirus",
"corruption": "corruption",
"creative-writing-school": "creative-writing-school",
"crime": "crime",
"criticism": "criticism",
"critiques": "reviews",
"culture": "culture",
"dadaizm": "dadaism",
"daniel-defo": "daniel-defoe",
"daniil-harms": "daniil-kharms",
"dante-aligeri": "dante-alighieri",
"darkveyv": "darkwave",
"death": "death",
"debaty": "debats",
"delo-seti": "seti-case",
"democracy": "democracy",
"demografiya": "demographics",
"demonstrations": "demonstrations",
"depression": "depression",
"derevnya": "village",
"derrida": "derrida",
"design": "design",
"detskie-doma": "orphanages",
"detstvo": "childhood",
"devid-linch": "david-linch",
"devyanostye": "90s",
"dialog": "dialogue",
"digital": "digital",
"digital-art": "digital-art",
"dinozavry": "dinosaurs",
"directing": "directing",
"diskurs": "discours",
"diskurs-1": "discourse",
"diskurs-analiz": "discourse-analytics",
"dissidenty": "dissidents",
"diy": "diy",
"dmitriy-donskoy": "dmitriy-donskoy",
"dmitriy-prigov": "dmitriy-prigov",
"dnevnik-1": "dairy",
"dnevniki": "dairies",
"documentary": "documentary",
"dokumentalnaya-poema": "documentary-poem",
"dokumentalnaya-poeziya": "documentary-poetry",
"dokumenty": "doсuments",
"domashnee-nasilie": "home-terror",
"donald-tramp": "donald-trump",
"donbass": "donbass",
"donbass-diary": "donbass-diary",
"donorstvo": "donation",
"dozhd": "rain",
"drama": "drama",
"dramaturgy": "dramaturgy",
"drawing": "drawing",
"drevo-zhizni": "tree-of-life",
"drugs": "drugs",
"duh": "spirit",
"dzhaz": "jazz",
"dzhek-keruak": "jack-keruak",
"dzhim-morrison": "jim-morrison",
"dzhordzh-romero": "george-romero",
"dzhordzho-agamben": "giorgio-agamben",
"ecology": "ecology",
"economics": "economics",
"eda": "food",
"editorial-statements": "editorial-statements",
"eduard-limonov": "eduard-limonov",
"education": "education",
"egor-letov": "egor-letov",
"ekspat": "expat",
"eksperiment": "experiments",
"eksperimentalnaya-muzyka": "experimental-music",
"ekspressionizm": "expressionism",
"ekstremizm": "extremism",
"ekzistentsializm-1": "existentialism",
"ekzistentsiya": "existence",
"elections": "elections",
"electronic": "electronics",
"electronics": "electronics",
"elena-glinskaya": "elena-glinskaya",
"elena-guro": "elena-guro",
"elizaveta-mnatsakanova": "elizaveta-mnatsakanova",
"embient": "ambient",
"emigration": "emigration",
"emil-dyurkgeym": "emile-durkheim",
"emotsii": "emotions",
"empiric": "empiric",
"epidemiya": "pandemic",
"erich-von-neff": "erich-von-neff",
"erotika": "erotics",
"essay": "essay",
"estetika": "aestetics",
"etika": "ethics",
"etno": "ethno",
"etnos": "ethnics",
"everyday-life": "everyday-life",
"evgeniy-onegin": "eugene-onegin",
"evolyutsiya": "evolution",
"exhibitions": "exhibitions",
"experience": "experiences",
"experimental": "experimental",
"experimental-music": "experimental-music",
"explanation": "explanation",
"faktcheking": "fact-checking",
"falsifikatsii": "falsifications",
"family": "family",
"fanfiki": "fan-fiction",
"fantastika": "sci-fi",
"fatalizm": "fatalism",
"fedor-dostoevskiy": "fedor-dostoevsky",
"fedor-ioannovich": "fedor-ioannovich",
"feleton": "feuilleton",
"feminism": "feminism",
"fenomenologiya": "phenomenology",
"fentezi": "fantasy",
"festival": "festival",
"festival-territoriya": "festival-territory",
"folk": "folk",
"folklor": "folklore",
"fotoreportazh": "photoreports",
"france": "france",
"frants-kafka": "franz-kafka",
"frederik-begbeder": "frederick-begbeder",
"freedom": "freedom",
"friendship": "friendship",
"fsb": "fsb",
"futbol": "footbool",
"future": "future",
"futuristy": "futurists",
"futurizm": "futurism",
"galereya": "gallery",
"galereya-anna-nova": "gallery-anna-nova",
"gdr": "gdr",
"gender": "gender",
"gendernyy-diskurs": "gender",
"gennadiy-aygi": "gennadiy-aygi",
"gerhard-rihter": "gerhard-rihter",
"germaniya": "germany",
"germenevtika": "hermeneutics",
"geroi": "heroes",
"girls": "girls",
"gkchp": "gkchp",
"glitch": "glitch",
"globalizatsiya": "globalisation",
"gollivud": "hollywood",
"gonzo": "gonzo",
"gore-ot-uma": "woe-from-wit",
"graffiti": "graffiti",
"graficheskaya-novella": "graphic-novell",
"graphics": "graphics",
"gravyura": "engraving",
"grazhdanskaya-oborona": "grazhdanskaya-oborona",
"gretsiya": "greece",
"griby": "mushrooms",
"gruziya-2": "georgia",
"gulag": "gulag",
"han-batyy": "khan-batyy",
"hayku": "haiku",
"health": "health",
"himiya": "chemistry",
"hip-hop": "hip-hop",
"history": "history",
"history-of-russia": "history-of-russia",
"holokost": "holocaust",
"horeografiya": "choreography",
"horror": "horror",
"hospis": "hospice",
"hristianstvo": "christianity",
"humans": "humans",
"humour": "humour",
"ideologiya": "ideology",
"idm": "idm",
"igil": "isis",
"igor-pomerantsev": "igor-pomerantsev",
"igra": "game",
"igra-prestolov": "game-of-throne",
"igry": "games",
"iisus-hristos": "jesus-christ",
"illness": "illness",
"illustration-history": "illustration-history",
"illustrations": "illustrations",
"imazhinizm": "imagism",
"immanuil-kant": "immanuel-kant",
"impressionizm": "impressionism",
"improvizatsiya": "improvisation",
"indi": "indie",
"individualizm": "individualism",
"infografika": "infographics",
"informatsiya": "information",
"ingmar-bergman": "ingmar-bergman",
"inklyuziya": "inclusion",
"installyatsiya": "installation",
"internet": "internet",
"interview": "interview",
"invalidnost": "disability",
"investigations": "investigations",
"iosif-brodskiy": "joseph-brodsky",
"iosif-stalin": "joseph-stalin",
"iskusstvennyy-intellekt": "artificial-intelligence",
"islam": "islam",
"istoriya-moskvy": "moscow-history",
"istoriya-nauki": "history-of-sceince",
"istoriya-o-medsestre": "nurse-story",
"istoriya-teatra": "theatre-history",
"italiya": "italy",
"italyanskiy-yazyk": "italian-language",
"iudaika": "judaica",
"ivan-groznyy": "ivan-grozny",
"ivan-iii-gorbatyy": "ivan-iii-gorbaty",
"ivan-kalita": "ivan-kalita",
"ivan-krylov": "ivan-krylov",
"izobreteniya": "inventions",
"izrail-1": "israel",
"jazz": "jazz",
"john-lennon": "john-lennon",
"journalism": "journalism",
"justice": "justice",
"k-pop": "k-pop",
"kalligrafiya": "calligraphy",
"karikatura": "caricatures",
"kartochki-rubinshteyna": "rubinstein-cards",
"katrin-nenasheva": "katrin-nenasheva",
"kavarga": "kavarga",
"kavkaz": "caucasus",
"kazan": "kazan",
"kiberbezopasnost": "cybersecurity",
"kinoklub": "cinema-club",
"kinokritika": "film-criticism",
"kirill-serebrennikov": "kirill-serebrennikov",
"kladbische": "cemetery",
"klassika": "classic",
"kollektivnoe-bessoznatelnoe": "сollective-unconscious",
"komediya": "comedy",
"kommunikatsii": "communications",
"kommunizm": "communism",
"kommuny": "communes",
"kompyuternye-igry": "computer-games",
"konets-vesny": "end-of-spring",
"konservatizm": "conservatism",
"kontrkultura": "counter-culture",
"kontseptualizm": "conceptualism",
"korotkometrazhka": "cinema-shorts",
"kosmos": "cosmos",
"kraudfanding": "crowdfunding",
"kriptovalyuty": "cryptocurrencies",
"krizis": "crisis",
"krov": "blood",
"krym": "crimea",
"kulturologiya": "culturology",
"kulty": "cults",
"kurdistan": "kurdistan",
"kurt-kobeyn": "kurt-cobain",
"kurt-vonnegut": "kurt-vonnegut",
"kvir": "queer",
"laboratoriya": "lab",
"language": "languages",
"lars-fon-trier": "lars-fon-trier",
"laws": "laws",
"lectures": "lectures",
"leto": "summer",
"lev-tolstoy": "leo-tolstoy",
"lgbt": "lgbt",
"liberalizm": "liberalism",
"libertarianstvo": "libertarianism",
"life": "life",
"likbez": "likbez",
"lingvistika": "linguistics",
"lirika": "lirics",
"literary-studies": "literary-studies",
"literature": "literature",
"literaturnyykaver": "literature-cover",
"lo-fi": "lo-fi",
"lomonosov": "lomonosov",
"love": "love",
"luzha-goluboy-krovi": "luzha-goluboy-krovi",
"lyudvig-vitgenshteyn": "ludwig-wittgenstein",
"lzhedmitriy": "false-dmitry",
"lzhenauka": "pseudoscience",
"magiya": "magic",
"maks-veber": "max-weber",
"manifests": "manifests",
"manipulyatsii-soznaniem": "mind-manipulation",
"marina-abramovich": "marina-abramovich",
"marketing": "marketing",
"marksizm": "marxism",
"marsel-dyushan": "marchel-duchamp",
"marsel-prust": "marcel-proust",
"martin-haydegger": "martin-hidegger",
"matematika": "maths",
"mayakovskiy": "vladimir-mayakovsky",
"media": "media",
"medicine": "medicine",
"memuary": "memoirs",
"menedzhment": "management",
"menty": "police",
"merab-mamardashvili": "merab-mamardashvili",
"mest": "revenge",
"metamodernizm": "metamodern",
"metavselennaya": "metaverse",
"metro": "metro",
"mifologiya": "mythology",
"mify": "myth",
"mihael-haneke": "michael-haneke",
"mihail-baryshnikov": "mihail-baryshnikov",
"mihail-bulgakov": "mihail-bulgakov",
"mikrotonalnaya-muzyka": "mikrotone-muzyka",
"minimalizm": "minimalism",
"minkult-privet": "minkult-privet",
"mir": "world",
"mirovozzrenie": "mindsets",
"mishel-fuko": "michel-foucault",
"mistika": "mystics",
"mitropolit-makariy": "mitropolit-makariy",
"mlm": "mlm",
"mobilizatsiya": "mobilisation",
"moda": "fashion",
"modernizm": "modernism",
"mokyumentari": "mockumentary",
"molodezh": "youth",
"moloko-plus": "moloko-plus",
"money": "money",
"monologs": "monologues",
"monstratsiya": "monstration",
"moralnaya-otvetstvennost": "moral-responsibility",
"more": "sea",
"moscow": "moscow",
"moshennichestvo": "frauds",
"moskovskiy-romanticheskiy-kontseptualizm": "moscow-romantic-conceptualism",
"moskovskoe-delo": "moscow-case",
"movies": "movies",
"mozg": "brain",
"multiplikatsiya": "animation",
"music": "music",
"musulmanstvo": "islam",
"muzei": "museum",
"muzey": "museum",
"muzhchiny": "man",
"myshlenie": "thinking",
"nagornyy-karabah": "nagorno-karabakh",
"nasilie-1": "violence",
"natsionalizm": "nationalism",
"natsionalnaya-ideya": "national-idea",
"natsizm": "nazism",
"natyurmort": "nature-morte",
"nauchpop": "pop-science",
"nbp": "nbp",
"nenavist": "hate",
"neofitsialnaya-literatura": "unofficial-literature",
"neoklassika": "neoclassic",
"neprozrachnye-smysly": "hidden-meanings",
"neravenstvo": "inequality",
"net-voyne": "no-war",
"new-year": "new-year",
"neyronauka": "neuro-science",
"neyroseti": "neural-networks",
"niu-vshe": "hse",
"nizhniy-novgorod": "nizhny-novgorod",
"nko": "nonprofits",
"nlo": "ufo",
"nobelevskaya-premiya": "nobel-prize",
"noize-mc": "noize-mc",
"nonkonformizm": "nonconformism",
"notforall": "notforall",
"novaya-drama": "new-drama",
"novosti": "news",
"noyz": "noise",
"nuar": "noir",
"oberiu": "oberiu",
"ocherk": "etudes",
"ochevidnyy-nuar": "ochevidnyy-nuar",
"odinochestvo": "loneliness",
"odna-kniga-odna-istoriya": "one-book-one-story",
"okrainy": "outskirts",
"omon": "swat",
"opinions": "opinions",
"oppozitsiya": "opposition",
"orhan-pamuk": "orhan-pamuk",
"ornitologiya": "ornitology",
"osen": "autumn",
"osip-mandelshtam": "osip-mandelshtam",
"oskar-uayld": "oscar-wilde",
"osoznanie": "awareness",
"otnosheniya": "relationship",
"pablo-pikasso": "pablo-picasso",
"painting": "painting",
"paintings": "painting",
"pamyat": "memory",
"pandemiya": "pandemic",
"parizh": "paris",
"patriotizm": "patriotism",
"patsifizm": "pacifism",
"paul-tselan": "paul-tselan",
"per-burd": "pierre-bourdieu",
"perezhivaniya": "worries",
"performance": "performance",
"peyzazh": "landscape",
"philology": "philology",
"philosophy": "philosophy",
"photo": "photography",
"photography": "photography",
"photoprojects": "photoprojects",
"plakaty": "posters",
"plastilin": "plasticine",
"plays": "plays",
"podrostki": "teenagers",
"poema": "poem",
"poems": "poems",
"poeticheskaya-proza": "poetic-prose",
"poetry": "poetry",
"poetry-of-squares": "poetry-of-squares",
"poetry-slam": "poetry-slam",
"pokoy": "peace",
"police": "police",
"politicheskoe-fentezi": "political-fantasy",
"politics": "politics",
"politzaklyuchennye": "political-prisoners",
"polsha": "poland",
"pomosch": "help",
"pop-art": "pop-art",
"pop-culture": "pop-culture",
"populyarnaya-psihologiya": "popular-psychology",
"pornografiya": "pornography",
"portret": "portrait",
"poslovitsy": "proverbs",
"post-pank": "post-punk",
"post-rok": "post-rock",
"postmodernism": "postmodernism",
"povest": "novells",
"povsednevnost": "everyday-life",
"power": "power",
"pravo": "right",
"pravoslavie": "orthodox",
"pravozaschitniki": "human-rights-activism",
"prazdnik": "holidays",
"predatelstvo": "betrayal",
"predprinimatelstvo": "entrepreneurship",
"premera": "premier",
"premiya-oskar": "oscar-prize",
"pribaltika-1": "baltic",
"priroda": "nature",
"prison": "prison",
"pritcha": "parable",
"privatnost": "privacy",
"progress": "progress",
"projects": "projects",
"prokrastinatsiya": "procrastination",
"propaganda": "propaganda",
"proschenie": "forgiveness",
"prose": "prose",
"proshloe": "past",
"prostitutsiya": "prostitution",
"prosveschenie": "enlightenment",
"protests": "protests",
"psalmy": "psalms",
"psihoanaliz": "psychoanalysis",
"psihodeliki": "psychodelics",
"pskov": "pskov",
"psychiatry": "psychiatry",
"psychology": "psychology",
"ptitsy": "birds",
"punk": "punk",
"r-b": "rnb",
"rasizm": "racism",
"realizm": "realism",
"redaktura": "editing",
"refleksiya": "reflection",
"reggi": "reggae",
"religion": "religion",
"rene-zhirar": "rene-girard",
"renesanss": "renessance",
"renovatsiya": "renovation",
"rep": "rap",
"reportage": "reportage",
"reportazh-1": "reportage",
"repressions": "repressions",
"research": "research",
"retroveyv": "retrowave",
"review": "review",
"revolution": "revolution",
"rezo-gabriadze": "rezo-gabriadze",
"risunki": "painting",
"roboty": "robots",
"rock": "rock",
"roditeli": "parents",
"romantizm": "romantism",
"romany": "novell",
"ronald-reygan": "ronald-reygan",
"roskomnadzor": "roskomnadzor",
"rossiyskoe-kino": "russian-cinema",
"rouling": "rowling",
"rozhava": "rojava",
"rpts": "rpts",
"rus-na-grani-sryva": "rus-na-grani-sryva",
"russia": "russia",
"russian-language": "russian-language",
"russian-literature": "russian-literature",
"russkaya-toska": "russian-toska",
"russkiy-mir": "russkiy-mir",
"salo": "lard",
"salvador-dali": "salvador-dali",
"samoidentifikatsiya": "self-identity",
"samoopredelenie": "self-definition",
"sankt-peterburg": "saint-petersburg",
"sasha-skochilenko": "sasha-skochilenko",
"satira": "satiric",
"saund-art": "sound-art",
"schaste": "happiness",
"school": "school",
"science": "science",
"sculpture": "sculpture",
"second-world-war": "second-world-war",
"sekond-hend": "second-hand",
"seksprosvet": "sex-education",
"seksualizirovannoe-nasilie": "sexualized-violence",
"seksualnoe-nasilie": "sexualized-violence",
"sekty": "sects",
"semi": "semi",
"semiotics": "semiotics",
"serbiya": "serbia",
"sergey-bodrov-mladshiy": "sergey-bodrov-junior",
"sergey-solov-v": "sergey-solovyov",
"serialy": "series",
"sever": "north",
"severnaya-koreya": "north-korea",
"sex": "sex",
"shotlandiya": "scotland",
"shugeyz": "shoegaze",
"siloviki": "siloviki",
"simeon-bekbulatovich": "simeon-bekbulatovich",
"simvolizm": "simbolism",
"siriya": "siria",
"skulptura": "sculpture",
"slavoy-zhizhek": "slavoj-zizek",
"smert-1": "death",
"smysl": "meaning",
"sny": "dreams",
"sobytiya": "events",
"social": "society",
"society": "society",
"sociology": "sociology",
"sofya-paleolog": "sofya-paleolog",
"sofya-vitovtovna": "sofya-vitovtovna",
"soobschestva": "communities",
"soprotivlenie": "resistence",
"sotsializm": "socialism",
"sotsialnaya-filosofiya": "social-philosophy",
"sotsiologiya-1": "sociology",
"sotsseti": "social-networks",
"sotvorenie-tretego-rima": "third-rome",
"sovremennost": "modernity",
"spaces": "spaces",
"spektakl": "spectacles",
"spetseffekty": "special-fx",
"spetsoperatsiya": "special-operation",
"spetssluzhby": "special-services",
"sport": "sport",
"srednevekove": "middle-age",
"state": "state",
"statistika": "statistics",
"stendap": "stand-up",
"stihi": "poetry",
"stoitsizm": "stoicism",
"stories": "stories",
"stoyanie-na-ugre": "stoyanie-na-ugre",
"strah": "fear",
"street-art": "street-art",
"stsenarii": "scenarios",
"sud": "court",
"summary": "summary",
"supergeroi": "superheroes",
"svetlana-aleksievich": "svetlana-aleksievich",
"svobodu-ivanu-golunovu": "free-ivan-golunov",
"syurrealizm": "surrealism",
"tales": "tales",
"tanets": "dance",
"tataro-mongolskoe-igo": "mongol-tatar-yoke",
"tatuirovki": "tattoo",
"technology": "technology",
"televidenie": "television",
"telo": "body",
"telo-kak-iskusstvo": "body-as-art",
"terrorizm": "terrorism",
"tests": "tests",
"text": "texts",
"the-beatles": "the-beatles",
"theater": "theater",
"theory": "theory",
"tokio": "tokio",
"torture": "torture",
"totalitarizm": "totalitarism",
"traditions": "traditions",
"tragicomedy": "tragicomedy",
"transgendernost": "transgender",
"translation": "translation",
"transport": "transport",
"travel": "travel",
"travma": "trauma",
"trendy": "trends",
"tretiy-reyh": "third-reich",
"triller": "thriller",
"tsar": "central-african-republic",
"tsar-edip": "oedipus",
"tsarevich-dmitriy": "tsarevich-dmitry",
"tsennosti": "values",
"tsenzura": "censorship",
"tseremonii": "ceremonies",
"turizm": "tourism",
"tvorchestvo": "creativity",
"ugnetennyy-zhilischnyy-klass": "oppressed-housing-class",
"uilyam-shekspir": "william-shakespeare",
"ukraina-2": "ukraine",
"ukraine": "ukraine",
"university": "university",
"urban-studies": "urban-studies",
"uroki-literatury": "literature-lessons",
"usa": "usa",
"ussr": "ussr",
"utopiya": "utopia",
"utrata": "loss",
"valter-benyamin": "valter-benyamin",
"varlam-shalamov": "varlam-shalamov",
"vasiliy-ii-temnyy": "basil-ii-temnyy",
"vasiliy-iii": "basil-iii",
"vdnh": "vdnh",
"vechnost": "ethernety",
"velikobritaniya": "great-britain",
"velimir-hlebnikov": "velimir-hlebnikov",
"velkom-tu-greyt-britn": "welcome-to-great-britain",
"venedikt-erofeev": "venedikt-erofeev",
"venetsiya": "veneece",
"vengriya": "hungary",
"verlibry": "free-verse",
"veschi": "things",
"vessels": "vessels",
"veterany": "veterans",
"video": "video",
"videoart": "videoart",
"videoklip": "clips",
"videopoeziya": "video-poetry",
"viktor-astafev": "viktor-astafev",
"viktor-pelevin": "viktor-pelevin",
"vilgelm-rayh": "wilhelm-reich",
"vinzavod": "vinzavod",
"violence": "violence",
"visual-culture": "visual-culture",
"vizualnaya-poeziya": "visual-poetry",
"vladimir-lenin": "vladimir-lenin",
"vladimir-mayakovskiy": "vladimir-mayakovsky",
"vladimir-nabokov": "vladimir-nabokov",
"vladimir-putin": "vladimir-putin",
"vladimir-sorokin": "vladimir-sorokin",
"vladimir-voynovich": "vladimir-voynovich",
"vnutrenniy-opyt": "inner-expirience",
"volga": "volga",
"volontery": "volonteurs",
"vong-karvay": "wong-karwai",
"vospominaniya": "memories",
"vostok": "east",
"voyna-na-ukraine": "war-in-ukraine",
"voyna-v-ukraine": "war-in-ukraine",
"vremya": "time",
"vudi-allen": "woody-allen",
"vynuzhdennye-otnosheniya": "forced-relationship",
"war": "war",
"war-in-ukraine-images": "war-in-ukrahine-images",
"women": "women",
"work": "work",
"writers": "writers",
"xx-century": "xx-century",
"yakob-yordans": "yakob-yordans",
"yan-vermeer": "yan-vermeer",
"yanka-dyagileva": "yanka-dyagileva",
"yaponskaya-literatura": "japan-literature",
"yazychestvo": "paganism",
"youth": "youth",
"yozef-rot": "yozef-rot",
"yurgen-habermas": "jorgen-habermas",
"za-liniey-mannergeyma": "behind-mannerheim-line",
"zabota": "care",
"zahar-prilepin": "zahar-prilepin",
"zakonodatelstvo": "laws",
"zakony-mira": "world-laws",
"zametki": "notes",
"zhelanie": "wish",
"zhivotnye": "animals",
"zhoze-saramago": "jose-saramago",
"zigmund-freyd": "sigmund-freud",
"zolotaya-orda": "golden-horde",
"zombi": "zombie",
"zombi-simpsony": "zombie-simpsons"
}

View File

@@ -1,32 +0,0 @@
from base.orm import local_session
from migration.extract import extract_md
from migration.html2text import html2text
from orm import Topic


def migrate(entry):
    body_orig = entry.get("description", "").replace("&nbsp;", " ")
    topic_dict = {
        "slug": entry["slug"],
        "oid": entry["_id"],
        "title": entry["title"].replace("&nbsp;", " "),
        "body": extract_md(html2text(body_orig))
    }
    with local_session() as session:
        slug = topic_dict["slug"]
        topic = session.query(Topic).filter(Topic.slug == slug).first() or Topic.create(
            **topic_dict
        )
        if not topic:
            raise Exception("no topic!")
        if topic:
            if len(topic.title) > len(topic_dict["title"]):
                Topic.update(topic, {"title": topic_dict["title"]})
            if len(topic.body) < len(topic_dict["body"]):
                Topic.update(topic, {"body": topic_dict["body"]})
            session.commit()
        # print(topic.__dict__)
        rt = topic.__dict__.copy()
        del rt["_sa_instance_state"]
        return rt
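A minimal driver sketch for this deleted migration step, assuming the topic entries were exported from the old MongoDB as a JSON list (the module path and dump location below are assumptions, not confirmed by the repo):

    import json

    from migration.tables.topics import migrate  # module path assumed from the repo layout

    with open('dump/discours/topics.json') as f:  # hypothetical export location
        entries = json.load(f)

    for entry in entries:
        # each entry carries the old Mongo fields used above: _id, slug, title, description
        topic = migrate(entry)
        print(topic['slug'])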

View File

@@ -1,163 +0,0 @@
import re

from bs4 import BeautifulSoup
from dateutil.parser import parse
from sqlalchemy.exc import IntegrityError

from base.orm import local_session
from orm.user import AuthorFollower, User, UserRating


def migrate(entry):
    if "subscribedTo" in entry:
        del entry["subscribedTo"]
    email = entry["emails"][0]["address"]
    user_dict = {
        "oid": entry["_id"],
        "roles": [],
        "ratings": [],
        "username": email,
        "email": email,
        "createdAt": parse(entry["createdAt"]),
        "emailConfirmed": ("@discours.io" in email) or bool(entry["emails"][0]["verified"]),
        "muted": False,  # amnesty
        "links": [],
        "name": "anonymous",
        "password": entry["services"]["password"].get("bcrypt")
    }
    if "updatedAt" in entry:
        user_dict["updatedAt"] = parse(entry["updatedAt"])
    if "wasOnlineAt" in entry:
        user_dict["lastSeen"] = parse(entry["wasOnlineAt"])
    if entry.get("profile"):
        # slug
        slug = entry["profile"].get("path").lower()
        slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip()
        user_dict["slug"] = slug
        bio = (entry.get("profile", {"bio": ""}).get("bio") or "").replace('\(', '(').replace('\)', ')')
        bio_text = BeautifulSoup(bio, features="lxml").text
        if len(bio_text) > 120:
            user_dict["about"] = bio_text
        else:
            user_dict["bio"] = bio_text
        # userpic
        try:
            user_dict["userpic"] = (
                "https://assets.discours.io/unsafe/100x/"
                + entry["profile"]["thumborId"]
            )
        except KeyError:
            try:
                user_dict["userpic"] = entry["profile"]["image"]["url"]
            except KeyError:
                user_dict["userpic"] = ""
        # name
        fn = entry["profile"].get("firstName", "")
        ln = entry["profile"].get("lastName", "")
        name = fn if fn else ""
        name = (name + " " + ln) if ln else name
        if not name:
            name = slug if slug else "anonymous"
        name = (
            entry["profile"]["path"].lower().strip().replace(" ", "-")
            if len(name) < 2
            else name
        )
        user_dict["name"] = name
        # links
        fb = entry["profile"].get("facebook", False)
        if fb:
            user_dict["links"].append(fb)
        vk = entry["profile"].get("vkontakte", False)
        if vk:
            user_dict["links"].append(vk)
        tr = entry["profile"].get("twitter", False)
        if tr:
            user_dict["links"].append(tr)
        ws = entry["profile"].get("website", False)
        if ws:
            user_dict["links"].append(ws)
    # some checks
    if not user_dict["slug"] and len(user_dict["links"]) > 0:
        user_dict["slug"] = user_dict["links"][0].split("/")[-1]
    user_dict["slug"] = user_dict.get("slug", user_dict["email"].split("@")[0])
    oid = user_dict["oid"]
    user_dict["slug"] = user_dict["slug"].lower().strip().replace(" ", "-")
    try:
        user = User.create(**user_dict.copy())
    except IntegrityError:
        print("[migration] cannot create user " + user_dict["slug"])
        with local_session() as session:
            old_user = (
                session.query(User).filter(User.slug == user_dict["slug"]).first()
            )
            old_user.oid = oid
            old_user.password = user_dict["password"]
            session.commit()
            user = old_user
        if not user:
            print("[migration] ERROR: cannot find user " + user_dict["slug"])
            raise Exception
    user_dict["id"] = user.id
    return user_dict


def post_migrate():
    old_discours_dict = {
        "slug": "old-discours",
        "username": "old-discours",
        "email": "old@discours.io",
        "name": "Просмотры на старой версии сайта"
    }
    with local_session() as session:
        old_discours_user = User.create(**old_discours_dict)
        session.add(old_discours_user)
        session.commit()


def migrate_2stage(entry, id_map):
    ce = 0
    for rating_entry in entry.get("ratings", []):
        rater_oid = rating_entry["createdBy"]
        rater_slug = id_map.get(rater_oid)
        if not rater_slug:
            ce += 1
            # print(rating_entry)
            continue
        oid = entry["_id"]
        author_slug = id_map.get(oid)
        with local_session() as session:
            try:
                rater = session.query(User).where(User.slug == rater_slug).one()
                user = session.query(User).where(User.slug == author_slug).one()
                user_rating_dict = {
                    "value": rating_entry["value"],
                    "rater": rater.id,
                    "user": user.id,
                }
                user_rating = UserRating.create(**user_rating_dict)
                if user_rating_dict['value'] > 0:
                    af = AuthorFollower.create(
                        author=user.id,
                        follower=rater.id,
                        auto=True
                    )
                    session.add(af)
                session.add(user_rating)
                session.commit()
            except IntegrityError:
                print("[migration] cannot rate " + author_slug + "`s by " + rater_slug)
            except Exception as e:
                print(e)
    return ce
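A hedged sketch of how the two passes above were meant to fit together: the first pass creates users and collects the old-id-to-slug map, the second replays ratings once every rater exists (module path and dump location are assumptions):

    import json

    from migration.tables.users import migrate, migrate_2stage  # module path assumed

    with open('dump/discours/users.json') as f:  # hypothetical export location
        entries = json.load(f)

    id_map = {}
    for entry in entries:  # pass 1: create users, remember old Mongo id -> new slug
        user = migrate(entry)
        id_map[user['oid']] = user['slug']

    # pass 2: ratings and auto-follows can only be linked once both sides exist
    skipped = sum(migrate_2stage(entry, id_map) for entry in entries)
    print('[migration] %d ratings skipped (unknown rater)' % skipped)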

View File

@@ -1,10 +0,0 @@
from datetime import datetime
from json import JSONEncoder


class DateTimeEncoder(JSONEncoder):
    def default(self, z):
        if isinstance(z, datetime):
            return str(z)
        else:
            return super().default(z)
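For reference, a small usage sketch: this encoder lets json.dumps serialize datetime values that the stock encoder rejects (the import path below is an assumption):

    import json
    from datetime import datetime

    from migration.utils import DateTimeEncoder  # import path assumed

    payload = {'slug': 'genesis-block', 'createdAt': datetime(2024, 4, 9, 21, 6)}
    print(json.dumps(payload, cls=DateTimeEncoder))
    # -> {"slug": "genesis-block", "createdAt": "2024-04-09 21:06:00"}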

View File

@@ -1,145 +1,75 @@
{{ range $port_map := .PROXY_PORT_MAP | split " " }}
{{ $port_map_list := $port_map | split ":" }}
{{ $scheme := index $port_map_list 0 }}
{{ $listen_port := index $port_map_list 1 }}
{{ $upstream_port := index $port_map_list 2 }}
{{ $proxy_settings := "proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection $http_connection; proxy_set_header Host $http_host; proxy_set_header X-Request-Start $msec;" }}
{{ $gzip_settings := "gzip on; gzip_min_length 1100; gzip_buffers 4 32k; gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/x-javascript application/json application/xml application/rss+xml font/truetype application/x-font-ttf font/opentype application/vnd.ms-fontobject image/svg+xml; gzip_vary on; gzip_comp_level 6;" }}
{{ $cors_headers_options := "if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization'; add_header 'Access-Control-Allow-Credentials' 'true'; add_header 'Access-Control-Max-Age' 1728000; add_header 'Content-Type' 'text/plain; charset=utf-8'; add_header 'Content-Length' 0; return 204; }" }}
{{ $cors_headers_post := "if ($request_method = 'POST') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}
{{ $cors_headers_get := "if ($request_method = 'GET') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}
map $http_origin $allow_origin {
~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io)$ $http_origin;
default "";
}
{{ if eq $scheme "http" }}
proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=my_cache:10m max_size=1g
inactive=60m use_temp_path=off;
limit_conn_zone $binary_remote_addr zone=addr:10m;
{{ range $port_map := .PROXY_PORT_MAP | split " " }}
{{ $port_map_list := $port_map | split ":" }}
{{ $scheme := index $port_map_list 0 }}
{{ $listen_port := index $port_map_list 1 }}
{{ $upstream_port := index $port_map_list 2 }}
server {
listen [{{ $.NGINX_BIND_ADDRESS_IP6 }}]:{{ $listen_port }};
listen {{ if $.NGINX_BIND_ADDRESS_IP4 }}{{ $.NGINX_BIND_ADDRESS_IP4 }}:{{end}}{{ $listen_port }};
{{ if $.NOSSL_SERVER_NAME }}server_name {{ $.NOSSL_SERVER_NAME }}; {{ end }}
access_log {{ $.NGINX_ACCESS_LOG_PATH }}{{ if and ($.NGINX_ACCESS_LOG_FORMAT) (ne $.NGINX_ACCESS_LOG_PATH "off") }} {{ $.NGINX_ACCESS_LOG_FORMAT }}{{ end }};
error_log {{ $.NGINX_ERROR_LOG_PATH }};
{{ if (and (eq $listen_port "80") ($.SSL_INUSE)) }}
include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
location / {
return 301 https://$host:{{ $.PROXY_SSL_PORT }}$request_uri;
}
{{ else }}
location / {
gzip on;
gzip_min_length 1100;
gzip_buffers 4 32k;
gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/x-javascript application/json application/xml application/rss+xml font/truetype application/x-font-ttf font/opentype application/vnd.ms-fontobject image/svg+xml;
gzip_vary on;
gzip_comp_level 6;
proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
proxy_http_version 1.1;
proxy_read_timeout {{ $.PROXY_READ_TIMEOUT }};
proxy_buffer_size {{ $.PROXY_BUFFER_SIZE }};
proxy_buffering {{ $.PROXY_BUFFERING }};
proxy_buffers {{ $.PROXY_BUFFERS }};
proxy_busy_buffers_size {{ $.PROXY_BUSY_BUFFERS_SIZE }};
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For {{ $.PROXY_X_FORWARDED_FOR }};
proxy_set_header X-Forwarded-Port {{ $.PROXY_X_FORWARDED_PORT }};
proxy_set_header X-Forwarded-Proto {{ $.PROXY_X_FORWARDED_PROTO }};
proxy_set_header X-Request-Start $msec;
{{ if $.PROXY_X_FORWARDED_SSL }}proxy_set_header X-Forwarded-Ssl {{ $.PROXY_X_FORWARDED_SSL }};{{ end }}
}
{{ if $.CLIENT_MAX_BODY_SIZE }}client_max_body_size {{ $.CLIENT_MAX_BODY_SIZE }};{{ end }}
include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
error_page 400 401 402 403 405 406 407 408 409 410 411 412 413 414 415 416 417 418 420 422 423 424 426 428 429 431 444 449 450 451 /400-error.html;
location /400-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
internal;
}
error_page 404 /404-error.html;
location /404-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
internal;
}
error_page 500 501 502 503 504 505 506 507 508 509 510 511 /500-error.html;
location /500-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
internal;
}
{{ end }}
}
{{ else if eq $scheme "https"}}
server {
listen [{{ $.NGINX_BIND_ADDRESS_IP6 }}]:{{ $listen_port }} ssl {{ if eq $.HTTP2_SUPPORTED "true" }}http2{{ else if eq $.SPDY_SUPPORTED "true" }}spdy{{ end }};
listen {{ if $.NGINX_BIND_ADDRESS_IP4 }}{{ $.NGINX_BIND_ADDRESS_IP4 }}:{{end}}{{ $listen_port }} ssl {{ if eq $.HTTP2_SUPPORTED "true" }}http2{{ else if eq $.SPDY_SUPPORTED "true" }}spdy{{ end }};
{{ if $.SSL_SERVER_NAME }}server_name {{ $.SSL_SERVER_NAME }}; {{ end }}
{{ if $.NOSSL_SERVER_NAME }}server_name {{ $.NOSSL_SERVER_NAME }}; {{ end }}
access_log {{ $.NGINX_ACCESS_LOG_PATH }}{{ if and ($.NGINX_ACCESS_LOG_FORMAT) (ne $.NGINX_ACCESS_LOG_PATH "off") }} {{ $.NGINX_ACCESS_LOG_FORMAT }}{{ end }};
error_log {{ $.NGINX_ERROR_LOG_PATH }};
{{ if eq $scheme "http" }}
listen [::]:{{ $listen_port }};
listen {{ $listen_port }};
server_name {{ $.NOSSL_SERVER_NAME }};
access_log /var/log/nginx/{{ $.APP }}-access.log;
error_log /var/log/nginx/{{ $.APP }}-error.log;
{{ else if eq $scheme "https" }}
listen [::]:{{ $listen_port }} ssl http2;
listen {{ $listen_port }} ssl http2;
server_name {{ $.NOSSL_SERVER_NAME }};
access_log /var/log/nginx/{{ $.APP }}-access.log;
error_log /var/log/nginx/{{ $.APP }}-error.log;
ssl_certificate {{ $.APP_SSL_PATH }}/server.crt;
ssl_certificate_key {{ $.APP_SSL_PATH }}/server.key;
ssl_protocols TLSv1.2 {{ if eq $.TLS13_SUPPORTED "true" }}TLSv1.3{{ end }};
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
keepalive_timeout 70;
{{ if and (eq $.SPDY_SUPPORTED "true") (ne $.HTTP2_SUPPORTED "true") }}add_header Alternate-Protocol {{ $.PROXY_SSL_PORT }}:npn-spdy/2;{{ end }}
keepalive_requests 500;
proxy_read_timeout 3600;
limit_conn addr 1000;
{{ end }}
location / {
gzip on;
gzip_min_length 1100;
gzip_buffers 4 32k;
gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/x-javascript application/json application/xml application/rss+xml font/truetype application/x-font-ttf font/opentype application/vnd.ms-fontobject image/svg+xml;
gzip_vary on;
gzip_comp_level 6;
proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
{{ if eq $.HTTP2_PUSH_SUPPORTED "true" }}http2_push_preload on; {{ end }}
proxy_http_version 1.1;
proxy_read_timeout {{ $.PROXY_READ_TIMEOUT }};
proxy_buffer_size {{ $.PROXY_BUFFER_SIZE }};
proxy_buffering {{ $.PROXY_BUFFERING }};
proxy_buffers {{ $.PROXY_BUFFERS }};
proxy_busy_buffers_size {{ $.PROXY_BUSY_BUFFERS_SIZE }};
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For {{ $.PROXY_X_FORWARDED_FOR }};
proxy_set_header X-Forwarded-Port {{ $.PROXY_X_FORWARDED_PORT }};
proxy_set_header X-Forwarded-Proto {{ $.PROXY_X_FORWARDED_PROTO }};
proxy_set_header X-Request-Start $msec;
{{ if $.PROXY_X_FORWARDED_SSL }}proxy_set_header X-Forwarded-Ssl {{ $.PROXY_X_FORWARDED_SSL }};{{ end }}
{{ $proxy_settings }}
{{ $gzip_settings }}
{{ $cors_headers_options }}
{{ $cors_headers_post }}
{{ $cors_headers_get }}
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '$allow_origin' always;
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
#
# Custom headers and headers various browsers *should* be OK with but aren't
#
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
add_header 'Access-Control-Allow-Credentials' 'true';
#
# Tell client that this pre-flight info is valid for 20 days
#
add_header 'Access-Control-Max-Age' 1728000;
add_header 'Content-Type' 'text/plain; charset=utf-8';
add_header 'Content-Length' 0;
return 204;
proxy_cache my_cache;
proxy_cache_revalidate on;
proxy_cache_min_uses 2;
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
proxy_cache_background_update on;
proxy_cache_lock on;
}
if ($request_method = 'POST') {
add_header 'Access-Control-Allow-Origin' '$allow_origin' always;
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always;
add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always;
add_header 'Access-Control-Allow-Credentials' 'true' always;
location ~* \.(jpg|jpeg|png|gif|ico|css|js)$ {
expires 30d; # This means that the client can cache these resources for 30 days.
add_header Cache-Control "public, no-transform";
}
location ~* \.(mp3)$ {
if ($request_method = 'GET') {
add_header 'Access-Control-Allow-Origin' '$allow_origin' always;
add_header 'Access-Control-Allow-Origin' $allow_origin always;
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always;
add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always;
@@ -147,80 +77,43 @@ server {
}
}
{{ if $.CLIENT_MAX_BODY_SIZE }}client_max_body_size {{ $.CLIENT_MAX_BODY_SIZE }};{{ end }}
include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
error_page 400 401 402 403 405 406 407 408 409 410 411 412 413 414 415 416 417 418 420 422 423 424 426 428 429 431 444 449 450 451 /400-error.html;
location /400-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
root /var/lib/dokku/data/nginx-vhosts/dokku-errors;
internal;
}
error_page 404 /404-error.html;
location /404-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
root /var/lib/dokku/data/nginx-vhosts/dokku-errors;
internal;
}
error_page 500 501 503 504 505 506 507 508 509 510 511 /500-error.html;
location /500-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
root /var/lib/dokku/data/nginx-vhosts/dokku-errors;
internal;
}
error_page 502 /502-error.html;
location /502-error.html {
root {{ $.DOKKU_LIB_ROOT }}/data/nginx-vhosts/dokku-errors;
root /var/lib/dokku/data/nginx-vhosts/dokku-errors;
internal;
}
}
{{ else if eq $scheme "grpc"}}
{{ if eq $.GRPC_SUPPORTED "true"}}{{ if eq $.HTTP2_SUPPORTED "true"}}
server {
listen [{{ $.NGINX_BIND_ADDRESS_IP6 }}]:{{ $listen_port }} http2;
listen {{ if $.NGINX_BIND_ADDRESS_IP4 }}{{ $.NGINX_BIND_ADDRESS_IP4 }}:{{end}}{{ $listen_port }} http2;
{{ if $.NOSSL_SERVER_NAME }}server_name {{ $.NOSSL_SERVER_NAME }}; {{ end }}
access_log {{ $.NGINX_ACCESS_LOG_PATH }}{{ if and ($.NGINX_ACCESS_LOG_FORMAT) (ne $.NGINX_ACCESS_LOG_PATH "off") }} {{ $.NGINX_ACCESS_LOG_FORMAT }}{{ end }};
error_log {{ $.NGINX_ERROR_LOG_PATH }};
location / {
grpc_pass grpc://{{ $.APP }}-{{ $upstream_port }};
}
{{ if $.CLIENT_MAX_BODY_SIZE }}client_max_body_size {{ $.CLIENT_MAX_BODY_SIZE }};{{ end }}
include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
}
{{ end }}{{ end }}
{{ else if eq $scheme "grpcs"}}
{{ if eq $.GRPC_SUPPORTED "true"}}{{ if eq $.HTTP2_SUPPORTED "true"}}
server {
listen [{{ $.NGINX_BIND_ADDRESS_IP6 }}]:{{ $listen_port }} ssl http2;
listen {{ if $.NGINX_BIND_ADDRESS_IP4 }}{{ $.NGINX_BIND_ADDRESS_IP4 }}:{{end}}{{ $listen_port }} ssl http2;
{{ if $.NOSSL_SERVER_NAME }}server_name {{ $.NOSSL_SERVER_NAME }}; {{ end }}
access_log {{ $.NGINX_ACCESS_LOG_PATH }}{{ if and ($.NGINX_ACCESS_LOG_FORMAT) (ne $.NGINX_ACCESS_LOG_PATH "off") }} {{ $.NGINX_ACCESS_LOG_FORMAT }}{{ end }};
error_log {{ $.NGINX_ERROR_LOG_PATH }};
ssl_certificate {{ $.APP_SSL_PATH }}/server.crt;
ssl_certificate_key {{ $.APP_SSL_PATH }}/server.key;
ssl_protocols TLSv1.2 {{ if eq $.TLS13_SUPPORTED "true" }}TLSv1.3{{ end }};
ssl_prefer_server_ciphers off;
location / {
grpc_pass grpc://{{ $.APP }}-{{ $upstream_port }};
}
{{ if $.CLIENT_MAX_BODY_SIZE }}client_max_body_size {{ $.CLIENT_MAX_BODY_SIZE }};{{ end }}
include {{ $.DOKKU_ROOT }}/{{ $.APP }}/nginx.conf.d/*.conf;
}
{{ end }}{{ end }}
{{ end }}
{{ end }}
{{ if $.DOKKU_APP_WEB_LISTENERS }}
{{ range $upstream_port := $.PROXY_UPSTREAM_PORTS | split " " }}
upstream {{ $.APP }}-{{ $upstream_port }} {
{{ range $listeners := $.DOKKU_APP_WEB_LISTENERS | split " " }}
{{ $listener_list := $listeners | split ":" }}
{{ $listener_ip := index $listener_list 0 }}
server {{ $listener_ip }}:{{ $upstream_port }};{{ end }}
{{ $listener_port := index $listener_list 1 }}
server {{ $listener_ip }}:{{ $upstream_port }};
{{ end }}
}
{{ end }}{{ end }}
{{ end }}

View File

@@ -1,36 +0,0 @@
from base.orm import Base, engine
from orm.community import Community
from orm.notification import Notification
from orm.rbac import Operation, Resource, Permission, Role
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic, TopicFollower
from orm.user import User, UserRating


def init_tables():
    Base.metadata.create_all(engine)
    Operation.init_table()
    Resource.init_table()
    User.init_table()
    Community.init_table()
    Role.init_table()
    UserRating.init_table()
    Shout.init_table()
    print("[orm] tables initialized")


__all__ = [
    "User",
    "Role",
    "Operation",
    "Permission",
    "Community",
    "Shout",
    "Topic",
    "TopicFollower",
    "Notification",
    "Reaction",
    "UserRating",
    "init_tables"
]

46
orm/author.py Normal file
View File

@@ -0,0 +1,46 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
from sqlalchemy_utils import TSVectorType

from services.db import Base


class AuthorRating(Base):
    __tablename__ = 'author_rating'

    id = None  # type: ignore
    rater = Column(ForeignKey('author.id'), primary_key=True)
    author = Column(ForeignKey('author.id'), primary_key=True)
    plus = Column(Boolean)


class AuthorFollower(Base):
    __tablename__ = 'author_follower'

    id = None  # type: ignore
    follower = Column(ForeignKey('author.id'), primary_key=True)
    author = Column(ForeignKey('author.id'), primary_key=True)
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    auto = Column(Boolean, nullable=False, default=False)


class Author(Base):
    __tablename__ = 'author'

    user = Column(String)  # unbounded link with authorizer's User type
    name = Column(String, nullable=True, comment='Display name')
    slug = Column(String, unique=True, comment="Author's slug")
    bio = Column(String, nullable=True, comment='Bio')  # status description
    about = Column(String, nullable=True, comment='About')  # long and formatted
    pic = Column(String, nullable=True, comment='Picture')
    links = Column(JSON, nullable=True, comment='Links')
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    deleted_at = Column(Integer, nullable=True, comment='Deleted at')
    search_vector = Column(
        TSVectorType('name', 'slug', 'bio', 'about', regconfig='pg_catalog.russian')
    )
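A minimal sketch of working with the new Author model, assuming services.db also exposes a local_session context manager like the one the legacy base.orm provided (that helper name is an assumption):

    from orm.author import Author
    from services.db import local_session  # assumed: a context manager yielding a Session

    with local_session() as session:
        a = Author(user='authorizer-user-uuid', name='Test Author', slug='test-author')
        session.add(a)
        session.commit()
        # created_at / last_seen / updated_at are unix timestamps filled by the lambda defaults
        print(a.slug, a.created_at)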

View File

@@ -1,25 +1,25 @@
-from datetime import datetime
+import time

-from sqlalchemy import Column, DateTime, ForeignKey, String
-from base.orm import Base
+from sqlalchemy import Column, ForeignKey, Integer, String
+
+from services.db import Base


 class ShoutCollection(Base):
-    __tablename__ = "shout_collection"
+    __tablename__ = 'shout_collection'

     id = None  # type: ignore
-    shout = Column(ForeignKey("shout.id"), primary_key=True)
-    collection = Column(ForeignKey("collection.id"), primary_key=True)
+    shout = Column(ForeignKey('shout.id'), primary_key=True)
+    collection = Column(ForeignKey('collection.id'), primary_key=True)


 class Collection(Base):
-    __tablename__ = "collection"
+    __tablename__ = 'collection'

     slug = Column(String, unique=True)
-    title = Column(String, nullable=False, comment="Title")
-    body = Column(String, nullable=True, comment="Body")
-    pic = Column(String, nullable=True, comment="Picture")
-    createdAt = Column(DateTime, default=datetime.now, comment="Created At")
-    createdBy = Column(ForeignKey("user.id"), comment="Created By")
-    publishedAt = Column(DateTime, default=datetime.now, comment="Published At")
+    title = Column(String, nullable=False, comment='Title')
+    body = Column(String, nullable=True, comment='Body')
+    pic = Column(String, nullable=True, comment='Picture')
+    created_at = Column(Integer, default=lambda: int(time.time()))
+    created_by = Column(ForeignKey('author.id'), comment='Created By')
+    published_at = Column(Integer, default=lambda: int(time.time()))

View File

@@ -1,41 +1,29 @@
-from datetime import datetime
+import time

-from sqlalchemy import Column, String, ForeignKey, DateTime
-from base.orm import Base, local_session
+from sqlalchemy import Column, ForeignKey, Integer, String
+from sqlalchemy.orm import relationship
+
+from orm.author import Author
+from services.db import Base


-class CommunityFollower(Base):
-    __tablename__ = "community_followers"
+class CommunityAuthor(Base):
+    __tablename__ = 'community_author'

     id = None  # type: ignore
-    follower = Column(ForeignKey("user.id"), primary_key=True)
-    community = Column(ForeignKey("community.id"), primary_key=True)
-    joinedAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
-    # role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")
+    author = Column(ForeignKey('author.id'), primary_key=True)
+    community = Column(ForeignKey('community.id'), primary_key=True)
+    joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
+    role = Column(String, nullable=False)


 class Community(Base):
-    __tablename__ = "community"
+    __tablename__ = 'community'

-    name = Column(String, nullable=False, comment="Name")
-    slug = Column(String, nullable=False, unique=True, comment="Slug")
-    desc = Column(String, nullable=False, default="")
-    pic = Column(String, nullable=False, default="")
-    createdAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
+    name = Column(String, nullable=False)
+    slug = Column(String, nullable=False, unique=True)
+    desc = Column(String, nullable=False, default='')
+    pic = Column(String, nullable=False, default='')
+    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))

-    @staticmethod
-    def init_table():
-        with local_session() as session:
-            d = (
-                session.query(Community).filter(Community.slug == "discours").first()
-            )
-            if not d:
-                d = Community.create(name="Дискурс", slug="discours")
-                session.add(d)
-                session.commit()
-            Community.default_community = d
-            print('[orm] default community id: %s' % d.id)
+    authors = relationship(Author, secondary='community_author')

25
orm/invite.py Normal file
View File

@@ -0,0 +1,25 @@
from enum import Enum as Enumeration

from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import relationship

from services.db import Base


class InviteStatus(Enumeration):
    PENDING = 'PENDING'
    ACCEPTED = 'ACCEPTED'
    REJECTED = 'REJECTED'


class Invite(Base):
    __tablename__ = 'invite'

    inviter_id = Column(ForeignKey('author.id'), primary_key=True)
    author_id = Column(ForeignKey('author.id'), primary_key=True)
    shout_id = Column(ForeignKey('shout.id'), primary_key=True)
    status = Column(String, default=InviteStatus.PENDING.value)

    inviter = relationship('author', foreign_keys=[inviter_id])
    author = relationship('author', foreign_keys=[author_id])
    shout = relationship('shout')
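A hedged sketch of the co-author invite lifecycle these composite-key rows encode, one row per (inviter, author, shout) triple moving through the InviteStatus values; local_session is an assumed helper and this is not the project's actual resolver:

    from orm.invite import Invite, InviteStatus
    from services.db import local_session  # assumed session helper

    def accept_invite(inviter_id: int, author_id: int, shout_id: int) -> bool:
        # flip a pending invite to ACCEPTED; returns False if no pending invite exists
        with local_session() as session:
            invite = (
                session.query(Invite)
                .filter_by(inviter_id=inviter_id, author_id=author_id, shout_id=shout_id)
                .first()
            )
            if invite and invite.status == InviteStatus.PENDING.value:
                invite.status = InviteStatus.ACCEPTED.value
                session.commit()
                return True
            return False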

View File

@@ -1,24 +1,51 @@
-from datetime import datetime
-from sqlalchemy import Column, Enum, ForeignKey, DateTime, Boolean, Integer
-from sqlalchemy.dialects.postgresql import JSONB
-from base.orm import Base
+import time
+from enum import Enum as Enumeration
+
+from sqlalchemy import JSON, Column, ForeignKey, Integer, String
+from sqlalchemy.orm import relationship
+from sqlalchemy.exc import ProgrammingError

-class NotificationType(Enumeration):
-    NEW_COMMENT = 1
-    NEW_REPLY = 2
+from orm.author import Author
+from services.db import Base, engine
+from services.logger import root_logger as logger


+class NotificationEntity(Enumeration):
+    REACTION = 'reaction'
+    SHOUT = 'shout'
+    FOLLOWER = 'follower'
+
+
+class NotificationAction(Enumeration):
+    CREATE = 'create'
+    UPDATE = 'update'
+    DELETE = 'delete'
+    SEEN = 'seen'
+    FOLLOW = 'follow'
+    UNFOLLOW = 'unfollow'
+
+
+class NotificationSeen(Base):
+    __tablename__ = 'notification_seen'
+
+    viewer = Column(ForeignKey('author.id'))
+    notification = Column(ForeignKey('notification.id'))


 class Notification(Base):
-    __tablename__ = "notification"
+    __tablename__ = 'notification'

-    shout = Column(ForeignKey("shout.id"), index=True)
-    reaction = Column(ForeignKey("reaction.id"), index=True)
-    user = Column(ForeignKey("user.id"), index=True)
-    createdAt = Column(DateTime, nullable=False, default=datetime.now, index=True)
-    seen = Column(Boolean, nullable=False, default=False, index=True)
-    type = Column(Enum(NotificationType), nullable=False)
-    data = Column(JSONB, nullable=True)
-    occurrences = Column(Integer, default=1)
+    created_at = Column(Integer, server_default=str(int(time.time())))
+    entity = Column(String, nullable=False)
+    action = Column(String, nullable=False)
+    payload = Column(JSON, nullable=True)
+
+    seen = relationship(lambda: Author, secondary='notification_seen')
+
+
+try:
+    Notification.__table__.create(engine)
+    logger.info("Table `notification` was created.")
+except ProgrammingError:
+    # Handle the exception here, for example by printing a message
+    logger.info("Table `notification` already exists.")
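A short sketch of emitting a notification under the new schema: entity and action are stored as plain strings taken from the enums, with the structured details in payload (local_session is an assumed helper):

    from orm.notification import Notification, NotificationAction, NotificationEntity
    from services.db import local_session  # assumed session helper

    with local_session() as session:
        session.add(Notification(
            entity=NotificationEntity.REACTION.value,  # stored as the plain string 'reaction'
            action=NotificationAction.CREATE.value,    # 'create'
            payload={'shout': 1, 'reaction': 42},      # free-form JSON details
        ))
        session.commit()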

37
orm/rating.py Normal file
View File

@@ -0,0 +1,37 @@
from orm.reaction import ReactionKind

PROPOSAL_REACTIONS = [
    ReactionKind.ACCEPT.value,
    ReactionKind.REJECT.value,
    ReactionKind.AGREE.value,
    ReactionKind.DISAGREE.value,
    ReactionKind.ASK.value,
    ReactionKind.PROPOSE.value,
]

PROOF_REACTIONS = [
    ReactionKind.PROOF.value,
    ReactionKind.DISPROOF.value
]

RATING_REACTIONS = [
    ReactionKind.LIKE.value,
    ReactionKind.DISLIKE.value
]


def is_negative(x):
    return x in [
        ReactionKind.DISLIKE.value,
        ReactionKind.DISPROOF.value,
        ReactionKind.REJECT.value,
    ]


def is_positive(x):
    return x in [
        ReactionKind.ACCEPT.value,
        ReactionKind.LIKE.value,
        ReactionKind.PROOF.value,
    ]
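These predicates reduce rating arithmetic to a fold over reaction kinds; a minimal pure-Python sketch (the helper below is illustrative, not part of the module):

    from orm.rating import is_negative, is_positive

    def rating_delta(kinds):
        # +1 per positive kind, -1 per negative, 0 for neutral kinds like COMMENT
        return sum(1 if is_positive(k) else -1 if is_negative(k) else 0 for k in kinds)

    print(rating_delta(['LIKE', 'LIKE', 'DISLIKE', 'COMMENT']))  # -> 1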

View File

@@ -1,173 +0,0 @@
import warnings

from sqlalchemy import String, Column, ForeignKey, UniqueConstraint, TypeDecorator
from sqlalchemy.orm import relationship

from base.orm import Base, REGISTRY, engine, local_session

# Role Based Access Control #


class ClassType(TypeDecorator):
    impl = String

    @property
    def python_type(self):
        return NotImplemented

    def process_literal_param(self, value, dialect):
        return NotImplemented

    def process_bind_param(self, value, dialect):
        return value.__name__ if isinstance(value, type) else str(value)

    def process_result_value(self, value, dialect):
        class_ = REGISTRY.get(value)
        if class_ is None:
            warnings.warn(f"Can't find class <{value}>,find it yourself!", stacklevel=2)
        return class_


class Role(Base):
    __tablename__ = "role"

    name = Column(String, nullable=False, comment="Role Name")
    desc = Column(String, nullable=True, comment="Role Description")
    community = Column(
        ForeignKey("community.id", ondelete="CASCADE"),
        nullable=False,
        comment="Community",
    )
    permissions = relationship(lambda: Permission)

    @staticmethod
    def init_table():
        with local_session() as session:
            r = session.query(Role).filter(Role.name == "author").first()
            if r:
                Role.default_role = r
                return
            r1 = Role.create(
                name="author",
                desc="Role for an author",
                community=1,
            )
            session.add(r1)
            Role.default_role = r1
            r2 = Role.create(
                name="reader",
                desc="Role for a reader",
                community=1,
            )
            session.add(r2)
            r3 = Role.create(
                name="expert",
                desc="Role for an expert",
                community=1,
            )
            session.add(r3)
            r4 = Role.create(
                name="editor",
                desc="Role for an editor",
                community=1,
            )
            session.add(r4)


class Operation(Base):
    __tablename__ = "operation"

    name = Column(String, nullable=False, unique=True, comment="Operation Name")

    @staticmethod
    def init_table():
        with local_session() as session:
            for name in ["create", "update", "delete", "load"]:
                """
                * everyone can:
                    - load shouts
                    - load topics
                    - load reactions
                    - create an account to become a READER
                * readers can:
                    - update and delete their account
                    - load chats
                    - load messages
                    - create reaction of some shout's author allowed kinds
                    - create shout to become an AUTHOR
                * authors can:
                    - update and delete their shout
                    - invite other authors to edit shout and chat
                    - manage allowed reactions for their shout
                * pros can:
                    - create/update/delete their community
                    - create/update/delete topics for their community
                """
                op = session.query(Operation).filter(Operation.name == name).first()
                if not op:
                    op = Operation.create(name=name)
                    session.add(op)
            session.commit()


class Resource(Base):
    __tablename__ = "resource"

    resourceClass = Column(
        String, nullable=False, unique=True, comment="Resource class"
    )
    name = Column(String, nullable=False, unique=True, comment="Resource name")
    # TODO: community = Column(ForeignKey())

    @staticmethod
    def init_table():
        with local_session() as session:
            for res in ["shout", "topic", "reaction", "chat", "message", "invite", "community", "user"]:
                r = session.query(Resource).filter(Resource.name == res).first()
                if not r:
                    r = Resource.create(name=res, resourceClass=res)
                    session.add(r)
            session.commit()


class Permission(Base):
    __tablename__ = "permission"
    __table_args__ = (
        UniqueConstraint("role", "operation", "resource"),
        {"extend_existing": True},
    )

    role = Column(
        ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
    )
    operation = Column(
        ForeignKey("operation.id", ondelete="CASCADE"),
        nullable=False,
        comment="Operation",
    )
    resource = Column(
        ForeignKey("resource.id", ondelete="CASCADE"),
        nullable=False,
        comment="Resource",
    )


if __name__ == "__main__":
    Base.metadata.create_all(engine)
    ops = [
        Permission(role=1, operation=1, resource=1),
        Permission(role=1, operation=2, resource=1),
        Permission(role=1, operation=3, resource=1),
        Permission(role=1, operation=4, resource=1),
        Permission(role=2, operation=4, resource=1),
    ]
    global_session.add_all(ops)
    global_session.commit()

View File

@@ -1,44 +1,45 @@
-from datetime import datetime
+import time
 from enum import Enum as Enumeration

-from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
-from base.orm import Base
+from sqlalchemy import Column, ForeignKey, Integer, String
+
+from services.db import Base


 class ReactionKind(Enumeration):
-    AGREE = 1  # +1
-    DISAGREE = 2  # -1
-    PROOF = 3  # +1
-    DISPROOF = 4  # -1
-    ASK = 5  # +0
-    PROPOSE = 6  # +0
-    QUOTE = 7  # +0 bookmark
-    COMMENT = 8  # +0
-    ACCEPT = 9  # +1
-    REJECT = 0  # -1
-    LIKE = 11  # +1
-    DISLIKE = 12  # -1
-    REMARK = 13  # 0
-    FOOTNOTE = 14  # 0
-    # TYPE = <reaction index> # rating diff
+    # editor mode
+    AGREE = 'AGREE'  # +1
+    DISAGREE = 'DISAGREE'  # -1
+    ASK = 'ASK'  # +0
+    PROPOSE = 'PROPOSE'  # +0
+    ACCEPT = 'ACCEPT'  # +1
+    REJECT = 'REJECT'  # -1
+    # expert mode
+    PROOF = 'PROOF'  # +1
+    DISPROOF = 'DISPROOF'  # -1
+    # public feed
+    QUOTE = 'QUOTE'  # +0 TODO: use to bookmark in collection
+    COMMENT = 'COMMENT'  # +0
+    LIKE = 'LIKE'  # +1
+    DISLIKE = 'DISLIKE'  # -1


 class Reaction(Base):
-    __tablename__ = "reaction"
+    __tablename__ = 'reaction'

-    body = Column(String, nullable=True, comment="Reaction Body")
-    createdAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
-    createdBy = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
-    updatedAt = Column(DateTime, nullable=True, comment="Updated at")
-    updatedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor")
-    deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
-    deletedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by")
-    shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
-    replyTo = Column(
-        ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
-    )
-    range = Column(String, nullable=True, comment="Range in format <start index>:<end>")
-    kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
-    oid = Column(String, nullable=True, comment="Old ID")
+    body = Column(String, default='', comment='Reaction Body')
+    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
+    updated_at = Column(Integer, nullable=True, comment='Updated at')
+    deleted_at = Column(Integer, nullable=True, comment='Deleted at')
+    deleted_by = Column(ForeignKey('author.id'), nullable=True)
+    reply_to = Column(ForeignKey('reaction.id'), nullable=True)
+    quote = Column(String, nullable=True, comment='Original quoted text')
+    shout = Column(ForeignKey('shout.id'), nullable=False)
+    created_by = Column(ForeignKey('author.id'), nullable=False)
+    kind = Column(String, nullable=False)
+    oid = Column(String)

View File

@@ -1,94 +1,83 @@
-from datetime import datetime
+import time

-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, JSON
-from sqlalchemy.orm import column_property, relationship
+from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
+from sqlalchemy.orm import relationship

-from base.orm import Base, local_session
+from orm.author import Author
+from orm.community import Community
 from orm.reaction import Reaction
 from orm.topic import Topic
-from orm.user import User
+from services.db import Base


 class ShoutTopic(Base):
-    __tablename__ = "shout_topic"
+    __tablename__ = 'shout_topic'

     id = None  # type: ignore
-    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
-    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
+    shout = Column(ForeignKey('shout.id'), primary_key=True)
+    topic = Column(ForeignKey('topic.id'), primary_key=True)
     main = Column(Boolean, nullable=True)


 class ShoutReactionsFollower(Base):
-    __tablename__ = "shout_reactions_followers"
+    __tablename__ = 'shout_reactions_followers'

     id = None  # type: ignore
-    follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
+    follower = Column(ForeignKey('author.id'), primary_key=True)
+    shout = Column(ForeignKey('shout.id'), primary_key=True)
     auto = Column(Boolean, nullable=False, default=False)
-    createdAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
-    deletedAt = Column(DateTime, nullable=True)
+    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
+    deleted_at = Column(Integer, nullable=True)


 class ShoutAuthor(Base):
-    __tablename__ = "shout_author"
+    __tablename__ = 'shout_author'

     id = None  # type: ignore
-    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
-    user = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    caption = Column(String, nullable=True, default="")
+    shout = Column(ForeignKey('shout.id'), primary_key=True)
+    author = Column(ForeignKey('author.id'), primary_key=True)
+    caption = Column(String, nullable=True, default='')


+class ShoutCommunity(Base):
+    __tablename__ = 'shout_community'
+
+    id = None  # type: ignore
+    shout = Column(ForeignKey('shout.id'), primary_key=True)
+    community = Column(ForeignKey('community.id'), primary_key=True)


 class Shout(Base):
-    __tablename__ = "shout"
+    __tablename__ = 'shout'

-    createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
-    updatedAt = Column(DateTime, nullable=True, comment="Updated at")
-    publishedAt = Column(DateTime, nullable=True)
-    deletedAt = Column(DateTime, nullable=True)
+    # timestamps
+    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
+    updated_at = Column(Integer, nullable=True)
+    published_at = Column(Integer, nullable=True)
+    featured_at = Column(Integer, nullable=True)
+    deleted_at = Column(Integer, nullable=True)

-    createdBy = Column(ForeignKey("user.id"), comment="Created By")
-    deletedBy = Column(ForeignKey("user.id"), nullable=True)
+    created_by = Column(ForeignKey('author.id'), nullable=False)
+    updated_by = Column(ForeignKey('author.id'), nullable=True)
+    deleted_by = Column(ForeignKey('author.id'), nullable=True)

+    body = Column(String, nullable=False, comment='Body')
     slug = Column(String, unique=True)
-    cover = Column(String, nullable=True, comment="Cover image url")
+    cover = Column(String, nullable=True, comment='Cover image url')
+    cover_caption = Column(String, nullable=True, comment='Cover image alt caption')
     lead = Column(String, nullable=True)
     description = Column(String, nullable=True)
-    body = Column(String, nullable=False, comment="Body")
-    title = Column(String, nullable=True)
+    title = Column(String, nullable=False)
     subtitle = Column(String, nullable=True)
-    layout = Column(String, nullable=True)
+    layout = Column(String, nullable=False, default='article')
     media = Column(JSON, nullable=True)

-    authors = relationship(lambda: User, secondary=ShoutAuthor.__tablename__)
-    topics = relationship(lambda: Topic, secondary=ShoutTopic.__tablename__)
-    # views from the old Discours website
-    viewsOld = Column(Integer, default=0)
-    # views from Ackee tracker on the new Discours website
-    viewsAckee = Column(Integer, default=0)
-    views = column_property(viewsOld + viewsAckee)
-    reactions = relationship(lambda: Reaction)
+    authors = relationship(Author, secondary='shout_author')
+    topics = relationship(Topic, secondary='shout_topic')
+    communities = relationship(Community, secondary='shout_community')
+    reactions = relationship(Reaction)

     # TODO: these field should be used or modified
-    community = Column(ForeignKey("community.id"), default=1)
-    lang = Column(String, nullable=False, default='ru', comment="Language")
-    mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
-    visibility = Column(String, nullable=True)  # owner authors community public
-    versionOf = Column(ForeignKey("shout.id"), nullable=True)
+    lang = Column(String, nullable=False, default='ru', comment='Language')
+    version_of = Column(ForeignKey('shout.id'), nullable=True)
     oid = Column(String, nullable=True)

-    @staticmethod
-    def init_table():
-        with local_session() as session:
-            s = session.query(Shout).first()
-            if not s:
-                entry = {
-                    "slug": "genesis-block",
-                    "body": "",
-                    "title": "Ничего",
-                    "lang": "ru"
-                }
-                s = Shout.create(**entry)
-                session.add(s)
-                session.commit()
+    seo = Column(String, nullable=True)  # JSON
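A brief query sketch against the reworked model, loading a shout together with its many-to-many relations (local_session is an assumed helper; the 'genesis-block' slug is just an illustrative value borrowed from the deleted init_table seed above):

    from orm.shout import Shout
    from services.db import local_session  # assumed session helper

    with local_session() as session:
        shout = session.query(Shout).filter(Shout.slug == 'genesis-block').first()
        if shout:
            print(shout.title, shout.layout)
            print([a.slug for a in shout.authors])  # via the shout_author secondary
            print([t.slug for t in shout.topics])   # via the shout_topic secondary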

View File

@@ -1,30 +1,26 @@
-from datetime import datetime
+import time

-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
-from base.orm import Base
+from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
+
+from services.db import Base


 class TopicFollower(Base):
-    __tablename__ = "topic_followers"
+    __tablename__ = 'topic_followers'

     id = None  # type: ignore
-    follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
-    createdAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
+    follower = Column(Integer, ForeignKey('author.id'), primary_key=True)
+    topic = Column(Integer, ForeignKey('topic.id'), primary_key=True)
+    created_at = Column(Integer, nullable=False, default=int(time.time()))
     auto = Column(Boolean, nullable=False, default=False)


 class Topic(Base):
-    __tablename__ = "topic"
+    __tablename__ = 'topic'

     slug = Column(String, unique=True)
-    title = Column(String, nullable=False, comment="Title")
-    body = Column(String, nullable=True, comment="Body")
-    pic = Column(String, nullable=True, comment="Picture")
-    community = Column(
-        ForeignKey("community.id"), default=1, comment="Community"
-    )
-    oid = Column(String, nullable=True, comment="Old ID")
+    title = Column(String, nullable=False, comment='Title')
+    body = Column(String, nullable=True, comment='Body')
+    pic = Column(String, nullable=True, comment='Picture')
+    community = Column(ForeignKey('community.id'), default=1)
+    oid = Column(String, nullable=True, comment='Old ID')

View File

@@ -1,106 +1,30 @@
-from datetime import datetime
+import time

-from sqlalchemy import JSON as JSONType
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
-from sqlalchemy.orm import relationship
+from sqlalchemy import Boolean, Column, Integer, String

-from base.orm import Base, local_session
-from orm.rbac import Role
+from services.db import Base


-class UserRating(Base):
-    __tablename__ = "user_rating"
-
-    id = None  # type: ignore
-    rater = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    user = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    value = Column(Integer)
-
-    @staticmethod
-    def init_table():
-        pass
-
-
-class UserRole(Base):
-    __tablename__ = "user_role"
-
-    id = None  # type: ignore
-    user = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    role = Column(ForeignKey("role.id"), primary_key=True, index=True)
-
-
-class AuthorFollower(Base):
-    __tablename__ = "author_follower"
-
-    id = None  # type: ignore
-    follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    author = Column(ForeignKey("user.id"), primary_key=True, index=True)
-    createdAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
-    auto = Column(Boolean, nullable=False, default=False)


 class User(Base):
-    __tablename__ = "user"
-    default_user = None
+    __tablename__ = 'authorizer_users'

-    email = Column(String, unique=True, nullable=False, comment="Email")
-    username = Column(String, nullable=False, comment="Login")
-    password = Column(String, nullable=True, comment="Password")
-    bio = Column(String, nullable=True, comment="Bio")  # status description
-    about = Column(String, nullable=True, comment="About")  # long and formatted
-    userpic = Column(String, nullable=True, comment="Userpic")
-    name = Column(String, nullable=True, comment="Display name")
-    slug = Column(String, unique=True, comment="User's slug")
-    muted = Column(Boolean, default=False)
-    emailConfirmed = Column(Boolean, default=False)
-    createdAt = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Created at"
-    )
-    lastSeen = Column(
-        DateTime, nullable=False, default=datetime.now, comment="Was online at"
-    )
-    deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
-    links = Column(JSONType, nullable=True, comment="Links")
-    oauth = Column(String, nullable=True)
-    ratings = relationship(UserRating, foreign_keys=UserRating.user)
-    roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
-    oid = Column(String, nullable=True)
-
-    @staticmethod
-    def init_table():
-        with local_session() as session:
-            default = session.query(User).filter(User.slug == "anonymous").first()
-            if not default:
-                default_dict = {
-                    "email": "noreply@discours.io",
-                    "username": "noreply@discours.io",
-                    "name": "Аноним",
-                    "slug": "anonymous",
-                }
-                default = User.create(**default_dict)
-                session.add(default)
-                discours_dict = {
-                    "email": "welcome@discours.io",
-                    "username": "welcome@discours.io",
-                    "name": "Дискурс",
-                    "slug": "discours",
-                }
-                discours = User.create(**discours_dict)
-                session.add(discours)
-                session.commit()
-            User.default_user = default
-
-    def get_permission(self):
-        scope = {}
-        for role in self.roles:
-            for p in role.permissions:
-                if p.resource not in scope:
-                    scope[p.resource] = set()
-                scope[p.resource].add(p.operation)
-        print(scope)
-        return scope
-
-
-# if __name__ == "__main__":
-#     print(User.get_permission(user_id=1))  # type: ignore
+    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
+    key = Column(String)
+    email = Column(String, unique=True)
+    email_verified_at = Column(Integer)
+    family_name = Column(String)
+    gender = Column(String)
+    given_name = Column(String)
+    is_multi_factor_auth_enabled = Column(Boolean)
+    middle_name = Column(String)
+    nickname = Column(String)
+    password = Column(String)
+    phone_number = Column(String, unique=True)
+    phone_number_verified_at = Column(Integer)
+    # preferred_username = Column(String, nullable=False)
+    picture = Column(String)
+    revoked_timestamp = Column(Integer)
+    roles = Column(String, default='author, reader')
+    signup_methods = Column(String, default='magic_link_login')
+    created_at = Column(Integer, default=lambda: int(time.time()))
+    updated_at = Column(Integer, default=lambda: int(time.time()))

pyproject.toml (new file, 33 lines)

@@ -0,0 +1,33 @@
[tool.poetry]
name = "core"
version = "0.3.3"
description = "core module for discours.io"
authors = ["discoursio devteam"]
license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.12"
SQLAlchemy = "^2.0.29"
psycopg2-binary = "^2.9.9"
redis = {extras = ["hiredis"], version = "^5.0.1"}
sentry-sdk = {version = "^1.44.1", extras = ["starlette", "ariadne", "sqlalchemy"]}
starlette = "^0.37.2"
gql = "^3.5.0"
ariadne = "^0.23.0"
pre-commit = "^3.7.0"
granian = "^1.2.1"
google-analytics-data = "^0.18.7"
opensearch-py = "^2.5.0"
httpx = "^0.27.0"
dogpile-cache = "^1.3.1"
colorlog = "^6.8.2"
sqlalchemy-searchable = "^2.1.0"
[tool.poetry.group.dev.dependencies]
ruff = "^0.3.5"
isort = "^5.13.2"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"


@@ -1,4 +0,0 @@
isort
brunette
flake8
mypy


@@ -1,40 +0,0 @@
python-frontmatter~=1.0.0
aioredis~=2.0.1
aiohttp
ariadne>=0.17.0
PyYAML>=5.4
pyjwt>=2.6.0
starlette~=0.23.1
sqlalchemy>=1.4.41
graphql-core>=3.0.3
gql~=3.4.0
uvicorn>=0.18.3
pydantic>=1.10.2
passlib~=1.7.4
authlib>=1.1.0
httpx>=0.23.0
psycopg2-binary
transliterate~=1.10.2
requests~=2.28.1
bcrypt>=4.0.0
bson~=0.5.10
flake8
DateTime~=4.7
asyncio~=3.4.3
python-dateutil~=2.8.2
beautifulsoup4~=4.11.1
lxml
sentry-sdk>=1.14.0
# sse_starlette
graphql-ws
nltk~=3.8.1
pymystem3~=0.2.0
transformers~=4.28.1
boto3~=1.28.2
botocore~=1.31.2
python-multipart~=0.0.6
alembic==1.11.3
Mako==1.2.4
MarkupSafe==2.1.3
sse-starlette==1.6.5
itsdangerous


@@ -1,56 +0,0 @@
database_name="discoursio"
remote_backup_dir="/var/backups/mongodb"
user="root"
host="v2.discours.io"
server="$user@$host"
dump_dir="./dump"
local_backup_filename="discours-backup.bson.gz.tar"
echo "DATABASE RESET STARTED"
echo "server: $server"
echo "remote backup directory: $remote_backup_dir"
echo "Searching for last backup file..."
last_backup_filename=$(ssh $server "ls -t $remote_backup_dir | head -1")
if [ $? -ne 0 ]; then { echo "Failed to get last backup filename, aborting." ; exit 1; } fi
echo "Last backup file found: $last_backup_filename"
echo "Downloading..."
scp $server:$remote_backup_dir/"$last_backup_filename" "$local_backup_filename"
if [ $? -ne 0 ]; then { echo "Failed to download backup file, aborting." ; exit 1; } fi
echo "Backup file $local_backup_filename downloaded successfully"
echo "Creating dump directory: $dump_dir"
mkdir -p "$dump_dir"
if [ $? -ne 0 ]; then { echo "Failed to create dump directory, aborting." ; exit 1; } fi
echo "$dump_dir directory created"
echo "Unpacking backup file $local_backup_filename to $dump_dir"
tar -xzf "$local_backup_filename" --directory "$dump_dir" --strip-components 1
if [ $? -ne 0 ]; then { echo "Failed to unpack backup, aborting." ; exit 1; } fi
echo "Backup file $local_backup_filename successfully unpacked to $dump_dir"
echo "Removing backup file $local_backup_filename"
rm "$local_backup_filename"
if [ $? -ne 0 ]; then { echo "Failed to remove backup file, aborting." ; exit 1; } fi
echo "Backup file removed"
echo "Dropping database $database_name"
dropdb $database_name --force
if [ $? -ne 0 ]; then { echo "Failed to drop database, aborting." ; exit 1; } fi
echo "Database $database_name dropped"
echo "Creating database $database_name"
createdb $database_name
if [ $? -ne 0 ]; then { echo "Failed to create database, aborting." ; exit 1; } fi
echo "Database $database_name successfully created"
echo "BSON -> JSON"
python3 server.py bson
if [ $? -ne 0 ]; then { echo "BSON -> JSON failed, aborting." ; exit 1; } fi
echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'


@@ -1,67 +1,75 @@
from resolvers.auth import (
login,
sign_out,
is_email_used,
register_by_email,
confirm_email,
auth_send_link,
get_current_user,
)
from resolvers.author import (get_author, get_author_followers,
get_author_follows, get_author_follows_authors,
get_author_follows_topics, get_author_id,
get_authors_all, load_authors_by, search_authors,
update_author)
from resolvers.community import get_communities_all, get_community
from resolvers.editor import create_shout, delete_shout, update_shout
from resolvers.follower import (follow, get_shout_followers,
get_topic_followers, unfollow)
from resolvers.notifier import (load_notifications, notification_mark_seen,
notifications_seen_after,
notifications_seen_thread)
from resolvers.rating import rate_author
from resolvers.reaction import (create_reaction, delete_reaction,
load_reactions_by, load_shouts_followed,
update_reaction)
from resolvers.reader import (get_shout, load_shouts_by, load_shouts_feed,
load_shouts_random_top, load_shouts_random_topic,
load_shouts_search, load_shouts_unrated)
from resolvers.topic import (get_topic, get_topics_all, get_topics_by_author,
get_topics_by_community)
from services.triggers import events_register
from resolvers.create.migrate import markdown_body
from resolvers.create.editor import create_shout, delete_shout, update_shout
events_register()
from resolvers.zine.profile import (
load_authors_by,
rate_user,
update_profile,
get_authors_all
)
from resolvers.zine.reactions import (
create_reaction,
delete_reaction,
update_reaction,
reactions_unfollow,
reactions_follow,
load_reactions_by
)
from resolvers.zine.topics import (
topic_follow,
topic_unfollow,
topics_by_author,
topics_by_community,
topics_all,
get_topic
)
from resolvers.zine.following import (
follow,
unfollow
)
from resolvers.zine.load import (
load_shout,
load_shouts_by
)
from resolvers.inbox.chats import (
create_chat,
delete_chat,
update_chat
)
from resolvers.inbox.messages import (
create_message,
delete_message,
update_message,
mark_as_read
)
from resolvers.inbox.load import (
load_chats,
load_messages_by,
load_recipients
)
from resolvers.inbox.search import search_recipients
from resolvers.notifications import load_notifications
__all__ = [
# author
'get_author',
'get_author_id',
'get_author_follows',
'get_author_follows_topics',
'get_author_follows_authors',
'get_authors_all',
'load_authors_by',
'rate_author',
'update_author',
'search_authors',
# community
'get_community',
'get_communities_all',
# topic
'get_topic',
'get_topics_all',
'get_topics_by_community',
'get_topics_by_author',
# reader
'get_shout',
'load_shouts_by',
'load_shouts_feed',
'load_shouts_search',
'load_shouts_followed',
'load_shouts_unrated',
'load_shouts_random_top',
'load_shouts_random_topic',
# follower
'follow',
'unfollow',
'get_topic_followers',
'get_shout_followers',
'get_author_followers',
# editor
'create_shout',
'update_shout',
'delete_shout',
# reaction
'create_reaction',
'update_reaction',
'delete_reaction',
'load_reactions_by',
# notifier
'load_notifications',
'notifications_seen_thread',
'notifications_seen_after',
'notification_mark_seen',
]
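
The @query.field / @mutation.field decorators used across these resolver modules come from services/schema.py, which this diff does not include; a plausible Ariadne-based sketch of that module (module layout and schema path are assumptions, not confirmed by the diff):

from ariadne import MutationType, QueryType, load_schema_from_path, make_executable_schema

query = QueryType()
mutation = MutationType()

def build_schema():
    # importing the package registers every decorated resolver on query/mutation
    import resolvers  # noqa: F401

    type_defs = load_schema_from_path('schema/')  # assumed location of *.graphql files
    return make_executable_schema(type_defs, query, mutation)
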


@@ -1,202 +0,0 @@
# -*- coding: utf-8 -*-
from datetime import datetime, timezone
from urllib.parse import quote_plus
from graphql.type import GraphQLResolveInfo
from starlette.responses import RedirectResponse
from transliterate import translit
import re
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken,
ObjectNotExist, Unauthorized)
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
from resolvers.zine.profile import user_subscriptions
from settings import SESSION_TOKEN_HEADER, FRONTEND_URL
@mutation.field("getSession")
@login_required
async def get_current_user(_, info):
auth: AuthCredentials = info.context["request"].auth
token = info.context["request"].headers.get(SESSION_TOKEN_HEADER)
with local_session() as session:
user = session.query(User).where(User.id == auth.user_id).one()
user.lastSeen = datetime.now(tz=timezone.utc)
session.commit()
return {
"token": token,
"user": user,
"news": await user_subscriptions(user.id),
}
@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
"""confirm owning email address"""
try:
print('[resolvers.auth] confirm email by token')
payload = JWTCodec.decode(token)
user_id = payload.user_id
await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
with local_session() as session:
user = session.query(User).where(User.id == user_id).first()
session_token = await TokenStorage.create_session(user)
user.emailConfirmed = True
user.lastSeen = datetime.now(tz=timezone.utc)
session.add(user)
session.commit()
return {
"token": session_token,
"user": user,
"news": await user_subscriptions(user.id)
}
except InvalidToken as e:
raise InvalidToken(e.message)
except Exception as e:
print(e) # FIXME: debug only
return {"error": "email is not confirmed"}
async def confirm_email_handler(request):
token = request.path_params["token"] # one time
request.session["token"] = token
res = await confirm_email(None, {}, token)
print('[resolvers.auth] confirm_email request: %r' % request)
if "error" in res:
raise BaseHttpException(res['error'])
else:
response = RedirectResponse(url=FRONTEND_URL)
response.set_cookie("token", res["token"]) # session token
return response
def create_user(user_dict):
user = User(**user_dict)
with local_session() as session:
user.roles.append(session.query(Role).first())
session.add(user)
session.commit()
return user
def generate_unique_slug(src):
print('[resolvers.auth] generating slug from: ' + src)
slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
if slug != src:
print('[resolvers.auth] translited name: ' + slug)
c = 1
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
while user:
user = session.query(User).where(User.slug == slug).first()
slug = slug + '-' + str(c)
c += 1
if not user:
unique_slug = slug
print('[resolvers.auth] ' + unique_slug)
return quote_plus(unique_slug.replace('\'', '')).replace('+', '-')
@mutation.field("registerUser")
async def register_by_email(_, _info, email: str, password: str = "", name: str = ""):
"""creates new user account"""
email = email.lower()
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if user:
raise Unauthorized("User already exists")
else:
slug = generate_unique_slug(name)
user = session.query(User).where(User.slug == slug).first()
if user:
slug = generate_unique_slug(email.split('@')[0])
user_dict = {
"email": email,
"username": email, # will be used to store phone number or some messenger network id
"name": name,
"slug": slug
}
if password:
user_dict["password"] = Password.encode(password)
user = create_user(user_dict)
user = await auth_send_link(_, _info, email)
return {"user": user}
@mutation.field("sendLink")
async def auth_send_link(_, _info, email, lang="ru", template="email_confirmation"):
"""send link with confirm code to email"""
email = email.lower()
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if not user:
raise ObjectNotExist("User not found")
else:
token = await TokenStorage.create_onetime(user)
await send_auth_email(user, token, lang, template)
return user
@query.field("signIn")
async def login(_, info, email: str, password: str = "", lang: str = "ru"):
email = email.lower()
with local_session() as session:
orm_user = session.query(User).filter(User.email == email).first()
if orm_user is None:
print(f"[auth] {email}: email not found")
# return {"error": "email not found"}
raise ObjectNotExist("User not found") # contains webserver status
if not password:
print(f"[auth] send confirm link to {email}")
token = await TokenStorage.create_onetime(orm_user)
await send_auth_email(orm_user, token, lang)
# FIXME: not an error, warning
return {"error": "no password, email link was sent"}
else:
# sign in using password
if not orm_user.emailConfirmed:
# not an error, warns users
return {"error": "please, confirm email"}
else:
try:
user = Identity.password(orm_user, password)
session_token = await TokenStorage.create_session(user)
print(f"[auth] user {email} authorized")
return {
"token": session_token,
"user": user,
"news": await user_subscriptions(user.id),
}
except InvalidPassword:
print(f"[auth] {email}: invalid password")
raise InvalidPassword("invalid password") # contains webserver status
# return {"error": "invalid password"}
@query.field("signOut")
@login_required
async def sign_out(_, info: GraphQLResolveInfo):
token = info.context["request"].headers.get(SESSION_TOKEN_HEADER, "")
status = await TokenStorage.revoke(token)
return status
@query.field("isEmailUsed")
async def is_email_used(_, _info, email):
email = email.lower()
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
return user is not None

resolvers/author.py (new file, 328 lines)

@@ -0,0 +1,328 @@
import json
import time
from sqlalchemy import and_, desc, or_, select, text
from sqlalchemy.orm import aliased
from sqlalchemy_searchable import search
from orm.author import Author, AuthorFollower
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.stat import author_follows_authors, author_follows_topics, get_with_stat
from services.auth import login_required
from services.cache import cache_author, cache_follower
from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.memorycache import cache_region
from services.rediscache import redis
from services.schema import mutation, query
@mutation.field('update_author')
@login_required
async def update_author(_, info, profile):
user_id = info.context.get('user_id')
if not user_id:
return {'error': 'unauthorized', 'author': None}
try:
with local_session() as session:
author = session.query(Author).where(Author.user == user_id).first()
if author:
Author.update(author, profile)
session.add(author)
session.commit()
return {'error': None, 'author': author}
except Exception as exc:
import traceback
logger.error(traceback.format_exc())
return {'error': exc, 'author': None}
@query.field('get_authors_all')
def get_authors_all(_, _info):
with local_session() as session:
authors = session.query(Author).all()
return authors
@query.field('get_author')
async def get_author(_, _info, slug='', author_id=0):
author_query = ''
author = None
author_dict = None
try:
# lookup for cached author
author_query = select(Author).filter(or_(Author.slug == slug, Author.id == author_id))
[found_author] = local_session().execute(author_query).first()
logger.debug(found_author)
if found_author:
logger.debug(f'found author id: {found_author.id}')
author_id = found_author.id if found_author.id else author_id
if author_id:
cached_result = await redis.execute('GET', f'author:{author_id}')
author_dict = json.loads(cached_result) if cached_result else None
# update stat from db
if not author_dict or not author_dict.get('stat'):
result = get_with_stat(author_query)
if not result:
raise ValueError('Author not found')
[author] = result
# use found author
if isinstance(author, Author):
logger.debug(f'update @{author.slug} with id {author.id}')
author_dict = author.dict()
await cache_author(author_dict)
except ValueError:
pass
except Exception as exc:
import traceback
logger.error(f'{exc}:\n{traceback.format_exc()}')
return author_dict
async def get_author_by_user_id(user_id: str):
logger.info(f'getting author id for {user_id}')
redis_key = f'user:{user_id}'
author = None
try:
res = await redis.execute('GET', redis_key)
if isinstance(res, str):
author = json.loads(res)
author_id = author.get('id')
author_slug = author.get('slug')
if author_id:
logger.debug(f'got author @{author_slug} #{author_id} cached')
return author
author_query = select(Author).filter(Author.user == user_id)
result = get_with_stat(author_query)
if result:
[author] = result
await cache_author(author.dict())
except Exception as exc:
import traceback
traceback.print_exc()
logger.error(exc)
return author
@query.field('get_author_id')
async def get_author_id(_, _info, user: str):
return await get_author_by_user_id(user)
@query.field('load_authors_by')
def load_authors_by(_, _info, by, limit, offset):
cache_key = f'{json.dumps(by)}_{limit}_{offset}'
@cache_region.cache_on_arguments(cache_key)
def _load_authors_by():
logger.debug(f'loading authors by {by}')
q = select(Author)
if by.get('slug'):
q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
elif by.get('name'):
q = q.filter(Author.name.ilike(f"%{by['name']}%"))
elif by.get('topic'):
q = (
q.join(ShoutAuthor)
.join(ShoutTopic)
.join(Topic)
.where(Topic.slug == str(by['topic']))
)
if by.get('last_seen'): # in unix time
before = int(time.time()) - by['last_seen']
q = q.filter(Author.last_seen > before)
elif by.get('created_at'): # in unix time
before = int(time.time()) - by['created_at']
q = q.filter(Author.created_at > before)
order = by.get('order')
if order in ['likes', 'shouts', 'followers']:
q = q.order_by(desc(text(f'{order}_stat')))
# q = q.distinct()
q = q.limit(limit).offset(offset)
authors = get_with_stat(q)
return authors
return _load_authors_by()
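
load_authors_by caches through dogpile.cache's cache_on_arguments, whose first positional argument is a namespace string, so each by/limit/offset combination gets its own key. The cache_region it uses lives in services/memorycache.py, not shown here; a minimal assumed configuration (backend and TTL are illustrative):

from dogpile.cache import make_region

cache_region = make_region()
# in-memory backend with a five-minute expiration
cache_region.configure('dogpile.cache.memory', expiration_time=300)
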
@query.field('get_author_follows')
async def get_author_follows(_, _info, slug='', user=None, author_id=0):
try:
author_query = select(Author)
if user:
author_query = author_query.filter(Author.user == user)
elif slug:
author_query = author_query.filter(Author.slug == slug)
elif author_id:
author_query = author_query.filter(Author.id == author_id)
else:
raise ValueError('One of slug, user, or author_id must be provided')
[result] = local_session().execute(author_query)
if len(result) > 0:
# logger.debug(result)
[author] = result
# logger.debug(author)
if author and isinstance(author, Author):
# logger.debug(author.dict())
author_id = author.id
rkey = f'author:{author_id}:follows-authors'
logger.debug(f'getting {author_id} follows authors')
cached = await redis.execute('GET', rkey)
authors = []
if not cached:
authors = author_follows_authors(author_id)
prepared = [author.dict() for author in authors]
await redis.execute('SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder))
elif isinstance(cached, str):
authors = json.loads(cached)
rkey = f'author:{author_id}:follows-topics'
cached = await redis.execute('GET', rkey)
topics = []
if cached and isinstance(cached, str):
topics = json.loads(cached)
if not cached:
topics = author_follows_topics(author_id)
prepared = [topic.dict() for topic in topics]
await redis.execute(
'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
)
return {
'topics': topics,
'authors': authors,
'communities': [
{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
],
}
except Exception:
import traceback
traceback.print_exc()
return {'error': 'Author not found'}
@query.field('get_author_follows_topics')
async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None):
with local_session() as session:
if user or slug:
author_id_result = (
session.query(Author.id)
.filter(or_(Author.user == user, Author.slug == slug))
.first()
)
author_id = author_id_result[0] if author_id_result else None
if not author_id:
raise ValueError('Author not found')
logger.debug(f'getting {author_id} follows topics')
rkey = f'author:{author_id}:follows-topics'
cached = await redis.execute('GET', rkey)
topics = []
if isinstance(cached, str):
topics = json.loads(cached)
if not cached:
topics = author_follows_topics(author_id)
prepared = [topic.dict() for topic in topics]
await redis.execute(
'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
)
return topics
@query.field('get_author_follows_authors')
async def get_author_follows_authors(_, _info, slug='', user=None, author_id=None):
with local_session() as session:
if user or slug:
author_id_result = (
session.query(Author.id)
.filter(or_(Author.user == user, Author.slug == slug))
.first()
)
author_id = author_id_result[0] if author_id_result else None
if author_id:
logger.debug(f'getting {author_id} follows authors')
rkey = f'author:{author_id}:follows-authors'
cached = await redis.execute('GET', rkey)
authors = []
if isinstance(cached, str):
authors = json.loads(cached)
if not authors:
authors = author_follows_authors(author_id)
prepared = [author.dict() for author in authors]
await redis.execute(
'SET', rkey, json.dumps(prepared, cls=CustomJSONEncoder)
)
return authors
else:
raise ValueError('Author not found')
def create_author(user_id: str, slug: str, name: str = ''):
with local_session() as session:
try:
author = None
if user_id:
author = session.query(Author).filter(Author.user == user_id).first()
elif slug:
author = session.query(Author).filter(Author.slug == slug).first()
if not author:
new_author = Author(user=user_id, slug=slug, name=name)
session.add(new_author)
session.commit()
logger.info(f'author created by webhook {new_author.dict()}')
except Exception as exc:
logger.debug(exc)
@query.field('get_author_followers')
async def get_author_followers(_, _info, slug: str):
logger.debug(f'getting followers for @{slug}')
try:
author_alias = aliased(Author)
author_query = select(author_alias).filter(author_alias.slug == slug)
result = local_session().execute(author_query).first()
if result:
[author] = result
author_id = author.id
cached = await redis.execute('GET', f'author:{author_id}:followers')
if not cached:
author_follower_alias = aliased(AuthorFollower, name='af')
q = select(Author).join(
author_follower_alias,
and_(
author_follower_alias.author == author_id,
author_follower_alias.follower == Author.id,
),
)
results = get_with_stat(q)
if isinstance(results, list):
for follower in results:
await cache_follower(follower, author)
logger.debug(f'@{slug} cache updated with {len(results)} followers')
return results
else:
logger.debug(f'@{slug} got followers cached')
if isinstance(cached, str):
return json.loads(cached)
except Exception as exc:
import traceback
logger.error(exc)
logger.error(traceback.format_exc())
return []
@query.field('search_authors')
async def search_authors(_, _info, what: str):
q = search(select(Author), what)
return get_with_stat(q)
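
search_authors relies on sqlalchemy-searchable's search() helper, which matches against a tsvector column on the model. The Author model itself is not in this hunk; a self-contained sketch of the setup it presumably has (column names here are assumptions, the real model lives in orm/author.py):

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base
from sqlalchemy_searchable import make_searchable
from sqlalchemy_utils.types import TSVectorType

Base = declarative_base()
make_searchable(Base.metadata)  # call once, before models are defined

class Author(Base):
    __tablename__ = 'author'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    bio = Column(String)
    # full-text index over the chosen columns, maintained by trigger
    search_vector = Column(TSVectorType('name', 'bio'))
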

resolvers/collab.py (new file, 153 lines)

@@ -0,0 +1,153 @@
from orm.author import Author
from orm.invite import Invite, InviteStatus
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation
@mutation.field('accept_invite')
@login_required
async def accept_invite(_, info, invite_id: int):
user_id = info.context['user_id']
# Check if the user exists
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
# Check if the invite exists
invite = session.query(Invite).filter(Invite.id == invite_id).first()
if (
invite
and invite.author_id == author.id
and invite.status == InviteStatus.PENDING.value
):
# Add the user to the shout authors
shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
if shout:
if author not in shout.authors:
shout.authors.append(author)
session.delete(invite)
session.add(shout)
session.commit()
return {'success': True, 'message': 'Invite accepted'}
else:
return {'error': 'Shout not found'}
else:
return {'error': 'Invalid invite or already accepted/rejected'}
else:
return {'error': 'User not found'}
@mutation.field('reject_invite')
@login_required
async def reject_invite(_, info, invite_id: int):
user_id = info.context['user_id']
# Check if the user exists
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
# Check if the invite exists
invite = session.query(Invite).filter(Invite.id == invite_id).first()
if (
invite
and invite.author_id == author.id
and invite.status == InviteStatus.PENDING.value
):
# Delete the invite
session.delete(invite)
session.commit()
return {'success': True, 'message': 'Invite rejected'}
else:
return {'error': 'Invalid invite or already accepted/rejected'}
else:
return {'error': 'User not found'}
@mutation.field('create_invite')
@login_required
async def create_invite(_, info, slug: str = '', author_id: int = 0):
user_id = info.context['user_id']
# Check if the inviter is the owner of the shout
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == slug).first()
inviter = session.query(Author).filter(Author.user == user_id).first()
if inviter and shout and shout.authors and inviter.id == shout.created_by:
# Check if the author is a valid author
author = session.query(Author).filter(Author.id == author_id).first()
if author:
# Check if an invite already exists
existing_invite = (
session.query(Invite)
.filter(
Invite.inviter_id == inviter.id,
Invite.author_id == author_id,
Invite.shout_id == shout.id,
Invite.status == InviteStatus.PENDING.value,
)
.first()
)
if existing_invite:
return {'error': 'Invite already sent'}
# Create a new invite
new_invite = Invite(
inviter_id=user_id,
author_id=author_id,
shout_id=shout.id,
status=InviteStatus.PENDING.value,
)
session.add(new_invite)
session.commit()
return {'error': None, 'invite': new_invite}
else:
return {'error': 'Invalid author'}
else:
return {'error': 'Access denied'}
@mutation.field('remove_author')
@login_required
async def remove_author(_, info, slug: str = '', author_id: int = 0):
user_id = info.context['user_id']
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
shout = session.query(Shout).filter(Shout.slug == slug).first()
# NOTE: owner should be first in a list
if shout and author.id == shout.created_by:
shout.authors = [
author for author in shout.authors if author.id != author_id
]
session.commit()
return {}
return {'error': 'Access denied'}
@mutation.field('remove_invite')
@login_required
async def remove_invite(_, info, invite_id: int):
user_id = info.context['user_id']
# Check if the user exists
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
# Check if the invite exists
invite = session.query(Invite).filter(Invite.id == invite_id).first()
if isinstance(invite, Invite):
shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
if shout and shout.deleted_at is None and invite:
if invite.inviter_id == author.id or author.id == shout.created_by:
if invite.status == InviteStatus.PENDING.value:
# Delete the invite
session.delete(invite)
session.commit()
return {}
else:
return {'error': 'Invalid invite or already accepted/rejected'}
else:
return {'error': 'Author not found'}
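
The Invite model referenced above comes from orm/invite.py, which is not part of this hunk; its shape, inferred from the usage here (inviter_id, author_id, shout_id, status, InviteStatus.PENDING), is presumably close to:

from enum import Enum

from sqlalchemy import Column, ForeignKey, String

from services.db import Base

class InviteStatus(Enum):
    PENDING = 'PENDING'
    ACCEPTED = 'ACCEPTED'
    REJECTED = 'REJECTED'

class Invite(Base):
    __tablename__ = 'invite'
    # an integer primary key `id` is presumably supplied by Base
    inviter_id = Column(ForeignKey('author.id'))
    author_id = Column(ForeignKey('author.id'))
    shout_id = Column(ForeignKey('shout.id'))
    status = Column(String, default=InviteStatus.PENDING.value)
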

resolvers/community.py (new file, 89 lines)

@@ -0,0 +1,89 @@
from sqlalchemy import and_, distinct, func, select
from sqlalchemy.orm import aliased
from orm.author import Author
from orm.community import Community, CommunityAuthor
from orm.shout import ShoutCommunity
from services.db import local_session
from services.logger import root_logger as logger
from services.schema import query
def add_community_stat_columns(q):
community_followers = aliased(CommunityAuthor)
shout_community_aliased = aliased(ShoutCommunity)
# count shouts linked to each community
q = q.outerjoin(
shout_community_aliased, shout_community_aliased.community == Community.id
).add_columns(
func.count(distinct(shout_community_aliased.shout)).label('shouts_stat')
)
# count community followers via CommunityAuthor rows
q = q.outerjoin(
community_followers, community_followers.community == Community.id
).add_columns(
func.count(distinct(community_followers.author)).label('followers_stat')
)
q = q.group_by(Community.id)
return q
def get_communities_from_query(q):
ccc = []
with local_session() as session:
for [c, shouts_stat, followers_stat] in session.execute(q):
c.stat = {
'shouts': shouts_stat,
'followers': followers_stat,
# "commented": commented_stat,
}
ccc.append(c)
return ccc
# for mutation.field("follow")
def community_follow(follower_id, slug):
try:
with local_session() as session:
community = session.query(Community).where(Community.slug == slug).first()
if isinstance(community, Community):
cf = CommunityAuthor(author=follower_id, community=community.id)
session.add(cf)
session.commit()
return True
except Exception as ex:
logger.debug(ex)
return False
# for mutation.field("unfollow")
def community_unfollow(follower_id, slug):
with local_session() as session:
flw = (
session.query(CommunityAuthor)
.join(Community, Community.id == CommunityAuthor.community)
.filter(and_(CommunityAuthor.author == follower_id, Community.slug == slug))
.first()
)
if flw:
session.delete(flw)
session.commit()
return True
return False
@query.field('get_communities_all')
async def get_communities_all(_, _info):
q = select(Community)
q = add_community_stat_columns(q)
return get_communities_from_query(q)
@query.field('get_community')
async def get_community(_, _info, slug: str):
q = select(Community).where(Community.slug == slug)
q = add_community_stat_columns(q)
communities = get_communities_from_query(q)
return communities[0]


@@ -1,163 +0,0 @@
from datetime import datetime, timezone
from sqlalchemy import and_
from sqlalchemy.orm import joinedload
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
@mutation.field("createShout")
@login_required
async def create_shout(_, info, inp):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
new_shout = Shout.create(**{
"title": inp.get("title"),
"subtitle": inp.get('subtitle'),
"lead": inp.get('lead'),
"description": inp.get('description'),
"body": inp.get("body", ''),
"layout": inp.get("layout"),
"authors": inp.get("authors", []),
"slug": inp.get("slug"),
"mainTopic": inp.get("mainTopic"),
"visibility": "owner",
"createdBy": auth.user_id
})
for topic in topics:
t = ShoutTopic.create(topic=topic.id, shout=new_shout.id)
session.add(t)
# NOTE: shout made by one first author
sa = ShoutAuthor.create(shout=new_shout.id, user=auth.user_id)
session.add(sa)
session.add(new_shout)
reactions_follow(auth.user_id, new_shout.id, True)
session.commit()
# TODO
# GitTask(inp, user.username, user.email, "new shout %s" % new_shout.slug)
if new_shout.slug is None:
new_shout.slug = f"draft-{new_shout.id}"
session.commit()
return {"shout": new_shout}
@mutation.field("updateShout")
@login_required
async def update_shout(_, info, shout_id, shout_input=None, publish=False):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
shout = session.query(Shout).options(
joinedload(Shout.authors),
joinedload(Shout.topics),
).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "shout not found"}
if shout.createdBy != auth.user_id:
return {"error": "access denied"}
updated = False
if shout_input is not None:
topics_input = shout_input["topics"]
del shout_input["topics"]
new_topics_to_link = []
new_topics = [topic_input for topic_input in topics_input if topic_input["id"] < 0]
for new_topic in new_topics:
del new_topic["id"]
created_new_topic = Topic.create(**new_topic)
session.add(created_new_topic)
new_topics_to_link.append(created_new_topic)
if len(new_topics) > 0:
session.commit()
for new_topic_to_link in new_topics_to_link:
created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=new_topic_to_link.id)
session.add(created_unlinked_topic)
existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
existing_topic_to_link_ids = [existing_topic_input["id"] for existing_topic_input in existing_topics_input
if existing_topic_input["id"] not in [topic.id for topic in shout.topics]]
for existing_topic_to_link_id in existing_topic_to_link_ids:
created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=existing_topic_to_link_id)
session.add(created_unlinked_topic)
topic_to_unlink_ids = [topic.id for topic in shout.topics
if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]]
shout_topics_to_remove = session.query(ShoutTopic).filter(
and_(
ShoutTopic.shout == shout.id,
ShoutTopic.topic.in_(topic_to_unlink_ids)
)
)
for shout_topic_to_remove in shout_topics_to_remove:
session.delete(shout_topic_to_remove)
shout_input["mainTopic"] = shout_input["mainTopic"]["slug"]
if shout_input["mainTopic"] == '':
del shout_input["mainTopic"]
shout.update(shout_input)
updated = True
if publish and shout.visibility == 'owner':
shout.visibility = "community"
shout.publishedAt = datetime.now(tz=timezone.utc)
updated = True
if updated:
shout.updatedAt = datetime.now(tz=timezone.utc)
session.commit()
# GitTask(inp, user.username, user.email, "update shout %s" % slug)
return {"shout": shout}
@mutation.field("deleteShout")
@login_required
async def delete_shout(_, info, shout_id):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "invalid shout id"}
if auth.user_id != shout.createdBy:
return {"error": "access denied"}
for author_id in shout.authors:
reactions_unfollow(author_id, shout_id)
shout.deletedAt = datetime.now(tz=timezone.utc)
session.commit()
return {}


@@ -1,11 +0,0 @@
from base.resolvers import query
from resolvers.auth import login_required
from migration.extract import extract_md
@login_required
@query.field("markdownBody")
def markdown_body(_, info, body: str):
body = extract_md(body)
return body

resolvers/editor.py (new file, 368 lines)

@@ -0,0 +1,368 @@
import time
from sqlalchemy import and_, desc, select
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.functions import coalesce
from orm.author import Author
from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.follower import reactions_follow, reactions_unfollow
from services.auth import login_required
from services.db import local_session
from services.diff import apply_diff, get_diff
from services.logger import root_logger as logger
from services.notify import notify_shout
from services.schema import mutation, query
from services.search import search_service
@query.field('get_my_shout')
@login_required
async def get_my_shout(_, info, shout_id: int):
with local_session() as session:
user_id = info.context.get('user_id', '')
if not user_id:
return {'error': 'unauthorized', 'shout': None}
shout = (
session.query(Shout)
.filter(Shout.id == shout_id)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(and_(Shout.deleted_at.is_(None), Shout.published_at.is_(None)))
.first()
)
if not shout:
return {'error': 'no shout found', 'shout': None}
if not bool(shout.published_at):
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
return {'error': 'no author found', 'shout': None}
roles = info.context.get('roles', [])
if 'editor' not in roles and not any(
x.id == author.id for x in shout.authors
):
return {'error': 'forbidden', 'shout': None}
return {'error': None, 'shout': shout}
@query.field('get_shouts_drafts')
@login_required
async def get_shouts_drafts(_, info):
user_id = info.context.get('user_id')
shouts = []
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
q = (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(and_(Shout.deleted_at.is_(None), Shout.created_by == author.id))
.filter(Shout.published_at.is_(None))
.order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
.group_by(Shout.id)
)
shouts = [shout for [shout] in session.execute(q).unique()]
return shouts
@mutation.field('create_shout')
@login_required
async def create_shout(_, info, inp):
user_id = info.context.get('user_id')
if user_id:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if isinstance(author, Author):
current_time = int(time.time())
slug = inp.get('slug') or f'draft-{current_time}'
shout_dict = {
'title': inp.get('title', ''),
'subtitle': inp.get('subtitle', ''),
'lead': inp.get('lead', ''),
'description': inp.get('description', ''),
'body': inp.get('body', ''),
'layout': inp.get('layout', 'article'),
'created_by': author.id,
'authors': [],
'slug': slug,
'topics': inp.get('topics', []),
'published_at': None,
'created_at': current_time, # Set created_at as Unix timestamp
}
same_slug_shout = (
session.query(Shout)
.filter(Shout.slug == shout_dict.get('slug'))
.first()
)
c = 1
while same_slug_shout is not None:
same_slug_shout = (
session.query(Shout)
.filter(Shout.slug == shout_dict.get('slug'))
.first()
)
c += 1
shout_dict['slug'] += f'-{c}'
new_shout = Shout(**shout_dict)
session.add(new_shout)
session.commit()
# NOTE: requesting new shout back
shout = session.query(Shout).where(Shout.slug == slug).first()
if shout:
sa = ShoutAuthor(shout=shout.id, author=author.id)
session.add(sa)
topics = (
session.query(Topic)
.filter(Topic.slug.in_(inp.get('topics', [])))
.all()
)
for topic in topics:
t = ShoutTopic(topic=topic.id, shout=shout.id)
session.add(t)
session.commit()
reactions_follow(author.id, shout.id, True)
# notifier
# await notify_shout(shout_dict, 'create')
return {'shout': shout}
return {'error': 'cant create shout' if user_id else 'unauthorized'}
def patch_main_topic(session, main_topic, shout):
with session.begin():
shout = (
session.query(Shout)
.options(joinedload(Shout.topics))
.filter(Shout.id == shout.id)
.first()
)
if not shout:
return
old_main_topic = (
session.query(ShoutTopic)
.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True)))
.first()
)
main_topic = session.query(Topic).filter(Topic.slug == main_topic).first()
if main_topic:
new_main_topic = (
session.query(ShoutTopic)
.filter(
and_(
ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id
)
)
.first()
)
if (
old_main_topic
and new_main_topic
and old_main_topic is not new_main_topic
):
ShoutTopic.update(old_main_topic, {'main': False})
session.add(old_main_topic)
ShoutTopic.update(new_main_topic, {'main': True})
session.add(new_main_topic)
def patch_topics(session, shout, topics_input):
new_topics_to_link = [
Topic(**new_topic) for new_topic in topics_input if new_topic['id'] < 0
]
if new_topics_to_link:
session.add_all(new_topics_to_link)
session.commit()
for new_topic_to_link in new_topics_to_link:
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=new_topic_to_link.id)
session.add(created_unlinked_topic)
existing_topics_input = [
topic_input for topic_input in topics_input if topic_input.get('id', 0) > 0
]
existing_topic_to_link_ids = [
existing_topic_input['id']
for existing_topic_input in existing_topics_input
if existing_topic_input['id'] not in [topic.id for topic in shout.topics]
]
for existing_topic_to_link_id in existing_topic_to_link_ids:
created_unlinked_topic = ShoutTopic(
shout=shout.id, topic=existing_topic_to_link_id
)
session.add(created_unlinked_topic)
topic_to_unlink_ids = [
topic.id
for topic in shout.topics
if topic.id not in [topic_input['id'] for topic_input in existing_topics_input]
]
session.query(ShoutTopic).filter(
and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids))
).delete(synchronize_session=False)
@mutation.field('update_shout')
@login_required
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
user_id = info.context.get('user_id')
roles = info.context.get('roles', [])
shout_input = shout_input or {}
current_time = int(time.time())
shout_id = shout_id or shout_input.get('id', shout_id)
slug = shout_input.get('slug')
if not user_id:
return {'error': 'unauthorized'}
try:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
logger.info(f'author for shout#{shout_id} detected {author.dict()}')
shout_by_id = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout_by_id:
return {'error': 'shout not found'}
if slug != shout_by_id.slug:
same_slug_shout = (
session.query(Shout).filter(Shout.slug == slug).first()
)
c = 1
while same_slug_shout is not None:
c += 1
slug = f'{slug}-{c}'
same_slug_shout = (
session.query(Shout).filter(Shout.slug == slug).first()
)
shout_input['slug'] = slug
if (
any(x.id == author.id for x in shout_by_id.authors)
or 'editor' in roles
):
# topics patch
topics_input = shout_input.get('topics')
if topics_input:
patch_topics(session, shout_by_id, topics_input)
del shout_input['topics']
# main topic
main_topic = shout_input.get('main_topic')
if main_topic:
patch_main_topic(session, main_topic, shout_by_id)
shout_input['updated_at'] = current_time
shout_input['published_at'] = current_time if publish else None
Shout.update(shout_by_id, shout_input)
session.add(shout_by_id)
session.commit()
shout_dict = shout_by_id.dict()
if not publish:
await notify_shout(shout_dict, 'update')
else:
await notify_shout(shout_dict, 'published')
# search service indexing
search_service.index(shout_by_id)
return {'shout': shout_dict, 'error': None}
else:
return {'error': 'access denied', 'shout': None}
except Exception as exc:
import traceback
traceback.print_exc()
logger.error(exc)
logger.error(f' cannot update with data: {shout_input}')
return {'error': 'cant update shout'}
@mutation.field('delete_shout')
@login_required
async def delete_shout(_, info, shout_id: int):
user_id = info.context.get('user_id')
roles = info.context.get('roles')
if user_id:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
return {'error': 'invalid shout id'}
if author and shout:
# NOTE: only owner and editor can mark the shout as deleted
if shout.created_by == author.id or 'editor' in roles:
for author_id in shout.authors:
reactions_unfollow(author_id, shout_id)
shout_dict = shout.dict()
shout_dict['deleted_at'] = int(time.time())
Shout.update(shout, shout_dict)
session.add(shout)
session.commit()
await notify_shout(shout_dict, 'delete')
return {'error': None}
else:
return {'error': 'access denied'}
def handle_proposing(session, r, shout):
if is_positive(r.kind):
replied_reaction = (
session.query(Reaction)
.filter(Reaction.id == r.reply_to, Reaction.shout == r.shout)
.first()
)
if (
replied_reaction
and replied_reaction.kind == ReactionKind.PROPOSE.value
and replied_reaction.quote
):
# patch all the proposals' quotes
proposals = (
session.query(Reaction)
.filter(
and_(
Reaction.shout == r.shout,
Reaction.kind == ReactionKind.PROPOSE.value,
)
)
.all()
)
for proposal in proposals:
if proposal.quote:
proposal_diff = get_diff(shout.body, proposal.quote)
proposal_dict = proposal.dict()
proposal_dict['quote'] = apply_diff(
replied_reaction.quote, proposal_diff
)
Reaction.update(proposal, proposal_dict)
session.add(proposal)
# patch shout's body
shout_dict = shout.dict()
shout_dict['body'] = replied_reaction.quote
Shout.update(shout, shout_dict)
session.add(shout)
session.commit()
if is_negative(r.kind):
# TODO: rejection logic
pass
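
handle_proposing depends on get_diff/apply_diff from services.diff, which this diff does not include. One self-contained way such a pair can work, sketched with difflib (the real module may differ):

import difflib

def get_diff(original: str, changed: str):
    # keep only the non-equal opcodes, together with the replacement text,
    # so the diff can later be replayed against `original`
    ops = []
    matcher = difflib.SequenceMatcher(None, original, changed)
    for tag, i1, i2, j1, j2 in matcher.get_opcodes():
        if tag != 'equal':
            ops.append((tag, i1, i2, changed[j1:j2]))
    return ops

def apply_diff(original: str, ops) -> str:
    # replay the recorded opcodes; 'insert'/'replace' contribute new text,
    # 'delete' skips a span of the original
    result, pos = [], 0
    for tag, i1, i2, text in ops:
        result.append(original[pos:i1])
        if tag in ('insert', 'replace'):
            result.append(text)
        pos = i2
    result.append(original[pos:])
    return ''.join(result)

assert apply_diff('hello world', get_diff('hello world', 'hello, new world')) == 'hello, new world'
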

resolvers/follower.py (new file, 298 lines)

@@ -0,0 +1,298 @@
import json
import time
from typing import List
from psycopg2.errors import UniqueViolation
from sqlalchemy import or_, select
from sqlalchemy.sql import and_
from orm.author import Author, AuthorFollower
from orm.community import Community
from orm.reaction import Reaction
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from resolvers.stat import author_follows_authors, author_follows_topics, get_with_stat
from services.auth import login_required
from services.cache import DEFAULT_FOLLOWS
from services.db import local_session
from services.logger import root_logger as logger
from services.notify import notify_follower
from services.rediscache import redis
from services.schema import mutation, query
@mutation.field('follow')
@login_required
async def follow(_, info, what, slug):
follows = []
error = None
user_id = info.context.get('user_id')
if not user_id:
return {'error': 'unauthorized'}
follower = local_session().query(Author).filter(Author.user == user_id).first()
if not follower:
return {'error': 'cant find follower'}
if what == 'AUTHOR':
error = author_follow(follower.id, slug)
if not error:
author = local_session().query(Author).where(Author.slug == slug).first()
if author:
await notify_follower(follower.dict(), author.id, 'follow')
elif what == 'TOPIC':
error = topic_follow(follower.id, slug)
elif what == 'COMMUNITY':
# FIXME: when more communities
follows = local_session().execute(select(Community))
elif what == 'SHOUT':
error = reactions_follow(follower.id, slug)
if error:
return {'error': error}
entity = what.lower()
follows_str = await redis.execute('GET', f'author:{follower.id}:follows-{entity}s')
if follows_str:
follows = json.loads(follows_str)
return { f'{entity}s': follows }
@mutation.field('unfollow')
@login_required
async def unfollow(_, info, what, slug):
follows = []
error = None
user_id = info.context.get('user_id')
if not user_id:
return {'error': 'unauthorized'}
follower = local_session().query(Author).filter(Author.user == user_id).first()
if not follower:
return {'error': 'follower profile is not found'}
if what == 'AUTHOR':
error = author_unfollow(follower.id, slug)
# NOTE: after triggers should update cached stats
if not error:
logger.info(f'@{follower.slug} unfollowed @{slug}')
author = local_session().query(Author).where(Author.slug == slug).first()
if author:
await notify_follower(follower.dict(), author.id, 'unfollow')
elif what == 'TOPIC':
error = topic_unfollow(follower.id, slug)
elif what == 'COMMUNITY':
follows = local_session().execute(select(Community))
elif what == 'SHOUT':
error = reactions_unfollow(follower.id, slug)
entity = what.lower()
follows_str = await redis.execute('GET', f'author:{follower.id}:follows-{entity}s')
if follows_str:
follows = json.loads(follows_str)
return {'error': error, f'{entity}s': follows}
async def get_follows_by_user_id(user_id: str):
if not user_id:
return {'error': 'unauthorized'}
author = await redis.execute('GET', f'user:{user_id}')
if isinstance(author, str):
author = json.loads(author)
if not author:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
return {'error': 'cant find author'}
author = author.dict()
last_seen = author.get('last_seen', 0) if isinstance(author, dict) else 0
follows = DEFAULT_FOLLOWS
day_old = int(time.time()) - last_seen > 24 * 60 * 60
if day_old:
author_id = author.get('id') if isinstance(author, dict) else None
if author_id:
topics = author_follows_topics(author_id)
authors = author_follows_authors(author_id)
follows = {
'topics': topics,
'authors': authors,
'communities': [
{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}
],
}
else:
logger.debug(f'getting follows for {user_id} from redis')
res = await redis.execute('GET', f'user:{user_id}:follows')
if isinstance(res, str):
follows = json.loads(res)
return follows
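
Calls like await redis.execute('GET', key) go through a thin wrapper in services/rediscache.py rather than a raw client; an assumed sketch of that wrapper (URL, decoding, and error handling are illustrative, the real implementation may differ):

import redis.asyncio as aredis
from redis.exceptions import RedisError

class RedisCache:
    def __init__(self, uri: str = 'redis://127.0.0.1:6379'):
        self._client = aredis.from_url(uri, decode_responses=True)

    async def execute(self, command: str, *args):
        # forwards raw commands ('GET', 'SET', 'SMEMBERS', ...) to the server
        try:
            return await self._client.execute_command(command, *args)
        except RedisError:
            return None

redis = RedisCache()
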
def topic_follow(follower_id, slug):
try:
with local_session() as session:
topic = session.query(Topic).where(Topic.slug == slug).one()
following = TopicFollower(topic=topic.id, follower=follower_id)
session.add(following)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already followed'
except Exception as exc:
logger.error(exc)
return exc
def topic_unfollow(follower_id, slug):
try:
with local_session() as session:
sub = (
session.query(TopicFollower)
.join(Topic)
.filter(and_(TopicFollower.follower == follower_id, Topic.slug == slug))
.first()
)
if sub:
session.delete(sub)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already unfollowed'
except Exception as ex:
logger.debug(ex)
return ex
def reactions_follow(author_id, shout_id, auto=False):
try:
with local_session() as session:
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
session.query(ShoutReactionsFollower)
.where(
and_(
ShoutReactionsFollower.follower == author_id,
ShoutReactionsFollower.shout == shout.id,
)
)
.first()
)
if not following:
following = ShoutReactionsFollower(
follower=author_id, shout=shout.id, auto=auto
)
session.add(following)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already followed'
except Exception as exc:
return exc
def reactions_unfollow(author_id, shout_id: int):
try:
with local_session() as session:
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
session.query(ShoutReactionsFollower)
.where(
and_(
ShoutReactionsFollower.follower == author_id,
ShoutReactionsFollower.shout == shout.id,
)
)
.first()
)
if following:
session.delete(following)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already unfollowed'
except Exception as ex:
import traceback
traceback.print_exc()
return ex
# for mutation.field("follow")
def author_follow(follower_id, slug):
try:
with local_session() as session:
author = session.query(Author).where(Author.slug == slug).one()
af = AuthorFollower(follower=follower_id, author=author.id)
session.add(af)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already followed'
except Exception as exc:
import traceback
traceback.print_exc()
return exc
# for mutation.field("unfollow")
def author_unfollow(follower_id, slug):
try:
with local_session() as session:
flw = (
session.query(AuthorFollower)
.join(Author, Author.id == AuthorFollower.author)
.filter(
and_(AuthorFollower.follower == follower_id, Author.slug == slug)
)
.first()
)
if flw:
session.delete(flw)
session.commit()
return None
except UniqueViolation as error:
logger.warn(error)
return 'already unfollowed'
except Exception as exc:
return exc
@query.field('get_topic_followers')
async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author]:
q = select(Author)
q = (
q.join(TopicFollower, TopicFollower.follower == Author.id)
.join(Topic, Topic.id == TopicFollower.topic)
.filter(or_(Topic.slug == slug, Topic.id == topic_id))
)
return get_with_stat(q)
@query.field('get_shout_followers')
def get_shout_followers(
_, _info, slug: str = '', shout_id: int | None = None
) -> List[Author]:
followers = []
with local_session() as session:
shout = None
if slug:
shout = session.query(Shout).filter(Shout.slug == slug).first()
elif shout_id:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if shout:
reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all()
for r in reactions:
followers.append(r.created_by)  # NOTE: Reaction.created_by holds an author id
return followers


@@ -1,124 +0,0 @@
import json
import uuid
from datetime import datetime, timezone
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
from validations.inbox import Chat
@mutation.field("updateChat")
@login_required
async def update_chat(_, info, chat_new: Chat):
"""
updating chat
requires info["request"].user.slug to be in chat["admins"]
:param info: GraphQLInfo with request
:param chat_new: dict with chat data
:return: Result { error chat }
"""
auth: AuthCredentials = info.context["request"].auth
chat_id = chat_new["id"]
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
return {
"error": "chat not exist"
}
chat = dict(json.loads(chat))
# TODO
if auth.user_id in chat["admins"]:
chat.update({
"title": chat_new.get("title", chat["title"]),
"description": chat_new.get("description", chat["description"]),
"updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
"admins": chat_new.get("admins", chat.get("admins") or []),
"users": chat_new.get("users", chat["users"])
})
await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat))
await redis.execute("COMMIT")
return {
"error": None,
"chat": chat
}
@mutation.field("createChat")
@login_required
async def create_chat(_, info, title="", members=[]):
auth: AuthCredentials = info.context["request"].auth
chat = {}
print('create_chat members: %r' % members)
if auth.user_id not in members:
members.append(int(auth.user_id))
# reuse a chat created before, if one exists
if len(members) == 2 and title == "":
chat = None
print(members)
chatset1 = await redis.execute("SMEMBERS", f"chats_by_user/{members[0]}")
if not chatset1:
chatset1 = set([])
print(chatset1)
chatset2 = await redis.execute("SMEMBERS", f"chats_by_user/{members[1]}")
if not chatset2:
chatset2 = set([])
print(chatset2)
chatset = chatset1.intersection(chatset2)
print(chatset)
for c in chatset:
chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
if chat:
chat = json.loads(chat)
if chat['title'] == "":
print('[inbox] createChat found old chat')
print(chat)
break
if chat:
return {
"chat": chat,
"error": "existed"
}
chat_id = str(uuid.uuid4())
chat = {
"id": chat_id,
"users": members,
"title": title,
"createdBy": auth.user_id,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
"updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
"admins": members if (len(members) == 2 and title == "") else []
}
for m in members:
await redis.execute("SADD", f"chats_by_user/{m}", chat_id)
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
await redis.execute("COMMIT")
return {
"error": None,
"chat": chat
}
@mutation.field("deleteChat")
@login_required
async def delete_chat(_, info, chat_id: str):
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"/chats/{chat_id}")
if chat:
chat = dict(json.loads(chat))
if auth.user_id in chat['admins']:
await redis.execute("DEL", f"chats/{chat_id}")
await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
await redis.execute("COMMIT")
else:
return {
"error": "chat not exist"
}


@@ -1,152 +0,0 @@
import json
# from datetime import datetime, timedelta, timezone
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.orm import local_session
from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors
from .unread import get_unread_counter
async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
''' load :limit messages for :chat_id with :offset '''
messages = []
message_ids = []
if ids:
message_ids += ids
try:
if limit:
mids = await redis.lrange(f"chats/{chat_id}/message_ids",
offset,
offset + limit
)
mids = [mid.decode("utf-8") for mid in mids]
message_ids += mids
except Exception as e:
print(e)
if message_ids:
message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
messages = await redis.mget(*message_keys)
messages = [json.loads(msg.decode('utf-8')) for msg in messages]
replies = []
for m in messages:
rt = m.get('replyTo')
if rt:
rt = int(rt)
if rt not in message_ids:
replies.append(rt)
if replies:
messages += await load_messages(chat_id, limit=0, ids=replies)
return messages
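# The Redis layout this loader assumes (key names as used above, values
# illustrative): message ids live in a list, message bodies in separate keys,
# so one page load is a single LRANGE plus one MGET:
#
#   mids = await redis.lrange("chats/c1/message_ids", 0, 5)
#   msgs = await redis.mget(*[f"chats/c1/messages/{m.decode()}" for m in mids])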
@query.field("loadChats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0):
""" load :limit chats of current user with :offset """
auth: AuthCredentials = info.context["request"].auth
cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
if cids:
cids = list(cids)[offset:offset + limit]
if not cids:
print('[inbox.load] no chats were found')
cids = []
onliners = await redis.execute("SMEMBERS", "users-online")
if not onliners:
onliners = []
chats = []
for cid in cids:
cid = cid.decode("utf-8")
c = await redis.execute("GET", "chats/" + cid)
if c:
c = dict(json.loads(c))
c['messages'] = await load_messages(cid, 5, 0)
c['unread'] = await get_unread_counter(cid, auth.user_id)
with local_session() as session:
c['members'] = []
for uid in c["users"]:
a = session.query(User).where(User.id == uid).first()
if a:
c['members'].append({
"id": a.id,
"slug": a.slug,
"userpic": a.userpic,
"name": a.name,
"lastSeen": a.lastSeen,
"online": a.id in onliners
})
chats.append(c)
return {
"chats": chats,
"error": None
}
@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
''' load :limit messages of :chat_id with :offset '''
auth: AuthCredentials = info.context["request"].auth
userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
userchats = [c.decode('utf-8') for c in userchats]
# print('[inbox] userchats: %r' % userchats)
if userchats:
# print('[inbox] loading messages by...')
messages = []
by_chat = by.get('chat')
if by_chat in userchats:
chat = await redis.execute("GET", f"chats/{by_chat}")
# print(chat)
if not chat:
return {
"messages": [],
"error": "chat not exist"
}
# everyone's messages in filtered chat
messages = await load_messages(by_chat, limit, offset)
return {
"messages": sorted(
list(messages),
key=lambda m: m['createdAt']
),
"error": None
}
else:
return {
"error": "Cannot access messages of this chat"
}
return {"messages": [], "error": None}
@query.field("loadRecipients")
async def load_recipients(_, info, limit=50, offset=0):
chat_users = []
auth: AuthCredentials = info.context["request"].auth
onliners = await redis.execute("SMEMBERS", "users-online")
if not onliners:
onliners = []
try:
chat_users += await followed_authors(auth.user_id)
limit = max(0, limit - len(chat_users))  # keep the limit non-negative
except Exception:
pass
with local_session() as session:
chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset)
members = []
for a in chat_users:
members.append({
"id": a.id,
"slug": a.slug,
"userpic": a.userpic,
"name": a.name,
"lastSeen": a.lastSeen,
"online": a.id in onliners
})
return {
"members": members,
"error": None
}


@@ -1,142 +0,0 @@
import asyncio
import json
from typing import Any
from datetime import datetime, timezone
from graphql.type import GraphQLResolveInfo
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
from services.following import FollowingManager, FollowingResult, Following
from validations.inbox import Message
@mutation.field("createMessage")
@login_required
async def create_message(_, info, chat: str, body: str, replyTo=None):
""" create message with :body for :chat_id replying to :replyTo optionally """
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat}")
if not chat:
return {
"error": "chat is not exist"
}
else:
chat = dict(json.loads(chat))
message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id")
message_id = int(message_id)
new_message = {
"chatId": chat['id'],
"id": message_id,
"author": auth.user_id,
"body": body,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp())
}
if replyTo:
new_message['replyTo'] = replyTo
chat['updatedAt'] = new_message['createdAt']
await redis.execute("SET", f"chats/{chat['id']}", json.dumps(chat))
print(f"[inbox] creating message {new_message}")
await redis.execute(
"SET", f"chats/{chat['id']}/messages/{message_id}", json.dumps(new_message)
)
await redis.execute("LPUSH", f"chats/{chat['id']}/message_ids", str(message_id))
await redis.execute("SET", f"chats/{chat['id']}/next_message_id", str(message_id + 1))
users = chat["users"]
for user_id in users:
await redis.execute(
"LPUSH", f"chats/{chat['id']}/unread/{user_id}", str(message_id)
)
)
result = FollowingResult("NEW", 'chat', new_message)
await FollowingManager.push('chat', result)
return {
"message": new_message,
"error": None
}
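# For reference, a sketch of what a single createMessage call writes,
# assuming chat id "c1", next_message_id 7 and users [1, 2]:
#
#   SET   chats/c1                  {...updated chat JSON...}
#   SET   chats/c1/messages/7       {...new message JSON...}
#   LPUSH chats/c1/message_ids      "7"
#   SET   chats/c1/next_message_id  "8"
#   LPUSH chats/c1/unread/1         "7"
#   LPUSH chats/c1/unread/2         "7"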
@mutation.field("updateMessage")
@login_required
async def update_message(_, info, chat_id: str, message_id: int, body: str):
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
return {"error": "chat not exist"}
message = await redis.execute("GET", f"chats/{chat_id}/messages/{message_id}")
if not message:
return {"error": "message not exist"}
message = json.loads(message)
if message["author"] != auth.user_id:
return {"error": "access denied"}
message["body"] = body
message["updatedAt"] = int(datetime.now(tz=timezone.utc).timestamp())
await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))
result = FollowingResult("UPDATED", 'chat', message)
await FollowingManager.push('chat', result)
return {
"message": message,
"error": None
}
@mutation.field("deleteMessage")
@login_required
async def delete_message(_, info, chat_id: str, message_id: int):
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
return {"error": "chat not exist"}
chat = json.loads(chat)
message = await redis.execute("GET", f"chats/{chat_id}/messages/{str(message_id)}")
if not message:
return {"error": "message not exist"}
message = json.loads(message)
if message["author"] != auth.user_id:
return {"error": "access denied"}
await redis.execute("LREM", f"chats/{chat_id}/message_ids", 0, str(message_id))
await redis.execute("DEL", f"chats/{chat_id}/messages/{str(message_id)}")
users = chat["users"]
for user_id in users:
await redis.execute("LREM", f"chats/{chat_id}/unread/{user_id}", 0, str(message_id))
result = FollowingResult("DELETED", 'chat', message)
await FollowingManager.push('chat', result)
return {}
@mutation.field("markAsRead")
@login_required
async def mark_as_read(_, info, chat_id: str, messages: list[int]):
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
return {"error": "chat not exist"}
chat = json.loads(chat)
users = set(chat["users"])
if auth.user_id not in users:
return {"error": "access denied"}
for message_id in messages:
await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id))
return {
"error": None
}


@@ -1,95 +0,0 @@
import json
from datetime import datetime, timezone, timedelta
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import query
from base.orm import local_session
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages
@query.field("searchRecipients")
@login_required
async def search_recipients(_, info, query: str, limit: int = 50, offset: int = 0):
result = []
# TODO: maybe redis scan?
auth: AuthCredentials = info.context["request"].auth
talk_before = await redis.execute("GET", f"chats_by_user/{auth.user_id}")
if talk_before:
talk_before = list(json.loads(talk_before))[offset:offset + limit]
for chat_id in talk_before:
members = await redis.execute("GET", f"chats/{chat_id}/users")
if members:
members = list(json.loads(members))
for member in members:
if member.startswith(query):
if member not in result:
result.append(member)
more_amount = limit - len(result)
with local_session() as session:
# followings
result += session.query(AuthorFollower.author).join(
User, User.id == AuthorFollower.follower
).where(
User.slug.startswith(query)
).offset(offset + len(result)).limit(more_amount)
more_amount = limit - len(result)
# followers
result += session.query(AuthorFollower.follower).join(
User, User.id == AuthorFollower.author
).where(
User.slug.startswith(query)
).offset(offset + len(result)).limit(more_amount)
return {
"members": list(result),
"error": None
}
@query.field("searchMessages")
@login_required
async def search_user_chats(by, messages, user_id: int, limit, offset):
cids = set()
cids |= set(await redis.execute("SMEMBERS", "chats_by_user/" + str(user_id)) or [])
messages = []
by_author = by.get('author')
if by_author:
# all chats the author participates in
cids |= set(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}") or [])
# author's messages in those chats
for c in cids:
c = c.decode('utf-8')
messages += [
m for m in await load_messages(c, limit, offset)
if m["author"] == by_author
]
body_like = by.get('body')
if body_like:
# search by message body in all user's chats
# FIXME: use redis scan here
for c in cids:
c = c.decode('utf-8')
messages += [
m for m in await load_messages(c, limit, offset)
if body_like in m["body"]
]
days = by.get("days")
if days:
# keep only messages created within the last :days
since = datetime.now(tz=timezone.utc) - timedelta(days=days)
messages = [
m for m in messages
if datetime.fromtimestamp(m["createdAt"], tz=timezone.utc) > since
]
return {
"messages": messages,
"error": None
}


@@ -1,22 +0,0 @@
from base.redis import redis
import json
async def get_unread_counter(chat_id: str, user_id: int):
try:
unread = await redis.execute("LLEN", f"chats/{chat_id}/unread/{user_id}")
return unread or 0
except Exception:
return 0
async def get_total_unread_counter(user_id: int):
chats = await redis.execute("GET", f"chats_by_user/{str(user_id)}")
unread = 0
if chats:
chats = json.loads(chats)
for chat_id in chats:
n = await get_unread_counter(chat_id, user_id)
unread += n
return unread
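# The unread counter is simply the length of the per-user list that
# createMessage LPUSHes to and markAsRead LREMs from, e.g.:
#
#   await redis.execute("LPUSH", "chats/c1/unread/1", "7")    # new message
#   await redis.execute("LLEN", "chats/c1/unread/1")          # -> 1
#   await redis.execute("LREM", "chats/c1/unread/1", 0, "7")  # read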


@@ -1,87 +0,0 @@
from sqlalchemy import select, desc, and_, update
from auth.credentials import AuthCredentials
from base.resolvers import query, mutation
from auth.authenticate import login_required
from base.orm import local_session
from orm import Notification
@query.field("loadNotifications")
@login_required
async def load_notifications(_, info, params=None):
if params is None:
params = {}
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
limit = params.get('limit', 50)
offset = params.get('offset', 0)
q = select(Notification).where(
Notification.user == user_id
).order_by(desc(Notification.createdAt)).limit(limit).offset(offset)
notifications = []
with local_session() as session:
total_count = session.query(Notification).where(
Notification.user == user_id
).count()
total_unread_count = session.query(Notification).where(
and_(
Notification.user == user_id,
Notification.seen == False
)
).count()
for [notification] in session.execute(q):
notification.type = notification.type.name
notifications.append(notification)
return {
"notifications": notifications,
"totalCount": total_count,
"totalUnreadCount": total_unread_count
}
@mutation.field("markNotificationAsRead")
@login_required
async def mark_notification_as_read(_, info, notification_id: int):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
notification = session.query(Notification).where(
and_(Notification.id == notification_id, Notification.user == user_id)
).one()
notification.seen = True
session.commit()
return {}
@mutation.field("markAllNotificationsAsRead")
@login_required
async def mark_all_notifications_as_read(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
statement = update(Notification).where(
and_(
Notification.user == user_id,
Notification.seen == False
)
).values(seen=True)
with local_session() as session:
try:
session.execute(statement)
session.commit()
except Exception as e:
session.rollback()
print(f"[mark_all_notifications_as_read] error: {str(e)}")
return {}

resolvers/notifier.py (new file, 327 lines)

@@ -0,0 +1,327 @@
import json
import time
from typing import List, Tuple
from sqlalchemy import and_, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import aliased
from sqlalchemy.sql import not_
from orm.author import Author
from orm.notification import (Notification, NotificationAction,
NotificationEntity, NotificationSeen)
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.logger import root_logger as logger
from services.schema import mutation, query
def query_notifications(
author_id: int, after: int = 0
) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
notification_seen_alias = aliased(NotificationSeen)
q = select(Notification, notification_seen_alias.viewer.label('seen')).outerjoin(
notification_seen_alias,
and_(
notification_seen_alias.viewer == author_id,
notification_seen_alias.notification == Notification.id,
),
)
if after:
q = q.filter(Notification.created_at > after)
q = q.group_by(notification_seen_alias.notification, Notification.created_at)
with local_session() as session:
total = (
session.query(Notification)
.filter(
and_(
Notification.action == NotificationAction.CREATE.value,
Notification.created_at > after,
)
)
.count()
)
unread = (
session.query(Notification)
.filter(
and_(
Notification.action == NotificationAction.CREATE.value,
Notification.created_at > after,
not_(Notification.seen),
)
)
.count()
)
notifications_result = session.execute(q)
notifications = []
for n, seen in notifications_result:
notifications.append((n, seen))
return total, unread, notifications
def group_notification(
thread, authors=None, shout=None, reactions=None, entity='follower', action='follow'
):
reactions = reactions or []
authors = authors or []
return {
'thread': thread,
'authors': authors,
'updated_at': int(time.time()),
'shout': shout,
'reactions': reactions,
'entity': entity,
'action': action,
}
def get_notifications_grouped(
author_id: int, after: int = 0, limit: int = 10, offset: int = 0
):
"""
Retrieves notifications for a given author.
Args:
author_id (int): The ID of the author for whom notifications are retrieved.
after (int, optional): If provided, only notifications created after this timestamp are considered.
limit (int, optional): The maximum number of groups to retrieve.
offset (int, optional): offset
Returns:
Dict[str, NotificationGroup], int, int: A dictionary where keys are thread IDs
and values are NotificationGroup objects, unread and total amounts.
This function queries the database to retrieve notifications for the specified author, considering optional filters.
The result is a dictionary where each key is a thread ID, and the corresponding value is a NotificationGroup
containing information about the notifications within that thread.
NotificationGroup structure:
{
entity: str, # Type of entity (e.g., 'reaction', 'shout', 'follower').
updated_at: int, # Timestamp of the latest update in the thread.
shout: Optional[NotificationShout]
reactions: List[int], # List of reaction ids within the thread.
authors: List[NotificationAuthor], # List of authors involved in the thread.
}
"""
total, unread, notifications = query_notifications(author_id, after)
groups_by_thread = {}
groups_amount = 0
for notification, seen in notifications:
if (groups_amount + offset) >= limit:
break
payload = json.loads(str(notification.payload))
if str(notification.entity) == NotificationEntity.SHOUT.value:
shout = payload
shout_id = shout.get('id')
author_id = shout.get('created_by')
thread_id = f'shout-{shout_id}'
with local_session() as session:
author = session.query(Author).filter(Author.id == author_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if author and shout:
author = author.dict()
shout = shout.dict()
group = group_notification(
thread_id,
shout=shout,
authors=[author],
action=str(notification.action),
entity=str(notification.entity),
)
groups_by_thread[thread_id] = group
groups_amount += 1
elif str(notification.entity) == NotificationEntity.REACTION.value:
reaction = payload
if not isinstance(reaction, dict):
raise ValueError('reaction data is not consistent')
shout_id = reaction.get('shout')
author_id = reaction.get('created_by', 0)
if shout_id and author_id:
with local_session() as session:
author = (
session.query(Author).filter(Author.id == author_id).first()
)
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if shout and author:
author = author.dict()
shout = shout.dict()
reply_id = reaction.get('reply_to')
thread_id = f'shout-{shout_id}'
if reply_id and reaction.get('kind', '').lower() == 'comment':
thread_id += f':{reply_id}'
existing_group = groups_by_thread.get(thread_id)
if existing_group:
existing_group['seen'] = False
existing_group['authors'].append(author)
existing_group['reactions'] = (
existing_group['reactions'] or []
)
existing_group['reactions'].append(reaction)
groups_by_thread[thread_id] = existing_group
else:
group = group_notification(
thread_id,
authors=[author],
shout=shout,
reactions=[reaction],
entity=str(notification.entity),
action=str(notification.action),
)
if group:
groups_by_thread[thread_id] = group
groups_amount += 1
elif str(notification.entity) == 'follower':
thread_id = 'followers'
follower = payload  # payload was already JSON-decoded above
group = groups_by_thread.get(thread_id)
if group:
if str(notification.action) == 'follow':
group['authors'].append(follower)
elif str(notification.action) == 'unfollow':
follower_id = follower.get('id')
for author in group['authors']:
if author.get('id') == follower_id:
group['authors'].remove(author)
break
else:
group = group_notification(
thread_id,
authors=[follower],
entity=str(notification.entity),
action=str(notification.action),
)
groups_amount += 1
groups_by_thread[thread_id] = group
return groups_by_thread, unread, total
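# An illustrative grouped result (ids and timestamps invented): one thread
# per shout plus a single 'followers' thread, as described in the docstring:
#
#   {
#       'shout-12': {
#           'thread': 'shout-12', 'entity': 'reaction', 'action': 'create',
#           'updated_at': 1712345678, 'shout': {...}, 'reactions': [{...}],
#           'authors': [{...}],
#       },
#       'followers': {
#           'thread': 'followers', 'entity': 'follower', 'action': 'follow',
#           'updated_at': 1712345678, 'shout': None, 'reactions': [],
#           'authors': [{...}],
#       },
#   }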
@query.field('load_notifications')
@login_required
async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
author_id = info.context.get('author_id')
error = None
total = 0
unread = 0
notifications = []
try:
if author_id:
groups, unread, total = get_notifications_grouped(author_id, after, limit, offset)
notifications = sorted(
groups.values(), key=lambda group: group['updated_at'], reverse=True
)
except Exception as e:
error = e
logger.error(e)
return {
'notifications': notifications,
'total': total,
'unread': unread,
'error': error,
}
@mutation.field('notification_mark_seen')
@login_required
async def notification_mark_seen(_, info, notification_id: int):
author_id = info.context.get('author_id')
if author_id:
with local_session() as session:
try:
ns = NotificationSeen(notification=notification_id, viewer=author_id)
session.add(ns)
session.commit()
except SQLAlchemyError as e:
session.rollback()
logger.error(f'seen mutation failed: {e}')
return {'error': 'cant mark as read'}
return {'error': None}
@mutation.field('notifications_seen_after')
@login_required
async def notifications_seen_after(_, info, after: int):
# TODO: use latest loaded notification_id as input offset parameter
error = None
try:
author_id = info.context.get('author_id')
if author_id:
with local_session() as session:
nnn = (
session.query(Notification)
.filter(and_(Notification.created_at > after))
.all()
)
for n in nnn:
try:
ns = NotificationSeen(notification=n.id, viewer=author_id)
session.add(ns)
session.commit()
except SQLAlchemyError:
session.rollback()
except Exception as e:
print(e)
error = 'cant mark as read'
return {'error': error}
@mutation.field('notifications_seen_thread')
@login_required
async def notifications_seen_thread(_, info, thread: str, after: int):
error = None
author_id = info.context.get('author_id')
if author_id:
shout_id, _, reply_to_id = thread.partition(':')  # the ':<reply_id>' part is optional
with local_session() as session:
# TODO: handle new follower and new shout notifications
new_reaction_notifications = (
session.query(Notification)
.filter(
Notification.action == 'create',
Notification.entity == 'reaction',
Notification.created_at > after,
)
.all()
)
removed_reaction_notifications = (
session.query(Notification)
.filter(
Notification.action == 'delete',
Notification.entity == 'reaction',
Notification.created_at > after,
)
.all()
)
exclude = set()
for nr in removed_reaction_notifications:
reaction = json.loads(str(nr.payload))
reaction_id = reaction.get('id')
exclude.add(reaction_id)
for n in new_reaction_notifications:
reaction = json.loads(str(n.payload))
reaction_id = reaction.get('id')
if (
reaction_id not in exclude
and reaction.get('shout') == shout_id
and reaction.get('reply_to') == reply_to_id
):
try:
ns = NotificationSeen(notification=n.id, viewer=author_id)
session.add(ns)
session.commit()
except Exception as e:
logger.warning(e)
session.rollback()
else:
error = 'You are not logged in'
return {'error': error}

resolvers/rating.py (new file, 261 lines)

@@ -0,0 +1,261 @@
from sqlalchemy import and_, case, func, select, true
from sqlalchemy.orm import aliased
from orm.author import Author, AuthorRating
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from services.auth import login_required
from services.db import local_session
from services.schema import mutation
@mutation.field('rate_author')
@login_required
async def rate_author(_, info, rated_slug, value):
user_id = info.context['user_id']
with local_session() as session:
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
rater = session.query(Author).filter(Author.user == user_id).first()
if rater and rated_author:
rating: AuthorRating = (
session.query(AuthorRating)
.filter(
and_(
AuthorRating.rater == rater.id,
AuthorRating.author == rated_author.id,
)
)
.first()
)
if rating:
rating.plus = value > 0
session.add(rating)
session.commit()
return {}
else:
try:
rating = AuthorRating(
rater=rater.id, author=rated_author.id, plus=value > 0
)
session.add(rating)
session.commit()
except Exception as err:
return {'error': err}
return {}
def count_author_comments_rating(session, author_id) -> int:
replied_alias = aliased(Reaction)
replies_likes = (
session.query(replied_alias)
.join(Reaction, replied_alias.id == Reaction.reply_to)
.where(
and_(
replied_alias.created_by == author_id,
replied_alias.kind == ReactionKind.COMMENT.value,
)
)
.filter(Reaction.kind == ReactionKind.LIKE.value)
.count()
) or 0
replies_dislikes = (
session.query(replied_alias)
.join(Reaction, replied_alias.id == Reaction.reply_to)
.where(
and_(
replied_alias.created_by == author_id,
replied_alias.kind == ReactionKind.COMMENT.value,
)
)
.filter(Reaction.kind == ReactionKind.DISLIKE.value)
.count()
) or 0
return replies_likes - replies_dislikes
def count_author_shouts_rating(session, author_id) -> int:
shouts_likes = (
session.query(Reaction, Shout)
.join(Shout, Shout.id == Reaction.shout)
.filter(
and_(
Shout.authors.any(id=author_id),
Reaction.kind == ReactionKind.LIKE.value,
)
)
.count()
or 0
)
shouts_dislikes = (
session.query(Reaction, Shout)
.join(Shout, Shout.id == Reaction.shout)
.filter(
and_(
Shout.authors.any(id=author_id),
Reaction.kind == ReactionKind.DISLIKE.value,
)
)
.count()
or 0
)
return shouts_likes - shouts_dislikes
def get_author_rating_old(session, author: Author):
likes_count = (
session.query(AuthorRating)
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True)))
.count()
)
dislikes_count = (
session.query(AuthorRating)
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_not(True)))
.count()
)
return likes_count - dislikes_count
def get_author_rating_shouts(session, author: Author) -> int:
q = (
select(
func.coalesce(
func.sum(
case(
(Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
),
0,
).label('shouts_rating')
)
.select_from(Shout)
.where(Shout.authors.any(id=author.id))
.outerjoin(
Reaction,
and_(
Reaction.reply_to.is_(None),
Reaction.shout == Shout.id,
Reaction.deleted_at.is_(None),
),
)
)
result = session.execute(q).scalar()
return result
def get_author_rating_comments(session, author: Author) -> int:
replied_comment = aliased(Reaction)
q = (
select(
func.coalesce(
func.sum(
case(
(Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
),
0,
).label('comments_rating')
)
.select_from(replied_comment)
.where(
and_(
replied_comment.kind == ReactionKind.COMMENT.value,
replied_comment.created_by == author.id,
)
)
.outerjoin(
Reaction,
and_(
Reaction.kind.in_(
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
),
Reaction.reply_to == replied_comment.id,
Reaction.deleted_at.is_(None),
),
)
)
result = session.execute(q).scalar()
return result
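# Taken together, the helpers above yield three independent likes-minus-dislikes
# components: old karma rows, reactions to the author's shouts, and reactions
# to the author's comments. Illustrative numbers: 10 pluses / 3 minuses in
# AuthorRating, 7 likes / 2 dislikes on shouts and 4 likes / 1 dislike on
# comments give component ratings of 7, 5 and 3 respectively.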
def add_author_rating_columns(q, group_list):
# NOTE: method is not used
# old karma
q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
q = q.add_columns(
func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label('rating')
)
# by shouts rating
shout_reaction = aliased(Reaction)
shouts_rating_subq = (
select(
Author.id,
func.coalesce(
func.sum(
case(
(shout_reaction.kind == ReactionKind.LIKE.value, 1),
(shout_reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
)
).label('shouts_rating'),
)
.select_from(shout_reaction)
.outerjoin(Shout, Shout.authors.any(id=Author.id))
.outerjoin(
shout_reaction,
and_(
shout_reaction.reply_to.is_(None),
shout_reaction.shout == Shout.id,
shout_reaction.deleted_at.is_(None),
),
)
.group_by(Author.id)
.subquery()
)
q = q.outerjoin(shouts_rating_subq, Author.id == shouts_rating_subq.c.id)
q = q.add_columns(shouts_rating_subq.c.shouts_rating)
group_list = [shouts_rating_subq.c.shouts_rating]
# by comments
replied_comment = aliased(Reaction)
reaction_2 = aliased(Reaction)
comments_subq = (
select(
Author.id,
func.coalesce(
func.sum(
case(
(reaction_2.kind == ReactionKind.LIKE.value, 1),
(reaction_2.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
)
).label('comments_rating'),
)
.select_from(reaction_2)
.outerjoin(
replied_comment,
and_(
replied_comment.kind == ReactionKind.COMMENT.value,
replied_comment.created_by == Author.id,
reaction_2.kind.in_(
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
),
reaction_2.reply_to == replied_comment.id,
reaction_2.deleted_at.is_(None),
),
)
.group_by(Author.id)
.subquery()
)
q = q.outerjoin(comments_subq, Author.id == comments_subq.c.id)
q = q.add_columns(comments_subq.c.comments_rating)
group_list.extend([comments_subq.c.comments_rating])
return q, group_list

resolvers/reaction.py (new file, 512 lines)

@@ -0,0 +1,512 @@
import time
from typing import List
from resolvers.stat import update_author_stat
from sqlalchemy import and_, asc, case, desc, func, select, text
from sqlalchemy.orm import aliased, joinedload
from sqlalchemy.sql import union
from orm.author import Author
from orm.rating import PROPOSAL_REACTIONS, RATING_REACTIONS, is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from resolvers.editor import handle_proposing
from resolvers.follower import reactions_follow
from services.auth import add_user_role, login_required
from services.db import local_session
from services.logger import root_logger as logger
from services.notify import notify_reaction
from services.schema import mutation, query
from services.viewed import ViewedStorage
def add_reaction_stat_columns(q, aliased_reaction):
q = q.outerjoin(aliased_reaction).add_columns(
func.sum(aliased_reaction.id).label('reacted_stat'),
func.sum(
case((aliased_reaction.kind == str(ReactionKind.COMMENT.value), 1), else_=0)
).label('comments_stat'),
func.sum(
case((aliased_reaction.kind == str(ReactionKind.LIKE.value), 1), else_=0)
).label('likes_stat'),
func.sum(
case((aliased_reaction.kind == str(ReactionKind.DISLIKE.value), 1), else_=0)
).label('dislikes_stat'),
func.max(
case(
(aliased_reaction.kind != str(ReactionKind.COMMENT.value), None),
else_=aliased_reaction.created_at,
)
).label('last_comment_stat'),
)
return q
def is_featured_author(session, author_id):
"""checks if author has at least one featured publication"""
return (
session.query(Shout)
.where(Shout.authors.any(id=author_id))
.filter(and_(Shout.featured_at.is_not(None), Shout.deleted_at.is_(None)))
.count()
> 0
)
def check_to_feature(session, approver_id, reaction):
"""set shout to public if publicated approvers amount > 4"""
if not reaction.reply_to and is_positive(reaction.kind):
if is_featured_author(session, approver_id):
approvers = [approver_id]
# now count how many approvers are voted already
reacted_readers = (
session.query(Reaction).where(Reaction.shout == reaction.shout).all()
)
for reacted_reader in reacted_readers:
# reacted_readers are Reaction rows; their author is created_by
if is_featured_author(session, reacted_reader.created_by):
approvers.append(reacted_reader.created_by)
if len(approvers) > 4:
return True
return False
def check_to_unfeature(session, rejecter_id, reaction):
"""unfeature any shout if 20% of reactions are negative"""
if not reaction.reply_to and is_negative(reaction.kind):
if is_featured_author(session, rejecter_id):
reactions = (
session.query(Reaction)
.where(
and_(
Reaction.shout == reaction.shout,
Reaction.kind.in_(RATING_REACTIONS),
)
)
.all()
)
rejects = 0
for r in reactions:
# count negative votes cast by featured authors only
if is_featured_author(session, r.created_by) and is_negative(r.kind):
rejects += 1
if rejects and len(reactions) / rejects < 5:
return True
return False
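# The 20% threshold reads as: unfeature once rejects / len(reactions) > 1/5,
# i.e. len(reactions) / rejects < 5. Example: 3 negative votes from featured
# authors among 12 rating reactions gives 12 / 3 = 4 < 5, so the shout is
# unfeatured.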
async def set_featured(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.featured_at = int(time.time())
Shout.update(s, {'featured_at': int(time.time())})
author = session.query(Author).filter(Author.id == s.created_by).first()
if author:
await add_user_role(str(author.user))
session.add(s)
session.commit()
def set_unfeatured(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
Shout.update(s, {'featured_at': None})
session.add(s)
session.commit()
async def _create_reaction(session, shout, author, reaction):
r = Reaction(**reaction)
session.add(r)
session.commit()
rdict = r.dict()
# recount the author's cached comments counter
if r.kind == ReactionKind.COMMENT.value:
await update_author_stat(author)
# collaborative editing
if (
rdict.get('reply_to')
and r.kind in PROPOSAL_REACTIONS
and author.id in shout.authors
):
handle_proposing(session, r, shout)
# rating and self-regulation
if r.kind in RATING_REACTIONS:
# self-regulation mechanics
if check_to_unfeature(session, author.id, r):
set_unfeatured(session, shout.id)
elif check_to_feature(session, author.id, r):
await set_featured(session, shout.id)
# follow if liked
if r.kind == ReactionKind.LIKE.value:
try:
# reactions auto-following
reactions_follow(author.id, reaction['shout'], True)
except Exception:
pass
rdict['shout'] = shout.dict()
rdict['created_by'] = author.id
rdict['stat'] = {'commented': 0, 'reacted': 0, 'rating': 0}
# notifications call
await notify_reaction(rdict, 'create')
return rdict
def prepare_new_rating(reaction: dict, shout_id: int, session, author: Author):
kind = reaction.get('kind')
opposite_kind = (
ReactionKind.DISLIKE.value if is_positive(kind) else ReactionKind.LIKE.value
)
q = select(Reaction).filter(
and_(
Reaction.shout == shout_id,
Reaction.created_by == author.id,
Reaction.kind.in_(RATING_REACTIONS),
)
)
reply_to = reaction.get('reply_to')
if reply_to and isinstance(reply_to, int):
q = q.filter(Reaction.reply_to == reply_to)
rating_reactions = session.execute(q).scalars().all()
same_rating = any(
r.created_by == author.id and r.kind == kind for r in rating_reactions
)
opposite_rating = any(
r.created_by == author.id and r.kind == opposite_kind
for r in rating_reactions
)
if same_rating:
return {'error': "You can't rate the same thing twice"}
elif opposite_rating:
return {'error': 'Remove opposite vote first'}
elif any(r.created_by == author.id for r in rating_reactions):
return {'error': "You can't rate your own thing"}
return
@mutation.field('create_reaction')
@login_required
async def create_reaction(_, info, reaction):
logger.debug(f'{info.context} for {reaction}')
user_id = info.context.get('user_id')
shout_id = reaction.get('shout')
if not shout_id:
return {'error': 'Shout ID is required to create a reaction.'}
try:
with local_session() as session:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
author = session.query(Author).filter(Author.user == user_id).first()
if shout and author:
reaction['created_by'] = author.id
kind = reaction.get('kind')
shout_id = shout.id
if not kind and isinstance(reaction.get('body'), str):
kind = ReactionKind.COMMENT.value
if not kind:
return {'error': 'cannot create reaction without a kind'}
if kind in RATING_REACTIONS:
error_result = prepare_new_rating(reaction, shout_id, session, author)
if error_result:
return error_result
rdict = await _create_reaction(session, shout, author, reaction)
# TODO: call recount ratings periodically
return {'reaction': rdict}
except Exception as e:
import traceback
traceback.print_exc()
logger.error(f'{type(e).__name__}: {e}')
return {'error': 'Cannot create reaction.'}
@mutation.field('update_reaction')
@login_required
async def update_reaction(_, info, reaction):
logger.debug(f'{info.context} for {reaction}')
user_id = info.context.get('user_id')
roles = info.context.get('roles')
rid = reaction.get('id')
if rid and isinstance(rid, int) and user_id and roles:
del reaction['id']
with local_session() as session:
reaction_query = select(Reaction).filter(Reaction.id == rid)
aliased_reaction = aliased(Reaction)
reaction_query = add_reaction_stat_columns(reaction_query, aliased_reaction)
reaction_query = reaction_query.group_by(Reaction.id)
try:
[r, reacted_stat, commented_stat, likes_stat, dislikes_stat, _l] = (
session.execute(reaction_query).unique().first()
)
if not r:
return {'error': 'invalid reaction id'}
author = session.query(Author).filter(Author.user == user_id).first()
if author:
if r.created_by != author.id and 'editor' not in roles:
return {'error': 'access denied'}
body = reaction.get('body')
if body:
r.body = body
r.updated_at = int(time.time())
if r.kind != reaction.get('kind', r.kind):
# Opinion-change detection could be implemented here
pass
Reaction.update(r, reaction)
session.add(r)
session.commit()
r.stat = {
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
}
await notify_reaction(r.dict(), 'update')
return {'reaction': r}
else:
return {'error': 'not authorized'}
except Exception:
import traceback
traceback.print_exc()
return {'error': 'cannot create reaction'}
@mutation.field('delete_reaction')
@login_required
async def delete_reaction(_, info, reaction_id: int):
logger.debug(f'{info.context} for {reaction_id}')
user_id = info.context.get('user_id')
roles = info.context.get('roles', [])
if user_id:
with local_session() as session:
try:
author = session.query(Author).filter(Author.user == user_id).one()
r = session.query(Reaction).filter(Reaction.id == reaction_id).one()
if r.created_by != author.id and 'editor' not in roles:
return {'error': 'access denied'}
logger.debug(f'{user_id} user removing his #{reaction_id} reaction')
reaction_dict = r.dict()
session.delete(r)
session.commit()
# обновление счетчика комментариев в кеше
if r.kind == ReactionKind.COMMENT.value:
await update_author_stat(author)
await notify_reaction(reaction_dict, 'delete')
return {'error': None, 'reaction': reaction_dict}
except Exception as exc:
return {'error': f'cannot delete reaction: {exc}'}
return {'error': 'cannot delete reaction'}
def apply_reaction_filters(by, q):
shout_slug = by.get('shout', None)
if shout_slug:
q = q.filter(Shout.slug == shout_slug)
elif by.get('shouts'):
q = q.filter(Shout.slug.in_(by.get('shouts', [])))
created_by = by.get('created_by', None)
if created_by:
q = q.filter(Author.id == created_by)
topic = by.get('topic', None)
if isinstance(topic, int):
q = q.filter(Shout.topics.any(id=topic))
if by.get('comment', False):
q = q.filter(Reaction.kind == ReactionKind.COMMENT.value)
if by.get('rating', False):
q = q.filter(Reaction.kind.in_(RATING_REACTIONS))
by_search = by.get('search', '')
if len(by_search) > 2:
q = q.filter(Reaction.body.ilike(f'%{by_search}%'))
after = by.get('after', None)
if isinstance(after, int):
q = q.filter(Reaction.created_at > after)
return q
@query.field('load_reactions_by')
async def load_reactions_by(_, info, by, limit=50, offset=0):
"""
:param info: graphql meta
:param by: {
:shout - filter by slug
:shouts - filter by shout slug list
:created_by - to filter by author
:topic - to filter by topic
:search - to search by reactions' body
:comment - true to keep comments only
:after - unix timestamp; only reactions created after it
:sort - a fieldname to sort desc by default
}
:param limit: int amount of shouts
:param offset: int offset in this order
:return: Reaction[]
"""
q = (
select(Reaction, Author, Shout)
.select_from(Reaction)
.join(Author, Reaction.created_by == Author.id)
.join(Shout, Reaction.shout == Shout.id)
)
# calculate counters
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
# filter
q = apply_reaction_filters(by, q)
q = q.where(Reaction.deleted_at.is_(None))
# group by
q = q.group_by(Reaction.id, Author.id, Shout.id, aliased_reaction.id)
# order by
order_stat = by.get('sort', '').lower() # 'like' | 'dislike' | 'newest' | 'oldest'
order_by_stmt = desc(Reaction.created_at)
if order_stat == 'oldest':
order_by_stmt = asc(Reaction.created_at)
elif order_stat.endswith('like'):
order_by_stmt = desc(f'{order_stat}s_stat')
q = q.order_by(order_by_stmt)
# pagination
q = q.limit(limit).offset(offset)
reactions = set()
with local_session() as session:
result_rows = session.execute(q)
for [
reaction,
author,
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
_last_comment,
] in result_rows:
reaction.created_by = author
reaction.shout = shout
reaction.stat = {
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'reacted': reacted_stat,
'commented': commented_stat,
}
reactions.add(reaction)  # a set deduplicates reactions across joined rows
return reactions
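# An illustrative call, exercising the filter keys handled by
# apply_reaction_filters (slug and values invented):
#
#   await load_reactions_by(None, info, by={
#       'shout': 'some-slug',  # reactions to one shout
#       'comment': True,       # comments only
#       'sort': 'like',        # orders by likes_stat desc
#   }, limit=20)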
async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[Shout]:
shouts: List[Shout] = []
with local_session() as session:
author = session.query(Author).filter(Author.id == follower_id).first()
if author:
# Shouts where follower is the author
q1 = (
select(Shout)
.outerjoin(
Reaction,
and_(
Reaction.shout == Shout.id, Reaction.created_by == follower_id
),
)
.outerjoin(Author, Shout.authors.any(id=follower_id))
.options(joinedload(Shout.reactions), joinedload(Shout.authors))
)
q1 = add_reaction_stat_columns(q1, aliased(Reaction))
q1 = q1.filter(Author.id == follower_id).group_by(Shout.id)
# Shouts where follower reacted
q2 = (
select(Shout)
.join(Reaction, Reaction.shout == Shout.id)
.options(joinedload(Shout.reactions), joinedload(Shout.authors))
.filter(Reaction.created_by == follower_id)
.group_by(Shout.id)
)
q2 = add_reaction_stat_columns(q2, aliased(Reaction))
# Sort shouts by the `last_comment` field
combined_query = (
union(q1, q2)
.order_by(desc(text('last_comment_stat')))
.limit(limit)
.offset(offset)
)
results = session.execute(combined_query).scalars()
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in results:
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'reacted': reacted_stat,
'commented': commented_stat,
'last_comment': last_comment,
}
shouts.append(shout)
return shouts
@query.field('load_shouts_followed')
@login_required
async def load_shouts_followed(_, info, limit=50, offset=0) -> List[Shout]:
user_id = info.context['user_id']
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
try:
author_id: int = author.dict()['id']
shouts = await reacted_shouts_updates(author_id, limit, offset)
return shouts
except Exception as error:
logger.debug(error)
return []

resolvers/reader.py (new file, 472 lines)

@@ -0,0 +1,472 @@
from sqlalchemy import bindparam, distinct, or_, text
from sqlalchemy.orm import aliased, joinedload
from sqlalchemy.sql.expression import (and_, asc, case, desc, func, nulls_last,
select)
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.reaction import add_reaction_stat_columns
from resolvers.topic import get_topics_random
from services.auth import login_required
from services.db import local_session
from services.logger import root_logger as logger
from services.schema import query
from services.search import search_text
from services.viewed import ViewedStorage
def query_shouts():
return (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
def filter_my(info, session, q):
reader_id = None
user_id = None
if isinstance(info.context, dict):
user_id = info.context.get('user_id')
if user_id:
reader = session.query(Author).filter(Author.user == user_id).first()
if reader:
reader_followed_authors = select(AuthorFollower.author).where(
AuthorFollower.follower == reader.id
)
reader_followed_topics = select(TopicFollower.topic).where(
TopicFollower.follower == reader.id
)
subquery = (
select(Shout.id)
.where(Shout.id == ShoutAuthor.shout)
.where(Shout.id == ShoutTopic.shout)
.where(
(ShoutAuthor.author.in_(reader_followed_authors))
| (ShoutTopic.topic.in_(reader_followed_topics))
)
)
q = q.filter(Shout.id.in_(subquery))
reader_id = reader.id
return q, reader_id
def apply_filters(q, filters, author_id=None):
if isinstance(filters, dict):
if filters.get('reacted'):
q = q.join(Reaction, Reaction.created_by == author_id)
by_featured = filters.get('featured')
if by_featured:
q = q.filter(Shout.featured_at.is_not(None))
by_layouts = filters.get('layouts')
if by_layouts:
q = q.filter(Shout.layout.in_(by_layouts))
by_author = filters.get('author')
if by_author:
q = q.filter(Shout.authors.any(slug=by_author))
by_topic = filters.get('topic')
if by_topic:
q = q.filter(Shout.topics.any(slug=by_topic))
by_after = filters.get('after')
if by_after:
ts = int(by_after)
q = q.filter(Shout.created_at > ts)
return q
@query.field('get_shout')
async def get_shout(_, info, slug: str):
with local_session() as session:
q = query_shouts()
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
q = q.filter(Shout.slug == slug)
q = q.group_by(Shout.id)
results = session.execute(q).first()
if results:
[
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] = results
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment,
}
for author_caption in (
session.query(ShoutAuthor)
.join(Shout)
.where(
and_(
Shout.slug == slug,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
):
for author in shout.authors:
if author.id == author_caption.author:
author.caption = author_caption.caption
main_topic = (
session.query(Topic.slug)
.join(
ShoutTopic,
and_(
ShoutTopic.topic == Topic.id,
ShoutTopic.shout == shout.id,
ShoutTopic.main.is_(True),
),
)
.first()
)
if main_topic:
shout.main_topic = main_topic[0]
return shout
@query.field('load_shouts_by')
async def load_shouts_by(_, _info, options):
"""
:param options: {
filters: {
layouts: ['audio', 'video', ..],
reacted: True,
featured: True, // filter featured-only
author: 'discours',
topic: 'culture',
after: 1234567 // unixtime
}
offset: 0
limit: 50
order_by: 'likes' | 'shouts' | 'followers' | 'comments' | 'last_comment'
order_by_desc: true
}
:return: Shout[]
"""
# base
q = query_shouts()
# stats
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
# filters
filters = options.get('filters', {})
q = apply_filters(q, filters)
# group
q = q.group_by(Shout.id)
# order
order_by = Shout.featured_at if filters.get('featured') else Shout.published_at
order_str = options.get('order_by')
if order_str in ['likes', 'shouts', 'followers', 'comments', 'last_comment']:
q = q.order_by(desc(text(f'{order_str}_stat')))
query_order_by = (
desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
)
q = q.order_by(nulls_last(query_order_by))
# limit offset
offset = options.get('offset', 0)
limit = options.get('limit', 10)
q = q.limit(limit).offset(offset)
shouts = []
with local_session() as session:
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in session.execute(q).unique():
main_topic = (
session.query(Topic.slug)
.join(
ShoutTopic,
and_(
ShoutTopic.topic == Topic.id,
ShoutTopic.shout == shout.id,
ShoutTopic.main.is_(True),
),
)
.first()
)
if main_topic:
shout.main_topic = main_topic[0]
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment,
}
shouts.append(shout)
return shouts
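# An illustrative options payload for load_shouts_by (values invented),
# combining the documented filters with ordering and pagination:
#
#   await load_shouts_by(None, None, {
#       'filters': {'featured': True, 'topic': 'culture', 'after': 1704067200},
#       'order_by': 'likes',  # sorted by the likes_stat column
#       'order_by_desc': True,
#       'limit': 50,
#       'offset': 0,
#   })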
@query.field('load_shouts_feed')
@login_required
async def load_shouts_feed(_, info, options):
shouts = []
with local_session() as session:
q = query_shouts()
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
# filters
filters = options.get('filters', {})
if filters:
q, reader_id = filter_my(info, session, q)
q = apply_filters(q, filters, reader_id)
# sort order
order_by = options.get(
'order_by',
Shout.featured_at if filters.get('featured') else Shout.published_at,
)
query_order_by = (
desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
)
# pagination
offset = options.get('offset', 0)
limit = options.get('limit', 10)
q = (
q.group_by(Shout.id)
.order_by(nulls_last(query_order_by))
.limit(limit)
.offset(offset)
)
# print(q.compile(compile_kwargs={"literal_binds": True}))
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in session.execute(q).unique():
main_topic = (
session.query(Topic.slug)
.join(
ShoutTopic,
and_(
ShoutTopic.topic == Topic.id,
ShoutTopic.shout == shout.id,
ShoutTopic.main.is_(True),
),
)
.first()
)
if main_topic:
shout.main_topic = main_topic[0]
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment,
}
shouts.append(shout)
return shouts
@query.field('load_shouts_search')
async def load_shouts_search(_, _info, text, limit=50, offset=0):
if isinstance(text, str) and len(text) > 2:
results = await search_text(text, limit, offset)
logger.debug(results)
return results
return []
@query.field('load_shouts_unrated')
@login_required
async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
q = query_shouts()
q = (
q.outerjoin(
Reaction,
and_(
Reaction.shout == Shout.id,
Reaction.reply_to.is_(None),
Reaction.kind.in_(
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
),
),
)
.outerjoin(Author, Author.id == bindparam('author_id'))
.where(
and_(
Shout.deleted_at.is_(None),
Shout.layout.is_not(None),
or_(Author.id.is_(None), Reaction.created_by != Author.id),
)
)
)
# 3 or fewer votes: 0-3 rating reactions (the outer join can contribute a NULL row, hence <= 4)
q = q.having(func.count(distinct(Reaction.id)) <= 4)
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset)
user_id = info.context.get('user_id') if isinstance(info.context, dict) else None
if user_id:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
if author:
return await get_shouts_from_query(q, author.id)
else:
return await get_shouts_from_query(q)
async def get_shouts_from_query(q, author_id=None):
shouts = []
with local_session() as session:
for [
shout,
reacted_stat,
commented_stat,
likes_stat,
dislikes_stat,
last_comment,
] in session.execute(q, {'author_id': author_id}).unique():
shouts.append(shout)
shout.stat = {
'viewed': await ViewedStorage.get_shout(shout_slug=shout.slug),
'reacted': reacted_stat,
'commented': commented_stat,
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
'last_comment': last_comment,
}
return shouts
@query.field('load_shouts_random_top')
async def load_shouts_random_top(_, _info, options):
"""
:param _
:param _info: GraphQLInfoContext
:param options: {
filters: {
layouts: ['music']
after: 13245678
}
random_limit: 100
limit: 50
offset: 0
}
:return: Shout[]
"""
aliased_reaction = aliased(Reaction)
subquery = (
select(Shout.id)
.outerjoin(aliased_reaction)
.where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
)
subquery = apply_filters(subquery, options.get('filters', {}))
subquery = subquery.group_by(Shout.id).order_by(
desc(
func.sum(
case(
# do not count comments' reactions
(aliased_reaction.reply_to.is_not(None), 0),
(aliased_reaction.kind == ReactionKind.LIKE.value, 1),
(aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
)
)
)
random_limit = options.get('random_limit', 100)
if random_limit:
subquery = subquery.limit(random_limit)
q = (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.where(Shout.id.in_(subquery))
)
q = add_reaction_stat_columns(q, aliased_reaction)
limit = options.get('limit', 10)
q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
shouts = await get_shouts_from_query(q)
return shouts
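# The two-step sampling above first ranks shout ids by summed ratings in a
# subquery capped at random_limit, then draws limit of them at random, e.g.
# "pick 10 at random out of the 100 best-rated music shouts":
#
#   await load_shouts_random_top(None, info, {
#       'filters': {'layouts': ['music']},
#       'random_limit': 100,
#       'limit': 10,
#   })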
@query.field('load_shouts_random_topic')
async def load_shouts_random_topic(_, info, limit: int = 10):
[topic] = get_topics_random(None, None, 1)
if topic:
shouts = await fetch_shouts_by_topic(topic, limit)
if shouts:
return {'topic': topic, 'shouts': shouts}
return {
'error': 'failed to get random topic after few retries',
'shouts': [],
'topic': {},
}
async def fetch_shouts_by_topic(topic, limit):
q = (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.filter(
and_(
Shout.deleted_at.is_(None),
Shout.featured_at.is_not(None),
Shout.topics.any(slug=topic.slug),
)
)
)
aliased_reaction = aliased(Reaction)
q = add_reaction_stat_columns(q, aliased_reaction)
q = q.group_by(Shout.id).order_by(desc(Shout.created_at)).limit(limit)
shouts = await get_shouts_from_query(q)
return shouts

resolvers/stat.py (new file, 164 lines)

@@ -0,0 +1,164 @@
from sqlalchemy import and_, distinct, func, join, select
from sqlalchemy.orm import aliased
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.cache import cache_author
from services.logger import root_logger as logger
def add_topic_stat_columns(q):
aliased_shout_topic = aliased(ShoutTopic)
aliased_authors = aliased(ShoutAuthor)
aliased_followers = aliased(TopicFollower)
aliased_shout = aliased(Shout)
# shouts
q = q.outerjoin(aliased_shout_topic, aliased_shout_topic.topic == Topic.id)
q = q.add_columns(func.count(distinct(aliased_shout_topic.shout)).label('shouts_stat'))
# authors
q = q.outerjoin(aliased_shout, and_(
aliased_shout.id == aliased_shout_topic.shout,
aliased_shout.published_at.is_not(None),
aliased_shout.deleted_at.is_(None)
))
q = q.outerjoin(aliased_authors, aliased_shout.authors.any(id=aliased_authors.id))
q = q.add_columns(func.count(distinct(aliased_authors.author)).label('authors_stat'))
# followers
q = q.outerjoin(aliased_followers, aliased_followers.topic == Topic.id)
q = q.add_columns(
func.count(distinct(aliased_followers.follower)).label('followers_stat')
)
# comments
sub_comments = (
select(
Shout.id.label('shout_id'),
func.coalesce(func.count(Reaction.id)).label('comments_count')
)
.join(ShoutTopic, ShoutTopic.shout == Shout.id)
.join(Topic, ShoutTopic.topic == Topic.id)
.outerjoin(
Reaction,
and_(
Reaction.shout == Shout.id,
Reaction.kind == ReactionKind.COMMENT.value,
Reaction.deleted_at.is_(None),
),
)
.group_by(Shout.id)
.subquery()
)
q = q.outerjoin(sub_comments, aliased_shout_topic.shout == sub_comments.c.shout_id)
q = q.add_columns(func.coalesce(sub_comments.c.comments_count, 0).label('comments_stat'))
group_list = [Topic.id, sub_comments.c.comments_count]
q = q.group_by(*group_list)
logger.debug(q)
return q
def add_author_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_authors = aliased(AuthorFollower)
aliased_followers = aliased(AuthorFollower)
q = q.outerjoin(aliased_shout_author, aliased_shout_author.author == Author.id)
q = q.add_columns(
func.count(distinct(aliased_shout_author.shout)).label('shouts_stat')
)
q = q.outerjoin(aliased_authors, aliased_authors.follower == Author.id)
q = q.add_columns(
func.count(distinct(aliased_authors.author)).label('authors_stat')
)
q = q.outerjoin(aliased_followers, aliased_followers.author == Author.id)
q = q.add_columns(
func.count(distinct(aliased_followers.follower)).label('followers_stat')
)
# Create a subquery for comments count
sub_comments = (
select(
Author.id, func.coalesce(func.count(Reaction.id)).label('comments_count')
)
.outerjoin(
Reaction,
and_(
Reaction.created_by == Author.id,
Reaction.kind == ReactionKind.COMMENT.value,
Reaction.deleted_at.is_(None),
),
)
.group_by(Author.id)
.subquery()
)
q = q.outerjoin(sub_comments, Author.id == sub_comments.c.id)
q = q.add_columns(sub_comments.c.comments_count)
group_list = [Author.id, sub_comments.c.comments_count]
q = q.group_by(*group_list)
return q
def get_with_stat(q):
records = []
try:
is_author = f'{q}'.lower().startswith('select author')
is_topic = f'{q}'.lower().startswith('select topic')
if is_author:
q = add_author_stat_columns(q)
elif is_topic:
q = add_topic_stat_columns(q)
with local_session() as session:
result = session.execute(q)
for cols in result:
entity = cols[0]
stat = dict()
stat['shouts'] = cols[1]
stat['authors'] = cols[2]
stat['followers'] = cols[3]
if is_author:
stat['comments'] = cols[4]
entity.stat = stat
records.append(entity)
except Exception as exc:
import traceback
logger.error(exc, traceback.format_exc())
raise Exception(exc)
return records
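# Usage sketch: get_with_stat() sniffs the entity from the rendered SQL and
# attaches a .stat dict to every returned record (the 'comments' key is only
# present for authors), so callers can do e.g.:
#
#   [author] = get_with_stat(select(Author).where(Author.slug == 'some-slug'))
#   author.stat  # {'shouts': ..., 'authors': ..., 'followers': ..., 'comments': ...}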
def author_follows_authors(author_id: int):
af = aliased(AuthorFollower, name='af')
q = (
select(Author)
.select_from(join(Author, af, Author.id == af.author))
.where(af.follower == author_id)
)
return get_with_stat(q)
def author_follows_topics(author_id: int):
q = (
select(Topic)
.select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
.where(TopicFollower.follower == author_id)
)
return get_with_stat(q)
async def update_author_stat(author: Author):
# get_with_stat returns a list of records; take the first match
result = get_with_stat(select(Author).where(Author.id == author.id))
if result:
author_with_stat = result[0]
if isinstance(author_with_stat, Author):
author_dict = author_with_stat.dict()
await cache_author(author_dict)

resolvers/topic.py (new file, 119 lines)

@@ -0,0 +1,119 @@
from sqlalchemy import distinct, func, select
from orm.author import Author
from orm.shout import ShoutTopic
from orm.topic import Topic
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.memorycache import cache_region
from services.schema import mutation, query
@query.field('get_topics_all')
def get_topics_all(_, _info):
cache_key = 'get_topics_all'
@cache_region.cache_on_arguments(cache_key)
def _get_topics_all():
return get_with_stat(select(Topic))
return _get_topics_all()
@query.field('get_topics_by_community')
def get_topics_by_community(_, _info, community_id: int):
cache_key = f'get_topics_by_community_{community_id}'
@cache_region.cache_on_arguments(cache_key)
def _get_topics_by_community():
q = select(Topic).where(Topic.community == community_id)
return get_with_stat(q)
return _get_topics_by_community()
@query.field('get_topics_by_author')
async def get_topics_by_author(_, _info, author_id=0, slug='', user=''):
q = select(Topic)
if author_id:
q = q.join(Author).where(Author.id == author_id)
elif slug:
q = q.join(Author).where(Author.slug == slug)
elif user:
q = q.join(Author).where(Author.user == user)
return get_with_stat(q)
@query.field('get_topic')
def get_topic(_, _info, slug: str):
q = select(Topic).filter(Topic.slug == slug)
result = get_with_stat(q)
for topic in result:
return topic
@mutation.field('create_topic')
@login_required
async def create_topic(_, _info, inp):
with local_session() as session:
# TODO: check user permissions to create topic for exact community
        # and that the actor is permitted to create it
new_topic = Topic(**inp)
session.add(new_topic)
session.commit()
return {'topic': new_topic}
@mutation.field('update_topic')
@login_required
async def update_topic(_, _info, inp):
slug = inp['slug']
with local_session() as session:
topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic:
return {'error': 'topic not found'}
else:
Topic.update(topic, inp)
session.add(topic)
session.commit()
return {'topic': topic}
@mutation.field('delete_topic')
@login_required
async def delete_topic(_, info, slug: str):
user_id = info.context['user_id']
with local_session() as session:
t: Topic = session.query(Topic).filter(Topic.slug == slug).first()
if not t:
return {'error': 'invalid topic slug'}
author = session.query(Author).filter(Author.user == user_id).first()
if author:
if t.created_by != author.id:
return {'error': 'access denied'}
session.delete(t)
session.commit()
return {}
return {'error': 'access denied'}
@query.field('get_topics_random')
def get_topics_random(_, _info, amount=12):
q = select(Topic)
q = q.join(ShoutTopic)
q = q.group_by(Topic.id)
q = q.having(func.count(distinct(ShoutTopic.shout)) > 2)
q = q.order_by(func.random()).limit(amount)
topics = []
with local_session() as session:
for [topic] in session.execute(q):
topics.append(topic)
return topics
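# For reference (illustrative only): under default SQLAlchemy naming the
# random-topics query above renders roughly to
#   SELECT topic.* FROM topic
#   JOIN shout_topic ON shout_topic.topic = topic.id
#   GROUP BY topic.id
#   HAVING count(DISTINCT shout_topic.shout) > 2
#   ORDER BY random() LIMIT 12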


@@ -1,56 +0,0 @@
import os
import shutil
import tempfile
import uuid
import boto3
from botocore.exceptions import BotoCoreError, ClientError
from starlette.responses import JSONResponse
STORJ_ACCESS_KEY = os.environ.get('STORJ_ACCESS_KEY')
STORJ_SECRET_KEY = os.environ.get('STORJ_SECRET_KEY')
STORJ_END_POINT = os.environ.get('STORJ_END_POINT')
STORJ_BUCKET_NAME = os.environ.get('STORJ_BUCKET_NAME')
CDN_DOMAIN = os.environ.get('CDN_DOMAIN')
async def upload_handler(request):
form = await request.form()
file = form.get('file')
if file is None:
return JSONResponse({'error': 'No file uploaded'}, status_code=400)
file_name, file_extension = os.path.splitext(file.filename)
key = str(uuid.uuid4()) + file_extension
# Create an S3 client with Storj configuration
s3 = boto3.client('s3',
aws_access_key_id=STORJ_ACCESS_KEY,
aws_secret_access_key=STORJ_SECRET_KEY,
endpoint_url=STORJ_END_POINT)
try:
# Save the uploaded file to a temporary file
with tempfile.NamedTemporaryFile() as tmp_file:
shutil.copyfileobj(file.file, tmp_file)
s3.upload_file(
Filename=tmp_file.name,
Bucket=STORJ_BUCKET_NAME,
Key=key,
ExtraArgs={
"ContentType": file.content_type
}
)
url = 'http://' + CDN_DOMAIN + '/' + key
return JSONResponse({'url': url, 'originalFilename': file.filename})
except (BotoCoreError, ClientError) as e:
print(e)
return JSONResponse({'error': 'Failed to upload file'}, status_code=500)


@@ -1,71 +0,0 @@
import asyncio
from base.orm import local_session
from base.resolvers import mutation
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
# from resolvers.community import community_follow, community_unfollow
from orm.user import AuthorFollower
from orm.topic import TopicFollower
from orm.shout import ShoutReactionsFollower
from resolvers.zine.profile import author_follow, author_unfollow
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
from resolvers.zine.topics import topic_follow, topic_unfollow
from services.following import Following, FollowingManager, FollowingResult
from graphql.type import GraphQLResolveInfo
@mutation.field("follow")
@login_required
async def follow(_, info, what, slug):
auth: AuthCredentials = info.context["request"].auth
try:
if what == "AUTHOR":
if author_follow(auth.user_id, slug):
result = FollowingResult("NEW", 'author', slug)
await FollowingManager.push('author', result)
elif what == "TOPIC":
if topic_follow(auth.user_id, slug):
result = FollowingResult("NEW", 'topic', slug)
await FollowingManager.push('topic', result)
elif what == "COMMUNITY":
if False: # TODO: use community_follow(auth.user_id, slug):
result = FollowingResult("NEW", 'community', slug)
await FollowingManager.push('community', result)
elif what == "REACTIONS":
if reactions_follow(auth.user_id, slug):
result = FollowingResult("NEW", 'shout', slug)
await FollowingManager.push('shout', result)
except Exception as e:
print(Exception(e))
return {"error": str(e)}
return {}
@mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug):
auth: AuthCredentials = info.context["request"].auth
try:
if what == "AUTHOR":
if author_unfollow(auth.user_id, slug):
result = FollowingResult("DELETED", 'author', slug)
await FollowingManager.push('author', result)
elif what == "TOPIC":
if topic_unfollow(auth.user_id, slug):
result = FollowingResult("DELETED", 'topic', slug)
await FollowingManager.push('topic', result)
elif what == "COMMUNITY":
if False: # TODO: use community_unfollow(auth.user_id, slug):
result = FollowingResult("DELETED", 'community', slug)
await FollowingManager.push('community', result)
elif what == "REACTIONS":
if reactions_unfollow(auth.user_id, slug):
result = FollowingResult("DELETED", 'shout', slug)
await FollowingManager.push('shout', result)
except Exception as e:
return {"error": str(e)}
return {}


@@ -1,259 +0,0 @@
from datetime import datetime, timedelta, timezone
from sqlalchemy.orm import joinedload, aliased
from sqlalchemy.sql.expression import desc, asc, select, func, case, and_, text, nulls_last
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.exceptions import ObjectNotExist, OperationNotAllowed
from base.orm import local_session
from base.resolvers import query
from orm import TopicFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.user import AuthorFollower
def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction).add_columns(
func.sum(
aliased_reaction.id
).label('reacted_stat'),
func.sum(
case(
(aliased_reaction.kind == ReactionKind.COMMENT, 1),
else_=0
)
).label('commented_stat'),
func.sum(case(
# do not count comments' reactions
(aliased_reaction.replyTo.is_not(None), 0),
(aliased_reaction.kind == ReactionKind.AGREE, 1),
(aliased_reaction.kind == ReactionKind.DISAGREE, -1),
(aliased_reaction.kind == ReactionKind.PROOF, 1),
(aliased_reaction.kind == ReactionKind.DISPROOF, -1),
(aliased_reaction.kind == ReactionKind.ACCEPT, 1),
(aliased_reaction.kind == ReactionKind.REJECT, -1),
(aliased_reaction.kind == ReactionKind.LIKE, 1),
(aliased_reaction.kind == ReactionKind.DISLIKE, -1),
else_=0)
).label('rating_stat'),
func.max(case(
(aliased_reaction.kind != ReactionKind.COMMENT, None),
else_=aliased_reaction.createdAt
)).label('last_comment'))
return q
def apply_filters(q, filters, user_id=None):
if filters.get("reacted") and user_id:
        q = q.join(Reaction, Reaction.createdBy == user_id)
v = filters.get("visibility")
if v == "public":
q = q.filter(Shout.visibility == filters.get("visibility"))
if v == "community":
q = q.filter(Shout.visibility.in_(["public", "community"]))
if filters.get("layout"):
q = q.filter(Shout.layout == filters.get("layout"))
if filters.get('excludeLayout'):
q = q.filter(Shout.layout != filters.get("excludeLayout"))
if filters.get("author"):
q = q.filter(Shout.authors.any(slug=filters.get("author")))
if filters.get("topic"):
q = q.filter(Shout.topics.any(slug=filters.get("topic")))
if filters.get("title"):
q = q.filter(Shout.title.ilike(f'%{filters.get("title")}%'))
if filters.get("body"):
        q = q.filter(Shout.body.ilike(f'%{filters.get("body")}%'))
if filters.get("days"):
before = datetime.now(tz=timezone.utc) - timedelta(days=int(filters.get("days")) or 30)
q = q.filter(Shout.createdAt > before)
return q
@query.field("loadShout")
async def load_shout(_, info, slug=None, shout_id=None):
with local_session() as session:
q = select(Shout).options(
joinedload(Shout.authors),
joinedload(Shout.topics),
)
q = add_stat_columns(q)
if slug is not None:
q = q.filter(
Shout.slug == slug
)
if shout_id is not None:
q = q.filter(
Shout.id == shout_id
)
q = q.filter(
Shout.deletedAt.is_(None)
).group_by(Shout.id)
try:
[shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(q).first()
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
"rating": rating_stat
}
for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug):
for author in shout.authors:
if author.id == author_caption.user:
author.caption = author_caption.caption
return shout
except Exception:
raise ObjectNotExist("Slug was not found: %s" % slug)
@query.field("loadShouts")
async def load_shouts_by(_, info, options):
"""
:param options: {
filters: {
layout: 'music',
excludeLayout: 'article',
visibility: "public",
author: 'discours',
topic: 'culture',
title: 'something',
body: 'something else',
days: 30
}
offset: 0
limit: 50
order_by: 'createdAt' | 'commented' | 'reacted' | 'rating'
order_by_desc: true
}
:return: Shout[]
"""
q = select(Shout).options(
joinedload(Shout.authors),
joinedload(Shout.topics),
).where(
and_(
Shout.deletedAt.is_(None),
Shout.layout.is_not(None)
)
)
q = add_stat_columns(q)
auth: AuthCredentials = info.context["request"].auth
q = apply_filters(q, options.get("filters", {}), auth.user_id)
order_by = options.get("order_by", Shout.publishedAt)
query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
q = q.group_by(Shout.id).order_by(nulls_last(query_order_by)).limit(limit).offset(offset)
shouts = []
with local_session() as session:
shouts_map = {}
for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
"rating": rating_stat
}
shouts_map[shout.id] = shout
return shouts
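# Example options payload (illustrative), matching the docstring above:
#   options = {
#       'filters': {'layout': 'music', 'visibility': 'public', 'days': 30},
#       'offset': 0,
#       'limit': 50,
#       'order_by': 'rating',
#       'order_by_desc': True,
#   }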
@query.field("loadDrafts")
@login_required
async def get_drafts(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
q = select(Shout).options(
joinedload(Shout.authors),
joinedload(Shout.topics),
).where(
and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id)
)
q = q.group_by(Shout.id)
shouts = []
with local_session() as session:
for [shout] in session.execute(q).unique():
shouts.append(shout)
return shouts
@query.field("myFeed")
@login_required
async def get_my_feed(_, info, options):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
subquery = select(Shout.id).join(
ShoutAuthor
).join(
AuthorFollower, AuthorFollower.follower == user_id
).join(
ShoutTopic
).join(
TopicFollower, TopicFollower.follower == user_id
)
q = select(Shout).options(
joinedload(Shout.authors),
joinedload(Shout.topics),
).where(
and_(
Shout.publishedAt.is_not(None),
Shout.deletedAt.is_(None),
Shout.id.in_(subquery)
)
)
q = add_stat_columns(q)
q = apply_filters(q, options.get("filters", {}), user_id)
order_by = options.get("order_by", Shout.publishedAt)
query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
q = q.group_by(Shout.id).order_by(nulls_last(query_order_by)).limit(limit).offset(offset)
shouts = []
with local_session() as session:
shouts_map = {}
for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
"rating": rating_stat
}
shouts_map[shout.id] = shout
return shouts


@@ -1,298 +0,0 @@
from typing import List
from datetime import datetime, timedelta, timezone
from sqlalchemy import and_, func, distinct, select, literal
from sqlalchemy.orm import aliased, joinedload
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation, query
from orm.reaction import Reaction, ReactionKind
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic
from orm.user import AuthorFollower, Role, User, UserRating, UserRole
# from .community import followed_communities
from resolvers.inbox.unread import get_total_unread_counter
from resolvers.zine.topics import followed_by_user
def add_author_stat_columns(q):
author_followers = aliased(AuthorFollower)
author_following = aliased(AuthorFollower)
shout_author_aliased = aliased(ShoutAuthor)
# user_rating_aliased = aliased(UserRating)
q = q.outerjoin(shout_author_aliased).add_columns(
func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
)
q = q.outerjoin(author_followers, author_followers.author == User.id).add_columns(
func.count(distinct(author_followers.follower)).label('followers_stat')
)
q = q.outerjoin(author_following, author_following.follower == User.id).add_columns(
func.count(distinct(author_following.author)).label('followings_stat')
)
q = q.add_columns(literal(0).label('rating_stat'))
# FIXME
# q = q.outerjoin(user_rating_aliased, user_rating_aliased.user == User.id).add_columns(
# # TODO: check
# func.sum(user_rating_aliased.value).label('rating_stat')
# )
q = q.add_columns(literal(0).label('commented_stat'))
# q = q.outerjoin(Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))).add_columns(
# func.count(distinct(Reaction.id)).label('commented_stat')
# )
q = q.group_by(User.id)
return q
def add_stat(author, stat_columns):
[shouts_stat, followers_stat, followings_stat, rating_stat, commented_stat] = stat_columns
author.stat = {
"shouts": shouts_stat,
"followers": followers_stat,
"followings": followings_stat,
"rating": rating_stat,
"commented": commented_stat
}
return author
def get_authors_from_query(q):
authors = []
with local_session() as session:
for [author, *stat_columns] in session.execute(q):
author = add_stat(author, stat_columns)
authors.append(author)
return authors
async def user_subscriptions(user_id: int):
return {
"unread": await get_total_unread_counter(user_id), # unread inbox messages counter
"topics": [t.slug for t in await followed_topics(user_id)], # followed topics slugs
"authors": [a.slug for a in await followed_authors(user_id)], # followed authors slugs
"reactions": await followed_reactions(user_id)
# "communities": [c.slug for c in followed_communities(slug)], # communities
}
# @query.field("userFollowedDiscussions")
# @login_required
async def followed_discussions(_, info, user_id) -> List[Topic]:
return await followed_reactions(user_id)
async def followed_reactions(user_id):
with local_session() as session:
user = session.query(User).where(User.id == user_id).first()
return session.query(
Reaction.shout
).where(
Reaction.createdBy == user.id
).filter(
Reaction.createdAt > user.lastSeen
).all()
# dufok mod (^*^') :
@query.field("userFollowedTopics")
async def get_followed_topics(_, info, slug) -> List[Topic]:
user_id_query = select(User.id).where(User.slug == slug)
with local_session() as session:
user_id = session.execute(user_id_query).scalar()
if user_id is None:
raise ValueError("User not found")
return await followed_topics(user_id)
async def followed_topics(user_id):
return followed_by_user(user_id)
# dufok mod (^*^') :
@query.field("userFollowedAuthors")
async def get_followed_authors(_, _info, slug) -> List[User]:
# 1. First, we need to get the user_id for the given slug
user_id_query = select(User.id).where(User.slug == slug)
with local_session() as session:
user_id = session.execute(user_id_query).scalar()
if user_id is None:
raise ValueError("User not found")
return await followed_authors(user_id)
# 2. Now, we can use the user_id to get the followed authors
async def followed_authors(user_id):
q = select(User)
q = add_author_stat_columns(q)
q = q.join(AuthorFollower, AuthorFollower.author == User.id).where(
AuthorFollower.follower == user_id
)
# 3. Pass the query to the get_authors_from_query function and return the results
return get_authors_from_query(q)
@query.field("userFollowers")
async def user_followers(_, _info, slug) -> List[User]:
q = select(User)
q = add_author_stat_columns(q)
aliased_user = aliased(User)
q = q.join(AuthorFollower, AuthorFollower.follower == User.id).join(
aliased_user, aliased_user.id == AuthorFollower.author
).where(
aliased_user.slug == slug
)
return get_authors_from_query(q)
async def get_user_roles(slug):
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
roles = (
session.query(Role)
.options(joinedload(Role.permissions))
.join(UserRole)
.where(UserRole.user == user.id)
.all()
)
return roles
@mutation.field("updateProfile")
@login_required
async def update_profile(_, info, profile):
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
user = session.query(User).filter(User.id == user_id).one()
if not user:
return {
"error": "canoot find user"
}
user.update(profile)
session.commit()
return {
"error": None,
"author": user
}
@mutation.field("rateUser")
@login_required
async def rate_user(_, info, rated_userslug, value):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
rating = (
session.query(UserRating)
.filter(and_(UserRating.rater == auth.user_id, UserRating.user == rated_userslug))
.first()
)
if rating:
rating.value = value
session.commit()
return {}
try:
UserRating.create(rater=auth.user_id, user=rated_userslug, value=value)
except Exception as err:
return {"error": err}
return {}
# for mutation.field("follow")
def author_follow(user_id, slug):
try:
with local_session() as session:
author = session.query(User).where(User.slug == slug).one()
af = AuthorFollower.create(follower=user_id, author=author.id)
session.add(af)
session.commit()
return True
except:
return False
# for mutation.field("unfollow")
def author_unfollow(user_id, slug):
with local_session() as session:
flw = (
session.query(
AuthorFollower
).join(User, User.id == AuthorFollower.author).filter(
and_(
AuthorFollower.follower == user_id, User.slug == slug
)
).first()
)
if flw:
session.delete(flw)
session.commit()
return True
return False
@query.field("authorsAll")
async def get_authors_all(_, _info):
q = select(User)
q = add_author_stat_columns(q)
q = q.join(ShoutAuthor, User.id == ShoutAuthor.user)
return get_authors_from_query(q)
@query.field("getAuthor")
async def get_author(_, _info, slug):
q = select(User).where(User.slug == slug)
q = add_author_stat_columns(q)
[author] = get_authors_from_query(q)
with local_session() as session:
comments_count = session.query(Reaction).where(
and_(
Reaction.createdBy == author.id,
Reaction.kind == ReactionKind.COMMENT
)
).count()
author.stat["commented"] = comments_count
return author
@query.field("loadAuthorsBy")
async def load_authors_by(_, info, by, limit, offset):
q = select(User)
q = add_author_stat_columns(q)
if by.get("slug"):
q = q.filter(User.slug.ilike(f"%{by['slug']}%"))
elif by.get("name"):
q = q.filter(User.name.ilike(f"%{by['name']}%"))
elif by.get("topic"):
q = q.join(ShoutAuthor).join(ShoutTopic).join(Topic).where(Topic.slug == by["topic"])
if by.get("lastSeen"): # in days
days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["lastSeen"])
q = q.filter(User.lastSeen > days_before)
elif by.get("createdAt"): # in days
days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["createdAt"])
q = q.filter(User.createdAt > days_before)
q = q.order_by(
by.get("order", User.createdAt)
).limit(limit).offset(offset)
return get_authors_from_query(q)


@@ -1,387 +0,0 @@
from datetime import datetime, timedelta, timezone
from sqlalchemy import and_, asc, desc, select, text, func, case
from sqlalchemy.orm import aliased
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.exceptions import OperationNotAllowed
from base.orm import local_session
from base.resolvers import mutation, query
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
from orm.user import User
from services.notifications.notification_service import notification_service
def add_reaction_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns(
func.sum(
aliased_reaction.id
).label('reacted_stat'),
func.sum(
case(
(aliased_reaction.body.is_not(None), 1),
else_=0
)
).label('commented_stat'),
func.sum(case(
(aliased_reaction.kind == ReactionKind.AGREE, 1),
(aliased_reaction.kind == ReactionKind.DISAGREE, -1),
(aliased_reaction.kind == ReactionKind.PROOF, 1),
(aliased_reaction.kind == ReactionKind.DISPROOF, -1),
(aliased_reaction.kind == ReactionKind.ACCEPT, 1),
(aliased_reaction.kind == ReactionKind.REJECT, -1),
(aliased_reaction.kind == ReactionKind.LIKE, 1),
(aliased_reaction.kind == ReactionKind.DISLIKE, -1),
else_=0)
).label('rating_stat'))
return q
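# Illustrative summary (not part of the original file): rating_stat sums these
# per-kind weights over the joined reactions:
#   AGREE, PROOF, ACCEPT, LIKE          -> +1
#   DISAGREE, DISPROOF, REJECT, DISLIKE -> -1
#   anything else                       ->  0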
def reactions_follow(user_id, shout_id: int, auto=False):
try:
with local_session() as session:
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
session.query(ShoutReactionsFollower).where(and_(
ShoutReactionsFollower.follower == user_id,
ShoutReactionsFollower.shout == shout.id,
)).first()
)
if not following:
following = ShoutReactionsFollower.create(
follower=user_id,
shout=shout.id,
auto=auto
)
session.add(following)
session.commit()
return True
except:
return False
def reactions_unfollow(user_id: int, shout_id: int):
try:
with local_session() as session:
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
session.query(ShoutReactionsFollower).where(and_(
ShoutReactionsFollower.follower == user_id,
ShoutReactionsFollower.shout == shout.id
)).first()
)
if following:
session.delete(following)
session.commit()
return True
except:
pass
return False
def is_published_author(session, user_id):
''' checks if user has at least one publication '''
return session.query(
Shout
).where(
Shout.authors.contains(user_id)
).filter(
and_(
Shout.publishedAt.is_not(None),
Shout.deletedAt.is_(None)
)
).count() > 0
def check_to_publish(session, user_id, reaction):
    ''' set shout to public if published-author approvals amount > 4 '''
if not reaction.replyTo and reaction.kind in [
ReactionKind.ACCEPT,
ReactionKind.LIKE,
ReactionKind.PROOF
]:
        if is_published_author(session, user_id):
# now count how many approvers are voted already
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
approvers = [user_id, ]
for ar in approvers_reactions:
a = ar.createdBy
if is_published_author(session, a):
approvers.append(a)
if len(approvers) > 4:
return True
return False
def check_to_hide(session, user_id, reaction):
''' hides any shout if 20% of reactions are negative '''
if not reaction.replyTo and reaction.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
ReactionKind.DISPROOF
]:
# if is_published_author(user):
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
rejects = 0
for r in approvers_reactions:
if r.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
ReactionKind.DISPROOF
]:
rejects += 1
        if rejects and len(approvers_reactions) / rejects < 5:
return True
return False
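# Worked example (illustrative): with 10 reactions of which 3 are negative,
# 10 / 3 = 3.33 < 5, i.e. negatives exceed 20%, so the shout gets hidden.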
def set_published(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.publishedAt = datetime.now(tz=timezone.utc)
s.visibility = text('public')
session.add(s)
session.commit()
def set_hidden(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.visibility = text('community')
session.add(s)
session.commit()
@mutation.field("createReaction")
@login_required
async def create_reaction(_, info, reaction):
auth: AuthCredentials = info.context["request"].auth
reaction['createdBy'] = auth.user_id
rdict = {}
with local_session() as session:
shout = session.query(Shout).where(Shout.id == reaction["shout"]).one()
author = session.query(User).where(User.id == auth.user_id).one()
if reaction["kind"] in [
ReactionKind.DISLIKE.name,
ReactionKind.LIKE.name
]:
existing_reaction = session.query(Reaction).where(
and_(
Reaction.shout == reaction["shout"],
Reaction.createdBy == auth.user_id,
Reaction.kind == reaction["kind"],
Reaction.replyTo == reaction.get("replyTo")
)
).first()
if existing_reaction is not None:
raise OperationNotAllowed("You can't vote twice")
opposite_reaction_kind = ReactionKind.DISLIKE if reaction["kind"] == ReactionKind.LIKE.name else ReactionKind.LIKE
opposite_reaction = session.query(Reaction).where(
and_(
Reaction.shout == reaction["shout"],
Reaction.createdBy == auth.user_id,
Reaction.kind == opposite_reaction_kind,
Reaction.replyTo == reaction.get("replyTo")
)
).first()
if opposite_reaction is not None:
session.delete(opposite_reaction)
r = Reaction.create(**reaction)
        # # Proposal accepting logic
        # FIXME: will break if there are two proposals or if the shout is changed
# if r.replyTo is not None and \
# r.kind == ReactionKind.ACCEPT and \
# auth.user_id in shout.dict()['authors']:
# replied_reaction = session.query(Reaction).where(Reaction.id == r.replyTo).first()
# if replied_reaction and replied_reaction.kind == ReactionKind.PROPOSE:
# if replied_reaction.range:
# old_body = shout.body
# start, end = replied_reaction.range.split(':')
# start = int(start)
# end = int(end)
# new_body = old_body[:start] + replied_reaction.body + old_body[end:]
# shout.body = new_body
# # TODO: update git version control
session.add(r)
session.commit()
await notification_service.handle_new_reaction(r.id)
rdict = r.dict()
rdict['shout'] = shout.dict()
rdict['createdBy'] = author.dict()
# self-regulation mechanics
if check_to_hide(session, auth.user_id, r):
set_hidden(session, r.shout)
elif check_to_publish(session, auth.user_id, r):
set_published(session, r.shout)
try:
reactions_follow(auth.user_id, reaction["shout"], True)
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
rdict['stat'] = {
"commented": 0,
"reacted": 0,
"rating": 0
}
return {"reaction": rdict}
@mutation.field("updateReaction")
@login_required
async def update_reaction(_, info, id, reaction={}):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
user = session.query(User).where(User.id == auth.user_id).first()
q = select(Reaction).filter(Reaction.id == id)
q = add_reaction_stat_columns(q)
q = q.group_by(Reaction.id)
[r, reacted_stat, commented_stat, rating_stat] = session.execute(q).unique().one()
if not r:
return {"error": "invalid reaction id"}
if r.createdBy != user.id:
return {"error": "access denied"}
r.body = reaction["body"]
r.updatedAt = datetime.now(tz=timezone.utc)
if r.kind != reaction["kind"]:
# NOTE: change mind detection can be here
pass
if reaction.get("range"):
r.range = reaction.get("range")
session.commit()
r.stat = {
"commented": commented_stat,
"reacted": reacted_stat,
"rating": rating_stat
}
return {"reaction": r}
@mutation.field("deleteReaction")
@login_required
async def delete_reaction(_, info, id):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
r = session.query(Reaction).filter(Reaction.id == id).first()
if not r:
return {"error": "invalid reaction id"}
if r.createdBy != auth.user_id:
return {"error": "access denied"}
if r.kind in [
ReactionKind.LIKE,
ReactionKind.DISLIKE
]:
session.delete(r)
else:
r.deletedAt = datetime.now(tz=timezone.utc)
session.commit()
return {
"reaction": r
}
@query.field("loadReactionsBy")
async def load_reactions_by(_, _info, by, limit=50, offset=0):
"""
:param by: {
:shout - filter by slug
    :shouts - filter by shout slug list
:createdBy - to filter by author
:topic - to filter by topic
:search - to search by reactions' body
:comment - true if body.length > 0
:days - a number of days ago
:sort - a fieldname to sort desc by default
}
:param limit: int amount of shouts
:param offset: int offset in this order
:return: Reaction[]
"""
q = select(
Reaction, User, Shout
).join(
User, Reaction.createdBy == User.id
).join(
Shout, Reaction.shout == Shout.id
)
if by.get("shout"):
q = q.filter(Shout.slug == by["shout"])
elif by.get("shouts"):
q = q.filter(Shout.slug.in_(by["shouts"]))
if by.get("createdBy"):
q = q.filter(User.slug == by.get("createdBy"))
if by.get("topic"):
# TODO: check
q = q.filter(Shout.topics.contains(by["topic"]))
if by.get("comment"):
q = q.filter(func.length(Reaction.body) > 0)
if len(by.get('search', '')) > 2:
        q = q.filter(Reaction.body.ilike(f'%{by["search"]}%'))
if by.get("days"):
after = datetime.now(tz=timezone.utc) - timedelta(days=int(by["days"]) or 30)
q = q.filter(Reaction.createdAt > after)
order_way = asc if by.get("sort", "").startswith("-") else desc
order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt
q = q.group_by(
Reaction.id, User.id, Shout.id
).order_by(
order_way(order_field)
)
q = add_reaction_stat_columns(q)
q = q.where(Reaction.deletedAt.is_(None))
q = q.limit(limit).offset(offset)
reactions = []
with local_session() as session:
for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(q):
reaction.createdBy = user
reaction.shout = shout
reaction.stat = {
"rating": rating_stat,
"commented": commented_stat,
"reacted": reacted_stat
}
reaction.kind = reaction.kind.name
reactions.append(reaction)
    # optionally sort by the requested stat field
    if by.get("stat"):
        reactions.sort(key=lambda r: r.stat.get(by["stat"]) or r.createdAt)
return reactions


@@ -1,165 +0,0 @@
from sqlalchemy import and_, select, distinct, func
from sqlalchemy.orm import aliased
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
from orm.shout import ShoutTopic, ShoutAuthor
from orm.topic import Topic, TopicFollower
from orm import User
def add_topic_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_topic_follower = aliased(TopicFollower)
q = q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic).add_columns(
func.count(distinct(ShoutTopic.shout)).label('shouts_stat')
).outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout).add_columns(
func.count(distinct(aliased_shout_author.user)).label('authors_stat')
).outerjoin(aliased_topic_follower).add_columns(
func.count(distinct(aliased_topic_follower.follower)).label('followers_stat')
)
q = q.group_by(Topic.id)
return q
def add_stat(topic, stat_columns):
[shouts_stat, authors_stat, followers_stat] = stat_columns
topic.stat = {
"shouts": shouts_stat,
"authors": authors_stat,
"followers": followers_stat
}
return topic
def get_topics_from_query(q):
topics = []
with local_session() as session:
for [topic, *stat_columns] in session.execute(q):
topic = add_stat(topic, stat_columns)
topics.append(topic)
return topics
def followed_by_user(user_id):
q = select(Topic)
q = add_topic_stat_columns(q)
q = q.join(TopicFollower).where(TopicFollower.follower == user_id)
return get_topics_from_query(q)
@query.field("topicsAll")
async def topics_all(_, _info):
q = select(Topic)
q = add_topic_stat_columns(q)
return get_topics_from_query(q)
@query.field("topicsByCommunity")
async def topics_by_community(_, info, community):
q = select(Topic).where(Topic.community == community)
q = add_topic_stat_columns(q)
return get_topics_from_query(q)
@query.field("topicsByAuthor")
async def topics_by_author(_, _info, author):
q = select(Topic)
q = add_topic_stat_columns(q)
q = q.join(User).where(User.slug == author)
return get_topics_from_query(q)
@query.field("getTopic")
async def get_topic(_, _info, slug):
q = select(Topic).where(Topic.slug == slug)
q = add_topic_stat_columns(q)
topics = get_topics_from_query(q)
return topics[0]
@mutation.field("createTopic")
@login_required
async def create_topic(_, _info, inp):
with local_session() as session:
# TODO: check user permissions to create topic for exact community
new_topic = Topic.create(**inp)
session.add(new_topic)
session.commit()
return {"topic": new_topic}
@mutation.field("updateTopic")
@login_required
async def update_topic(_, _info, inp):
slug = inp["slug"]
with local_session() as session:
topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic:
return {"error": "topic not found"}
else:
topic.update(**inp)
session.commit()
return {"topic": topic}
def topic_follow(user_id, slug):
try:
with local_session() as session:
topic = session.query(Topic).where(Topic.slug == slug).one()
following = TopicFollower.create(topic=topic.id, follower=user_id)
session.add(following)
session.commit()
return True
except:
return False
def topic_unfollow(user_id, slug):
try:
with local_session() as session:
sub = (
session.query(TopicFollower).join(Topic).filter(
and_(
TopicFollower.follower == user_id,
Topic.slug == slug
)
).first()
)
if sub:
session.delete(sub)
session.commit()
return True
except:
pass
return False
@query.field("topicsRandom")
async def topics_random(_, info, amount=12):
q = select(Topic)
q = q.join(ShoutTopic)
q = q.group_by(Topic.id)
q = q.having(func.count(distinct(ShoutTopic.shout)) > 2)
q = q.order_by(func.random()).limit(amount)
topics = []
with local_session() as session:
for [topic] in session.execute(q):
topics.append(topic)
return topics


@@ -1,524 +0,0 @@
scalar DateTime
################################### Payload ###################################
enum MessageStatus {
NEW
UPDATED
DELETED
}
type UserFollowings {
unread: Int
topics: [String]
authors: [String]
reactions: [Int]
communities: [String]
}
type AuthResult {
error: String
token: String
user: User
news: UserFollowings
}
type ChatMember {
id: Int!
slug: String!
name: String!
userpic: String
lastSeen: DateTime
online: Boolean
# invitedAt: DateTime
# invitedBy: String # user slug
# TODO: keep invite databit
}
type AuthorStat {
followings: Int
followers: Int
rating: Int
commented: Int
shouts: Int
}
type Author {
id: Int!
slug: String!
name: String!
userpic: String
caption: String # only for full shout
bio: String
about: String
links: [String]
stat: AuthorStat
roles: [Role] # in different communities
lastSeen: DateTime
createdAt: DateTime
}
type Result {
error: String
slugs: [String]
chat: Chat
chats: [Chat]
message: Message
messages: [Message]
members: [ChatMember]
shout: Shout
shouts: [Shout]
author: Author
authors: [Author]
reaction: Reaction
reactions: [Reaction]
topic: Topic
topics: [Topic]
community: Community
communities: [Community]
}
enum ReactionStatus {
NEW
UPDATED
CHANGED
EXPLAINED
DELETED
}
type ReactionUpdating {
error: String
status: ReactionStatus
reaction: Reaction
}
################################### Inputs ###################################
input ShoutInput {
slug: String
title: String
body: String
lead: String
description: String
layout: String
media: String
authors: [String]
topics: [TopicInput]
community: Int
mainTopic: TopicInput
subtitle: String
cover: String
}
input ProfileInput {
slug: String
name: String
userpic: String
links: [String]
bio: String
about: String
}
input TopicInput {
id: Int,
slug: String!
# community: String!
title: String
body: String
pic: String
# children: [String]
# parents: [String]
}
input ReactionInput {
kind: ReactionKind!
shout: Int!
range: String
body: String
replyTo: Int
}
input ChatInput {
id: String!
title: String
description: String
}
enum FollowingEntity {
TOPIC
AUTHOR
COMMUNITY
REACTIONS
}
################################### Mutation
type Mutation {
# inbox
createChat(title: String, members: [Int]!): Result!
updateChat(chat: ChatInput!): Result!
deleteChat(chatId: String!): Result!
createMessage(chat: String!, body: String!, replyTo: Int): Result!
updateMessage(chatId: String!, id: Int!, body: String!): Result!
deleteMessage(chatId: String!, id: Int!): Result!
markAsRead(chatId: String!, ids: [Int]!): Result!
# auth
getSession: AuthResult!
registerUser(email: String!, password: String, name: String): AuthResult!
sendLink(email: String!, lang: String, template: String): Result!
confirmEmail(token: String!): AuthResult!
# shout
createShout(inp: ShoutInput!): Result!
updateShout(shout_id: Int!, shout_input: ShoutInput, publish: Boolean): Result!
deleteShout(shout_id: Int!): Result!
# user profile
rateUser(slug: String!, value: Int!): Result!
updateProfile(profile: ProfileInput!): Result!
# topics
createTopic(input: TopicInput!): Result!
# TODO: mergeTopics(t1: String!, t2: String!): Result!
updateTopic(input: TopicInput!): Result!
destroyTopic(slug: String!): Result!
# reactions
createReaction(reaction: ReactionInput!): Result!
updateReaction(id: Int!, reaction: ReactionInput!): Result!
deleteReaction(id: Int!): Result!
# following
follow(what: FollowingEntity!, slug: String!): Result!
unfollow(what: FollowingEntity!, slug: String!): Result!
markNotificationAsRead(notification_id: Int!): Result!
markAllNotificationsAsRead: Result!
}
input MessagesBy {
author: String
body: String
chat: String
order: String
days: Int
stat: String
}
input AuthorsBy {
lastSeen: DateTime
createdAt: DateTime
slug: String
name: String
topic: String
order: String
days: Int
stat: String
}
input LoadShoutsFilters {
title: String
body: String
topic: String
author: String
layout: String
excludeLayout: String
visibility: String
days: Int
reacted: Boolean
}
input LoadShoutsOptions {
filters: LoadShoutsFilters
with_author_captions: Boolean
limit: Int!
offset: Int
order_by: String
order_by_desc: Boolean
}
input ReactionBy {
shout: String # slug
shouts: [String]
search: String # fts on body
comment: Boolean
topic: String # topic.slug
createdBy: String # user.slug
days: Int # before
sort: String # how to sort, default createdAt
}
input NotificationsQueryParams {
limit: Int
offset: Int
}
type NotificationsQueryResult {
notifications: [Notification]!
totalCount: Int!
totalUnreadCount: Int!
}
type Query {
# inbox
  loadChats(limit: Int, offset: Int): Result! # your chats
loadMessagesBy(by: MessagesBy!, limit: Int, offset: Int): Result!
loadRecipients(limit: Int, offset: Int): Result!
searchRecipients(query: String!, limit: Int, offset: Int): Result!
searchMessages(by: MessagesBy!, limit: Int, offset: Int): Result!
# auth
isEmailUsed(email: String!): Boolean!
signIn(email: String!, password: String, lang: String): AuthResult!
signOut: AuthResult!
# zine
loadAuthorsBy(by: AuthorsBy, limit: Int, offset: Int): [Author]!
loadShout(slug: String, shout_id: Int): Shout
loadShouts(options: LoadShoutsOptions): [Shout]!
loadDrafts: [Shout]!
loadReactionsBy(by: ReactionBy!, limit: Int, offset: Int): [Reaction]!
userFollowers(slug: String!): [Author]!
userFollowedAuthors(slug: String!): [Author]!
userFollowedTopics(slug: String!): [Topic]!
authorsAll: [Author]!
getAuthor(slug: String!): Author
myFeed(options: LoadShoutsOptions): [Shout]
# migrate
markdownBody(body: String!): String!
# topics
getTopic(slug: String!): Topic
topicsAll: [Topic]!
topicsRandom(amount: Int): [Topic]!
topicsByCommunity(community: String!): [Topic]!
topicsByAuthor(author: String!): [Topic]!
loadNotifications(params: NotificationsQueryParams!): NotificationsQueryResult!
}
############################################ Entities
type Resource {
id: Int!
name: String!
}
type Operation {
id: Int!
name: String!
}
type Permission {
operation: Int!
resource: Int!
}
type Role {
id: Int!
name: String!
community: String!
desc: String
permissions: [Permission!]!
}
type Rating {
rater: String!
value: Int!
}
type User {
id: Int!
username: String! # to login, ex. email, phone
createdAt: DateTime!
lastSeen: DateTime
slug: String!
name: String # to display
email: String
password: String
oauth: String # provider:token
userpic: String
links: [String]
emailConfirmed: Boolean # should contain all emails too
muted: Boolean
updatedAt: DateTime
ratings: [Rating]
bio: String
about: String
communities: [Int] # user participating communities
oid: String
}
enum ReactionKind {
LIKE
DISLIKE
AGREE
DISAGREE
PROOF
DISPROOF
COMMENT
QUOTE
PROPOSE
ASK
REMARK
FOOTNOTE
ACCEPT
REJECT
}
type Reaction {
id: Int!
shout: Shout!
createdAt: DateTime!
createdBy: User!
updatedAt: DateTime
deletedAt: DateTime
deletedBy: User
range: String # full / 0:2340
kind: ReactionKind!
body: String
replyTo: Int
stat: Stat
old_id: String
old_thread: String
}
# is publication
type Shout {
id: Int!
slug: String!
body: String!
lead: String
description: String
createdAt: DateTime!
topics: [Topic]
mainTopic: String
title: String
subtitle: String
authors: [Author]
lang: String
community: String
cover: String
layout: String # music video literature image
versionOf: String # for translations and re-telling the same story
visibility: String # owner authors community public
updatedAt: DateTime
updatedBy: User
deletedAt: DateTime
deletedBy: User
publishedAt: DateTime
media: String # json [ { title pic url body }, .. ]
stat: Stat
}
type Stat {
viewed: Int
reacted: Int
rating: Int
commented: Int
ranking: Int
}
type Community {
id: Int!
slug: String!
name: String!
desc: String
pic: String!
createdAt: DateTime!
createdBy: User!
}
type Collection {
id: Int!
slug: String!
title: String!
desc: String
amount: Int
publishedAt: DateTime
createdAt: DateTime!
createdBy: User!
}
type TopicStat {
shouts: Int!
followers: Int!
authors: Int!
# viewed: Int
# reacted: Int!
# commented: Int
# rating: Int
}
type Topic {
id: Int!
slug: String!
title: String
body: String
pic: String
# community: Community!
stat: TopicStat
oid: String
}
type Token {
createdAt: DateTime!
expiresAt: DateTime
id: Int!
ownerId: Int!
usedAt: DateTime
value: String!
}
type Message {
author: Int!
chatId: String!
body: String!
createdAt: Int!
id: Int!
replyTo: Int
updatedAt: Int
seen: Boolean
}
type Chat {
id: String!
createdAt: Int!
createdBy: Int!
updatedAt: Int!
title: String
description: String
users: [Int]
members: [ChatMember]
admins: [Int]
messages: [Message]
unread: Int
private: Boolean
}
enum NotificationType {
NEW_COMMENT,
NEW_REPLY
}
type Notification {
id: Int!
shout: Int
reaction: Int
type: NotificationType!
createdAt: DateTime!
seen: Boolean!
data: String # JSON
occurrences: Int!
}

schema/enum.graphql

@@ -0,0 +1,47 @@
enum ReactionStatus {
NEW
UPDATED
CHANGED
EXPLAINED
DELETED
}
enum ReactionSort {
newest
oldest
like
dislike
}
enum ReactionKind {
# collabs
AGREE
DISAGREE
ASK
PROPOSE
PROOF
DISPROOF
ACCEPT
REJECT
# public feed
QUOTE
COMMENT
LIKE
DISLIKE
}
enum FollowingEntity {
TOPIC
AUTHOR
SHOUT
COMMUNITY
}
enum InviteStatus {
PENDING
ACCEPTED
REJECTED
}

schema/input.graphql

@@ -0,0 +1,86 @@
input ShoutInput {
slug: String
title: String
body: String
lead: String
description: String
layout: String
media: String
topics: [TopicInput]
community: Int
subtitle: String
cover: String
}
input ProfileInput {
slug: String
name: String
pic: String
links: [String]
bio: String
about: String
}
input TopicInput {
id: Int
slug: String!
title: String
body: String
pic: String
}
input ReactionInput {
id: Int
kind: ReactionKind!
shout: Int!
quote: String
body: String
reply_to: Int
}
input AuthorsBy {
last_seen: Int
created_at: Int
slug: String
name: String
topic: String
order: String
after: Int
stat: String
}
input LoadShoutsFilters {
topic: String
author: String
layouts: [String]
featured: Boolean
reacted: Boolean
after: Int
}
input LoadShoutsOptions {
filters: LoadShoutsFilters
with_author_captions: Boolean
limit: Int!
random_limit: Int
offset: Int
order_by: String
order_by_desc: Boolean
}
input ReactionBy {
shout: String
shouts: [String]
search: String
comment: Boolean
rating: Boolean
topic: String
created_by: Int
after: Int
sort: ReactionSort
}
input NotificationSeenInput {
notifications: [Int]
thread: Int
}
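# Example variables built from these inputs (illustrative; the field values
# are assumptions, not taken from this diff):
#   {"options": {"filters": {"topic": "culture", "featured": true},
#                "limit": 20, "offset": 0}}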

schema/mutation.graphql

@@ -0,0 +1,36 @@
type Mutation {
# author
rate_author(rated_slug: String!, value: Int!): CommonResult!
update_author(profile: ProfileInput!): CommonResult!
# editor
create_shout(inp: ShoutInput!): CommonResult!
update_shout(shout_id: Int!, shout_input: ShoutInput, publish: Boolean): CommonResult!
delete_shout(shout_id: Int!): CommonResult!
# follower
follow(what: FollowingEntity!, slug: String!): CommonResult!
unfollow(what: FollowingEntity!, slug: String!): CommonResult!
# topic
create_topic(input: TopicInput!): CommonResult!
update_topic(input: TopicInput!): CommonResult!
delete_topic(slug: String!): CommonResult!
# reaction
create_reaction(reaction: ReactionInput!): CommonResult!
update_reaction(reaction: ReactionInput!): CommonResult!
delete_reaction(reaction_id: Int!): CommonResult!
# collab
create_invite(slug: String, author_id: Int): CommonResult!
remove_author(slug: String, author_id: Int): CommonResult!
remove_invite(invite_id: Int!): CommonResult!
accept_invite(invite_id: Int!): CommonResult!
reject_invite(invite_id: Int!): CommonResult!
# notifier
notification_mark_seen(notification_id: Int!, seen: Boolean): CommonResult!
notifications_seen_after(after: Int!, seen: Boolean): CommonResult!
notifications_seen_thread(thread_id: String!, seen: Boolean): CommonResult!
}
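# Minimal client sketch in Python (illustrative; the endpoint URL, the auth
# header and the CommonResult selection are assumptions, not defined here):
#   import requests
#   MUTATION = 'mutation($slug: String!) { follow(what: AUTHOR, slug: $slug) { error } }'
#   resp = requests.post(
#       'https://example.com/graphql',
#       json={'query': MUTATION, 'variables': {'slug': 'some-author'}},
#       headers={'Authorization': 'Bearer <token>'},
#   )
#   print(resp.json())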

Some files were not shown because too many files have changed in this diff.