2174 Commits

Author SHA1 Message Date
867232e48f auth-fix 2025-07-23 13:29:49 +03:00
3826797317 panel-upgrade-and-fixes 2025-07-18 16:32:35 +03:00
5d766b7601 asynccontextmanagerf-fix 2025-07-07 22:53:01 +03:00
d03336174f admin-ui-fix 2025-07-07 17:51:48 +03:00
9f70654fb5 0.7.8 2025-07-07 14:10:31 +03:00
c8728540ed reactions-admin-tab 2025-07-04 12:39:41 +03:00
db92cc6406 changelog 2025-07-03 13:03:25 +03:00
2ca2a7b256 readme 2025-07-03 12:47:54 +03:00
f51d15c871 vers2 2025-07-03 12:31:16 +03:00
faf25d77a1 vers 2025-07-03 12:25:26 +03:00
b2df345072 badges 2025-07-03 12:23:06 +03:00
eb2140bcc6 0.7.7-topics-editing 2025-07-03 12:15:10 +03:00
441cca8045 0.7.5-topicfix 2025-07-03 00:20:10 +03:00
27c5a57709 0.7.1-fix 2025-07-02 22:49:20 +03:00
82111ed0f6 Squashed new RBAC 2025-07-02 22:30:21 +03:00
7585dae0ab less search logs 2025-07-01 12:18:40 +03:00
971b87c0be schema-fmt 2025-07-01 12:18:24 +03:00
27a358a41f panel-improves 2025-07-01 09:32:22 +03:00
547c934302 admin-body-editor-fix 2025-07-01 09:10:32 +03:00
30757fb38a simpler-parent-select 2025-07-01 01:21:08 +03:00
bb41c02d62 simpler-parent-select 2025-07-01 01:20:48 +03:00
2683982180 invites-table-creating 2025-07-01 00:01:20 +03:00
71f26a76c3 invites-fix4 2025-06-30 23:51:46 +03:00
5f48ec465a invites-fix3 2025-06-30 23:44:13 +03:00
6c95b0575a invites-fix2 2025-06-30 23:37:21 +03:00
5cfde98c22 invites-fix 2025-06-30 23:27:22 +03:00
b01de1fdc1 changelog-restored+internal-auth-fix 2025-06-30 23:10:48 +03:00
ab65fd4fd8 schema-fix 2025-06-30 22:43:32 +03:00
41395eb7c6 0.5.10-invites-crud 2025-06-30 22:19:46 +03:00
1e2c85e56a 0.5.9-collections-crud+spa-fix 2025-06-30 21:46:53 +03:00
952b294345 0.5.8-panel-upgrade-community-crud-fix 2025-06-30 21:25:26 +03:00
9de86c0fae community-filter 2025-06-30 11:38:22 +03:00
23a6bf66b9 pretty-modal3 2025-06-28 16:47:47 +03:00
eb7a85100b pretty-modal2 2025-06-28 16:42:34 +03:00
81926e4738 pretty-modal 2025-06-28 16:39:16 +03:00
d69310567c pretty-print3 2025-06-28 16:34:20 +03:00
9cbd5e4288 pretty-print2 2025-06-28 16:27:31 +03:00
b417000cc1 pretty-print 2025-06-28 15:30:29 +03:00
a58e0191d8 тщвфкл 2025-06-28 15:24:39 +03:00
567b18fe24 logo 2025-06-28 15:21:19 +03:00
3ccd8ce1d0 shouts-admin-fix5 2025-06-28 14:57:07 +03:00
8f93d77eef shouts-admin-fix4 2025-06-28 14:52:46 +03:00
20840b2d54 shouts-admin-fix3 2025-06-28 14:39:33 +03:00
4bafadde45 shouts-admin-fix2 2025-06-28 14:35:22 +03:00
c68e964bf5 shouts-admin-fix 2025-06-28 14:30:18 +03:00
cbecf13053 auth-improves 2025-06-28 14:04:23 +03:00
7c20415533 mediaitem-schema-fix 2025-06-28 14:00:18 +03:00
da951ed14e admins-roles-fix 2025-06-28 13:59:58 +03:00
52bf78320b middlware-fix 2025-06-28 13:56:05 +03:00
c48f5f9368 0.5.7-shouts-admin 2025-06-28 13:47:08 +03:00
7c11c9875f allow-all 2025-06-27 15:42:16 +03:00
b1775f4814 allow-vercel2 2025-06-27 15:34:51 +03:00
0c4a2bcf6d allow-vercel 2025-06-27 15:27:28 +03:00
f4d7cd8f67 merged-dev 2025-06-26 17:31:41 +03:00
599a6c9f59 authors-sort-fix3 2025-06-26 17:19:42 +03:00
b5aa7032eb docs+featured/unfeatured-upgrade 2025-06-19 11:28:48 +03:00
6a582d49d4 Merge branch 'feature/auth-internal' of https://dev.dscrs.site/discours.io/core into feature/auth-internal 2025-06-16 20:20:32 +03:00
8a5f4a2421 maintainance 2025-06-16 20:20:23 +03:00
6edc0ed3db docs + admin-fix + search-linter-fixes 2025-06-03 12:46:54 +03:00
0375939e73 hardcopy-search-service-code 2025-06-03 02:10:08 +03:00
1329aee1f1 search-combined 2025-06-03 02:00:44 +03:00
7fc9908857 nginx-revert-fix 2025-06-03 01:53:19 +03:00
a3e4d6a49a cors-fix 2025-06-03 01:48:23 +03:00
91133e11f6 nginx-fix 2025-06-03 01:45:06 +03:00
852cb6d653 postmerge3 2025-06-03 01:25:24 +03:00
36ea07b8fc postmerge2 2025-06-03 01:24:49 +03:00
1710fce600 nochecks1 2025-06-03 01:14:51 +03:00
6ab76a9754 nochecks 2025-06-03 01:13:51 +03:00
6689847c0e img 2025-06-03 01:13:13 +03:00
17b6069fb2 dplfix 2025-06-03 01:11:28 +03:00 (type-check failed, deploy skipped)
f00eea2c31 тщмук 2025-06-03 01:09:29 +03:00 (type-check failed, deploy skipped)
9555cc3125 311-str 2025-06-03 01:08:26 +03:00 (type-check failed, deploy skipped)
b97912c3c4 311 2025-06-03 01:06:09 +03:00 (type-check failed, deploy skipped)
8d410fcac3 312 2025-06-03 01:03:58 +03:00 (type-check failed, deploy skipped)
5e370eef95 3133 2025-06-03 01:02:46 +03:00 (type-check failed, deploy skipped)
b905ba59e4 311 2025-06-03 01:01:45 +03:00 (type-check failed, deploy skipped)
ba21a4b920 deply-fix 2025-06-03 00:58:07 +03:00 (type-check failed, deploy skipped)
aeb53a7354 nginx-simpler 2025-06-03 00:50:39 +03:00 (type-check failed, deploy skipped)
Stepan Vladovskiy
e1d1096674 feat: without staging deploying by gitea 2025-06-02 18:17:24 -03:00
9f16ee022b cors-applevel-fix 2025-06-02 23:59:09 +03:00 (deploy failed)
89f6c32b78 passlib-fixed-ver 2025-06-02 23:28:53 +03:00 (type-check failed, deploy skipped)
903065fdb3 search-debug 2025-06-02 22:40:10 +03:00 (type-check failed, deploy skipped)
63c96ef965 auth-fix 2025-06-02 22:28:17 +03:00 (type-check failed, deploy skipped)
21d28a0d8b token-storage-refactored 2025-06-02 21:50:58 +03:00 (type-check failed, deploy skipped)
cca2f71c59 0.5.0-typesafety-begin 2025-06-02 03:00:40 +03:00 (type-check failed, deploy skipped)
3327976586 Improve topic sorting: add popular sorting by publications and authors count 2025-06-02 02:56:11 +03:00
baca19a4d5 feat: set up automatic ruff autofix in pre-commit 2025-05-31 19:33:48 +03:00
ffe19ef238 follow-fix 2025-05-31 17:21:14 +03:00
0140fcd522 unfollow-fix 2025-05-31 17:18:31 +03:00
90260534eb sigil-on 2025-05-30 14:08:29 +03:00
f160ab4d26 middleware-fix+oauth-routes 2025-05-30 14:05:50 +03:00
f8ad73571c author-followers-fix 2025-05-30 13:48:02 +03:00
6ba5c04564 sigil-off 2025-05-30 08:56:49 +03:00
5bdfdad63e load_authors_by-fix 2025-05-30 08:54:20 +03:00
d917d63bf2 distinct-fix 2025-05-30 08:51:24 +03:00
1223c1d278 shout-id-fox+test-imports-fix 2025-05-29 23:40:27 +03:00
e375db4125 author_id-fix 2025-05-29 18:56:55 +03:00
6e5545b190 nonginx-cors 2025-05-29 18:26:10 +03:00
bdc9854037 dounble-headers-fix2 2025-05-29 18:21:54 +03:00
47b551068a dounble-headers-fix 2025-05-29 18:18:51 +03:00
fb4f98ebf6 allow-origin-fix 2025-05-29 18:15:50 +03:00
5f10599a51 double-true-nginx-fix 2025-05-29 18:12:19 +03:00
97d2b914b7 login-fix+draft_create-fix 2025-05-29 17:09:32 +03:00
4070f4fcde linted+fmt 2025-05-29 12:37:39 +03:00
d4c16658bd author-user-fix 2025-05-29 12:15:06 +03:00
6c0d96e7ac auth.orm-fix 2025-05-28 18:51:49 +03:00
301145fcff load_authors_by-debug 2025-05-26 20:34:51 +03:00
627be9a4f1 env-creds-mask 2025-05-26 13:31:25 +03:00
c06a187fd6 Merge branch 'fix/sv-authorSortingBy' of https://dev.dscrs.site/discours.io/core into feature/auth-internal 2025-05-26 12:42:47 +03:00
6d734af5ce depfix-dockernode5 2025-05-26 10:47:33 +03:00
8489320ab1 depfix-dockernode4 2025-05-26 10:04:22 +03:00
ee79091e35 depfix-dockernode3 2025-05-26 09:51:15 +03:00
92ba4c1c03 depfix-dockernode2 2025-05-26 09:48:33 +03:00
16c34ac792 depfix-dockernode 2025-05-26 09:40:15 +03:00
d18e99ee4c depfix-dockernode 2025-05-26 09:39:59 +03:00
bad4928219 depfix 2025-05-26 09:32:55 +03:00
Stepan Vladovskiy
80cb8df41c merged with dev 2025-05-25 20:53:25 +00:00
Stepan Vladovskiy
804f900c38 style: readme with python granian server start 2025-05-25 20:51:39 +00:00
Stepan Vladovskiy
b5dd690fbb feat: with author sorting by shouts, followers and names 2025-05-25 17:30:12 -03:00
071d8217dd migrated-test-staging 2025-05-25 23:21:53 +03:00 (deploy failed)
ab39b534fe auth fixes, search connected 2025-05-22 04:34:30 +03:00
32bc1276e0 Merge branch 'dev' into feature/auth-internal 2025-05-22 00:24:06 +03:00
91258721c6 Merge branch 'staging' of https://dev.dscrs.site/discours.io/core into feature/auth-internal 2025-05-21 23:00:03 +03:00
09f0747c1f minor fixes 2025-05-21 22:26:39 +03:00
5874d3ccae create_draft fix 2025-05-21 18:29:46 +03:00
ebf9dfcf62 INTERNAL AUTH FIX 2025-05-21 18:29:32 +03:00
f6156ccfa3 fix manage roles 2025-05-21 10:35:27 +03:00
d3a760b6ba auth-wip 2025-05-21 01:34:02 +03:00
Stepan Vladovskiy
82870a4e47 debug: prechase wrapped for time out 2025-05-20 11:26:30 -03:00
Stepan Vladovskiy
80b909d801 debug: with logs in prechashing process 2025-05-20 11:23:00 -03:00
Stepan Vladovskiy
1ada0a02f9 debug: with timeout for prechashing 2025-05-20 11:19:58 -03:00
Stepan Vladovskiy
44aef147b5 debug: moved precache to background to avoid stucking ... 2025-05-20 11:03:02 -03:00
1d64811880 userlist-demo-ready 2025-05-20 00:00:24 +03:00
Stepan Vladovskiy
2bebfbd4df debug: force rebuild core stag branch 2025-05-19 15:45:13 -03:00
dc5ad46df9 wip 2025-05-19 11:25:41 +03:00
Stepan Vladovskiy
f19248184a debug: without ersions for starlette and ariadne 2025-05-18 22:48:34 +00:00
Stepan Vladovskiy
7df9361daa debug: Dockerfile with build-essential 2025-05-18 22:43:20 +00:00 (deploy failed)
Stepan Vladovskiy
e38a1c1338 with vers for starlette, ariadne, granian 2025-05-18 22:36:47 +00:00 (deploy failed)
11e46f7352 adminpanel login fix 2025-05-16 10:30:02 +03:00
2d382be794 upgrade schema, resolvers, panel added 2025-05-16 09:23:48 +03:00
7bbb847eb1 tests, maintainance fixes 2025-05-16 09:22:53 +03:00
8a60bec73a tests upgrade 2025-05-16 09:11:39 +03:00
Stepan Vladovskiy
1281157d93 feat: check before parse graphQL 2025-05-14 14:42:40 -03:00
Stepan Vladovskiy
0018749905 Merge branch 'dev' into staging 2025-05-14 14:33:52 -03:00
a6b3b21894 draft-topics-fix2 2025-05-07 10:37:18 +03:00
51de649686 draft-topics-fix 2025-05-07 10:22:30 +03:00
2b7d5a25b5 unpublish-fix7 2025-05-03 11:52:14 +03:00
32cb810f51 unpublish-fix7 2025-05-03 11:52:10 +03:00
d2a8c23076 unpublish-fix5 2025-05-03 11:47:35 +03:00
96afda77a6 unpublish-fix5 2025-05-03 11:35:03 +03:00
785548d055 cache-revalidation-fix 2025-05-03 11:11:14 +03:00
d6202561a9 unpublish-fix4 2025-05-03 11:07:03 +03:00
3fbd2e677a unpublish-fix3 2025-05-03 11:00:19 +03:00
4f1eab513a unpublish-fix2 2025-05-03 10:57:55 +03:00
44852a1553 delete-draft-fix2 2025-05-03 10:56:34 +03:00
58ec60262b unpublish,delete-draft-fix 2025-05-03 10:53:40 +03:00
Stepan Vladovskiy
c344fcee2d refactoring(search.py): logs for search-combine and search-authors are equal 2025-05-02 18:28:06 -03:00
Stepan Vladovskiy
a1a61a6731 feat: follow same logic as search shouts for authors. Store them to Reddis cache + pagination 2025-05-02 18:17:05 -03:00
Stepan Vladovskiy
8d6ad2c84f refactor(author.py): remove verbose loging in resolver level 2025-05-02 18:04:10 -03:00
Stepan Vladovskiy
beba1992e9 fix(__init.py__): clean name of resolver for authors search loading 2025-04-29 19:49:47 -03:00
Stepan Vladovskiy
b0296d7747 fix(__init.py__): added created resolver in resolvers lists 2025-04-29 19:40:20 -03:00
Stepan Vladovskiy
98e3dff35e fix(author.py): resolver load_authors_search error fix 2025-04-29 18:00:38 -03:00
Stepan Vladovskiy
3782a9dffb fix(search.py, author.py): small fixes for start. logger import fails 2025-04-29 17:50:51 -03:00
Stepan Vladovskiy
93c00b3dd1 feat(author.py):addresolver for searching authors by text 2025-04-29 17:45:37 -03:00
5f3d90fc90 draft-publication-debug 2025-04-28 16:24:08 +03:00
f71fc7fde9 draft-publication-info 2025-04-28 11:10:18 +03:00
ed71405082 topic-commented-stat-fix4 2025-04-28 10:30:58 +03:00
79e1f15a2e topic-commented-stat-fix3 2025-04-28 10:30:04 +03:00
b17acae0af topic-commented-stat-fix2 2025-04-28 10:24:48 +03:00
d293819ad9 topic-commented-stat-fix 2025-04-28 10:13:29 +03:00
bcbfdd76e9 html wrap fix 2025-04-27 12:53:49 +03:00
b735bf8cab draft-creator-adding 2025-04-27 09:15:07 +03:00
20fd40df0e updated-by-auto-fix 2025-04-26 23:46:07 +03:00
bde3211a5f updated-by-auto 2025-04-26 17:07:02 +03:00
4cd8883d72 validhtmlfix 2025-04-26 17:02:55 +03:00
0939e91700 empty-body-fix 2025-04-26 16:19:33 +03:00
dfbdfba2f0 draft-create-fix5 2025-04-26 16:13:07 +03:00
b66e347c91 draft-create-fix 2025-04-26 16:03:41 +03:00
6d9513f1b2 reaction-by-fix4 2025-04-26 15:57:51 +03:00
af7fbd2fc9 reaction-by-fix3 2025-04-26 15:50:20 +03:00
631ad47fe8 reaction-by-fix2 2025-04-26 15:47:44 +03:00
3b3cc1c1d8 reaction-by-ашч 2025-04-26 15:42:15 +03:00
e4943f524c reaction-by-upgrade2 2025-04-26 15:35:31 +03:00
e7684c9c05 reaction-by-upgrade 2025-04-26 14:10:05 +03:00
bdae2abe25 drafts schema restore + publish/unpublish fixes 2025-04-26 13:11:12 +03:00
a310d59432 draft-resolvers 2025-04-26 11:45:16 +03:00
6b2ac09f74 unpublish-fix 2025-04-26 10:16:55 +03:00
Stepan Vladovskiy
fac43e5997 refact(search,reader): withput any kind of sorting 2025-04-24 21:00:41 -03:00
Stepan Vladovskiy
e7facf8d87 style(search.py): with indexing message 2025-04-24 18:45:00 -03:00
Stepan Vladovskiy
3062a2b7de refactor(search.py): with checking titles without bodies for not re indexing them every startup 2025-04-24 14:58:14 -03:00
Stepan Vladovskiy
c0406dbbf2 refac(search.py): without logger and rm dublicated def search-text 2025-04-24 14:18:14 -03:00
Stepan Vladovskiy
ab4610575f refactor(reader.py): to handle search combined 2025-04-24 13:56:38 -03:00
Stepan Vladovskiy
5425dbf832 refactor(search.py): simplify def search 2025-04-24 13:46:58 -03:00
Stepan Vladovskiy
a10db2d38a feat(search.py): combined search on shouts tittles and bodys 2025-04-24 13:35:36 -03:00
5024e963e3 notify-draft-hotfix 2025-04-24 12:12:48 +03:00
Stepan Vladovskiy
83e70856cd debug(server.py): i dont know why, but it is appears and i am rm it 2025-04-23 18:32:58 -03:00
Stepan Vladovskiy
11654dba68 feat: with three separate endpoints 2025-04-23 18:24:00 -03:00
Stepan Vladovskiy
ec9465ad40 merge dev 2025-04-20 19:24:59 -03:00
Stepan Vladovskiy
4d965fb27b feat(search.py): separate indexing of Shout Title, shout Body and Authors 2025-04-20 19:22:08 -03:00
aaa6022a53 draft-create-fix4 2025-04-16 14:17:59 +03:00
d6ada44c7f draft-create-fix3 2025-04-16 11:51:19 +03:00
243f836f0a draft-create-fix2 2025-04-16 11:48:47 +03:00
536c094e72 draft-create-fix 2025-04-16 11:45:38 +03:00
Stepan Vladovskiy
e382cc1ea5 Merge branch 'dev' into feat/sv-searching-txtai 2025-04-15 19:20:48 -03:00
6920351b82 schema-fix 2025-04-15 20:30:12 +03:00
eb216a5f36 draft-seo-handling 2025-04-15 20:16:01 +03:00
bd129efde6 update-seo-handling 2025-04-15 20:14:42 +03:00
b9f6033e66 generate seo text when draft created 2025-04-15 20:09:22 +03:00
710f522c8f schema-upgrade 2025-04-14 19:53:14 +03:00
0de4404cb1 draft-community 2025-04-14 16:02:19 +03:00
to
83d61ca76d Merge branch 'dev' into feat/sv-searching-txtai 2025-04-13 05:36:18 +00:00
1c61e889d6 update-draft-fix 2025-04-10 22:51:07 +03:00
fdedb75a2c topics-comments-stat 2025-04-10 19:14:27 +03:00
f20000f1f6 topic.stat.authors-fix2 2025-04-10 18:46:09 +03:00
7d50638b3a topic.stat.authors-fix 2025-04-10 18:39:31 +03:00
Stepan Vladovskiy
106222b0e0 debug: without debug logging. clean 2025-04-07 11:41:48 -03:00
Stepan Vladovskiy
c533241d1e fix(reader): sorting by rang not by id in cash 2025-04-03 13:51:13 -03:00
Stepan Vladovskiy
78326047bf fix(reader.py): change sorting and answer on querys 2025-04-03 13:20:18 -03:00
Stepan Vladovskiy
bc4ec79240 fix(search.py): store all results in cash not only first offset 2025-04-03 13:10:53 -03:00
Stepan Vladovskiy
a0db5707c4 feat: add cash for storing searchresalts and hold them for working pagination. Now we are have offset for use on frontend 2025-04-01 16:01:09 -03:00
Stepan Vladovskiy
ecc443c3ad refactor(reader.py): Remove the unnecessary topic joins that cause duplicate results 2025-04-01 12:57:46 -03:00
Stepan Vladovskiy
9a02ca74ad merged with dev 2025-03-31 13:38:32 -03:00
Stepan Vladovskiy
9ebb81cbd3 refactor(reader.py): rm debug line 2025-03-31 13:32:51 -03:00
abbc074474 updateby-fix 2025-03-31 14:39:02 +03:00
Stepan Vladovskiy
0bc55977ac debug(reader.py): query_with_stat(info) always 2025-03-27 15:18:08 -03:00
Stepan Vladovskiy
ff3a4debce debug(reader.py): trying to handle main topic ids founded 2025-03-27 14:43:17 -03:00
Stepan Vladovskiy
ae85b32f69 feat(type.qraphql): SearchResult with shout id 2025-03-27 14:06:52 -03:00
Stepan Vladovskiy
34a354e9e3 debug(reader.py): trying back shout id in query call 2025-03-27 11:54:56 -03:00
4f599e097f [0.4.17] - 2025-03-26
- Fixed `'Reaction' object is not subscriptable` error in hierarchical comments:
  - Modified `get_reactions_with_stat()` to convert Reaction objects to dictionaries
  - Added default values for limit/offset parameters
  - Fixed `load_first_replies()` implementation with proper parameter passing
  - Added doctest with example usage
  - Limited child comments to 100 per parent for performance
2025-03-26 08:54:10 +03:00
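The fix in the 0.4.17 entry above turns on one detail: SQLAlchemy model instances are not subscriptable, so code that walks a comment tree with `reaction["id"]`-style access needs plain dictionaries. A minimal sketch of that conversion, assuming a SQLAlchemy setup; the `Reaction` model, session, and field names below are illustrative stand-ins, not the repository's actual code:

```python
# Illustrative stand-ins for the repo's ORM model; only the dict-conversion
# pattern itself reflects the 0.4.17 entry above.
from sqlalchemy import Column, Integer, String, inspect
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Reaction(Base):  # hypothetical model, not the repo's actual class
    __tablename__ = "reaction"
    id = Column(Integer, primary_key=True)
    kind = Column(String)
    body = Column(String)
    shout = Column(Integer)     # id of the commented publication
    reply_to = Column(Integer)  # parent reaction id, NULL for root comments


def reaction_to_dict(reaction) -> dict:
    """Convert an ORM instance into a plain, subscriptable dict of its columns."""
    return {a.key: getattr(reaction, a.key) for a in inspect(reaction).mapper.column_attrs}


def get_reactions_with_stat(session, shout_id: int, limit: int = 10, offset: int = 0) -> list[dict]:
    """Load reactions as dicts, with the default limit/offset the entry mentions."""
    rows = (
        session.query(Reaction)
        .filter(Reaction.shout == shout_id)
        .limit(limit)
        .offset(offset)
        .all()
    )
    return [reaction_to_dict(r) for r in rows]
```

With rows converted up front, `rows[0]["id"]` works where a raw `Reaction` instance would raise the subscript error.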
a5eaf4bb65 commented->comments_count 2025-03-26 08:25:18 +03:00
Stepan Vladovskiy
e405fb527b refactor(search.py): moved to use one table docs for embdings and docs store 2025-03-25 16:42:44 -03:00
Stepan Vladovskiy
7f36f93d92 feat(search.py): detects both missing documents and null embeddings 2025-03-25 15:18:29 -03:00
Stepan Vladovskiy
f089a32394 debug(search.py): with more logs when check sync of indexing 2025-03-25 14:44:05 -03:00
Stepan Vladovskiy
1fd623a660 feat: with index sync endpoints configs 2025-03-25 13:31:45 -03:00
Stepan Vladovskiy
88012f1b8c debug(server.py): with 4 workers (threds). cheking reindexing 2025-03-25 12:21:59 -03:00
Stepan Vladovskiy
6e284640c0 feat: give little timeout for resource stab 2025-03-24 21:42:51 -03:00
Stepan Vladovskiy
077cb46482 debug: server.py -> threds 1 , search.py -> add 3 times reconect 2025-03-24 20:16:07 -03:00
Stepan Vladovskiy
60a13a9097 refactor(search.py): moved initialization logic in search-txtai instance 2025-03-24 19:47:02 -03:00
3c56fdfaea get_topics_paginated-fix 2025-03-22 18:49:15 +03:00
81a8bf3c58 query-type-fix 2025-03-22 18:44:31 +03:00
fe9984e2d8 type-fix 2025-03-22 18:39:14 +03:00
369ff757b0 [0.4.16] - 2025-03-22
- Added hierarchical comments pagination:
  - Created new GraphQL query `load_comments_branch` for efficient loading of hierarchical comments
  - Ability to load root comments with their first N replies
  - Added pagination for both root and child comments
  - Using existing `commented` field in `Stat` type to display number of replies
  - Added special `first_replies` field to store first replies to a comment
  - Optimized SQL queries for efficient loading of comment hierarchies
  - Implemented flexible comment sorting system (by time, rating)
2025-03-22 13:37:43 +03:00
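Reading the 0.4.16 bullets together, the scheme is: paginate the root comments of a publication, then attach each root's first N replies under a synthetic `first_replies` key. A sketch of that shape, reusing the hypothetical `Reaction` model and `reaction_to_dict()` helper from the 0.4.17 sketch above; time ordering is approximated by id, and all parameter and field names are assumptions rather than the actual `load_comments_branch` resolver:

```python
def load_comments_branch(session, shout_id: int, limit: int = 10, offset: int = 0,
                         sort: str = "newest", replies_limit: int = 100) -> list[dict]:
    """Paginate root comments and attach each root's first replies."""
    # Root comments: reactions on the shout with no parent.
    order = Reaction.id.desc() if sort == "newest" else Reaction.id.asc()
    roots = (
        session.query(Reaction)
        .filter(Reaction.shout == shout_id, Reaction.reply_to.is_(None))
        .order_by(order)
        .limit(limit)
        .offset(offset)
        .all()
    )
    branch = []
    for root in roots:
        node = reaction_to_dict(root)
        # First replies, capped per parent (cf. the 100-per-parent cap in 0.4.17).
        replies = (
            session.query(Reaction)
            .filter(Reaction.reply_to == root.id)
            .order_by(Reaction.id.asc())
            .limit(replies_limit)
            .all()
        )
        node["first_replies"] = [reaction_to_dict(r) for r in replies]
        branch.append(node)
    return branch
```

The per-root reply query keeps the sketch readable; the entry's "Optimized SQL queries" bullet suggests the production version batches this instead, e.g. one IN-query over all root ids rather than one query per root.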
615f1fe468 topics+authors-reimplemented-cache 2025-03-22 11:47:19 +03:00
86ddb50cb8 topics caching upgrade 2025-03-22 09:31:53 +03:00
Stepan Vladovskiy
316375bf18 debug(search.py): encrease batch size for bulk indexing 2025-03-21 17:56:54 -03:00
Stepan Vladovskiy
fb820f67fd debug(search.py): encrease batch size for bulk indexing 2025-03-21 17:48:26 -03:00
Stepan Vladovskiy
f1d9f4e036 feat(search.py): with db reset endpoint 2025-03-21 17:28:54 -03:00
Stepan Vladovskiy
ebb67eb311 debug: decrease chars in search.py for bulk indexing 2025-03-21 16:53:00 -03:00
Stepan Vladovskiy
50a8c24ead feat(search.py): documnet for bulk indexing are categorized 2025-03-21 15:40:29 -03:00
Stepan Vladovskiy
eb4b9363ab debug: change logs entris and indexing not wraps all in documents 2025-03-21 14:32:45 -03:00
Stepan Vladovskiy
19c5028a0c debug: Limit max chars for bulk indexing 2025-03-21 14:18:32 -03:00
Stepan Vladovskiy
57e1e8e6bd debug: more logs in indexing 2025-03-21 14:10:09 -03:00
Stepan Vladovskiy
385057ffcd debug: with logs in indexing procedure 2025-03-21 13:45:50 -03:00
Stepan Vladovskiy
90699768ff debug: start index 2025-03-21 13:30:23 -03:00
31c32143d0 reaction-to-feature-fix 2025-03-21 12:34:10 +03:00
b63c387806 jsonfix3 2025-03-20 12:52:44 +03:00
dbbfd42e08 redeploy 2025-03-20 12:35:55 +03:00
47e12b4452 fx2 2025-03-20 12:33:27 +03:00 (deploy failed)
e1a1b4dc7d fx 2025-03-20 12:25:18 +03:00
ca01181f37 jsonfix 2025-03-20 12:24:30 +03:00
0aff77eda6 portfix 2025-03-20 12:13:14 +03:00
8a95aa1209 jsonload-fix 2025-03-20 12:05:58 +03:00
a4a3c35f4d lesscode 2025-03-20 12:04:47 +03:00
edece36ecc jsonenc-fix 2025-03-20 11:59:43 +03:00
247fc98760 cachedep-fix+orjson+fmt 2025-03-20 11:55:21 +03:00
a1781b3800 depfix 2025-03-20 11:36:12 +03:00
450c73c060 nothreads 2025-03-20 11:30:36 +03:00
3a1924279f redeploy 2025-03-20 11:23:37 +03:00
094e7e6fe2 granian-fix 2025-03-20 11:19:29 +03:00 (deploy failed)
ae48a18536 comment-delete-handling-patch 2025-03-20 11:01:39 +03:00
Stepan Vladovskiy
ad0ca75aa9 debug: no redis for indexing in nackend side 2025-03-19 14:47:31 -03:00
354bda0efa drafts-fix 2025-03-13 22:21:43 +03:00
Stepan Vladovskiy
39242d5e6c debug: add logs in search.py and change and input validation ... index ver too 2025-03-12 14:13:55 -03:00
Stepan Vladovskiy
24cca7f2cb debug: something wrong one stap back with logs 2025-03-12 13:11:19 -03:00
Stepan Vladovskiy
a9c7ac49d6 feat: with logs >>> 2025-03-12 13:07:27 -03:00
Stepan Vladovskiy
f249752db5 feat: moved txtai and search procedure in different instance 2025-03-12 12:06:09 -03:00
856f4ffc85 i 2025-03-09 21:01:52 +03:00
Stepan Vladovskiy
c0b2116da2 feat(db.py): added fetch_all_shouts, to populate the search index 2025-03-05 20:32:34 +00:00
Stepan Vladovskiy
59e71c8144 debug: fixed workflows gitea 2025-03-05 20:17:34 +00:00
Stepan Vladovskiy
e6a416383d debug: fixed workflows gitea 2025-03-05 20:16:32 +00:00
Stepan Vladovskiy
d55448398d feat(search.py): change to txtai server, with ai model. And fix granian workers 2025-03-05 20:08:21 +00:00
20eba36c65 create-draft-fix 2025-02-27 16:16:41 +03:00
8cd0c8ea4c less-logs 2025-02-19 00:23:42 +03:00
2939cd8adc pyright-conf 2025-02-19 00:21:51 +03:00
41d8253094 lesslogs 2025-02-14 21:49:21 +03:00
5263d1657e 0.4.11-b 2025-02-12 22:34:57 +03:00
1de3d163c1 0.4.11-create_draft-fix 2025-02-12 21:59:05 +03:00
d3ed335fde main_topic-fix7-debug 2025-02-12 20:04:06 +03:00
f84be7b11b main_topic-fix7 2025-02-12 19:33:02 +03:00
b011c0fd48 main_topic-fix6 2025-02-12 19:21:21 +03:00
fe661a5008 main_topic-json-fix 2025-02-12 02:23:51 +03:00
e97823f99c main_topic-fix4 2025-02-12 00:55:55 +03:00
a9dd593ac8 main_topic-fix3 2025-02-12 00:47:39 +03:00
1585e55342 main_topic-fix2 2025-02-12 00:39:25 +03:00
52b608da99 main_topic-fix 2025-02-12 00:31:18 +03:00
5a4f75537d debug more 2025-02-11 23:47:54 +03:00
ce4a401c1a minor-debug 2025-02-11 23:44:29 +03:00
7814e3d64d 0.4.10 2025-02-11 12:40:55 +03:00
9191d83f84 usermoved 2025-02-11 12:24:02 +03:00
5d87035885 0.4.10-a 2025-02-11 12:00:35 +03:00
25b61c6b29 simple-dockerfile 2025-02-10 19:10:13 +03:00
9671ef2508 author-stat-fix 2025-02-10 18:38:26 +03:00
759520f024 initdb-fix 2025-02-10 18:15:54 +03:00
a84d8a0c7e 0.4.9-c 2025-02-10 18:04:08 +03:00
20173f7d1c trigdeploy 2025-02-10 11:30:58 +03:00
4a835bbfba 0.4.9-b 2025-02-09 22:26:50 +03:00
37a9a284ef 0.4.9-drafts 2025-02-09 17:18:01 +03:00
dce05342df fmt 2025-02-04 15:27:59 +03:00
56db33d7f1 get_my_rates_comments-fix 2025-02-04 02:53:01 +03:00
40b4703b1a get_cached_topic_followers-fix 2025-02-04 01:40:00 +03:00
747d550d80 fix-revalidation2 2025-02-04 00:08:25 +03:00
84de0c5538 fix-revalidation 2025-02-04 00:01:54 +03:00
33ddfc6c31 after-handlers-cache 2025-02-03 23:22:45 +03:00
26b862d601 more-revalidation 2025-02-03 23:16:50 +03:00
9fe5fea238 editor-fix 2025-02-03 19:06:00 +03:00
0347b6f5ff logs-update-shout-5 2025-02-02 21:57:51 +03:00
ffb75e53f7 logs-update-shout-4 2025-02-02 21:55:22 +03:00
582ba75643 logs-update-shout-3 2025-02-02 21:49:28 +03:00
2db1da3194 logs-update-shout-2 2025-02-02 21:45:24 +03:00
fd6b0ce5fd logs-update-shout 2025-02-02 21:41:03 +03:00
Stepan Vladovskiy
670a477f9a debug: ok, moved map on tgop layaer of nginx. now this version without map 2025-01-29 14:38:33 -03:00
Stepan Vladovskiy
46945197d9 debug: with hardcoded domain testing.dscrs.site and in default non for understanding 2025-01-29 13:59:47 -03:00
Stepan Vladovskiy
4ebc64d13a fix: so, the problem can be somewhere else, becasue map is working fine. And we are trying to find where it is ovveriting issue. Modified main.py with some extra rules. Maybe it is helps 2025-01-28 20:03:56 -03:00
Stepan Vladovskiy
bc9560e56e feat: safe version. debug is not give results. this is simple version. In case of code beuty can be rewrite with previos nodebug version 2025-01-28 19:38:19 -03:00
Stepan Vladovskiy
38f5aab9e0 debug: not safe version. back to safe map function 2025-01-28 19:27:24 -03:00
Stepan Vladovskiy
95f49a7ca5 debug: rewrite nginx file to use it without variables logic 2025-01-28 19:23:02 -03:00
Stepan Vladovskiy
cd8f5977af debug: sv just in case for testing maping issue and trying to find place with filter maybe, in this option if dscrs.site origin then allow is discours.io 2025-01-28 18:57:27 -03:00
Stepan Vladovskiy
a218d1309b debug: no force optins and simpl regexp logic 2025-01-28 18:24:10 -03:00
Stepan Vladovskiy
113d4807b2 feat:sv with force flag 2025-01-28 17:55:41 -03:00
Stepan Vladovskiy
9bc3cdbd0b debug: sv clean testing in cors polici maping because it is redundant and add Allow origin heade custom log 2025-01-28 17:48:59 -03:00 (deploy failed)
Stepan Vladovskiy
79e6402df3 debug: added for dscrs.site separate rule in nginx config map part 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
ec2e9444e3 debug: nginx conf sigil with custom logs with headers and backslash in dscrs.site 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
a86a2fee85 debug: nginx conf sigil file withou custom log and add for domain dscrs.site determinating backsplash 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
aec67b9db8 debug: layer with logs added for debug allow_orrigin missing for dscrs.site domain fix back slash 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
0bbe1d428a debug: layer with logs added for debug allow_orrigin missing for dscrs.site domain 2025-01-28 17:48:59 -03:00
Stepan Vladovskiy
a05f0afa8b debug: layer with logs added for debug allow_orrigin missing for dscrs.site domain 2025-01-28 17:48:59 -03:00
5e2842774a media-field-workarounds 2025-01-28 15:38:10 +03:00 (deploy failed)
e17690f27b nostat 2025-01-26 18:16:33 +03:00 (deploy failed)
cb990b61a3 gqldata 2025-01-26 18:01:04 +03:00
cc837288bb simpler-reader-field 2025-01-26 17:59:08 +03:00
4a26e4f75b fmt 2025-01-26 17:53:16 +03:00
eee2c1a13d fieldresolver-fix 2025-01-26 17:52:45 +03:00
209d5c1a5e shout-media-field-resolver 2025-01-25 15:31:23 +03:00
4f4affaca4 cache-invalidate-fix-3 2025-01-25 15:19:19 +03:00
d59710309d cache-invalidate-fix-2 2025-01-25 11:57:10 +03:00
88525276c2 cache-invalidate-fix 2025-01-25 11:23:20 +03:00
1f4b3d3eee create-shout-fix6 2025-01-22 00:43:59 +03:00
76a4c5fb53 create-shout-fix5 2025-01-21 21:54:23 +03:00
8f6b96cb0f create-shout-fix4 2025-01-21 20:53:27 +03:00
76a707c7fd create-shout-fix3 2025-01-21 20:39:54 +03:00
ae584abb5b create-shout-fix2 2025-01-21 19:58:20 +03:00
eff8278cc3 create-shout-fix 2025-01-21 19:33:28 +03:00
8432a00691 create-shout-fix2 2025-01-21 18:28:03 +03:00
1ed185a701 create-shout-fix 2025-01-21 18:19:25 +03:00
562ce3296e published_at-revert2 2025-01-21 17:52:04 +03:00
ddc2d69e54 published_at-revert 2025-01-21 17:50:02 +03:00
f6863b32e8 published_at-fix5 2025-01-21 17:44:29 +03:00
9bf9f3d384 published_at-fix4 2025-01-21 16:40:52 +03:00
998d01c751 published_at-fix3 2025-01-21 15:57:22 +03:00
57d04ddf1c published_at-fix2 2025-01-21 13:34:20 +03:00
0ba2d2ecee published_at-fix 2025-01-21 13:11:15 +03:00
839cc84c26 stat-syntetic 2025-01-21 10:21:38 +03:00
c80c282118 prepare-topics-authors-dicts 2025-01-21 10:09:49 +03:00
5acae03c55 fmt 2025-01-21 10:09:28 +03:00
49be05d4db shout-create-fix 2025-01-18 10:57:34 +03:00
ae7580252b invcache-fix6 2025-01-16 06:49:15 +03:00
7c85f51436 invcache-fix5 2025-01-16 06:42:12 +03:00
83ec475cc8 invcache-fix4 2025-01-16 06:01:47 +03:00
c1c095a73c invcache-fix3 2025-01-16 06:00:15 +03:00
c4e84364c6 invcache-fix 2025-01-16 05:53:37 +03:00
8287b82554 invalidate-cache-fix 2025-01-16 05:46:31 +03:00
56fe8bebbe invalidate-cache-fix 2025-01-16 05:45:53 +03:00
4fffd1025f debug-update-shout-2 2025-01-16 05:42:53 +03:00
576e1ea152 debug-update-shout 2025-01-16 05:34:43 +03:00
5e1021a18e corsfix-34 2024-12-24 14:22:49 +03:00
dcbdd01f53 cors-fix-33 2024-12-24 14:04:52 +03:00
608bf8f33a tokencheckfix 2024-12-22 11:33:57 +03:00
48994d8bfd claims 2024-12-22 00:34:35 +03:00
4ffcbf36d3 nobearer 2024-12-22 00:30:04 +03:00
e539e0334f Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev 2024-12-22 00:24:29 +03:00
1898b3ef3f auth-debug 2024-12-22 00:22:26 +03:00
Stepan Vladovskiy
1100a1b66f debug: add dscrs.site map in cors 2024-12-20 14:47:40 -03:00
Stepan Vladovskiy
04a0a6ddf4 debug: Sigil back to map with only discours.io domain 2024-12-20 14:35:59 -03:00
bfbb307d6b corsfix8 2024-12-17 20:26:17 +03:00
1c573f9a12 corsfix7 2024-12-17 20:17:19 +03:00
6b1533402a corsfix6 2024-12-17 20:14:01 +03:00
fdf5f795da corsfix5 2024-12-17 20:09:39 +03:00
daf5336410 corsfix4 2024-12-17 20:06:15 +03:00
0923dc61d6 corsfix3 2024-12-17 20:02:41 +03:00
4275131645 corslogs 2024-12-17 19:52:49 +03:00
c64d5971ee corsfix2 2024-12-17 19:51:00 +03:00
3968bc3910 sigilfix 2024-12-17 19:46:47 +03:00
99b0748129 mapfix2 2024-12-17 00:31:02 +03:00
fcaac9cc41 mapfix 2024-12-17 00:27:07 +03:00
b5c6535ee8 wh5 2024-12-16 20:14:11 +03:00
cf6150b155 wh4 2024-12-16 20:10:39 +03:00
5d1bfeaa9a headercase 2024-12-16 20:07:56 +03:00
c4ffc08bae webfk 2024-12-16 20:03:00 +03:00
f73f3608c0 webhook-fix2 2024-12-16 19:50:25 +03:00
5944d9542e webhook-fix 2024-12-16 19:44:24 +03:00
2aefcd2708 corsfix 2024-12-16 19:39:31 +03:00
3af4c1ac7a issuer-port-fix 2024-12-16 19:23:45 +03:00
aff0e8b1df webhookfix 2024-12-16 19:13:16 +03:00
e4a9bfa08b authdev2 2024-12-16 19:06:47 +03:00
a41a5ad39a authdev 2024-12-16 18:57:10 +03:00
434d59a7ba nginx-fix10 2024-12-16 14:05:26 +03:00
407de622ec allow-origin-fix 2024-12-16 14:01:05 +03:00
be03e7b931 viewed-storage-update 2024-12-12 02:03:19 +03:00
d02ae5bd3f fmt+debug 2024-12-12 01:04:11 +03:00
87506b0478 check-inner-logix 2024-12-12 00:32:27 +03:00
3a819007c1 morelogs-update 2024-12-12 00:29:04 +03:00
961ba9c616 warnbetter 2024-12-12 00:21:51 +03:00
7b58c7537e warn-not-found 2024-12-12 00:20:43 +03:00
a1486b3bba comments-rates-fix 2024-12-11 23:49:58 +03:00
f3c06e1969 mutation-fix-2 2024-12-11 23:06:55 +03:00
354d9c20a3 mutation-fix 2024-12-11 23:04:45 +03:00
fbcee18db1 fmt 2024-12-11 23:02:14 +03:00
c5d21c3554 check-webhook 2024-12-11 23:01:03 +03:00
4410311b80 webhook-is-mutation 2024-12-11 22:54:37 +03:00
8f5ee384ff logsdebug 2024-12-11 22:52:25 +03:00
bffc48e5d9 log-auth-graphql 2024-12-11 22:49:08 +03:00
9cead2ab0e search-off 2024-12-11 22:31:41 +03:00
444c853f54 webhook-fix 2024-12-11 22:21:05 +03:00
7751b0d0f8 startup-fixes 2024-12-11 22:10:48 +03:00
fe93439194 webhook-add 2024-12-11 22:07:36 +03:00
6762b18135 get-author-followers-fix2 2024-12-11 21:34:43 +03:00
9439d71249 get-author-followers-fix 2024-12-11 21:25:03 +03:00
b8f86e5d5e last-commented-fix 2024-12-04 18:25:51 +03:00
597fd6ad55 last_commented_at 2024-12-04 17:40:45 +03:00
a71a6fcc41 saerch-fail-toler 2024-11-22 20:32:14 +03:00
9dde136c9c search-fail-tolerance 2024-11-22 20:23:45 +03:00
779cb9a87c following-error 2024-11-22 20:19:56 +03:00
79f7c914d7 v0.4.7 2024-11-20 23:59:11 +03:00
a9d181db8f fixapi 2024-11-18 23:23:20 +03:00
283ad80632 fasternomyreate 2024-11-18 22:24:54 +03:00
e9f9582110 sqlsynt2 2024-11-18 22:21:15 +03:00
3a5449df79 sqlsynt 2024-11-18 22:19:06 +03:00
cf88c165ee nomyratestat2 2024-11-18 22:16:42 +03:00
2fec47d363 nomyratestat 2024-11-18 22:13:49 +03:00
6966d900fa myrates-api-minor-fix3 2024-11-18 22:10:25 +03:00
773615e201 myrates-api-minor-fix2 2024-11-18 22:05:45 +03:00
080ba76684 myrates-api-minor-fix 2024-11-18 22:03:11 +03:00
25f929026f commend-id-fix 2024-11-18 13:14:32 +03:00
47a8493824 no-my-rate 2024-11-18 11:31:19 +03:00
821a4c0df1 info-context-debug 2024-11-14 14:11:51 +03:00
1a371b191a .. 2024-11-14 14:00:33 +03:00
471781f942 debug-stat-wip 2024-11-14 13:42:40 +03:00
b4eff32427 authorized-context-debug 2024-11-14 13:33:09 +03:00
2d0ca1c7bf myrate-fix+log 2024-11-14 13:25:33 +03:00
88812da592 myrate-fix 2024-11-14 13:21:32 +03:00
bffa4aa1ef unrated-fix5 2024-11-14 01:36:15 +03:00
4adf3d5a1e unrated-fix3 2024-11-14 01:32:00 +03:00
4b111951b7 unrated-fix 2024-11-14 00:55:25 +03:00
b91e4ddfd1 unrated-fix2 2024-11-14 00:29:15 +03:00
cd90e7a2d0 unrated-fix 2024-11-14 00:26:12 +03:00
af2d8caebe toler-none2 2024-11-12 18:52:48 +03:00
f32b6a6a27 toler-none 2024-11-12 18:49:44 +03:00
8116160b4d my_rate-stat 2024-11-12 17:56:20 +03:00
34511a8edf join-maintopic-unrated 2024-11-03 11:32:05 +03:00
08fb1d3510 create-reaction-shout 2024-11-02 22:38:40 +03:00
6d61e038e7 create-reaction-fix-4 2024-11-02 22:34:20 +03:00
bcb602d3cf create-reaction-fix3 2024-11-02 19:48:43 +03:00
f4a8a653d0 create-reaction-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 19:16:52 +03:00
2c981bc972 create-reaction-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 13:52:03 +03:00
b322219173 create-reaction-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 13:49:22 +03:00
52567557e8 debug-create-reaction
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 13:44:00 +03:00
3f1ef8dfd8 proposals-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 13:35:30 +03:00
1b43f742d3 tolerate-double-follow
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 12:33:52 +03:00
5f3f00366f tolerate-double-follow 2024-11-02 12:33:35 +03:00
a61bb6da20 unfollow-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 12:12:19 +03:00
11611fd577 following-fixes+fmt
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 12:09:24 +03:00
09a6d085fd revalidation-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:56:47 +03:00
d4548f71c7 lesslogs
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:49:30 +03:00
9b67f1aa21 notify-follower-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:42:24 +03:00
2e91f9399a revalidation-follower-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:40:02 +03:00
0eb95e238b following-debug
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 11:35:02 +03:00
65bd2ef9cf author-created-at-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 06:27:31 +03:00
9a6c995589 logs
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 04:44:07 +03:00
8965395377 viewed-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 04:28:16 +03:00
38d39dd618 debug-create-reaction
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-02 04:24:41 +03:00
0c009495a3 async-revised
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-02 00:26:57 +03:00
54c59d26b9 media-item-type
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 22:57:20 +03:00
92e49c8ad9 group-by-shout
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:23:23 +03:00
493e6cf92c psql8
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:17:56 +03:00
1dcc0cf8c5 psql7
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:11:42 +03:00
d3daf2800e psql6
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 22:04:39 +03:00
d0b5c2d3f9 psql5
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 22:01:41 +03:00
0930e80b9b psql2 2024-11-01 21:52:25 +03:00
044d28cfe9 psql
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:49:31 +03:00
4b4234314d fields-group 2024-11-01 21:45:51 +03:00
baa8d56799 .
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:42:20 +03:00
d40728aec9 nodist4 2024-11-01 21:39:05 +03:00
c78347b6f9 nodist2 2024-11-01 21:35:33 +03:00
021765340a nodist
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:30:52 +03:00
567507c412 groupby-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:25:25 +03:00
8bf0566d72 row.stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:17:51 +03:00
0874794140 stat-dict
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:09:53 +03:00
154477e1ad logfix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:04:30 +03:00
f495953f6a media-item-type
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 21:03:09 +03:00
fba0f34020 nodistinct
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:28:59 +03:00
4752ef19b2 order-by-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:27:25 +03:00
3e50902f07 json-distinct-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:24:09 +03:00
a0f29eb5b8 json-builder-compat
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:11:58 +03:00
fcbbe4fcac fixed-shouts-load
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 20:02:46 +03:00
4ef5d172a0 results-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-11-01 17:26:45 +03:00
31bd421e22 merged-hub
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 15:06:21 +03:00
dd60d1a1c4 deployfix
All checks were successful
Deploy on push / deploy (push) Successful in 5s
2024-11-01 14:33:34 +03:00
1892ea666a apply-options-moved
All checks were successful
Deploy on push / deploy (push) Successful in 1m26s
2024-11-01 14:29:58 +03:00
3a5297015f rating-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-11-01 14:09:22 +03:00
8ad00f0fa5 case-whens-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m21s
2024-11-01 14:07:10 +03:00
3247a3674f feed-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m22s
2024-11-01 14:00:19 +03:00
d88f905609 reworked-feed+reader
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2024-11-01 13:50:47 +03:00
a01a3f1d7a reader-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 1m18s
2024-11-01 12:27:13 +03:00
75e7079087 reader-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-11-01 12:06:23 +03:00
7f58bf48fe row-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2024-11-01 11:57:49 +03:00
f7c41532a5 feed-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 1m16s
2024-11-01 11:29:41 +03:00
a105372b15 norandomtopic-onserver-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m17s
2024-11-01 11:09:16 +03:00
54e26fb863 main-topic
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-11-01 10:29:18 +03:00
600d52414e txt
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-11-01 10:04:32 +03:00
5a9a02d3a4 0.4.6 2024-11-01 09:50:19 +03:00
bcac627345 main-x-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-11-01 07:51:33 +03:00
5dd47b3cd4 maintopic-nullable
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-11-01 07:43:10 +03:00
c9328041ce main_-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 21:58:24 +03:00
ddd18f8d70 media-type
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-10-31 21:45:55 +03:00
1ccc5fb9e7 more-agile-query-shout-api
Some checks failed
Deploy on push / deploy (push) Failing after 16s
2024-10-31 21:11:54 +03:00
fc930a539b 5random
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 20:42:09 +03:00
e7b4e59b65 authors_and_topics-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 20:34:25 +03:00
e2b6ae5e81 agile-query
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 20:28:52 +03:00
827300366d unrated-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:57:09 +03:00
8c05589168 optimized-query
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:48:06 +03:00
f29eb5f35a separate-subq
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:11:41 +03:00
62370b94b3 reader-query-optimized
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 19:06:58 +03:00
1114c7766d get-shout-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 18:37:00 +03:00
0c83b9c401 query-shouts-simpler 2024-10-31 18:28:09 +03:00
f437119711 unrated-sort-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 17:47:07 +03:00
eaa23134de comments-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 17:27:07 +03:00
00fe5d91a7 dictify
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 15:41:18 +03:00
071022c63b _sa_instance..
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 15:37:32 +03:00
3ace2093b2 keep-json
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-31 15:32:13 +03:00
42e06bd2e6 jsonfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:28:41 +03:00
6dd6fd764a no-create-json
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:25:22 +03:00
21888c6d00 ismain-field
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:18:27 +03:00
2bc0ac1cff maintopicslug
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-10-31 15:14:58 +03:00
bf3fd4b39a captionfix-2
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:10:26 +03:00
7eed615991 author-captions-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 15:05:22 +03:00
6e56eba0c2 oneval-subq
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:50:45 +03:00
5f2f4262a5 scalar
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:48:15 +03:00
882ef0288a whensfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:29:47 +03:00
9416165699 minorfix-3
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:27:13 +03:00
c72588800f minorfix4
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:20:22 +03:00
1c6678d55d minorfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:14:54 +03:00
91e4e751d8 readerfix2 2024-10-31 14:11:59 +03:00
bc4432c057 readerfix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 14:09:33 +03:00
38185273af get-shouts-with-stats-fix8 2024-10-31 14:04:28 +03:00
5fb7ba074c get-shouts-with-stats-fix7 2024-10-31 14:02:36 +03:00
d83be5247b get-shouts-with-stats-fix6 2024-10-31 14:00:56 +03:00
0f87ac6a00 get-shouts-with-stats-fix5 2024-10-31 13:59:18 +03:00
f61a2d07fe get-shouts-with-stats-fix4
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 13:52:32 +03:00
d48577b191 get-shouts-with-stats-fix3 2024-10-31 13:46:33 +03:00
4aec829c74 get-shouts-with-stats-fix2 2024-10-31 13:42:46 +03:00
d8496bf094 get-shouts-with-stats-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 13:39:38 +03:00
55a0474602 reader-reactionalias-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 13:25:05 +03:00
751f3de4b1 jsonify2
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-31 12:49:18 +03:00
5b211c349e create_shout-community-1-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-10-31 09:33:17 +03:00
a578e8160e unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m14s
2024-10-24 16:27:16 +03:00
9ac533ee73 fmt 2024-10-24 00:01:09 +03:00
d9644f901e more-toler3
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-24 00:00:04 +03:00
0a26f2986f more-toler2
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-10-23 23:59:17 +03:00
7cf3f91dac more-toler
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2024-10-23 23:57:52 +03:00
33bedbcd67 restoring-test
All checks were successful
Deploy on push / deploy (push) Successful in 1m15s
2024-10-23 11:29:44 +03:00
8de91a8232 hgetall-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-23 11:25:56 +03:00
23514ca5a4 get_shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-23 11:22:07 +03:00
79ab0d6a4c init-create-fix
All checks were successful
Deploy on push / deploy (push) Successful in 31s
2024-10-21 20:21:31 +03:00
1476d4262d trick-import
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-21 20:19:52 +03:00
724f901bbd community-stat-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2024-10-21 16:57:03 +03:00
a4e48eb3f4 community-cudl
All checks were successful
Deploy on push / deploy (push) Successful in 1m5s
2024-10-21 16:42:50 +03:00
c6f160c8cf update-api-3
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-21 12:15:44 +03:00
62f2876ade queryfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-21 11:53:00 +03:00
93b7c6bf4d rolesfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2024-10-21 11:48:51 +03:00
635ff4285e communityfollower-roles
All checks were successful
Deploy on push / deploy (push) Successful in 1m3s
2024-10-21 11:29:57 +03:00
0cf963240e virtual-cols-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m6s
2024-10-21 11:08:16 +03:00
160f02e67f 0.4.5-api-update
All checks were successful
Deploy on push / deploy (push) Successful in 1m49s
2024-10-21 10:52:23 +03:00
045d2ddadf create-all-tables-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 1m35s
2024-10-15 19:52:12 +03:00
63ebf3af2d create-all-tables-fix 2024-10-15 19:50:17 +03:00
bf33cdc95c fixed-coales
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-15 11:12:09 +03:00
76aeddbde2 ignoreup 2024-10-15 10:07:44 +03:00
3b1c4475c6 readme-update 2024-10-14 19:06:30 +03:00
5966512a8f poetry-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-10-14 18:28:16 +03:00
8b65c87750 add-fakeredis
All checks were successful
Deploy on push / deploy (push) Successful in 1m19s
2024-10-14 13:08:43 +03:00
6f6b619c11 graphql-handler-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 12:31:55 +03:00
3188a67661 async+fmt-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 12:19:30 +03:00
4e7fb953ba try-to-fix-2 2024-10-14 12:13:18 +03:00
173c865a69 try-to-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m46s
2024-10-14 11:11:13 +03:00
d5ba8d1cde correlate-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 10:47:38 +03:00
998db09c09 shout-query-substat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-10-14 09:37:40 +03:00
78d575863d logfixes 2024-10-14 09:33:31 +03:00
503e859b5c query-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m33s
2024-10-14 09:23:11 +03:00
5dc61dc397 db-init-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2024-10-14 09:12:20 +03:00
7c86d95f5e sqlite-support
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-14 02:05:20 +03:00
5c40ab3d00 312
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-10-13 00:49:06 +03:00
31867d3c6c ex-support
All checks were successful
Deploy on push / deploy (push) Successful in 1m59s
2024-09-27 10:18:08 +03:00
e2b54b37dd sentry-log-detailed
All checks were successful
Deploy on push / deploy (push) Successful in 1m13s
2024-08-26 21:18:33 +03:00
6a6df10825 fixed2
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-08-22 16:04:23 +03:00
15ffc9eb3e restore-authorizer-dev
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s
2024-08-22 15:55:26 +03:00
5095b0b4c0 get-with-stat-as-arg
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-08-14 18:33:11 +03:00
4c126fd859 cache-author-fix 2024-08-14 16:30:52 +03:00
8f3fded5fe nosp
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-14 14:32:40 +03:00
96ea356c62 ms2 2024-08-12 11:16:25 +03:00
4c8f7d5958 ms
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-12 11:13:36 +03:00
c5ee827230 merged
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-12 11:00:01 +03:00
208de158bc imports sort
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 09:37:06 +03:00
d0c1f33227 nodistinct
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 08:17:40 +03:00
71db929fa4 comments-counter-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 07:44:23 +03:00
56f1506450 followers-ids-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-09 07:35:45 +03:00
fae5f6f735 get-objects
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 07:26:04 +03:00
983f25d6d3 debug-followers
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-09 07:22:55 +03:00
1c9f6f30d9 debug: get_cached_topic_followers
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-09 07:14:33 +03:00
4a7b305ad4 fmt
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 18:57:03 +03:00
b5deb8889a follower-stat-fix 2024-08-08 18:56:49 +03:00
218bbd54da redis-fix-3
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 18:14:49 +03:00
531e4bf32c redis-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 18:13:51 +03:00
65bbbdb2b0 is_main-fix2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 18:06:06 +03:00
13acff1708 get_cached_topic_followers-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 18:00:50 +03:00
ff9c0a0b82 redis-fixes
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 17:55:34 +03:00
69a848d6a7 redis-fix 2024-08-08 17:54:15 +03:00
6a13e3bb0f is_main-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 17:48:53 +03:00
e4266b0bab get_cached_topic_by_slug-fix
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-08 17:46:25 +03:00
5bd9c9750d parse_aggregated_string-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 17:39:37 +03:00
e46de27ba9 get-shout-fix2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-08 17:36:20 +03:00
7bb70c41df get-shout-fix 2024-08-08 17:36:11 +03:00
a771cd0617 reaction
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 17:33:55 +03:00
21d9b75a09 fix-string-agg 2024-08-08 16:20:45 +03:00
71015c2ca3 fix-topic-non-body
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-08 16:16:40 +03:00
ea99219283 fmt
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-08 16:10:45 +03:00
0533863230 minor-fixes 2024-08-08 16:10:31 +03:00
a5ec1838b1 parser-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 18:13:40 +03:00
7fb4b5bd18 follower-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:11:32 +03:00
87aa39959a tricky-followers-count
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:09:44 +03:00
8b377123e1 follower-groupby2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:06:31 +03:00
fb687d50dd follower-groupby
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 18:03:30 +03:00
64e0e0ce79 followers_stat
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 18:01:12 +03:00
5a6a318b60 ismain-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 17:53:59 +03:00
1ce12c0980 parse-aggregated-string
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-07 17:52:23 +03:00
9c374d789e string_agg
Some checks failed
Deploy on push / deploy (push) Failing after 11s
2024-08-07 17:45:22 +03:00
f9a91e3a66 from-clause
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 15:36:05 +03:00
c551ca2e70 nogroupby2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 15:31:13 +03:00
6a4785cdac nogroupby
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 15:10:37 +03:00
ec7b25df3c dist 2024-08-07 15:04:17 +03:00
c601fcc2a4 alc
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 14:54:13 +03:00
1524f141b8 distinct
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 14:49:15 +03:00
50f2c9d161 subsub3 2024-08-07 14:41:22 +03:00
7712832b76 subsub2 2024-08-07 14:38:42 +03:00
a973da5bb4 subsub
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:35:50 +03:00
3fde67a87d sqltypes-text
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:29:31 +03:00
19c9ef462e CITEXT
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:26:41 +03:00
56c010975c array_agg
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 14:22:24 +03:00
572f63f12b reader-loads-move 2024-08-07 14:18:05 +03:00
a01ca30f5b stat-docs-reactions-apifix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 14:02:36 +03:00
6517fc9550 groupby
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:57:37 +03:00
dcd9f9e0bf json-agg-fix2 2024-08-07 13:53:44 +03:00
26d83aba7a json-agg-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:51:35 +03:00
087f6a7157 shouts-distinct-topics-authors-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:47:10 +03:00
7e89a3471f import-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 13:37:50 +03:00
1f9b320f04 viewed-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 13:37:08 +03:00
eba97e967b thread-lock-fix2
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 13:30:41 +03:00
2f65a538fa thread-lock-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 13:25:48 +03:00
57d25b637d sync-viewed-stat
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 13:15:58 +03:00
9c7a62c384 selectinload2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 12:57:48 +03:00
41482bfd4b selectinload 2024-08-07 12:57:01 +03:00
d369cfe333 ident-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 12:49:25 +03:00
2082e2a6e5 discussed-fix
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-07 12:48:57 +03:00
7a8f0a1c21 reader-oneloop
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 12:38:15 +03:00
3febfff1db postquery-topics-authors3 2024-08-07 12:29:51 +03:00
ad320ae83e postquery-topics-authors2 2024-08-07 12:23:56 +03:00
5609184d3b all 2024-08-07 12:22:51 +03:00
1e8d2aba0a postquery-topics-authors
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 12:18:29 +03:00
ebec80f198 precache-fix2 2024-08-07 11:56:13 +03:00
2a21decc94 precache-debug 2024-08-07 11:53:31 +03:00
520b39cb0b groupbyfix 2024-08-07 11:52:16 +03:00
1b46184781 groupbyfix 2024-08-07 11:52:07 +03:00
c1675cdf32 precache-fix2 2024-08-07 11:40:32 +03:00
c5a5e449d4 precache-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 11:38:34 +03:00
69a5dfcc45 shouts-load-optimisations
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 11:35:59 +03:00
7c48a6a1dc dict-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 10:30:51 +03:00
1af63dee81 shout-stats-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 10:22:37 +03:00
d4982017f6 refactored-starting
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 09:51:09 +03:00
60a56fd098 moved 2024-08-07 08:57:56 +03:00
1d4fa4b977 loop-fix-4 2024-08-07 08:42:59 +03:00
8b1e42de1c loop-fix-3 2024-08-07 08:35:38 +03:00
6bab1b0189 loop-fix-2 2024-08-07 08:33:02 +03:00
26fcd4ba50 loop-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-08-07 08:31:11 +03:00
c731639aa4 get-cached-topic
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 08:25:47 +03:00
b358a6f4a9 nocacheshout
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 08:22:08 +03:00
df25eaf905 query-fitness
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 07:27:56 +03:00
821c81dd9c redis-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-07 07:18:49 +03:00
3981fa3181 revalidation-manager
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 21:44:33 +03:00
a577b5510d cache-fix3
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-06 20:55:19 +03:00
1612778baa cache-fix3 2024-08-06 20:23:23 +03:00
4cbe78f81f cache-fix2 2024-08-06 20:20:20 +03:00
31d38c016e get_cached_author_by_user_id
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 20:05:24 +03:00
08eebd6071 cache-part2 2024-08-06 19:59:27 +03:00
c276a0eeb0 caching-wip1 2024-08-06 19:55:27 +03:00
9f91490441 trigger-fix-2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 19:45:42 +03:00
e0a44ae199 indexing2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 19:03:43 +03:00
ab388af35f indexing
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-06 19:01:50 +03:00
95977f0853 semaphore
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 18:57:35 +03:00
b823862cec caching-fix
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-08-06 18:53:25 +03:00
522718f3a1 last-comment-revert
Some checks failed
Deploy on push / deploy (push) Failing after 10s
2024-08-06 18:18:51 +03:00
dfd476411f nossl
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-08-06 14:44:01 +03:00
626d76f406 fmt2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-08-06 14:37:50 +03:00
c576fc0241 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-08-06 14:34:12 +03:00
385c8ce04b logging-fix 2024-08-06 14:33:52 +03:00
34c16c8cdf logging-sentry
All checks were successful
Deploy on push / deploy (push) Successful in 2m0s
2024-08-06 13:47:49 +03:00
2f4c8acaa2 reaction.likes fix
Some checks failed
Deploy on push / deploy (push) Failing after 56s
2024-07-30 05:19:16 +03:00
960a00101c load-comment-ratings
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-07-26 19:04:40 +03:00
c46dc759d7 load-shout-comments-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 57s
2024-07-26 16:56:30 +03:00
16728f1d49 group-by-asked
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-07-26 16:42:26 +03:00
4c625db853 group_by-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 2m7s
2024-07-23 17:36:26 +03:00
fce78df549 group_by-fix 2024-07-23 17:35:45 +03:00
a4411cfa34 comment-ratings
All checks were successful
Deploy on push / deploy (push) Successful in 1m31s
2024-07-22 11:32:47 +03:00
a43a44302b reactions-api-update
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-07-22 10:42:41 +03:00
451f041206 aliased-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-07-18 12:13:30 +03:00
6595d12108 unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-07-18 12:07:53 +03:00
983ad12dd3 slug-filter-author-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-07-18 09:09:48 +03:00
3ff52f944c slug-filter-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-07-18 09:05:10 +03:00
77282ade62 load-shouts-discussed-coauthored
All checks were successful
Deploy on push / deploy (push) Successful in 2m4s
2024-07-16 01:06:43 +03:00
1223c633d4 followed-by
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-07-03 15:35:12 +03:00
d55a3050fc Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 1m11s
2024-07-03 11:57:26 +03:00
62a2280a80 load-shouts-followed-fix 2024-07-03 11:57:17 +03:00
Stepan Vladovskiy
c57fca0aee test: increase users from one IP to 10000 to see if something changes on stress tests
All checks were successful
Deploy on push / deploy (push) Successful in 2m2s
2024-07-03 01:40:00 -03:00
612f91a708 followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m53s
2024-06-17 14:52:09 +03:00
a25a434ea2 check-existing-on-create
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-12 13:00:35 +03:00
ac9f1d8a40 followers-empty-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-12 12:52:07 +03:00
e32baa8d8f stat-aliased-select-fix-3 2024-06-12 12:48:09 +03:00
9580282c79 stat-aliased-select-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-12 12:26:53 +03:00
c24f3bbb4a faster-response
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-11 22:46:35 +03:00
04e20b29ee author-with-stat-cache-nonblock-2
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-06-11 17:51:34 +03:00
b2fdc9a453 parser-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-11 14:46:10 +03:00
8708efece2 stabfix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-09 15:49:37 +03:00
51f56c0f1f issue#842-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-09 14:02:24 +03:00
e58fbe263f reaction.shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-09 09:13:21 +03:00
ea28f5346c auth-debug
All checks were successful
Deploy on push / deploy (push) Successful in 1m54s
2024-06-09 09:07:48 +03:00
4743581395 load_authors_by-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 14:16:16 +03:00
3f12bcfd39 precache-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:49:22 +03:00
10ad7089f4 precache-debug
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:47:43 +03:00
8d371e6519 log-loza
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 12:42:54 +03:00
76ee4a387c shout-link-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:37:55 +03:00
7a4c02d11d typo-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-06 12:33:58 +03:00
ae861aa8b4 fix-select-by-topic
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 12:23:47 +03:00
ddc5254e5f log-response
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 11:24:26 +03:00
543b2e6b4d load_shouts_unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 11:13:54 +03:00
626e899ca3 get-cached-topic-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 11:07:49 +03:00
f5ebd0ada9 text-order-by-fix 2024-06-06 11:06:18 +03:00
afe710d955 debug-precache
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 09:56:21 +03:00
1946d5eda2 int
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 09:40:39 +03:00
3476d6e6d1 get_cached_topic_by_slug-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 09:23:45 +03:00
85f63a0e17 precache-synced-with-cache
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-06 08:06:10 +03:00
1cc779e17b get-author-id-by-user-id-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 23:42:25 +03:00
b04fc1ba65 get-author-id-by-user-id
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-05 23:42:09 +03:00
5afa046f18 get-author-by-user
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 23:15:19 +03:00
fbf21ae3f9 strip-more-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 23:07:29 +03:00
12439b6ef2 strip-more
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 23:03:13 +03:00
b72ef072e4 fix-cache-topic
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 22:56:48 +03:00
ee6a636e68 fix-cache-author
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-05 22:55:44 +03:00
e942fdbffa debug-precache
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 22:27:23 +03:00
13e609bcf7 fixed-redis-intfix4
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 22:20:39 +03:00
d5d5a69ab4 userid-renewal-toler
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 21:44:51 +03:00
53545605d0 fixed-redis-cache-4
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 21:40:32 +03:00
d93fa4cb4b cached-author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 21:04:48 +03:00
35ef4357fb simpler-cache-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 18:51:12 +03:00
d3fe4c4aff get_cached_author-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m6s
2024-06-05 18:48:41 +03:00
1e0d0f465a get_cached_author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-05 18:46:01 +03:00
6e80942beb reactions-follow-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-05 18:29:15 +03:00
67636e6d17 author-id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m21s
2024-06-05 18:18:03 +03:00
713fb4d62b 0.4.1-following-update 2024-06-05 17:45:55 +03:00
67c299939c toler-no-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-05 16:23:53 +03:00
1042eb6e58 less-bloat-log
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-04 12:55:12 +03:00
db2ae09ead aifix
All checks were successful
Deploy on push / deploy (push) Successful in 1m28s
2024-06-04 11:51:39 +03:00
708bdaa7f6 ruff-update 2024-06-04 09:10:52 +03:00
9c02333e2b precache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-04 09:07:46 +03:00
bfc177a811 exc-mw-connected
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-04 08:15:59 +03:00
d53256bcd7 exc-mw
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-04 08:10:57 +03:00
231de135ca search-fin
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:28:21 +03:00
5f36b7c6e2 search-with-images40
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 19:24:23 +03:00
23e46df8a9 search-with-images39
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 19:21:50 +03:00
6b8b61fa37 search-with-images38
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 19:19:30 +03:00
25964b6797 search-with-images36
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:14:18 +03:00
c0b3e90943 search-with-images35
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:09:02 +03:00
9c4ddea33d search-with-images34
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 19:06:26 +03:00
f41359b8c9 search-with-images33
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-06-02 18:59:00 +03:00
44b797c1de search-with-images32
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-02 18:58:24 +03:00
4933553d50 search-with-images31
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 18:52:57 +03:00
93c9fcc248 search-with-images30
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-02 18:52:34 +03:00
2365485a68 search-with-images29
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 18:48:54 +03:00
27bea7d06f search-with-images28
All checks were successful
Deploy on push / deploy (push) Successful in 1m10s
2024-06-02 18:47:01 +03:00
c29838b6ee search-with-images27
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 18:44:27 +03:00
c8baa6abf9 search-with-images26
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 18:41:09 +03:00
9358a86df1 search-with-images25
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 18:39:05 +03:00
7e8757ec72 search-with-images24
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 18:36:11 +03:00
c1fe419ff9 search-with-images22
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 18:34:15 +03:00
ebf1309b48 search-with-images22
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 18:01:17 +03:00
d83b459408 search-with-images20
All checks were successful
Deploy on push / deploy (push) Successful in 1m34s
2024-06-02 17:56:24 +03:00
db8472ae06 search-with-images19
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:47:27 +03:00
9d265fa3f9 search-with-images17
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:36:34 +03:00
5169cff892 search-with-images16
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:25:09 +03:00
8f2bd30d54 search-with-images15
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:16:43 +03:00
b8266c41fc search-with-images14
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:12:34 +03:00
1a601b93eb search-with-images13
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:08:50 +03:00
1b838676e3 search-with-images12
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:07:29 +03:00
8cc9d0d4d3 search-with-images11
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 17:01:22 +03:00
8e77a57bc1 search-with-images10
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 16:48:11 +03:00
e74c9688c8 search-with-images9
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 16:40:47 +03:00
60d6743fcd search-with-images8
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 16:38:38 +03:00
f42d81b9fc search-with-images7
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:36:12 +03:00
774240ca73 search-with-images6
All checks were successful
Deploy on push / deploy (push) Successful in 1m9s
2024-06-02 16:18:19 +03:00
fb2c31a81b search-with-images6
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:14:01 +03:00
eba991f4f5 search-with-images5
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:10:09 +03:00
0fdb056460 search-with-images4
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:05:59 +03:00
17da2c8359 search-with-images3
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 16:00:09 +03:00
0abb4d605d search-with-images2
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 15:58:14 +03:00
465d9093bd search-with-images
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-06-02 15:56:17 +03:00
67e4cacb28 scoreidcache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 15:32:02 +03:00
a3d1d1b067 search-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-06-02 14:11:46 +03:00
2e5919f3e6 search-fix
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-06-02 14:10:49 +03:00
9b2db3cc1d d-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m7s
2024-05-30 22:04:28 +03:00
9307fc97fb follower-topics-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-05-30 21:29:25 +03:00
b3a998fec2 followers-cache-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 1m27s
2024-05-30 21:13:50 +03:00
5ba7f5e3c9 followers-cache-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 21:10:16 +03:00
9212fbe6b5 followers-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 20:55:47 +03:00
8dcd985c67 cache-fix-10
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:49:37 +03:00
c9dcd6a9c9 cache-fix-9
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 20:43:18 +03:00
afef19fae3 cache-fix-8
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:26:53 +03:00
2e2dc80718 cache-fix-7
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 20:23:32 +03:00
abc5381adb cache-fix-6
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:22:10 +03:00
75dd4120ec cache-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 20:20:02 +03:00
b0637da11d cache-fix-4
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 20:14:26 +03:00
968935869e cache-refactored-4
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 19:42:38 +03:00
74e000c96b cache-refactored-3
All checks were successful
Deploy on push / deploy (push) Successful in 48s
2024-05-30 19:29:57 +03:00
8dd885b6a8 cache-refactored2
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-05-30 19:16:50 +03:00
042cf595f7 cache-refactored
All checks were successful
Deploy on push / deploy (push) Successful in 39s
2024-05-30 19:15:11 +03:00
3712ecf8ae author:user-key
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2024-05-30 18:07:41 +03:00
d20647c825 cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 17:49:33 +03:00
98010ed1bc get-with-stat-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 47s
2024-05-30 15:05:06 +03:00
76d4fc675f get-with-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-30 14:45:41 +03:00
e4cc182db4 get-with-stat-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-30 14:40:04 +03:00
9ca7a42d56 get-with-stat-debug
All checks were successful
Deploy on push / deploy (push) Successful in 30s
2024-05-30 14:38:14 +03:00
570c8a97e3 shouts-stat-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-30 14:29:00 +03:00
3bde3ea5e9 shouts-stat-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-30 14:25:35 +03:00
d54e2a2f3f shouts-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-30 14:01:34 +03:00
a1ee49ba54 postmerge
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-30 12:49:46 +03:00
e638ad81e2 fmt+follows-refactored 2024-05-30 07:12:00 +03:00
bce43096b1 postmerge-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-27 20:14:40 +03:00
19d10b6219 Merge branch 'v2' into dev
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-27 20:11:04 +03:00
a9ab2e8bb2 cached-empty-fix-4 2024-05-27 20:03:07 +03:00
9a94e5ac56 cached-empty-fix-3 2024-05-27 19:59:16 +03:00
d93311541e cached-empty-fix-2 2024-05-27 19:57:22 +03:00
01d2d90df1 cached-empty-fix 2024-05-27 19:55:56 +03:00
7b72963b24 reply-to-fix 2024-05-27 19:39:48 +03:00
c90783f461 async-fix-3 2024-05-27 19:38:34 +03:00
9d9adfbdfa async-fix-2 2024-05-27 19:36:25 +03:00
f43624ca3d async-fix 2024-05-27 19:29:51 +03:00
3f6f7f1aa0 get-followers-fix 2024-05-27 18:30:28 +03:00
da89b20e5c session-close-fix 2024-05-26 02:17:45 +03:00
c4817c1e52 logfix
All checks were successful
Deploy on push / deploy (push) Successful in 29s
2024-05-24 13:25:05 +03:00
c444895945 log-response
All checks were successful
Deploy on push / deploy (push) Successful in 1m50s
2024-05-24 13:14:19 +03:00
9791ba4b49 result-fix6
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-21 04:40:48 +03:00
6ed144327c result-fix5
All checks were successful
Deploy on push / deploy (push) Successful in 31s
2024-05-21 04:34:08 +03:00
472801199c result-fix4
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 04:28:15 +03:00
a3514e6874 result-fix3
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 04:25:40 +03:00
95b2b97dd4 result-fix2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 04:24:58 +03:00
df934a8fd2 result-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-21 04:22:36 +03:00
d89fa283dc cache-postrefactor-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-21 02:56:58 +03:00
1592065a8c postfixing-reimplemented-cache
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-21 02:01:18 +03:00
4c1fbf64a2 cache-reimplement-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m4s
2024-05-21 01:40:57 +03:00
3742528e3a follows-returns
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-20 19:11:07 +03:00
232892d397 isort
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-20 16:46:05 +03:00
e0b3562e80 follow/unfollow-handling-noauthor 2024-05-20 16:23:49 +03:00
71c2e8ea13 notopicid
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-18 19:30:25 +03:00
b73cce5431 create-reaction-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-18 17:41:04 +03:00
0d618116e1 compound-fix
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-05-18 17:31:45 +03:00
b7dbaa6e73 aliased-union-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 16:16:09 +03:00
5fe51e03bb fix-get-stat
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 15:40:15 +03:00
306caf9520 logs-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-18 15:26:22 +03:00
e6f42b388a logs-fix
Some checks failed
Deploy on push / deploy (push) Has been cancelled
2024-05-18 15:25:53 +03:00
fd7bd385fc queries-refactoring-2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 14:15:05 +03:00
7d97f40826 cache-when-created
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-05-18 13:57:30 +03:00
bc01dfb125 media-indexed
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 13:16:39 +03:00
5dfb890b84 no-delete-on-create
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 12:57:18 +03:00
2beb584e87 search-index-softer-check
All checks were successful
Deploy on push / deploy (push) Successful in 32s
2024-05-18 12:55:34 +03:00
1f3607b4d3 search-compare-fix
All checks were successful
Deploy on push / deploy (push) Successful in 32s
2024-05-18 12:51:41 +03:00
0051492bd3 proper-searchfields
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 12:48:43 +03:00
0f5df77d28 create-reaction-unauthorized-handling
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 12:38:46 +03:00
c80229b7b9 delete-if-wrong
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-05-18 12:11:34 +03:00
8bc7a471cd index-struct-sync
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-18 11:58:47 +03:00
91a2854537 no-remove-index-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-18 11:52:17 +03:00
3d8e484187 no-delete-index 2024-05-18 11:32:30 +03:00
be6d2454b1 search-info-2
All checks were successful
Deploy on push / deploy (push) Successful in 41s
2024-05-18 11:28:38 +03:00
4e97a22642 search-service-improve
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 11:22:13 +03:00
a749ade30b fmt
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-18 11:00:46 +03:00
3d90d9c81d search-cached-non-empty-only-fix 2024-05-18 11:00:01 +03:00
102eae1c98 sentry-on
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-09 08:32:25 +03:00
75cd8b9f71 get-author-ref
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-09 00:02:59 +03:00
a18ad12ff7 lesslog 2024-05-08 23:57:31 +03:00
f7fdd6fd76 sentry-off
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-08 23:48:11 +03:00
80685fd1cc follows-result-update
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-08 23:42:57 +03:00
69409f92e1 redis-set-set-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-07 21:56:07 +03:00
cfcb858bba new-profile-followers 2024-05-07 19:17:18 +03:00
8618e1eff7 followers-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-07 00:10:54 +03:00
e0a5c654d8 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-07 00:06:31 +03:00
e61db5d6e5 logs-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-07 00:03:58 +03:00
fac25ab4f4 followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-07 00:02:15 +03:00
ceeeb23c26 delete-reaction-update-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-06 22:44:30 +03:00
ce90fedacb delete-reaction-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-06 22:38:19 +03:00
0179c69b82 delete-reaction-debug 2024-05-06 22:37:38 +03:00
dac79b53ca api-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 21:18:19 +03:00
b372fd81d5 drafts-api-common
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 21:14:17 +03:00
205019ce39 handle-no-author-profile
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 21:01:10 +03:00
9c4d88c8fd handle-noauthor 2024-05-06 20:59:56 +03:00
dd2becaab2 cache_by_id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 20:41:34 +03:00
658c8c7702 followers-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 20:30:49 +03:00
809b980145 load-authors-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 20:04:50 +03:00
1185880f8e authors-all-fix
All checks were successful
Deploy on push / deploy (push) Successful in 37s
2024-05-06 20:00:26 +03:00
499ecb501d load-authors-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 19:40:51 +03:00
b3e7d24d9d shouts-counter-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-05-06 19:27:51 +03:00
78b12d4f33 followers-cache-debug-2 2024-05-06 12:46:42 +03:00
5caa2d1f8c followers-cache-debug
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-06 12:41:53 +03:00
c46f264c4b followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 12:38:39 +03:00
f6b21174bf unique-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 37s
2024-05-06 11:27:15 +03:00
d15b36a0f1 unique-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-06 11:23:14 +03:00
232cdbfad8 docker-check-logger
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 10:58:31 +03:00
55e28162fe subprocess-fix
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 10:53:27 +03:00
49eec2de46 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 30s
2024-05-06 10:30:05 +03:00
52f5a4e813 followers-cache-fix 2024-05-06 10:29:50 +03:00
a5d99fa517 cache-follower-fix 2024-05-06 10:25:09 +03:00
Stepan Vladovskiy
2a08e6204e feat: sv - nginx sigil with /upload
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-06 04:20:18 -03:00
Stepan Vladovskiy
ab6dcde170 feat: sv - nginx sigil with /upload and now / at the end of proxy_pass 2024-05-06 04:18:24 -03:00
Stepan Vladovskiy
bf9e571cd8 feat: sv - nginx sigil with /upload 2024-05-06 04:07:07 -03:00
Stepan Vladovskiy
e38df1f9d5 debug: sv - nginx sigil old-fashioned, a lot of /// around all 2024-05-06 03:28:31 -03:00
Stepan Vladovskiy
449f63f540 debug: sv - back configs in nginx.sigil 2024-05-06 02:56:40 -03:00
Stepan Vladovskiy
22106ad657 debug: sv - trying different configs in nginx.sigil 2024-05-06 02:54:58 -03:00
Stepan Vladovskiy
4c274eee2e debug: /upload instead of /upload/ in sigil 2024-05-06 02:04:55 -03:00
Stepan Vladovskiy
b3caccb786 debug: sv - sigil style for uploader without / at the end of the proxy_pass 2024-05-05 23:11:31 -03:00
Stepan Vladovskiy
fc033734f5 debug: with proxy-pass in nginx to uploader
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-05 16:55:21 -03:00
Stepan Vladovskiy
2fb21847c3 debug: nginx.conf.sigil right names
Some checks failed
Deploy on push / deploy (push) Failing after 22s
2024-05-05 16:48:11 -03:00
Stepan Vladovskiy
e4d83d35eb debug: without uploader check in server.py
Some checks failed
Deploy on push / deploy (push) Failing after 22s
2024-05-05 16:44:09 -03:00
Stepan Vladovskiy
98d7c522fb debug: run check with dokku not docker to find uploader
Some checks failed
Deploy on push / deploy (push) Failing after 32s
2024-05-05 16:41:39 -03:00
Stepan Vladovskiy
e6f88ffff0 feat: run check with dokku not docker to find uploader
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-05-05 16:40:33 -03:00
Stepan Vladovskiy
d26f8c4903 feat: gitea workflow if uploader then stop installer
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-05-05 16:35:19 -03:00
Stepan Vladovskiy
89021ea018 feat: gitea workflow with Uploader check running, plus a checker in server.py too 2024-05-05 16:35:19 -03:00
0d87d3d889 unique-follows-debug
All checks were successful
Deploy on push / deploy (push) Successful in 35s
2024-05-05 21:38:59 +03:00
2b5fb704ba follow/unfollow-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-05 21:04:38 +03:00
13d144f838 cant-follow-catch-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-05 20:44:57 +03:00
ac5674d18f following-cache-anyway-found
All checks were successful
Deploy on push / deploy (push) Successful in 27s
2024-05-05 20:17:07 +03:00
3ab42ecb72 following-cache-anyway-found 2024-05-05 20:16:45 +03:00
cfe9ac1005 follow-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-05-05 19:49:07 +03:00
e50a6358a8 merged
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-05 18:46:46 +03:00
f6cb7e18d1 cache-updates-fix 2024-05-05 18:46:16 +03:00
Stepan Vladovskiy
526d2c3e4e feat: sv - in nginx client_max_body_size=100M, solution for uploading large files
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-05 03:15:47 -03:00
c9205a698f typo-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-05-05 00:00:58 +03:00
dc791d4e7a same-rating-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-04 23:48:55 +03:00
b2f7b06a93 topic caching
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-03 14:12:57 +03:00
db33410675 lesslog
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-02 09:12:47 +03:00
6c58f09402 feed-filter-fix-2 2024-05-01 05:08:54 +03:00
79f21387a5 feed-filter-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-05-01 05:02:35 +03:00
dc9c66c00f follow-fmr
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-05-01 04:01:21 +03:00
c68322e550 follow-fix 2024-05-01 04:00:54 +03:00
88de00706d follow-fix 2024-05-01 04:00:37 +03:00
658a2400c4 debug-4 2024-05-01 03:46:16 +03:00
12e42f2023 debug-2 2024-05-01 03:38:49 +03:00
f1bda441b4 debug-2 2024-05-01 03:35:31 +03:00
026bad95e2 debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-05-01 03:29:25 +03:00
831684922a get-my-shout-dbg 2024-05-01 03:09:54 +03:00
435e97ab04 get-my-shout-debug 2024-05-01 02:46:19 +03:00
883e98c3d3 get-my-shout-debug 2024-05-01 02:42:25 +03:00
94bf54b192 get-my-shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-05-01 02:38:17 +03:00
9aacb75e84 auth-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-30 16:18:50 +03:00
61c7f5a0dc drafts-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-30 14:10:01 +03:00
a7f163009e fastify-load-authors-2
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2024-04-30 12:35:51 +03:00
ab6ef76a34 fastify-load-authors
All checks were successful
Deploy on push / deploy (push) Successful in 39s
2024-04-30 12:33:41 +03:00
a992941aef logs-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-27 13:30:19 +03:00
9dc986b08c start-date-views-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-27 01:51:45 +03:00
653b18041e str-time
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-27 01:48:15 +03:00
868b2ba16a removed-search-vector
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-27 01:43:42 +03:00
2e4d70db28 viewed-fix+fmt+outerjoin-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m1s
2024-04-27 01:41:47 +03:00
89956d6240 get-shout-debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-26 11:43:22 +03:00
7f1794891c cache-follower-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-26 11:21:00 +03:00
ee24f2f1db my-shout-not-published
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-26 11:06:13 +03:00
cfed40ddd9 not-error-expired-token
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-26 07:21:03 +03:00
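The not-error-expired-token commit above suggests the auth layer stopped treating an expired session token as a hard error. A minimal sketch of that behavior, assuming PyJWT and an HS256 secret (both assumptions; the log names neither):

```python
import jwt  # PyJWT -- an assumption, the log only implies token decoding

def decode_token_tolerant(token: str, secret: str) -> dict | None:
    """Return the payload, or None when the token is merely expired."""
    try:
        return jwt.decode(token, secret, algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        return None  # treated as "not authenticated" rather than an error
    # other JWT errors (bad signature, malformed token) still raise
```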
899016907c shouts-load-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-25 14:06:21 +03:00
54e82f99eb shouts-load-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-25 14:03:30 +03:00
605d60f126 featured-filter-fix 2024-04-25 14:01:16 +03:00
b1bd9a4829 feed-featured-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-04-25 12:19:42 +03:00
54766ffa42 feed-featured-fix 2024-04-25 12:08:20 +03:00
27d5272032 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 58s
2024-04-25 12:07:30 +03:00
e68196ce0b counters-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-25 11:47:13 +03:00
c4148254ed get-topic-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-25 11:25:39 +03:00
1e8b6b156b get-topic-fix-4 2024-04-25 11:25:23 +03:00
b1d459d7fa get-topic-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-25 11:24:16 +03:00
961d86c8f9 get-topic-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-25 11:22:18 +03:00
1b22276237 get-topic-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-25 11:20:57 +03:00
0b185c1c2d fmt
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-24 10:42:33 +03:00
5dbb0ccb12 region-cache-fix 2024-04-24 10:42:09 +03:00
e90d5aefb2 stat-resolver-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-24 10:30:32 +03:00
c1a66500e5 sort-order-reimplement-synthetic-stat
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-23 16:05:27 +03:00
54980faf49 select-from-fix
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-04-23 15:46:59 +03:00
83204d1dff left-join-fix 2024-04-23 15:27:19 +03:00
870d5b62dc isort
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-23 15:15:18 +03:00
0b4c0faa79 stat-fix 2024-04-23 15:14:59 +03:00
f64d0a09a8 color-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-04-23 14:41:19 +03:00
8436bc4305 separate-stat-query
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-23 14:31:34 +03:00
8e130027f0 auth-request-data-log
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-22 12:48:57 +03:00
b7d82d9cc5 refactored-author-on-login-required
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-19 18:22:07 +03:00
0ca6676474 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 56s
2024-04-18 12:34:32 +03:00
1a685e458d following-fix 2024-04-18 12:34:04 +03:00
47bc3adb69 fixes 2024-04-17 20:30:05 +03:00
372185e336 webhook-fix 2024-04-17 19:54:38 +03:00
519f5e4624 use-email-login-webhook 2024-04-17 19:20:35 +03:00
c25d7e3ab6 fmt 2024-04-17 18:32:23 +03:00
937b154c6b family-name-fix 2024-04-17 18:31:11 +03:00
Stepan Vladovskiy
994cd05d85 feat: no force push dev to staging
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-11 16:14:26 -03:00
Stepan Vladovskiy
52280c29ea feat: force push dev to staging
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-11 16:12:11 -03:00
Stepan Vladovskiy
dce4d77706 feat: force push dev to staging
All checks were successful
Deploy on push / deploy (push) Successful in 6s
2024-04-11 16:11:17 -03:00
Stepan Vladovskiy
9ce0426b7e feat: force push to staging inbox 2024-04-11 15:41:00 -03:00
9911a9410d ..
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-04-10 16:09:03 +03:00
25868ec27b logger-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 22:37:58 +03:00
25a65d09d6 tolerate
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 22:35:11 +03:00
cd99041bcc add-author-stat-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 22:32:15 +03:00
1110f7d8ec any-id-fix-2
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-04-09 22:24:47 +03:00
e0df7e7436 any-id-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 22:09:26 +03:00
44647bbf39 author-stat-fix
Some checks failed
Deploy on push / deploy (push) Failing after 5s
2024-04-09 22:06:00 +03:00
103fcfd045 trace-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 22:02:26 +03:00
3f2c00a1df get-author-fix-3
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 21:53:35 +03:00
3cc680754b get-author-fix-3
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-04-09 21:51:24 +03:00
d7db2689c8 get-author-fix
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-04-09 21:46:21 +03:00
23288d1f91 query-debug-3
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 21:39:59 +03:00
1b00086148 query-debug-2
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 21:15:38 +03:00
0501b0f38e outerjoin-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 21:08:47 +03:00
6703e3d093 authors-stat-fix 2024-04-09 21:06:34 +03:00
10c24fe400 topic-stat-fix 2024-04-09 21:05:24 +03:00
489c3c3232 any-fix-4 2024-04-09 20:59:03 +03:00
64f473e037 any-fix-3 2024-04-09 20:56:47 +03:00
202c8461f5 any-fix-2 2024-04-09 20:55:35 +03:00
cf64090ac3 any-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 20:54:00 +03:00
f22b37cc91 has-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 20:51:32 +03:00
e9fa53aff9 glitchtip
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 19:50:27 +03:00
d3262accc5 shout-topic-comments
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-04-09 19:48:02 +03:00
142a5f09af ..
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 19:40:44 +03:00
c6a4f04779 topic-stat-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-04-09 19:38:02 +03:00
4fe15d1440 topic-stat-join-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-09 18:06:29 +03:00
e529ecbe41 params-fix-2 2024-04-09 17:57:22 +03:00
7be4642f5d params-fix
All checks were successful
Deploy on push / deploy (push) Successful in 35s
2024-04-09 17:55:07 +03:00
3fd94dc0fa notification-check
All checks were successful
Deploy on push / deploy (push) Successful in 34s
2024-04-09 17:51:23 +03:00
9e6f81606b import-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-04-09 16:59:41 +03:00
2bf456b343 reactions-cache-update
All checks were successful
Deploy on push / deploy (push) Successful in 34s
2024-04-09 16:43:06 +03:00
1769b0925b follow-if-liked-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-04-09 14:03:50 +03:00
5e8c1ac30b tolerate-notifier-fails 2024-04-09 13:46:27 +03:00
6e17b89f26 author-get-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-09 13:41:30 +03:00
739b7b40d6 follower-id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-04-09 13:38:44 +03:00
b3eda4a0e1 result-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-09 13:32:11 +03:00
dd0c5d15fd fix2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-04-09 13:30:48 +03:00
e587ed05df found-author-fix 2024-04-09 13:30:02 +03:00
5bbfd2249f topic-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-09 11:30:20 +03:00
d3ae078b20 refactored-cache-following
All checks were successful
Deploy on push / deploy (push) Successful in 36s
2024-04-09 11:17:32 +03:00
b802bb029a cache-upgrade
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-08 21:33:47 +03:00
d1cd69eb2a async-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-08 12:42:45 +03:00
c301256751 precommit
All checks were successful
Deploy on push / deploy (push) Successful in 49s
2024-04-08 10:38:58 +03:00
df15e63dde reindex-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-08 10:23:54 +03:00
aa1693cc16 sentry-init-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-08 09:17:05 +03:00
Stepan Vladovskiy
8aa133aab1 feat: nginx with limit_conn_zone 10m change place
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-04-07 14:31:38 -03:00
Stepan Vladovskiy
acaea73a38 feat: with limit_conn_zone 10m
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-04-07 14:29:21 -03:00
Stepan Vladovskiy
f4c43f7c00 feat: events worker_connections in global nginx.conf
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-04-07 14:25:56 -03:00
Stepan Vladovskiy
7c19291ba9 feat: nginx worker events config in Dockerfile
Some checks failed
Deploy on push / deploy (push) Failing after 9s
2024-04-07 13:45:59 -03:00
Stepan Vladovskiy
0da9c87f5a feat: nginx with cache, keepalive, proxy_read, users_from_one_ip, workers
Some checks failed
Deploy on push / deploy (push) Failing after 21s
2024-04-07 13:39:37 -03:00
Stepan Vladovskiy
c9369e3c08 feat: simple glitchtip setup, without all
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-03 01:21:19 -03:00
Stepan Vladovskiy
4166f8e695 feat: make everything like in the docs
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-04-01 17:44:18 -03:00
Stepan Vladovskiy
c8776df610 debug: with glitchtip and middleware in main.py
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-04-01 00:25:14 -03:00
Stepan Vladovskiy
deb8da2363 feat: with glitchtip and middleware in main.py
All checks were successful
Deploy on push / deploy (push) Successful in 38s
2024-04-01 00:11:48 -03:00
Stepan Vladovskiy
1970b197a5 feat: with glitchtip in main.py
All checks were successful
Deploy on push / deploy (push) Successful in 34s
2024-04-01 00:01:38 -03:00
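GlitchTip is API-compatible with Sentry, so the glitchtip-in-main.py commits above most plausibly amount to initializing sentry_sdk and wrapping the ASGI app. A sketch under that assumption (the DSN and the app are placeholders):

```python
import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

# placeholder DSN pointing at a GlitchTip instance
sentry_sdk.init(dsn="https://key@glitchtip.example.com/1")

async def app(scope, receive, send):
    """Stand-in for the real ASGI application built in main.py."""

app = SentryAsgiMiddleware(app)  # unhandled exceptions now report to GlitchTip
```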
232f41b905 isolate-ratings
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-29 14:44:44 +03:00
c159490413 rating-fix-9
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-29 03:03:37 +03:00
dd840b63ca rating-fix-8
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-29 02:56:25 +03:00
d06b8eaa4e rating-fix-7
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-29 02:50:38 +03:00
d529daea25 rating-fix-6
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-29 02:45:23 +03:00
489e6b39a9 rating-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-29 02:40:08 +03:00
943b52e067 rating-fix-4
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-29 02:37:26 +03:00
99895d1b94 rating-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-29 02:31:59 +03:00
3f68e25230 rating-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-03-29 02:29:16 +03:00
9cc0c5b011 rating-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-29 02:15:38 +03:00
a4dd56ee44 comments-rating-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-29 01:49:30 +03:00
53c067ff80 comments-rating-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-29 01:34:50 +03:00
cc8f08588c comments-rating-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-29 00:36:19 +03:00
b8f08c3411 comments-rating
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-29 00:29:28 +03:00
8f532b0023 author-stat-fix-9 2024-03-28 23:59:53 +03:00
4b5c101f2f author-stat-fix-8 2024-03-28 23:59:26 +03:00
f8f3a32556 author-stat-fix-7 2024-03-28 23:39:12 +03:00
8ff0e6786b author-stat-fix-6
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-28 23:33:56 +03:00
e9c852d23d author-stat-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 23:26:45 +03:00
feede764bf author-stat-fix-4
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 23:19:07 +03:00
e426a2b087 author-stat-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 22:53:02 +03:00
284250770e author-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 22:51:09 +03:00
d74a6dedaa comments-count-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 22:41:48 +03:00
0a767a14b6 author-rating-4
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-28 22:31:33 +03:00
2f4019ca6f author-rating-3
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 22:29:51 +03:00
b023773cc6 author-rating-2
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-28 22:26:46 +03:00
34e12975fe get-author-stat-debug
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-28 22:10:01 +03:00
c9605cf918 get
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 20:45:26 +03:00
ea16de3f1a rating-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 20:45:03 +03:00
d6bf3e1602 rating-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 20:42:22 +03:00
029e6af161 debloat-get-author
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 20:36:35 +03:00
5c41312b1d with-stat-cached-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-28 19:51:09 +03:00
495b296508 with-stat-cached-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 19:45:21 +03:00
1eeff25b4d with-stat-cached-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-28 19:40:54 +03:00
1f012ae5c9 revalidate-stat
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-28 19:39:10 +03:00
77440388d3 author-refactored
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 19:36:27 +03:00
736877d50e cached-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 19:22:47 +03:00
0f57bea256 renew-stat 2024-03-28 19:21:57 +03:00
9647ec9708 scalar
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 19:16:47 +03:00
a4957ef0ad stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 19:14:39 +03:00
2d538a292a refactored-get-author-4
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 19:11:26 +03:00
9d8831d7ed refactored-get-author-3
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 19:08:55 +03:00
8826af02b5 refactored-get-author
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-03-28 19:05:27 +03:00
e103b283cb dblog-debug5
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:37:04 +03:00
9a12cbcdde dblog-debug3
All checks were successful
Deploy on push / deploy (push) Successful in 36s
2024-03-28 16:34:27 +03:00
6bc4fe42c4 dblog-debug3
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:30:04 +03:00
556857fc28 dblog-debug2
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:28:17 +03:00
23fb4227ad dblog-debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:25:51 +03:00
057b43730e dblog-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:17:34 +03:00
bb0412bb5c dblog-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:05:28 +03:00
e9be761420 dblog-fix
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 16:01:48 +03:00
9bda7cef95 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 15:56:32 +03:00
7f913050ee author-follows-result-type-debug
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-28 15:48:58 +03:00
73c3d47f1b author-follows-result-type-debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 15:43:41 +03:00
72b9bb407d compact-author-ratings
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 15:38:14 +03:00
1eb3d54dd0 author-follows-result-type-
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 15:11:08 +03:00
e7149e905a author-follows-result-type
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 15:04:46 +03:00
2ee87c975a get_author-follows-fixed 2024-03-28 14:58:47 +03:00
cf6230e8d6 get_author-follows-fixed
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 14:57:21 +03:00
054077c99e get_author-follows-debug-3
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 14:56:08 +03:00
3d28370362 get_author-follows-debug-2
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 14:38:03 +03:00
6c9fd23e67 get_author-follows-debug
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-28 14:13:18 +03:00
95c54ff0c4 get_author-follows-debug
All checks were successful
Deploy on push / deploy (push) Successful in 22s
2024-03-28 14:09:11 +03:00
e2faec5893 scalar-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 14:05:46 +03:00
6f016f236d caching-fixes 2024-03-28 14:05:06 +03:00
7907e5bc4f get_author_follows-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-28 13:37:28 +03:00
65fd4df5ef get_author_follows-fix 2024-03-28 13:33:41 +03:00
235b908766 logger-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m17s
2024-03-26 11:57:00 +03:00
3eacc142f2 unrated-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-25 21:07:32 +03:00
9eb2ad21d0 filters-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-25 20:41:28 +03:00
f03a6d0efe filter-my-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-25 20:38:46 +03:00
e9611fc8c1 feed-filters-fix
All checks were successful
Deploy on push / deploy (push) Successful in 21s
2024-03-25 20:28:58 +03:00
337fa82fb4 last-comment-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-25 19:50:23 +03:00
d92d280595 typo-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-03-25 15:31:16 +03:00
fab57469d3 random-top-shouts
All checks were successful
Deploy on push / deploy (push) Successful in 46s
2024-03-25 15:03:03 +03:00
4daf746976 views-independent 2024-03-19 16:24:25 +03:00
e97ffacd23 update-after-debug-2 2024-03-18 15:01:43 +03:00
c346481ade update-after-debug 2024-03-18 15:01:10 +03:00
818b4ccae9 debug-get-with-stat 2024-03-14 10:21:04 +03:00
837763ed64 get-author-fix-2 2024-03-14 09:59:38 +03:00
ab36dfe233 debug-get-author 2024-03-14 09:55:14 +03:00
64b1498215 authorid-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-03-14 01:35:09 +03:00
ff7c5df8de trigdeploy
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-03-13 23:02:41 +03:00
3231e42428 query-fix 2024-03-13 15:53:40 +03:00
324f069844 following-error-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-03-13 15:35:49 +03:00
1dd34d5818 following-error 2024-03-13 15:34:17 +03:00
4c0f3087db nginx-ports
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-13 12:50:40 +03:00
13bff800f0 author-id-faster
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-03-13 12:44:08 +03:00
13e2a4b7ba log-color-fix-4
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-03-12 18:27:58 +03:00
9a15cda218 log-color-fix-3 2024-03-12 18:24:44 +03:00
695c9a97eb log-color-fix 2024-03-12 18:17:28 +03:00
b6691b1b7b logger-fix 2024-03-12 18:14:34 +03:00
4667168636 logs-fix 2024-03-12 17:48:34 +03:00
9c7c5fb8d2 multiline-logger-fix 2024-03-12 17:40:55 +03:00
e99acd591a cached-all 2024-03-12 17:26:52 +03:00
a3303837d5 cached-load-fix-2 2024-03-12 17:00:20 +03:00
567f41c0c3 cached-load-fix 2024-03-12 16:50:14 +03:00
23547546cb cached-authors-fix 2024-03-12 16:46:18 +03:00
0b8776a87f topics-fix 2024-03-12 16:23:01 +03:00
358cc86197 debug-topics 2024-03-12 16:21:28 +03:00
6064f0326a dogpiled-cache-authors 2024-03-12 16:18:07 +03:00
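The dogpiled-cache-authors entry above names the dogpile problem: when a cached value expires, many concurrent requests recompute it at once. A sketch of a guard with an asyncio lock over Redis; the key scheme, TTL, and client are assumptions:

```python
import asyncio
import json
from redis.asyncio import Redis  # assumption: redis-py's asyncio client

redis = Redis()
_locks: dict[str, asyncio.Lock] = {}

async def dogpile_cached(key: str, loader, ttl: int = 300):
    """Only one coroutine rebuilds an expired entry; the rest await it."""
    lock = _locks.setdefault(key, asyncio.Lock())
    async with lock:
        cached = await redis.get(key)
        if cached is not None:
            return json.loads(cached)
        value = await loader()  # e.g. an async DB query for authors
        await redis.set(key, json.dumps(value), ex=ttl)
        return value
```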
625836afee authorsby-not-cached 2024-03-12 16:07:21 +03:00
3e57ef5948 views-log-fix 2024-03-12 15:57:46 +03:00
9b7aa57a18 cache-reform 2024-03-12 15:50:57 +03:00
d1a510b093 use-cached-following 2024-03-12 15:28:20 +03:00
26a527473f use-cached-authors 2024-03-12 15:26:36 +03:00
d5a9a18c04 dict-fix 2024-03-12 15:05:45 +03:00
480485c20a circular-fix 2024-03-12 15:01:45 +03:00
37319c2091 cache-events-fix 2024-03-12 14:59:36 +03:00
91ffcb85df typechecker-column-fix 2024-03-12 10:52:32 +03:00
04f7231fe9 refactored-2 2024-03-12 10:36:34 +03:00
a7944f5176 refactored 2024-03-12 10:35:33 +03:00
0e1df1e7ca followers-update-fix 2024-03-12 08:00:42 +03:00
059dd0f9b4 remove-follow-debug 2024-03-11 17:07:37 +03:00
78dbde6273 dict-fix 2024-03-11 16:58:31 +03:00
e6f5cfcb8d return-error-on-follow 2024-03-11 16:52:16 +03:00
ebf08ea2ed clean 2024-03-11 16:17:52 +03:00
c6e045d5ee follows-brushed 2024-03-11 16:12:28 +03:00
4bc469ab04 debug-follow-3 2024-03-11 15:50:44 +03:00
11f3cdeb7c debug-follow-2 2024-03-11 15:41:24 +03:00
9944277908 tuple-fix 2024-03-11 15:21:34 +03:00
8b5a50b7ae author-id-fix 2024-03-11 15:19:10 +03:00
b45ad1082d author-id-fix 2024-03-11 15:18:51 +03:00
10f8faccdd follows-return 2024-03-11 15:15:28 +03:00
4898e43f57 follow-unfollow-2 2024-03-11 15:13:46 +03:00
df55b68a5a follow-unfollow 2024-03-11 14:49:42 +03:00
23be0da876 search-log 2024-03-11 13:47:12 +03:00
e50bbcdb7c debug-unfollow 2024-03-11 13:44:48 +03:00
b3196f6dcb reaction-after-fix 2024-03-11 13:41:15 +03:00
ebbd1d729e reaction-after-debug 2024-03-11 13:39:12 +03:00
e6cd0ecadc unfollow-fix-2 2024-03-11 13:37:35 +03:00
1572c77882 remove-logs 2024-03-11 12:43:37 +03:00
bda2b7b59a unfollow-debug 2024-03-11 12:41:00 +03:00
7234eb9519 less-log 2024-03-11 12:37:34 +03:00
b18ba16aab update_follows_for_author-call-fix 2024-03-11 12:25:08 +03:00
b58406866c set_follows_authors_cache-fix 2024-03-11 12:22:28 +03:00
9933545383 debug-get_author_follows 2024-03-11 12:20:50 +03:00
1c7729a5b9 query-fixes 2024-03-11 12:10:14 +03:00
e23c49b6c6 redis-fix 2024-03-11 12:07:19 +03:00
5f7087b0df follow-fix 2024-03-11 12:03:41 +03:00
1162c62a9b logger-auth 2024-03-11 11:59:20 +03:00
6243c27390 handle-no-userid 2024-03-11 11:56:14 +03:00
bf1068d070 follow-unfollow-fix 2024-03-11 11:33:39 +03:00
20cc14adc6 debug-follow 2024-03-11 11:16:12 +03:00
94be60304e refactored-get-my-shout-topics
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-03-07 14:46:03 +03:00
0182b501fe refactored-get-my-shout-2 2024-03-07 14:44:07 +03:00
0d111bda47 refactored-get-my-shout 2024-03-07 14:42:48 +03:00
6f3ed3704a get-my-shout-api-fix 2024-03-07 14:29:45 +03:00
61088320c9 patch-main-topic-fix 2024-03-07 11:55:23 +03:00
e378cbd442 rm-reaction-fix 2024-03-07 10:18:05 +03:00
c84aae40d3 rm-reaction-debug 2024-03-07 08:13:19 +03:00
e4e681a9ab logs-with-params 2024-03-06 22:18:32 +03:00
5c7b28de90 custom-encoder-fix-3 2024-03-06 22:05:17 +03:00
7a5cbf7438 custom-encoder-fix-2 2024-03-06 22:00:37 +03:00
2b89ab7c78 custom-encoder-fix 2024-03-06 21:57:04 +03:00
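The custom-encoder-fix series above circles around JSON-encoding values the stdlib encoder rejects. A minimal sketch of such an encoder; which types actually broke is not recorded, so datetime and Decimal are illustrative guesses:

```python
import json
from datetime import datetime
from decimal import Decimal

class CustomJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()  # guess: timestamp fields were the culprit
        if isinstance(obj, Decimal):
            return float(obj)
        return super().default(obj)  # anything else still raises TypeError

json.dumps({"created_at": datetime.now()}, cls=CustomJSONEncoder)
```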
4aa4303a59 groupby-fix-2 2024-03-06 15:17:46 +03:00
b13d57ca17 groupby-fix 2024-03-06 15:16:29 +03:00
54eeb5b549 subquery-fix-2 2024-03-06 15:13:05 +03:00
83f12202a8 subquery-fix 2024-03-06 15:11:01 +03:00
045217c011 shout-id-fix 2024-03-06 15:08:20 +03:00
30f5b09a51 typo-fix 2024-03-06 14:27:30 +03:00
7199539a28 reaction-cudl-log 2024-03-06 14:09:21 +03:00
2c1bfaf0fe topics-comments-stat 2024-03-06 13:43:30 +03:00
70c5233305 oauth-name-patch 2024-03-06 13:04:23 +03:00
b82a4bb2fa add_author_stat-fix-2 2024-03-06 12:34:17 +03:00
9f881c0641 add_author_stat-fix+fmt 2024-03-06 12:25:55 +03:00
70589a35da cosmetics 2024-03-06 12:15:26 +03:00
6e046a677c less-cond 2024-03-06 12:09:46 +03:00
c55f696bf3 typo-fix 2024-03-06 12:07:40 +03:00
8bbbe2b0c7 delete-reaction-fix
Some checks failed
Deploy on push / deploy (push) Failing after 18s
2024-03-06 12:03:26 +03:00
cb535cffea forbidden-fix
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-03-06 10:44:08 +03:00
b09ea39668 get-my-shout-resolver 2024-03-05 20:12:17 +03:00
5d8c46e76c drafts-resolver-1
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-05 18:53:18 +03:00
b5727b1b85 update-shout-fix-10 2024-03-05 18:17:48 +03:00
13f6c43df2 update-shout-fix-9 2024-03-05 18:15:21 +03:00
f378925a16 update-shout-fix-8 2024-03-05 18:13:39 +03:00
f68778e529 update-shout-fix-7 2024-03-05 18:10:58 +03:00
fa76d6c7b4 update-shout-fix-6 2024-03-05 18:04:47 +03:00
ee7c464065 update-shout-fix-5 2024-03-05 18:01:47 +03:00
78c7a41c46 update-shout-fix-4 2024-03-05 18:01:29 +03:00
5943f9bf81 update-shout-fix-3 2024-03-05 17:59:00 +03:00
7c75c2accc update-shout-fix-2 2024-03-05 17:53:49 +03:00
12a9880815 update-shout-fix
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-05 16:59:55 +03:00
130942d9dd decor-order-fix 2024-03-05 14:56:19 +03:00
005889c470 less-scope-exception-5 2024-03-05 14:50:50 +03:00
16c425fd5e less-scope-exception-4 2024-03-05 14:45:53 +03:00
cc3e7b982b less-scope-exception- 2024-03-05 14:44:19 +03:00
3e96366887 less-scope-exception-2 2024-03-05 14:39:47 +03:00
c8b55d0d5b less-scope-exception 2024-03-05 14:38:04 +03:00
1099f8a185 401-ex 2024-03-05 12:50:01 +03:00
8a449bbe7a get-shout-access
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-05 12:00:45 +03:00
ef25ebc7bc result-fix-2
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-04 21:08:01 +03:00
2f4747a5de result-fix 2024-03-04 20:34:11 +03:00
e4915dcd7d debug-logs
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-03-04 20:25:47 +03:00
b62f40d549 webhook-fix
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-04 20:24:17 +03:00
21bcda1e3b webhook-fix 2024-03-04 19:08:21 +03:00
5ff28ce31b schema-update 2024-03-04 15:48:04 +03:00
36fefd93be offset-entity-fix
Some checks failed
Deploy on push / deploy (push) Failing after 18s
2024-03-04 15:47:17 +03:00
abfe9f6e0e notifier-fixes
Some checks failed
Deploy on push / deploy (push) Failing after 18s
2024-03-04 13:43:02 +03:00
88ca5a1362 notifier-schema-fix
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-04 10:59:14 +03:00
3016a75332 notifier-integration
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-03-04 10:35:33 +03:00
ad0dc98bc9 webhook-fix 2024-03-03 16:59:15 +03:00
ab7d677a20 long-queries-only 2024-03-01 12:20:06 +03:00
da0a709ce7 sqlalchemy-warn-fix 2024-03-01 12:18:06 +03:00
ef36e38007 groupby-fix 2024-03-01 10:32:18 +03:00
3a04a69d24 typofix+topic-stat 2024-03-01 09:59:19 +03:00
c41ae4ba98 comments-stat-subquery 2024-03-01 09:56:36 +03:00
b0136fd9bc follows-return
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-03-01 00:51:49 +03:00
bdf78bb45d comments-order-hotfix-2
Some checks failed
Deploy on push / deploy (push) Failing after 20s
2024-02-29 20:00:35 +03:00
bd905021ae coalesce-desc-sort 2024-02-29 15:52:36 +03:00
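coalesce-desc-sort, together with the comments-sort-order-fix entry just below, reads like ordering by last activity with a fallback to creation time. The usual SQLAlchemy shape of that, with a hypothetical Reaction model:

```python
from sqlalchemy import Column, DateTime, Integer, func, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Reaction(Base):  # hypothetical model, for illustration only
    __tablename__ = "reaction"
    id = Column(Integer, primary_key=True)
    created_at = Column(DateTime, nullable=False)
    updated_at = Column(DateTime, nullable=True)

# newest activity first; rows never updated fall back to created_at
stmt = select(Reaction).order_by(
    func.coalesce(Reaction.updated_at, Reaction.created_at).desc()
)
```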
978595c246 drafts-ordered 2024-02-29 15:50:26 +03:00
dfbfa9335c get-author-slug-fix 2024-02-29 15:47:32 +03:00
1a563420d3 reaction-sort-type 2024-02-29 15:39:55 +03:00
4d992f1b60 aliased-revert 2024-02-29 15:21:46 +03:00
cc16163673 order-field-fix-2 2024-02-29 15:19:53 +03:00
395120ad7a order-field-fix 2024-02-29 15:17:42 +03:00
acb804f78c sa-warns-back 2024-02-29 15:15:04 +03:00
0437052280 comments-sort-order-fix 2024-02-29 15:10:59 +03:00
fc3bb52431 reindex-fix-6 2024-02-29 14:56:50 +03:00
cb85e24a11 recreate-fixed-4 2024-02-29 14:48:08 +03:00
c8acf6a9ac recreate-fixed-2
Some checks failed
Deploy on push / deploy (push) Failing after 18s
2024-02-29 14:41:32 +03:00
8de765ed50 recreate-fixed 2024-02-29 14:28:51 +03:00
7ad9b7919a search-logs
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-02-29 14:24:53 +03:00
5df82704b3 indexing-fix- 2024-02-29 14:17:10 +03:00
2b530131e5 indexing-fix-5 2024-02-29 14:12:35 +03:00
67d1a3ae5c indexing-fix-4 2024-02-29 14:11:48 +03:00
ca3065f741 indexing-fix-3 2024-02-29 14:09:50 +03:00
f07fd646d3 indexing-fix
Some checks failed
Deploy on push / deploy (push) Failing after 18s
2024-02-29 14:04:24 +03:00
0ea4e596d2 indexing 2024-02-29 13:55:44 +03:00
14c2750d92 search-thread-2 2024-02-29 13:49:34 +03:00
b4f86526a2 search-thread 2024-02-29 13:48:20 +03:00
24cbba0746 search-reindex-fix
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-02-29 13:43:41 +03:00
e656920f7b search-reindex-fix 2024-02-29 13:43:30 +03:00
435279735b views-service-refactor
Some checks failed
Deploy on push / deploy (push) Failing after 19s
2024-02-29 13:18:17 +03:00
9f30f251d6 update-shout-fix 2024-02-29 13:14:14 +03:00
d28024a69b logs-fix 2024-02-29 13:04:25 +03:00
cfb0ba910f redeploy 2024-02-29 12:14:45 +03:00
62b90d73a7 views-logs-fix 2024-02-29 11:48:18 +03:00
aaa39e0a0d no-cursor-events
All checks were successful
Deploy on push / deploy (push) Successful in 1m31s
2024-02-29 11:29:28 +03:00
5bec25fc23 less-logs
Some checks failed
Deploy on push / deploy (push) Failing after 5s
2024-02-29 11:12:54 +03:00
a3c94a9ab7 load-authors-by-fix
Some checks failed
Deploy on push / deploy (push) Failing after 5s
2024-02-29 11:00:41 +03:00
5e8b7cfe98 followers-cache-fixes 2024-02-29 10:42:17 +03:00
977b86a3c6 fix-followers-save 2024-02-29 10:39:07 +03:00
5e400a7618 redis-keys-renamed 2024-02-29 10:34:22 +03:00
10248ffd8c debug-followers-cache 2024-02-29 10:31:49 +03:00
f774c54cc2 followers-cached
Some checks failed
Deploy on push / deploy (push) Failing after 5s
2024-02-29 10:23:08 +03:00
caf45f3d42 .dict-fxt
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-02-29 10:02:29 +03:00
ad5b4a81c3 get-author-debug
Some checks failed
Deploy on push / deploy (push) Failing after 5s
2024-02-29 09:48:41 +03:00
ceecef6a7a return-none
Some checks failed
Deploy on push / deploy (push) Failing after 5s
2024-02-29 09:44:04 +03:00
b26da8f316 search-debug
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-02-29 09:34:40 +03:00
f52c13e082 staging-deploy-test
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-02-29 07:56:23 +03:00
31320c9972 revert-2-queries-less-price
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-28 19:24:05 +03:00
b99ed1a7d1 groupby-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-28 19:14:57 +03:00
6c0b43bd14 random-topic-shouts-patch-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-28 18:20:58 +03:00
7a3ce4a982 .c
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-28 18:15:19 +03:00
ac1fc151ab random-topic-shouts-patch
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-28 18:11:51 +03:00
129c4bccf4 get-followers-scalar-fix
All checks were successful
Deploy on push / deploy (push) Successful in 28s
2024-02-27 17:03:21 +03:00
a993741cf2 get-followers-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-27 16:56:00 +03:00
04d918749f get-followers-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 16:52:11 +03:00
fa7b05a86e get-author-followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-27 16:42:26 +03:00
eadae7f639 logger-improved-2
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-27 16:41:09 +03:00
4c328370c2 logger-improved
All checks were successful
Deploy on push / deploy (push) Successful in 29s
2024-02-27 16:33:25 +03:00
eb295549fb update-tolerate 2024-02-27 16:28:54 +03:00
2e68128dfc cache-refactored
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-27 15:40:53 +03:00
564a8c10b7 cache-author-with-stat
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-27 14:53:13 +03:00
8d058b4902 delete-shout-tolerate
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 14:29:28 +03:00
52f46555a7 auth-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-27 14:16:54 +03:00
fc0e3b5541 authlogs2
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-02-27 14:06:00 +03:00
def6921215 authlogs
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 13:56:21 +03:00
a962435898 root-auth-logs-3
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-02-27 13:55:11 +03:00
7434c47755 root-auth-logs-2 2024-02-27 13:54:47 +03:00
401c058f32 root-auth-logs
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-02-27 13:49:06 +03:00
9f49cde0d7 notuple
All checks were successful
Deploy on push / deploy (push) Successful in 30s
2024-02-27 13:40:56 +03:00
03568ecea0 login-required-async-fix
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-02-27 13:21:50 +03:00
4ee4c3595a async-create-shout-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-27 13:07:14 +03:00
82e129a589 less-fields-author-select-after-reaction 2024-02-27 12:58:24 +03:00
193332f6d8 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 12:49:17 +03:00
cbd8ba6b68 authors-subquery-json-fix 2024-02-27 12:47:42 +03:00
Stepan Vladovskiy
145c5cdbc2 feat: CORS with mp3 and clean up garbage
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 06:05:01 -03:00
ef2f8dca82 compound-select-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-27 11:22:48 +03:00
a5636af259 compound-select-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 11:19:46 +03:00
8914dfc8b0 select_from-author-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 11:09:04 +03:00
23b7fe7af9 select_from-author
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 11:07:24 +03:00
1214dc03d9 less-logs
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 10:53:53 +03:00
fc6b8d3a08 debug-cached-author
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-27 10:41:36 +03:00
3efcfef537 sort-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 20:26:57 +03:00
be27e7306c sort-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 20:07:42 +03:00
02b504cc4f no-distinct
All checks were successful
Deploy on push / deploy (push) Successful in 30s
2024-02-26 20:05:07 +03:00
02b2aad813 no-comments-stat
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 19:50:54 +03:00
2ae3f2875f comments_stat-0
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 19:44:13 +03:00
fbee450bde comments_stat
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 19:38:22 +03:00
248620622a reactions-distinct
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-02-26 18:16:52 +03:00
172b3af6df no-distinct-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 18:12:09 +03:00
c905666591 json-as-dict
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 18:04:34 +03:00
72aa96a99f dict-patch
All checks were successful
Deploy on push / deploy (push) Successful in 26s
2024-02-26 18:00:55 +03:00
431b14bf5b orderby-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 16:04:39 +03:00
3c0a1cf592 less-redis-log
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 15:56:13 +03:00
851a661c6f distinct-reactions
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 15:46:30 +03:00
fec363063d distinct
All checks were successful
Deploy on push / deploy (push) Successful in 29s
2024-02-26 15:21:12 +03:00
ced8c9f75c error-handle-create-shout-2
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 12:52:22 +03:00
4a57866c3d error-handle-create-shout
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 12:22:55 +03:00
a93fa7fb18 async-login-requiered
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 12:14:08 +03:00
2257c3375a nodict
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 11:57:18 +03:00
ecbeb5b85e shout-author-create-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 11:52:57 +03:00
33a59a4acc after-shouts-update-fix
All checks were successful
Deploy on push / deploy (push) Successful in 33s
2024-02-26 11:47:52 +03:00
886ca8c0ff setex-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 05:52:08 +03:00
ebbbe05237 get-author-fix-6
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 05:43:35 +03:00
8fb161470f preparing-cache-data
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 05:36:18 +03:00
28d2227c39 get-author-fix-5
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 05:23:18 +03:00
8b8a284e59 more-caching
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 05:06:27 +03:00
732bd2b098 caching-follows
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 04:58:27 +03:00
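The caching-follows / more-caching run here, plus the setex-fix entry above, describes keeping an author's follows in Redis under a TTL. A minimal sketch; the key layout and the 24-hour expiry are assumptions:

```python
import json
from redis.asyncio import Redis  # assumption: redis-py's asyncio client

redis = Redis()

async def cache_follows(author_id: int, follows: dict, ttl: int = 24 * 3600):
    # SETEX writes the value and its expiry atomically
    await redis.setex(f"author:follows:{author_id}", ttl, json.dumps(follows))

async def get_cached_follows(author_id: int) -> dict | None:
    raw = await redis.get(f"author:follows:{author_id}")
    return json.loads(raw) if raw else None
```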
f40eff2822 events-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 04:46:23 +03:00
eab1700b0d get-author-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 04:22:06 +03:00
a00c68068f follows-cache-fix
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 03:49:56 +03:00
5478ff45e7 get-author-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 23s
2024-02-26 02:07:46 +03:00
8635fd9c08 comments-stat-off-2
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 01:24:32 +03:00
90a6e23e61 comments-stat-off
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 01:10:15 +03:00
152730526f get-author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-26 01:06:10 +03:00
f12d2fc560 get-author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 01:03:11 +03:00
a7f14ee473 author.stat.comments
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 00:29:14 +03:00
5ca072dfaa events-trigger-query-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-26 00:06:37 +03:00
b02b8276a6 get-author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-25 22:45:36 +03:00
8be96daae4 cache-update-fix
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-25 21:47:14 +03:00
fc774adb9f search-authors-fix
All checks were successful
Deploy on push / deploy (push) Successful in 25s
2024-02-25 21:43:30 +03:00
8b3cfebc47 Merge remote-tracking branch 'origin/dev' into dev 2024-02-25 21:27:17 +03:00
f596a9bf2c update-author_cache 2024-02-25 21:27:07 +03:00
7a89bb2783 update-author_cache
All checks were successful
Deploy on push / deploy (push) Successful in 1m26s
2024-02-25 21:16:34 +03:00
314c54969b sa-warning-fix
Some checks failed
Deploy on push / deploy (push) Failing after 1m4s
2024-02-25 20:58:48 +03:00
c7fe7f458c aliased-author-fix
All checks were successful
Deploy on push / deploy (push) Successful in 2m12s
2024-02-25 19:44:33 +03:00
9ea10ba5c1 dockerfile-revert
All checks were successful
Deploy on push / deploy (push) Successful in 1m46s
2024-02-25 19:32:36 +03:00
695c5efbc8 dockerfile-update-4
Some checks failed
Deploy on push / deploy (push) Failing after 2m39s
2024-02-25 19:29:32 +03:00
feea5845a8 dockerfile-update-3
Some checks failed
Deploy on push / deploy (push) Failing after 28s
2024-02-25 19:27:41 +03:00
3b5a6973ef dockerfile-fix
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-02-25 19:08:20 +03:00
b12db9af0e faster-get-author
Some checks failed
Deploy on push / deploy (push) Failing after 8s
2024-02-25 19:02:15 +03:00
1e922e3161 create-all-fix
Some checks failed
Deploy on push / deploy (push) Failing after 35s
2024-02-25 18:36:08 +03:00
a760d253b3 configure-mappers-call-fix-3
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-02-25 18:26:23 +03:00
b5240d9508 configure-mappers-call-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 2m10s
2024-02-25 18:19:12 +03:00
4dbd593cba configure-mappers-call-fix
Some checks failed
Deploy on push / deploy (push) Failing after 35s
2024-02-25 18:08:02 +03:00
309ac2d929 desc-order-fix 2024-02-25 17:58:09 +03:00
2e635abe5e sql-text-fix-order
All checks were successful
Deploy on push / deploy (push) Successful in 1m56s
2024-02-25 17:49:15 +03:00
26c12b2aad order-by-text-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m39s
2024-02-25 17:39:38 +03:00
ad1bb4af19 search-pg-catalog 2024-02-25 16:46:27 +03:00
2222f6fc19 searchable
All checks were successful
Deploy on push / deploy (push) Successful in 4m21s
2024-02-25 16:43:04 +03:00
4b83f5d0f5 sql-text-fix
All checks were successful
Deploy on push / deploy (push) Successful in 3m35s
2024-02-25 16:15:07 +03:00
857a3648a3 pgtrgm-add
All checks were successful
Deploy on push / deploy (push) Successful in 1m49s
2024-02-25 16:12:08 +03:00
a4745df71b sql-text-fix
All checks were successful
Deploy on push / deploy (push) Successful in 2m1s
2024-02-25 16:04:15 +03:00
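pgtrgm-add plus the surrounding sql-text-fix entries point at trigram-based author search through raw SQL. A sketch of that pairing; the table, column, and similarity threshold are guesses:

```python
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://localhost/discours")  # placeholder URL

with engine.begin() as conn:  # begin() so CREATE EXTENSION actually commits
    conn.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm"))
    rows = conn.execute(
        text(
            "SELECT slug, similarity(name, :q) AS sim FROM author "
            "WHERE similarity(name, :q) > 0.3 ORDER BY sim DESC LIMIT 10"
        ),
        {"q": "tolstoy"},
    ).fetchall()
```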
8b15ef9429 fmt
Some checks failed
Deploy on push / deploy (push) Failing after 7s
2024-02-25 16:00:50 +03:00
07a9e7ef56 engine-exec-2
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-02-25 15:56:28 +03:00
146d49be5b table-name-fix-2
Some checks failed
Deploy on push / deploy (push) Failing after 1m43s
2024-02-25 15:47:28 +03:00
ccc5c98a14 typo-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m43s
2024-02-25 15:33:07 +03:00
a149091e3c search-authors-fmt
All checks were successful
Deploy on push / deploy (push) Successful in 1m25s
2024-02-25 15:22:48 +03:00
9aabfacf84 little-redis-cache
All checks were successful
Deploy on push / deploy (push) Successful in 1m25s
2024-02-25 14:58:16 +03:00
9c6a349cc7 re-alias-author
All checks were successful
Deploy on push / deploy (push) Successful in 1m8s
2024-02-25 14:41:04 +03:00
fc58208bdd more-logs
All checks were successful
Deploy on push / deploy (push) Successful in 1m29s
2024-02-25 14:39:26 +03:00
60e7cd03b7 logs
All checks were successful
Deploy on push / deploy (push) Successful in 2m23s
2024-02-25 14:26:44 +03:00
5d8638867d no-ratings-check
All checks were successful
Deploy on push / deploy (push) Successful in 1m30s
2024-02-25 14:08:09 +03:00
fc0e4bb2df aliased-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m45s
2024-02-25 13:54:28 +03:00
c863dda81b ratings-subquery-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m35s
2024-02-25 13:45:33 +03:00
8d47c02511 ratings-subquery-fix 2024-02-25 13:43:12 +03:00
c216161ece one-joined-query
All checks were successful
Deploy on push / deploy (push) Successful in 1m32s
2024-02-25 13:29:57 +03:00
eb4a4fef61 import-fix-3
All checks were successful
Deploy on push / deploy (push) Successful in 1m19s
2024-02-25 12:10:09 +03:00
7370c8ca2d import-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 1m30s
2024-02-25 12:06:41 +03:00
42313184b0 import-fix
All checks were successful
Deploy on push / deploy (push) Successful in 3m59s
2024-02-25 11:35:06 +03:00
efa6ac7d60 get-author-followers-fix
All checks were successful
Deploy on push / deploy (push) Successful in 2m10s
2024-02-25 11:27:08 +03:00
b2357e0afb debug-stat
All checks were successful
Deploy on push / deploy (push) Successful in 1m21s
2024-02-25 09:48:16 +03:00
d58bbe3499 load-authors-by-rating
All checks were successful
Deploy on push / deploy (push) Successful in 1m46s
2024-02-25 09:31:06 +03:00
40305ad35d fix-sawarning
All checks were successful
Deploy on push / deploy (push) Successful in 1m40s
2024-02-25 00:42:22 +03:00
3097c33e44 full-traceback-on-sawarning
All checks were successful
Deploy on push / deploy (push) Successful in 2m11s
2024-02-25 00:06:54 +03:00
6f11652320 fix-int
All checks were successful
Deploy on push / deploy (push) Successful in 1m37s
2024-02-24 21:56:09 +03:00
f5b3cd8f97 debug-query-follows
All checks were successful
Deploy on push / deploy (push) Successful in 1m24s
2024-02-24 21:52:16 +03:00
eaaace4d28 fmt
All checks were successful
Deploy on push / deploy (push) Successful in 3m45s
2024-02-24 21:45:38 +03:00
12137eccda minor-fixes
All checks were successful
Deploy on push / deploy (push) Successful in 1m48s
2024-02-24 21:30:19 +03:00
d7c9622ffa int-id-fix
All checks were successful
Deploy on push / deploy (push) Successful in 5m45s
2024-02-24 21:15:11 +03:00
5e72a08e0f circular-fix-2
All checks were successful
Deploy on push / deploy (push) Successful in 3m49s
2024-02-24 20:42:19 +03:00
a3244fc74b circular-fix
Some checks failed
Deploy on push / deploy (push) Failing after 6s
2024-02-24 19:53:47 +03:00
f1444cbe10 stat-fn-moved
All checks were successful
Deploy on push / deploy (push) Successful in 1m59s
2024-02-24 19:23:53 +03:00
3e58164ae8 ratings-true
All checks were successful
Deploy on push / deploy (push) Successful in 1m42s
2024-02-24 19:12:35 +03:00
003fa1bbac types fixes 2024-02-24 13:22:35 +03:00
0ca83cc91e cache authors by id 2024-02-24 09:26:31 +03:00
02a7b64449 unauthorized-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m52s
2024-02-24 00:00:46 +03:00
dae2c7b689 select-from-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m42s
2024-02-23 23:42:49 +03:00
11ea8b7efb fieldname-fix
All checks were successful
Deploy on push / deploy (push) Successful in 2m2s
2024-02-23 23:26:12 +03:00
1edf93f7ce follows-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m43s
2024-02-23 23:15:16 +03:00
8b9ac594cd query-fixed
All checks were successful
Deploy on push / deploy (push) Successful in 2m36s
2024-02-23 22:43:50 +03:00
fbbc408df6 clean
All checks were successful
Deploy on push / deploy (push) Successful in 1m27s
2024-02-23 22:24:48 +03:00
f16f345040 topics-with-stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m23s
2024-02-23 22:14:08 +03:00
2f81a5cf12 coalesce
All checks were successful
Deploy on push / deploy (push) Successful in 1m37s
2024-02-23 21:34:02 +03:00
586672b279 fieldnames-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m48s
2024-02-23 21:27:38 +03:00
f04e20426f topic-stat-query-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m19s
2024-02-23 21:22:55 +03:00
a05072fd71 separated-follows
All checks were successful
Deploy on push / deploy (push) Successful in 1m43s
2024-02-23 21:10:11 +03:00
3bc7946ab3 stat-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m32s
2024-02-23 20:25:52 +03:00
e80b3ac770 fmt+refactor
All checks were successful
Deploy on push / deploy (push) Successful in 24s
2024-02-23 19:35:40 +03:00
14947225a6 same-shout-on-update-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-23 15:38:13 +03:00
2e2eba68a2 db-adapter-fixes
All checks were successful
Deploy to core / deploy (push) Successful in 1m7s
2024-02-23 15:02:14 +03:00
32bc750071 revert-auth-nocache
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-02-23 14:53:14 +03:00
a0f75c0505 stat-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-02-23 14:43:14 +03:00
5b34cab6bc add-columns-stat
All checks were successful
Deploy to core / deploy (push) Successful in 23s
2024-02-23 14:40:38 +03:00
cc80c92ad3 stat-fix
All checks were successful
Deploy to core / deploy (push) Successful in 3m59s
2024-02-23 14:34:43 +03:00
a55fa8d2ff trace-more
All checks were successful
Deploy to core / deploy (push) Successful in 1m30s
2024-02-23 14:23:13 +03:00
9999c362d4 auth-cache-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m58s
2024-02-23 14:19:54 +03:00
64012344cb alias-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m25s
2024-02-23 14:09:12 +03:00
6e0da78658 alias-fix
All checks were successful
Deploy to core / deploy (push) Successful in 2m7s
2024-02-23 14:05:46 +03:00
14e2828e2d aliased-more
All checks were successful
Deploy to core / deploy (push) Successful in 2m1s
2024-02-23 13:52:31 +03:00
595e4ba87d nosearchinfo
All checks were successful
Deploy to core / deploy (push) Successful in 4m8s
2024-02-23 13:40:40 +03:00
72aa21c9cd get-topic-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m15s
2024-02-23 13:34:31 +03:00
17f79e1622 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev
All checks were successful
Deploy to core / deploy (push) Successful in 5m51s
2024-02-23 10:20:13 +03:00
ec08e85e8f select-from-fix 2024-02-23 10:14:58 +03:00
6ed09d5851 revert 2024-02-23 04:08:29 +03:00
f8b4b0b96f gixing-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m7s
2024-02-23 03:59:28 +03:00
ef7f2d7b92 aliasing
All checks were successful
Deploy to core / deploy (push) Successful in 1m25s
2024-02-23 03:28:46 +03:00
8d97463c1d join-clause-groupby-fixes
All checks were successful
Deploy to core / deploy (push) Successful in 1m6s
2024-02-23 03:03:34 +03:00
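The aliasing and join-clause-groupby-fixes pair above is the standard SQLAlchemy dance: alias a table so it can be joined for stats without name clashes, then group by the primary entity. A self-contained sketch with invented minimal models:

```python
from sqlalchemy import ForeignKey, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column

class Base(DeclarativeBase):
    pass

class Author(Base):  # invented minimal model
    __tablename__ = "author"
    id: Mapped[int] = mapped_column(primary_key=True)

class Reaction(Base):  # invented minimal model
    __tablename__ = "reaction"
    id: Mapped[int] = mapped_column(primary_key=True)
    created_by: Mapped[int] = mapped_column(ForeignKey("author.id"))

rated = aliased(Reaction)  # alias keeps repeated joins to one table unambiguous
stmt = (
    select(Author, func.count(rated.id).label("rating"))
    .select_from(Author)  # anchor the FROM clause on Author explicitly
    .outerjoin(rated, rated.created_by == Author.id)
    .group_by(Author.id)
)
```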
60c7ab5fe4 separate-getter-follows
All checks were successful
Deploy to core / deploy (push) Successful in 3m44s
2024-02-23 02:53:19 +03:00
392cfb19bd separate-getter
All checks were successful
Deploy to core / deploy (push) Successful in 1m53s
2024-02-23 02:49:34 +03:00
3d34c6c540 stat-refactored
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-23 02:08:43 +03:00
b0e2551e9b groupby-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m23s
2024-02-23 00:03:12 +03:00
54f7dd9c1f select-from
All checks were successful
Deploy to core / deploy (push) Successful in 1m19s
2024-02-22 23:53:28 +03:00
d69f29bda3 move-author-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m26s
2024-02-22 23:32:26 +03:00
f8dafda86b no-select-from-fix 2024-02-22 23:13:29 +03:00
96b698f7ff select-from-fix-aliased
All checks were successful
Deploy to core / deploy (push) Successful in 1m53s
2024-02-22 23:13:00 +03:00
a877e1a7b8 select-from-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-02-22 23:07:08 +03:00
00b7aab220 debug-auth
All checks were successful
Deploy to core / deploy (push) Successful in 2m0s
2024-02-22 23:01:13 +03:00
5303aef4f0 alias-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m9s
2024-02-22 22:56:58 +03:00
078e8ab7d1 aliased
All checks were successful
Deploy to core / deploy (push) Successful in 2m1s
2024-02-22 21:22:22 +03:00
ebf342c73b webhook-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m28s
2024-02-22 21:18:20 +03:00
ce736e2624 session-commit-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m15s
2024-02-22 21:10:43 +03:00
88a0d58751 update-last-seen-aware
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-22 13:20:14 +03:00
4a1ee2ac80 add-topic-stats
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-22 13:12:34 +03:00
a5416143df query-follows-fix
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-22 13:12:01 +03:00
d9abea9840 get-user-followsx
All checks were successful
Deploy to core / deploy (push) Successful in 1m25s
2024-02-22 13:07:09 +03:00
0f038ac6d7 caching-author-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m10s
2024-02-22 13:01:38 +03:00
187c14d6b0 slug-patch-on-create
All checks were successful
Deploy to core / deploy (push) Successful in 1m29s
2024-02-22 12:23:46 +03:00
8d06f59702 port-fix
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-21 23:14:06 +03:00
750f00c6ac 1sec-delay
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-21 23:12:47 +03:00
aed1885278 row-adapt
All checks were successful
Deploy to core / deploy (push) Successful in 2m39s
2024-02-21 22:29:27 +03:00
1796d0c82d small-fix
Some checks failed
Deploy to core / deploy (push) Failing after 6m38s
2024-02-21 22:20:17 +03:00
fc3f859602 profiling-less
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-21 22:16:29 +03:00
d50064a97e query-fix7
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-21 22:12:31 +03:00
332be3f12b query-fix6
All checks were successful
Deploy to core / deploy (push) Successful in 4m16s
2024-02-21 22:03:57 +03:00
da33ae92a9 query-fix5
All checks were successful
Deploy to core / deploy (push) Successful in 2m40s
2024-02-21 21:53:11 +03:00
f49fb2d01d db-profiling-simple
All checks were successful
Deploy to core / deploy (push) Successful in 4m19s
2024-02-21 21:47:00 +03:00
296721d2b1 fix-queru-more-2
All checks were successful
Deploy to core / deploy (push) Successful in 2m35s
2024-02-21 21:33:27 +03:00
5f4e30866f fix-queru-more
All checks were successful
Deploy to core / deploy (push) Successful in 2m31s
2024-02-21 21:25:23 +03:00
1c04125921 noworker-5
All checks were successful
Deploy to core / deploy (push) Successful in 3m11s
2024-02-21 21:04:57 +03:00
3db2efdf79 noworker-3
All checks were successful
Deploy to core / deploy (push) Successful in 4m47s
2024-02-21 20:50:26 +03:00
cb64cd66da noworker
All checks were successful
Deploy to core / deploy (push) Successful in 2m51s
2024-02-21 20:46:29 +03:00
9b2d1c96ba fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m10s
2024-02-21 20:38:12 +03:00
1f0d5ae8e8 batch-load-fix
Some checks failed
Deploy to core / deploy (push) Failing after 16m25s
2024-02-21 20:12:47 +03:00
784f790b83 stats-follows
All checks were successful
Deploy to core / deploy (push) Successful in 3m20s
2024-02-21 19:48:33 +03:00
1eac614e35 Merge branch 'dev' of https://dev.discours.io/discours.io/core into dev 2024-02-21 19:48:04 +03:00
214af0cf51 fmt 2024-02-21 19:45:53 +03:00
823e59ea74 fmt
Some checks failed
Deploy to core / deploy (push) Failing after 15m33s
2024-02-21 19:14:58 +03:00
88cd6e1060 dict-query-fix
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-21 19:12:24 +03:00
5b8347ee54 query-fix-2 2024-02-21 19:11:49 +03:00
2e07219732 initial-delay 2024-02-21 19:06:39 +03:00
59c46172c4 almost-dict
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-02-21 19:03:49 +03:00
2e3d85b43d select-fix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-02-21 18:55:21 +03:00
b7cbef01a3 dictify
All checks were successful
Deploy to core / deploy (push) Successful in 2m31s
2024-02-21 18:51:37 +03:00
3f361b1af7 sqlfix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-02-21 18:38:15 +03:00
3ae706d6db healhchecks-warn-out
All checks were successful
Deploy to core / deploy (push) Successful in 1m55s
2024-02-21 18:33:42 +03:00
960cdf30da batch-render-follows
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-02-21 18:26:18 +03:00
ab31d0d296 query_follows-fix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-02-21 18:13:43 +03:00
67fa44b062 redis-save-fi
Some checks failed
Deploy to core / deploy (push) Failing after 1m45s
2024-02-21 18:07:02 +03:00
74e639737e profiling-fix-2 2024-02-21 18:03:02 +03:00
be9f62eb76 profiling-db
Some checks failed
Deploy to core / deploy (push) Failing after 2m5s
2024-02-21 17:55:54 +03:00
e69046a1f8 cache-fixed
Some checks failed
Deploy to core / deploy (push) Failing after 15m39s
2024-02-21 17:37:58 +03:00
63f5a708b7 update-redis-api
Some checks failed
Deploy to core / deploy (push) Failing after 1m27s
2024-02-21 16:06:24 +03:00
33330fb052 logger-restore
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-21 14:23:42 +03:00
a40eb878be async-events-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m35s
2024-02-21 14:21:04 +03:00
9da452c2f0 follower-resolver-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-02-21 13:59:17 +03:00
3b867ded20 redis-hset-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m25s
2024-02-21 13:51:07 +03:00
2cfcab744e fmt
All checks were successful
Deploy to core / deploy (push) Successful in 1m55s
2024-02-21 13:47:33 +03:00
f75eb13971 less-log 2024-02-21 13:45:33 +03:00
9118ae9268 logger-query-id
All checks were successful
Deploy to core / deploy (push) Successful in 1m13s
2024-02-21 13:44:36 +03:00
4ca884f257 debug-get-author-but-userid
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-21 13:27:00 +03:00
9c14f4b4d3 logger-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m51s
2024-02-21 13:22:46 +03:00
fb48bee8df get_author_by_user_id-fix
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-21 13:16:39 +03:00
ba436de055 lesslog
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-21 13:13:43 +03:00
253ee11bb9 logger-timing-logix-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m27s
2024-02-21 13:09:40 +03:00
731f9a45df logger-timing-logix-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-21 13:02:49 +03:00
73f020ae5d fix-circular
All checks were successful
Deploy to core / deploy (push) Successful in 1m59s
2024-02-21 12:34:12 +03:00
762857ffbe trigger-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m59s
2024-02-21 12:22:55 +03:00
8f6416a73c trigger-get-author-fixes
Some checks failed
Deploy to core / deploy (push) Failing after 3m32s
2024-02-21 12:10:30 +03:00
4cde1c14b4 handle-shouts-paginating
Some checks failed
Deploy to core / deploy (push) Failing after 1m33s
2024-02-21 11:59:47 +03:00
ee577c75fd graphql-schema-update
Some checks failed
Deploy to core / deploy (push) Failing after 1m46s
2024-02-21 11:52:57 +03:00
9eee73acf3 shouts-follows
All checks were successful
Deploy to core / deploy (push) Successful in 5m54s
2024-02-21 11:35:13 +03:00
7cf702eb98 fmt
All checks were successful
Deploy to core / deploy (push) Successful in 2m0s
2024-02-21 10:27:16 +03:00
4f26812340 appdata-triggers
All checks were successful
Deploy to core / deploy (push) Successful in 1m16s
2024-02-20 21:57:39 +03:00
66f1c654cf format-multiline-log-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m8s
2024-02-20 19:45:55 +03:00
abc752c629 format-multiline-log-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m10s
2024-02-20 19:42:14 +03:00
333340056e logger-3301
All checks were successful
Deploy to core / deploy (push) Successful in 1m25s
2024-02-20 19:37:20 +03:00
3c03688544 logger-3000
Some checks failed
Deploy to core / deploy (push) Failing after 1m9s
2024-02-20 19:33:24 +03:00
b59a8ef323 root-logger
All checks were successful
Deploy to core / deploy (push) Successful in 1m27s
2024-02-20 19:23:38 +03:00
183755e637 one-logger
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-20 19:19:46 +03:00
822815fdac logger-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m10s
2024-02-20 19:01:50 +03:00
9f10a23345 typo-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-20 18:46:30 +03:00
86754c341d log
Some checks failed
Deploy to core / deploy (push) Failing after 2m7s
2024-02-20 18:42:14 +03:00
20e9add575 log
Some checks failed
Deploy to core / deploy (push) Failing after 3m51s
2024-02-20 18:37:53 +03:00
b5cdface63 logger-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m25s
2024-02-20 18:28:38 +03:00
dd2301343f loggerfix
All checks were successful
Deploy to core / deploy (push) Successful in 1m25s
2024-02-20 18:22:54 +03:00
f7d0d10d50 debug-auth 2024-02-20 18:20:57 +03:00
e85c179d93 muiltilinelog
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-20 18:16:17 +03:00
d8a4481aab logger-fix
All checks were successful
Deploy to core / deploy (push) Successful in 3m17s
2024-02-20 18:10:36 +03:00
cbb4533855 depfix
All checks were successful
Deploy to core / deploy (push) Successful in 1m32s
2024-02-20 18:04:59 +03:00
40e52b4d71 nosentry
Some checks failed
Deploy to core / deploy (push) Failing after 1m56s
2024-02-20 17:54:43 +03:00
f283ea048b logs-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m28s
2024-02-20 17:49:21 +03:00
0febd91b25 logs-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m22s
2024-02-20 17:46:33 +03:00
0e701020bb lesslog
All checks were successful
Deploy to core / deploy (push) Successful in 3m34s
2024-02-20 17:27:30 +03:00
0d1b73878e debug-auth 2024-02-20 17:22:55 +03:00
5af3dcb132 typo-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m26s
2024-02-20 12:58:16 +03:00
8b08e23801 fixmodel
All checks were successful
Deploy to core / deploy (push) Successful in 1m27s
2024-02-20 12:53:15 +03:00
6377bc3d64 revert
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-20 12:40:22 +03:00
811086de83 simpler-author-model
All checks were successful
Deploy to core / deploy (push) Successful in 1m22s
2024-02-20 12:04:45 +03:00
a00fe8b8ef orm-update2
All checks were successful
Deploy to core / deploy (push) Successful in 1m28s
2024-02-20 11:53:55 +03:00
d590884dca change-index
All checks were successful
Deploy to core / deploy (push) Successful in 3m51s
2024-02-20 11:47:37 +03:00
da9ccbd0cc ratings-model-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m29s
2024-02-20 10:52:30 +03:00
69984788fa no-unique-index
All checks were successful
Deploy to core / deploy (push) Successful in 1m33s
2024-02-19 17:22:38 +03:00
981a4c4fce buildsystemver-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m10s
2024-02-19 16:29:05 +03:00
67d6d7134a buildsystemver-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-19 16:23:24 +03:00
2d75593cc2 realname-core
All checks were successful
Deploy to core / deploy (push) Successful in 1m57s
2024-02-19 16:18:35 +03:00
e483ea9329 no-searchclient-info
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-19 16:07:52 +03:00
09887bc516 handle-exception
All checks were successful
Deploy to core / deploy (push) Successful in 1m23s
2024-02-19 15:51:09 +03:00
74233e96ff auth-cached-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m31s
2024-02-19 15:31:51 +03:00
e5edc97ab1 auth-cached-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m52s
2024-02-19 15:18:25 +03:00
75edee4fe9 we-all-made-of-stars
All checks were successful
Deploy to core / deploy (push) Successful in 1m21s
2024-02-19 14:54:13 +03:00
37230a8392 ruff-up 2024-02-19 14:46:45 +03:00
6d3c0ee39e isort+authfix
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-02-19 14:45:55 +03:00
b89060f15f model-index-slug
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-19 13:25:47 +03:00
8193bd0178 all-cached-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m30s
2024-02-19 13:16:44 +03:00
1fa97908b2 auth-cached-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-19 12:56:58 +03:00
a39db6991c depfix
All checks were successful
Deploy to core / deploy (push) Successful in 2m18s
2024-02-19 12:49:33 +03:00
add5f6df63 cache-jwt-validation
Some checks failed
Deploy to core / deploy (push) Failing after 28s
2024-02-19 12:40:26 +03:00
cf8934c605 schema-main 2024-02-19 11:58:31 +03:00
680242f1e3 schema-main 2024-02-19 11:58:02 +03:00
0301d8041d schema-move-test
All checks were successful
Deploy to core / deploy (push) Successful in 1m54s
2024-02-19 11:20:13 +03:00
2464b91f9b fix-env
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-02-19 11:16:48 +03:00
ddf203a869 healthcheck
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-19 11:15:53 +03:00
b01bf77d8e fix-pjs
Some checks failed
Deploy to core / deploy (push) Failing after 1m18s
2024-02-19 11:13:05 +03:00
22466e65e2 4threads-1worker
Some checks failed
Deploy to core / deploy (push) Failing after 8s
2024-02-19 11:12:00 +03:00
e4036c8a79 no-aiohttp
Some checks failed
Deploy to core / deploy (push) Failing after 9s
2024-02-19 11:11:13 +03:00
5772db6a36 query-time-log
Some checks failed
Deploy to core / deploy (push) Failing after 10s
2024-02-19 11:10:12 +03:00
f01dde845c fixrating
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-02-19 10:33:15 +03:00
e6720ccaaf restore-struct
Some checks failed
Deploy to core / deploy (push) Failing after 7s
2024-02-19 10:14:14 +03:00
7b8e9fbea6 sql-profiling
Some checks failed
Deploy to core / deploy (push) Failing after 1m24s
2024-02-19 10:06:46 +03:00
aa55e952aa dockerfile fix 2024-02-19 09:56:23 +03:00
f74358be76 rating in orm
Some checks failed
Deploy to core / deploy (push) Failing after 3m55s
2024-02-19 09:50:15 +03:00
ca22ac9b13 dev-deploy
All checks were successful
Deploy to core / deploy (push) Successful in 1m21s
2024-02-19 09:40:30 +03:00
1092b8a2ca ml
All checks were successful
Deploy to core / deploy (push) Successful in 6s
2024-02-19 09:38:18 +03:00
a1ed480567 shout-id
All checks were successful
Deploy to core / deploy (push) Successful in 6s
2024-02-17 21:55:50 +03:00
f3df37a41b update-reaction-fix-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m24s
2024-02-17 21:44:22 +03:00
c6df11dc7d update-reaction-fix-2
Some checks failed
Deploy to core / deploy (push) Failing after 1m9s
2024-02-17 21:04:01 +03:00
47ecf4bd1a dockerfile-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m52s
2024-02-17 13:25:24 +03:00
93d536bdba dockerfile-fix
Some checks failed
Deploy to core / deploy (push) Failing after 30s
2024-02-17 13:21:52 +03:00
8a4e4ce6d5 linter-update
Some checks failed
Deploy to core / deploy (push) Failing after 50s
2024-02-17 13:18:54 +03:00
92246bc9d1 create-update-shout-fix
Some checks failed
Deploy to core / deploy (push) Failing after 46s
2024-02-17 09:35:11 +03:00
6ef2c47e11 id-optional-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m58s
2024-02-16 19:59:12 +03:00
0a74ed0f63 update-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-16 19:46:57 +03:00
7aaa9e8d8b sentry-enable
Some checks failed
Deploy to core / deploy (push) Failing after 46s
2024-02-16 12:44:19 +03:00
9a2d7b6f11 fmt
All checks were successful
Deploy to core / deploy (push) Successful in 1m53s
2024-02-16 12:40:41 +03:00
994469c2e3 cleaner-main
All checks were successful
Deploy to core / deploy (push) Successful in 1m51s
2024-02-16 12:34:39 +03:00
79ec5a1841 log-fix
All checks were successful
Deploy to core / deploy (push) Successful in 2m25s
2024-02-16 12:16:00 +03:00
233c71385f more-instance-check
All checks were successful
Deploy to core / deploy (push) Successful in 2m8s
2024-02-15 18:17:18 +03:00
e9ed01e797 postprocess-query-for-order-4
All checks were successful
Deploy to core / deploy (push) Successful in 2m29s
2024-02-14 12:07:55 +03:00
2e60fd2cc7 postprocess-query-for-order-3
All checks were successful
Deploy to core / deploy (push) Successful in 2m9s
2024-02-14 12:05:19 +03:00
9b174d94c6 postprocess-query-for-order-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-02-14 10:51:43 +03:00
3488282c14 postprocess-query-for-order
All checks were successful
Deploy to core / deploy (push) Successful in 2m27s
2024-02-14 10:47:54 +03:00
c732ec8136 reply-to-empty-fix
Some checks failed
Deploy to core / deploy (push) Failing after 50s
2024-02-07 19:50:01 +03:00
180dab1c06 filter-rating-only
All checks were successful
Deploy to core / deploy (push) Successful in 1m58s
2024-02-07 18:39:55 +03:00
85931d04ba delete-reaction-fix
Some checks failed
Deploy to core / deploy (push) Failing after 2m11s
2024-02-07 16:41:17 +03:00
7746d1992f fmt
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-05 12:47:26 +03:00
77dddedae6 no-notify-on-entity-create
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-05 10:08:11 +03:00
23468e4b3e debug-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-03 20:13:51 +03:00
e7a1697f11 get-my-followings-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m34s
2024-02-03 20:08:22 +03:00
e4846f8abb readme-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-02-03 18:40:25 +03:00
33193b2345 update_profile-
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-03 17:44:28 +03:00
2008345e69 common-result-type
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-03 17:35:57 +03:00
d3b2eddf58 return-type-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-02-03 17:31:00 +03:00
18521f3fc5 schema-fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m33s
2024-02-03 17:18:20 +03:00
1b4315fcce bye-following-manageer
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-02-03 17:00:48 +03:00
53ceac108f full-preload
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-02-03 16:17:00 +03:00
066770febc logs
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-03 12:51:52 +03:00
83390912e9 following-manager-upgrade
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-02-03 12:48:36 +03:00
7f04eba208 comment-filter-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-03 12:10:38 +03:00
dea03ffa4c load-reactions-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-03 01:39:57 +03:00
d6151c00c8 update-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m49s
2024-02-02 23:59:42 +03:00
b0e981ece4 update-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-02-02 23:49:12 +03:00
7cd7447796 revied
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-02 23:38:42 +03:00
8cc7e21338 revised
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-02 23:38:16 +03:00
6d3bd13218 check-twice
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-02-02 23:16:04 +03:00
516945ddec publish-fix
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-02-02 21:04:21 +03:00
410d426ea5 indexing-serializer-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-02-02 20:54:17 +03:00
1be8eeb810 typo-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-02 19:57:34 +03:00
61528e5269 visibility-no-need
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-02-02 19:36:30 +03:00
e3ee65f79a unfeature-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m37s
2024-02-02 19:29:26 +03:00
fa2b0eeffa name
All checks were successful
Deploy to core / deploy (push) Successful in 29s
2024-02-02 16:00:57 +03:00
d1f4b05e8d name
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-02-02 15:59:56 +03:00
7a3830653e fmt
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-02-02 15:59:22 +03:00
08b69e5d0a packaging-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-02-02 15:16:53 +03:00
c00361b2ec featured-id-patch
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-02-02 15:05:20 +03:00
bd5f910f8c delete-shout-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-31 22:47:30 +03:00
fbbe6b0751 following-set-fix-4
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-31 18:23:00 +03:00
a6d604f233 following-set-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-31 18:18:36 +03:00
5a810fa126 following-fix-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-31 17:48:36 +03:00
77907c73e0 following-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-31 17:45:02 +03:00
ff30960608 following-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-31 17:11:53 +03:00
1fb37f8aa0 create-reaction-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-31 03:09:58 +03:00
75cff9dbed create-reaction-fox
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-01-31 02:46:52 +03:00
880e295b45 unique-reactions
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-31 01:53:54 +03:00
fceb3b61c7 logs-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-01-30 14:00:53 +03:00
e28f03d7db author-shouts-counter-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m49s
2024-01-30 11:58:17 +03:00
e4d7284681 reacted-stat-restore
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 15:20:28 +03:00
325927739e info-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-29 13:02:14 +03:00
774a5ee596 reorg-code
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-01-29 11:09:10 +03:00
b975e174ca lesslog
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-01-29 11:04:50 +03:00
98b379c8e1 lock-more-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-29 11:01:04 +03:00
133067d09a await-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-01-29 10:48:36 +03:00
e6f12e9106 lesslog
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-29 10:47:31 +03:00
e6366d15f6 debug-search-results
All checks were successful
Deploy to core / deploy (push) Successful in 1m45s
2024-01-29 10:37:21 +03:00
ae9e025959 cache-success-only
All checks were successful
Deploy to core / deploy (push) Successful in 1m46s
2024-01-29 09:45:00 +03:00
2f2fa346ed bloatcodeless
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 07:04:37 +03:00
b9d602eedf not-error-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-01-29 06:56:34 +03:00
9f9ea93526 release-lock-fix
Some checks failed
Deploy to core / deploy (push) Failing after 6s
2024-01-29 06:52:51 +03:00
520b43ee48 bypass-fix
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-29 06:51:26 +03:00
d595a18de4 logs-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-01-29 06:48:11 +03:00
f164fd66d4 index-restruct-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-29 06:45:07 +03:00
5002e85177 index-restruct
All checks were successful
Deploy to core / deploy (push) Successful in 1m40s
2024-01-29 06:42:02 +03:00
56bf5b2874 simpler-disabled
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-29 06:18:36 +03:00
8a88a98b53 ignore-unavial-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-01-29 06:09:40 +03:00
4b9382c47d stability-2
Some checks failed
Deploy to core / deploy (push) Failing after 1m33s
2024-01-29 06:03:37 +03:00
cf23d343d1 stability-fail
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:56:28 +03:00
9e18697cac disabling
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:37:10 +03:00
b574673f00 search-indicies
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:26:49 +03:00
62018534fd index-name-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m34s
2024-01-29 05:20:24 +03:00
f86d2f0cd6 readme-fix 2024-01-29 05:17:47 +03:00
4a6863c474 check-if-exists
All checks were successful
Deploy to core / deploy (push) Successful in 1m33s
2024-01-29 05:13:37 +03:00
f38ee9239f tolerate-error
All checks were successful
Deploy to core / deploy (push) Successful in 1m35s
2024-01-29 05:07:30 +03:00
ff3ccc6174 opensearch-client-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m33s
2024-01-29 05:03:20 +03:00
69eb41fc8d opensearch-client
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-01-29 05:00:54 +03:00
6c398fc593 disabled-logix
Some checks failed
Deploy to core / deploy (push) Failing after 1m30s
2024-01-29 04:47:53 +03:00
258bb4e779 logs-fox
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-01-29 04:43:02 +03:00
e1a27b55cd inner-search-3
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-29 04:41:46 +03:00
2663d1cbc5 allow-selfsigned
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 04:21:28 +03:00
8ff1949170 inner-search-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m36s
2024-01-29 04:09:54 +03:00
2c2932caeb inner-search
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-29 03:27:30 +03:00
35f7a35f27 scored-subquery-fix-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-29 01:57:34 +03:00
1066b85e1b scored-subquery-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-29 01:25:47 +03:00
982d424e1b merged
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-29 00:43:13 +03:00
f749ac7999 scored-subquery-fix 2024-01-29 00:42:03 +03:00
Stepan Vladovskii
84078c7cfe feat: no more force for CI deploy from Gitea
All checks were successful
Deploy to core / deploy (push) Successful in 1m43s
2024-01-28 18:37:47 -03:00
86f2c51f5a virtual-score-column-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-29 00:31:48 +03:00
18fc08f6c8 virtual-score-column-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-01-29 00:28:04 +03:00
b92431e802 search-simpler-query-fix-6
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-28 23:57:34 +03:00
01b9091310 search-simpler-query-fix-5
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-28 23:52:58 +03:00
77114c66ec search-simpler-query-fix-4
All checks were successful
Deploy to core / deploy (push) Successful in 1m39s
2024-01-28 23:46:01 +03:00
30a281a693 search-simpler-query-fix-2-3
All checks were successful
Deploy to core / deploy (push) Successful in 1m47s
2024-01-28 23:42:35 +03:00
c061e5cdb3 search-simpler-query-fix-2
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-28 23:27:40 +03:00
5e4ef40b21 search-simpler-query-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m45s
2024-01-28 23:21:02 +03:00
00a672f96e slug-string-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-28 18:56:06 +03:00
263ceac5a3 found-keys-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m42s
2024-01-28 18:51:12 +03:00
c90b0bd994 ga-metric-fieldname-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m44s
2024-01-28 18:33:04 +03:00
ef9fbe7c88 trig-ga
All checks were successful
Deploy to core / deploy (push) Successful in 1m41s
2024-01-28 18:17:34 +03:00
4bd7e7d0a1 creds-fix
All checks were successful
Deploy to core / deploy (push) Successful in 1m59s
2024-01-28 16:38:17 +03:00
7f203bf900 deps-fix
Some checks failed
Deploy to core / deploy (push) Failing after 2m13s
2024-01-28 16:33:45 +03:00
ebdfdb2613 ga4-data-api-usage
Some checks failed
Deploy to core / deploy (push) Failing after 2m16s
2024-01-28 16:26:40 +03:00
bba87bbf1d daterange-fix
All checks were successful
Deploy to core / deploy (push) Successful in 2m44s
2024-01-28 15:54:38 +03:00
bd004f6fce no-view-id
All checks were successful
Deploy to core / deploy (push) Successful in 3m21s
2024-01-28 15:40:44 +03:00
753a77ae72 daterange-format-fix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-01-28 14:28:03 +03:00
37b6776bdb viewed-service-fix
Some checks are pending
Deploy to core / deploy (push) Waiting to run
2024-01-28 14:20:22 +03:00
38645d063a logs-fix 2024-01-28 12:03:41 +03:00
08845152d1 Merge branch 'feature/core' of v2.discours.io:core into feature/core 2024-01-28 11:42:40 +03:00
dd2ef55f04 Merge branch 'feature/core' of v2.discours.io:core into feature/core
Some checks failed
Deploy to core / deploy (push) Has been cancelled
2024-01-28 11:40:00 +03:00
a98284522b Merge branch 'feature/core' of v2.discours.io:core into feature/core 2024-01-28 10:03:51 +03:00
8a0da7381b Merge branch 'feature/core' of https://dev.discours.io/discours.io/core into feature/core 2024-01-28 10:01:28 +03:00
Stepan Vladovskii
bed2f89964 debug: main.py with import sentry-sdk
All checks were successful
Deploy to core / deploy (push) Successful in 1m38s
2024-01-27 22:11:39 -03:00
Stepan Vladovskii
0eef9b3061 debug: main.py with import sentry-sdk
Some checks failed
Deploy to core / deploy (push) Failing after 1m29s
2024-01-27 22:02:22 -03:00
Stepan Vladovskii
d7a3c840ea feat: gitea runner push branch feature/core to v2.discours.io/core feature/core
Some checks failed
Deploy to core / deploy (push) Failing after 1m32s
2024-01-27 21:55:12 -03:00
Stepan Vladovskii
2c9155cd54 feat: gitea runner branch feature/core to v2.discours.io/core
All checks were successful
Deploy to core / deploy (push) Successful in 6s
2024-01-27 21:50:13 -03:00
Stepan Vladovskii
405337da27 feat: add Sentry Redis performance monitoring 2024-01-27 21:45:24 -03:00
to
f73c2094d9 Update README.md 2024-01-27 08:48:03 +00:00
to
7235d2acc4 Update README.md 2024-01-27 08:45:29 +00:00
to
db33c625db Update README.md 2024-01-27 08:36:03 +00:00
7e4aa83b8e joined-search-fix 2024-01-26 18:28:02 +03:00
6116254d9f search-fix-3 2024-01-26 18:19:10 +03:00
90f164521b search-fix-2 2024-01-26 18:09:25 +03:00
24da021a62 search-fix-2 2024-01-26 17:58:01 +03:00
e7e9089b7c query-fix 2024-01-26 13:28:49 +03:00
59dec8cad6 query-fix 2024-01-26 04:24:47 +03:00
1b80d596cb search-fix-2 2024-01-26 04:05:25 +03:00
3f703ad357 add-granian 2024-01-25 22:58:35 +03:00
e2f2976572 portfix 2024-01-25 22:55:00 +03:00
f3acf878aa Merge branch 'feature/core' of https://dev.discours.io/discours.io/core into feature/core 2024-01-25 22:47:40 +03:00
4a5f1d634a granian+precommit 2024-01-25 22:41:27 +03:00
ad3fd32a6e precommit-3 2024-01-25 11:05:28 +03:00
623e532533 precommit-installed 2024-01-25 11:04:00 +03:00
9aea7b02fb precommit 2024-01-25 11:02:31 +03:00
Stepan Vladovskii
1db943acc0 debug: deploy in branch main of core dokku app 2024-01-24 22:47:36 -03:00
Stepan Vladovskii
ebbbcc97f2 feat: yes, it was deployed on staging
All checks were successful
Deploy to core / deploy (push) Successful in 5s
2024-01-24 21:07:05 -03:00
Stepan Vladovskii
e8d85d9914 debug: simplify main.yml for actions
All checks were successful
Deploy to core / deploy (push) Successful in 1m56s
2024-01-24 20:43:10 -03:00
Stepan Vladovskii
2d73a5b874 debug: simplify main.yml for actions 2024-01-24 20:42:51 -03:00
Stepan Vladovskii
1883f0d733 debug: simplify main.yml for actions
Some checks failed
Deploy to core / deploy (push) Failing after 5s
2024-01-24 20:39:26 -03:00
Stepan Vladovskii
3332088b21 debug: actions without strange yml context 2024-01-24 19:14:41 -03:00
Stepan Vladovskii
284f91b851 feat: change workflow for use branch feature/core in app core 2024-01-24 19:04:36 -03:00
ccbbc04051 .. 2024-01-24 18:19:26 +03:00
7fe026cb41 Merge branch 'feature/core' of https://dev.discours.io/discours.io/core into feature/core
All checks were successful
deploy / deploy (push) Successful in 1m55s
2024-01-24 15:36:46 +03:00
8c33955d5c redis-service-fix 2024-01-24 15:36:34 +03:00
Stepan Vladovskii
ac31a96a89 feat: migrate CI to v2 strange update of yml
All checks were successful
deploy / deploy (push) Successful in 1m48s
2024-01-23 23:31:49 -03:00
Stepan Vladovskii
0923070111 feat: migrate CI to v2
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-23 22:42:38 -03:00
06699a000a delete-reaction-schema-fix 2024-01-23 22:52:40 +03:00
f5f5cea184 load-shouts-feed 2024-01-23 22:20:43 +03:00
92dd45d278 auth-uncache 2024-01-23 22:12:22 +03:00
86e142292f cache-fix 2024-01-23 21:59:46 +03:00
c41fe8b6c9 cached-auth 2024-01-23 21:34:51 +03:00
987eb8c078 visibility-fix 2024-01-23 19:51:26 +03:00
3a6c805bcf rating-fix 2024-01-23 18:07:37 +03:00
e2e85376f0 no-return-reaction-fix 2024-01-23 17:14:43 +03:00
3f65652a5f 0.2.21-ga 2024-01-23 16:04:38 +03:00
954e6dabb7 no-rating-stat 2024-01-23 11:50:58 +03:00
d6dc374b01 community-stats-fix 2024-01-23 05:03:23 +03:00
ce5077a529 reacted_shouts_updates-fix 2024-01-23 04:58:45 +03:00
43f0c517b3 load-random-top-fix 2024-01-23 04:34:48 +03:00
e0395b0ab6 unread-fixes 2024-01-23 04:03:15 +03:00
6f5b5c364a self-regulation-logix-fix 2024-01-23 03:12:59 +03:00
8f846b6f7a refactored 2024-01-23 03:06:48 +03:00
c6088c5705 notifier-call-fix 2024-01-23 02:47:23 +03:00
f4e8f29fdd following-fix-2 2024-01-23 02:41:37 +03:00
5548d6d1f7 following-fix 2024-01-23 02:37:18 +03:00
6c5ce12b7e wrap-order-fix 2024-01-23 02:28:54 +03:00
bb2edd13e9 follow-debug 2024-01-23 02:23:31 +03:00
adbcec2511 reaction-kind-fix 2024-01-23 02:09:42 +03:00
0a38ae8e7e rating-fix 2024-01-23 02:08:59 +03:00
438baeb1a2 reaction-api-upgrade 2024-01-23 01:57:25 +03:00
4cb70d951a rating-sum-fix 2024-01-23 01:51:38 +03:00
9782cf402e create-reaction-fix-7 2024-01-23 01:21:01 +03:00
257ff43eaa create-reaction-debug-6 2024-01-23 01:11:34 +03:00
31f2414064 create-reaction-debug-4 2024-01-23 00:51:50 +03:00
3e6354afed craete-reaction-fix 2024-01-23 00:36:52 +03:00
8eb36f0cc3 create-reaction-debug-2 2024-01-23 00:27:57 +03:00
6be7ada9a1 create-reaction-revision 2024-01-22 23:54:02 +03:00
ad45cd4b10 minor-fixes
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-22 22:21:41 +03:00
0ebea28cce schema-upgrade
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-22 21:38:38 +03:00
a3688ba29a viewed-by-author-by-topic-feat
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-22 21:20:17 +03:00
f67ef7dd05 create-shout-fix 2024-01-22 19:57:48 +03:00
ff6637a51e precounted-views-import 2024-01-22 19:17:39 +03:00
f08a00e3c2 imports-fix 2024-01-22 18:48:58 +03:00
cdb54dbbe0 schema-path-fix 2024-01-22 18:45:35 +03:00
9bd458c47c add
Some checks failed
deploy / deploy (push) Failing after 6s
2024-01-22 18:42:45 +03:00
7b5330625b get-my-followed-fix-2 2024-01-18 15:30:53 +03:00
4320c9674c get-my-followed-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-18 15:12:40 +03:00
9812b308b3 load_shouts_random_top-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-18 14:45:47 +03:00
a43eaee8e0 ackee-load-fix
Some checks failed
deploy / deploy (push) Failing after 8s
2024-01-13 15:57:35 +03:00
033a8b6534 viewed-service-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-13 15:44:56 +03:00
8f690af6ef from-topic-follower-fix
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-13 11:49:12 +03:00
8050a7e828 reactions-by-fix+reacted-shouts-fix
Some checks failed
deploy / deploy (push) Failing after 4s
2024-01-13 11:15:45 +03:00
d561deeb73 v2-deploy
Some checks failed
deploy / deploy (push) Failing after 5s
2024-01-13 11:03:35 +03:00
9c804bc873 get-my-followed-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2024-01-13 11:01:59 +03:00
28f1f1cc57 reactions-sort-groupby-fix
All checks were successful
deploy / deploy (push) Successful in 1m44s
2024-01-13 10:27:45 +03:00
3a0683137d reactions-order-fix 2024-01-13 09:59:56 +03:00
10be35c78c dokku-conf 2024-01-11 20:23:02 +03:00
bd31c0afc5 no-presence-sigil 2024-01-11 20:02:39 +03:00
d9e1fb5161 no-gateway-sigil 2024-01-11 19:52:10 +03:00
3175fbd4a4 start-fix
All checks were successful
deploy / deploy (push) Successful in 1m36s
2024-01-10 16:36:42 +03:00
1b2b060b23 0.2.19-fixes
Some checks failed
deploy / deploy (push) Failing after 1m35s
2024-01-10 16:29:49 +03:00
14dc1c761a fix-get-author-i
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-29 02:31:44 +03:00
aa9ffd3053 ratings-update
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-28 01:37:54 +03:00
0ba38ac700 author-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-28 01:09:38 +03:00
9968fb27f4 author-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-28 01:05:52 +03:00
6207f7d3ed author-rating-fix
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-28 00:30:18 +03:00
da3e7e55fd logs-gic
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-25 10:48:50 +03:00
48b8209e23 search-query-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-25 06:16:40 +03:00
c4c7ce0ad4 search-query-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-25 05:04:53 +03:00
5492887a10 search-query-fix-6
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-25 05:01:49 +03:00
ec70549e48 search-query-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-25 04:56:30 +03:00
c76e1625f3 search-query-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-25 04:52:40 +03:00
d528da9b4a search-query-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-25 04:45:21 +03:00
f4f1b3bb45 search-query-fix
Some checks failed
deploy / deploy (push) Failing after 22s
2023-12-25 04:35:21 +03:00
15fbc56d78 search-results-fix
Some checks failed
deploy / deploy (push) Failing after 1m24s
2023-12-25 04:27:02 +03:00
a4b0fd1a46 add-role-feature
Some checks failed
deploy / deploy (push) Failing after 1m23s
2023-12-25 01:42:39 +03:00
2547bd111b logs-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-25 01:13:17 +03:00
935a12945d case-fix
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-25 01:08:31 +03:00
0ea9f45854 load-random-topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-25 01:06:27 +03:00
c236768c07 trig
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-25 00:02:54 +03:00
88d33f96b0 commented-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-24 21:38:16 +03:00
f9abe421aa commented-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-24 20:46:50 +03:00
8c67438d01 commented-outerjoin-fix
Some checks failed
deploy / deploy (push) Failing after 5s
2023-12-24 18:34:06 +03:00
392712c604 sqlalchemy-debug
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-24 17:25:57 +03:00
8856bfc978 resolvers-fix
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-23 22:00:22 +03:00
bf2c5b67e3 cache-fix
Some checks failed
deploy / deploy (push) Failing after 9s
2023-12-23 08:40:41 +03:00
8e28e3d86d model-fix
All checks were successful
deploy / deploy (push) Successful in 1m22s
2023-12-22 21:25:21 +03:00
4fb581de2d random-topic-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m23s
2023-12-22 21:22:23 +03:00
d9d2e5e954 random-topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-22 21:15:26 +03:00
d65687afb3 unrated-fi
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-22 21:12:42 +03:00
d3ea567797 postmerge
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-22 21:08:37 +03:00
4e769332b7 viewed-fix
All checks were successful
deploy / deploy (push) Successful in 1m22s
2023-12-22 12:09:24 +03:00
b502c581f7 search-result-schema-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-19 15:42:46 +03:00
56cdd4e0f9 search-result-schema-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m19s
2023-12-19 15:32:34 +03:00
d14f0c2f95 search-result-schema-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m21s
2023-12-19 15:28:55 +03:00
5aa8258f16 search-result-schema-fix
All checks were successful
deploy / deploy (push) Successful in 1m21s
2023-12-19 15:18:58 +03:00
71000aad35 search-debug
All checks were successful
deploy / deploy (push) Successful in 1m22s
2023-12-19 15:03:27 +03:00
f52db8f9e5 get-authors-all
All checks were successful
deploy / deploy (push) Successful in 2m3s
2023-12-19 11:09:50 +03:00
8e8952dd46 last-seen-mark-remove
All checks were successful
deploy / deploy (push) Successful in 1m21s
2023-12-18 18:37:39 +03:00
8830908307 auth-connector-less
All checks were successful
deploy / deploy (push) Successful in 1m20s
2023-12-18 10:12:17 +03:00
64b571fccd schema-fix
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-18 03:55:12 +03:00
a2ab5e8473 update-last-seen-author
All checks were successful
deploy / deploy (push) Successful in 1m24s
2023-12-18 01:20:00 +03:00
a6c5243c06 viewed-service-fixes
All checks were successful
deploy / deploy (push) Successful in 1m23s
2023-12-17 23:30:20 +03:00
2c6b872acb following-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:30:28 +03:00
5bac172cce less-logs-auth
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:27:26 +03:00
49fe665d4d following-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-17 15:22:07 +03:00
5cccaf43f7 following-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:15:08 +03:00
ea5b9e5b09 following-fix
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 15:07:53 +03:00
a79f3cd5ec community-author-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-17 09:23:15 +03:00
af4c1efd1c less-logs
All checks were successful
deploy / deploy (push) Successful in 1m26s
2023-12-17 09:20:33 +03:00
312900cec1 community-author-fix
All checks were successful
deploy / deploy (push) Successful in 1m41s
2023-12-17 09:17:23 +03:00
edf20466d6 formatting
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-17 08:40:05 +03:00
509f4409ff upgraded-resolvers-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-17 08:28:34 +03:00
bb0a218eb7 new-resolvers
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 08:16:08 +03:00
81173f989a version-upgrade-0.2.18
All checks were successful
deploy / deploy (push) Successful in 1m25s
2023-12-17 08:08:35 +03:00
4697b44504 import-fix
All checks were successful
deploy / deploy (push) Successful in 1m55s
2023-12-17 07:59:16 +03:00
cd0ba88462 comminity-author-link-name-fix
Some checks failed
deploy / deploy (push) Failing after 1m22s
2023-12-16 20:03:00 +03:00
d0ce4dd3d3 webhook-name-fix
Some checks failed
deploy / deploy (push) Failing after 1m24s
2023-12-16 19:59:43 +03:00
692dd9cfe0 resolvers-updates
Some checks failed
deploy / deploy (push) Failing after 1m30s
2023-12-16 18:24:30 +03:00
bf7bc03e50 webhook-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-15 19:27:23 +03:00
642c4eeb9d debug-webhook
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-15 18:46:53 +03:00
7e16ee97fa webhook-debug
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-15 18:28:44 +03:00
a8ee8cde0b author-hook-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m27s
2023-12-15 17:37:32 +03:00
f9afe3d9dd author-hook-fix
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-15 17:25:21 +03:00
1ca23cc159 author-debug
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-15 16:59:03 +03:00
50016c0ba7 auth-debug
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-15 16:55:12 +03:00
db7aee730f debug-get-author
All checks were successful
deploy / deploy (push) Successful in 1m41s
2023-12-15 16:48:47 +03:00
68978fa1c0 json-fix
All checks were successful
deploy / deploy (push) Successful in 32s
2023-12-14 03:06:35 +03:00
ab9be5ef14 encode-try
All checks were successful
deploy / deploy (push) Successful in 1m42s
2023-12-14 00:57:32 +03:00
2f13943781 fix-operation
All checks were successful
deploy / deploy (push) Successful in 1m40s
2023-12-14 00:53:37 +03:00
afb65d396b operation-name-fix
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-14 00:47:02 +03:00
b36a655090 logs-fix
All checks were successful
deploy / deploy (push) Successful in 1m36s
2023-12-14 00:17:20 +03:00
8fb2764bc1 debug-gql
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-14 00:10:34 +03:00
2518e0357b dep-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-13 23:54:38 +03:00
2fb48d76b6 dep-fix
All checks were successful
deploy / deploy (push) Successful in 1m39s
2023-12-13 23:48:42 +03:00
510402032d auth-connector-fix-3
Some checks failed
deploy / deploy (push) Failing after 1m28s
2023-12-13 23:42:52 +03:00
f51d7539eb auth-connector-fix-2
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-13 23:42:19 +03:00
99349dcad6 auth-connector-fix
Some checks failed
deploy / deploy (push) Failing after 1m31s
2023-12-13 23:39:25 +03:00
c97bd9c784 debug-get-author-2
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-13 22:59:21 +03:00
c68900babf debug-response-3
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-13 21:33:23 +03:00
d1447d3c05 debug-response
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-13 20:49:26 +03:00
fa0e815f13 gql-fix
All checks were successful
deploy / deploy (push) Successful in 1m36s
2023-12-13 20:42:00 +03:00
a86739ed1b debug-response
All checks were successful
deploy / deploy (push) Successful in 1m36s
2023-12-13 20:13:57 +03:00
29c02158b7 debug-authors-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-13 16:32:02 +03:00
4bd5109034 debug-authors
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-12-13 16:27:51 +03:00
359cfb1b75 validate-jwt
All checks were successful
deploy / deploy (push) Successful in 1m35s
2023-12-13 16:20:06 +03:00
a72dd5675e authorizer-full-vars-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-12 11:25:34 +03:00
d27a6897cc authorizer-full-vars-fix
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-12 11:19:22 +03:00
74ca120879 authorizer-connector-debug
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-12 10:30:32 +03:00
954c3740cd authorizer-connector-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m42s
2023-12-12 08:00:46 +03:00
3b7b47599c authorizer-connector-fix-7
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-11 23:06:51 +03:00
2f3ceae8c2 authorizer-connector-fix-6
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-11 22:56:59 +03:00
27612186de authorizer-connector-fix-5
All checks were successful
deploy / deploy (push) Successful in 1m34s
2023-12-11 22:50:13 +03:00
54acfe2b89 authorizer-connector-fix-4
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-11 22:39:58 +03:00
ccfeb89e66 authorizer-connector-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m31s
2023-12-11 22:36:46 +03:00
7937fb89d4 authorizer-connector-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-11 22:12:18 +03:00
7d0268ec52 authorizer-connector-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-12-11 22:10:45 +03:00
2184fcf1f9 reaction-order-fix
All checks were successful
deploy / deploy (push) Successful in 1m48s
2023-12-11 17:57:34 +03:00
159c151ae7 reactions-sort-order-fix
Some checks failed
deploy / deploy (push) Failing after 1m22s
2023-12-10 01:47:22 +03:00
de63f313a5 paginated-authors
All checks were successful
deploy / deploy (push) Successful in 1m29s
2023-12-09 22:02:04 +03:00
275a1f9a08 pop-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 21:21:38 +03:00
1f6f722eef pop-fix-2
All checks were successful
deploy / deploy (push) Successful in 1m30s
2023-12-09 21:15:30 +03:00
b992a73698 pop-fix
All checks were successful
deploy / deploy (push) Successful in 1m55s
2023-12-09 21:03:53 +03:00
d37f68869c alchemy-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 20:15:57 +03:00
0b69b0b856 import-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 20:12:04 +03:00
3acedcc7d6 main_topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m28s
2023-12-09 19:45:02 +03:00
724e9bd5a0 add-main_topic
All checks were successful
deploy / deploy (push) Successful in 1m32s
2023-12-09 19:22:47 +03:00
c1adaf3ed6 schema-fix-author-fix
All checks were successful
deploy / deploy (push) Successful in 1m42s
2023-12-07 21:29:25 +03:00
bb55cfaefe get-shout-fix 2023-12-03 01:42:16 +03:00
b93d91528b search-resolver-fix 2023-12-03 01:22:16 +03:00
4f857e1425 revert-fix 2023-12-03 01:14:36 +03:00
748e3c6db3 join-cond-2 2023-12-03 00:39:06 +03:00
e2271e38e1 join-cond 2023-12-03 00:36:22 +03:00
a6df648af1 stat-fix-8 2023-12-03 00:29:57 +03:00
a3294de4dc stat-fix-7 2023-12-02 23:46:01 +03:00
89c453fedc stat-fix-6 2023-12-02 23:44:36 +03:00
ebe034a527 stat-fix-5 2023-12-02 23:38:28 +03:00
2e3e79f51e stat-fix-4 2023-12-02 23:35:06 +03:00
fcdaabd10d stat-fix-3 2023-12-02 23:30:06 +03:00
807f6ba5b1 stat-fix-2 2023-12-02 23:23:16 +03:00
16bbe995b7 stat-fix 2023-12-02 23:17:26 +03:00
6c607732a8 all-authors-fix-2 2023-12-02 22:45:41 +03:00
1cdf286594 all-authors-fix 2023-12-02 22:33:00 +03:00
fc3745f07e groupby-fix-2 2023-12-02 22:17:09 +03:00
a8b8637057 groupby-fix 2023-12-02 22:13:47 +03:00
34940178ad resolvers-fix 2023-12-02 09:25:08 +03:00
5fe27f9c0c .. 2023-12-01 13:00:10 +03:00
c049f882f3 joinedload-fix-5
All checks were successful
deploy / deploy (push) Successful in 5s
2023-11-30 20:08:56 +03:00
dbab772e62 joinedload-fix-2 2023-11-30 19:41:53 +03:00
e82ca2e385 joinedload-fix 2023-11-30 19:37:53 +03:00
f1ccef7919 no-debug 2023-11-30 16:07:30 +03:00
5f0a8f3b10 replyto-fix 2023-11-30 15:12:12 +03:00
95507ffa48 topicstat-fix 2023-11-30 15:07:08 +03:00
ecf0727631 joined-createdby-fix 2023-11-30 14:04:55 +03:00
e2f2dff755 topics-sql-debug 2023-11-30 13:30:50 +03:00
919aaa951f string-enum-fix 2023-11-30 11:40:27 +03:00
1362eaa125 createdby-fix 2023-11-30 11:27:06 +03:00
685988c219 createdby 2023-11-30 11:04:03 +03:00
2d3f7a51b4 enum-fix 2023-11-30 10:38:41 +03:00
537d588853 stats-fix 2023-11-30 00:21:22 +03:00
f57719d182 author-stats 2023-11-29 23:53:26 +03:00
ece918ac2c plus-fix 2023-11-29 23:22:39 +03:00
a0ee3a1be9 less-classes 2023-11-29 21:11:05 +03:00
dc80255fc7 schema-fix 2023-11-29 15:14:21 +03:00
28853c3a4b published-filter 2023-11-29 15:11:05 +03:00
4a1d7280fc schema-fix 2023-11-29 15:01:51 +03:00
ecaa4ffbc5 param-fox 2023-11-29 14:28:08 +03:00
3454766063 reaction-fix 2023-11-29 14:24:59 +03:00
cd955ecf8a createdat-fix 2023-11-29 14:16:09 +03:00
a950f57efc groupby-createdby 2023-11-29 13:56:26 +03:00
cdb9d31fa4 query-fix 2023-11-29 13:50:20 +03:00
6bac6b737e isnot-fix 2023-11-29 13:44:40 +03:00
af761f916f reactions-filters-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-29 12:59:00 +03:00
f930822d8a filters-fix-2
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 12:33:33 +03:00
64e8c8afd7 filters-fix 2023-11-29 12:29:09 +03:00
44b7a3da98 visibility-fix-2
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 12:19:01 +03:00
0920af7e77 visibility-filter-fix
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 12:16:37 +03:00
fe4e37663e pyrafixes
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 11:00:00 +03:00
63eb952655 aiohttp-try 2023-11-29 10:23:41 +03:00
36ab83d02f shoutauthor-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-29 09:32:24 +03:00
cefc77e8e4 sentry-add
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 09:14:23 +03:00
4b77cea690 sentry-integrations 2023-11-29 07:48:31 +03:00
4ca9491824 routes-fix
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-29 00:19:33 +03:00
6cd2fc0f80 typed-endpoint
Some checks are pending
deploy / deploy (push) Waiting to run
2023-11-29 00:13:46 +03:00
aaf4c0b876 trig-ci
Some checks failed
deploy / deploy (push) Has been cancelled
2023-11-28 23:13:42 +03:00
269c0e449f webhook-fix
All checks were successful
deploy / deploy (push) Successful in 9s
2023-11-28 22:13:53 +03:00
0c2af2bdf4 new-author-webhook-endpoint
All checks were successful
deploy / deploy (push) Successful in 2m14s
2023-11-28 22:07:53 +03:00
a241a098b9 create-invite-fix
All checks were successful
deploy / deploy (push) Successful in 2m20s
2023-11-28 15:56:32 +03:00
01d7dadd78 load-shouts-filters
All checks were successful
deploy / deploy (push) Successful in 2m17s
2023-11-28 14:17:21 +03:00
168a7079f6 schema-fix
All checks were successful
deploy / deploy (push) Successful in 2m16s
2023-11-28 13:59:36 +03:00
a21efb99df author-invitee-fix
All checks were successful
deploy / deploy (push) Successful in 2m8s
2023-11-28 13:55:05 +03:00
0240005ed1 invite-feature
All checks were successful
deploy / deploy (push) Successful in 2m10s
2023-11-28 13:46:06 +03:00
13ba5ebaed shout-followers
All checks were successful
deploy / deploy (push) Successful in 2m20s
2023-11-28 12:11:45 +03:00
20f7c22441 0.2.16-resolvers-revision
All checks were successful
deploy / deploy (push) Successful in 2m22s
2023-11-28 10:53:48 +03:00
3cf86d9e6e isnot-fix
All checks were successful
deploy / deploy (push) Successful in 2m11s
2023-11-28 08:56:57 +03:00
14ae7fbcc9 resolvers-fix
All checks were successful
deploy / deploy (push) Successful in 2m14s
2023-11-27 21:18:52 +03:00
5f8ec549df emptybody-fix
All checks were successful
deploy / deploy (push) Successful in 2m12s
2023-11-27 21:03:59 +03:00
3b0aedf959 loadshouts-fix
All checks were successful
deploy / deploy (push) Successful in 2m10s
2023-11-27 20:35:26 +03:00
53a0f2e328 iffix
All checks were successful
deploy / deploy (push) Successful in 2m20s
2023-11-27 19:15:34 +03:00
caa2dbfdf3 reaction-model-fix
All checks were successful
deploy / deploy (push) Successful in 2m19s
2023-11-27 19:03:47 +03:00
909ddbd79d pyright-fix
All checks were successful
deploy / deploy (push) Successful in 2m16s
2023-11-27 11:12:42 +03:00
fe60d625e5 notest 2023-11-24 05:21:31 +03:00
4e7250acef logs-fix
All checks were successful
deploy / deploy (push) Successful in 2m5s
2023-11-24 04:53:30 +03:00
167eed436d my-subs-fix
All checks were successful
deploy / deploy (push) Successful in 2m6s
2023-11-24 04:13:55 +03:00
7257f52aeb query-schema-fix
All checks were successful
deploy / deploy (push) Successful in 2m2s
2023-11-24 02:10:13 +03:00
a63cf24812 0.2.15
Some checks failed
deploy / deploy (push) Failing after 1m58s
2023-11-24 02:00:28 +03:00
c150d28447 schema-fix 2023-11-23 23:30:00 +03:00
7d5dc8b8cd nochecks
All checks were successful
deploy / deploy (push) Successful in 2m11s
2023-11-23 01:19:50 +03:00
3ab5d53439 curl-fix
Some checks failed
deploy / deploy (push) Failing after 22s
2023-11-23 00:19:15 +03:00
4b85b602c2 community-fix-2
Some checks failed
deploy / deploy (push) Failing after 1m50s
2023-11-23 00:12:23 +03:00
bdae67804e community-fix
Some checks failed
deploy / deploy (push) Failing after 1m56s
2023-11-23 00:05:04 +03:00
af5746c5d8 imports-fix
Some checks failed
deploy / deploy (push) Failing after 2m1s
2023-11-22 21:23:15 +03:00
3379376016 binary-back-dburl-fix
Some checks failed
deploy / deploy (push) Failing after 1m53s
2023-11-22 21:06:45 +03:00
998340baf8 psycopg2-ix
Some checks failed
deploy / deploy (push) Failing after 1m36s
2023-11-22 21:04:51 +03:00
9ee850ddb7 import-fix
Some checks failed
deploy / deploy (push) Failing after 25s
2023-11-22 20:56:25 +03:00
db76ba3733 0.2.14
Some checks failed
deploy / deploy (push) Failing after 2m1s
2023-11-22 19:38:39 +03:00
e2082b48d3 orm-fix
Some checks failed
deploy / deploy (push) Failing after 1m46s
2023-11-04 12:43:08 +03:00
435d1e4505 new-version-0-2-13
Some checks failed
deploy / deploy (push) Failing after 1m54s
2023-11-03 13:10:22 +03:00
1f5e5472c9 refactoring
Some checks failed
deploy / deploy (push) Failing after 1m56s
2023-10-25 21:33:53 +03:00
20e1fa989a shout-community-fix
Some checks failed
deploy / deploy (push) Failing after 2m18s
2023-10-25 20:25:53 +03:00
04dedaa3a3 updates-fixes
Some checks failed
deploy / deploy (push) Failing after 2m0s
2023-10-25 20:02:01 +03:00
46e684b28d core-update
Some checks failed
deploy / deploy (push) Failing after 2m0s
2023-10-25 19:55:30 +03:00
e151034bab fix-imports
Some checks failed
deploy / deploy (push) Failing after 1m43s
2023-10-23 17:51:13 +03:00
bf241a8fbd merged-isolated-core
Some checks failed
deploy / deploy (push) Failing after 1m46s
2023-10-23 17:47:11 +03:00
b675188013 upd
All checks were successful
deploy / deploy (push) Successful in 1m33s
2023-10-19 17:42:42 +03:00
fa7a04077a feat: /connect/
All checks were successful
deploy / deploy (push) Successful in 28s
2023-10-18 07:33:36 -03:00
24be18abf1 feat: /connect/=
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-18 07:30:22 -03:00
83b5c2c139 feat: presence-8080
Some checks failed
deploy / deploy (push) Failing after 5s
2023-10-18 07:27:06 -03:00
9e84d6ea37 feat: presence-8080
Some checks are pending
deploy / deploy (push) Waiting to run
2023-10-18 07:25:34 -03:00
4da963f9c8 Noop commit to sync with server changes
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-18 07:18:34 -03:00
c1d6a2d4e3 feat: add wildcat to /connect for handle Token
Some checks failed
deploy / deploy (push) Failing after 5s
2023-10-18 07:13:10 -03:00
518bc4020b feat: add wildcat to /connect for handle Token
Some checks are pending
deploy / deploy (push) Waiting to run
2023-10-18 07:11:08 -03:00
e13cdd7298 feat: add wildcat to /connect for handle Token
All checks were successful
deploy / deploy (push) Successful in 28s
2023-10-18 06:57:40 -03:00
4fec0ca7fb fix-follow-author-notification
All checks were successful
deploy / deploy (push) Successful in 30s
2023-10-16 22:24:10 +03:00
b13d532da2 postmerge
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-16 20:45:01 +03:00
b03ac825b6 unread-fix3
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-16 19:13:39 +03:00
49423ffb93 unread-fix-2
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-16 19:00:18 +03:00
faa97d27c2 unread-fix
All checks were successful
deploy / deploy (push) Successful in 27s
2023-10-16 18:56:03 +03:00
6e0cb18909 cleanup-notifications
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-16 18:30:54 +03:00
066bf72547 cleanup-orm
Some checks failed
deploy / deploy (push) Failing after 23s
2023-10-16 18:28:43 +03:00
bc08ece4c3 user User for awhile, filter follower fields
Some checks failed
deploy / deploy (push) Failing after 23s
2023-10-16 18:25:15 +03:00
562a919fca post-merge
Some checks failed
deploy / deploy (push) Failing after 22s
2023-10-16 18:21:05 +03:00
51ad266b62 Merge branch 'main' of https://github.com/Discours/discours-backend into feature/refactoring-services 2023-10-16 18:19:06 +03:00
15ef976538 using presence service 2023-10-16 18:18:29 +03:00
823b3c56c1 presence service interface fix 2023-10-16 17:51:08 +03:00
34e6a03a89 following manager does not manage chats 2023-10-16 17:50:40 +03:00
0c75902a64 fix-unread 2023-10-16 17:50:05 +03:00
582a21408e feat:test
All checks were successful
deploy / deploy (push) Successful in 1m40s
2023-10-16 09:14:37 -03:00
9a7852e17c feat: add to CI/CD piplin
All checks were successful
deploy / deploy (push) Successful in 29s
2023-10-15 15:33:11 -03:00
cbd4c41d32 feat: add to CI/CD piplin
Some checks failed
deploy / deploy (push) Failing after 22s
2023-10-15 15:32:06 -03:00
fd304768b7 feat: add to CI/CD piplin
Some checks failed
deploy / deploy (push) Failing after 5s
2023-10-15 15:27:44 -03:00
fe078809d6 feat: add to CI/CD piplin 2023-10-15 15:26:48 -03:00
36d36defd8 debug: sigil / after proxy connect 2023-10-15 15:26:48 -03:00
6047a3b259 unread-counter-fix-2 2023-10-13 15:20:06 +03:00
f86da630e8 redis-debug-fix 2023-10-13 15:17:44 +03:00
7348e5d9fe unread-counter-fix2 2023-10-13 15:13:01 +03:00
f5da6d450b unread-counter-fix 2023-10-13 15:10:56 +03:00
882ff39f28 redis-debug 2023-10-13 15:01:35 +03:00
7cd5929df2 token-type-tolerance 2023-10-13 14:47:31 +03:00
e9f68c8fb1 token-type-tolerance 2023-10-13 14:45:24 +03:00
792d60453a new-query-fix2 2023-10-13 14:35:10 +03:00
e648091a3c new-query-fix 2023-10-13 14:32:55 +03:00
1b7aa6aa0a some-more-queries-fix-3 2023-10-13 14:07:13 +03:00
d881f9da27 some-more-queries-fix-2 2023-10-13 14:02:44 +03:00
3f1aff2d0f some-more-queries-fix 2023-10-13 14:00:30 +03:00
d4dbf5c0ae some-more-queries 2023-10-13 13:59:24 +03:00
fed154c7f1 fix-redis 2023-10-13 13:48:17 +03:00
c1abace1c0 few-more-resolvers-fix-2 2023-10-13 13:46:34 +03:00
31824cccc9 few-more-resolvers-fix 2023-10-13 13:45:27 +03:00
85a9077792 few-more-resolvers 2023-10-13 13:41:47 +03:00
bbd8f61408 redis update 2023-10-13 13:13:45 +03:00
82618bf7f3 merged 2023-10-11 23:00:15 +03:00
9720b9f26b Merge branch 'feature/refactoring-services' of https://dev.discours.io/discours.io/backend into feature/refactoring-services 2023-10-11 22:59:13 +03:00
2c15852e9b fix-str 2023-10-11 22:59:05 +03:00
e39450d33b fix: sigil proxy for /connect 2023-10-11 10:49:52 -03:00
df2f097e11 fix: sigil proxy for /connect 2023-10-11 10:42:18 -03:00
a14c70e8c7 unmerge 2023-10-11 15:56:28 +03:00
9c651a6d72 debug 2023-10-11 15:41:04 +03:00
09d77bb1d1 merge-fix-7 2023-10-11 13:07:49 +03:00
eca3de7579 merge-fix-6 2023-10-11 13:02:17 +03:00
2fafe8b618 merged-fix-5 2023-10-11 12:26:08 +03:00
62020bd668 merged-fix-4 2023-10-11 12:23:09 +03:00
f1bdd7a0f8 merged-fix-3 2023-10-11 12:20:58 +03:00
6e63be30e0 merged-fix-2 2023-10-11 12:00:36 +03:00
d89235e82a merged-fix 2023-10-11 11:57:58 +03:00
6252671b85 merged 2023-10-11 11:56:46 +03:00
0e8b39bed6 Merge branch 'main' of dev.discours.io:discoursio-api into feature/refactoring-services 2023-10-11 10:28:04 +03:00
d50a510d52 fix-profile 2023-10-11 08:36:40 +03:00
e1245d1f46 feat: sigil with logs and reguest methods 2023-10-10 09:13:25 -03:00
fbeaac5cad feat: sigil with logs and reguest methods 2023-10-10 09:13:25 -03:00
d6913d6ff5 feat: sigil with logs and reguest methods 2023-10-10 07:52:43 -03:00
93b86eab86 feat: sigil with logs and reguest methods 2023-10-10 07:48:33 -03:00
0eed70c102 port=8080 2023-10-10 01:09:15 +03:00
14fa314e2a fix-load 2023-10-10 00:34:51 +03:00
ad97aa2227 fix-slug-raise-error 2023-10-10 00:29:22 +03:00
57aa4caa84 started-log 2023-10-10 00:22:16 +03:00
0bd44d1fab new-sigi 2023-10-09 23:47:18 +03:00
177a47ba7c _Service-redeploy3 2023-10-06 12:57:08 +03:00
32b00d5065 merged 2023-10-06 12:51:48 +03:00
01be3ac95e schema-sdl-serv 2023-10-06 12:51:07 +03:00
d1366d0b88 feat: @read about keys 2023-10-06 06:14:24 -03:00
fada9a289a feat: right schema in schema.py 2023-10-06 06:05:01 -03:00
6d56e8b3a7 feat: right schema in schema.py 2023-10-06 06:02:11 -03:00
c5ea08f939 feat: add to SDL full Query Mutation schema 2023-10-06 05:47:41 -03:00
d9f47183c8 feat: add in schema.py resolver fro _server 2023-10-06 03:51:23 -03:00
ilya-bkv
6ddfc11a91 getAuthor add stat 2023-10-06 03:39:09 -03:00
2697ec4fcd _Service-redeploy2 2023-10-06 06:39:01 +03:00
e244549a1d _Service 2023-10-06 06:29:52 +03:00
150449a0cf port=80 2023-10-06 05:33:51 +03:00
aa5709c695 fix-reqs2 2023-10-06 03:56:27 +03:00
8a3aa1dae6 fix-reqs 2023-10-06 03:55:43 +03:00
12f65bd8fa fix-poetry-deps 2023-10-06 03:33:48 +03:00
bab6990c87 fix-poetryenv 2023-10-06 03:31:45 +03:00
34f9139742 fix-dockerfile 2023-10-06 03:24:40 +03:00
b64d9d5014 poetry-rty 2023-10-06 03:22:37 +03:00
12416c1b83 path-fix 2023-10-06 02:03:36 +03:00
b2e196d261 forked-ariadne 2023-10-06 02:01:18 +03:00
0e8e8f4d04 git+ssh 2023-10-06 01:49:34 +03:00
8de2eb385b async-fix 2023-10-06 01:45:32 +03:00
12c43dbf32 fix-sync 2023-10-06 01:22:58 +03:00
d34597e349 debug-stat 2023-10-06 01:15:23 +03:00
78a3354d5f logs-fix 2023-10-06 01:12:34 +03:00
720d8a4a68 no-sigil-here2 2023-10-06 01:02:51 +03:00
ffa3fbb252 no-sigil-here 2023-10-06 01:02:14 +03:00
400fff4ef0 schema-no-subs2 2023-10-06 00:42:34 +03:00
4f0377c57d schema-no-subs 2023-10-06 00:34:08 +03:00
7761ccf2d5 schema-path-fix 2023-10-06 00:22:54 +03:00
4de1e64ba2 schema-fix 2023-10-06 00:20:02 +03:00
bbc5dc441d requests-transport 2023-10-06 00:17:24 +03:00
120208a621 rollback-requests 2023-10-06 00:10:46 +03:00
8524d0f843 edge 2023-10-06 00:05:15 +03:00
d26d444975 deps-workaround2 2023-10-06 00:02:25 +03:00
e0bd938a6e deps-workaround 2023-10-05 23:57:04 +03:00
aed91c6375 deps... 2023-10-05 23:55:23 +03:00
34f3098a0d import-fix6 2023-10-05 23:50:14 +03:00
c57f3857a6 import-fix4 2023-10-05 23:47:51 +03:00
c665c0056c import-fix4 2023-10-05 23:45:21 +03:00
d30b4c7d2b import-fix3 2023-10-05 23:42:48 +03:00
f468ccca93 import-fix2 2023-10-05 23:34:02 +03:00
d5b0aaba9b import-fix 2023-10-05 23:31:21 +03:00
da5bbc79b4 deps... 2023-10-05 23:25:52 +03:00
3c936e7860 deps... 2023-10-05 23:22:11 +03:00
46044a0f98 migration-removed 2023-10-05 23:18:55 +03:00
5fedd007c7 git-dep3 2023-10-05 23:18:06 +03:00
3d659caa6e git-dep2 2023-10-05 23:05:09 +03:00
9d2cd9f21f git-dep 2023-10-05 23:04:09 +03:00
f068869727 git-install 2023-10-05 23:01:25 +03:00
45d187786b fix-imports 2023-10-05 22:59:50 +03:00
f6e3320e18 async-mail 2023-10-05 22:47:02 +03:00
9537814718 deps-fixes 2023-10-05 22:38:35 +03:00
458823b894 dockerfile-fix 2023-10-05 22:19:20 +03:00
b8e6f7bb5a requests-removed+fixes 2023-10-05 22:18:05 +03:00
fbc85f6c2d aioredis-removed 2023-10-05 22:00:24 +03:00
deac939ed8 restructured,inbox-removed 2023-10-05 21:46:18 +03:00
6dfec6714a Merge branch 'main' of https://github.com/Discours/discours-backend 2023-10-05 20:22:48 +03:00
2c72189055 lintbump 2023-09-28 15:51:28 +03:00
289 changed files with 58915 additions and 10448 deletions

1
.cursorignore Normal file

@@ -0,0 +1 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)

45
.gitea/workflows/main.yml Normal file

@@ -0,0 +1,45 @@
name: 'Deploy on push'
on: [push]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Cloning repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Get Repo Name
        id: repo_name
        run: echo "::set-output name=repo::$(echo ${GITHUB_REPOSITORY##*/})"
      - name: Get Branch Name
        id: branch_name
        run: echo "::set-output name=branch::$(echo ${GITHUB_REF##*/})"
      - name: Push to dokku for main branch
        if: github.ref == 'refs/heads/main'
        uses: dokku/github-action@master
        with:
          branch: 'main'
          git_remote_url: 'ssh://dokku@v2.discours.io:22/discoursio-api'
          ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
      - name: Push to dokku for dev branch
        if: github.ref == 'refs/heads/dev'
        uses: dokku/github-action@master
        with:
          branch: 'main'
          force: true
          git_remote_url: 'ssh://dokku@v2.discours.io:22/core'
          ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
      - name: Push to dokku for staging branch
        if: github.ref == 'refs/heads/staging'
        uses: dokku/github-action@master
        with:
          branch: 'dev'
          git_remote_url: 'ssh://dokku@staging.discours.io:22/core'
          ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
          git_push_flags: '--force'


@@ -1,16 +0,0 @@
name: Checks
on: [pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    name: Checks
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: 3.10.6
      - run: pip install --upgrade pip
      - run: pip install -r requirements.txt
      - run: pip install -r requirements-dev.txt
      - run: ./checks.sh


@@ -11,17 +11,17 @@ jobs:
    steps:
      - name: Checkout source repository
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: webfactory/ssh-agent@v0.8.0
        with:
          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
          ssh-private-key: ${{ github.action.secrets.SSH_PRIVATE_KEY }}
      - name: Push to dokku
        env:
          HOST_KEY: ${{ secrets.HOST_KEY }}
          HOST_KEY: ${{ github.action.secrets.HOST_KEY }}
        run: |
          echo $HOST_KEY > ~/.ssh/known_hosts
          git remote add dokku dokku@v2.discours.io:discoursio-api

22
.gitignore vendored

@@ -147,3 +147,25 @@ migration/content/**/*.md
*.csv
dev-server.pid
backups/
poetry.lock
.ruff_cache
.jj
.zed
dokku_config
*.db
*.sqlite3
views.json
*.pem
*.key
*.crt
*cache.json
.cursor
node_modules/
panel/graphql/generated/
panel/types.gen.ts
.cursorrules
.cursor/


@@ -1,44 +1,74 @@
exclude: |
  (?x)(
    ^tests/unit_tests/resource|
    _grpc.py|
    _pb2.py
  )
default_language_version:
  python: python3.10

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    rev: v5.0.0
    hooks:
      - id: check-added-large-files
      - id: check-case-conflict
      - id: check-docstring-first
      - id: check-json
      - id: check-merge-conflict
      - id: check-toml
      - id: check-yaml
      - id: check-toml
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: requirements-txt-fixer
      - id: check-added-large-files
      - id: detect-private-key
      - id: check-ast
      - id: check-merge-conflict

  - repo: https://github.com/timothycrosley/isort
    rev: 5.12.0
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.12
    hooks:
      - id: isort
      - id: ruff
        name: ruff lint with fixes
        args: [
          --fix,
          --ignore, UP035,
          --ignore, UP006,
          --ignore, TRY400,
          --ignore, TRY401,
          --ignore, FBT001,
          --ignore, FBT002,
          --ignore, ARG002,
          --ignore, SLF001,
          --ignore, RUF012,
          --ignore, RUF013,
          --ignore, PERF203,
          --ignore, PERF403,
          --ignore, SIM105,
          --ignore, SIM108,
          --ignore, SIM118,
          --ignore, S110,
          --ignore, PLR0911,
          --ignore, RET504,
          --ignore, INP001,
          --ignore, F811,
          --ignore, F841,
          --ignore, B012,
          --ignore, E712,
          --ignore, ANN001,
          --ignore, ANN201,
          --ignore, SIM102,
          --ignore, FBT003
        ]
      - id: ruff-format
        name: ruff format

  - repo: https://github.com/ambv/black
    rev: 23.10.1
    hooks:
      - id: black
  - repo: https://github.com/PyCQA/flake8
    rev: 6.1.0
    hooks:
      - id: flake8
  # - repo: https://github.com/python/mypy
  #   rev: v1.6.1
  #   hooks:
  #     - id: mypy
  # Temporarily disable mypy for stability
  # - repo: https://github.com/pre-commit/mirrors-mypy
  #   rev: v1.16.0
  #   hooks:
  #     - id: mypy
  #       name: mypy type checking
  #       entry: mypy
  #       language: python
  #       types: [python]
  #       require_serial: true
  #       additional_dependencies: [
  #         "types-redis",
  #         "types-requests",
  #         "types-passlib",
  #         "types-Authlib",
  #         "sqlalchemy[mypy]"
  #       ]
  #       args: [
  #         "--config-file=mypy.ini",
  #         "--show-error-codes",
  #         "--no-error-summary",
  #         "--ignore-missing-imports"
  #       ]

1803
CHANGELOG.md Normal file

File diff suppressed because it is too large

5
CHECKS

@@ -1,5 +0,0 @@
WAIT=10
TIMEOUT=10
ATTEMPTS=3
/

134
CONTRIBUTING.md Normal file

@@ -0,0 +1,134 @@
# Contributing to Discours Core
🎉 Thanks for taking the time to contribute!
## 🚀 Quick Start
1. Fork the repository
2. Create a feature branch: `git checkout -b my-new-feature`
3. Make your changes
4. Add tests for your changes
5. Run the test suite: `pytest`
6. Run the linter: `ruff check . --fix && ruff format . --line-length=120`
7. Commit your changes: `git commit -am 'Add some feature'`
8. Push to the branch: `git push origin my-new-feature`
9. Create a Pull Request
## 📋 Development Guidelines
### Code Style
- **Python 3.12+** required
- **Line length**: 120 characters max
- **Type hints**: Required for all functions
- **Docstrings**: Required for public methods
- **Ruff**: linting and formatting
- **MyPy**: type checking
### Testing
- **Pytest** for testing
- **85%+ coverage** required
- Test both positive and negative cases
- Mock external dependencies
### Commit Messages
We follow [Conventional Commits](https://conventionalcommits.org/):
```
feat: add user authentication
fix: resolve database connection issue
docs: update API documentation
test: add tests for reaction system
refactor: improve GraphQL resolvers
```
### Python Code Standards
```python
# Good example
async def create_reaction(
    session: Session,
    author_id: int,
    reaction_data: dict[str, Any]
) -> dict[str, Any]:
    """
    Create a new reaction.

    Args:
        session: Database session
        author_id: ID of the author creating the reaction
        reaction_data: Reaction data

    Returns:
        Created reaction data

    Raises:
        ValueError: If reaction data is invalid
    """
    if not reaction_data.get("kind"):
        raise ValueError("Reaction kind is required")

    reaction = Reaction(**reaction_data)
    session.add(reaction)
    session.commit()

    return reaction.dict()
```
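As a rough illustration of the testing guidelines above, a test for this example might look like the following sketch (the import path and the pytest-asyncio plugin are assumptions, not part of the repository):
```python
# test_reactions.py - hypothetical test for the create_reaction example above
from unittest.mock import MagicMock

import pytest

from services.reactions import create_reaction  # assumed import path


@pytest.mark.asyncio  # requires the pytest-asyncio plugin
async def test_create_reaction_requires_kind():
    # Negative case: a missing "kind" must raise ValueError
    session = MagicMock()  # the external DB dependency is mocked
    with pytest.raises(ValueError):
        await create_reaction(session, author_id=1, reaction_data={})
    # The session must not be touched when validation fails
    session.add.assert_not_called()
```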
## 🐛 Bug Reports
When filing a bug report, please include:
- **Python version**
- **Package versions** (`pip freeze`)
- **Error message** and full traceback
- **Steps to reproduce**
- **Expected vs actual behavior**
## 💡 Feature Requests
For feature requests, please include:
- **Use case** description
- **Proposed solution**
- **Alternatives considered**
- **Breaking changes** (if any)
## 📚 Documentation
- Update documentation for new features
- Add examples for complex functionality
- Use Russian comments for Russian-speaking team members
- Keep README.md up to date
## 🔍 Code Review Process
1. **Automated checks** must pass (tests, linting)
2. **Manual review** by at least one maintainer
3. **Documentation** must be updated if needed
4. **Breaking changes** require discussion
## 🏷️ Release Process
We follow [Semantic Versioning](https://semver.org/):
- **MAJOR**: Breaking changes
- **MINOR**: New features (backward compatible)
- **PATCH**: Bug fixes (backward compatible)
## 🤝 Community
- Be respectful and inclusive
- Help newcomers get started
- Share knowledge and best practices
- Follow our [Code of Conduct](CODE_OF_CONDUCT.md)
## 📞 Getting Help
- **Issues**: For bugs and feature requests
- **Discussions**: For questions and general discussion
- **Documentation**: Check `docs/` folder first
Thank you for contributing! 🙏


@@ -1,9 +1,26 @@
FROM python:3.10
FROM python:slim
EXPOSE 8080
ADD nginx.conf.sigil ./
RUN /usr/local/bin/python -m pip install --upgrade pip
WORKDIR /usr/src/app
COPY requirements.txt ./
RUN pip install -r requirements.txt
RUN apt-get update && apt-get install -y \
    postgresql-client \
    curl \
    build-essential \
    gnupg \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Install Node.js LTS and npm
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
    apt-get install -y nodejs \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY package.json package-lock.json ./
RUN npm ci
COPY . .
RUN npm run build
RUN pip install -r requirements.txt
EXPOSE 8000
CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"]

21
LICENSE Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Discours Team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1 +0,0 @@
web: python server.py

193
README.md

@@ -1,47 +1,180 @@
# discoursio-api
# GraphQL API Backend
<div align="center">
- sqlalchemy
- redis
- ariadne
- starlette
- uvicorn
![Version](https://img.shields.io/badge/v0.7.8-lightgrey)
![Tests](https://img.shields.io/badge/tests%2090%25-lightcyan?logo=pytest&logoColor=black)
![Python](https://img.shields.io/badge/python%203.12+-lightblue?logo=python&logoColor=black)
![PostgreSQL](https://img.shields.io/badge/postgresql%2016.1-lightblue?logo=postgresql&logoColor=black)
![Redis](https://img.shields.io/badge/redis%206.2.0-salmon?logo=redis&logoColor=black)
![txtai](https://img.shields.io/badge/txtai%208.6.0-lavender?logo=elasticsearch&logoColor=black)
![GraphQL](https://img.shields.io/badge/ariadne%200.23.0-pink?logo=graphql&logoColor=black)
![TypeScript](https://img.shields.io/badge/typescript%205.8.3-blue?logo=typescript&logoColor=black)
![SolidJS](https://img.shields.io/badge/solidjs%201.9.1-blue?logo=solid&logoColor=black)
![Vite](https://img.shields.io/badge/vite%207.0.0-blue?logo=vite&logoColor=black)
![Biome](https://img.shields.io/badge/biome%202.0.6-blue?logo=biome&logoColor=black)
on osx
```
brew install redis nginx postgres
brew services start redis
</div>
Backend service providing a GraphQL API for a content management system with reactions, ratings, and topics.
## 📚 Documentation
- [API Documentation](docs/api.md)
- [Authentication Guide](docs/auth.md)
- [Caching System](docs/redis-schema.md)
- [Features Overview](docs/features.md)
- [RBAC System](docs/rbac-system.md)
## 🚀 Core Features
### Shouts (Posts)
- CRUD operations via GraphQL mutations
- Rich filtering and sorting options
- Support for multiple authors and topics
- Rating system with likes/dislikes
- Comments and nested replies
- Bookmarks and following
### Reactions System
- `ReactionKind` types: LIKE, DISLIKE, COMMENT
- Rating calculation for shouts and comments
- User-specific reaction tracking
- Reaction stats and aggregations
- Nested comments support
### Authors & Topics
- Author profiles with stats
- Topic categorization and hierarchy
- Following system for authors/topics
- Activity tracking and stats
- Community features
### RBAC & Permissions
- RBAC with hierarchy using Redis
## 🛠️ Tech Stack
**Core:** Python 3.12 • GraphQL • PostgreSQL • SQLAlchemy • JWT • Redis • txtai
**Server:** Starlette • Granian 1.8.0 • Nginx
**Frontend:** SolidJS 1.9.1 • TypeScript 5.7.2 • Vite 5.4.11
**GraphQL:** Ariadne 0.23.0
**Tools:** Pytest • MyPy • Biome 2.0.6
## 🔧 Development
![PRs Welcome](https://img.shields.io/badge/PRs-welcome-lightcyan?logo=git&logoColor=black)
![Biome](https://img.shields.io/badge/biome%202.0.6-yellow?logo=code&logoColor=black)
![Mypy](https://img.shields.io/badge/mypy-lavender?logo=python&logoColor=black)
### 📦 Prepare environment:
```shell
python3.12 -m venv venv
source venv/bin/activate
pip install -r requirements.dev.txt
```
on debian/ubuntu
```
apt install redis nginx
### 🚀 Run server
First, certificates are required to run the server with HTTPS.
```shell
mkcert -install
mkcert localhost
```
# Local development
Then, run the server:
Install deps first
```
pip install -r requirements.txt
pip install -r requirements-dev.txt
pre-commit install
```shell
python -m granian main:app --interface asgi
```
Create database from backup
```
./restdb.sh
### ⚡ Useful Commands
```shell
# Linting and formatting with Biome
biome check . --write
# Lint only
biome lint .
# Format only
biome format . --write
# Run tests
pytest
# Type checking
mypy .
# dev run
python -m granian main:app --interface asgi
```
Start local server
```
python3 server.py dev
### 📝 Code Style
![Line 120](https://img.shields.io/badge/line%20120-lightblue?logo=prettier&logoColor=black)
![Types](https://img.shields.io/badge/typed-pink?logo=python&logoColor=black)
![Docs](https://img.shields.io/badge/documented-lightcyan?logo=markdown&logoColor=black)
**Biome 2.1.2** for linting and formatting • **120 char** lines • **Type hints** required • **Docstrings** for public methods
### 🔍 GraphQL Development
Test queries in GraphQL Playground at `http://localhost:8000`:
```graphql
# Example query
query GetShout($slug: String) {
get_shout(slug: $slug) {
id
title
main_author {
name
}
}
}
```
# How to do an authorized request
---
Put the header 'Authorization' with token from signIn query or registerUser mutation.
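For example, a minimal sketch of such a request in Python (the endpoint path and port are assumptions based on the dev-run instructions above):
```python
import requests

token = "..."  # token from the signIn query or registerUser mutation

resp = requests.post(
    "http://localhost:8000",  # assumed local GraphQL endpoint
    json={"query": '{ get_shout(slug: "example") { id title } }'},
    headers={"Authorization": f"Bearer {token}"},
)
print(resp.json())
```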
## 📊 Project Stats
# How to debug Ackee
<div align="center">
Set ACKEE_TOKEN var
![Lines](https://img.shields.io/badge/15k%2B-lines-lightcyan?logo=code&logoColor=black)
![Files](https://img.shields.io/badge/100%2B-files-lavender?logo=folder&logoColor=black)
![Coverage](https://img.shields.io/badge/90%25-coverage-gold?logo=test-tube&logoColor=black)
![MIT](https://img.shields.io/badge/MIT-license-silver?logo=balance-scale&logoColor=black)
</div>
## 🤝 Contributing
[CHANGELOG.md](CHANGELOG.md)
![Contributing](https://img.shields.io/badge/contributing-guide-salmon?logo=handshake&logoColor=black) • [Read the guide](CONTRIBUTING.md)
We welcome contributions! Please read our contributing guide before submitting PRs.
## 📄 License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
## 🔗 Links
![Website](https://img.shields.io/badge/discours.io-website-lightblue?logo=globe&logoColor=black)
![GitHub](https://img.shields.io/badge/discours/core-github-silver?logo=github&logoColor=black)
• [discours.io](https://discours.io)
• [Source Code](https://github.com/discours/core)
---
<div align="center">
**Made with ❤️ by the Discours Team**
![Made with Love](https://img.shields.io/badge/made%20with%20❤-pink?logo=heart&logoColor=black)
![Open Source](https://img.shields.io/badge/open%20source-lightcyan?logo=open-source-initiative&logoColor=black)
</div>

6
__init__.py Normal file

@@ -0,0 +1,6 @@
import os
import sys

# Get the path to the project root directory
root_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(root_path)


@@ -6,8 +6,6 @@ script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
@@ -35,32 +33,17 @@ prepend_sys_path = .
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version number format.
version_num_format = %%04d
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# version name format.
version_name_format = %%s
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = %(DB_URL)
sqlalchemy.url = sqlite:///discoursio.db
[post_write_hooks]


@@ -1,3 +0,0 @@
Generic single-database configuration.
https://alembic.sqlalchemy.org/en/latest/tutorial.html


@@ -2,8 +2,9 @@ from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
# Import all models so that migrations are generated correctly
from alembic import context
from base.orm import Base
from services.db import Base
from settings import DB_URL
# this is the Alembic Config object, which provides
@@ -11,7 +12,7 @@ from settings import DB_URL
config = context.config
# override DB_URL
config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
config.set_main_option("sqlalchemy.url", DB_URL)
# Interpret the config file for Python logging.
# This line sets up loggers basically.


@@ -5,17 +5,15 @@ Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:


@@ -1,26 +0,0 @@
"""init alembic
Revision ID: fe943b098418
Revises:
Create Date: 2023-08-19 01:37:57.031933
"""
from typing import Sequence, Union
# import sqlalchemy as sa
# from alembic import op
# revision identifiers, used by Alembic.
revision: str = "fe943b098418"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

15
app.json Normal file

@@ -0,0 +1,15 @@
{
  "healthchecks": {
    "web": [
      {
        "type": "startup",
        "name": "web check",
        "description": "Checking if the app responds to the GET /",
        "path": "/",
        "attempts": 3,
        "warn": true,
        "initialDelay": 1
      }
    ]
  }
}
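This startup check only requires the app to answer GET / with a success status within the configured attempts. A minimal sketch of a route that would satisfy it, assuming a Starlette app like the one started via granian above:
```python
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.routing import Route


async def index(request):
    # Answer the dokku "web check" with a 200
    return PlainTextResponse("ok")


app = Starlette(routes=[Route("/", index)])
```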

179
auth/__init__.py Normal file

@@ -0,0 +1,179 @@
from starlette.requests import Request
from starlette.responses import JSONResponse, RedirectResponse, Response

from auth.internal import verify_internal_auth
from auth.orm import Author
from auth.tokens.storage import TokenStorage
from services.db import local_session
from settings import (
    SESSION_COOKIE_HTTPONLY,
    SESSION_COOKIE_MAX_AGE,
    SESSION_COOKIE_NAME,
    SESSION_COOKIE_SAMESITE,
    SESSION_COOKIE_SECURE,
    SESSION_TOKEN_HEADER,
)
from utils.logger import root_logger as logger


async def logout(request: Request) -> Response:
    """
    Log the user out, revoking the session and deleting the cookie.

    The token may come from:
    1. An HTTP-only cookie
    2. The Authorization header
    """
    token = None
    # Try the cookie first
    if SESSION_COOKIE_NAME in request.cookies:
        token = request.cookies.get(SESSION_COOKIE_NAME)
        logger.debug(f"[auth] logout: token taken from cookie {SESSION_COOKIE_NAME}")

    # If the token was not in the cookie, check the headers
    if not token:
        # Check the primary auth header first
        auth_header = request.headers.get(SESSION_TOKEN_HEADER)
        if auth_header:
            if auth_header.startswith("Bearer "):
                token = auth_header[7:].strip()
                logger.debug(f"[auth] logout: Bearer token taken from header {SESSION_TOKEN_HEADER}")
            else:
                token = auth_header.strip()
                logger.debug(f"[auth] logout: raw token taken from header {SESSION_TOKEN_HEADER}")

        # Fall back to the standard Authorization header
        if not token and "Authorization" in request.headers:
            auth_header = request.headers.get("Authorization")
            if auth_header and auth_header.startswith("Bearer "):
                token = auth_header[7:].strip()
                logger.debug("[auth] logout: Bearer token taken from Authorization header")

    # If a token was found, revoke it
    if token:
        try:
            # Decode the token to get the user_id
            user_id, _, _ = await verify_internal_auth(token)
            if user_id:
                # Revoke the session
                await TokenStorage.revoke_session(token)
                logger.info(f"[auth] logout: token successfully revoked for user {user_id}")
            else:
                logger.warning("[auth] logout: could not extract user_id from token")
        except Exception as e:
            logger.error(f"[auth] logout: error while revoking token: {e}")
    else:
        logger.warning("[auth] logout: no token found in request")

    # Build a response redirecting to the login page
    response = RedirectResponse(url="/")
    # Delete the token cookie
    response.delete_cookie(
        key=SESSION_COOKIE_NAME,
        secure=SESSION_COOKIE_SECURE,
        httponly=SESSION_COOKIE_HTTPONLY,
        samesite=SESSION_COOKIE_SAMESITE,
    )
    logger.info("[auth] logout: cookie successfully deleted")
    return response


async def refresh_token(request: Request) -> JSONResponse:
    """
    Refresh the authentication token.

    The token may come from:
    1. An HTTP-only cookie
    2. The Authorization header

    Returns the new token both as an HTTP-only cookie and in the response body.
    """
    token = None
    source = None

    # Get the current token from the cookie
    if SESSION_COOKIE_NAME in request.cookies:
        token = request.cookies.get(SESSION_COOKIE_NAME)
        source = "cookie"
        logger.debug(f"[auth] refresh_token: token taken from cookie {SESSION_COOKIE_NAME}")

    # If the token was not in the cookie, check the auth headers
    if not token:
        # Check the primary auth header first
        auth_header = request.headers.get(SESSION_TOKEN_HEADER)
        if auth_header:
            if auth_header.startswith("Bearer "):
                token = auth_header[7:].strip()
                source = "header"
                logger.debug(f"[auth] refresh_token: token taken from header {SESSION_TOKEN_HEADER} (Bearer)")
            else:
                token = auth_header.strip()
                source = "header"
                logger.debug(f"[auth] refresh_token: token taken from header {SESSION_TOKEN_HEADER} (raw)")

        # Fall back to the standard Authorization header
        if not token and "Authorization" in request.headers:
            auth_header = request.headers.get("Authorization")
            if auth_header and auth_header.startswith("Bearer "):
                token = auth_header[7:].strip()
                source = "header"
                logger.debug("[auth] refresh_token: token taken from Authorization header")

    if not token:
        logger.warning("[auth] refresh_token: no token found in request")
        return JSONResponse({"success": False, "error": "Token not found"}, status_code=401)

    try:
        # Extract the user info from the token
        user_id, _, _ = await verify_internal_auth(token)
        if not user_id:
            logger.warning("[auth] refresh_token: invalid token")
            return JSONResponse({"success": False, "error": "Invalid token"}, status_code=401)

        # Load the user from the database
        with local_session() as session:
            author = session.query(Author).filter(Author.id == user_id).first()
            if not author:
                logger.warning(f"[auth] refresh_token: user with ID {user_id} not found")
                return JSONResponse({"success": False, "error": "User not found"}, status_code=404)

            # Refresh the session (create a new one, revoke the old one)
            device_info = {
                "ip": request.client.host if request.client else "unknown",
                "user_agent": request.headers.get("user-agent"),
            }
            new_token = await TokenStorage.refresh_session(user_id, token, device_info)
            if not new_token:
                logger.error(f"[auth] refresh_token: failed to refresh token for user {user_id}")
                return JSONResponse({"success": False, "error": "Failed to refresh token"}, status_code=500)

            # Build the response
            response = JSONResponse(
                {
                    "success": True,
                    # Return the token in the body only if it came from a header
                    "token": new_token if source == "header" else None,
                    "author": {"id": author.id, "email": author.email, "name": author.name},
                }
            )
            # Always set the cookie with the new token
            response.set_cookie(
                key=SESSION_COOKIE_NAME,
                value=new_token,
                httponly=SESSION_COOKIE_HTTPONLY,
                secure=SESSION_COOKIE_SECURE,
                samesite=SESSION_COOKIE_SAMESITE,
                max_age=SESSION_COOKIE_MAX_AGE,
            )
            logger.info(f"[auth] refresh_token: token successfully refreshed for user {user_id}")
            return response
    except Exception as e:
        logger.error(f"[auth] refresh_token: error while refreshing token: {e}")
        return JSONResponse({"success": False, "error": str(e)}, status_code=401)


@@ -1,89 +0,0 @@
from functools import wraps
from typing import Optional, Tuple

from graphql.type import GraphQLResolveInfo
from sqlalchemy.orm import exc, joinedload
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection

from auth.credentials import AuthCredentials, AuthUser
from auth.tokenstorage import SessionToken
from base.exceptions import OperationNotAllowed
from base.orm import local_session
from orm.user import Role, User
from settings import SESSION_TOKEN_HEADER


class JWTAuthenticate(AuthenticationBackend):
    async def authenticate(
        self, request: HTTPConnection
    ) -> Optional[Tuple[AuthCredentials, AuthUser]]:
        if SESSION_TOKEN_HEADER not in request.headers:
            return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")

        token = request.headers.get(SESSION_TOKEN_HEADER)
        if not token:
            print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
            return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
                user_id=None, username=""
            )

        if len(token.split(".")) > 1:
            payload = await SessionToken.verify(token)
            with local_session() as session:
                try:
                    user = (
                        session.query(User)
                        .options(
                            joinedload(User.roles).options(joinedload(Role.permissions)),
                            joinedload(User.ratings),
                        )
                        .filter(User.id == payload.user_id)
                        .one()
                    )
                    scopes = {}  # TODO: integrate await user.get_permission()
                    return (
                        AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
                        AuthUser(user_id=user.id, username=""),
                    )
                except exc.NoResultFound:
                    pass

        return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(
            user_id=None, username=""
        )


def login_required(func):
    @wraps(func)
    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
        # debug only
        # print('[auth.authenticate] login required for %r with info %r' % (func, info))
        auth: AuthCredentials = info.context["request"].auth
        # print(auth)
        if not auth or not auth.logged_in:
            # raise Unauthorized(auth.error_message or "Please login")
            return {"error": "Please login first"}
        return await func(parent, info, *args, **kwargs)

    return wrap


def permission_required(resource, operation, func):
    @wraps(func)
    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
        print(
            "[auth.authenticate] permission_required for %r with info %r" % (func, info)
        )  # debug only
        auth: AuthCredentials = info.context["request"].auth
        if not auth.logged_in:
            raise OperationNotAllowed(auth.error_message or "Please login")
        # TODO: add actual check permission logix here
        return await func(parent, info, *args, **kwargs)

    return wrap


@@ -1,43 +1,95 @@
from typing import List, Optional, Text
from typing import Any, Optional

from pydantic import BaseModel
from pydantic import BaseModel, Field

# from base.exceptions import Unauthorized
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST

ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")


class Permission(BaseModel):
    name: Text
    """Permission model for RBAC"""

    resource: str
    operation: str

    def __str__(self) -> str:
        return f"{self.resource}:{self.operation}"


class AuthCredentials(BaseModel):
    user_id: Optional[int] = None
    scopes: Optional[dict] = {}
    logged_in: bool = False
    error_message: str = ""
    """
    Authorization credentials model.

    Used as part of the Starlette authentication machinery.
    """

    author_id: Optional[int] = Field(None, description="Author ID")
    scopes: dict[str, set[str]] = Field(default_factory=dict, description="User permissions")
    logged_in: bool = Field(False, description="Flag indicating whether the user is logged in")
    error_message: str = Field("", description="Authentication error message")
    email: Optional[str] = Field(None, description="User email")
    token: Optional[str] = Field(None, description="JWT authorization token")

    def get_permissions(self) -> list[str]:
        """
        Returns the list of string representations of permissions,
        e.g. ["posts:read", "posts:write", "comments:create"].

        Returns:
            List[str]: List of permissions
        """
        result = []
        for resource, operations in self.scopes.items():
            for operation in operations:
                result.extend([f"{resource}:{operation}"])
        return result

    def has_permission(self, resource: str, operation: str) -> bool:
        """
        Checks whether a specific permission is present.

        Args:
            resource: Resource (e.g. "posts")
            operation: Operation (e.g. "read")

        Returns:
            bool: True if the user has the given permission
        """
        if not self.logged_in:
            return False
        return resource in self.scopes and operation in self.scopes[resource]

    @property
    def is_admin(self):
        # TODO: check admin logix
        return True

    def is_admin(self) -> bool:
        """
        Checks whether the user is an administrator.

    async def permissions(self) -> List[Permission]:
        if self.user_id is None:

        Returns:
            bool: True if the user's email is in the ADMIN_EMAILS list
        """
        return self.email in ADMIN_EMAILS if self.email else False

    async def to_dict(self) -> dict[str, Any]:
        """
        Converts the credentials into a dictionary.

        Returns:
            Dict[str, Any]: Dictionary with the credentials data
        """
        permissions = self.get_permissions()
        return {
            "author_id": self.author_id,
            "logged_in": self.logged_in,
            "is_admin": self.is_admin,
            "permissions": list(permissions),
        }

    async def permissions(self) -> list[Permission]:
        if self.author_id is None:
            # raise Unauthorized("Please login first")
            return {"error": "Please login first"}
        else:
            # TODO: implement permissions logix
            print(self.user_id)
            return NotImplemented


class AuthUser(BaseModel):
    user_id: Optional[int]
    username: Optional[str]

    @property
    def is_authenticated(self) -> bool:
        return self.user_id is not None

    # @property
    # def display_id(self) -> int:
    #     return self.user_id

            return []  # Return an empty list instead of a dict
        # TODO: implement permissions logix
        print(self.author_id)
        return []  # Return an empty list instead of NotImplemented
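A small usage sketch of the new version of the credentials model above (all values are invented for illustration):
```python
creds = AuthCredentials(
    author_id=1,
    logged_in=True,
    email="editor@example.com",  # invented example value
    scopes={"posts": {"read", "write"}},
)

assert creds.has_permission("posts", "read")
assert not creds.has_permission("comments", "create")
print(creds.get_permissions())  # e.g. ["posts:read", "posts:write"] (set order may vary)
```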

521
auth/decorators.py Normal file

@@ -0,0 +1,521 @@
from collections.abc import Callable
from functools import wraps
from typing import Any, Optional

from graphql import GraphQLError, GraphQLResolveInfo
from sqlalchemy import exc

from auth.credentials import AuthCredentials
from auth.exceptions import OperationNotAllowed
from auth.internal import authenticate
from auth.orm import Author
from orm.community import CommunityAuthor
from services.db import local_session
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from settings import SESSION_COOKIE_NAME, SESSION_TOKEN_HEADER
from utils.logger import root_logger as logger

ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")


def get_safe_headers(request: Any) -> dict[str, str]:
    """
    Safely extracts the request headers.

    Args:
        request: Request object

    Returns:
        Dict[str, str]: Dictionary of headers
    """
    headers = {}
    try:
        # First priority: the ASGI scope (the most reliable source)
        if hasattr(request, "scope") and isinstance(request.scope, dict):
            scope_headers = request.scope.get("headers", [])
            if scope_headers:
                headers.update({k.decode("utf-8").lower(): v.decode("utf-8") for k, v in scope_headers})
                logger.debug(f"[decorators] Headers taken from request.scope: {len(headers)}")

        # Second priority: a headers() method or a headers attribute
        if hasattr(request, "headers"):
            if callable(request.headers):
                h = request.headers()
                if h:
                    headers.update({k.lower(): v for k, v in h.items()})
                    logger.debug(f"[decorators] Headers taken from the request.headers() method: {len(headers)}")
            else:
                h = request.headers
                if hasattr(h, "items") and callable(h.items):
                    headers.update({k.lower(): v for k, v in h.items()})
                    logger.debug(f"[decorators] Headers taken from the request.headers attribute: {len(headers)}")
                elif isinstance(h, dict):
                    headers.update({k.lower(): v for k, v in h.items()})
                    logger.debug(f"[decorators] Headers taken from the request.headers dict: {len(headers)}")

        # Third priority: a _headers attribute
        if hasattr(request, "_headers") and request._headers:
            headers.update({k.lower(): v for k, v in request._headers.items()})
            logger.debug(f"[decorators] Headers taken from request._headers: {len(headers)}")
    except Exception as e:
        logger.warning(f"[decorators] Error while accessing headers: {e}")
    return headers
def get_auth_token(request: Any) -> Optional[str]:
    """
    Extracts the authorization token from the request.

    Checked in this order:
    1. auth set by the middleware
    2. auth in the scope
    3. the Authorization header
    4. the auth_token cookie

    Args:
        request: Request object

    Returns:
        Optional[str]: The authorization token, or None
    """
    try:
        # 1. Check auth from the middleware (if the middleware already handled the token)
        if hasattr(request, "auth") and request.auth:
            token = getattr(request.auth, "token", None)
            if token:
                logger.debug(f"[decorators] Token taken from request.auth: {len(token)}")
                return token

        # 2. Check for auth in the scope
        if hasattr(request, "scope") and isinstance(request.scope, dict) and "auth" in request.scope:
            auth_info = request.scope.get("auth", {})
            if isinstance(auth_info, dict) and "token" in auth_info:
                token = auth_info["token"]
                logger.debug(f"[decorators] Token taken from request.scope['auth']: {len(token)}")
                return token

        # 3. Check the Authorization header
        headers = get_safe_headers(request)

        # The primary auth header first
        auth_header = headers.get(SESSION_TOKEN_HEADER.lower(), "")
        if auth_header:
            if auth_header.startswith("Bearer "):
                token = auth_header[7:].strip()
                logger.debug(f"[decorators] Token taken from header {SESSION_TOKEN_HEADER}: {len(token)}")
                return token
            token = auth_header.strip()
            logger.debug(f"[decorators] Raw token taken from header {SESSION_TOKEN_HEADER}: {len(token)}")
            return token

        # Then the standard Authorization header, if the primary one is not set
        if SESSION_TOKEN_HEADER.lower() != "authorization":
            auth_header = headers.get("authorization", "")
            if auth_header and auth_header.startswith("Bearer "):
                token = auth_header[7:].strip()
                logger.debug(f"[decorators] Token taken from the Authorization header: {len(token)}")
                return token

        # 4. Check the cookie
        if hasattr(request, "cookies") and request.cookies:
            token = request.cookies.get(SESSION_COOKIE_NAME)
            if token:
                logger.debug(f"[decorators] Token taken from cookie {SESSION_COOKIE_NAME}: {len(token)}")
                return token

        # No token was found in any of the locations
        logger.debug("[decorators] No authorization token found")
        return None
    except Exception as e:
        logger.warning(f"[decorators] Error while extracting token: {e}")
        return None
async def validate_graphql_context(info: GraphQLResolveInfo) -> None:
    """
    Validates the GraphQL context and checks authorization.

    Args:
        info: GraphQL context information

    Raises:
        GraphQLError: if the context is invalid or the user is not authorized
    """
    # Verbose logging for diagnostics
    logger.debug("[validate_graphql_context] Starting context and authorization checks")

    # Check the basic context structure
    if info is None or not hasattr(info, "context"):
        logger.error("[validate_graphql_context] Missing GraphQL context information")
        msg = "Internal server error: missing context"
        raise GraphQLError(msg)

    request = info.context.get("request")
    if not request:
        logger.error("[validate_graphql_context] Missing request in context")
        msg = "Internal server error: missing request"
        raise GraphQLError(msg)

    # Log the request details
    client_info = {
        "ip": getattr(request.client, "host", "unknown") if hasattr(request, "client") else "unknown",
        "headers_keys": list(get_safe_headers(request).keys()),
    }
    logger.debug(f"[validate_graphql_context] Request details: {client_info}")

    # Check auth from the context: if already authorized, simply return
    auth = getattr(request, "auth", None)
    if auth and getattr(auth, "logged_in", False):
        logger.debug(f"[validate_graphql_context] User already authorized via request.auth: {auth.author_id}")
        return

    # If there is no authentication in request.auth, try to get it from the scope
    if hasattr(request, "scope") and "auth" in request.scope:
        auth_cred = request.scope.get("auth")
        if isinstance(auth_cred, AuthCredentials) and getattr(auth_cred, "logged_in", False):
            logger.debug(f"[validate_graphql_context] User authorized via scope: {auth_cred.author_id}")
            return

    # No auth in either request.auth or the scope: try to get and verify a token
    token = get_auth_token(request)
    if not token:
        # No token found, raise an authorization error
        client_info = {
            "ip": getattr(request.client, "host", "unknown") if hasattr(request, "client") else "unknown",
            "headers": {k: v for k, v in get_safe_headers(request).items() if k not in ["authorization", "cookie"]},
        }
        logger.warning(f"[validate_graphql_context] No authorization token found: {client_info}")
        msg = "Unauthorized - please login"
        raise GraphQLError(msg)

    # Log information about the token we found
    logger.debug(f"[validate_graphql_context] Token found, length: {len(token)}")

    # Use the unified token verification from auth.internal
    auth_state = await authenticate(request)
    logger.debug(
        f"[validate_graphql_context] Authentication result: logged_in={auth_state.logged_in}, author_id={auth_state.author_id}, error={auth_state.error}"
    )

    if not auth_state.logged_in:
        error_msg = auth_state.error or "Invalid or expired token"
        logger.warning(f"[validate_graphql_context] Invalid token: {error_msg}")
        msg = f"Unauthorized - {error_msg}"
        raise GraphQLError(msg)

    # All checks passed: create AuthCredentials and store them in request.scope
    with local_session() as session:
        try:
            author = session.query(Author).filter(Author.id == auth_state.author_id).one()
            logger.debug(f"[validate_graphql_context] Author found: id={author.id}, email={author.email}")

            # Create the auth object with empty permissions;
            # permissions are checked on demand via the RBAC system
            auth_cred = AuthCredentials(
                author_id=author.id,
                scopes={},  # Empty permissions dict
                logged_in=True,
                error_message="",
                email=author.email,
                token=auth_state.token,
            )

            # Store auth in request.scope instead of assigning request.auth directly
            if hasattr(request, "scope") and isinstance(request.scope, dict):
                request.scope["auth"] = auth_cred
                logger.debug(
                    f"[validate_graphql_context] Token successfully verified and set for user {auth_state.author_id}"
                )
            else:
                logger.error("[validate_graphql_context] Could not set auth: request.scope is missing")
                msg = "Internal server error: unable to set authentication context"
                raise GraphQLError(msg)
        except exc.NoResultFound:
            logger.error(f"[validate_graphql_context] User with ID {auth_state.author_id} not found in the database")
            msg = "Unauthorized - user not found"
            raise GraphQLError(msg) from None
    return
def admin_auth_required(resolver: Callable) -> Callable:
    """
    Decorator protecting admin endpoints.

    Checks membership in the list of allowed email addresses.

    Args:
        resolver: GraphQL resolver to protect

    Returns:
        A wrapped resolver that checks admin access rights

    Raises:
        GraphQLError: if the user is not authorized or has no admin access

    Example:
        >>> @admin_auth_required
        ... async def admin_resolver(root, info, **kwargs):
        ...     return "Admin data"
    """

    @wraps(resolver)
    async def wrapper(root: Any = None, info: Optional[GraphQLResolveInfo] = None, **kwargs: dict[str, Any]) -> Any:
        # Verbose logging for diagnostics
        logger.debug(f"[admin_auth_required] Starting authorization check for {resolver.__name__}")

        # Check that the user is authorized
        if info is None:
            logger.error("[admin_auth_required] GraphQL info is None")
            msg = "Invalid GraphQL context"
            raise GraphQLError(msg)

        # Log the request details
        request = info.context.get("request")
        client_info = {
            "ip": getattr(request.client, "host", "unknown") if hasattr(request, "client") else "unknown",
            "headers": {k: v for k, v in get_safe_headers(request).items() if k not in ["authorization", "cookie"]},
        }
        logger.debug(f"[admin_auth_required] Request details: {client_info}")

        # Check for a token before validate_graphql_context
        token = get_auth_token(request)
        logger.debug(f"[admin_auth_required] Token found: {bool(token)}, length: {len(token) if token else 0}")

        try:
            # Check authorization - do NOT catch GraphQLError here!
            await validate_graphql_context(info)
            logger.debug("[admin_auth_required] validate_graphql_context passed successfully")
        except GraphQLError:
            # Re-raise GraphQLError - these are authorization errors
            logger.debug("[admin_auth_required] GraphQLError from validate_graphql_context - re-raising")
            raise

        # Get the auth object
        auth = None
        if hasattr(info.context["request"], "scope") and "auth" in info.context["request"].scope:
            auth = info.context["request"].scope.get("auth")
            logger.debug(f"[admin_auth_required] Auth from scope: {auth.author_id if auth else None}")
        elif hasattr(info.context["request"], "auth"):
            auth = info.context["request"].auth
            logger.debug(f"[admin_auth_required] Auth from request: {auth.author_id if auth else None}")
        else:
            logger.error("[admin_auth_required] Auth found neither in scope nor in request")

        if not auth or not getattr(auth, "logged_in", False):
            logger.error("[admin_auth_required] User not authorized after validate_graphql_context")
            msg = "Unauthorized - please login"
            raise GraphQLError(msg)

        # Check whether the user is an administrator
        try:
            with local_session() as session:
                # Cast author_id to int for database compatibility
                author_id = int(auth.author_id) if auth and auth.author_id else None
                if not author_id:
                    logger.error(f"[admin_auth_required] Author ID is not defined: {auth}")
                    msg = "Unauthorized - invalid user ID"
                    raise GraphQLError(msg)

                author = session.query(Author).filter(Author.id == author_id).one()
                logger.debug(f"[admin_auth_required] Author found: {author.id}, {author.email}")

                # Check whether the user is a system administrator
                if author.email and author.email in ADMIN_EMAILS:
                    logger.info(f"System admin access granted for {author.email} (ID: {author.id})")
                    return await resolver(root, info, **kwargs)

                # A system administrator is determined ONLY by ADMIN_EMAILS
                logger.warning(f"System admin access denied for {author.email} (ID: {author.id}). Not in ADMIN_EMAILS.")
                msg = "Unauthorized - system admin access required"
                raise GraphQLError(msg)
        except exc.NoResultFound:
            logger.error(f"[admin_auth_required] User with ID {auth.author_id} not found in the database")
            msg = "Unauthorized - user not found"
            raise GraphQLError(msg) from None
        except GraphQLError:
            # Re-raise GraphQLError
            raise
        except Exception as e:
            # Catch only unexpected errors here, not GraphQLError
            error_msg = f"Admin access error: {e!s}"
            logger.error(f"[admin_auth_required] Unexpected error: {error_msg}")
            raise GraphQLError(error_msg) from e

    return wrapper
def permission_required(resource: str, operation: str, func: Callable) -> Callable:
"""
Decorator that checks permissions.
Args:
resource: Resource to check
operation: Operation to check
func: Function being decorated
"""
@wraps(func)
async def wrap(parent: Any, info: GraphQLResolveInfo, *args: Any, **kwargs: Any) -> Any:
# First, check authorization
await validate_graphql_context(info)
# Get the authorization object
logger.debug(f"[permission_required] Context: {info.context}")
auth = None
if hasattr(info.context["request"], "scope") and "auth" in info.context["request"].scope:
auth = info.context["request"].scope.get("auth")
if not auth or not getattr(auth, "logged_in", False):
logger.error("[permission_required] User is not authorized after validate_graphql_context")
msg = "Access rights required"
raise OperationNotAllowed(msg)
# Check permissions
with local_session() as session:
try:
author = session.query(Author).filter(Author.id == auth.author_id).one()
# Check basic preconditions
if author.is_locked():
msg = "Account is locked"
raise OperationNotAllowed(msg)
# Check whether the user is an administrator (admins have all permissions)
if author.email in ADMIN_EMAILS:
logger.debug(f"[permission_required] Administrator {author.email} has all permissions")
return await func(parent, info, *args, **kwargs)
# Check the user's roles
admin_roles = ["admin", "super"]
ca = session.query(CommunityAuthor).filter_by(author_id=author.id, community_id=1).first()
if ca:
user_roles = ca.role_list
else:
user_roles = []
if any(role in admin_roles for role in user_roles):
logger.debug(
f"[permission_required] User {author.email} with an administrator role has all permissions"
)
return await func(parent, info, *args, **kwargs)
# Check the specific permission
if not author.has_permission(resource, operation):
logger.warning(
f"[permission_required] User {author.email} has no {operation} permission on {resource}"
)
msg = f"No permission for {operation} on {resource}"
raise OperationNotAllowed(msg)
logger.debug(
f"[permission_required] User {author.email} has {operation} permission on {resource}"
)
return await func(parent, info, *args, **kwargs)
except exc.NoResultFound:
logger.error(f"[permission_required] User with ID {auth.author_id} not found in the database")
msg = "User not found"
raise OperationNotAllowed(msg) from None
return wrap
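Because permission_required takes the resolver as its third argument rather than acting as a decorator factory, it is applied by wrapping explicitly. A minimal sketch, assuming a hypothetical update_shout resolver and illustrative "shout"/"update" resource and operation names:

async def update_shout(parent, info, shout_id: int, **kwargs):
# hypothetical resolver body
return {"shout_id": shout_id}

update_shout = permission_required("shout", "update", update_shout)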
def login_accepted(func: Callable) -> Callable:
"""
Decorator that checks that the user is authenticated.
Args:
func: resolver function to decorate
Returns:
Callable: wrapped function
"""
@wraps(func)
async def wrap(parent: Any, info: GraphQLResolveInfo, *args: Any, **kwargs: Any) -> Any:
try:
await validate_graphql_context(info)
return await func(parent, info, *args, **kwargs)
except GraphQLError:
# Re-raise authorization errors
raise
except Exception as e:
logger.error(f"[decorators] Unexpected error in login_accepted: {e}")
msg = "Internal server error"
raise GraphQLError(msg) from e
return wrap
def editor_or_admin_required(func: Callable) -> Callable:
"""
Decorator that checks that the user has the 'editor' or 'admin' role.
Args:
func: resolver function to decorate
Returns:
Callable: wrapped function
"""
@wraps(func)
async def wrap(parent: Any, info: GraphQLResolveInfo, *args: Any, **kwargs: Any) -> Any:
try:
# First, check authorization
await validate_graphql_context(info)
# Get the user information
request = info.context.get("request")
author_id = None
# Try to get author_id from different sources
if hasattr(request, "auth") and request.auth and hasattr(request.auth, "author_id"):
author_id = request.auth.author_id
elif hasattr(request, "scope") and "auth" in request.scope:
auth_info = request.scope.get("auth", {})
if isinstance(auth_info, dict):
author_id = auth_info.get("author_id")
elif hasattr(auth_info, "author_id"):
author_id = auth_info.author_id
if not author_id:
logger.warning("[decorators] Could not get author_id for the role check")
raise GraphQLError("Authorization error: could not identify the user")
# Check the user's roles
with local_session() as session:
author = session.query(Author).filter(Author.id == author_id).first()
if not author:
logger.warning(f"[decorators] Author with ID {author_id} not found")
raise GraphQLError("User not found")
# Check for an admin email
if author.email in ADMIN_EMAILS:
logger.debug(f"[decorators] User {author.email} is an admin by email")
return await func(parent, info, *args, **kwargs)
# Get the user's role list
ca = session.query(CommunityAuthor).filter_by(author_id=author.id, community_id=1).first()
if ca:
user_roles = ca.role_list
else:
user_roles = []
logger.debug(f"[decorators] Roles of user {author_id}: {user_roles}")
# Check for the admin or editor role
if "admin" in user_roles or "editor" in user_roles:
logger.debug(f"[decorators] User {author_id} has permission (roles: {user_roles})")
return await func(parent, info, *args, **kwargs)
# No required roles
logger.warning(f"[decorators] User {author_id} denied access. Roles: {user_roles}")
raise GraphQLError("Access denied. Editor or administrator role required.")
except GraphQLError:
# Re-raise authorization errors
raise
except Exception as e:
logger.error(f"[decorators] Unexpected error in editor_or_admin_required: {e}")
raise GraphQLError("Internal server error") from e
return wrap
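login_accepted and editor_or_admin_required wrap as conventional decorators. A minimal usage sketch (resolver names and return values are illustrative):

@login_accepted
async def my_profile(parent, info, **kwargs):
return {"ok": True}

@editor_or_admin_required
async def delete_topic(parent, info, topic_id: int, **kwargs):
return {"deleted": topic_id}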


@@ -1,3 +1,5 @@
+from typing import Any
import requests
from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
@@ -7,9 +9,9 @@ noreply = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN or "discours.io")
lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"}
-async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
+async def send_auth_email(user: Any, token: str, lang: str = "ru", template: str = "email_confirmation") -> None:
try:
to = "%s <%s>" % (user.name, user.email)
to = f"{user.name} <{user.email}>"
if lang not in ["ru", "en"]:
lang = "ru"
subject = lang_subject.get(lang, lang_subject["en"])
@@ -19,12 +21,12 @@ async def send_auth_email(user, token, lang="ru", template="email_confirmation")
"to": to,
"subject": subject,
"template": template,
"h:X-Mailgun-Variables": '{ "token": "%s" }' % token,
"h:X-Mailgun-Variables": f'{{ "token": "{token}" }}',
}
print("[auth.email] payload: %r" % payload)
print(f"[auth.email] payload: {payload!r}")
# debug
# print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
-response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload)
+response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload, timeout=30)
response.raise_for_status()
except Exception as e:
print(e)
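A minimal usage sketch of the updated function (the user object is assumed to expose .name and .email, and the call must happen in an async context):

await send_auth_email(user, token="abc123", lang="ru")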

auth/handler.py (new file, 56 lines)

@@ -0,0 +1,56 @@
from ariadne.asgi.handlers import GraphQLHTTPHandler
from starlette.requests import Request
from starlette.responses import JSONResponse
from auth.middleware import auth_middleware
from utils.logger import root_logger as logger
class EnhancedGraphQLHTTPHandler(GraphQLHTTPHandler):
"""
Enhanced GraphQL HTTP handler with cookie and authorization support.
Extends the standard GraphQLHTTPHandler to:
1. Build an extended request context with authorization data
2. Correctly handle responses with cookies and headers
3. Integrate with AuthMiddleware
"""
async def get_context_for_request(self, request: Request, data: dict) -> dict:
"""
Extends the context for GraphQL requests.
Adds to the standard context:
- A response object for setting cookies
- Integration with AuthMiddleware
- Extensions for managing authorization
Args:
request: Starlette Request object
data: request data
Returns:
dict: context with extra data for authorization and cookies
"""
# Get the standard context from the base class
context = await super().get_context_for_request(request, data)
# Create a response object for setting cookies
response = JSONResponse({})
context["response"] = response
# Integrate with AuthMiddleware
auth_middleware.set_context(context)
context["extensions"] = auth_middleware
# Add authorization data only if it is available
# Check for authorization data in the scope
if hasattr(request, "scope") and isinstance(request.scope, dict) and "auth" in request.scope:
auth_cred = request.scope.get("auth")
context["auth"] = auth_cred
# Safely log the type of the auth object
logger.debug(f"[graphql] Added authorization data to the context from scope: {type(auth_cred).__name__}")
logger.debug("[graphql] Prepared extended context for the request")
return context
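A minimal sketch of wiring this handler into an Ariadne ASGI application (the executable schema is assumed to be built elsewhere in the project):

from ariadne.asgi import GraphQL

graphql_app = GraphQL(schema, http_handler=EnhancedGraphQLHTTPHandler())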


@@ -1,15 +1,20 @@
from binascii import hexlify
from hashlib import sha256
from typing import TYPE_CHECKING, Any, TypeVar
from jwt import DecodeError, ExpiredSignatureError
from passlib.hash import bcrypt
from auth.exceptions import ExpiredToken, InvalidPassword, InvalidToken
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from services.db import local_session
from services.redis import redis
from utils.logger import root_logger as logger
# from base.exceptions import InvalidPassword, InvalidToken
from base.orm import local_session
from orm import User
# Для типизации
if TYPE_CHECKING:
from auth.orm import Author
AuthorType = TypeVar("AuthorType", bound="Author")
class Password:
@@ -24,16 +29,25 @@ class Password:
@staticmethod
def encode(password: str) -> str:
"""
Encodes the user's password
Args:
password (str): User password
Returns:
str: Encoded password
"""
password_sha256 = Password._get_sha256(password)
return bcrypt.using(rounds=10).hash(password_sha256)
@staticmethod
def verify(password: str, hashed: str) -> bool:
"""
r"""
Verify that password hash is equal to specified hash. Hash format:
$2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
\__/\/ \____________________/\_____________________________/ # noqa: W605
\__/\/ \____________________/\_____________________________/
| | Salt Hash
| Cost
Version
@@ -52,46 +66,102 @@ class Password:
class Identity:
@staticmethod
def password(orm_user: User, password: str) -> User:
user = User(**orm_user.dict())
if not user.password:
# raise InvalidPassword("User password is empty")
return {"error": "User password is empty"}
if not Password.verify(password, user.password):
# raise InvalidPassword("Wrong user password")
return {"error": "Wrong user password"}
return user
def password(orm_author: AuthorType, password: str) -> AuthorType:
"""
Verifies the user's password
Args:
orm_author (Author): Author object
password (str): User password
Returns:
Author: The author object on successful verification
Raises:
InvalidPassword: If the password does not match the hash or is missing
"""
# Import inside the function to avoid circular imports
from utils.logger import root_logger as logger
# Check the password on the original orm_author object
if not orm_author.password:
logger.warning(f"[auth.identity] Password is empty on the author object: email={orm_author.email}")
msg = "No password is set for this user"
raise InvalidPassword(msg)
# Verify the password directly, without using dict()
password_hash = str(orm_author.password) if orm_author.password else ""
if not password_hash or not Password.verify(password, password_hash):
logger.warning(f"[auth.identity] Wrong password for {orm_author.email}")
msg = "Wrong user password"
raise InvalidPassword(msg)
# Return the original object to preserve all relations
return orm_author
@staticmethod
def oauth(inp) -> User:
def oauth(inp: dict[str, Any]) -> Any:
"""
Creates a new OAuth user if one does not exist
Args:
inp (dict): OAuth user data
Returns:
Author: User object
"""
# Import inside the function to avoid circular imports
from auth.orm import Author
with local_session() as session:
user = session.query(User).filter(User.email == inp["email"]).first()
if not user:
user = User.create(**inp, emailConfirmed=True)
author = session.query(Author).filter(Author.email == inp["email"]).first()
if not author:
author = Author(**inp)
author.email_verified = True # type: ignore[assignment]
session.add(author)
session.commit()
return user
return author
@staticmethod
async def onetime(token: str) -> User:
async def onetime(token: str) -> Any:
"""
Verifies a one-time token
Args:
token (str): One-time token
Returns:
Author: User object
"""
# Import inside the function to avoid circular imports
from auth.orm import Author
try:
print("[auth.identity] using one time token")
payload = JWTCodec.decode(token)
if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
# raise InvalidToken("Login token has expired, please login again")
return {"error": "Token has expired"}
except ExpiredSignatureError:
if payload is None:
logger.warning("[Identity.token] Invalid token (payload is None)")
return {"error": "Invalid token"}
# Check that the token exists in storage
token_key = f"{payload.user_id}-{payload.username}-{token}"
if not await redis.exists(token_key):
logger.warning(f"[Identity.token] Token not found in storage: {token_key}")
return {"error": "Token not found"}
# If all checks pass, look the author up in the database
with local_session() as session:
author = session.query(Author).filter_by(id=payload.user_id).first()
if not author:
logger.warning(f"[Identity.token] Author with ID {payload.user_id} not found")
return {"error": "User not found"}
logger.info(f"[Identity.token] Token is valid for author {author.id}")
return author
except ExpiredToken:
# raise InvalidToken("Login token has expired, please try again")
return {"error": "Token has expired"}
except DecodeError:
except InvalidToken:
# raise InvalidToken("token format error") from e
return {"error": "Token format error"}
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
# raise Exception("user not exist")
return {"error": "User does not exist"}
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
return user

auth/internal.py (new file, 147 lines)

@@ -0,0 +1,147 @@
"""
Utility functions for internal authentication
Used in GraphQL resolvers and decorators
"""
import time
from typing import Optional
from sqlalchemy.orm import exc
from auth.orm import Author
from auth.state import AuthState
from auth.tokens.storage import TokenStorage as TokenManager
from services.db import local_session
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
async def verify_internal_auth(token: str) -> tuple[int, list, bool]:
"""
Verifies local authorization.
Returns the user_id, the list of roles, and an admin flag.
Args:
token: Authorization token (with or without the Bearer prefix)
Returns:
tuple: (user_id, roles, is_admin)
"""
logger.debug(f"[verify_internal_auth] Verifying token: {token[:10]}...")
# Handle the "Bearer <token>" format (if the token was not processed earlier)
if token and token.startswith("Bearer "):
token = token.replace("Bearer ", "", 1).strip()
# Verify the session
payload = await TokenManager.verify_session(token)
if not payload:
logger.warning("[verify_internal_auth] Invalid token: no payload received")
return 0, [], False
logger.debug(f"[verify_internal_auth] Token is valid, user_id={payload.user_id}")
with local_session() as session:
try:
author = session.query(Author).filter(Author.id == payload.user_id).one()
# Get the roles
from orm.community import CommunityAuthor
ca = session.query(CommunityAuthor).filter_by(author_id=author.id, community_id=1).first()
if ca:
roles = ca.role_list
else:
roles = []
logger.debug(f"[verify_internal_auth] User roles: {roles}")
# Determine whether the user is an administrator
is_admin = any(role in ["admin", "super"] for role in roles) or author.email in ADMIN_EMAILS
logger.debug(
f"[verify_internal_auth] User {author.id} {'is' if is_admin else 'is not'} an administrator"
)
return int(author.id), roles, is_admin
except exc.NoResultFound:
logger.warning(f"[verify_internal_auth] User with ID {payload.user_id} not found in the DB or not active")
return 0, [], False
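A minimal usage sketch from inside an async resolver (the token is assumed to come from the request, and GraphQLError to be imported where the sketch is used):

user_id, roles, is_admin = await verify_internal_auth(token)
if not user_id:
raise GraphQLError("Unauthorized")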
async def create_internal_session(author: Author, device_info: Optional[dict] = None) -> str:
"""
Creates a new session for an author
Args:
author: Author object
device_info: Device information (optional)
Returns:
str: Session token
"""
# Reset the failed-login counter
author.reset_failed_login()
# Update last_seen
author.last_seen = int(time.time())  # type: ignore[assignment]
# Create the session, using the token for identification
return await TokenManager.create_session(
user_id=str(author.id),
username=str(author.slug or author.email or author.phone or ""),
device_info=device_info,
)
async def authenticate(request) -> AuthState:
"""
Authenticates the user via the token from the request.
Args:
request: Request object
Returns:
AuthState: Authentication state
"""
from auth.decorators import get_auth_token
from utils.logger import root_logger as logger
logger.debug("[authenticate] Starting authentication")
# Create the AuthState object
auth_state = AuthState()
auth_state.logged_in = False
auth_state.author_id = None
auth_state.error = None
auth_state.token = None
# Get the token from the request
token = get_auth_token(request)
if not token:
logger.warning("[authenticate] No token found in the request")
auth_state.error = "No authentication token provided"
return auth_state
logger.debug(f"[authenticate] Token found, length: {len(token)}")
# Verify the token
try:
# Use TokenManager instead of creating a SessionTokenManager directly
auth_result = await TokenManager.verify_session(token)
if auth_result and hasattr(auth_result, "user_id") and auth_result.user_id:
logger.debug(f"[authenticate] Authentication successful, user_id: {auth_result.user_id}")
auth_state.logged_in = True
auth_state.author_id = auth_result.user_id
auth_state.token = token
return auth_state
error_msg = "Invalid or expired token"
logger.warning(f"[authenticate] Invalid token: {error_msg}")
auth_state.error = error_msg
return auth_state
except Exception as e:
logger.error(f"[authenticate] Error verifying the token: {e}")
auth_state.error = f"Authentication error: {e!s}"
return auth_state
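Since authenticate() is self-contained, a plain Starlette endpoint can call it directly. A minimal sketch (the endpoint name is illustrative and JSONResponse is assumed to be imported):

async def whoami(request):
state = await authenticate(request)
if not state.logged_in:
return JSONResponse({"error": state.error}, status_code=401)
return JSONResponse({"author_id": state.author_id})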


@@ -1,31 +1,79 @@
from datetime import datetime, timezone
from datetime import datetime, timedelta, timezone
from typing import Any, Optional, Union
import jwt
from pydantic import BaseModel
from base.exceptions import ExpiredToken, InvalidToken
from settings import JWT_ALGORITHM, JWT_SECRET_KEY
from validations.auth import AuthInput, TokenPayload
from utils.logger import root_logger as logger
class TokenPayload(BaseModel):
user_id: str
username: str
exp: Optional[datetime] = None
iat: datetime
iss: str
class JWTCodec:
@staticmethod
def encode(user: AuthInput, exp: datetime) -> str:
def encode(user: Union[dict[str, Any], Any], exp: Optional[datetime] = None) -> str:
# Support both objects and dicts
if isinstance(user, dict):
# TokenStorage.create_session passes a dict {"user_id": user_id, "username": username}
user_id = str(user.get("user_id", "") or user.get("id", ""))
username = user.get("username", "") or user.get("email", "")
else:
# For objects with attributes
user_id = str(getattr(user, "id", ""))
username = getattr(user, "slug", "") or getattr(user, "email", "") or getattr(user, "phone", "") or ""
logger.debug(f"[JWTCodec.encode] Encoding token for user_id={user_id}, username={username}")
# If no expiration is given, default the token lifetime to 30 days
if exp is None:
exp = datetime.now(tz=timezone.utc) + timedelta(days=30)
logger.debug(f"[JWTCodec.encode] No expiration given, defaulting to: {exp}")
# Important: make sure exp is always either a datetime or an integer timestamp
if isinstance(exp, datetime):
# Convert the datetime to a timestamp to guarantee the correct format
exp_timestamp = int(exp.timestamp())
else:
# If something else was passed, fall back to the default
logger.warning(f"[JWTCodec.encode] Invalid exp format: {exp}, using the default")
exp_timestamp = int((datetime.now(tz=timezone.utc) + timedelta(days=30)).timestamp())
payload = {
"user_id": user.id,
"username": user.email or user.phone,
"exp": exp,
"user_id": user_id,
"username": username,
"exp": exp_timestamp, # Используем timestamp вместо datetime
"iat": datetime.now(tz=timezone.utc),
"iss": "discours",
}
logger.debug(f"[JWTCodec.encode] Сформирован payload: {payload}")
try:
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
token = jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
logger.debug(f"[JWTCodec.encode] Токен успешно создан, длина: {len(token) if token else 0}")
# Ensure we always return str, not bytes
if isinstance(token, bytes):
return token.decode("utf-8")
return str(token)
except Exception as e:
print("[auth.jwtcodec] JWT encode error %r" % e)
logger.error(f"[JWTCodec.encode] Ошибка при кодировании JWT: {e}")
raise
@staticmethod
def decode(token: str, verify_exp: bool = True) -> TokenPayload:
r = None
payload = None
def decode(token: str, verify_exp: bool = True) -> Optional[TokenPayload]:
logger.debug(f"[JWTCodec.decode] Начало декодирования токена длиной {len(token) if token else 0}")
if not token:
logger.error("[JWTCodec.decode] Пустой токен")
return None
try:
payload = jwt.decode(
token,
@@ -37,16 +85,39 @@ class JWTCodec:
algorithms=[JWT_ALGORITHM],
issuer="discours",
)
r = TokenPayload(**payload)
# print('[auth.jwtcodec] debug token %r' % r)
return r
logger.debug(f"[JWTCodec.decode] Декодирован payload: {payload}")
# Убедимся, что exp существует (добавим обработку если exp отсутствует)
if "exp" not in payload:
logger.warning("[JWTCodec.decode] В токене отсутствует поле exp")
# Добавим exp по умолчанию, чтобы избежать ошибки при создании TokenPayload
payload["exp"] = int((datetime.now(tz=timezone.utc) + timedelta(days=30)).timestamp())
try:
r = TokenPayload(**payload)
logger.debug(
f"[JWTCodec.decode] Created TokenPayload: user_id={r.user_id}, username={r.username}"
)
return r
except Exception as e:
logger.error(f"[JWTCodec.decode] Error creating TokenPayload: {e}")
return None
except jwt.InvalidIssuedAtError:
print("[auth.jwtcodec] invalid issued at: %r" % payload)
raise ExpiredToken("check token issued time")
logger.error("[JWTCodec.decode] Недействительное время выпуска токена")
return None
except jwt.ExpiredSignatureError:
print("[auth.jwtcodec] expired signature %r" % payload)
raise ExpiredToken("check token lifetime")
except jwt.InvalidTokenError:
raise InvalidToken("token is not valid")
logger.error("[JWTCodec.decode] Истек срок действия токена")
return None
except jwt.InvalidSignatureError:
raise InvalidToken("token is not valid")
logger.error("[JWTCodec.decode] Недействительная подпись токена")
return None
except jwt.InvalidTokenError:
logger.error("[JWTCodec.decode] Недействительный токен")
return None
except jwt.InvalidKeyError:
logger.error("[JWTCodec.decode] Недействительный ключ")
return None
except Exception as e:
logger.error(f"[JWTCodec.decode] Неожиданная ошибка при декодировании: {e}")
return None
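A minimal round-trip sketch of the codec (the dict shape follows the encode() branch above; values are illustrative):

token = JWTCodec.encode({"user_id": "42", "username": "reader@discours.io"})
payload = JWTCodec.decode(token)
if payload is not None:
print(payload.user_id, payload.username)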

auth/middleware.py (new file, 439 lines)

@@ -0,0 +1,439 @@
"""
Unified middleware for handling authorization in GraphQL requests
"""
import time
from collections.abc import Awaitable, MutableMapping
from typing import Any, Callable, Optional
from graphql import GraphQLResolveInfo
from sqlalchemy.orm import exc
from starlette.authentication import UnauthenticatedUser
from starlette.datastructures import Headers
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.types import ASGIApp
from auth.credentials import AuthCredentials
from auth.orm import Author
from auth.tokens.storage import TokenStorage as TokenManager
from orm.community import CommunityAuthor
from services.db import local_session
from settings import (
ADMIN_EMAILS as ADMIN_EMAILS_LIST,
)
from settings import (
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_MAX_AGE,
SESSION_COOKIE_NAME,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_SECURE,
SESSION_TOKEN_HEADER,
)
from utils.logger import root_logger as logger
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
class AuthenticatedUser:
"""Аутентифицированный пользователь"""
def __init__(
self,
user_id: str,
username: str = "",
roles: Optional[list] = None,
permissions: Optional[dict] = None,
token: Optional[str] = None,
) -> None:
self.user_id = user_id
self.username = username
self.roles = roles or []
self.permissions = permissions or {}
self.token = token
@property
def is_authenticated(self) -> bool:
return True
@property
def display_name(self) -> str:
return self.username
@property
def identity(self) -> str:
return self.user_id
class AuthMiddleware:
"""
Unified middleware for authorization and authentication.
Main functions:
1. Extract the Bearer token from the Authorization header or a cookie
2. Verify the session via TokenStorage
3. Populate request.user and request.auth
4. Provide methods for setting/removing cookies
"""
def __init__(self, app: ASGIApp) -> None:
self.app = app
self._context = None
async def authenticate_user(self, token: str) -> tuple[AuthCredentials, AuthenticatedUser | UnauthenticatedUser]:
"""Аутентифицирует пользователя по токену"""
if not token:
logger.debug("[auth.authenticate] Токен отсутствует")
return AuthCredentials(
author_id=None, scopes={}, logged_in=False, error_message="no token", email=None, token=None
), UnauthenticatedUser()
# Проверяем сессию в Redis
try:
payload = await TokenManager.verify_session(token)
if not payload:
logger.debug("[auth.authenticate] Недействительный токен или сессия не найдена")
return AuthCredentials(
author_id=None,
scopes={},
logged_in=False,
error_message="Invalid token or session",
email=None,
token=None,
), UnauthenticatedUser()
with local_session() as session:
try:
author = session.query(Author).filter(Author.id == payload.user_id).one()
if author.is_locked():
logger.debug(f"[auth.authenticate] Аккаунт заблокирован: {author.id}")
return AuthCredentials(
author_id=None,
scopes={},
logged_in=False,
error_message="Account is locked",
email=None,
token=None,
), UnauthenticatedUser()
# Create an empty permissions dict
# Permissions are checked on demand through the RBAC system
scopes: dict[str, Any] = {}
# Get the user's roles
ca = session.query(CommunityAuthor).filter_by(author_id=author.id, community_id=1).first()
if ca:
roles = ca.role_list
else:
roles = []
# Update last_seen
author.last_seen = int(time.time())
session.commit()
# Create the authorization objects, preserving the token
credentials = AuthCredentials(
author_id=author.id,
scopes=scopes,
logged_in=True,
error_message="",
email=author.email,
token=token,
)
user = AuthenticatedUser(
user_id=str(author.id),
username=author.slug or author.email or "",
roles=roles,
permissions=scopes,
token=token,
)
logger.debug(f"[auth.authenticate] Успешная аутентификация: {author.email}")
return credentials, user
except exc.NoResultFound:
logger.debug("[auth.authenticate] Пользователь не найден в базе данных")
return AuthCredentials(
author_id=None,
scopes={},
logged_in=False,
error_message="User not found",
email=None,
token=None,
), UnauthenticatedUser()
except Exception as e:
logger.error(f"[auth.authenticate] Ошибка при работе с базой данных: {e}")
return AuthCredentials(
author_id=None, scopes={}, logged_in=False, error_message=str(e), email=None, token=None
), UnauthenticatedUser()
except Exception as e:
logger.error(f"[auth.authenticate] Ошибка при проверке сессии: {e}")
return AuthCredentials(
author_id=None, scopes={}, logged_in=False, error_message=str(e), email=None, token=None
), UnauthenticatedUser()
async def __call__(
self,
scope: MutableMapping[str, Any],
receive: Callable[[], Awaitable[MutableMapping[str, Any]]],
send: Callable[[MutableMapping[str, Any]], Awaitable[None]],
) -> None:
"""Обработка ASGI запроса"""
if scope["type"] != "http":
await self.app(scope, receive, send)
return
# Extract the headers
headers = Headers(scope=scope)
token = None
# First, try to get the token from the authorization header
auth_header = headers.get(SESSION_TOKEN_HEADER)
if auth_header:
if auth_header.startswith("Bearer "):
token = auth_header.replace("Bearer ", "", 1).strip()
logger.debug(
f"[middleware] Extracted Bearer token from the {SESSION_TOKEN_HEADER} header, length: {len(token) if token else 0}"
)
else:
# If the header does not start with Bearer, assume it is a bare token
token = auth_header.strip()
logger.debug(
f"[middleware] Extracted raw token from the {SESSION_TOKEN_HEADER} header, length: {len(token) if token else 0}"
)
# If no token came from the main header and it is not Authorization, check the Authorization header
if not token and SESSION_TOKEN_HEADER.lower() != "authorization":
auth_header = headers.get("Authorization")
if auth_header and auth_header.startswith("Bearer "):
token = auth_header.replace("Bearer ", "", 1).strip()
logger.debug(
f"[middleware] Extracted Bearer token from the Authorization header, length: {len(token) if token else 0}"
)
# If no token came from a header, try the cookie
if not token:
cookies = headers.get("cookie", "")
cookie_items = cookies.split(";")
for item in cookie_items:
if "=" in item:
name, value = item.split("=", 1)
if name.strip() == SESSION_COOKIE_NAME:
token = value.strip()
logger.debug(
f"[middleware] Extracted token from the {SESSION_COOKIE_NAME} cookie, length: {len(token) if token else 0}"
)
break
# Authenticate the user
auth, user = await self.authenticate_user(token or "")
# Add the authorization data and the user to the scope
scope["auth"] = auth
scope["user"] = user
if token:
# Update the headers in the scope for compatibility
new_headers: list[tuple[bytes, bytes]] = []
for name, value in scope["headers"]:
header_name = name.decode("latin1") if isinstance(name, bytes) else str(name)
if header_name.lower() != SESSION_TOKEN_HEADER.lower():
# Ensure both name and value are bytes
name_bytes = name if isinstance(name, bytes) else str(name).encode("latin1")
value_bytes = value if isinstance(value, bytes) else str(value).encode("latin1")
new_headers.append((name_bytes, value_bytes))
new_headers.append((SESSION_TOKEN_HEADER.encode("latin1"), token.encode("latin1")))
scope["headers"] = new_headers
logger.debug(f"[middleware] Пользователь аутентифицирован: {user.is_authenticated}")
else:
logger.debug("[middleware] Токен не найден, пользователь неаутентифицирован")
await self.app(scope, receive, send)
def set_context(self, context) -> None:
"""Сохраняет ссылку на контекст GraphQL запроса"""
self._context = context
logger.debug(f"[middleware] Установлен контекст GraphQL: {bool(context)}")
def set_cookie(self, key: str, value: str, **options: Any) -> None:
"""
Sets a cookie on the response
Args:
key: Cookie name
value: Cookie value
**options: Extra parameters (httponly, secure, max_age, etc.)
"""
success = False
# Option 1: via the context's response
if self._context and "response" in self._context and hasattr(self._context["response"], "set_cookie"):
try:
self._context["response"].set_cookie(key, value, **options)
logger.debug(f"[middleware] Установлена cookie {key} через response")
success = True
except Exception as e:
logger.error(f"[middleware] Ошибка при установке cookie {key} через response: {e!s}")
# Способ 2: Через собственный response в контексте
if not success and hasattr(self, "_response") and self._response and hasattr(self._response, "set_cookie"):
try:
self._response.set_cookie(key, value, **options)
logger.debug(f"[middleware] Установлена cookie {key} через _response")
success = True
except Exception as e:
logger.error(f"[middleware] Ошибка при установке cookie {key} через _response: {e!s}")
if not success:
logger.error(f"[middleware] Не удалось установить cookie {key}: объекты response недоступны")
def delete_cookie(self, key: str, **options: Any) -> None:
"""
Deletes a cookie from the response
"""
success = False
# Option 1: via the context's response
if self._context and "response" in self._context and hasattr(self._context["response"], "delete_cookie"):
try:
self._context["response"].delete_cookie(key, **options)
logger.debug(f"[middleware] Удалена cookie {key} через response")
success = True
except Exception as e:
logger.error(f"[middleware] Ошибка при удалении cookie {key} через response: {e!s}")
# Способ 2: Через собственный response в контексте
if not success and hasattr(self, "_response") and self._response and hasattr(self._response, "delete_cookie"):
try:
self._response.delete_cookie(key, **options)
logger.debug(f"[middleware] Удалена cookie {key} через _response")
success = True
except Exception as e:
logger.error(f"[middleware] Ошибка при удалении cookie {key} через _response: {e!s}")
if not success:
logger.error(f"[middleware] Не удалось удалить cookie {key}: объекты response недоступны")
async def resolve(
self, next_resolver: Callable[..., Any], root: Any, info: GraphQLResolveInfo, *args: Any, **kwargs: Any
) -> Any:
"""
Middleware for handling GraphQL requests.
Adds cookie-handling methods to the context.
"""
try:
# Access the request context
context = info.context
# Store a reference to the context
self.set_context(context)
# Add ourselves as the object holding the utility methods
context["extensions"] = self
# Make sure the context has a response object
if "response" not in context or not context["response"]:
from starlette.responses import JSONResponse
context["response"] = JSONResponse({})
logger.debug("[middleware] Created a new response object in the GraphQL context")
logger.debug("[middleware] GraphQL resolve: context prepared, cookie extensions added")
return await next_resolver(root, info, *args, **kwargs)
except Exception as e:
logger.error(f"[AuthMiddleware] Ошибка в GraphQL resolve: {e!s}")
raise
async def process_result(self, request: Request, result: Any) -> Response:
"""
Processes a GraphQL request result, with support for setting cookies
Args:
request: Starlette Request object
result: GraphQL request result (dict or Response)
Returns:
Response: HTTP response with the result and cookies (if needed)
"""
# Check whether result is already a Response object
if isinstance(result, Response):
response = result
# Try to extract data from the response to detect login/logout
result_data = {}
if isinstance(result, JSONResponse):
try:
import json
body_content = result.body
if isinstance(body_content, (bytes, memoryview)):
body_text = bytes(body_content).decode("utf-8")
result_data = json.loads(body_text)
else:
result_data = json.loads(str(body_content))
except Exception as e:
logger.error(f"[process_result] Не удалось извлечь данные из JSONResponse: {e!s}")
else:
response = JSONResponse(result)
result_data = result
# Check whether a token appeared in the request or the response
if request.method == "POST":
try:
data = await request.json()
op_name = data.get("operationName", "").lower()
# If this is a login or token-refresh operation and the response carries a token
if op_name in ["login", "refreshtoken"]:
token = None
# Try to extract the token from the response data
if result_data and isinstance(result_data, dict):
data_obj = result_data.get("data", {})
if isinstance(data_obj, dict) and op_name in data_obj:
op_result = data_obj.get(op_name, {})
if isinstance(op_result, dict) and "token" in op_result:
token = op_result.get("token")
if token:
# Set a cookie carrying the token
response.set_cookie(
key=SESSION_COOKIE_NAME,
value=token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
logger.debug(
f"[graphql_handler] Set cookie {SESSION_COOKIE_NAME} for operation {op_name}"
)
# If this is a logout operation, delete the cookie
elif op_name == "logout":
response.delete_cookie(
key=SESSION_COOKIE_NAME,
secure=SESSION_COOKIE_SECURE,
httponly=SESSION_COOKIE_HTTPONLY,
samesite=SESSION_COOKIE_SAMESITE,
)
logger.debug(f"[graphql_handler] Удалена cookie {SESSION_COOKIE_NAME} для операции {op_name}")
except Exception as e:
logger.error(f"[process_result] Ошибка при обработке POST запроса: {e!s}")
return response
# Create the single AuthMiddleware instance used with GraphQL
async def _dummy_app(
scope: MutableMapping[str, Any],
receive: Callable[[], Awaitable[MutableMapping[str, Any]]],
send: Callable[[MutableMapping[str, Any]], Awaitable[None]],
) -> None:
"""Dummy ASGI app for middleware initialization"""
auth_middleware = AuthMiddleware(_dummy_app)
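A minimal sketch of mounting the middleware on a Starlette application (app composition is assumed to live elsewhere in the project):

from starlette.applications import Starlette

app = Starlette()
app.add_middleware(AuthMiddleware)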


@@ -1,98 +1,618 @@
from authlib.integrations.starlette_client import OAuth
from starlette.responses import RedirectResponse
import time
from secrets import token_urlsafe
from typing import Any, Callable, Optional
import orjson
from authlib.integrations.starlette_client import OAuth
from authlib.oauth2.rfc7636 import create_s256_code_challenge
from graphql import GraphQLResolveInfo
from sqlalchemy.orm import Session
from starlette.requests import Request
from starlette.responses import JSONResponse, RedirectResponse
from auth.orm import Author
from auth.tokens.storage import TokenStorage
from services.db import local_session
from services.redis import redis
from settings import (
FRONTEND_URL,
OAUTH_CLIENTS,
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_MAX_AGE,
SESSION_COOKIE_NAME,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_SECURE,
)
from utils.generate_slug import generate_unique_slug
from utils.logger import root_logger as logger
# Type for session dependency injection
SessionFactory = Callable[[], Session]
class SessionManager:
"""Session manager for dependency injection, with testing support"""
def __init__(self) -> None:
self._factory: SessionFactory = local_session
def set_factory(self, factory: SessionFactory) -> None:
"""Sets the session factory for dependency injection"""
self._factory = factory
def get_session(self) -> Session:
"""Gets a DB session via dependency injection"""
return self._factory()
# Global session manager
session_manager = SessionManager()
def set_session_factory(factory: SessionFactory) -> None:
"""
Sets the session factory for dependency injection.
Used in tests to swap the real DB for a test one.
"""
session_manager.set_factory(factory)
def get_session() -> Session:
"""
Gets a DB session via dependency injection.
Returns a session that must be explicitly closed after use.
Warning: do not forget to close the session when done!
A try/finally block is recommended.
"""
return session_manager.get_session()
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
from settings import FRONTEND_URL, OAUTH_CLIENTS
oauth = OAuth()
oauth.register(
name="facebook",
client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
access_token_params=None,
authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
authorize_params=None,
api_base_url="https://graph.facebook.com/",
client_kwargs={"scope": "public_profile email"},
)
# OAuth state management via Redis (10-minute TTL)
OAUTH_STATE_TTL = 600  # 10 minutes
oauth.register(
name="github",
client_id=OAUTH_CLIENTS["GITHUB"]["id"],
client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
access_token_url="https://github.com/login/oauth/access_token",
access_token_params=None,
authorize_url="https://github.com/login/oauth/authorize",
authorize_params=None,
api_base_url="https://api.github.com/",
client_kwargs={"scope": "user:email"},
)
# Provider configuration for registration
PROVIDER_CONFIGS = {
"google": {
"server_metadata_url": "https://accounts.google.com/.well-known/openid-configuration",
},
"github": {
"access_token_url": "https://github.com/login/oauth/access_token",
"authorize_url": "https://github.com/login/oauth/authorize",
"api_base_url": "https://api.github.com/",
},
"facebook": {
"access_token_url": "https://graph.facebook.com/v13.0/oauth/access_token",
"authorize_url": "https://www.facebook.com/v13.0/dialog/oauth",
"api_base_url": "https://graph.facebook.com/",
},
"x": {
"access_token_url": "https://api.twitter.com/2/oauth2/token",
"authorize_url": "https://twitter.com/i/oauth2/authorize",
"api_base_url": "https://api.twitter.com/2/",
},
"telegram": {
"authorize_url": "https://oauth.telegram.org/auth",
"api_base_url": "https://api.telegram.org/",
},
"vk": {
"access_token_url": "https://oauth.vk.com/access_token",
"authorize_url": "https://oauth.vk.com/authorize",
"api_base_url": "https://api.vk.com/method/",
},
"yandex": {
"access_token_url": "https://oauth.yandex.ru/token",
"authorize_url": "https://oauth.yandex.ru/authorize",
"api_base_url": "https://login.yandex.ru/info",
},
}
oauth.register(
name="google",
client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={"scope": "openid email profile"},
authorize_state="test",
)
# Constants for generating a temporary email
TEMP_EMAIL_SUFFIX = "@oauth.local"
async def google_profile(client, request, token):
userinfo = token["userinfo"]
profile = {"name": userinfo["name"], "email": userinfo["email"], "id": userinfo["sub"]}
if userinfo["picture"]:
userpic = userinfo["picture"].replace("=s96", "=s600")
profile["userpic"] = userpic
return profile
def _generate_temp_email(provider: str, user_id: str) -> str:
"""Генерирует временный email для OAuth провайдеров без email"""
return f"{provider}_{user_id}@oauth.local"
async def facebook_profile(client, request, token):
profile = await client.get("me?fields=name,id,email", token=token)
return profile.json()
def _register_oauth_provider(provider: str, client_config: dict) -> None:
"""Регистрирует OAuth провайдер в зависимости от его типа"""
try:
provider_config = PROVIDER_CONFIGS.get(provider, {})
if not provider_config:
logger.warning(f"Unknown OAuth provider: {provider}")
return
# Базовые параметры для всех провайдеров
register_params = {
"name": provider,
"client_id": client_config["id"],
"client_secret": client_config["key"],
**provider_config,
}
oauth.register(**register_params)
logger.info(f"OAuth provider {provider} registered successfully")
except Exception as e:
logger.error(f"Failed to register OAuth provider {provider}: {e}")
async def github_profile(client, request, token):
profile = await client.get("user", token=token)
return profile.json()
for provider in PROVIDER_CONFIGS:
if provider in OAUTH_CLIENTS and OAUTH_CLIENTS[provider.upper()]:
client_config = OAUTH_CLIENTS[provider.upper()]
if "id" in client_config and "key" in client_config:
_register_oauth_provider(provider, client_config)
profile_callbacks = {
"google": google_profile,
"facebook": facebook_profile,
"github": github_profile,
# Providers with special data handling
PROVIDER_HANDLERS = {
"google": lambda token, _: {
"id": token.get("userinfo", {}).get("sub"),
"email": token.get("userinfo", {}).get("email"),
"name": token.get("userinfo", {}).get("name"),
"picture": token.get("userinfo", {}).get("picture", "").replace("=s96", "=s600"),
},
"telegram": lambda token, _: {
"id": str(token.get("id", "")),
"email": None,
"phone": str(token.get("phone_number", "")),
"name": token.get("first_name", "") + " " + token.get("last_name", ""),
"picture": token.get("photo_url"),
},
"x": lambda _, profile_data: {
"id": profile_data.get("data", {}).get("id"),
"email": None,
"name": profile_data.get("data", {}).get("name") or profile_data.get("data", {}).get("username"),
"picture": profile_data.get("data", {}).get("profile_image_url", "").replace("_normal", "_400x400"),
},
}
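For example, the google entry maps an OIDC token straight to the normalized profile shape. A sketch of calling it (the token payload is illustrative):

token = {"userinfo": {"sub": "123", "email": "a@b.c", "name": "A", "picture": "pic=s96"}}
profile = PROVIDER_HANDLERS["google"](token, None)
# profile == {"id": "123", "email": "a@b.c", "name": "A", "picture": "pic=s600"}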
async def oauth_login(request):
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
redirect_uri = "https://v2.discours.io/oauth-authorize"
return await client.authorize_redirect(request, redirect_uri)
async def oauth_authorize(request):
provider = request.session["provider"]
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
get_profile = profile_callbacks[provider]
profile = await get_profile(client, request, token)
user_oauth_info = "%s:%s" % (provider, profile["id"])
user_input = {
"oauth": user_oauth_info,
"email": profile["email"],
"username": profile["name"],
"userpic": profile["userpic"],
async def _fetch_github_profile(client: Any, token: Any) -> dict:
"""Получает профиль из GitHub API"""
profile = await client.get("user", token=token)
profile_data = profile.json()
emails = await client.get("user/emails", token=token)
emails_data = emails.json()
primary_email = next((email["email"] for email in emails_data if email["primary"]), None)
return {
"id": str(profile_data["id"]),
"email": primary_email or profile_data.get("email"),
"name": profile_data.get("name") or profile_data.get("login"),
"picture": profile_data.get("avatar_url"),
}
user = Identity.oauth(user_input)
session_token = await TokenStorage.create_session(user)
response = RedirectResponse(url=FRONTEND_URL + "/confirm")
response.set_cookie("token", session_token)
return response
async def _fetch_facebook_profile(client: Any, token: Any) -> dict:
"""Получает профиль из Facebook API"""
profile = await client.get("me?fields=id,name,email,picture.width(600)", token=token)
profile_data = profile.json()
return {
"id": profile_data["id"],
"email": profile_data.get("email"),
"name": profile_data.get("name"),
"picture": profile_data.get("picture", {}).get("data", {}).get("url"),
}
async def _fetch_x_profile(client: Any, token: Any) -> dict:
"""Получает профиль из X (Twitter) API"""
profile = await client.get("authors/me?user.fields=id,name,username,profile_image_url", token=token)
profile_data = profile.json()
return PROVIDER_HANDLERS["x"](token, profile_data)
async def _fetch_vk_profile(client: Any, token: Any) -> dict:
"""Получает профиль из VK API"""
profile = await client.get("authors.get?fields=photo_400_orig,contacts&v=5.131", token=token)
profile_data = profile.json()
if profile_data.get("response"):
user_data = profile_data["response"][0]
return {
"id": str(user_data["id"]),
"email": user_data.get("contacts", {}).get("email"),
"name": f"{user_data.get('first_name', '')} {user_data.get('last_name', '')}".strip(),
"picture": user_data.get("photo_400_orig"),
}
return {}
async def _fetch_yandex_profile(client: Any, token: Any) -> dict:
"""Получает профиль из Yandex API"""
profile = await client.get("?format=json", token=token)
profile_data = profile.json()
return {
"id": profile_data.get("id"),
"email": profile_data.get("default_email"),
"name": profile_data.get("display_name") or profile_data.get("real_name"),
"picture": f"https://avatars.yandex.net/get-yapic/{profile_data.get('default_avatar_id')}/islands-200"
if profile_data.get("default_avatar_id")
else None,
}
async def get_user_profile(provider: str, client: Any, token: Any) -> dict:
"""Получает профиль пользователя от провайдера OAuth"""
# Простые провайдеры с обработкой через lambda
if provider in PROVIDER_HANDLERS:
return PROVIDER_HANDLERS[provider](token, None)
# Провайдеры требующие API вызовов
profile_fetchers = {
"github": _fetch_github_profile,
"facebook": _fetch_facebook_profile,
"x": _fetch_x_profile,
"vk": _fetch_vk_profile,
"yandex": _fetch_yandex_profile,
}
if provider in profile_fetchers:
return await profile_fetchers[provider](client, token)
return {}
async def oauth_login(_: None, _info: GraphQLResolveInfo, provider: str, callback_data: dict[str, Any]) -> JSONResponse:
"""
OAuth authorization handling
Args:
provider: OAuth provider (google, github, etc.)
callback_data: Data from the callback
Returns:
dict: Authorization result with a token or an error
"""
if provider not in PROVIDER_CONFIGS:
return JSONResponse({"error": "Invalid provider"}, status_code=400)
client = oauth.create_client(provider)
if not client:
return JSONResponse({"error": "Provider not configured"}, status_code=400)
# Get parameters from the query string
state = callback_data.get("state")
redirect_uri = callback_data.get("redirect_uri", FRONTEND_URL)
if not state:
return JSONResponse({"error": "State parameter is required"}, status_code=400)
# Generate the PKCE challenge
code_verifier = token_urlsafe(32)
code_challenge = create_s256_code_challenge(code_verifier)
# Store the OAuth state in Redis
oauth_data = {
"code_verifier": code_verifier,
"provider": provider,
"redirect_uri": redirect_uri,
"created_at": int(time.time()),
}
await store_oauth_state(state, oauth_data)
# Use the callback URL provided by the frontend
oauth_callback_uri = f"{callback_data['base_url']}oauth/{provider}/callback"
try:
return await client.authorize_redirect(
callback_data["request"],
oauth_callback_uri,
code_challenge=code_challenge,
code_challenge_method="S256",
state=state,
)
except Exception as e:
logger.error(f"OAuth redirect error for {provider}: {e!s}")
return JSONResponse({"error": str(e)}, status_code=500)
async def oauth_callback(request: Any) -> JSONResponse | RedirectResponse:
"""
OAuth callback handler.
Creates or updates the user and sets the session token.
"""
try:
provider = request.path_params.get("provider")
if not provider:
return JSONResponse({"error": "Provider not specified"}, status_code=400)
# Get the OAuth client
client = oauth.create_client(provider)
if not client:
return JSONResponse({"error": "Invalid provider"}, status_code=400)
# Get the token
token = await client.authorize_access_token(request)
if not token:
return JSONResponse({"error": "Failed to get access token"}, status_code=400)
# Get the user profile
profile = await get_user_profile(provider, client, token)
if not profile:
return JSONResponse({"error": "Failed to get user profile"}, status_code=400)
# Create or update the user
author = await _create_or_update_user(provider, profile)
if not author:
return JSONResponse({"error": "Failed to create/update user"}, status_code=500)
# Create a session
session_token = await TokenStorage.create_session(
str(author.id),
auth_data={
"provider": provider,
"profile": profile,
},
username=author.name
if isinstance(author.name, str)
else str(author.name)
if author.name is not None
else None,
device_info={
"user_agent": request.headers.get("user-agent"),
"ip": request.client.host if hasattr(request, "client") else None,
},
)
# Get the state from Redis for the redirect
state = request.query_params.get("state")
state_data = await get_oauth_state(state) if state else None
redirect_uri = state_data.get("redirect_uri") if state_data else FRONTEND_URL
if not isinstance(redirect_uri, str) or not redirect_uri:
redirect_uri = FRONTEND_URL
# Create the redirect response
response = RedirectResponse(url=str(redirect_uri))
# Set the session cookie
response.set_cookie(
SESSION_COOKIE_NAME,
session_token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
path="/",  # Important: set path="/" so the cookie is available on all paths
)
logger.info(f"OAuth completed successfully for {provider}, user_id={author.id}")
return response
except Exception as e:
logger.error(f"OAuth callback error: {e!s}")
# On error, redirect to the frontend with an error flag
fallback_redirect = request.query_params.get("redirect_uri", FRONTEND_URL)
return RedirectResponse(url=f"{fallback_redirect}?error=auth_failed")
async def store_oauth_state(state: str, data: dict) -> None:
"""Сохраняет OAuth состояние в Redis с TTL"""
key = f"oauth_state:{state}"
await redis.execute("SETEX", key, OAUTH_STATE_TTL, orjson.dumps(data))
async def get_oauth_state(state: str) -> Optional[dict]:
"""Получает и удаляет OAuth состояние из Redis (one-time use)"""
key = f"oauth_state:{state}"
data = await redis.execute("GET", key)
if data:
await redis.execute("DEL", key) # Одноразовое использование
return orjson.loads(data)
return None
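A minimal sketch of the state round-trip (requires a running Redis; values are illustrative):

state = token_urlsafe(32)
await store_oauth_state(state, {"provider": "github", "created_at": int(time.time())})
data = await get_oauth_state(state)   # returns the stored dict once
again = await get_oauth_state(state)  # None: one-time use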
# HTTP handlers for testing
async def oauth_login_http(request: Request) -> JSONResponse | RedirectResponse:
"""HTTP handler for OAuth login"""
try:
provider = request.path_params.get("provider")
if not provider or provider not in PROVIDER_CONFIGS:
return JSONResponse({"error": "Invalid provider"}, status_code=400)
client = oauth.create_client(provider)
if not client:
return JSONResponse({"error": "Provider not configured"}, status_code=400)
# Generate the PKCE challenge
code_verifier = token_urlsafe(32)
code_challenge = create_s256_code_challenge(code_verifier)
state = token_urlsafe(32)
# Store the state in the session
request.session["code_verifier"] = code_verifier
request.session["provider"] = provider
request.session["state"] = state
# Store the OAuth state in Redis
oauth_data = {
"code_verifier": code_verifier,
"provider": provider,
"redirect_uri": FRONTEND_URL,
"created_at": int(time.time()),
}
await store_oauth_state(state, oauth_data)
# Callback URL
callback_uri = f"{FRONTEND_URL}oauth/{provider}/callback"
return await client.authorize_redirect(
request,
callback_uri,
code_challenge=code_challenge,
code_challenge_method="S256",
state=state,
)
except Exception as e:
logger.error(f"OAuth login error: {e}")
return JSONResponse({"error": "OAuth login failed"}, status_code=500)
async def oauth_callback_http(request: Request) -> JSONResponse | RedirectResponse:
"""HTTP handler для OAuth callback"""
try:
# Используем GraphQL resolver логику
provider = request.session.get("provider")
if not provider:
return JSONResponse({"error": "No OAuth session found"}, status_code=400)
state = request.query_params.get("state")
session_state = request.session.get("state")
if not state or state != session_state:
return JSONResponse({"error": "Invalid or expired OAuth state"}, status_code=400)
oauth_data = await get_oauth_state(state)
if not oauth_data:
return JSONResponse({"error": "Invalid or expired OAuth state"}, status_code=400)
# Reuse the existing logic
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
profile = await get_user_profile(provider, client, token)
if not profile:
return JSONResponse({"error": "Failed to get user profile"}, status_code=400)
# Create or update the user via the helper function
author = await _create_or_update_user(provider, profile)
# Create the session token
session_token = await TokenStorage.create_session(str(author.id))
# Clear the OAuth session state
request.session.pop("code_verifier", None)
request.session.pop("provider", None)
request.session.pop("state", None)
# Return a redirect with the cookie
response = RedirectResponse(url="/auth/success", status_code=307)
response.set_cookie(
SESSION_COOKIE_NAME,
session_token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
return response
except Exception as e:
logger.error(f"OAuth callback error: {e}")
return JSONResponse({"error": "OAuth callback failed"}, status_code=500)
async def _create_or_update_user(provider: str, profile: dict) -> Author:
"""
Creates or updates a user from an OAuth profile.
Returns an Author object.
"""
# For some providers (X, Telegram) the email may be missing
email = profile.get("email")
if not email:
# Generate a temporary email from the provider and ID
email = _generate_temp_email(provider, profile.get("id", "unknown"))
logger.info(f"Generated temporary email for {provider} user: {email}")
# Create or update the user
session = get_session()
try:
# First, look the user up by OAuth account
author = Author.find_by_oauth(provider, profile["id"], session)
if author:
# User found by OAuth - update the data
author.set_oauth_account(provider, profile["id"], email=profile.get("email"))
_update_author_profile(author, profile)
else:
# Look the user up by email, if a real email is present
author = None
if email and not email.endswith(TEMP_EMAIL_SUFFIX):
author = session.query(Author).filter(Author.email == email).first()
if author:
# User found by email - attach the OAuth data
author.set_oauth_account(provider, profile["id"], email=profile.get("email"))
_update_author_profile(author, profile)
else:
# Create a new user
author = _create_new_oauth_user(provider, profile, email, session)
session.commit()
return author
finally:
session.close()
def _update_author_profile(author: Author, profile: dict) -> None:
"""Обновляет профиль автора данными из OAuth"""
if profile.get("name") and not author.name:
author.name = profile["name"] # type: ignore[assignment]
if profile.get("picture") and not author.pic:
author.pic = profile["picture"] # type: ignore[assignment]
author.updated_at = int(time.time()) # type: ignore[assignment]
author.last_seen = int(time.time()) # type: ignore[assignment]
def _create_new_oauth_user(provider: str, profile: dict, email: str, session: Any) -> Author:
"""Создает нового пользователя из OAuth профиля"""
from orm.community import Community, CommunityAuthor, CommunityFollower
from utils.logger import root_logger as logger
slug = generate_unique_slug(profile["name"] or f"{provider}_{profile.get('id', 'user')}")
author = Author(
email=email,
name=profile["name"] or f"{provider.title()} User",
slug=slug,
pic=profile.get("picture"),
email_verified=bool(profile.get("email")),
created_at=int(time.time()),
updated_at=int(time.time()),
last_seen=int(time.time()),
)
session.add(author)
session.flush() # Obtain the new author's ID
# Attach OAuth data to the new user
author.set_oauth_account(provider, profile["id"], email=profile.get("email"))
# Add the user to the main community with default roles
target_community_id = 1 # Main community
# Fetch the community to assign default roles
community = session.query(Community).filter(Community.id == target_community_id).first()
if community:
# Initialize the community's role permissions if needed. This sync helper
# is called from async OAuth handlers, so an event loop is usually already
# running and run_until_complete() would raise; schedule a task instead,
# falling back to asyncio.run() when no loop is running.
try:
import asyncio
try:
loop = asyncio.get_running_loop()
loop.create_task(community.initialize_role_permissions())
except RuntimeError:
asyncio.run(community.initialize_role_permissions())
except Exception as e:
logger.warning(f"Failed to initialize permissions for community {target_community_id}: {e}")
# Use the community's default roles, falling back to the standard set
try:
default_roles = community.get_default_roles()
if not default_roles:
default_roles = ["reader", "author"]
except AttributeError:
default_roles = ["reader", "author"]
# Create a CommunityAuthor record with the default roles
community_author = CommunityAuthor(
community_id=target_community_id, author_id=author.id, roles=",".join(default_roles)
)
session.add(community_author)
logger.info(f"Created CommunityAuthor record for OAuth user {author.id} with roles: {default_roles}")
# Subscribe the user to the community
follower = CommunityFollower(community=target_community_id, follower=int(author.id))
session.add(follower)
logger.info(f"OAuth user {author.id} added to followers of community {target_community_id}")
return author
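A minimal wiring sketch for the handlers above, assuming a Starlette app; the login handler name oauth_login_http is an assumption (only its tail and oauth_callback_http are shown in this diff):

from starlette.applications import Starlette
from starlette.routing import Route

routes = [
    Route("/oauth/{provider}", oauth_login_http, methods=["GET"]),  # assumed login entry point
    Route("/oauth/callback", oauth_callback_http, methods=["GET"]),
]
app = Starlette(routes=routes)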

auth/orm.py Normal file

@@ -0,0 +1,268 @@
import time
from typing import Any, Dict, Optional
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String
from sqlalchemy.orm import Session
from auth.identity import Password
from services.db import BaseModel as Base
# Shared table_args for all models
DEFAULT_TABLE_ARGS = {"extend_existing": True}
"""
Author bookmark model
"""
class AuthorBookmark(Base):
"""
An author's bookmark on a publication.
Attributes:
author (int): Author ID
shout (int): Shout (publication) ID
"""
__tablename__ = "author_bookmark"
__table_args__ = (
Index("idx_author_bookmark_author", "author"),
Index("idx_author_bookmark_shout", "shout"),
{"extend_existing": True},
)
author = Column(ForeignKey("author.id"), primary_key=True)
shout = Column(ForeignKey("shout.id"), primary_key=True)
class AuthorRating(Base):
"""
Rating of an author by another author.
Attributes:
rater (int): ID of the author giving the rating
author (int): ID of the author being rated
plus (bool): Positive/negative rating
"""
__tablename__ = "author_rating"
__table_args__ = (
Index("idx_author_rating_author", "author"),
Index("idx_author_rating_rater", "rater"),
{"extend_existing": True},
)
rater = Column(ForeignKey("author.id"), primary_key=True)
author = Column(ForeignKey("author.id"), primary_key=True)
plus = Column(Boolean)
class AuthorFollower(Base):
"""
One author's subscription to another.
Attributes:
follower (int): Follower ID
author (int): ID of the author being followed
created_at (int): Subscription creation timestamp
auto (bool): Whether the subscription was created automatically
"""
__tablename__ = "author_follower"
__table_args__ = (
Index("idx_author_follower_author", "author"),
Index("idx_author_follower_follower", "follower"),
{"extend_existing": True},
)
id = None # type: ignore[assignment]
follower = Column(ForeignKey("author.id"), primary_key=True)
author = Column(ForeignKey("author.id"), primary_key=True)
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
auto = Column(Boolean, nullable=False, default=False)
class Author(Base):
"""
Extended author model with authentication and authorization support
"""
__tablename__ = "author"
__table_args__ = (
Index("idx_author_slug", "slug"),
Index("idx_author_email", "email"),
Index("idx_author_phone", "phone"),
{"extend_existing": True},
)
# Core author fields
id = Column(Integer, primary_key=True)
name = Column(String, nullable=True, comment="Display name")
slug = Column(String, unique=True, comment="Author's slug")
bio = Column(String, nullable=True, comment="Bio") # short description
about = Column(String, nullable=True, comment="About") # long formatted description
pic = Column(String, nullable=True, comment="Picture")
links = Column(JSON, nullable=True, comment="Links")
# OAuth accounts - JSON holding every provider's data
# Формат: {"google": {"id": "123", "email": "user@gmail.com"}, "github": {"id": "456"}}
oauth = Column(JSON, nullable=True, default=dict, comment="OAuth accounts data")
# Authentication fields
email = Column(String, unique=True, nullable=True, comment="Email")
phone = Column(String, unique=True, nullable=True, comment="Phone")
password = Column(String, nullable=True, comment="Password hash")
email_verified = Column(Boolean, default=False)
phone_verified = Column(Boolean, default=False)
failed_login_attempts = Column(Integer, default=0)
account_locked_until = Column(Integer, nullable=True)
# Timestamps
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
deleted_at = Column(Integer, nullable=True)
oid = Column(String, nullable=True)
# Protected fields, visible only to the owner and administrators
_protected_fields = ["email", "password", "provider_access_token", "provider_refresh_token"]
@property
def is_authenticated(self) -> bool:
"""Проверяет, аутентифицирован ли пользователь"""
return self.id is not None
def verify_password(self, password: str) -> bool:
"""Проверяет пароль пользователя"""
return Password.verify(password, str(self.password)) if self.password else False
def set_password(self, password: str):
"""Устанавливает пароль пользователя"""
self.password = Password.encode(password) # type: ignore[assignment]
def increment_failed_login(self):
"""Увеличивает счетчик неудачных попыток входа"""
self.failed_login_attempts += 1 # type: ignore[assignment]
if self.failed_login_attempts >= 5:
self.account_locked_until = int(time.time()) + 300 # type: ignore[assignment] # 5 минут
def reset_failed_login(self):
"""Сбрасывает счетчик неудачных попыток входа"""
self.failed_login_attempts = 0 # type: ignore[assignment]
self.account_locked_until = None # type: ignore[assignment]
def is_locked(self) -> bool:
"""Проверяет, заблокирован ли аккаунт"""
if not self.account_locked_until:
return False
return bool(self.account_locked_until > int(time.time()))
@property
def username(self) -> str:
"""
Returns the username used in tokens.
Required for compatibility with TokenStorage and JWTCodec.
Returns:
str: the user's slug, email, or phone
"""
return str(self.slug or self.email or self.phone or "")
def dict(self, access: bool = False) -> Dict[str, Any]:
"""
Serializes the author object into a dictionary.
Args:
access: If True, include protected fields
Returns:
Dict: Dictionary with the author's data
"""
result: Dict[str, Any] = {
"id": self.id,
"name": self.name,
"slug": self.slug,
"bio": self.bio,
"about": self.about,
"pic": self.pic,
"links": self.links,
"created_at": self.created_at,
"updated_at": self.updated_at,
"last_seen": self.last_seen,
"deleted_at": self.deleted_at,
"email_verified": self.email_verified,
}
# Include protected fields only when full access is requested
if access:
result.update({"email": self.email, "phone": self.phone, "oauth": self.oauth})
return result
@classmethod
def find_by_oauth(cls, provider: str, provider_id: str, session: Session) -> Optional["Author"]:
"""
Finds an author by OAuth provider and provider-side ID
Args:
provider (str): OAuth provider name (google, github, etc.)
provider_id (str): The user's ID at the provider
session: Database session
Returns:
Author or None: The matching author, or None if not found
"""
# Scan authors that have this provider with this ID
authors = session.query(cls).filter(cls.oauth.isnot(None)).all()
for author in authors:
if author.oauth and provider in author.oauth:
oauth_data = author.oauth[provider] # type: ignore[index]
if isinstance(oauth_data, dict) and oauth_data.get("id") == provider_id:
return author
return None
def set_oauth_account(self, provider: str, provider_id: str, email: Optional[str] = None) -> None:
"""
Sets an OAuth account on the author
Args:
provider (str): OAuth provider name (google, github, etc.)
provider_id (str): The user's ID at the provider
email (Optional[str]): Email reported by the provider
"""
if not self.oauth:
self.oauth = {} # type: ignore[assignment]
oauth_data: Dict[str, str] = {"id": provider_id}
if email:
oauth_data["email"] = email
self.oauth[provider] = oauth_data # type: ignore[index]
def get_oauth_account(self, provider: str) -> Optional[Dict[str, Any]]:
"""
Gets a provider's OAuth account data
Args:
provider (str): OAuth provider name
Returns:
dict or None: OAuth account data, or None if not found
"""
oauth_data = getattr(self, "oauth", None)
if not oauth_data:
return None
if isinstance(oauth_data, dict):
return oauth_data.get(provider)
return None
def remove_oauth_account(self, provider: str):
"""
Removes a provider's OAuth account
Args:
provider (str): OAuth provider name
"""
if self.oauth and provider in self.oauth:
del self.oauth[provider]
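A short, hedged usage sketch for the OAuth helpers above (assuming the local_session factory exported by services.db can be used as a context manager):

from auth.orm import Author
from services.db import local_session

with local_session() as session:
    author = Author.find_by_oauth("google", "123", session)
    if author:
        # Link a second provider to the same account
        author.set_oauth_account("github", "456", email="user@example.com")
        session.commit()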

auth/permissions.py Normal file

@@ -0,0 +1,146 @@
"""
Module for checking user permissions in the context of communities.
Lets you check a user's access to specific operations in a community
based on their roles within that community.
"""
from sqlalchemy.orm import Session
from auth.orm import Author
from orm.community import Community, CommunityAuthor
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
class ContextualPermissionCheck:
"""
Class for context-dependent permission checks.
Checks a user's permissions in the context of a community,
taking both global roles and community-level roles into account.
"""
@staticmethod
async def check_community_permission(
session: Session, author_id: int, community_slug: str, resource: str, operation: str
) -> bool:
"""
Checks whether a user has a permission in the context of a community.
Args:
session: SQLAlchemy session
author_id: Author/user ID
community_slug: Community slug
resource: Resource being accessed
operation: Operation on the resource
Returns:
bool: True if the user has the permission, otherwise False
"""
# 1. Check global permissions (e.g., administrator)
author = session.query(Author).filter(Author.id == author_id).one_or_none()
if not author:
return False
# Administrators (matched against the email list) are always allowed
if author.email in ADMIN_EMAILS:
return True
# 2. Check permissions within the community
# Fetch the community
community = session.query(Community).filter(Community.slug == community_slug).one_or_none()
if not community:
return False
# The community creator has full rights
if community.created_by == author_id:
return True
# Check whether the user's community roles grant this permission
permission_id = f"{resource}:{operation}"
ca = CommunityAuthor.find_by_user_and_community(author_id, community.id, session)
if not ca:
return False
return bool(await ca.has_permission(permission_id))
@staticmethod
async def get_user_community_roles(session: Session, author_id: int, community_slug: str) -> list[str]:
"""
Gets the list of a user's roles in a community.
Args:
session: SQLAlchemy session
author_id: Author/user ID
community_slug: Community slug
Returns:
list[str]: The user's roles in the community
"""
# Fetch the community
community = session.query(Community).filter(Community.slug == community_slug).one_or_none()
if not community:
return []
# The community creator implicitly holds every role
if community.created_by == author_id:
return ["editor", "author", "expert", "reader"]
ca = CommunityAuthor.find_by_user_and_community(author_id, community.id, session)
return ca.role_list if ca else []
@staticmethod
async def assign_role_to_user(session: Session, author_id: int, community_slug: str, role: str) -> bool:
"""
Assigns a role to a user in a community.
Args:
session: SQLAlchemy session
author_id: Author/user ID
community_slug: Community slug
role: Role to assign (string role name)
Returns:
bool: True if the role was assigned successfully, otherwise False
"""
# Fetch the community
community = session.query(Community).filter(Community.slug == community_slug).one_or_none()
if not community:
return False
# Check that the author-community link exists
ca = CommunityAuthor.find_by_user_and_community(author_id, community.id, session)
if not ca:
return False
# Assign the role
ca.add_role(role)
return True
@staticmethod
async def revoke_role_from_user(session: Session, author_id: int, community_slug: str, role: str) -> bool:
"""
Revokes a user's role in a community.
Args:
session: SQLAlchemy session
author_id: Author/user ID
community_slug: Community slug
role: Role to revoke (string role name)
Returns:
bool: True if the role was revoked successfully, otherwise False
"""
# Fetch the community
community = session.query(Community).filter(Community.slug == community_slug).one_or_none()
if not community:
return False
# Check that the author-community link exists
ca = CommunityAuthor.find_by_user_and_community(author_id, community.id, session)
if not ca:
return False
# Revoke the role
ca.remove_role(role)
return True
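A hedged sketch of calling the checker from async code; the community slug, resource, and operation strings are illustrative:

from auth.permissions import ContextualPermissionCheck
from services.db import local_session

async def can_edit_topics(author_id: int) -> bool:
    with local_session() as session:
        return await ContextualPermissionCheck.check_community_permission(
            session, author_id, "discours", "topic", "update"
        )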

auth/state.py Normal file

@@ -0,0 +1,25 @@
"""
Authorization state classes
"""
from typing import Optional
class AuthState:
"""
Holds a user's authorization state.
Used by authentication middleware and helper functions.
"""
def __init__(self) -> None:
self.logged_in: bool = False
self.author_id: Optional[str] = None
self.token: Optional[str] = None
self.username: Optional[str] = None
self.is_admin: bool = False
self.is_editor: bool = False
self.error: Optional[str] = None
def __bool__(self) -> bool:
"""Возвращает True если пользователь авторизован"""
return self.logged_in

auth/tokens/base.py Normal file

@@ -0,0 +1,54 @@
"""
Base class for token handling
"""
import secrets
from functools import lru_cache
from typing import Optional
from .types import TokenType
class BaseTokenManager:
"""
Base class with methods shared by all token types
"""
@staticmethod
@lru_cache(maxsize=1000)
def _make_token_key(token_type: TokenType, identifier: str, token: Optional[str] = None) -> str:
"""
Builds a unified, cached key for a token
Args:
token_type: Token type
identifier: Identifier (user_id, user_id:provider, etc.)
token: The token itself (for session and verification types)
Returns:
str: Token key
"""
if token_type == "session": # noqa: S105
return f"session:{identifier}:{token}"
if token_type == "verification": # noqa: S105
return f"verification_token:{token}"
if token_type == "oauth_access": # noqa: S105
return f"oauth_access:{identifier}"
if token_type == "oauth_refresh": # noqa: S105
return f"oauth_refresh:{identifier}"
error_msg = f"Неизвестный тип токена: {token_type}"
raise ValueError(error_msg)
@staticmethod
@lru_cache(maxsize=500)
def _make_user_tokens_key(user_id: str, token_type: TokenType) -> str:
"""Создает ключ для списка токенов пользователя"""
if token_type == "session": # noqa: S105
return f"user_sessions:{user_id}"
return f"user_tokens:{user_id}:{token_type}"
@staticmethod
def generate_token() -> str:
"""Генерирует криптографически стойкий токен"""
return secrets.token_urlsafe(32)
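By way of example, the key scheme above yields the following (a sketch; these helpers are internal to the token managers):

mgr = BaseTokenManager()
assert mgr._make_token_key("session", "42", "abc") == "session:42:abc"
assert mgr._make_token_key("oauth_access", "42:google") == "oauth_access:42:google"
assert mgr._make_user_tokens_key("42", "session") == "user_sessions:42"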

auth/tokens/batch.py Normal file

@@ -0,0 +1,197 @@
"""
Batched token operations for better performance
"""
import asyncio
from typing import Any, Dict, List, Optional
from auth.jwtcodec import JWTCodec
from services.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
from .types import BATCH_SIZE
class BatchTokenOperations(BaseTokenManager):
"""
Class for batched token operations
"""
async def batch_validate_tokens(self, tokens: List[str]) -> Dict[str, bool]:
"""
Validates tokens in batches to improve performance
Args:
tokens: Tokens to validate
Returns:
Dict[str, bool]: Mapping of {token: is_valid}
"""
if not tokens:
return {}
results = {}
# Split into batches to avoid blocking Redis
for i in range(0, len(tokens), BATCH_SIZE):
batch = tokens[i : i + BATCH_SIZE]
batch_results = await self._validate_token_batch(batch)
results.update(batch_results)
return results
async def _validate_token_batch(self, token_batch: List[str]) -> Dict[str, bool]:
"""Валидация батча токенов"""
results = {}
# Decode the tokens concurrently as tasks
decode_tasks = [asyncio.create_task(self._safe_decode_token(token)) for token in token_batch]
decoded_payloads = await asyncio.gather(*decode_tasks, return_exceptions=True)
# Prepare the keys to check
token_keys = []
valid_tokens = []
for token, payload in zip(token_batch, decoded_payloads):
if isinstance(payload, Exception) or not payload or not hasattr(payload, "user_id"):
results[token] = False
continue
token_key = self._make_token_key("session", payload.user_id, token)
token_keys.append(token_key)
valid_tokens.append(token)
# Check key existence in a single pipeline
if token_keys:
async with redis_adapter.pipeline() as pipe:
for key in token_keys:
await pipe.exists(key)
existence_results = await pipe.execute()
for token, exists in zip(valid_tokens, existence_results):
results[token] = bool(exists)
return results
async def _safe_decode_token(self, token: str) -> Optional[Any]:
"""Безопасное декодирование токена"""
try:
return JWTCodec.decode(token)
except Exception:
return None
async def batch_revoke_tokens(self, tokens: List[str]) -> int:
"""
Revokes tokens in batches
Args:
tokens: Tokens to revoke
Returns:
int: Number of tokens revoked
"""
if not tokens:
return 0
revoked_count = 0
# Process in batches
for i in range(0, len(tokens), BATCH_SIZE):
batch = tokens[i : i + BATCH_SIZE]
batch_count = await self._revoke_token_batch(batch)
revoked_count += batch_count
return revoked_count
async def _revoke_token_batch(self, token_batch: List[str]) -> int:
"""Отзыв батча токенов"""
keys_to_delete = []
user_updates: Dict[str, set[str]] = {} # {user_id: {tokens_to_remove}}
# Декодируем токены и подготавливаем операции
for token in token_batch:
payload = await self._safe_decode_token(token)
if payload:
user_id = payload.user_id
username = payload.username
# Ключи для удаления
new_key = self._make_token_key("session", user_id, token)
old_key = f"{user_id}-{username}-{token}"
keys_to_delete.extend([new_key, old_key])
# Обновления пользовательских списков
if user_id not in user_updates:
user_updates[user_id] = set()
user_updates[user_id].add(token)
if not keys_to_delete:
return 0
# Выполняем удаление пакетно
async with redis_adapter.pipeline() as pipe:
# Удаляем ключи токенов
await pipe.delete(*keys_to_delete)
# Обновляем пользовательские списки
for user_id, tokens_to_remove in user_updates.items():
user_tokens_key = self._make_user_tokens_key(user_id, "session")
for token in tokens_to_remove:
await pipe.srem(user_tokens_key, token)
results = await pipe.execute()
return len([r for r in results if r > 0])
async def cleanup_expired_tokens(self) -> int:
"""Оптимизированная очистка истекших токенов с использованием SCAN"""
try:
cleaned_count = 0
cursor = 0
# Scan all per-user session list keys
while True:
cursor, keys = await redis_adapter.execute("scan", cursor, "user_sessions:*", 100)
for user_tokens_key in keys:
tokens = await redis_adapter.smembers(user_tokens_key)
active_tokens = []
# Check token liveness in one pipeline
if tokens:
async with redis_adapter.pipeline() as pipe:
for token in tokens:
token_str = token if isinstance(token, str) else str(token)
session_key = self._make_token_key("session", user_tokens_key.split(":")[1], token_str)
await pipe.exists(session_key)
results = await pipe.execute()
for token, exists in zip(tokens, results):
if exists:
active_tokens.append(token)
else:
cleaned_count += 1
# Rewrite the list of active tokens
if active_tokens:
async with redis_adapter.pipeline() as pipe:
await pipe.delete(user_tokens_key)
for token in active_tokens:
await pipe.sadd(user_tokens_key, token)
await pipe.execute()
else:
await redis_adapter.delete(user_tokens_key)
if cursor == 0:
break
if cleaned_count > 0:
logger.info(f"Очищено {cleaned_count} ссылок на истекшие токены")
return cleaned_count
except Exception as e:
logger.error(f"Ошибка очистки токенов: {e}")
return 0
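A hedged usage sketch for the batch operations above (token values are placeholders):

import asyncio
from auth.tokens.batch import BatchTokenOperations

async def main() -> None:
    ops = BatchTokenOperations()
    validity = await ops.batch_validate_tokens(["tok1", "tok2"])  # {token: bool}
    stale = [t for t, ok in validity.items() if not ok]
    print(await ops.batch_revoke_tokens(stale))  # number of tokens revoked

asyncio.run(main())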

auth/tokens/monitoring.py Normal file

@@ -0,0 +1,189 @@
"""
Token system statistics and monitoring
"""
import asyncio
from typing import Any, Dict
from services.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
from .types import SCAN_BATCH_SIZE
class TokenMonitoring(BaseTokenManager):
"""
Class for token monitoring and statistics
"""
async def get_token_statistics(self) -> Dict[str, Any]:
"""
Collects token statistics for monitoring
Returns:
Dict: Token statistics
"""
stats = {
"session_tokens": 0,
"verification_tokens": 0,
"oauth_access_tokens": 0,
"oauth_refresh_tokens": 0,
"user_sessions": 0,
"memory_usage": 0,
}
try:
# Count tokens per type using SCAN
patterns = {
"session_tokens": "session:*",
"verification_tokens": "verification_token:*",
"oauth_access_tokens": "oauth_access:*",
"oauth_refresh_tokens": "oauth_refresh:*",
"user_sessions": "user_sessions:*",
}
count_tasks = [self._count_keys_by_pattern(pattern) for pattern in patterns.values()]
counts = await asyncio.gather(*count_tasks)
for (stat_name, _), count in zip(patterns.items(), counts):
stats[stat_name] = count
# Fetch Redis memory info
memory_info = await redis_adapter.execute("INFO", "MEMORY")
stats["memory_usage"] = memory_info.get("used_memory", 0)
except Exception as e:
logger.error(f"Ошибка получения статистики токенов: {e}")
return stats
async def _count_keys_by_pattern(self, pattern: str) -> int:
"""Подсчет ключей по паттерну используя SCAN"""
count = 0
cursor = 0
while True:
cursor, keys = await redis_adapter.execute("scan", cursor, pattern, SCAN_BATCH_SIZE)
count += len(keys)
if cursor == 0:
break
return count
async def optimize_memory_usage(self) -> Dict[str, Any]:
"""
Optimizes Redis memory usage
Returns:
Dict: Optimization results
"""
results = {"cleaned_expired": 0, "optimized_structures": 0, "memory_saved": 0}
try:
# Clean up expired tokens
from .batch import BatchTokenOperations
batch_ops = BatchTokenOperations()
cleaned = await batch_ops.cleanup_expired_tokens()
results["cleaned_expired"] = cleaned
# Optimize data structures
optimized = await self._optimize_data_structures()
results["optimized_structures"] = optimized
# Ask Redis to release freed memory
await redis_adapter.execute("MEMORY", "PURGE")
logger.info(f"Оптимизация памяти завершена: {results}")
except Exception as e:
logger.error(f"Ошибка оптимизации памяти: {e}")
return results
async def _optimize_data_structures(self) -> int:
"""Оптимизирует структуры данных Redis"""
optimized_count = 0
cursor = 0
# Trim the per-user session sets
while True:
cursor, keys = await redis_adapter.execute("scan", cursor, "user_sessions:*", SCAN_BATCH_SIZE)
for key in keys:
try:
# Check the set size
size = await redis_adapter.execute("scard", key)
if size == 0:
await redis_adapter.delete(key)
optimized_count += 1
elif size > 100: # Too many sessions for a single user
# Keep only 50 sessions (set order is arbitrary, so this is best-effort)
members = await redis_adapter.execute("smembers", key)
if len(members) > 50:
members_list = list(members)
to_remove = members_list[:-50]
if to_remove:
await redis_adapter.srem(key, *to_remove)
optimized_count += len(to_remove)
except Exception as e:
logger.error(f"Ошибка оптимизации ключа {key}: {e}")
continue
if cursor == 0:
break
return optimized_count
async def health_check(self) -> Dict[str, Any]:
"""
Token system health check
Returns:
Dict: Check results
"""
health: Dict[str, Any] = {
"status": "healthy",
"redis_connected": False,
"token_operations": False,
"errors": [],
}
try:
# Check the Redis connection
await redis_adapter.ping()
health["redis_connected"] = True
# Exercise the basic token operations
from .sessions import SessionTokenManager
session_manager = SessionTokenManager()
test_user_id = "health_check_user"
test_token = await session_manager.create_session(test_user_id)
if test_token:
# Check validation
valid, _ = await session_manager.validate_session_token(test_token)
if valid:
# Check revocation
revoked = await session_manager.revoke_session_token(test_token)
if revoked:
health["token_operations"] = True
else:
health["errors"].append("Failed to revoke test token") # type: ignore[misc]
else:
health["errors"].append("Failed to validate test token") # type: ignore[misc]
else:
health["errors"].append("Failed to create test token") # type: ignore[misc]
except Exception as e:
health["errors"].append(f"Health check error: {e}") # type: ignore[misc]
if health["errors"]:
health["status"] = "unhealthy"
return health
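A hedged sketch of running the health check from an async entry point:

import asyncio
from auth.tokens.monitoring import TokenMonitoring

async def main() -> None:
    health = await TokenMonitoring().health_check()
    print(health["status"], health["errors"])

asyncio.run(main())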

auth/tokens/oauth.py Normal file

@@ -0,0 +1,155 @@
"""
OAuth token management
"""
import json
import time
from typing import Optional
from services.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
from .types import DEFAULT_TTL, TokenData, TokenType
class OAuthTokenManager(BaseTokenManager):
"""
OAuth token manager
"""
async def store_oauth_tokens(
self,
user_id: str,
provider: str,
access_token: str,
refresh_token: Optional[str] = None,
expires_in: Optional[int] = None,
additional_data: Optional[TokenData] = None,
) -> bool:
"""Сохраняет OAuth токены"""
try:
# Сохраняем access token
access_data = {
"token": access_token,
"provider": provider,
"expires_in": expires_in,
**(additional_data or {}),
}
access_ttl = expires_in if expires_in else DEFAULT_TTL["oauth_access"]
await self._create_oauth_token(user_id, access_data, access_ttl, provider, "oauth_access")
# Сохраняем refresh token если есть
if refresh_token:
refresh_data = {
"token": refresh_token,
"provider": provider,
}
await self._create_oauth_token(
user_id, refresh_data, DEFAULT_TTL["oauth_refresh"], provider, "oauth_refresh"
)
return True
except Exception as e:
logger.error(f"Ошибка сохранения OAuth токенов: {e}")
return False
async def _create_oauth_token(
self, user_id: str, token_data: TokenData, ttl: int, provider: str, token_type: TokenType
) -> str:
"""Оптимизированное создание OAuth токена"""
if not provider:
error_msg = "OAuth токены требуют указания провайдера"
raise ValueError(error_msg)
identifier = f"{user_id}:{provider}"
token_key = self._make_token_key(token_type, identifier)
# Добавляем метаданные
token_data.update(
{"user_id": user_id, "token_type": token_type, "provider": provider, "created_at": int(time.time())}
)
# Используем SETEX для атомарной операции
serialized_data = json.dumps(token_data, ensure_ascii=False)
await redis_adapter.execute("setex", token_key, ttl, serialized_data)
logger.info(f"Создан {token_type} токен для пользователя {user_id}, провайдер {provider}")
return token_key
async def get_token(self, user_id: int, provider: str, token_type: TokenType) -> Optional[TokenData]:
"""Получает токен"""
if token_type.startswith("oauth_"):
return await self._get_oauth_data_optimized(token_type, str(user_id), provider)
return None
async def _get_oauth_data_optimized(
self, token_type: TokenType, user_id: str, provider: str
) -> Optional[TokenData]:
"""Оптимизированное получение OAuth данных"""
if not user_id or not provider:
error_msg = "OAuth токены требуют user_id и provider"
raise ValueError(error_msg)
identifier = f"{user_id}:{provider}"
token_key = self._make_token_key(token_type, identifier)
# Получаем данные и TTL в одном pipeline
async with redis_adapter.pipeline() as pipe:
await pipe.get(token_key)
await pipe.ttl(token_key)
results = await pipe.execute()
if results[0]:
token_data = json.loads(results[0])
if results[1] > 0:
token_data["ttl_remaining"] = results[1]
return token_data
return None
async def revoke_oauth_tokens(self, user_id: str, provider: str) -> bool:
"""Удаляет все OAuth токены для провайдера"""
try:
result1 = await self._revoke_oauth_token_optimized("oauth_access", user_id, provider)
result2 = await self._revoke_oauth_token_optimized("oauth_refresh", user_id, provider)
return result1 or result2
except Exception as e:
logger.error(f"Ошибка удаления OAuth токенов: {e}")
return False
async def _revoke_oauth_token_optimized(self, token_type: TokenType, user_id: str, provider: str) -> bool:
"""Оптимизированный отзыв OAuth токена"""
if not user_id or not provider:
error_msg = "OAuth токены требуют user_id и provider"
raise ValueError(error_msg)
identifier = f"{user_id}:{provider}"
token_key = self._make_token_key(token_type, identifier)
result = await redis_adapter.delete(token_key)
return result > 0
async def revoke_user_oauth_tokens(self, user_id: str, token_type: TokenType) -> int:
"""Оптимизированный отзыв OAuth токенов пользователя используя SCAN"""
count = 0
cursor = 0
delete_keys = []
pattern = f"{token_type}:{user_id}:*"
# Use SCAN to find tokens safely
while True:
cursor, keys = await redis_adapter.execute("scan", cursor, pattern, 100)
if keys:
delete_keys.extend(keys)
count += len(keys)
if cursor == 0:
break
# Delete the found tokens in one call
if delete_keys:
await redis_adapter.delete(*delete_keys)
return count
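A hedged sketch of the OAuth token manager API shown above (token values are placeholders):

import asyncio
from auth.tokens.oauth import OAuthTokenManager

async def main() -> None:
    mgr = OAuthTokenManager()
    await mgr.store_oauth_tokens("42", "google", access_token="ya29.example", expires_in=3600)
    data = await mgr.get_token(42, "google", "oauth_access")  # includes ttl_remaining
    print(data)
    await mgr.revoke_oauth_tokens("42", "google")

asyncio.run(main())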

auth/tokens/sessions.py Normal file

@@ -0,0 +1,267 @@
"""
Session token management
"""
import json
import time
from typing import Any, List, Optional, Union
from auth.jwtcodec import JWTCodec
from services.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
from .types import DEFAULT_TTL, TokenData
class SessionTokenManager(BaseTokenManager):
"""
Session token manager
"""
async def create_session(
self,
user_id: str,
auth_data: Optional[dict] = None,
username: Optional[str] = None,
device_info: Optional[dict] = None,
) -> str:
"""Создает токен сессии"""
session_data = {}
if auth_data:
session_data["auth_data"] = json.dumps(auth_data)
if username:
session_data["username"] = username
if device_info:
session_data["device_info"] = json.dumps(device_info)
return await self.create_session_token(user_id, session_data)
async def create_session_token(self, user_id: str, token_data: TokenData) -> str:
"""Создание JWT токена сессии"""
username = token_data.get("username", "")
# Создаем JWT токен
jwt_token = JWTCodec.encode(
{
"user_id": user_id,
"username": username,
}
)
session_token = jwt_token
token_key = self._make_token_key("session", user_id, session_token)
user_tokens_key = self._make_user_tokens_key(user_id, "session")
ttl = DEFAULT_TTL["session"]
# Добавляем метаданные
token_data.update({"user_id": user_id, "token_type": "session", "created_at": int(time.time())})
# Используем новый метод execute_pipeline для избежания deprecated warnings
commands: list[tuple[str, tuple[Any, ...]]] = []
# Сохраняем данные сессии в hash, преобразуя значения в строки
for field, value in token_data.items():
commands.append(("hset", (token_key, field, str(value))))
commands.append(("expire", (token_key, ttl)))
# Добавляем в список сессий пользователя
commands.append(("sadd", (user_tokens_key, session_token)))
commands.append(("expire", (user_tokens_key, ttl)))
await redis_adapter.execute_pipeline(commands)
logger.info(f"Создан токен сессии для пользователя {user_id}")
return session_token
async def get_session_data(self, token: str, user_id: Optional[str] = None) -> Optional[TokenData]:
"""Получение данных сессии"""
if not user_id:
# Извлекаем user_id из JWT
payload = JWTCodec.decode(token)
if payload:
user_id = payload.user_id
else:
return None
token_key = self._make_token_key("session", user_id, token)
# Используем новый метод execute_pipeline для избежания deprecated warnings
commands: list[tuple[str, tuple[Any, ...]]] = [
("hgetall", (token_key,)),
("hset", (token_key, "last_activity", str(int(time.time())))),
]
results = await redis_adapter.execute_pipeline(commands)
token_data = results[0] if results else None
return dict(token_data) if token_data else None
async def validate_session_token(self, token: str) -> tuple[bool, Optional[TokenData]]:
"""
Checks whether a session token is valid
"""
try:
# Decode the JWT token
payload = JWTCodec.decode(token)
if not payload:
return False, None
user_id = payload.user_id
token_key = self._make_token_key("session", user_id, token)
# Check existence and fetch the data in one round trip
commands: list[tuple[str, tuple[Any, ...]]] = [("exists", (token_key,)), ("hgetall", (token_key,))]
results = await redis_adapter.execute_pipeline(commands)
if results and results[0]: # exists
return True, dict(results[1])
return False, None
except Exception as e:
logger.error(f"Session token validation error: {e}")
return False, None
async def revoke_session_token(self, token: str) -> bool:
"""Отзыв токена сессии"""
payload = JWTCodec.decode(token)
if not payload:
return False
user_id = payload.user_id
# Используем новый метод execute_pipeline для избежания deprecated warnings
token_key = self._make_token_key("session", user_id, token)
user_tokens_key = self._make_user_tokens_key(user_id, "session")
commands: list[tuple[str, tuple[Any, ...]]] = [("delete", (token_key,)), ("srem", (user_tokens_key, token))]
results = await redis_adapter.execute_pipeline(commands)
return any(result > 0 for result in results if result is not None)
async def revoke_user_sessions(self, user_id: str) -> int:
"""Отзыв всех сессий пользователя"""
user_tokens_key = self._make_user_tokens_key(user_id, "session")
tokens = await redis_adapter.smembers(user_tokens_key)
if not tokens:
return 0
# Используем пакетное удаление
keys_to_delete = []
for token in tokens:
token_str = token if isinstance(token, str) else str(token)
keys_to_delete.append(self._make_token_key("session", user_id, token_str))
# Добавляем ключ списка токенов
keys_to_delete.append(user_tokens_key)
# Удаляем все ключи пакетно
if keys_to_delete:
await redis_adapter.delete(*keys_to_delete)
return len(tokens)
async def get_user_sessions(self, user_id: Union[int, str]) -> List[TokenData]:
"""Получение сессий пользователя"""
try:
user_tokens_key = self._make_user_tokens_key(str(user_id), "session")
tokens = await redis_adapter.smembers(user_tokens_key)
if not tokens:
return []
# Получаем данные всех сессий пакетно
sessions = []
async with redis_adapter.pipeline() as pipe:
for token in tokens:
token_str = token if isinstance(token, str) else str(token)
await pipe.hgetall(self._make_token_key("session", str(user_id), token_str))
results = await pipe.execute()
for token, session_data in zip(tokens, results):
if session_data:
token_str = token if isinstance(token, str) else str(token)
session_dict = dict(session_data)
session_dict["token"] = token_str
sessions.append(session_dict)
return sessions
except Exception as e:
logger.error(f"Ошибка получения сессий пользователя: {e}")
return []
async def refresh_session(self, user_id: int, old_token: str, device_info: Optional[dict] = None) -> Optional[str]:
"""
Refreshes a user's session by replacing the old token with a new one
"""
try:
user_id_str = str(user_id)
# Fetch the old session data
old_session_data = await self.get_session_data(old_token)
if not old_session_data:
logger.warning(f"Session not found: {user_id}")
return None
# Reuse the old device info when none is provided
if not device_info and "device_info" in old_session_data:
try:
device_info = json.loads(old_session_data.get("device_info", "{}"))
except (json.JSONDecodeError, TypeError):
device_info = None
# Create the new session
new_token = await self.create_session(
user_id_str, device_info=device_info, username=old_session_data.get("username", "")
)
# Revoke the old session
await self.revoke_session_token(old_token)
return new_token
except Exception as e:
logger.error(f"Session refresh error: {e}")
return None
async def verify_session(self, token: str) -> Optional[Any]:
"""
Verifies a session by token; kept for compatibility with TokenStorage
"""
if not token:
logger.debug("Empty token")
return None
logger.debug(f"Verifying session for token: {token[:20]}...")
try:
# Decode the token to get its payload
payload = JWTCodec.decode(token)
if not payload:
logger.error("Failed to decode token")
return None
if not hasattr(payload, "user_id"):
logger.error("Token payload is missing user_id")
return None
logger.debug(f"Token decoded successfully, user_id={payload.user_id}")
# Check that the session exists in Redis
token_key = self._make_token_key("session", str(payload.user_id), token)
session_exists = await redis_adapter.exists(token_key)
if not session_exists:
logger.warning(f"Session not found in Redis for user_id={payload.user_id}")
return None
# Update last_activity
await redis_adapter.hset(token_key, "last_activity", str(int(time.time())))
return payload
except Exception as e:
logger.error(f"Session verification error: {e}")
return None
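A hedged sketch of the session lifecycle exposed above (assuming a reachable Redis and a configured JWT secret):

import asyncio
from auth.tokens.sessions import SessionTokenManager

async def main() -> None:
    mgr = SessionTokenManager()
    token = await mgr.create_session("42", username="reader")
    ok, data = await mgr.validate_session_token(token)
    print(ok, data.get("username") if data else None)
    token = await mgr.refresh_session(42, token) or token
    await mgr.revoke_user_sessions("42")

asyncio.run(main())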

auth/tokens/storage.py Normal file

@@ -0,0 +1,114 @@
"""
A simple facade over the token system
"""
from typing import Any, Optional
from .batch import BatchTokenOperations
from .monitoring import TokenMonitoring
from .oauth import OAuthTokenManager
from .sessions import SessionTokenManager
from .verification import VerificationTokenManager
class _TokenStorageImpl:
"""
Internal class behind the token facade.
Uses composition instead of inheritance.
"""
def __init__(self) -> None:
self._sessions = SessionTokenManager()
self._verification = VerificationTokenManager()
self._oauth = OAuthTokenManager()
self._batch = BatchTokenOperations()
self._monitoring = TokenMonitoring()
# === SESSION METHODS ===
async def create_session(
self,
user_id: str,
auth_data: Optional[dict] = None,
username: Optional[str] = None,
device_info: Optional[dict] = None,
) -> str:
"""Creates a user session"""
return await self._sessions.create_session(user_id, auth_data, username, device_info)
async def verify_session(self, token: str) -> Optional[Any]:
"""Verifies a session by token"""
return await self._sessions.verify_session(token)
async def refresh_session(self, user_id: int, old_token: str, device_info: Optional[dict] = None) -> Optional[str]:
"""Refreshes a user's session"""
return await self._sessions.refresh_session(user_id, old_token, device_info)
async def revoke_session(self, session_token: str) -> bool:
"""Revokes a session"""
return await self._sessions.revoke_session_token(session_token)
async def revoke_user_sessions(self, user_id: str) -> int:
"""Revokes all of a user's sessions"""
return await self._sessions.revoke_user_sessions(user_id)
# === HELPER METHODS ===
async def cleanup_expired_tokens(self) -> int:
"""Cleans up expired tokens"""
return await self._batch.cleanup_expired_tokens()
async def get_token_statistics(self) -> dict:
"""Collects token statistics"""
return await self._monitoring.get_token_statistics()
# Global facade instance
_token_storage = _TokenStorageImpl()
class TokenStorage:
"""
Static facade over the token system.
All methods delegate to the global instance.
"""
@staticmethod
async def create_session(
user_id: str,
auth_data: Optional[dict] = None,
username: Optional[str] = None,
device_info: Optional[dict] = None,
) -> str:
"""Создание сессии пользователя"""
return await _token_storage.create_session(user_id, auth_data, username, device_info)
@staticmethod
async def verify_session(token: str) -> Optional[Any]:
"""Проверка сессии по токену"""
return await _token_storage.verify_session(token)
@staticmethod
async def refresh_session(user_id: int, old_token: str, device_info: Optional[dict] = None) -> Optional[str]:
"""Обновление сессии пользователя"""
return await _token_storage.refresh_session(user_id, old_token, device_info)
@staticmethod
async def revoke_session(session_token: str) -> bool:
"""Отзыв сессии"""
return await _token_storage.revoke_session(session_token)
@staticmethod
async def revoke_user_sessions(user_id: str) -> int:
"""Отзыв всех сессий пользователя"""
return await _token_storage.revoke_user_sessions(user_id)
@staticmethod
async def cleanup_expired_tokens() -> int:
"""Очистка истекших токенов"""
return await _token_storage.cleanup_expired_tokens()
@staticmethod
async def get_token_statistics() -> dict:
"""Получение статистики токенов"""
return await _token_storage.get_token_statistics()
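The facade is what the HTTP layer uses; for instance, the OAuth callback above calls TokenStorage.create_session(str(author.id)). A minimal round trip:

import asyncio
from auth.tokens.storage import TokenStorage

async def main() -> None:
    token = await TokenStorage.create_session("42", username="reader")
    payload = await TokenStorage.verify_session(token)
    print(payload.user_id if payload else None)
    await TokenStorage.revoke_session(token)

asyncio.run(main())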

auth/tokens/types.py Normal file

@@ -0,0 +1,23 @@
"""
Types and constants for the token system
"""
from typing import Any, Dict, Literal
# Token types
TokenType = Literal["session", "verification", "oauth_access", "oauth_refresh"]
# Default TTLs per token type
DEFAULT_TTL = {
"session": 30 * 24 * 60 * 60, # 30 days
"verification": 3600, # 1 hour
"oauth_access": 3600, # 1 hour
"oauth_refresh": 86400 * 30, # 30 days
}
# Batch sizes for optimizing Redis operations
BATCH_SIZE = 100 # Batch size for bulk token processing
SCAN_BATCH_SIZE = 1000 # Batch size for SCAN operations
# Shared data types
TokenData = Dict[str, Any]

auth/tokens/verification.py Normal file

@@ -0,0 +1,161 @@
"""
Verification token management
"""
import json
import secrets
import time
from typing import Optional
from services.redis import redis as redis_adapter
from utils.logger import root_logger as logger
from .base import BaseTokenManager
from .types import TokenData
class VerificationTokenManager(BaseTokenManager):
"""
Менеджер токенов подтверждения
"""
async def create_verification_token(
self,
user_id: str,
verification_type: str,
data: TokenData,
ttl: Optional[int] = None,
) -> str:
"""Создает токен подтверждения"""
token_data = {"verification_type": verification_type, **data}
# TTL по типу подтверждения
if ttl is None:
verification_ttls = {
"email_change": 3600, # 1 час
"phone_change": 600, # 10 минут
"password_reset": 1800, # 30 минут
}
ttl = verification_ttls.get(verification_type, 3600)
return await self._create_verification_token(user_id, token_data, ttl)
async def _create_verification_token(
self, user_id: str, token_data: TokenData, ttl: int, token: Optional[str] = None
) -> str:
"""Оптимизированное создание токена подтверждения"""
verification_token = token or secrets.token_urlsafe(32)
token_key = self._make_token_key("verification", user_id, verification_token)
# Attach metadata
token_data.update({"user_id": user_id, "token_type": "verification", "created_at": int(time.time())})
# Cancel previous tokens of the same type
verification_type = token_data.get("verification_type", "unknown")
await self._cancel_verification_tokens_optimized(user_id, verification_type)
# Use SETEX for an atomic set-with-TTL
serialized_data = json.dumps(token_data, ensure_ascii=False)
await redis_adapter.execute("setex", token_key, ttl, serialized_data)
logger.info(f"Создан токен подтверждения {verification_type} для пользователя {user_id}")
return verification_token
async def get_verification_token_data(self, token: str) -> Optional[TokenData]:
"""Получает данные токена подтверждения"""
token_key = self._make_token_key("verification", "", token)
return await redis_adapter.get_and_deserialize(token_key)
async def validate_verification_token(self, token_str: str) -> tuple[bool, Optional[TokenData]]:
"""Проверяет валидность токена подтверждения"""
token_key = self._make_token_key("verification", "", token_str)
token_data = await redis_adapter.get_and_deserialize(token_key)
if token_data:
return True, token_data
return False, None
async def confirm_verification_token(self, token_str: str) -> Optional[TokenData]:
"""Подтверждает и использует токен подтверждения (одноразовый)"""
token_data = await self.get_verification_token_data(token_str)
if token_data:
# Delete the token after use
await self.revoke_verification_token(token_str)
return token_data
return None
async def revoke_verification_token(self, token: str) -> bool:
"""Отзывает токен подтверждения"""
token_key = self._make_token_key("verification", "", token)
result = await redis_adapter.delete(token_key)
return result > 0
async def revoke_user_verification_tokens(self, user_id: str) -> int:
"""Оптимизированный отзыв токенов подтверждения пользователя используя SCAN вместо KEYS"""
count = 0
cursor = 0
delete_keys = []
# Use SCAN to find tokens safely
while True:
cursor, keys = await redis_adapter.execute("scan", cursor, "verification_token:*", 100)
# Inspect each key in the batch
if keys:
async with redis_adapter.pipeline() as pipe:
for key in keys:
await pipe.get(key)
results = await pipe.execute()
for key, data in zip(keys, results):
if data:
try:
token_data = json.loads(data)
if token_data.get("user_id") == user_id:
delete_keys.append(key)
count += 1
except (json.JSONDecodeError, TypeError):
continue
if cursor == 0:
break
# Delete the found tokens in one call
if delete_keys:
await redis_adapter.delete(*delete_keys)
return count
async def _cancel_verification_tokens_optimized(self, user_id: str, verification_type: str) -> None:
"""Оптимизированная отмена токенов подтверждения используя SCAN"""
cursor = 0
delete_keys = []
while True:
cursor, keys = await redis_adapter.execute("scan", cursor, "verification_token:*", 100)
if keys:
# Fetch the values in one pipeline
async with redis_adapter.pipeline() as pipe:
for key in keys:
await pipe.get(key)
results = await pipe.execute()
# Decide which tokens to delete
for key, data in zip(keys, results):
if data:
try:
token_data = json.loads(data)
if (
token_data.get("user_id") == user_id
and token_data.get("verification_type") == verification_type
):
delete_keys.append(key)
except (json.JSONDecodeError, TypeError):
continue
if cursor == 0:
break
# Delete the found tokens in one call
if delete_keys:
await redis_adapter.delete(*delete_keys)
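A hedged sketch of the one-shot verification flow above (the payload key new_email is illustrative):

import asyncio
from auth.tokens.verification import VerificationTokenManager

async def main() -> None:
    mgr = VerificationTokenManager()
    token = await mgr.create_verification_token(
        "42", "email_change", {"new_email": "new@example.com"}
    )
    data = await mgr.confirm_verification_token(token)  # consumes the token
    print(data and data.get("new_email"))

asyncio.run(main())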


@@ -1,73 +0,0 @@
from datetime import datetime, timedelta, timezone
from auth.jwtcodec import JWTCodec
from base.redis import redis
from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN
from validations.auth import AuthInput
async def save(token_key, life_span, auto_delete=True):
await redis.execute("SET", token_key, "True")
if auto_delete:
expire_at = (datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)).timestamp()
await redis.execute("EXPIREAT", token_key, int(expire_at))
class SessionToken:
@classmethod
async def verify(cls, token: str):
"""
Rules for a token to be valid.
- token format is legal
- token exists in redis database
- token is not expired
"""
try:
return JWTCodec.decode(token)
except Exception as e:
raise e
@classmethod
async def get(cls, payload, token):
return await TokenStorage.get(f"{payload.user_id}-{payload.username}-{token}")
class TokenStorage:
@staticmethod
async def get(token_key):
print("[tokenstorage.get] " + token_key)
# 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
return await redis.execute("GET", token_key)
@staticmethod
async def create_onetime(user: AuthInput) -> str:
life_span = ONETIME_TOKEN_LIFE_SPAN
exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
one_time_token = JWTCodec.encode(user, exp)
await save(f"{user.id}-{user.username}-{one_time_token}", life_span)
return one_time_token
@staticmethod
async def create_session(user: AuthInput) -> str:
life_span = SESSION_TOKEN_LIFE_SPAN
exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
session_token = JWTCodec.encode(user, exp)
await save(f"{user.id}-{user.username}-{session_token}", life_span)
return session_token
@staticmethod
async def revoke(token: str) -> bool:
payload = None
try:
print("[auth.tokenstorage] revoke token")
payload = JWTCodec.decode(token)
except: # noqa
pass
else:
await redis.execute("DEL", f"{payload.user_id}-{payload.username}-{token}")
return True
@staticmethod
async def revoke_all(user: AuthInput):
tokens = await redis.execute("KEYS", f"{user.id}-*")
await redis.execute("DEL", *tokens)

auth/validations.py Normal file

@@ -0,0 +1,126 @@
import re
from datetime import datetime
from typing import Optional, Union
from pydantic import BaseModel, Field, field_validator
# RFC 5322 compliant email regex pattern
EMAIL_PATTERN = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"
class AuthInput(BaseModel):
"""Base model for authentication input validation"""
user_id: str = Field(description="Unique user identifier")
username: str = Field(min_length=2, max_length=50)
token: str = Field(min_length=32)
@field_validator("user_id")
@classmethod
def validate_user_id(cls, v: str) -> str:
if not v.strip():
msg = "user_id cannot be empty"
raise ValueError(msg)
return v
class UserRegistrationInput(BaseModel):
"""Validation model for user registration"""
email: str = Field(max_length=254) # Max email length per RFC 5321
password: str = Field(min_length=8, max_length=100)
name: str = Field(min_length=2, max_length=50)
@field_validator("email")
@classmethod
def validate_email(cls, v: str) -> str:
"""Validate email format"""
if not re.match(EMAIL_PATTERN, v):
msg = "Invalid email format"
raise ValueError(msg)
return v.lower()
@field_validator("password")
@classmethod
def validate_password_strength(cls, v: str) -> str:
"""Validate password meets security requirements"""
if not any(c.isupper() for c in v):
msg = "Password must contain at least one uppercase letter"
raise ValueError(msg)
if not any(c.islower() for c in v):
msg = "Password must contain at least one lowercase letter"
raise ValueError(msg)
if not any(c.isdigit() for c in v):
msg = "Password must contain at least one number"
raise ValueError(msg)
if not any(c in "!@#$%^&*()_+-=[]{}|;:,.<>?" for c in v):
msg = "Password must contain at least one special character"
raise ValueError(msg)
return v
class UserLoginInput(BaseModel):
"""Validation model for user login"""
email: str = Field(max_length=254)
password: str = Field(min_length=8, max_length=100)
@field_validator("email")
@classmethod
def validate_email(cls, v: str) -> str:
if not re.match(EMAIL_PATTERN, v):
msg = "Invalid email format"
raise ValueError(msg)
return v.lower()
class TokenPayload(BaseModel):
"""Validation model for JWT token payload"""
user_id: str
username: str
exp: datetime
iat: datetime
scopes: Optional[list[str]] = []
class OAuthInput(BaseModel):
"""Validation model for OAuth input"""
provider: str = Field(pattern="^(google|github|facebook)$")
code: str
redirect_uri: Optional[str] = None
@field_validator("provider")
@classmethod
def validate_provider(cls, v: str) -> str:
valid_providers = ["google", "github", "facebook"]
if v.lower() not in valid_providers:
msg = f"Provider must be one of: {', '.join(valid_providers)}"
raise ValueError(msg)
return v.lower()
class AuthResponse(BaseModel):
"""Validation model for authentication responses"""
success: bool
token: Optional[str] = None
error: Optional[str] = None
user: Optional[dict[str, Union[str, int, bool]]] = None
@field_validator("error")
@classmethod
def validate_error_if_not_success(cls, v: Optional[str], info) -> Optional[str]:
if not info.data.get("success") and not v:
msg = "Error message required when success is False"
raise ValueError(msg)
return v
@field_validator("token")
@classmethod
def validate_token_if_success(cls, v: Optional[str], info) -> Optional[str]:
if info.data.get("success") and not v:
msg = "Token required when success is True"
raise ValueError(msg)
return v
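A hedged sketch of validating registration input with the models above:

from pydantic import ValidationError

try:
    reg = UserRegistrationInput(email="USER@Example.com", password="Str0ng!pass", name="Ann")
    print(reg.email)  # normalized to lowercase: user@example.com
except ValidationError as e:
    print(e.errors())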


@@ -1,57 +0,0 @@
from typing import Any, Callable, Dict, Generic, TypeVar
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from settings import DB_URL
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
def local_session():
return Session(bind=engine, expire_on_commit=False)
DeclarativeBase = declarative_base() # type: Any
class Base(DeclarativeBase):
__table__: Table
__tablename__: str
__new__: Callable
__init__: Callable
__allow_unmapped__ = True
__abstract__ = True
__table_args__ = {"extend_existing": True}
id = Column(Integer, primary_key=True)
def __init_subclass__(cls, **kwargs):
REGISTRY[cls.__name__] = cls
@classmethod
def create(cls: Generic[T], **kwargs) -> Generic[T]:
instance = cls(**kwargs)
return instance.save()
def save(self) -> Generic[T]:
with local_session() as session:
session.add(self)
session.commit()
return self
def update(self, input):
column_names = self.__table__.columns.keys()
for name, value in input.items():
if name in column_names:
setattr(self, name, value)
def dict(self) -> Dict[str, Any]:
column_names = self.__table__.columns.keys()
return {c: getattr(self, c) for c in column_names}


@@ -1,46 +0,0 @@
from asyncio import sleep
from aioredis import from_url
from settings import REDIS_URL
class RedisCache:
def __init__(self, uri=REDIS_URL):
self._uri: str = uri
self._instance = None
async def connect(self):
if self._instance is not None:
return
self._instance = await from_url(self._uri, encoding="utf-8")
# print(self._instance)
async def disconnect(self):
if self._instance is None:
return
await self._instance.close()
# await self._instance.wait_closed() # deprecated
self._instance = None
async def execute(self, command, *args, **kwargs):
while not self._instance:
await sleep(1)
try:
# print("[redis] " + command + ' ' + ' '.join(args))
return await self._instance.execute_command(command, *args, **kwargs)
except Exception:
pass
async def lrange(self, key, start, stop):
# print(f"[redis] LRANGE {key} {start} {stop}")
return await self._instance.lrange(key, start, stop)
async def mget(self, key, *keys):
# print(f"[redis] MGET {key} {keys}")
return await self._instance.mget(key, *keys)
redis = RedisCache()
__all__ = ["redis"]


@@ -1,13 +0,0 @@
from ariadne import MutationType, QueryType, ScalarType
datetime_scalar = ScalarType("DateTime")
@datetime_scalar.serializer
def serialize_datetime(value):
return value.isoformat()
query = QueryType()
mutation = MutationType()
resolvers = [query, mutation, datetime_scalar]

biome.json Normal file

@@ -0,0 +1,109 @@
{
"$schema": "https://biomejs.dev/schemas/2.1.2/schema.json",
"files": {
"includes": [
"**/*.tsx",
"**/*.ts",
"**/*.js",
"**/*.json",
"!dist",
"!node_modules",
"!**/.husky",
"!**/docs",
"!**/gen",
"!**/*.gen.ts",
"!**/*.d.ts"
]
},
"vcs": {
"enabled": true,
"defaultBranch": "dev",
"useIgnoreFile": true,
"clientKind": "git"
},
"assist": { "actions": { "source": { "organizeImports": "on" } } },
"formatter": {
"enabled": true,
"indentStyle": "space",
"indentWidth": 2,
"lineWidth": 108,
"includes": ["**", "!panel/graphql/generated"]
},
"javascript": {
"formatter": {
"enabled": true,
"semicolons": "asNeeded",
"quoteStyle": "single",
"jsxQuoteStyle": "double",
"arrowParentheses": "always",
"trailingCommas": "none"
}
},
"linter": {
"enabled": true,
"includes": ["**", "!**/*.scss", "!**/*.md", "!**/.DS_Store", "!**/*.svg", "!**/*.d.ts"],
"rules": {
"complexity": {
"noForEach": "off",
"noUselessFragments": "off",
"useOptionalChain": "warn",
"useLiteralKeys": "off",
"noExcessiveCognitiveComplexity": "off",
"useSimplifiedLogicExpression": "off"
},
"correctness": {
"useHookAtTopLevel": "off",
"useImportExtensions": "off",
"noUndeclaredDependencies": "off"
},
"a11y": {
"useHeadingContent": "off",
"useKeyWithClickEvents": "off",
"useKeyWithMouseEvents": "off",
"useAnchorContent": "off",
"useValidAnchor": "off",
"useMediaCaption": "off",
"useAltText": "off",
"useButtonType": "off",
"noRedundantAlt": "off",
"noStaticElementInteractions": "off",
"noSvgWithoutTitle": "off",
"noLabelWithoutControl": "off"
},
"performance": {
"noBarrelFile": "off",
"noNamespaceImport": "warn"
},
"style": {
"noNonNullAssertion": "off",
"noUselessElse": "off",
"useBlockStatements": "off",
"noImplicitBoolean": "off",
"useNamingConvention": "off",
"useImportType": "off",
"noDefaultExport": "off",
"useFilenamingConvention": "off",
"useExplicitLengthCheck": "off",
"noParameterAssign": "error",
"useAsConstAssertion": "error",
"useDefaultParameterLast": "error",
"useEnumInitializers": "error",
"useSelfClosingElements": "error",
"useSingleVarDeclarator": "error",
"noUnusedTemplateLiteral": "error",
"useNumberNamespace": "error",
"noInferrableTypes": "error"
},
"suspicious": {
"noConsole": "off",
"noAssignInExpressions": "off",
"useAwait": "off",
"noEmptyBlockStatements": "off"
},
"nursery": {
"noFloatingPromises": "warn",
"noImportCycles": "warn"
}
}
}
}

927
cache/cache.py vendored Normal file
View File

@@ -0,0 +1,927 @@
"""
Caching system for the Discours platform
----------------------------------------
This module provides a comprehensive caching solution with these key components:
1. KEY NAMING CONVENTIONS:
- Entity-based keys: "entity:property:value" (e.g., "author:id:123")
- Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0")
- Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123")
2. CORE FUNCTIONS:
- cached_query(): High-level function for retrieving cached data or executing queries
3. ENTITY-SPECIFIC FUNCTIONS:
- cache_author(), cache_topic(): Cache entity data
- get_cached_author(), get_cached_topic(): Retrieve entity data from cache
- invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix
4. CACHE INVALIDATION STRATEGY:
- Direct invalidation via invalidate_* functions for immediate changes
- Delayed invalidation via revalidation_manager for background processing
- Event-based triggers for automatic cache updates (see triggers.py)
To maintain consistency with the existing codebase, this module preserves
the original key naming patterns while providing a more structured approach
for new cache operations.
"""
import asyncio
import json
from typing import Any, Callable, Dict, List, Optional, Type, Union
import orjson
from sqlalchemy import and_, join, select
from auth.orm import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger
DEFAULT_FOLLOWS = {
"topics": [],
"authors": [],
"shouts": [],
"communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}
CACHE_TTL = 300 # 5 minutes
# Key templates for common entity types
# These are used throughout the codebase and should be maintained for compatibility
CACHE_KEYS = {
"TOPIC_ID": "topic:id:{}",
"TOPIC_SLUG": "topic:slug:{}",
"TOPIC_AUTHORS": "topic:authors:{}",
"TOPIC_FOLLOWERS": "topic:followers:{}",
"TOPIC_SHOUTS": "topic_shouts_{}",
"AUTHOR_ID": "author:id:{}",
"SHOUTS": "shouts:{}",
}
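# e.g. CACHE_KEYS["AUTHOR_ID"].format(123) -> "author:id:123"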
# Type alias for JSON encoder
JSONEncoderType = Type[json.JSONEncoder]
# Cache topic data
async def cache_topic(topic: dict) -> None:
payload = fast_json_dumps(topic)
await asyncio.gather(
redis.execute("SET", f"topic:id:{topic['id']}", payload),
redis.execute("SET", f"topic:slug:{topic['slug']}", payload),
)
# Cache author data
async def cache_author(author: dict) -> None:
payload = fast_json_dumps(author)
await asyncio.gather(
redis.execute("SET", f"author:slug:{author['slug'].strip()}", str(author["id"])),
redis.execute("SET", f"author:id:{author['id']}", payload),
)
# Cache follows data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert: bool = True) -> None:
key = f"author:follows-{entity_type}s:{follower_id}"
follows_str = await redis.execute("GET", key)
if follows_str:
follows = orjson.loads(follows_str)
# For most types start from an empty list of IDs; communities get defaults
elif entity_type == "community":
follows = DEFAULT_FOLLOWS.get("communities", [])
else:
follows = []
if is_insert:
if entity_id not in follows:
follows.append(entity_id)
else:
follows = [eid for eid in follows if eid != entity_id]
await redis.execute("SET", key, fast_json_dumps(follows))
await update_follower_stat(follower_id, entity_type, len(follows))
# Update follower statistics
async def update_follower_stat(follower_id: int, entity_type: str, count: int) -> None:
follower_key = f"author:id:{follower_id}"
follower_str = await redis.execute("GET", follower_key)
follower = orjson.loads(follower_str) if follower_str else None
if follower:
follower["stat"] = {f"{entity_type}s": count}
await cache_author(follower)
# Get author from cache
async def get_cached_author(author_id: int, get_with_stat) -> dict | None:
logger.debug(f"[get_cached_author] Начало выполнения для author_id: {author_id}")
author_key = f"author:id:{author_id}"
logger.debug(f"[get_cached_author] Проверка кэша по ключу: {author_key}")
result = await redis.execute("GET", author_key)
if result:
logger.debug(f"[get_cached_author] Найдены данные в кэше, размер: {len(result)} байт")
cached_data = orjson.loads(result)
logger.debug(
f"[get_cached_author] Кэшированные данные имеют ключи: {list(cached_data.keys()) if cached_data else 'None'}"
)
return cached_data
logger.debug("[get_cached_author] Данные не найдены в кэше, загрузка из БД")
# Load from database if not found in cache
q = select(Author).where(Author.id == author_id)
authors = get_with_stat(q)
logger.debug(f"[get_cached_author] Результат запроса из БД: {len(authors) if authors else 0} записей")
if authors:
author = authors[0]
logger.debug(f"[get_cached_author] Получен автор из БД: {type(author)}, id: {getattr(author, 'id', 'N/A')}")
# Используем безопасный вызов dict() для Author
author_dict = author.dict() if hasattr(author, "dict") else author.__dict__
logger.debug(
f"[get_cached_author] Сериализованные данные автора: {list(author_dict.keys()) if author_dict else 'None'}"
)
await cache_author(author_dict)
logger.debug("[get_cached_author] Автор кэширован")
return author_dict
logger.warning(f"[get_cached_author] Автор с ID {author_id} не найден в БД")
return None
# Function to get cached topic
async def get_cached_topic(topic_id: int) -> dict | None:
"""
Fetch topic data from cache or database by id.
Args:
topic_id (int): The identifier for the topic.
Returns:
dict: Topic data or None if not found.
"""
topic_key = f"topic:id:{topic_id}"
cached_topic = await redis.execute("GET", topic_key)
if cached_topic:
return orjson.loads(cached_topic)
# If not in cache, fetch from the database
with local_session() as session:
topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
if topic:
topic_dict = topic.dict()
await redis.execute("SET", topic_key, fast_json_dumps(topic_dict))
return topic_dict
return None
# Get topic by slug from cache
async def get_cached_topic_by_slug(slug: str, get_with_stat) -> dict | None:
topic_key = f"topic:slug:{slug}"
result = await redis.execute("GET", topic_key)
if result:
return orjson.loads(result)
# Load from database if not found in cache
topic_query = select(Topic).where(Topic.slug == slug)
topics = get_with_stat(topic_query)
if topics:
topic_dict = topics[0].dict()
await cache_topic(topic_dict)
return topic_dict
return None
# Get list of authors by ID from cache
async def get_cached_authors_by_ids(author_ids: list[int]) -> list[dict]:
# Fetch all author data concurrently
keys = [f"author:id:{author_id}" for author_id in author_ids]
results = await asyncio.gather(*(redis.execute("GET", key) for key in keys))
authors = [orjson.loads(result) if result else None for result in results]
# Load missing authors from database and cache
missing_indices = [index for index, author in enumerate(authors) if author is None]
if missing_indices:
missing_ids = [author_ids[index] for index in missing_indices]
with local_session() as session:
query = select(Author).where(Author.id.in_(missing_ids))
missing_authors = session.execute(query).scalars().unique().all()
await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
for index, author in zip(missing_indices, missing_authors):
authors[index] = author.dict()
# Filter out None values so the return type is correct
return [author for author in authors if author is not None]
async def get_cached_topic_followers(topic_id: int):
"""
Get a topic's followers by ID, using the Redis cache.
Args:
topic_id: topic ID
Returns:
List[dict]: list of followers with their data
"""
try:
cache_key = CACHE_KEYS["TOPIC_FOLLOWERS"].format(topic_id)
cached = await redis.execute("GET", cache_key)
if cached:
followers_ids = orjson.loads(cached)
logger.debug(f"Found {len(followers_ids)} cached followers for topic #{topic_id}")
return await get_cached_authors_by_ids(followers_ids)
with local_session() as session:
followers_ids = [
f[0]
for f in session.query(Author.id)
.join(TopicFollower, TopicFollower.follower == Author.id)
.filter(TopicFollower.topic == topic_id)
.all()
]
await redis.execute("SETEX", cache_key, CACHE_TTL, fast_json_dumps(followers_ids))
followers = await get_cached_authors_by_ids(followers_ids)
logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}")
return followers
except Exception as e:
logger.error(f"Error getting followers for topic #{topic_id}: {e!s}")
return []
# Get cached author followers
async def get_cached_author_followers(author_id: int):
# Check cache for data
cached = await redis.execute("GET", f"author:followers:{author_id}")
if cached:
followers_ids = orjson.loads(cached)
followers = await get_cached_authors_by_ids(followers_ids)
logger.debug(f"Cached followers for author #{author_id}: {len(followers)}")
return followers
# Query database if cache is empty
with local_session() as session:
followers_ids = [
f[0]
for f in session.query(Author.id)
.join(AuthorFollower, AuthorFollower.follower == Author.id)
.filter(AuthorFollower.author == author_id, Author.id != author_id)
.all()
]
await redis.execute("SET", f"author:followers:{author_id}", fast_json_dumps(followers_ids))
return await get_cached_authors_by_ids(followers_ids)
# Get cached follower authors
async def get_cached_follower_authors(author_id: int):
# Attempt to retrieve authors from cache
cached = await redis.execute("GET", f"author:follows-authors:{author_id}")
if cached:
authors_ids = orjson.loads(cached)
else:
# Query authors from database
with local_session() as session:
authors_ids = [
a[0]
for a in session.execute(
select(Author.id)
.select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
.where(AuthorFollower.follower == author_id)
).all()
]
await redis.execute("SET", f"author:follows-authors:{author_id}", fast_json_dumps(authors_ids))
return await get_cached_authors_by_ids(authors_ids)
# Get cached follower topics
async def get_cached_follower_topics(author_id: int):
# Attempt to retrieve topics from cache
cached = await redis.execute("GET", f"author:follows-topics:{author_id}")
if cached:
topics_ids = orjson.loads(cached)
else:
# Load topics from database and cache them
with local_session() as session:
topics_ids = [
t[0]
for t in session.query(Topic.id)
.join(TopicFollower, TopicFollower.topic == Topic.id)
.where(TopicFollower.follower == author_id)
.all()
]
await redis.execute("SET", f"author:follows-topics:{author_id}", fast_json_dumps(topics_ids))
topics = []
for topic_id in topics_ids:
topic_str = await redis.execute("GET", f"topic:id:{topic_id}")
if topic_str:
topic = orjson.loads(topic_str)
if topic and topic not in topics:
topics.append(topic)
logger.debug(f"Cached topics for author#{author_id}: {len(topics)}")
return topics
# Get author by author_id from cache
async def get_cached_author_by_id(author_id: int, get_with_stat):
"""
Retrieve author information by author_id, checking the cache first, then the database.
Args:
author_id (int): The author identifier for which to retrieve the author.
Returns:
dict: Dictionary with author data or None if not found.
"""
# Attempt to find author data by author_id in Redis cache
cached_author_data = await redis.execute("GET", f"author:id:{author_id}")
if cached_author_data:
# If data is found, return parsed JSON
return orjson.loads(cached_author_data)
# If data is not found in cache, query the database
author_query = select(Author).where(Author.id == author_id)
authors = get_with_stat(author_query)
if authors:
# Cache the retrieved author data
author = authors[0]
author_dict = author.dict()
await asyncio.gather(
redis.execute("SET", f"author:id:{author.id}", fast_json_dumps(author_dict)),
)
return author_dict
# Return None if author is not found
return None
# Get cached topic authors
async def get_cached_topic_authors(topic_id: int):
"""
Retrieve a list of authors for a given topic, using cache or database.
Args:
topic_id (int): The identifier of the topic for which to retrieve authors.
Returns:
List[dict]: A list of dictionaries containing author data.
"""
# Attempt to get a list of author IDs from cache
rkey = f"topic:authors:{topic_id}"
cached_authors_ids = await redis.execute("GET", rkey)
if cached_authors_ids:
authors_ids = orjson.loads(cached_authors_ids)
else:
# If cache is empty, get data from the database
with local_session() as session:
query = (
select(ShoutAuthor.author)
.select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.where(
and_(
ShoutTopic.topic == topic_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
)
authors_ids = [author_id for (author_id,) in session.execute(query).all()]
# Cache the retrieved author IDs
await redis.execute("SET", rkey, fast_json_dumps(authors_ids))
# Retrieve full author details from cached IDs
if authors_ids:
authors = await get_cached_authors_by_ids(authors_ids)
logger.debug(f"Topic#{topic_id} authors fetched and cached: {len(authors)} authors found.")
return authors
return []
async def invalidate_shouts_cache(cache_keys: list[str]) -> None:
"""
Invalidates cached shout selections for the given keys.
"""
for cache_key in cache_keys:
try:
# Delete the main cache entry
await redis.execute("DEL", cache_key)
logger.debug(f"Invalidated cache key: {cache_key}")
# Mark the key as invalidated, with a TTL
await redis.execute("SETEX", f"{cache_key}:invalidated", CACHE_TTL, "1")
# If this is a topic cache, invalidate the related keys as well
if cache_key.startswith("topic_"):
topic_id = cache_key.split("_")[1]
related_keys = [
f"topic:id:{topic_id}",
f"topic:authors:{topic_id}",
f"topic:followers:{topic_id}",
f"topic:stats:{topic_id}",
]
for related_key in related_keys:
await redis.execute("DEL", related_key)
logger.debug(f"Invalidated related key: {related_key}")
except Exception as e:
logger.error(f"Error invalidating cache key {cache_key}: {e}")
async def cache_topic_shouts(topic_id: int, shouts: list[dict]) -> None:
"""Кэширует список публикаций для темы"""
key = f"topic_shouts_{topic_id}"
payload = fast_json_dumps(shouts)
await redis.execute("SETEX", key, CACHE_TTL, payload)
async def get_cached_topic_shouts(topic_id: int) -> list[dict]:
"""Получает кэшированный список публикаций для темы"""
key = f"topic_shouts_{topic_id}"
cached = await redis.execute("GET", key)
if cached:
return orjson.loads(cached)
return []
async def cache_related_entities(shout: Shout) -> None:
"""
Caches all entities related to a shout (authors and topics)
"""
tasks = [cache_by_id(Author, author.id, cache_author) for author in shout.authors]
tasks.extend(cache_by_id(Topic, topic.id, cache_topic) for topic in shout.topics)
await asyncio.gather(*tasks)
async def invalidate_shout_related_cache(shout: Shout, author_id: int) -> None:
"""
Invalidates all cache entries related to a shout and its relations
Args:
shout: the shout object
author_id: author ID
"""
cache_keys = {
"feed", # main feed
f"author_{author_id}", # the author's shouts
"random_top", # random top shouts
"unrated", # unrated shouts
"recent", # recent shouts
"coauthored", # co-authored shouts
}
# Add author keys
cache_keys.update(f"author_{a.id}" for a in shout.authors)
cache_keys.update(f"authored_{a.id}" for a in shout.authors)
# Add topic keys
cache_keys.update(f"topic_{t.id}" for t in shout.topics)
cache_keys.update(f"topic_shouts_{t.id}" for t in shout.topics)
await invalidate_shouts_cache(list(cache_keys))
# Function removed - direct Redis calls used throughout the module instead
async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_method):
"""
Generic helper for fetching a cached entity
Args:
entity_type: 'author' or 'topic'
entity_id: entity ID
get_method: database fetch method
cache_method: caching method
"""
key = f"{entity_type}:id:{entity_id}"
cached = await redis.execute("GET", key)
if cached:
return orjson.loads(cached)
entity = await get_method(entity_id)
if entity:
await cache_method(entity)
return entity
return None
async def cache_by_id(entity, entity_id: int, cache_method):
"""
Caches an entity by ID using the given caching method
Args:
entity: entity class (Author/Topic)
entity_id: entity ID
cache_method: caching function
"""
from resolvers.stat import get_with_stat
caching_query = select(entity).filter(entity.id == entity_id)
result = get_with_stat(caching_query)
if not result or not result[0]:
logger.warning(f"{entity.__name__} with id {entity_id} not found")
return None
x = result[0]
d = x.dict()
await cache_method(d)
return d
# Generic helper for saving data to the cache
async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None:
"""
Saves data to the cache under the given key.
Args:
key: cache key
data: data to store
ttl: cache TTL in seconds (None - indefinite)
"""
try:
payload = fast_json_dumps(data)
if ttl:
await redis.execute("SETEX", key, ttl, payload)
else:
await redis.execute("SET", key, payload)
logger.debug(f"Data saved to cache under key {key}")
except Exception as e:
logger.error(f"Error saving data to cache: {e}")
# Generic helper for reading data from the cache
async def get_cached_data(key: str) -> Optional[Any]:
"""
Reads data from the cache by the given key.
Args:
key: cache key
Returns:
Any: cached data, or None if nothing is stored
"""
try:
cached_data = await redis.execute("GET", key)
if cached_data:
loaded = orjson.loads(cached_data)
# Avoid len() here: the payload may be a scalar without a length
logger.debug(f"Cache hit for key {key}")
return loaded
return None
except Exception as e:
logger.error(f"Error reading data from cache: {e}")
return None
# Generic helper for invalidating cache keys by prefix
async def invalidate_cache_by_prefix(prefix: str) -> None:
"""
Invalidates all cache keys with the given prefix.
Args:
prefix: prefix of the cache keys to invalidate
"""
try:
keys = await redis.execute("KEYS", f"{prefix}:*")
if keys:
await redis.execute("DEL", *keys)
logger.debug(f"Deleted {len(keys)} cache keys with prefix {prefix}")
except Exception as e:
logger.error(f"Error invalidating cache: {e}")
# Generic helper for fetching and caching data
async def cached_query(
cache_key: str,
query_func: Callable,
ttl: Optional[int] = None,
force_refresh: bool = False,
use_key_format: bool = True,
**query_params,
) -> Any:
"""
Gets data from cache or executes query and saves result to cache.
Supports existing key formats for compatibility.
Args:
cache_key: Cache key or key template from CACHE_KEYS
query_func: Function to execute the query
ttl: Cache TTL in seconds (None - indefinite)
force_refresh: Force cache refresh
use_key_format: Whether to check if cache_key matches a key template in CACHE_KEYS
**query_params: Parameters to pass to the query function
Returns:
Any: Data from cache or query result
"""
# Check if cache_key matches a pattern in CACHE_KEYS
actual_key = cache_key
if use_key_format and "{}" in cache_key:
# Look for a template match in CACHE_KEYS
for key_format in CACHE_KEYS.values():
if cache_key == key_format:
# We have a match, now look for the id or value to format with
for param_name, param_value in query_params.items():
if param_name in ["id", "slug", "user", "topic_id", "author_id"]:
actual_key = cache_key.format(param_value)
break
# If not forcing refresh, try to get data from cache
if not force_refresh:
cached_result = await get_cached_data(actual_key)
if cached_result is not None:
return cached_result
# If data not in cache or refresh required, execute query
try:
result = await query_func(**query_params)
if result is not None:
# Save result to cache
await cache_data(actual_key, result, ttl)
return result
except Exception as e:
logger.error(f"Error executing query for caching: {e}")
# In case of error, return data from cache if not forcing refresh
if not force_refresh:
return await get_cached_data(actual_key)
raise
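# Usage sketch (illustrative; _fetch_topic_shouts is a hypothetical fetcher):
#
#     async def _fetch_topic_shouts(topic_id: int) -> list[dict]:
#         ...  # run the database query
#
#     shouts = await cached_query(
#         CACHE_KEYS["TOPIC_SHOUTS"],  # template "topic_shouts_{}"
#         _fetch_topic_shouts,
#         ttl=CACHE_TTL,
#         topic_id=123,  # formats the key to "topic_shouts_123"
#     )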
async def save_topic_to_cache(topic: Dict[str, Any]) -> None:
"""Сохраняет топик в кеш"""
try:
topic_id = topic.get("id")
if not topic_id:
return
topic_key = f"topic:{topic_id}"
payload = fast_json_dumps(topic)
await redis.execute("SET", topic_key, payload)
await redis.execute("EXPIRE", topic_key, 3600) # 1 час
logger.debug(f"Topic {topic_id} saved to cache")
except Exception as e:
logger.error(f"Failed to save topic to cache: {e}")
async def save_author_to_cache(author: Dict[str, Any]) -> None:
"""Сохраняет автора в кеш"""
try:
author_id = author.get("id")
if not author_id:
return
author_key = f"author:{author_id}"
payload = fast_json_dumps(author)
await redis.execute("SET", author_key, payload)
await redis.execute("EXPIRE", author_key, 1800) # 30 минут
logger.debug(f"Author {author_id} saved to cache")
except Exception as e:
logger.error(f"Failed to save author to cache: {e}")
async def cache_follows_by_follower(author_id: int, follows: List[Dict[str, Any]]) -> None:
"""Кеширует подписки пользователя"""
try:
key = f"follows:author:{author_id}"
await redis.execute("SET", key, fast_json_dumps(follows))
await redis.execute("EXPIRE", key, 1800) # 30 минут
logger.debug(f"Follows cached for author {author_id}")
except Exception as e:
logger.error(f"Failed to cache follows: {e}")
async def get_topic_from_cache(topic_id: Union[int, str]) -> Optional[Dict[str, Any]]:
"""Получает топик из кеша"""
try:
topic_key = f"topic:{topic_id}"
cached_data = await redis.get(topic_key)
if cached_data:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return json.loads(cached_data)
return None
except Exception as e:
logger.error(f"Failed to get topic from cache: {e}")
return None
async def get_author_from_cache(author_id: Union[int, str]) -> Optional[Dict[str, Any]]:
"""Получает автора из кеша"""
try:
author_key = f"author:{author_id}"
cached_data = await redis.get(author_key)
if cached_data:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return json.loads(cached_data)
return None
except Exception as e:
logger.error(f"Failed to get author from cache: {e}")
return None
async def cache_topic_with_content(topic_dict: Dict[str, Any]) -> None:
"""Кеширует топик с контентом"""
try:
topic_id = topic_dict.get("id")
if topic_id:
topic_key = f"topic_content:{topic_id}"
await redis.execute("SET", topic_key, fast_json_dumps(topic_dict))
await redis.execute("EXPIRE", topic_key, 7200) # 2 часа
logger.debug(f"Topic content {topic_id} cached")
except Exception as e:
logger.error(f"Failed to cache topic content: {e}")
async def get_cached_topic_content(topic_id: Union[int, str]) -> Optional[Dict[str, Any]]:
"""Получает кешированный контент топика"""
try:
topic_key = f"topic_content:{topic_id}"
cached_data = await redis.get(topic_key)
if cached_data:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return json.loads(cached_data)
return None
except Exception as e:
logger.error(f"Failed to get cached topic content: {e}")
return None
async def save_shouts_to_cache(shouts: List[Dict[str, Any]], cache_key: str = "recent_shouts") -> None:
"""Сохраняет статьи в кеш"""
try:
payload = fast_json_dumps(shouts)
await redis.execute("SET", cache_key, payload)
await redis.execute("EXPIRE", cache_key, 900) # 15 минут
logger.debug(f"Shouts saved to cache with key: {cache_key}")
except Exception as e:
logger.error(f"Failed to save shouts to cache: {e}")
async def get_shouts_from_cache(cache_key: str = "recent_shouts") -> Optional[List[Dict[str, Any]]]:
"""Получает статьи из кеша"""
try:
cached_data = await redis.get(cache_key)
if cached_data:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return json.loads(cached_data)
return None
except Exception as e:
logger.error(f"Failed to get shouts from cache: {e}")
return None
async def cache_search_results(query: str, data: List[Dict[str, Any]], ttl: int = 600) -> None:
"""Кеширует результаты поиска"""
try:
search_key = f"search:{query.lower().replace(' ', '_')}"
payload = fast_json_dumps(data)
await redis.execute("SET", search_key, payload)
await redis.execute("EXPIRE", search_key, ttl)
logger.debug(f"Search results cached for query: {query}")
except Exception as e:
logger.error(f"Failed to cache search results: {e}")
async def get_cached_search_results(query: str) -> Optional[List[Dict[str, Any]]]:
"""Получает кешированные результаты поиска"""
try:
search_key = f"search:{query.lower().replace(' ', '_')}"
cached_data = await redis.get(search_key)
if cached_data:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return json.loads(cached_data)
return None
except Exception as e:
logger.error(f"Failed to get cached search results: {e}")
return None
async def invalidate_topic_cache(topic_id: Union[int, str]) -> None:
"""Инвалидирует кеш топика"""
try:
topic_key = f"topic:{topic_id}"
content_key = f"topic_content:{topic_id}"
await redis.delete(topic_key)
await redis.delete(content_key)
logger.debug(f"Cache invalidated for topic {topic_id}")
except Exception as e:
logger.error(f"Failed to invalidate topic cache: {e}")
async def invalidate_author_cache(author_id: Union[int, str]) -> None:
"""Инвалидирует кеш автора"""
try:
author_key = f"author:{author_id}"
follows_key = f"follows:author:{author_id}"
await redis.delete(author_key)
await redis.delete(follows_key)
logger.debug(f"Cache invalidated for author {author_id}")
except Exception as e:
logger.error(f"Failed to invalidate author cache: {e}")
async def clear_all_cache() -> None:
"""
Clears the entire Redis cache (use with caution!)
Warning:
This function deletes ALL data from Redis!
Use it only in a test environment or in a critical emergency.
"""
try:
await redis.execute("FLUSHDB")
logger.info("Entire cache cleared")
except Exception as e:
logger.error(f"Error clearing cache: {e}")
async def invalidate_topic_followers_cache(topic_id: int) -> None:
"""
Invalidates follower caches when a topic is deleted.
This function:
1. Fetches the list of all followers of the topic
2. Invalidates each follower's personal follow caches
3. Invalidates the topic's own caches
4. Logs the process for debugging
Args:
topic_id: ID of the topic whose follower caches should be invalidated
"""
try:
logger.debug(f"Invalidating follower caches for topic {topic_id}")
# Fetch the list of all topic followers from the database
with local_session() as session:
followers_query = session.query(TopicFollower.follower).filter(TopicFollower.topic == topic_id)
follower_ids = [row[0] for row in followers_query.all()]
logger.debug(f"Found {len(follower_ids)} followers of topic {topic_id}")
# Invalidate the follow caches for every follower
for follower_id in follower_ids:
cache_keys_to_delete = [
f"author:follows-topics:{follower_id}", # topics the author follows
f"author:followers:{follower_id}", # the author's follower counter
f"author:stat:{follower_id}", # the author's overall stats
f"author:id:{follower_id}", # cached author data
]
for cache_key in cache_keys_to_delete:
try:
await redis.execute("DEL", cache_key)
logger.debug(f"Deleted cache key: {cache_key}")
except Exception as e:
logger.error(f"Error deleting cache key {cache_key}: {e}")
# Invalidate the topic's own caches
topic_cache_keys = [
f"topic:followers:{topic_id}", # topic follower list
f"topic:id:{topic_id}", # topic data by ID
f"topic:authors:{topic_id}", # topic authors
f"topic_shouts_{topic_id}", # topic shouts (legacy format)
]
for cache_key in topic_cache_keys:
try:
await redis.execute("DEL", cache_key)
logger.debug(f"Deleted topic cache key: {cache_key}")
except Exception as e:
logger.error(f"Error deleting topic cache key {cache_key}: {e}")
# Also find and delete collection caches that include this topic
try:
collection_keys = await redis.execute("KEYS", "topics:stats:*")
if collection_keys:
await redis.execute("DEL", *collection_keys)
logger.debug(f"Deleted {len(collection_keys)} topic collection keys")
except Exception as e:
logger.error(f"Error deleting collection caches: {e}")
logger.info(f"Successfully invalidated caches for topic {topic_id} and {len(follower_ids)} followers")
except Exception as e:
logger.error(f"Error invalidating follower caches for topic {topic_id}: {e}")
raise

195
cache/precache.py vendored Normal file
View File

@@ -0,0 +1,195 @@
import asyncio
from sqlalchemy import and_, join, select
from auth.orm import Author, AuthorFollower
from cache.cache import cache_author, cache_topic
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.db import local_session
from services.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger
# Precache an author's followers
async def precache_authors_followers(author_id, session) -> None:
authors_followers: set[int] = set()
followers_query = select(AuthorFollower.follower).where(AuthorFollower.author == author_id)
result = session.execute(followers_query)
authors_followers.update(row[0] for row in result if row[0])
followers_payload = fast_json_dumps(list(authors_followers))
await redis.execute("SET", f"author:followers:{author_id}", followers_payload)
# Precache an author's follows
async def precache_authors_follows(author_id, session) -> None:
follows_topics_query = select(TopicFollower.topic).where(TopicFollower.follower == author_id)
follows_authors_query = select(AuthorFollower.author).where(AuthorFollower.follower == author_id)
follows_shouts_query = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == author_id)
follows_topics = {row[0] for row in session.execute(follows_topics_query) if row[0]}
follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}
follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]}
topics_payload = fast_json_dumps(list(follows_topics))
authors_payload = fast_json_dumps(list(follows_authors))
shouts_payload = fast_json_dumps(list(follows_shouts))
await asyncio.gather(
redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload),
redis.execute("SET", f"author:follows-authors:{author_id}", authors_payload),
redis.execute("SET", f"author:follows-shouts:{author_id}", shouts_payload),
)
# Precache topic authors
async def precache_topics_authors(topic_id: int, session) -> None:
topic_authors_query = (
select(ShoutAuthor.author)
.select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.filter(
and_(
ShoutTopic.topic == topic_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
)
)
topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]}
authors_payload = fast_json_dumps(list(topic_authors))
await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload)
# Precache topic followers
async def precache_topics_followers(topic_id: int, session) -> None:
followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id)
topic_followers = {row[0] for row in session.execute(followers_query) if row[0]}
followers_payload = fast_json_dumps(list(topic_followers))
await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload)
async def precache_data() -> None:
logger.info("precaching...")
logger.debug("Entering precache_data")
try:
# Key patterns that must survive the FLUSHDB
preserve_patterns = [
"migrated_views_*", # view-migration data
"session:*", # user sessions
"env_vars:*", # environment variables
"oauth_*", # OAuth tokens
]
# Save all important keys before flushing
all_keys_to_preserve = []
preserved_data = {}
for pattern in preserve_patterns:
keys = await redis.execute("KEYS", pattern)
if keys:
all_keys_to_preserve.extend(keys)
logger.info(f"Найдено {len(keys)} ключей по паттерну '{pattern}'")
if all_keys_to_preserve:
logger.info(f"Сохраняем {len(all_keys_to_preserve)} важных ключей перед FLUSHDB")
for key in all_keys_to_preserve:
try:
# Determine the key type and save its data
key_type = await redis.execute("TYPE", key)
if key_type == "hash":
preserved_data[key] = await redis.execute("HGETALL", key)
elif key_type == "string":
preserved_data[key] = await redis.execute("GET", key)
elif key_type == "set":
preserved_data[key] = await redis.execute("SMEMBERS", key)
elif key_type == "list":
preserved_data[key] = await redis.execute("LRANGE", key, 0, -1)
elif key_type == "zset":
preserved_data[key] = await redis.execute("ZRANGE", key, 0, -1, "WITHSCORES")
except Exception as e:
logger.error(f"Ошибка при сохранении ключа {key}: {e}")
continue
await redis.execute("FLUSHDB")
logger.debug("Redis database flushed")
logger.info("redis: FLUSHDB")
# Restore all preserved keys
if preserved_data:
logger.info(f"Restoring {len(preserved_data)} preserved keys")
for key, data in preserved_data.items():
try:
if isinstance(data, dict) and data:
# Hash
flattened = []
for field, val in data.items():
flattened.extend([field, val])
if flattened:
await redis.execute("HSET", key, *flattened)
elif isinstance(data, str) and data:
# String
await redis.execute("SET", key, data)
elif isinstance(data, list) and data:
# List or ZSet
if any(isinstance(item, (list, tuple)) and len(item) == 2 for item in data):
# ZSet with scores
for item in data:
if isinstance(item, (list, tuple)) and len(item) == 2:
await redis.execute("ZADD", key, item[1], item[0])
else:
# Regular list (RPUSH preserves the original LRANGE order)
await redis.execute("RPUSH", key, *data)
elif isinstance(data, set) and data:
# Set
await redis.execute("SADD", key, *data)
except Exception as e:
logger.error(f"Ошибка при восстановлении ключа {key}: {e}")
continue
logger.info("Beginning topic precache phase")
with local_session() as session:
# topics
q = select(Topic).where(Topic.community == 1)
topics = get_with_stat(q)
logger.info(f"Found {len(topics)} topics to precache")
for topic in topics:
topic_dict = topic.dict() if hasattr(topic, "dict") else topic
# logger.debug(f"Precaching topic id={topic_dict.get('id')}")
await cache_topic(topic_dict)
# logger.debug(f"Cached topic id={topic_dict.get('id')}")
await asyncio.gather(
precache_topics_followers(topic_dict["id"], session),
precache_topics_authors(topic_dict["id"], session),
)
# logger.debug(f"Finished precaching followers and authors for topic id={topic_dict.get('id')}")
logger.info(f"{len(topics)} topics and their followings precached")
# authors
authors = get_with_stat(select(Author))
# logger.info(f"{len(authors)} authors found in database")
for author in authors:
if isinstance(author, Author):
profile = author.dict()
author_id = profile.get("id")
# user_id = profile.get("user", "").strip()
if author_id: # and user_id:
await cache_author(profile)
await asyncio.gather(
precache_authors_followers(author_id, session),
precache_authors_follows(author_id, session),
)
# logger.debug(f"Finished precaching followers and follows for author id={author_id}")
else:
logger.error(f"fail caching {author}")
logger.info(f"{len(authors)} authors and their followings precached")
except Exception as exc:
import traceback
traceback.print_exc()
logger.error(f"Error in precache_data: {exc}")

181
cache/revalidator.py vendored Normal file
View File

@@ -0,0 +1,181 @@
import asyncio
import contextlib
from cache.cache import (
cache_author,
cache_topic,
get_cached_author,
get_cached_topic,
invalidate_cache_by_prefix,
)
from resolvers.stat import get_with_stat
from services.redis import redis
from utils.logger import root_logger as logger
CACHE_REVALIDATION_INTERVAL = 300 # 5 minutes
class CacheRevalidationManager:
def __init__(self, interval=CACHE_REVALIDATION_INTERVAL) -> None:
"""Инициализация менеджера с заданным интервалом проверки (в секундах)."""
self.interval = interval
self.items_to_revalidate: dict[str, set[str]] = {
"authors": set(),
"topics": set(),
"shouts": set(),
"reactions": set(),
}
self.lock = asyncio.Lock()
self.running = True
self.MAX_BATCH_SIZE = 10 # maximum number of items to process one by one
self._redis = redis # keep a reference to the Redis client
async def start(self) -> None:
"""Запуск фонового воркера для ревалидации кэша."""
# Проверяем, что у нас есть соединение с Redis
if not self._redis._client:
try:
await self._redis.connect()
logger.info("Redis connection established for revalidation manager")
except Exception as e:
logger.error(f"Failed to connect to Redis: {e}")
self.task = asyncio.create_task(self.revalidate_cache())
async def revalidate_cache(self) -> None:
"""Циклическая проверка и ревалидация кэша каждые self.interval секунд."""
try:
while self.running:
await asyncio.sleep(self.interval)
await self.process_revalidation()
except asyncio.CancelledError:
logger.info("Revalidation worker was stopped.")
except Exception as e:
logger.error(f"An error occurred in the revalidation worker: {e}")
async def process_revalidation(self) -> None:
"""Обновление кэша для всех сущностей, требующих ревалидации."""
# Проверяем соединение с Redis
if not self._redis._client:
return # Выходим из метода, если не удалось подключиться
async with self.lock:
# Ревалидация кэша авторов
if self.items_to_revalidate["authors"]:
logger.debug(f"Revalidating {len(self.items_to_revalidate['authors'])} authors")
for author_id in self.items_to_revalidate["authors"]:
if author_id == "all":
await invalidate_cache_by_prefix("authors")
break
try:
author = await get_cached_author(int(author_id), get_with_stat)
if author:
await cache_author(author)
except ValueError:
logger.warning(f"Invalid author_id: {author_id}")
self.items_to_revalidate["authors"].clear()
# Revalidate the topic cache
if self.items_to_revalidate["topics"]:
logger.debug(f"Revalidating {len(self.items_to_revalidate['topics'])} topics")
for topic_id in self.items_to_revalidate["topics"]:
if topic_id == "all":
await invalidate_cache_by_prefix("topics")
break
try:
topic = await get_cached_topic(int(topic_id))
if topic:
await cache_topic(topic)
except ValueError:
logger.warning(f"Invalid topic_id: {topic_id}")
self.items_to_revalidate["topics"].clear()
# Revalidate shouts (publications)
if self.items_to_revalidate["shouts"]:
shouts_count = len(self.items_to_revalidate["shouts"])
logger.debug(f"Revalidating {shouts_count} shouts")
# Check for the special 'all' flag
if "all" in self.items_to_revalidate["shouts"]:
await invalidate_cache_by_prefix("shouts")
# If there are many items, but not 'all', take the targeted approach
elif shouts_count > self.MAX_BATCH_SIZE:
# Invalidate only the collection keys, which affect many entities
collection_keys = await self._redis.execute("KEYS", "shouts:*")
if collection_keys:
await self._redis.execute("DEL", *collection_keys)
logger.debug(f"Deleted {len(collection_keys)} shout collection keys")
# Refresh the cache of each individual shout
for shout_id in self.items_to_revalidate["shouts"]:
if shout_id != "all":
# Targeted invalidation for each shout_id
specific_keys = [f"shout:id:{shout_id}"]
for key in specific_keys:
await self._redis.execute("DEL", key)
logger.debug(f"Deleted cache key {key}")
else:
# With only a few items, process each one
for shout_id in self.items_to_revalidate["shouts"]:
if shout_id != "all":
# Targeted invalidation for each shout_id
specific_keys = [f"shout:id:{shout_id}"]
for key in specific_keys:
await self._redis.execute("DEL", key)
logger.debug(f"Deleted cache key {key}")
self.items_to_revalidate["shouts"].clear()
# Same for reactions - targeted invalidation
if self.items_to_revalidate["reactions"]:
reactions_count = len(self.items_to_revalidate["reactions"])
logger.debug(f"Revalidating {reactions_count} reactions")
if "all" in self.items_to_revalidate["reactions"]:
await invalidate_cache_by_prefix("reactions")
elif reactions_count > self.MAX_BATCH_SIZE:
# Invalidate only the collection keys for reactions
collection_keys = await self._redis.execute("KEYS", "reactions:*")
if collection_keys:
await self._redis.execute("DEL", *collection_keys)
logger.debug(f"Deleted {len(collection_keys)} reaction collection keys")
# Targeted invalidation for each reaction
for reaction_id in self.items_to_revalidate["reactions"]:
if reaction_id != "all":
specific_keys = [f"reaction:id:{reaction_id}"]
for key in specific_keys:
await self._redis.execute("DEL", key)
logger.debug(f"Deleted cache key {key}")
else:
# Targeted invalidation for each reaction
for reaction_id in self.items_to_revalidate["reactions"]:
if reaction_id != "all":
specific_keys = [f"reaction:id:{reaction_id}"]
for key in specific_keys:
await self._redis.execute("DEL", key)
logger.debug(f"Deleted cache key {key}")
self.items_to_revalidate["reactions"].clear()
def mark_for_revalidation(self, entity_id, entity_type) -> None:
"""Отметить сущность для ревалидации."""
if entity_id and entity_type:
self.items_to_revalidate[entity_type].add(entity_id)
def invalidate_all(self, entity_type) -> None:
"""Пометить для инвалидации все элементы указанного типа."""
logger.debug(f"Marking all {entity_type} for invalidation")
# Особый флаг для полной инвалидации
self.items_to_revalidate[entity_type].add("all")
async def stop(self) -> None:
"""Остановка фонового воркера."""
self.running = False
if hasattr(self, "task"):
self.task.cancel()
with contextlib.suppress(asyncio.CancelledError):
await self.task
revalidation_manager = CacheRevalidationManager()
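# Typical wiring (illustrative): start the worker at application startup,
# mark entities as they change, and stop it on shutdown:
#     await revalidation_manager.start()
#     revalidation_manager.mark_for_revalidation(42, "topics")
#     await revalidation_manager.stop()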

147
cache/triggers.py vendored Normal file
View File

@@ -0,0 +1,147 @@
from sqlalchemy import event
from auth.orm import Author, AuthorFollower
from cache.revalidator import revalidation_manager
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger
def mark_for_revalidation(entity, *args) -> None:
"""Отметка сущности для ревалидации."""
entity_type = (
"authors"
if isinstance(entity, Author)
else "topics"
if isinstance(entity, Topic)
else "reactions"
if isinstance(entity, Reaction)
else "shouts"
if isinstance(entity, Shout)
else None
)
if entity_type:
revalidation_manager.mark_for_revalidation(entity.id, entity_type)
def after_follower_handler(mapper, connection, target, is_delete=False) -> None:
"""Обработчик добавления, обновления или удаления подписки."""
entity_type = None
if isinstance(target, AuthorFollower):
entity_type = "authors"
elif isinstance(target, TopicFollower):
entity_type = "topics"
elif isinstance(target, ShoutReactionsFollower):
entity_type = "shouts"
if entity_type:
# Pick the followed entity's id field to match the follower type
followed_id = (
target.author
if entity_type == "authors"
else target.topic
if entity_type == "topics"
else target.shout
)
revalidation_manager.mark_for_revalidation(followed_id, entity_type)
if not is_delete:
revalidation_manager.mark_for_revalidation(target.follower, "authors")
def after_shout_handler(mapper, connection, target) -> None:
"""Обработчик изменения статуса публикации"""
if not isinstance(target, Shout):
return
# Проверяем изменение статуса публикации
# was_published = target.published_at is not None and target.deleted_at is None
# Всегда обновляем счетчики для авторов и тем при любом изменении поста
for author in target.authors:
revalidation_manager.mark_for_revalidation(author.id, "authors")
for topic in target.topics:
revalidation_manager.mark_for_revalidation(topic.id, "topics")
# Refresh the post itself
revalidation_manager.mark_for_revalidation(target.id, "shouts")
def after_reaction_handler(mapper, connection, target) -> None:
"""Обработчик для комментариев"""
if not isinstance(target, Reaction):
return
# Проверяем что это комментарий
is_comment = target.kind == ReactionKind.COMMENT.value
# Получаем связанный пост
shout_id = target.shout if isinstance(target.shout, int) else target.shout.id
if not shout_id:
return
# Обновляем счетчики для автора комментария
if target.created_by:
revalidation_manager.mark_for_revalidation(target.created_by, "authors")
# Обновляем счетчики для поста
revalidation_manager.mark_for_revalidation(shout_id, "shouts")
if is_comment:
# Для комментариев обновляем также авторов и темы
with local_session() as session:
shout = (
session.query(Shout)
.filter(
Shout.id == shout_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
)
.first()
)
if shout:
for author in shout.authors:
revalidation_manager.mark_for_revalidation(author.id, "authors")
for topic in shout.topics:
revalidation_manager.mark_for_revalidation(topic.id, "topics")
def events_register() -> None:
"""Регистрация обработчиков событий для всех сущностей."""
event.listen(ShoutAuthor, "after_insert", mark_for_revalidation)
event.listen(ShoutAuthor, "after_update", mark_for_revalidation)
event.listen(ShoutAuthor, "after_delete", mark_for_revalidation)
event.listen(AuthorFollower, "after_insert", after_follower_handler)
event.listen(AuthorFollower, "after_update", after_follower_handler)
event.listen(
AuthorFollower,
"after_delete",
lambda mapper, connection, target: after_follower_handler(mapper, connection, target, is_delete=True),
)
event.listen(TopicFollower, "after_insert", after_follower_handler)
event.listen(TopicFollower, "after_update", after_follower_handler)
event.listen(
TopicFollower,
"after_delete",
lambda mapper, connection, target: after_follower_handler(mapper, connection, target, is_delete=True),
)
event.listen(ShoutReactionsFollower, "after_insert", after_follower_handler)
event.listen(ShoutReactionsFollower, "after_update", after_follower_handler)
event.listen(
ShoutReactionsFollower,
"after_delete",
lambda mapper, connection, target: after_follower_handler(mapper, connection, target, is_delete=True),
)
event.listen(Reaction, "after_update", mark_for_revalidation)
event.listen(Author, "after_update", mark_for_revalidation)
event.listen(Topic, "after_update", mark_for_revalidation)
event.listen(Shout, "after_update", after_shout_handler)
event.listen(Shout, "after_delete", after_shout_handler)
event.listen(Reaction, "after_insert", after_reaction_handler)
event.listen(Reaction, "after_update", after_reaction_handler)
event.listen(Reaction, "after_delete", after_reaction_handler)
logger.info("Event handlers registered successfully.")

View File

@@ -1,10 +0,0 @@
#!/usr/bin/env bash
echo "> isort"
isort .
echo "> black"
black .
echo "> flake8"
flake8 .
# echo "> mypy"
# mypy .

View File

@@ -0,0 +1,114 @@
{
"reader": [
"shout:read",
"topic:read",
"collection:read",
"community:read",
"bookmark:read",
"bookmark:create",
"bookmark:update_own",
"bookmark:delete_own",
"invite:read",
"invite:accept",
"invite:decline",
"chat:read",
"chat:create",
"chat:update_own",
"chat:delete_own",
"message:read",
"message:create",
"message:update_own",
"message:delete_own",
"reaction:read:COMMENT",
"reaction:create:COMMENT",
"reaction:update_own:COMMENT",
"reaction:delete_own:COMMENT",
"reaction:read:QUOTE",
"reaction:create:QUOTE",
"reaction:update_own:QUOTE",
"reaction:delete_own:QUOTE",
"reaction:read:LIKE",
"reaction:create:LIKE",
"reaction:update_own:LIKE",
"reaction:delete_own:LIKE",
"reaction:read:DISLIKE",
"reaction:create:DISLIKE",
"reaction:update_own:DISLIKE",
"reaction:delete_own:DISLIKE",
"reaction:read:CREDIT",
"reaction:read:PROOF",
"reaction:read:DISPROOF",
"reaction:read:AGREE",
"reaction:read:DISAGREE"
],
"author": [
"draft:read",
"draft:create",
"draft:update_own",
"draft:delete_own",
"shout:create",
"shout:update_own",
"shout:delete_own",
"collection:create",
"collection:update_own",
"collection:delete_own",
"invite:create",
"invite:update_own",
"invite:delete_own",
"reaction:create:SILENT",
"reaction:read:SILENT",
"reaction:update_own:SILENT",
"reaction:delete_own:SILENT"
],
"artist": [
"reaction:create:CREDIT",
"reaction:read:CREDIT",
"reaction:update_own:CREDIT",
"reaction:delete_own:CREDIT"
],
"expert": [
"reaction:create:PROOF",
"reaction:read:PROOF",
"reaction:update_own:PROOF",
"reaction:delete_own:PROOF",
"reaction:create:DISPROOF",
"reaction:read:DISPROOF",
"reaction:update_own:DISPROOF",
"reaction:delete_own:DISPROOF",
"reaction:create:AGREE",
"reaction:read:AGREE",
"reaction:update_own:AGREE",
"reaction:delete_own:AGREE",
"reaction:create:DISAGREE",
"reaction:read:DISAGREE",
"reaction:update_own:DISAGREE",
"reaction:delete_own:DISAGREE"
],
"editor": [
"shout:delete_any",
"shout:update_any",
"topic:create",
"topic:delete_own",
"topic:update_own",
"topic:merge",
"reaction:delete_any:*",
"reaction:update_any:*",
"invite:delete_any",
"invite:update_any",
"collection:delete_any",
"collection:update_any",
"community:create",
"community:update_own",
"community:delete_own",
"draft:delete_any",
"draft:update_any"
],
"admin": [
"author:delete_any",
"author:update_any",
"chat:delete_any",
"chat:update_any",
"message:delete_any",
"message:update_any"
]
}

143
dev.py Normal file
View File

@@ -0,0 +1,143 @@
import argparse
import subprocess
from pathlib import Path
from typing import Optional
from granian import Granian
from granian.constants import Interfaces
from utils.logger import root_logger as logger
def check_mkcert_installed() -> Optional[bool]:
"""
Checks whether the mkcert tool is installed on the system
Returns:
bool: True if mkcert is installed, otherwise False
>>> check_mkcert_installed() # doctest: +SKIP
True
"""
try:
subprocess.run(["mkcert", "-version"], capture_output=True, check=False)
return True
except FileNotFoundError:
return False
def generate_certificates(domain="localhost", cert_file="localhost.pem", key_file="localhost-key.pem"):
"""
Generates certificates using mkcert
Args:
domain: certificate domain
cert_file: certificate file name
key_file: key file name
Returns:
tuple: (cert_file, key_file) paths to the created files
>>> generate_certificates() # doctest: +SKIP
('localhost.pem', 'localhost-key.pem')
"""
# Check whether the certificates already exist
if Path(cert_file).exists() and Path(key_file).exists():
logger.info(f"Certificates already exist: {cert_file}, {key_file}")
return cert_file, key_file
# Check whether mkcert is installed
if not check_mkcert_installed():
logger.error("mkcert is not installed. Install mkcert with:")
logger.error(" macOS: brew install mkcert")
logger.error(" Linux: apt install mkcert or the equivalent for your distribution")
logger.error(" Windows: choco install mkcert")
logger.error("After installation run: mkcert -install")
return None, None
try:
# Run mkcert to create the certificate
logger.info(f"Creating certificates for {domain} with mkcert...")
result = subprocess.run(
["mkcert", "-cert-file", cert_file, "-key-file", key_file, domain],
capture_output=True,
text=True,
check=False,
)
if result.returncode != 0:
logger.error(f"Error creating certificates: {result.stderr}")
return None, None
logger.info(f"Certificates created: {cert_file}, {key_file}")
return cert_file, key_file
except Exception as e:
logger.error(f"Failed to create certificates: {e!s}")
return None, None
def run_server(host="localhost", port=8000, use_https=False, workers=1, domain="localhost") -> None:
"""
Starts the Granian server, with HTTPS support when requested
Args:
host: server host
port: server port
use_https: whether to use HTTPS
workers: number of worker processes
domain: certificate domain
>>> run_server(use_https=True) # doctest: +SKIP
"""
# Multi-process mode is problematic - it cannot serialize local application objects
# Always run in single-process mode for debugging
if workers > 1:
logger.warning("Multi-process mode can cause application serialization issues. Using 1 process.")
workers = 1
try:
if use_https:
# Generate certificates with mkcert
cert_file, key_file = generate_certificates(domain=domain)
if not cert_file or not key_file:
logger.error("Failed to generate certificates for HTTPS")
return
logger.info(f"Starting HTTPS server at https://{host}:{port} using Granian")
# Start the Granian server with an explicit ASGI interface
server = Granian(
address=host,
port=port,
workers=workers,
interface=Interfaces.ASGI,
target="main:app",
ssl_cert=Path(cert_file),
ssl_key=Path(key_file),
)
else:
logger.info(f"Запуск HTTP сервера на http://{host}:{port} с использованием Granian")
server = Granian(
address=host,
port=port,
workers=workers,
interface=Interfaces.ASGI,
target="main:app",
)
server.serve()
except Exception as e:
# If Granian fails, log the error
logger.error(f"Error starting Granian: {e!s}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the development server with HTTPS support")
parser.add_argument("--https", action="store_true", help="Use HTTPS")
parser.add_argument("--workers", type=int, default=1, help="Number of worker processes")
parser.add_argument("--domain", type=str, default="localhost", help="Certificate domain")
parser.add_argument("--port", type=int, default=8000, help="Server port")
parser.add_argument("--host", type=str, default="localhost", help="Server host")
args = parser.parse_args()
run_server(host=args.host, port=args.port, use_https=args.https, workers=args.workers, domain=args.domain)

116
docs/README.md Normal file
View File

@@ -0,0 +1,116 @@
# Discours.io API Documentation
## 🚀 Quick start
### Running locally
```bash
# Standard startup
python main.py
# With HTTPS (requires mkcert)
python dev.py
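# dev.py also accepts --host, --port, --domain and --workers flags, e.g.:
python dev.py --https --domain localhost --port 8000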
```
## 📚 Documentation
### Authorization and security
- [Authorization system](auth-system.md) - Tokens, sessions, OAuth
- [Architecture](auth-architecture.md) - Diagrams and schemas
- [Migration](auth-migration.md) - Upgrading to the new version
- [Security](security.md) - Passwords, email, RBAC
- [RBAC system](rbac-system.md) - Roles, permissions, topics
- [OAuth](oauth.md) - Google, GitHub, Facebook, X, Telegram, VK, Yandex
- [OAuth setup](oauth-setup.md) - Instructions for configuring OAuth providers
### Functionality
- [Rating system](rating.md) - Likes, dislikes, featured articles
- [Follows](follower.md) - Follow/unfollow logic
- [Caching](caching.md) - Redis, performance
- [Redis data schema](redis-schema.md) - Complete documentation of the data structures
- [Comment pagination](comments-pagination.md) - Hierarchical comments
- [Content loading](load_shouts.md) - Optimized queries
### Administration
- **Admin panel**: Managing users, roles, and environment variables
- **Publication management**: Viewing, searching, filtering by status (published/drafts/deleted)
- **Topic management**: Simplified topic editing with hierarchical display
- **Row click**: The edit modal opens when a table row is clicked
- **Unobtrusive delete**: A gray "×" button that turns red on hover
- **Simple HTML editor**: A plain contenteditable div with a monospaced font
- **Editable fields**: ID (read-only), title, slug, description, community, parents
- **Topic tree**: Visualization of parent-child relations with indentation and `└─` markers
- **Safe deletion**: Warnings about cascading deletion of child topics
- **Auto refresh**: The list reloads after operations, with correct cache invalidation
- **Reaction moderation**: A complete system for managing user reactions
- **View all reactions**: A table with type, text, author, publication, and statistics
- **Filtering by type**: Likes, dislikes, comments, quotes, agree/disagree, questions, proposals, proofs/disproofs
- **Search and filters**: By reaction text, author, email, or publication ID
- **Emoji indicators**: Visual display of reaction types (👍 👎 💬 ❝ ✅ ❌ ❓ 💡 🔬 🚫)
- **Moderation**: Text editing, soft deletion, and restoration
- **Statistics**: Rating and comment count for each reaction
- **Security**: RBAC protection and auditing of all operations
- **Data views**: Body, media, authors, topics with convenient navigation
- **DRY principle**: Reuse of the existing resolvers from reader.py and editor.py
### API and infrastructure
- [API methods](api.md) - GraphQL endpoints
- [System features](features.md) - The full feature list
## ⚡ Ключевые возможности
### Авторизация
- **Модульная архитектура**: SessionTokenManager, VerificationTokenManager, OAuthTokenManager
- **OAuth провайдеры**: 7 поддерживаемых провайдеров с PKCE
- **RBAC**: Система ролей reader/author/artist/expert/editor/admin с наследованием
- **Права на топики**: Специальные разрешения для создания, редактирования и слияния топиков
- **Производительность**: 50% ускорение Redis, 30% меньше памяти
### Nginx (упрощенная конфигурация)
- **KISS принцип**: ~60 строк вместо сложной конфигурации
- **Dokku дефолты**: Максимальное использование встроенных настроек
- **SSL/TLS**: TLS 1.2/1.3, HSTS, OCSP stapling
- **Статические файлы**: Кэширование на 1 год, gzip сжатие
- **Безопасность**: X-Frame-Options, X-Content-Type-Options
### Реакции и комментарии
- **Иерархические комментарии** с эффективной пагинацией
- **Физическое/логическое удаление** (рейтинги/комментарии)
- **Автоматический featured статус** на основе лайков
- **Distinct() оптимизация** для JOIN запросов
### Производительность
- **Redis pipeline операции** для пакетных запросов
- **Автоматическая очистка** истекших токенов
- **Connection pooling** и keepalive
- **Type-safe codebase** (mypy clean)
- **Оптимизированная сортировка авторов** с кешированием по параметрам
## 🔧 Конфигурация
```python
# JWT
JWT_SECRET_KEY = "your-secret-key"
JWT_EXPIRATION_HOURS = 720 # 30 дней
# Redis
REDIS_URL = "redis://localhost:6379/0"
# OAuth (необходимые провайдеры)
OAUTH_CLIENTS_GOOGLE_ID = "..."
OAUTH_CLIENTS_GITHUB_ID = "..."
# ... другие провайдеры
```
## 🛠 Использование API
```python
# Сессии
from auth.tokens.sessions import SessionTokenManager
sessions = SessionTokenManager()
token = await sessions.create_session(user_id, username=username)
# Мониторинг
from auth.tokens.monitoring import TokenMonitoring
monitoring = TokenMonitoring()
stats = await monitoring.get_token_statistics()
```

docs/admin-panel.md Normal file

@@ -0,0 +1,560 @@
# Discours Admin Panel
## Overview
The admin panel is a comprehensive management system for the Discours platform, providing full control over users, publications, communities, and their roles.
## Access Architecture
### Access levels
1. **System administrators** - email listed in the `ADMIN_EMAILS` variable (system management via environment variables)
2. **RBAC roles in communities** - `reader`, `author`, `artist`, `expert`, `editor`, `admin` (managed through the admin panel)
**IMPORTANT**:
- The `admin` role in RBAC is a regular community role managed through the admin panel
- "System administrator" is a synthetic role that is NOT stored in the database
- The synthetic role is added only to API responses, for users listed in `ADMIN_EMAILS`
- The synthetic role is NOT displayed in community views on the frontend
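A minimal sketch of how the synthetic role can be appended to an API response, assuming a comma-separated `ADMIN_EMAILS` setting (the helper name and the response shape are illustrative, not the project's actual code):
```python
# Hypothetical illustration: attaching the synthetic role to API output only.
from settings import ADMIN_EMAILS  # assumed: comma-separated admin emails

def with_synthetic_role(user_dict: dict) -> dict:
    """Derive the synthetic "Системный администратор" role on the fly.

    The role is never persisted: it exists only in the serialized response.
    """
    admin_emails = {e.strip() for e in ADMIN_EMAILS.split(",") if e.strip()}
    if user_dict.get("email") in admin_emails:
        # Copy so the stored roles list stays untouched
        user_dict = {**user_dict, "roles": [*user_dict.get("roles", []), "Системный администратор"]}
    return user_dict
```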
### Security decorators
```python
@admin_auth_required       # System admins only (ADMIN_EMAILS)
@editor_or_admin_required  # Community editors and admins (RBAC roles)
```
## Administration Modules
### 1. User management
#### Fetching the user list
```graphql
query AdminGetUsers(
$limit: Int = 20
$offset: Int = 0
$search: String = ""
) {
adminGetUsers(limit: $limit, offset: $offset, search: $search) {
authors {
id
email
name
slug
roles
created_at
last_seen
}
total
page
perPage
totalPages
}
}
```
**Details:**
- Search by email, name, or ID
- Pagination limited to 1-100 records
- Roles are read from the main community (ID=1)
- The synthetic "System administrator" role is added automatically for emails listed in `ADMIN_EMAILS`
#### Updating a user
```graphql
mutation AdminUpdateUser($user: AdminUserUpdateInput!) {
adminUpdateUser(user: $user) {
success
error
}
}
```
**Supported fields:**
- `email` - with a uniqueness check
- `name` - the user's name
- `slug` - with a uniqueness check
- `roles` - an array of roles for the main community
### 2. Roles and Permissions (RBAC)
#### Role hierarchy
```
reader → author → artist → expert → editor → admin
```
Each role inherits the permissions of the previous ones **only at community initialization** (see the sketch below).
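A minimal sketch of that one-time expansion (only the role order comes from this document; the permission sets and function name are invented for illustration):
```python
# Hypothetical sketch: expanding inherited permissions once, at community init.
ROLE_ORDER = ["reader", "author", "artist", "expert", "editor", "admin"]

BASE_PERMISSIONS = {
    "reader": {"shout:read"},
    "author": {"shout:create"},
    "artist": {"media:upload"},
    "expert": {"reaction:prove"},
    "editor": {"shout:moderate"},
    "admin": {"community:manage"},
}

def expand_roles_at_init() -> dict[str, set[str]]:
    """Give each role its own permissions plus everything earlier in the chain.

    This runs once when a community is initialized; later edits to one
    role's permissions do not ripple into the others.
    """
    expanded: dict[str, set[str]] = {}
    inherited: set[str] = set()
    for role in ROLE_ORDER:
        inherited |= BASE_PERMISSIONS[role]
        expanded[role] = set(inherited)
    return expanded
```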
#### Fetching roles
```graphql
query AdminGetRoles($community: Int) {
adminGetRoles(community: $community) {
id
name
description
}
}
```
- Without `community` - all system roles
- With `community` - the given community's roles plus a permission counter
#### Managing roles in communities
**Fetching a user's roles:**
```graphql
query AdminGetUserCommunityRoles(
$author_id: Int!
$community_id: Int!
) {
adminGetUserCommunityRoles(
author_id: $author_id
community_id: $community_id
) {
author_id
community_id
roles
}
}
```
**Assigning roles:**
```graphql
mutation AdminSetUserCommunityRoles(
$author_id: Int!
$community_id: Int!
$roles: [String!]!
) {
adminSetUserCommunityRoles(
author_id: $author_id
community_id: $community_id
roles: $roles
) {
success
error
author_id
community_id
roles
}
}
```
**Adding a single role:**
```graphql
mutation AdminAddUserToRole(
$author_id: Int!
$role_id: String!
$community_id: Int!
) {
adminAddUserToRole(
author_id: $author_id
role_id: $role_id
community_id: $community_id
) {
success
error
}
}
```
**Removing a role:**
```graphql
mutation AdminRemoveUserFromRole(
$author_id: Int!
$role_id: String!
$community_id: Int!
) {
adminRemoveUserFromRole(
author_id: $author_id
role_id: $role_id
community_id: $community_id
) {
success
removed
}
}
```
### 3. Community management
#### Community members
```graphql
query AdminGetCommunityMembers(
$community_id: Int!
$limit: Int = 20
$offset: Int = 0
) {
adminGetCommunityMembers(
community_id: $community_id
limit: $limit
offset: $offset
) {
members {
id
name
email
slug
roles
}
total
community_id
}
}
```
#### Community role settings
**Fetching the settings:**
```graphql
query AdminGetCommunityRoleSettings($community_id: Int!) {
adminGetCommunityRoleSettings(community_id: $community_id) {
community_id
default_roles
available_roles
error
}
}
```
**Updating the settings:**
```graphql
mutation AdminUpdateCommunityRoleSettings(
$community_id: Int!
$default_roles: [String!]!
$available_roles: [String!]!
) {
adminUpdateCommunityRoleSettings(
community_id: $community_id
default_roles: $default_roles
available_roles: $available_roles
) {
success
error
community_id
default_roles
available_roles
}
}
```
#### Creating a custom role
```graphql
mutation AdminCreateCustomRole($role: CustomRoleInput!) {
adminCreateCustomRole(role: $role) {
success
error
role {
id
name
description
}
}
}
```
#### Deleting a custom role
```graphql
mutation AdminDeleteCustomRole(
$role_id: String!
$community_id: Int!
) {
adminDeleteCustomRole(
role_id: $role_id
community_id: $community_id
) {
success
error
}
}
```
### 4. Publication management
#### Fetching the publication list
```graphql
query AdminGetShouts(
$limit: Int = 20
$offset: Int = 0
$search: String = ""
$status: String = "all"
$community: Int
) {
adminGetShouts(
limit: $limit
offset: $offset
search: $search
status: $status
community: $community
) {
shouts {
id
title
slug
body
lead
subtitle
# ... remaining fields
created_by {
id
email
name
slug
}
community {
id
name
slug
}
authors {
id
email
name
slug
}
topics {
id
title
slug
}
}
total
page
perPage
totalPages
}
}
```
**Publication statuses:**
- `all` - all publications (including deleted)
- `published` - published
- `draft` - drafts
- `deleted` - deleted
#### Publication operations
**Update:**
```graphql
mutation AdminUpdateShout($shout: AdminShoutUpdateInput!) {
adminUpdateShout(shout: $shout) {
success
error
}
}
```
**Deletion (soft):**
```graphql
mutation AdminDeleteShout($shout_id: Int!) {
adminDeleteShout(shout_id: $shout_id) {
success
error
}
}
```
**Restore:**
```graphql
mutation AdminRestoreShout($shout_id: Int!) {
adminRestoreShout(shout_id: $shout_id) {
success
error
}
}
```
### 5. Invite management
#### Fetching the invite list
```graphql
query AdminGetInvites(
$limit: Int = 20
$offset: Int = 0
$search: String = ""
$status: String = "all"
) {
adminGetInvites(
limit: $limit
offset: $offset
search: $search
status: $status
) {
invites {
inviter_id
author_id
shout_id
status
inviter {
id
email
name
slug
}
author {
id
email
name
slug
}
shout {
id
title
slug
created_by {
id
email
name
slug
}
}
}
total
page
perPage
totalPages
}
}
```
**Invite statuses:**
- `PENDING` - awaiting a response
- `ACCEPTED` - accepted
- `REJECTED` - declined
#### Invite operations
**Status update:**
```graphql
mutation AdminUpdateInvite($invite: AdminInviteUpdateInput!) {
adminUpdateInvite(invite: $invite) {
success
error
}
}
```
**Deletion:**
```graphql
mutation AdminDeleteInvite(
$inviter_id: Int!
$author_id: Int!
$shout_id: Int!
) {
adminDeleteInvite(
inviter_id: $inviter_id
author_id: $author_id
shout_id: $shout_id
) {
success
error
}
}
```
**Batch deletion:**
```graphql
mutation AdminDeleteInvitesBatch($invites: [AdminInviteIdInput!]!) {
adminDeleteInvitesBatch(invites: $invites) {
success
error
}
}
```
### 6. Environment variables
System administrators can manage environment variables:
```graphql
query GetEnvVariables {
getEnvVariables {
name
description
variables {
key
value
description
type
isSecret
}
}
}
```
```graphql
mutation UpdateEnvVariable($key: String!, $value: String!) {
updateEnvVariable(key: $key, value: $value) {
success
error
}
}
```
## Implementation Notes
### DRY principle
- Logic reused from `reader.py` and `editor.py`
- Shared utilities in `_get_user_roles()`
- Centralized error handling
### New RBAC system
- Roles are stored in CSV format in `CommunityAuthor.roles`
- Model methods: `add_role()`, `remove_role()`, `set_roles()`, `has_role()` (see the sketch below)
- Permissions are inherited **only at initialization**
- Redis caching of the expanded permissions
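A minimal sketch of what those methods can look like over a CSV column (the method names come from the list above; the implementation details are assumptions):
```python
# Hypothetical sketch of CSV-backed role methods; the real model may differ.
class CommunityAuthor:
    roles: str = ""  # e.g. "reader,author,editor"

    def _role_list(self) -> list[str]:
        return [r for r in self.roles.split(",") if r]

    def has_role(self, role: str) -> bool:
        return role in self._role_list()

    def add_role(self, role: str) -> None:
        roles = self._role_list()
        if role not in roles:
            roles.append(role)
            self.roles = ",".join(roles)

    def remove_role(self, role: str) -> None:
        self.roles = ",".join(r for r in self._role_list() if r != role)

    def set_roles(self, roles: list[str]) -> None:
        self.roles = ",".join(dict.fromkeys(roles))  # de-duplicate, keep order
```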
### Synthetic roles
- **"System administrator"** - added automatically for users listed in `ADMIN_EMAILS`
- NOT stored in the database; present only in API responses
- NOT shown in community management interfaces on the frontend
- Used only to indicate system-level access rights
### Security
- Validation of all input data
- Existence checks for referenced entities
- Access control via decorators
- Logging of all administrative actions
### Performance
- Pagination for all lists
- Indexes on the key fields
- Limits on result-set size (max 100)
- Optimized SQL queries with `joinedload`
## Data Migration
When moving to the new RBAC system, the following function is used:
```python
from orm.community import migrate_old_roles_to_community_author

migrate_old_roles_to_community_author()
```
The function automatically migrates roles from the old tables into the new CSV format.
## Monitoring and Logging
All administrative actions are logged at the INFO level:
- User role changes
- Community settings updates
- Publication operations
- Invite management
Errors are logged at the ERROR level with a full stack trace.
## Best Practices
1. **Always check roles before assigning them**
2. **Use transactions for group operations**
3. **Log critical changes**
4. **Validate access rights at every step**
5. **Apply the principle of least privilege**
## Extending Functionality
To add a new administrative function:
1. Create a resolver with the appropriate decorator
2. Add the GraphQL schema to `schema/admin.graphql`
3. Implement the logic, reusing existing components
4. Add tests and documentation
5. Update the access rights if necessary

docs/api.md Normal file

@@ -0,0 +1,40 @@
## API Documentation
### GraphQL Schema
- Mutations: Authentication, content management, security
- Queries: Content retrieval, user data
- Types: Author, Topic, Shout, Community
### Key Features
#### Security Management
- Password change with validation
- Email change with confirmation
- Two-factor authentication flow
- Protected fields for user privacy
#### Content Management
- Publication system with drafts
- Topic and community organization
- Author collaboration tools
- Real-time notifications
#### Following System
- Subscribe to authors and topics
- Cache-optimized operations
- Consistent UI state management
## Database
### Models
- `Author` - User accounts with RBAC
- `Shout` - Publications and articles
- `Topic` - Content categorization
- `Community` - User groups
### Cache System
- Redis-based caching
- Automatic cache invalidation
- Optimized for real-time updates

docs/auth-architecture.md Normal file

@@ -0,0 +1,253 @@
# Authorization System Architecture
## Data Flow Diagram
```mermaid
graph TB
subgraph "Frontend"
FE[Web Frontend]
MOB[Mobile App]
end
subgraph "Auth Layer"
MW[AuthMiddleware]
DEC[GraphQL Decorators]
HANDLER[Auth Handlers]
end
subgraph "Core Auth"
IDENTITY[Identity]
JWT[JWT Codec]
OAUTH[OAuth Manager]
PERM[Permissions]
end
subgraph "Token System"
TS[TokenStorage]
STM[SessionTokenManager]
VTM[VerificationTokenManager]
OTM[OAuthTokenManager]
BTM[BatchTokenOperations]
MON[TokenMonitoring]
end
subgraph "Storage"
REDIS[(Redis)]
DB[(PostgreSQL)]
end
subgraph "External"
GOOGLE[Google OAuth]
GITHUB[GitHub OAuth]
FACEBOOK[Facebook]
OTHER[Other Providers]
end
FE --> MW
MOB --> MW
MW --> IDENTITY
MW --> JWT
DEC --> PERM
HANDLER --> OAUTH
IDENTITY --> STM
OAUTH --> OTM
TS --> STM
TS --> VTM
TS --> OTM
STM --> REDIS
VTM --> REDIS
OTM --> REDIS
BTM --> REDIS
MON --> REDIS
IDENTITY --> DB
OAUTH --> DB
PERM --> DB
OAUTH --> GOOGLE
OAUTH --> GITHUB
OAUTH --> FACEBOOK
OAUTH --> OTHER
```
## Component Diagram
```mermaid
graph LR
subgraph "HTTP Layer"
REQ[HTTP Request]
RESP[HTTP Response]
end
subgraph "Middleware"
AUTH_MW[Auth Middleware]
CORS_MW[CORS Middleware]
end
subgraph "GraphQL"
RESOLVER[GraphQL Resolvers]
DECORATOR[Auth Decorators]
end
subgraph "Auth Core"
VALIDATION[Validation]
IDENTIFICATION[Identity Check]
AUTHORIZATION[Permission Check]
end
subgraph "Token Management"
CREATE[Token Creation]
VERIFY[Token Verification]
REVOKE[Token Revocation]
REFRESH[Token Refresh]
end
REQ --> CORS_MW
CORS_MW --> AUTH_MW
AUTH_MW --> RESOLVER
RESOLVER --> DECORATOR
DECORATOR --> VALIDATION
VALIDATION --> IDENTIFICATION
IDENTIFICATION --> AUTHORIZATION
AUTHORIZATION --> CREATE
AUTHORIZATION --> VERIFY
AUTHORIZATION --> REVOKE
AUTHORIZATION --> REFRESH
CREATE --> RESP
VERIFY --> RESP
REVOKE --> RESP
REFRESH --> RESP
```
## OAuth Flow
```mermaid
sequenceDiagram
participant U as User
participant F as Frontend
participant A as Auth Service
participant R as Redis
participant P as OAuth Provider
participant D as Database
U->>F: Click "Login with Provider"
F->>A: GET /oauth/{provider}?state={csrf}
A->>R: Store OAuth state
A->>P: Redirect to Provider
P->>U: Show authorization page
U->>P: Grant permission
P->>A: GET /oauth/{provider}/callback?code={code}&state={state}
A->>R: Verify state
A->>P: Exchange code for token
P->>A: Return access token + user data
A->>D: Find/create user
A->>A: Generate JWT session token
A->>R: Store session in Redis
A->>F: Redirect with JWT token
F->>U: User logged in
```
## Session Management Diagram
```mermaid
stateDiagram-v2
[*] --> Anonymous
Anonymous --> Authenticating: Login attempt
Authenticating --> Authenticated: Valid credentials
Authenticating --> Anonymous: Invalid credentials
Authenticated --> Refreshing: Token near expiry
Refreshing --> Authenticated: Successful refresh
Refreshing --> Anonymous: Refresh failed
Authenticated --> Anonymous: Logout/Revoke
Authenticated --> Anonymous: Token expired
```
## Redis Data Structures
```
├── Sessions
│ ├── session:{user_id}:{token} → Hash {user_id, username, device_info, last_activity}
│ ├── user_sessions:{user_id} → Set {token1, token2, ...}
│ └── {user_id}-{username}-{token} → Hash (legacy format)
├── Verification
│ └── verification_token:{token} → JSON {user_id, type, data, created_at}
├── OAuth
│ ├── oauth_access:{user_id}:{provider} → JSON {token, expires_in, scope}
│ ├── oauth_refresh:{user_id}:{provider} → JSON {token, provider_data}
│ └── oauth_state:{state} → JSON {provider, redirect_uri, code_verifier}
└── Monitoring
└── token_stats → Hash {session_count, oauth_count, memory_usage}
```
## Security Components
```mermaid
graph TD
subgraph "Input Validation"
EMAIL[Email Format]
PASS[Password Strength]
TOKEN[Token Format]
end
subgraph "Authentication"
BCRYPT[bcrypt + SHA256]
JWT_SIGN[JWT Signing]
OAUTH_VERIFY[OAuth Verification]
end
subgraph "Authorization"
ROLE[Role-based Access]
PERM[Permission Checks]
RESOURCE[Resource Access]
end
subgraph "Session Security"
TTL[Token TTL]
REVOKE[Token Revocation]
REFRESH[Secure Refresh]
end
EMAIL --> BCRYPT
PASS --> BCRYPT
TOKEN --> JWT_SIGN
BCRYPT --> ROLE
JWT_SIGN --> ROLE
OAUTH_VERIFY --> ROLE
ROLE --> PERM
PERM --> RESOURCE
RESOURCE --> TTL
RESOURCE --> REVOKE
RESOURCE --> REFRESH
```
## Scaling and Performance
### Horizontal scaling
- **Stateless JWT** tokens
- **Redis Cluster** for high availability
- **Load-balancer-aware** session management
### Optimizations
- **Connection pooling** for Redis
- **Batch operations** for bulk actions
- **Pipelining** for atomicity
- **LRU caching** for frequently used data
### Performance monitoring
- **Response time** of auth operations
- **Redis memory usage** and hit rate
- **Token creation/validation** rate
- **OAuth provider** response times

docs/auth-migration.md Normal file

@@ -0,0 +1,322 @@
# Authorization System Migration
## Overview of Changes
The authorization system has been completely reworked for better performance, security, and maintainability:
### Key changes
- ✅ Simplified token architecture (proxy logic removed)
- ✅ Fixed typing issues (mypy clean)
- ✅ Optimized Redis operations
- ✅ Added a token monitoring system
- ✅ Improved OAuth performance
- ✅ Removed deprecated components
## Code Migration
### TokenStorage API
#### Before (deprecated):
```python
# The old universal API
await TokenStorage.create_token("session", user_id, data, ttl)
await TokenStorage.get_token_data("session", token)
await TokenStorage.validate_token(token, "session")
await TokenStorage.revoke_token("session", token)
```
#### After (recommended):
```python
# Use the managers directly
from auth.tokens.sessions import SessionTokenManager
from auth.tokens.verification import VerificationTokenManager
from auth.tokens.oauth import OAuthTokenManager

# Sessions
sessions = SessionTokenManager()
token = await sessions.create_session(user_id, username=username)
valid, data = await sessions.validate_session_token(token)
await sessions.revoke_session_token(token)

# Verification tokens
verification = VerificationTokenManager()
token = await verification.create_verification_token(user_id, "email_change", data)
valid, data = await verification.validate_verification_token(token)

# OAuth tokens
oauth = OAuthTokenManager()
await oauth.store_oauth_tokens(user_id, "google", access_token, refresh_token)
```
#### TokenStorage facade (for compatibility):
```python
# A simplified facade for the core operations
await TokenStorage.create_session(user_id, username=username)
await TokenStorage.verify_session(token)
await TokenStorage.refresh_session(user_id, old_token, device_info)
await TokenStorage.revoke_session(token)
```
### Redis Service
#### Updated API:
```python
from services.redis import redis

# Basic operations
await redis.get(key)
await redis.set(key, value, ex=ttl)
await redis.delete(key)
await redis.exists(key)

# Pipeline operations
async with redis.pipeline(transaction=True) as pipe:
    await pipe.hset(key, field, value)
    await pipe.expire(key, seconds)
    results = await pipe.execute()

# New methods
await redis.scan(cursor, match=pattern, count=100)
await redis.scard(key)
await redis.ttl(key)
await redis.info(section="memory")
```
### Token Monitoring
#### New capabilities:
```python
from auth.tokens.monitoring import TokenMonitoring

monitoring = TokenMonitoring()

# Token statistics
stats = await monitoring.get_token_statistics()
print(f"Active sessions: {stats['session_tokens']}")
print(f"Memory usage: {stats['memory_usage']} bytes")

# Health check
health = await monitoring.health_check()
if health["status"] == "healthy":
    print("Token system is healthy")

# Memory optimization
results = await monitoring.optimize_memory_usage()
print(f"Cleaned {results['cleaned_expired']} expired tokens")
```
### Batch Operations
#### New capabilities:
```python
from auth.tokens.batch import BatchTokenOperations

batch = BatchTokenOperations()

# Bulk validation
tokens = ["token1", "token2", "token3"]
results = await batch.batch_validate_tokens(tokens)
# {"token1": True, "token2": False, "token3": True}

# Bulk revocation
revoked_count = await batch.batch_revoke_tokens(tokens)
print(f"Revoked {revoked_count} tokens")

# Cleaning up expired tokens
cleaned = await batch.cleanup_expired_tokens()
print(f"Cleaned {cleaned} expired tokens")
```
## Configuration Changes
### Environment variables
#### Added:
```bash
# New OAuth providers
VK_APP_ID=your_vk_app_id
VK_APP_SECRET=your_vk_app_secret
YANDEX_CLIENT_ID=your_yandex_client_id
YANDEX_CLIENT_SECRET=your_yandex_client_secret

# Extended Redis settings
REDIS_SOCKET_KEEPALIVE=true
REDIS_HEALTH_CHECK_INTERVAL=30
REDIS_SOCKET_TIMEOUT=5
```
#### Removed:
```bash
# No longer used
OLD_TOKEN_FORMAT_SUPPORT=true  # now detected automatically
TOKEN_CLEANUP_INTERVAL=3600    # replaced by on-demand cleanup
```
## Breaking Changes
### 1. Deprecated methods removed
#### Removed:
```python
# These methods no longer exist
TokenStorage.create_token()        # -> use the specific managers
TokenStorage.get_token_data()      # -> use the specific managers
TokenStorage.validate_token()      # -> use the specific managers
TokenStorage.revoke_user_tokens()  # -> use the specific managers
```
#### Alternatives:
```python
# For sessions
sessions = SessionTokenManager()
await sessions.create_session(user_id)
await sessions.revoke_user_sessions(user_id)

# For verification
verification = VerificationTokenManager()
await verification.create_verification_token(user_id, "email", data)
await verification.revoke_user_verification_tokens(user_id)
```
### 2. Changes in compat.py
The `auth/tokens/compat.py` file has been removed. If you were using `CompatibilityMethods`:
#### Migration:
```python
# Before
from auth.tokens.compat import CompatibilityMethods
compat = CompatibilityMethods()
await compat.get(token_key)

# After
from services.redis import redis
result = await redis.get(token_key)
```
### 3. Type changes
#### Updated imports:
```python
# Before
from auth.tokens.storage import TokenType, TokenData

# After
from auth.tokens.types import TokenType, TokenData
```
## Migration Recommendations
### Step-by-step migration
#### Step 1: Update the imports
```python
# Replace the old imports
from auth.tokens.sessions import SessionTokenManager
from auth.tokens.verification import VerificationTokenManager
from auth.tokens.oauth import OAuthTokenManager
```
#### Step 2: Use the specific managers
```python
# Instead of the universal TokenStorage,
# use the specialized managers
sessions = SessionTokenManager()
```
#### Step 3: Add monitoring
```python
from auth.tokens.monitoring import TokenMonitoring

# Add health checks to your endpoints
monitoring = TokenMonitoring()
health = await monitoring.health_check()
```
#### Step 4: Optimize with batch operations
```python
from auth.tokens.batch import BatchTokenOperations

# Use batch operations for bulk actions
batch = BatchTokenOperations()
results = await batch.batch_validate_tokens(token_list)
```
### Testing the migration
#### Checklist:
- [ ] All auth tests pass
- [ ] mypy checks report no errors
- [ ] OAuth providers work
- [ ] Session management functions correctly
- [ ] Redis operations are optimized
- [ ] Monitoring is configured
#### Test commands:
```bash
# Type checking
mypy .

# Run the auth tests
pytest tests/auth/ -v

# Check the Redis connection
python -c "
import asyncio
from services.redis import redis

async def test():
    result = await redis.ping()
    print(f'Redis connection: {result}')

asyncio.run(test())
"

# Token system health check
python -c "
import asyncio
from auth.tokens.monitoring import TokenMonitoring

async def test():
    health = await TokenMonitoring().health_check()
    print(f'Token system health: {health}')

asyncio.run(test())
"
```
## Performance
### Expected improvements
- **50%** faster Redis operations (pipeline usage)
- **30%** lower memory usage (optimized structures)
- **Elimination** of proxy overhead (direct calls to the managers)
- **Real-time** monitoring and statistics
### Monitoring after the migration
```python
# Check the statistics regularly
from auth.tokens.monitoring import TokenMonitoring

async def check_performance():
    monitoring = TokenMonitoring()
    stats = await monitoring.get_token_statistics()
    print(f"Session tokens: {stats['session_tokens']}")
    print(f"Memory usage: {stats['memory_usage'] / 1024 / 1024:.2f} MB")
    # Optimize when needed
    if stats['memory_usage'] > 100 * 1024 * 1024:  # 100MB
        results = await monitoring.optimize_memory_usage()
        print(f"Optimized: {results}")
```
## Support
If you run into problems during the migration:
1. **Check the logs** - all changes are logged
2. **Run a health check** - `TokenMonitoring().health_check()`
3. **Check Redis** - connectivity and memory
4. **Fall back to the TokenStorage facade** if necessary
### Contacts
- **Issues**: GitHub Issues
- **Documentation**: `/docs/auth-system.md`
- **Architecture**: `/docs/auth-architecture.md`

docs/auth-system.md Normal file

@@ -0,0 +1,349 @@
# Discours.io Authorization System
## Architecture Overview
The authorization system is built on a modular architecture split into independent components:
```
auth/
├── tokens/          # Token management system
├── middleware.py    # HTTP authentication middleware
├── decorators.py    # GraphQL authorization decorators
├── oauth.py         # OAuth providers
├── orm.py           # User ORM models
├── permissions.py   # Permission system
├── identity.py      # Identity verification methods
├── jwtcodec.py      # JWT codec
├── validations.py   # Data validation
├── credentials.py   # Credential handling
├── exceptions.py    # Authorization exceptions
└── handler.py       # HTTP handlers
```
## Token System
### Token types
| Type | TTL | Purpose |
|------|-----|---------|
| `session` | 30 days | User session tokens |
| `verification` | 1 hour | Confirmation tokens (email, phone) |
| `oauth_access` | 1 hour | OAuth access tokens |
| `oauth_refresh` | 30 days | OAuth refresh tokens |
### Token system components
#### `SessionTokenManager`
Manages user sessions:
- JWT tokens with the payload `{user_id, username, iat, exp}`
- Redis storage for revocation and management
- Multiple sessions per user
- Automatic renewal on activity
**Core methods:**
```python
async def create_session(user_id: str, auth_data=None, username=None, device_info=None) -> str
async def verify_session(token: str) -> Optional[Any]
async def refresh_session(user_id: int, old_token: str, device_info=None) -> Optional[str]
async def revoke_session_token(token: str) -> bool
async def revoke_user_sessions(user_id: str) -> int
```
**Redis structure:**
```
session:{user_id}:{token}     # hash with session data
user_sessions:{user_id}       # set of active tokens
{user_id}-{username}-{token}  # legacy keys for compatibility
```
#### `VerificationTokenManager`
Manages confirmation tokens:
- Email verification
- Phone verification
- Password reset
- One-time tokens
**Core methods:**
```python
async def create_verification_token(user_id: str, verification_type: str, data: TokenData, ttl=None) -> str
async def validate_verification_token(token: str) -> tuple[bool, Optional[TokenData]]
async def confirm_verification_token(token: str) -> Optional[TokenData]  # single use
```
#### `OAuthTokenManager`
Manages OAuth tokens:
- Google, GitHub, Facebook, X, Telegram, VK, Yandex
- Access/refresh token pairs
- Provider-specific storage
**Redis structure:**
```
oauth_access:{user_id}:{provider}   # access token
oauth_refresh:{user_id}:{provider}  # refresh token
```
#### `BatchTokenOperations`
Batch operations for performance:
- Bulk token validation
- Batch revocation
- Cleanup of expired tokens
#### `TokenMonitoring`
Monitoring and statistics:
- Counting active tokens by type
- Memory usage statistics
- Token system health checks
- Performance optimization
### TokenStorage (Facade)
A simplified facade for the core operations:
```python
# Core methods
await TokenStorage.create_session(user_id, username=username)
await TokenStorage.verify_session(token)
await TokenStorage.refresh_session(user_id, old_token, device_info)
await TokenStorage.revoke_session(token)

# Deprecated methods (kept for migration)
await TokenStorage.create_onetime(user)  # -> VerificationTokenManager
```
## OAuth System
### Supported providers
- **Google** - OpenID Connect
- **GitHub** - OAuth 2.0
- **Facebook** - Facebook Login
- **X (Twitter)** - OAuth 2.0 (no email)
- **Telegram** - Telegram Login Widget (no email)
- **VK** - VK OAuth (requires extra permissions for email)
- **Yandex** - Yandex OAuth
### OAuth authorization flow
1. **Initiation**: `GET /oauth/{provider}?state={csrf_token}&redirect_uri={url}`
2. **Callback**: `GET /oauth/{provider}/callback?code={code}&state={state}`
3. **Processing**: Fetch the user profile, create or update the user
4. **Result**: JWT token in a cookie + redirect to the frontend
### OAuth security
- **PKCE** (Proof Key for Code Exchange) for additional security
- **State parameters** stored in Redis with a 10-minute TTL (see the sketch below)
- **One-time sessions** - deleted after use
- **Temporary email generation** for providers that return no email (X, Telegram)
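A minimal sketch of the state handling described above, built around the documented `oauth_state:{state}` Redis key (the helper names and everything beyond that key are illustrative):
```python
# Hypothetical sketch of one-time OAuth state storage and verification.
import json
import secrets

from services.redis import redis

OAUTH_STATE_TTL = 600  # 10 minutes, matching the list above

async def store_oauth_state(provider: str, redirect_uri: str, code_verifier: str) -> str:
    state = secrets.token_urlsafe(32)
    payload = {"provider": provider, "redirect_uri": redirect_uri, "code_verifier": code_verifier}
    await redis.set(f"oauth_state:{state}", json.dumps(payload), ex=OAUTH_STATE_TTL)
    return state

async def consume_oauth_state(state: str) -> dict | None:
    """One-time read: the state is deleted as soon as it is used."""
    key = f"oauth_state:{state}"
    raw = await redis.get(key)
    if raw is None:
        return None  # expired or forged state
    await redis.delete(key)
    return json.loads(raw)
```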
## Middleware and Decorators
### AuthMiddleware
HTTP middleware for automatic authentication (a sketch follows below):
- Extracts tokens from cookies/headers
- Validates JWT tokens
- Adds the user context to the request
- Handles expired tokens
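A minimal Starlette-style sketch of those four steps (the cookie name and the session-manager call follow this document; the wiring itself is an illustration, not the actual middleware):
```python
# Hypothetical sketch of the middleware steps listed above.
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request

from auth.tokens.sessions import SessionTokenManager

class AuthMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        # 1. Extract the token from a cookie or the Authorization header
        token = request.cookies.get("session_token")
        if not token:
            auth_header = request.headers.get("authorization", "")
            token = auth_header.removeprefix("Bearer ").strip() or None

        # 2-4. Validate it and attach the user context; an expired or
        # invalid token leaves the request anonymous instead of failing.
        request.state.user = None
        if token:
            sessions = SessionTokenManager()
            request.state.user = await sessions.verify_session(token)

        return await call_next(request)
```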
### GraphQL decorators
```python
@auth_required        # Requires authentication
@permission_required  # Requires specific permissions
@admin_required       # Requires admin rights
```
## ORM Models
### Author (user)
```python
class Author:
    id: int
    email: str
    name: str
    slug: str
    password: Optional[str]  # bcrypt hash
    pic: Optional[str]       # avatar URL
    bio: Optional[str]
    email_verified: bool
    created_at: int
    updated_at: int
    last_seen: int
    # OAuth relations
    oauth_accounts: List[OAuthAccount]
```
### OAuthAccount
```python
class OAuthAccount:
    id: int
    author_id: int
    provider: str                  # google, github, etc.
    provider_id: str               # the user's ID at the provider
    provider_email: Optional[str]
    provider_data: dict            # extra data from the provider
```
## Permission System
### Roles
- **user** - Regular user
- **moderator** - Content moderator
- **admin** - System administrator
### Permissions
- **read** - Read content
- **write** - Create content
- **moderate** - Moderate content
- **admin** - Administrative actions
### Checking permissions
```python
from auth.decorators import permission_required

@permission_required("moderate")
async def moderate_content(info, content_id: str):
    # Only users with moderation rights get here
    pass
```
## Security
### Password hashing
- **bcrypt** with rounds=10
- **SHA256** preprocessing for long passwords (see the sketch below)
- **Salt** generated automatically by bcrypt
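A minimal sketch of that scheme using the standard `bcrypt` and `hashlib` packages (the helper names are assumptions; `identity.py` may differ in detail):
```python
# Hypothetical sketch of bcrypt with SHA256 preprocessing.
import hashlib

import bcrypt

def hash_password(password: str) -> str:
    # SHA256 preprocessing keeps long inputs under bcrypt's 72-byte limit
    digest = hashlib.sha256(password.encode("utf-8")).hexdigest()
    # bcrypt generates and embeds the salt itself (rounds=10 per this doc)
    return bcrypt.hashpw(digest.encode("utf-8"), bcrypt.gensalt(rounds=10)).decode("utf-8")

def verify_password(password: str, hashed: str) -> bool:
    digest = hashlib.sha256(password.encode("utf-8")).hexdigest()
    return bcrypt.checkpw(digest.encode("utf-8"), hashed.encode("utf-8"))
```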
### JWT tokens
- **Algorithm**: HS256
- **Secret**: from the JWT_SECRET environment variable
- **Payload**: `{user_id, username, iat, exp}`
- **Expiration**: 30 days (configurable)
### Redis security
- **TTL** on all tokens
- **Atomic operations** via pipelines
- **SCAN** instead of KEYS for performance (a sketch follows below)
- **Transactions** for critical operations
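A minimal sketch of the SCAN-based iteration implied above, using the `redis.scan` method shown in the migration guide (the cursor/batch return shape is assumed):
```python
# Hypothetical sketch: collect keys with SCAN instead of the blocking KEYS.
from services.redis import redis

async def scan_keys(pattern: str, count: int = 100) -> list[str]:
    """Iterate matching keys in small batches without blocking Redis."""
    keys: list[str] = []
    cursor = 0
    while True:
        cursor, batch = await redis.scan(cursor, match=pattern, count=count)
        keys.extend(batch)
        if cursor == 0:
            break
    return keys
```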
## Configuration
### Environment variables
```bash
# JWT
JWT_SECRET=your_super_secret_key
JWT_EXPIRATION_HOURS=720  # 30 days

# Redis
REDIS_URL=redis://localhost:6379/0

# OAuth providers
GOOGLE_CLIENT_ID=...
GOOGLE_CLIENT_SECRET=...
GITHUB_CLIENT_ID=...
GITHUB_CLIENT_SECRET=...
FACEBOOK_APP_ID=...
FACEBOOK_APP_SECRET=...
# ... etc.

# Session cookies
SESSION_COOKIE_NAME=session_token
SESSION_COOKIE_SECURE=true
SESSION_COOKIE_HTTPONLY=true
SESSION_COOKIE_SAMESITE=lax
SESSION_COOKIE_MAX_AGE=2592000  # 30 days

# Frontend
FRONTEND_URL=https://yourdomain.com
```
## API Endpoints
### Authentication
```
POST /auth/login            # Email/password login
POST /auth/logout           # Logout (token revocation)
POST /auth/refresh          # Token refresh
POST /auth/register         # Registration
```
### OAuth
```
GET /oauth/{provider}           # OAuth initiation
GET /oauth/{provider}/callback  # OAuth callback
```
### Profile
```
GET  /auth/profile          # Current user
PUT  /auth/profile          # Profile update
POST /auth/change-password  # Password change
```
## Monitoring and Logging
### Metrics
- Number of active sessions by type
- Redis memory usage
- OAuth provider statistics
- Health checks for all components
### Logging
- **INFO**: Successful operations (session creation, OAuth)
- **WARNING**: Suspicious activity (wrong passwords)
- **ERROR**: System errors (Redis unavailable, invalid JWT)
## Performance
### Redis optimizations
- **Pipeline operations** for atomicity
- **Batch processing** of tokens (100-1000 at a time)
- **SCAN** instead of KEYS for safety
- **TTL**-based automatic cleanup
### Caching
- **@lru_cache** for frequently used keys
- **Connection pooling** for Redis
- **JWT decode caching** in the middleware
## Migration and Compatibility
### Legacy support
- Old Redis keys: `{user_id}-{username}-{token}`
- Automatic migration on access
- Deprecated methods emit warnings
### Development plans
- [ ] Remove the legacy keys
- [ ] Switch JWT to RS256
- [ ] WebAuthn/FIDO2 support
- [ ] Rate limiting for auth endpoints
- [ ] Audit log for all auth operations
## Testing
### Unit tests
```bash
pytest tests/auth/                # All auth tests
pytest tests/auth/test_oauth.py   # OAuth tests
pytest tests/auth/test_tokens.py  # Token tests
```
### Integration tests
- OAuth flow with mocked providers
- Redis operations
- JWT lifecycle
- Permission checks
## Troubleshooting
### Common problems
1. **Redis connection failed** - check REDIS_URL and Redis availability
2. **JWT invalid** - check JWT_SECRET and the server clock
3. **OAuth failed** - check the providers' client_id/secret
4. **Session not found** - the token may have expired or been revoked
### Diagnostics
```python
# Token system health check
from auth.tokens.monitoring import TokenMonitoring
health = await TokenMonitoring().health_check()
# Token statistics
stats = await TokenMonitoring().get_token_statistics()
```

docs/auth.md Normal file

@@ -0,0 +1,797 @@
# Authentication and Authorization Module
## Overview
The module implements a complete authentication system backed by the local database and Redis.
## Components
### Data models
#### Author (orm.py)
- The main user model, with extended authentication functionality
- Supports:
  - Local authentication by email/phone
  - A role and permission system (RBAC)
  - Account lockout after repeated failed login attempts
  - Email/phone verification
#### Role and Permission (resolvers/rbac.py)
- RBAC (Role-Based Access Control) implementation
- Roles contain sets of permissions
- Permissions are defined as resource:operation pairs
### Authentication
#### Internal authentication
- Token lookup in Redis
- User data fetched from the local database
- Account status and permission checks
### Session management (sessions.py)
- Sessions are stored in Redis
- Supports:
  - Session creation
  - Verification
  - Revoking individual sessions
  - Revoking all of a user's sessions
- Automatic removal of expired sessions
### JWT tokens (jwtcodec.py)
- JWT token encoding/decoding
- Checks:
  - Expiration
  - Signature
  - Issuer
- Custom claims support
### OAuth integration (oauth.py)
Supported providers:
- Google
- Facebook
- GitHub
Functionality:
- Authorization through OAuth providers
- Fetching the user profile
- Creating/updating the local profile
### Validation (validations.py)
Validation models for:
- User registration
- Login
- OAuth data
- JWT payload
- API responses
### Email functionality (email.py)
- Mail delivery via Mailgun
- Template support
- Multilingual (ru/en)
- Email confirmation
- Password reset
## API Endpoints (resolvers.py)
### Mutations
- `login` - sign in
- `getSession` - get the current session
- `confirmEmail` - confirm an email address
- `registerUser` - register a user
- `sendLink` - send a login link
### Queries
- `logout` - sign out
- `isEmailUsed` - check whether an email is already in use
## Security
### Password hashing (identity.py)
- bcrypt combined with SHA-256
- Configurable number of rounds
- Protection against timing attacks
### Brute-force protection
- Account locked after 5 failed attempts
- Lockout duration: 30 minutes
- Counter reset after a successful login (a sketch of the counter logic follows below)
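A minimal sketch matching those numbers, built around the counter methods that appear in the brute-force example later in this document (`increment_failed_login`, `is_locked`, `reset_failed_login`); the field names are assumptions:
```python
# Hypothetical sketch of the lockout counters described above.
import time

MAX_FAILED_ATTEMPTS = 5
LOCKOUT_SECONDS = 30 * 60  # 30 minutes

class AuthorLockoutMixin:
    failed_logins: int = 0
    locked_until: int = 0  # unix timestamp

    def increment_failed_login(self) -> None:
        self.failed_logins += 1
        if self.failed_logins >= MAX_FAILED_ATTEMPTS:
            self.locked_until = int(time.time()) + LOCKOUT_SECONDS

    def is_locked(self) -> bool:
        return time.time() < self.locked_until

    def reset_failed_login(self) -> None:
        self.failed_logins = 0
        self.locked_until = 0
```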
## Handling Authorization Headers
### Header handling quirks in Starlette
When working with headers in Starlette/FastAPI, keep the following in mind:
1. **Header case**: Reading headers off the `Request` object can behave case-sensitively in practice. To reliably obtain the `Authorization` header, use a case-insensitive lookup.
2. **Bearer token format**: The token may arrive either with or without the `Bearer ` prefix. Both variants must be handled.
### Correctly reading the authorization header
```python
# Read the headers with case taken into account
headers_dict = dict(req.headers.items())
token = None
# Look the header up regardless of case
for header_name, header_value in headers_dict.items():
    if header_name.lower() == SESSION_TOKEN_HEADER.lower():
        token = header_value
        break
# Strip the Bearer prefix
if token and token.startswith("Bearer "):
    token = token.split("Bearer ")[1].strip()
```
### Common problems and their solutions
1. **Problem**: The header is not found by a direct `req.headers.get("Authorization")` call
   **Solution**: Use a case-insensitive search across all headers
2. **Problem**: The token arrives with the "Bearer" prefix in some requests and without it in others
   **Solution**: Always check for and handle both variants
3. **Problem**: The token decodes correctly, but the session is not found in Redis
   **Solution**: Check how the session key is built, and add automatic session creation for valid tokens
4. **Problem**: JWT decoding errors raise exceptions
   **Solution**: Wrap decoding in try-except and return None instead of raising
## Configuration
The main settings live in settings.py:
- `SESSION_TOKEN_LIFE_SPAN` - session lifetime
- `ONETIME_TOKEN_LIFE_SPAN` - one-time token lifetime
- `JWT_SECRET_KEY` - the JWT secret key
- `JWT_ALGORITHM` - the JWT signing algorithm
## Usage Examples
### Authentication
```python
# Check authorization
user_id, roles = await check_auth(request)
# Add a role
await add_user_role(user_id, ["author"])
# Create a session
token = await create_local_session(author)
```
### OAuth authorization
```python
# Initiate the OAuth flow
await oauth_login(request)
# Handle the callback
response = await oauth_authorize(request)
```
### 1. Basic login on the frontend
```typescript
// pages/Login.tsx
// Assumes AuthClient and createAuth are imported correctly
// import { AuthClient } from '../auth/AuthClient'; // Path may differ
// import { createAuth } from '../auth/useAuth'; // Path may differ
import { Component, Show } from 'solid-js'; // Show for conditional rendering

export const LoginPage: Component = () => {
  // Auth client and hook (example from client/auth/useAuth.ts)
  // const authClient = new AuthClient(/* baseUrl or other config */);
  // const auth = createAuth(authClient);
  // For simplicity, assume auth is already available via context or props;
  // in a real app use useAuthContext() if it is set up
  const { store, login } = useAuthContext(); // Example, if a context is used

  const handleSubmit = async (event: SubmitEvent) => {
    event.preventDefault();
    const form = event.currentTarget as HTMLFormElement;
    const emailInput = form.elements.namedItem('email') as HTMLInputElement;
    const passwordInput = form.elements.namedItem('password') as HTMLInputElement;
    if (!emailInput || !passwordInput) {
      console.error("Email or password input not found");
      return;
    }
    const success = await login({
      email: emailInput.value,
      password: passwordInput.value
    });
    if (success) {
      console.log('Login successful, redirecting...');
      // window.location.href = '/'; // Uncomment for a real redirect
    } else {
      // The error should already be in store().error, handled in useAuth
      console.error('Login failed:', store().error);
    }
  };

  return (
    <form onSubmit={handleSubmit}>
      <div>
        <label for="email">Email:</label>
        <input id="email" name="email" type="email" required />
      </div>
      <div>
        <label for="password">Password:</label>
        <input id="password" name="password" type="password" required />
      </div>
      <button type="submit" disabled={store().isLoading}>
        {store().isLoading ? 'Logging in...' : 'Log in'}
      </button>
      <Show when={store().error}>
        <p style={{ color: 'red' }}>{store().error}</p>
      </Show>
    </form>
  );
}
```
### 2. Protecting a component with roles
```typescript
// components/AdminPanel.tsx
import { Component } from 'solid-js'
import { useAuthContext } from '../auth'

export const AdminPanel: Component = () => {
  const auth = useAuthContext()
  // Check for the admin role
  if (!auth.hasRole('admin')) {
    return <div>Access denied</div>
  }
  return (
    <div>
      {/* Admin panel content */}
    </div>
  )
}
```
### 3. OAuth login via Google
```typescript
// components/GoogleLoginButton.tsx
import { Component } from 'solid-js';

export const GoogleLoginButton: Component = () => {
  const handleGoogleLogin = () => {
    // Assumes API_BASE_URL is configured globally or imported
    // const API_BASE_URL = 'http://localhost:8000'; // Example
    // window.location.href = `${API_BASE_URL}/auth/login/google`;
    // Or, if paths are relative and the server is on the same domain:
    window.location.href = '/auth/login/google';
  };
  return (
    <button onClick={handleGoogleLogin}>
      Log in with Google
    </button>
  );
}
```
### 4. Working with the user on the backend
```python
# routes/articles.py
# Assumed imports:
# from starlette.requests import Request
# from starlette.responses import JSONResponse
# from sqlalchemy.orm import Session
# from ..dependencies import get_db_session  # Example of obtaining a DB session
# from ..auth.decorators import login_required  # Your decorator
# from ..auth.orm import Author  # User model
# from ..models.article import Article  # Article model (example)

# @login_required  # The decorator checks authentication and adds user to the request
async def create_article_example(request: Request):  # Using Starlette's Request
    """
    Example of creating an article with a permission check.
    In a real app, use DI for the DB session (e.g. FastAPI Depends).
    """
    user: Author = request.user  # request.user is added by the @login_required decorator
    # Check the right to create articles (method from auth.orm)
    if not await user.has_permission('shout:create'):
        return JSONResponse({'error': 'Insufficient rights to create an article'}, status_code=403)
    try:
        article_data = await request.json()
        title = article_data.get('title')
        content = article_data.get('content')
        if not title or not content:
            return JSONResponse({'error': 'Title and content are required'}, status_code=400)
    except ValueError:  # Malformed JSON
        return JSONResponse({'error': 'Invalid JSON data'}, status_code=400)
    # Example of DB work. In a real app the db session comes from DI.
    # Here db is a stub; replace it with your actual DB logic. Example:
    # with get_db_session() as db:  # Obtain a SQLAlchemy session
    #     new_article = Article(
    #         title=title,
    #         content=content,
    #         author_id=user.id  # Link the article to its author
    #     )
    #     db.add(new_article)
    #     db.commit()
    #     db.refresh(new_article)
    #     return JSONResponse({'id': new_article.id, 'title': new_article.title}, status_code=201)
    # Stub for this documentation example
    mock_article_id = 123
    print(f"User {user.id} ({user.email}) is creating article '{title}'.")
    return JSONResponse({'id': mock_article_id, 'title': title}, status_code=201)
```
### 5. Permission checks in GraphQL resolvers
```python
# resolvers/mutations.py
from auth.decorators import login_required
from auth.models import Author

@login_required
async def update_article(_: None, info, article_id: int, data: dict):
    """
    Update an article with a permission check
    """
    user: Author = info.context.user
    # Fetch the article
    article = db.query(Article).get(article_id)
    if not article:
        raise GraphQLError('Article not found')
    # Check edit rights
    if not await user.has_permission('articles', 'edit'):
        raise GraphQLError('Insufficient rights')
    # Update the fields
    article.title = data.get('title', article.title)
    article.content = data.get('content', article.content)
    db.commit()
    return article
```
### 6. Creating a user with roles
```python
# scripts/create_admin.py
from auth.models import Author, Role
from auth.password import hash_password

def create_admin(email: str, password: str):
    """Create an administrator"""
    # Fetch the admin role
    admin_role = db.query(Role).filter(Role.id == 'admin').first()
    # Create the user
    admin = Author(
        email=email,
        password=hash_password(password),
        email_verified=True
    )
    # Assign the role
    admin.roles.append(admin_role)
    # Save
    db.add(admin)
    db.commit()
    return admin
```
### 7. Working with sessions
```python
# auth/session_management.py (example file name)
# Assumed imports:
# from starlette.responses import RedirectResponse
# from starlette.requests import Request
# from ..auth.orm import Author  # User model
# from ..auth.token import TokenStorage  # Your token module
# from ..settings import SESSION_COOKIE_MAX_AGE, SESSION_COOKIE_NAME, SESSION_COOKIE_SECURE, SESSION_COOKIE_HTTPONLY, SESSION_COOKIE_SAMESITE

# Replace FRONTEND_URL_AUTH_SUCCESS and FRONTEND_URL_LOGOUT with real URLs from settings
FRONTEND_URL_AUTH_SUCCESS = "/auth/success"  # Example
FRONTEND_URL_LOGOUT = "/logout"  # Example

async def login_user_session(request: Request, user: Author, response_class=RedirectResponse):
    """
    Create a user session and set the cookie.
    """
    if not hasattr(user, 'id'):  # Make sure the user object has an id
        raise ValueError("User object must have an id attribute")
    # Create a session token (TokenStorage from your auth.token module)
    session_token = TokenStorage.create_session(str(user.id))  # User IDs are usually numeric; cast to str if needed
    # Set the cookie
    # In a real app FRONTEND_URL_AUTH_SUCCESS should point to your frontend page
    response = response_class(url=FRONTEND_URL_AUTH_SUCCESS)
    response.set_cookie(
        key=SESSION_COOKIE_NAME,           # 'session_token' from settings.py
        value=session_token,
        httponly=SESSION_COOKIE_HTTPONLY,  # True in settings.py
        secure=SESSION_COOKIE_SECURE,      # True for HTTPS, in settings.py
        samesite=SESSION_COOKIE_SAMESITE,  # 'lax' in settings.py
        max_age=SESSION_COOKIE_MAX_AGE     # 30 days in seconds, from settings.py
    )
    print(f"Session created for user {user.id}. Token: {session_token[:10]}...")  # Debug logging
    return response

async def logout_user_session(request: Request, response_class=RedirectResponse):
    """
    End the user session and delete the cookie.
    """
    session_token = request.cookies.get(SESSION_COOKIE_NAME)
    if session_token:
        # Remove the token from storage (TokenStorage from your auth.token module)
        TokenStorage.delete_session(session_token)
        print(f"Session token {session_token[:10]}... deleted from storage.")
    # Delete the cookie
    # In a real app FRONTEND_URL_LOGOUT should point to your frontend page
    response = response_class(url=FRONTEND_URL_LOGOUT)
    response.delete_cookie(SESSION_COOKIE_NAME)
    print(f"Cookie {SESSION_COOKIE_NAME} deleted.")
    return response
```
### 8. CSRF checks in forms
```typescript
// components/ProfileForm.tsx
// import { useAuthContext } from '../auth'; // Assume auth is available via context
import { Component, createSignal, Show } from 'solid-js';

export const ProfileForm: Component = () => {
  const { store, checkAuth } = useAuthContext(); // Example of reading from context
  const [message, setMessage] = createSignal<string | null>(null);
  const [error, setError] = createSignal<string | null>(null);

  const handleSubmit = async (event: SubmitEvent) => {
    event.preventDefault();
    setMessage(null);
    setError(null);
    const form = event.currentTarget as HTMLFormElement;
    const formData = new FormData(form);
    // IMPORTANT: reading the CSRF token from a cookie is only one approach.
    // If the CSRF token is set as an httpOnly cookie, the browser sends it
    // automatically and it cannot be read manually for fetch; the server
    // must then be configured to check it from a header (e.g. X-CSRF-Token),
    // which fetch does *not* set automatically for httpOnly cookies.
    // Alternatively the server can expose the CSRF token via a dedicated endpoint.
    // The approach below works when the CSRF token is readable from JS.
    const csrfToken = document.cookie
      .split('; ')
      .find(row => row.startsWith('csrf_token=')) // The cookie name may differ
      ?.split('=')[1];
    if (!csrfToken) {
      // setError('CSRF token not found. Please refresh the page.');
      // In production a CSRF token should always be present; this log is for debugging.
      console.warn('CSRF token not found in cookies. Ensure it is set by the server.');
      // For this example we either abort here or rely on the server-side check.
      // For stronger safety, abort if client-side CSRF protection is critical.
    }
    try {
      // Replace '/api/profile' with your real endpoint
      const response = await fetch('/api/profile', {
        method: 'POST',
        headers: {
          // The server must be configured to read this header
          // if the CSRF token is not sent automatically via an httpOnly cookie.
          ...(csrfToken && { 'X-CSRF-Token': csrfToken }),
          // 'Content-Type': 'application/json' // If sending JSON
        },
        body: formData // FormData is sent as 'multipart/form-data'
        // For JSON: body: JSON.stringify(Object.fromEntries(formData))
      });
      if (response.ok) {
        const result = await response.json();
        setMessage(result.message || 'Profile updated successfully!');
        checkAuth(); // Refresh the user data in the store
      } else {
        const errData = await response.json();
        setError(errData.error || `Error: ${response.status}`);
      }
    } catch (err) {
      console.error('Profile update error:', err);
      setError('Failed to update the profile. Try again later.');
    }
  };

  return (
    <form onSubmit={handleSubmit}>
      <div>
        <label for="name">Name:</label>
        <input id="name" name="name" defaultValue={store().user?.name || ''} />
      </div>
      {/* Other profile fields */}
      <button type="submit">Save changes</button>
      <Show when={message()}>
        <p style={{ color: 'green' }}>{message()}</p>
      </Show>
      <Show when={error()}>
        <p style={{ color: 'red' }}>{error()}</p>
      </Show>
    </form>
  );
}
```
### 9. Custom form validators
```typescript
// validators/auth.ts
export const validatePassword = (password: string): string[] => {
  const errors: string[] = []
  if (password.length < 8) {
    errors.push('The password must be at least 8 characters long')
  }
  if (!/[A-Z]/.test(password)) {
    errors.push('The password must contain an uppercase letter')
  }
  if (!/[0-9]/.test(password)) {
    errors.push('The password must contain a digit')
  }
  return errors
}

// components/RegisterForm.tsx
import { validatePassword } from '../validators/auth'

export const RegisterForm: Component = () => {
  const [errors, setErrors] = createSignal<string[]>([])

  const handleSubmit = async (e: Event) => {
    e.preventDefault()
    const form = e.target as HTMLFormElement
    const data = new FormData(form)
    // Validate the password
    const password = data.get('password') as string
    const passwordErrors = validatePassword(password)
    if (passwordErrors.length > 0) {
      setErrors(passwordErrors)
      return
    }
    // Submit the form...
  }

  return (
    <form onSubmit={handleSubmit}>
      <input name="password" type="password" />
      {errors().map(error => (
        <div class="error">{error}</div>
      ))}
      <button type="submit">Register</button>
    </form>
  )
}
```
### 10. Integration with external services
```python
# services/notifications.py
from datetime import datetime

from auth.models import Author

async def notify_login(user: Author, ip: str, device: str):
    """Send a notification about a new login"""
    # Compose the message
    text = f"""
    New account login:
    IP: {ip}
    Device: {device}
    Time: {datetime.now()}
    """
    # Send the email
    await send_email(
        to=user.email,
        subject='New account login',
        text=text
    )
    # Log it
    logger.info(f'New login for user {user.id} from {ip}')
```
## Testing
### 1. OAuth authorization test
```python
# tests/test_oauth.py
import pytest

@pytest.mark.asyncio
async def test_google_oauth_success(client, mock_google):
    # Mock the response from Google
    mock_google.return_value = {
        'id': '123',
        'email': 'test@gmail.com',
        'name': 'Test User'
    }
    # Request authorization
    response = await client.get('/auth/login/google')
    assert response.status_code == 302
    # Check the redirect
    assert 'accounts.google.com' in response.headers['location']
    # Check the session
    assert 'state' in client.session
    assert 'code_verifier' in client.session
```
### 2. Roles and permissions test
```python
# tests/test_permissions.py
import pytest

@pytest.mark.asyncio
async def test_user_permissions():
    # Create test data
    role = Role(id='editor', name='Editor')
    permission = Permission(
        id='articles:edit',
        resource='articles',
        operation='edit'
    )
    role.permissions.append(permission)
    user = Author(email='test@test.com')
    user.roles.append(role)
    # Check the permissions
    assert await user.has_permission('articles', 'edit')
    assert not await user.has_permission('articles', 'delete')
```
## Security
### 1. Rate Limiting
```python
# middleware/rate_limit.py
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import JSONResponse
from redis import Redis

class RateLimitMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request, call_next):
        # Get the client IP
        ip = request.client.host
        # Check the limits in Redis
        redis = Redis()
        key = f'rate_limit:{ip}'
        # Increment the counter
        count = redis.incr(key)
        if count == 1:
            redis.expire(key, 60)  # 60-second TTL
        # Check the limit
        if count > 100:  # 100 requests per minute
            return JSONResponse(
                {'error': 'Too many requests'},
                status_code=429
            )
        return await call_next(request)
```
### 2. Brute-force protection
```python
# auth/login.py
async def handle_login_attempt(user: Author, success: bool):
    """Handle a login attempt"""
    if not success:
        # Increment the failed-attempt counter
        user.increment_failed_login()
        if user.is_locked():
            # The account is locked
            raise AuthError(
                'Account is locked. Try again later.',
                'ACCOUNT_LOCKED'
            )
    else:
        # Reset the counter on a successful login
        user.reset_failed_login()
```
## Monitoring
### 1. Logging authorization events
```python
# auth/logging.py
import structlog

logger = structlog.get_logger()

def log_auth_event(
    event_type: str,
    user_id: int = None,
    success: bool = True,
    **kwargs
):
    """
    Log an authorization event

    Args:
        event_type: Event type (login, logout, etc)
        user_id: User ID
        success: Whether the operation succeeded
        **kwargs: Extra fields
    """
    logger.info(
        'auth_event',
        event_type=event_type,
        user_id=user_id,
        success=success,
        **kwargs
    )
```
### 2. Prometheus metrics
```python
# metrics/auth.py
from prometheus_client import Counter, Histogram

# Counters
login_attempts = Counter(
    'auth_login_attempts_total',
    'Number of login attempts',
    ['success']
)
oauth_logins = Counter(
    'auth_oauth_logins_total',
    'Number of OAuth logins',
    ['provider']
)
# Histograms
login_duration = Histogram(
    'auth_login_duration_seconds',
    'Time spent processing login'
)
```

docs/caching.md Normal file

@@ -0,0 +1,408 @@
# The Discours caching system
## Overview
The Discours caching system is an end-to-end solution for improving platform performance. It uses Redis to store frequently requested data and to reduce load on the primary database.
Caching is implemented as a multi-level system composed of several modules:
- `cache.py` - the core module with caching functions
- `revalidator.py` - the asynchronous cache revalidation manager
- `triggers.py` - SQLAlchemy event triggers for automatic revalidation
- `precache.py` - pre-warming of the cache at application startup
## Key components
### 1. Cache key formats
The system supports several key formats for compatibility and ease of use:
- **Entity keys**: `entity:property:value` (e.g. `author:id:123`)
- **Collection keys**: `entity:collection:params` (e.g. `authors:stats:limit=10:offset=0`)
- **Special keys**: kept for backwards compatibility (e.g. `topic_shouts_123`)
All standard key formats are stored in the `CACHE_KEYS` dictionary:
```python
CACHE_KEYS = {
"TOPIC_ID": "topic:id:{}",
"TOPIC_SLUG": "topic:slug:{}",
"AUTHOR_ID": "author:id:{}",
    # and so on...
}
```
### 2. Core caching functions
#### Key structure
Instead of generating keys through helper functions, the system follows strict key-building conventions:
1. **Keys for individual entities** follow the pattern:
   ```
   entity:property:value
   ```
   For example:
   - `topic:id:123` - the topic with ID 123
   - `author:slug:john-doe` - the author with slug "john-doe"
   - `shout:id:456` - the shout (post) with ID 456
2. **Keys for collections** follow the pattern:
   ```
   entity:collection[:filter1=value1:filter2=value2:...]
   ```
   For example:
   - `topics:all:basic` - the basic list of all topics
   - `authors:stats:limit=10:offset=0:sort=name` - a sorted, paginated list of authors
   - `shouts:feed:limit=20:community=1` - the shout feed filtered by community
3. **Special key formats** kept for backwards compatibility:
   ```
   entity_action_id
   ```
   For example:
   - `topic_shouts_123` - the shouts for topic with ID 123
Throughout the codebase, developers must build keys explicitly according to these conventions, which keeps caching uniform and predictable.
#### Working with cached data
```python
async def cache_data(key, data, ttl=None)
async def get_cached_data(key)
```
These functions provide a generic interface for storing data in and reading data from the cache. They use Redis directly via `redis.execute()` calls.
#### High-level query caching
```python
async def cached_query(cache_key, query_func, ttl=None, force_refresh=False, **query_params)
```
The `cached_query` function combines reading from the cache with executing the query when the data is missing. It is the primary function that resolvers should use to cache query results.
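At its core, `cached_query` boils down to a read-through pattern over the two helpers above; a minimal sketch (the real implementation may add locking and error handling):

```python
async def cached_query(cache_key, query_func, ttl=None, force_refresh=False, **query_params):
    # Serve from cache unless the caller forces a refresh
    if not force_refresh:
        cached = await get_cached_data(cache_key)
        if cached is not None:
            return cached
    # Cache miss: run the query and store the result
    result = await query_func(**query_params)
    await cache_data(cache_key, result, ttl=ttl)
    return result
```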
### 3. Entity caching
Dedicated functions exist for the main entity types:
```python
async def cache_topic(topic: dict)
async def cache_author(author: dict)
async def get_cached_topic(topic_id: int)
async def get_cached_author(author_id: int, get_with_stat)
```
These functions simplify working with the most common data types and keep their caching uniform.
### 4. Working with relationships
The following functions handle relationships between entities:
```python
async def cache_follows(follower_id, entity_type, entity_id, is_insert=True)
async def get_cached_topic_followers(topic_id)
async def get_cached_author_followers(author_id)
async def get_cached_follower_topics(author_id)
```
They make it efficient to cache and retrieve information about follows and about the links between authors, topics, and shouts.
## Cache invalidation
### 1. Direct invalidation
The system supports two kinds of cache invalidation:
#### 1.1. Invalidation by prefix
```python
async def invalidate_cache_by_prefix(prefix)
```
Invalidates every cache key that starts with the given prefix. Used in resolvers to invalidate a whole group of caches on bulk changes.
#### 1.2. Targeted invalidation
```python
async def invalidate_authors_cache(author_id=None)
async def invalidate_topics_cache(topic_id=None)
```
These functions invalidate the cache for a single entity only, which reduces Redis load and avoids throwing away cached data unnecessarily. If no entity ID is given, they fall back to prefix invalidation.
Examples of targeted invalidation:
```python
# Invalidate the cache for author 123 only
await invalidate_authors_cache(123)
# Invalidate the cache for topic 456 only
await invalidate_topics_cache(456)
```
### 2. Deferred invalidation
The `revalidator.py` module implements deferred cache invalidation through the `CacheRevalidationManager` class:
```python
class CacheRevalidationManager:
    def __init__(self, interval=CACHE_REVALIDATION_INTERVAL):
        # ...
        self._redis = redis  # Direct reference to the Redis service

    async def start(self):
        # Check and establish the Redis connection
        # ...

    async def process_revalidation(self):
        # Process the items queued for revalidation
        # ...

    def mark_for_revalidation(self, entity_id, entity_type):
        # Add an entity to the revalidation queue
        # ...
```
The revalidation manager runs as an asynchronous background process that periodically (every 5 minutes by default) checks for entities awaiting revalidation.
**Interaction with Redis:**
- CacheRevalidationManager holds a direct reference to the Redis service via the `_redis` attribute
- On startup it checks the Redis connection and establishes a new one if needed
- The connection is automatically verified before every revalidation pass
- The system restores the connection on its own if it is lost
**Implementation details:**
- Authors and topics are revalidated one record at a time
- Shouts and reactions are processed in batches, with a threshold of 10 items (sketched below)
- Once the threshold is reached, the system switches to invalidating whole collections instead of per-record processing
- A special `all` flag triggers full invalidation of every record of a given type
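A sketch of the batching rule just described (the queue attribute and per-record key layout are assumptions; the real logic lives in `revalidator.py`):

```python
BATCH_THRESHOLD = 10  # per the description above

async def process_revalidation(self):
    for entity_type in ("shouts", "reactions"):
        queued = self.queues.get(entity_type, set())  # assumed queue attribute
        if not queued:
            continue
        if "all" in queued or len(queued) >= BATCH_THRESHOLD:
            # At or above the threshold: invalidate the whole collection at once
            await invalidate_cache_by_prefix(entity_type)
        else:
            # Below the threshold: revalidate records one by one
            for entity_id in queued:
                await invalidate_cache_by_prefix(f"{entity_type}:{entity_id}")
        queued.clear()
```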
### 3. Automatic invalidation via triggers
The `triggers.py` module registers SQLAlchemy event handlers that automatically mark entities for revalidation whenever the underlying data changes:
```python
def events_register():
    event.listen(Author, "after_update", mark_for_revalidation)
    event.listen(Topic, "after_update", mark_for_revalidation)
    # and others...
```
The triggers have the following properties:
- They react to insert, update, and delete events
- They mark the affected entities for deferred revalidation (a handler is sketched below)
- They account for relationships between entities (e.g. when a topic changes, its related shouts are updated)
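A minimal sketch of such a handler (the `(mapper, connection, target)` signature is SQLAlchemy's standard for `after_update` listeners; `revalidation_manager` is the assumed module-level manager instance):

```python
def mark_for_revalidation(mapper, connection, target):
    # Derive the entity type from the model class and queue the record
    entity_type = type(target).__name__.lower()
    revalidation_manager.mark_for_revalidation(target.id, entity_type)
```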
## Pre-caching
The `precache.py` module pre-warms frequently used data when the application starts:
```python
async def precache_data():
    # ...
```
This function runs at application startup and fills the cache with the data users are most likely to request.
## Usage examples
### Simple caching of a query result
```python
async def get_topics_with_stats(limit=10, offset=0, by="title"):
    # Build the cache key according to the conventions
    cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}"
    cached_data = await get_cached_data(cache_key)
    if cached_data:
        return cached_data
    # Query the database
    result = ...  # data-fetching logic
    await cache_data(cache_key, result, ttl=300)
    return result
```
### Using the generic cached_query function
```python
async def get_topics_with_stats(limit=10, offset=0, by="title"):
    async def fetch_data(limit, offset, by):
        # Data-fetching logic
        return result
    # Build the cache key according to the conventions
    cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}"
    return await cached_query(
        cache_key,
        fetch_data,
        ttl=300,
        limit=limit,
        offset=offset,
        by=by
    )
```
### Targeted cache invalidation on data changes
```python
async def update_author(author_id, data):
    # Update the database
    # ...
    # Invalidate only this author's cache
    await invalidate_authors_cache(author_id)
    return result
```
## Cache keys
Below is the full list of key formats used by the Discours caching system.
### Shout keys
| Key format | Description | Example |
|--------------|----------|--------|
| `shouts:{id}` | Shout by ID | `shouts:123` |
| `shouts:{id}:invalidated` | Shout invalidation flag | `shouts:123:invalidated` |
| `shouts:feed:limit={n}:offset={m}` | Main shout feed | `shouts:feed:limit=20:offset=0` |
| `shouts:recent:limit={n}` | Most recent shouts | `shouts:recent:limit=10` |
| `shouts:random_top:limit={n}` | Random top shouts | `shouts:random_top:limit=5` |
| `shouts:unrated:limit={n}` | Unrated shouts | `shouts:unrated:limit=20` |
| `shouts:coauthored:limit={n}` | Co-authored shouts | `shouts:coauthored:limit=10` |
### Author keys
| Key format | Description | Example |
|--------------|----------|--------|
| `author:id:{id}` | Author by ID | `author:id:123` |
| `author:slug:{slug}` | Author by slug | `author:slug:john-doe` |
| `author:user_id:{user_id}` | Author by user ID | `author:user_id:abc123` |
| `author:{id}` | The author's shouts | `author:123` |
| `authored:{id}` | Shouts created by the author | `authored:123` |
| `authors:all:basic` | Basic list of all authors | `authors:all:basic` |
| `authors:stats:limit={n}:offset={m}:sort={field}` | Paginated, sorted list of authors | `authors:stats:limit=20:offset=0:sort=name` |
| `author:followers:{id}` | The author's followers | `author:followers:123` |
| `author:following:{id}` | Authors the author follows | `author:following:123` |
### Topic keys
| Key format | Description | Example |
|--------------|----------|--------|
| `topic:id:{id}` | Topic by ID | `topic:id:123` |
| `topic:slug:{slug}` | Topic by slug | `topic:slug:technology` |
| `topic:{id}` | Shouts in the topic | `topic:123` |
| `topic_shouts_{id}` | Shouts in the topic (legacy format) | `topic_shouts_123` |
| `topics:all:basic` | Basic list of all topics | `topics:all:basic` |
| `topics:stats:limit={n}:offset={m}:sort={field}` | Paginated, sorted list of topics | `topics:stats:limit=20:offset=0:sort=name` |
| `topic:authors:{id}` | The topic's authors | `topic:authors:123` |
| `topic:followers:{id}` | The topic's followers | `topic:followers:123` |
| `topic:stats:{id}` | Topic statistics | `topic:stats:123` |
### Reaction keys
| Key format | Description | Example |
|--------------|----------|--------|
| `reactions:shout:{id}:limit={n}:offset={m}` | Reactions to a shout | `reactions:shout:123:limit=20:offset=0` |
| `reactions:comment:{id}:limit={n}:offset={m}` | Reactions to a comment | `reactions:comment:456:limit=20:offset=0` |
| `reactions:author:{id}:limit={n}:offset={m}` | An author's reactions | `reactions:author:123:limit=20:offset=0` |
| `reactions:followed:author:{id}:limit={n}` | Reactions of authors the user follows | `reactions:followed:author:123:limit=20` |
### Community keys
| Key format | Description | Example |
|--------------|----------|--------|
| `community:id:{id}` | Community by ID | `community:id:123` |
| `community:slug:{slug}` | Community by slug | `community:slug:tech-club` |
| `communities:all:basic` | Basic list of all communities | `communities:all:basic` |
| `community:authors:{id}` | The community's authors | `community:authors:123` |
| `community:shouts:{id}:limit={n}:offset={m}` | The community's shouts | `community:shouts:123:limit=20:offset=0` |
### Follow keys
| Key format | Description | Example |
|--------------|----------|--------|
| `follow:author:{follower_id}:authors` | Authors the user follows | `follow:author:123:authors` |
| `follow:author:{follower_id}:topics` | Topics the user follows | `follow:author:123:topics` |
| `follow:topic:{topic_id}:authors` | Authors following the topic | `follow:topic:456:authors` |
| `follow:author:{author_id}:followers` | The author's followers | `follow:author:123:followers` |
### Draft keys
| Key format | Description | Example |
|--------------|----------|--------|
| `draft:id:{id}` | Draft by ID | `draft:id:123` |
| `drafts:author:{id}` | The author's drafts | `drafts:author:123` |
| `drafts:all:limit={n}:offset={m}` | Paginated list of all drafts | `drafts:all:limit=20:offset=0` |
### Statistics keys
| Key format | Description | Example |
|--------------|----------|--------|
| `stats:shout:{id}` | Shout statistics | `stats:shout:123` |
| `stats:author:{id}` | Author statistics | `stats:author:123` |
| `stats:topic:{id}` | Topic statistics | `stats:topic:123` |
| `stats:community:{id}` | Community statistics | `stats:community:123` |
### Search keys
| Key format | Description | Example |
|--------------|----------|--------|
| `search:query:{query}:limit={n}:offset={m}` | Search results | `search:query:технологии:limit=20:offset=0` |
| `search:author:{query}:limit={n}` | Author search results | `search:author:иван:limit=10` |
| `search:topic:{query}:limit={n}` | Topic search results | `search:topic:наука:limit=10` |
### Service keys
| Key format | Description | Example |
|--------------|----------|--------|
| `revalidation:{entity_type}:{entity_id}` | Revalidation marker | `revalidation:author:123` |
| `revalidation:batch:{entity_type}` | Batch revalidation | `revalidation:batch:shouts` |
| `lock:{resource}` | Resource lock | `lock:precache` |
| `views:shout:{id}` | Shout view counter | `views:shout:123` |
### Important notes on key usage
1. When invalidating shout caches via `invalidate_shouts_cache()`, pass a list of shout IDs, not cache keys.
2. The `invalidate_shout_related_cache()` function automatically invalidates every key related to a shout, including author and topic keys.
3. Most cache operations are asynchronous functions and must be called with `await`.
4. When introducing new cache keys, follow the existing naming conventions.
## Debugging and monitoring
The caching system uses a logger to trace its operations:
```python
logger.debug(f"Cache hit for key {key}")
logger.debug(f"Deleted {len(keys)} cache keys with prefix {prefix}")
logger.error(f"Cache invalidation error: {e}")
```
This makes it possible to observe cache behavior and catch problems early.
## Usage recommendations
1. **Follow the key-building conventions** - this is critical for cache consistency and predictability.
2. **Do not invent your own key formats** - reuse the existing patterns to keep things uniform.
3. **Do not forget invalidation** - always invalidate the cache when data changes.
4. **Prefer targeted invalidation** over prefix invalidation to reduce Redis load.
5. **Set sensible TTLs** - pick different TTL values depending on how often the data changes.
6. **Do not cache large payloads** - cache only what actually improves performance.
## Implementation details
- **Data serialization**: `orjson` is used for efficient serialization and deserialization.
- **Date and time formatting**: `CustomJSONEncoder` handles dates correctly.
- **Asynchrony**: every caching operation is asynchronous to minimize the impact on API performance.
- **Direct Redis access**: all operations go through direct `redis.execute()` calls with error handling.
- **Batch processing**: bulk operations use a threshold beyond which optimized strategies kick in.
## Known limitations
1. **Data consistency** - the system does not guarantee absolute consistency between the cache and the database.
2. **Memory** - the amount of cached data must be monitored to avoid Redis memory pressure.
3. **Redis performance** - under a very high volume of cache operations, Redis can become a bottleneck.

docs/comments-pagination.md Normal file
@@ -0,0 +1,165 @@
# Comment pagination
## Overview
The system paginates comments by thread, which makes it possible to load and render nested discussion branches efficiently. Main benefits:
1. Only the needed comments are loaded, not the whole tree
2. Lower load on both server and client
3. Efficient navigation through large discussions
4. Preloading the first N replies for a better UX
## API for hierarchical comment loading
### The `load_comments_branch` GraphQL query
```graphql
query LoadCommentsBranch(
  $shout: Int!,
  $parentId: Int,
  $limit: Int,
  $offset: Int,
  $sort: ReactionSort,
  $childrenLimit: Int,
  $childrenOffset: Int
) {
  load_comments_branch(
    shout: $shout,
    parent_id: $parentId,
    limit: $limit,
    offset: $offset,
    sort: $sort,
    children_limit: $childrenLimit,
    children_offset: $childrenOffset
  ) {
    id
    body
    created_at
    created_by {
      id
      name
      slug
      pic
    }
    kind
    reply_to
    stat {
      rating
      comments_count
    }
    first_replies {
      id
      body
      created_at
      created_by {
        id
        name
        slug
        pic
      }
      kind
      reply_to
      stat {
        rating
        comments_count
      }
    }
  }
}
```
### Query parameters
| Parameter | Type | Default | Description |
|----------|-----|--------------|----------|
| shout | Int! | - | ID of the article the comments belong to |
| parent_id | Int | null | ID of the parent comment. If null, root comments are loaded |
| limit | Int | 10 | Maximum number of comments to load |
| offset | Int | 0 | Pagination offset |
| sort | ReactionSort | newest | Sort order: newest, oldest, like |
| children_limit | Int | 3 | Maximum number of child comments per parent |
| children_offset | Int | 0 | Pagination offset for child comments |
### Response fields
Each comment carries the following main fields:
- `id`: comment ID
- `body`: comment text
- `created_at`: creation time
- `created_by`: author info
- `kind`: reaction type (COMMENT)
- `reply_to`: parent comment ID (null for root comments)
- `first_replies`: the first N child comments
- `stat`: comment statistics, including:
  - `comments_count`: number of replies to the comment
  - `rating`: comment rating
## Usage examples
### Loading root comments with their first replies
```javascript
const { data } = await client.query({
  query: LOAD_COMMENTS_BRANCH,
  variables: {
    shout: 222,
    limit: 10,
    offset: 0,
    sort: "newest",
    childrenLimit: 3
  }
});
```
### Loading replies to a specific comment
```javascript
const { data } = await client.query({
  query: LOAD_COMMENTS_BRANCH,
  variables: {
    shout: 222,
    parentId: 123, // ID of the comment whose replies we are loading
    limit: 10,
    offset: 0,
    sort: "oldest" // Sort replies oldest-first
  }
});
```
### Paginating child comments
To load additional replies to a comment:
```javascript
const { data } = await client.query({
  query: LOAD_COMMENTS_BRANCH,
  variables: {
    shout: 222,
    parentId: 123,
    limit: 10,
    offset: 0,
    childrenLimit: 5,
    childrenOffset: 3 // Skip the first 3 (already loaded) comments
  }
});
```
## Client implementation recommendations
1. For complex discussion threads:
   - Load only root comments with their first N replies at first
   - When more replies exist (`stat.comments_count > first_replies.length`),
     show a "Show all replies" button
   - On click, load the extra replies with a query that passes the `parentId`
2. For sorting:
   - Default to `newest` to surface fresh discussions
   - Provide a sort switcher for the whole comment tree
   - On sort change, reload the data with the new `sort` parameter
3. For performance:
   - Cache query results on the client
   - Use optimistic updates when adding or editing comments
   - Load comments in chunks where needed (lazy loading)

docs/features.md Normal file
@@ -0,0 +1,173 @@
## Admin panel
- **User management**: Browse, search, assign roles (user/moderator/admin)
- **Publication management**: A table of all publications with status filtering and content previews
- **Topic management**: Full topic editing in the admin panel
  - **Hierarchical display**: Topics are shown as a tree with indentation and `└─` markers for children
  - **Table columns**: ID, title, slug, description, community, parents, actions
  - **Simple editing UI**:
    - **Row click**: The edit modal opens on a click anywhere in the table row
    - **Unobtrusive delete button**: A grey "×" that turns red on hover
    - **Plain HTML editor**: An ordinary contenteditable div with a monospace font instead of a heavyweight editor
  - **Editable fields**:
    - **ID**: Shown for identification (read-only)
    - **Title and slug**: Text fields for the basic info
    - **Description**: A plain HTML editor with a placeholder
    - **Image**: URL of the topic image
    - **Community**: Community ID with numeric validation
    - **Parents**: A comma-separated parent_ids list with automatic parsing
  - **Safe deletion**: A confirmation modal on clicking the "×"
  - **Correct cache invalidation**: Follow counters are refreshed automatically for every follower
  - **GraphQL integration**: Uses the `UPDATE_TOPIC_MUTATION` and `DELETE_TOPIC_MUTATION` mutations
- **Environment variable management**: Application configuration settings
- **TypeScript integration**: Full typing with types autogenerated from the GraphQL schema
- **Responsive design**: Adapts to different screen sizes
## Codegen integration
- **Automatic type generation**: TypeScript types are generated from the GraphQL schema
- **Config file**: `codegen.ts` configured for client-side generation
- **Project layout**: Queries, mutations, and index files split under `panel/graphql/generated/`
- **Type safety**: Strict typing for every GraphQL operation in the admin panel
- **Developer experience**: Autocomplete and type checking in the IDE
## Improved topic cache invalidation
- **Centralized function**: `invalidate_topic_followers_cache()` in the cache module
- **Comprehensive invalidation**: Handles the caches of the topic itself and of all its followers
- **Correct ordering**: Followers are fetched BEFORE the data is deleted from the DB
- **Invalidated caches**:
  - `author:follows-topics:{follower_id}` - the list of followed topics
  - `author:followers:{follower_id}` - follower counters
  - `author:stat:{follower_id}` - the author's overall statistics
  - `topic:followers:{topic_id}` - the topic's follower list
- **Architectural principles**: Separation of concerns, reusability, testability
## Publication views
- Google Analytics integration for tracking publication views
- Counting unique users and total views
- Statistics refresh automatically when publication data is requested
## Multi-domain authentication
- Authentication support across multiple domains
- Automatic detection of the auth server
- Correct CORS handling for every supported domain
## Caching system
- **Redis as the main store**: Caching, sessions, tokens, transient data
- **Fully documented schema**: [redis-schema.md](redis-schema.md) - a detailed description of every data structure
- **11 data categories**: Authentication, entity cache, search, views, notifications
- **Token system**: Sessions, OAuth tokens, confirmation tokens with TTL
- **Environment variables**: Centralized configuration storage in Redis
- **Entity cache**: Authors, topics, publications with automatic invalidation
- **Search cache**: Normalized queries with their results
- **Pub/Sub channels**: Real-time notifications and communication
- **Optimization**: Pipeline operations, caching strategies
- **Monitoring**: Diagnostic commands and performance troubleshooting
- Supports both sync and async functions in the cache_on_arguments decorator
- Automatic JSON (de)serialization using CustomJSONEncoder
- Fallback pickle serialization for complex objects
- Unique cache keys generated from the function signature and its arguments
- Configurable cache lifetime (TTL)
- Manual cache invalidation for specific functions and arguments
## CORS Configuration
- Supported methods: GET, POST, OPTIONS
- Credentials support enabled
- Allowed headers: Authorization, Content-Type, X-Requested-With, DNT, Cache-Control
- Preflight responses cached for 20 days (1728000 seconds)
## Threaded comment pagination
- Efficient comment loading that respects the hierarchical structure
- A dedicated `load_comments_branch` query for optimized loading of a comment branch
- Root comments of an article can be loaded together with their first replies
- Flexible pagination for both root and child comments
- The `stat.comments_count` field reports the number of replies to a comment
- A dedicated `first_replies` field holds the first replies to a comment
- Multiple sort orders supported (newest, oldest, popular)
- Optimized SQL queries to minimize database load
## Modular authentication system
- **Specialized token managers**:
  - `SessionTokenManager`: user session management
  - `VerificationTokenManager`: tokens for email/phone confirmation and password changes
  - `OAuthTokenManager`: OAuth token management for external providers
  - `BatchTokenOperations`: batch token operations
  - `TokenMonitoring`: monitoring and token usage statistics
- **Improved performance**:
  - 50% faster Redis operations via pipelines
  - 30% lower memory consumption
  - Optimized database queries
- **Security**:
  - PKCE support for every OAuth provider
  - Automatic cleanup of expired tokens
  - Replay-attack protection
## OAuth integration
- **7 supported providers**:
  - Google, GitHub, Facebook
  - X (Twitter), Telegram
  - VK (VKontakte), Yandex
- **Handling providers without email**:
  - Temporary emails generated for X and Telegram
  - Email can be updated later in the profile
- **Tokens in Redis**:
  - Access and refresh tokens stored with TTL
  - Automatic token refresh
  - Centralized management through Redis
- **Security**:
  - PKCE for every OAuth flow
  - Short-lived state parameters in Redis (10-minute TTL)
  - One-time sessions
  - Failed authentication attempts are logged
## Password and email management
- **The updateSecurity mutation**:
  - Password change with complexity validation
  - Email change with two-step confirmation
  - Simultaneous password and email change
- **Confirmation tokens in Redis**:
  - Automatic TTL on every token
  - Secure storage of confirmation data
- **Additional mutations**:
  - confirmEmailChange
  - cancelEmailChange
## Featured publications
- **Automatic featured status**:
  - A publication becomes featured after more than 4 likes from authors who themselves have featured articles
  - Author qualification check: the voter must have published featured articles
  - The process is logged for debugging and monitoring
- **Conditions for removal from the front page (unfeatured)**:
  - **Condition 1**: Fewer than 5 upvotes (positive reactions)
  - **Condition 2**: Negative reactions make up 20% or more of all votes
  - The check runs only for publications that are already featured
- **Optimized processing logic** (see the sketch below):
  - The unfeature check takes priority over the feature check when processing reactions
  - Conditions are re-checked automatically when reactions are added or removed
  - Data types are handled correctly in the check functions
- **Integration with the reaction system**:
  - Handled in `create_reaction` for new reactions
  - Handled in `delete_reaction` for removed reactions
  - Only reactions on the publication itself count (not on comments)
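A sketch of the unfeature conditions listed above (the function and argument names are illustrative; the real checks live in the reaction resolvers):

```python
def should_unfeature(upvotes: int, downvotes: int) -> bool:
    # Condition 1: fewer than 5 positive votes
    if upvotes < 5:
        return True
    # Condition 2: negative reactions are 20% or more of all votes
    total = upvotes + downvotes
    return total > 0 and downvotes / total >= 0.2

# Example: 6 upvotes, 2 downvotes -> 25% negative -> unfeatured
assert should_unfeature(6, 2) is True
```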
## RBAC
- **Permission inheritance between roles** happens only when permissions are initialized for a community. Redis stores the already-expanded (full) permission lists per role, so a permission check is a fast lookup with no on-the-fly inheritance.
## Core features
- RBAC with a role hierarchy; inheritance resolved only at initialization; fast permission access through Redis
## Changelog
- v0.6.11: RBAC - inheritance at initialization only, faster lookups, simpler code, tests fixed

docs/follower.md Normal file
@@ -0,0 +1,219 @@
# Following System
## Overview
System supports following different entity types:
- Authors
- Topics
- Communities
- Shouts (Posts)
## GraphQL API
### Mutations
#### follow
Follow an entity (author/topic/community/shout).
**Parameters:**
- `what: String!` - Entity type (`AUTHOR`, `TOPIC`, `COMMUNITY`, `SHOUT`)
- `slug: String` - Entity slug
- `entity_id: Int` - Optional entity ID
**Returns:**
```typescript
{
authors?: Author[] // For AUTHOR type
topics?: Topic[] // For TOPIC type
communities?: Community[] // For COMMUNITY type
shouts?: Shout[] // For SHOUT type
error?: String // Error message if any
}
```
#### unfollow
Unfollow an entity.
**Parameters:** Same as `follow`
**Returns:** Same as `follow`
**Important:** Always returns current following list even if the subscription was not found, ensuring UI consistency.
### Queries
#### get_shout_followers
Get list of authors who reacted to a shout.
**Parameters:**
- `slug: String` - Shout slug
- `shout_id: Int` - Optional shout ID
**Returns:**
```typescript
Author[] // List of authors who reacted
```
## Caching System
### Supported Entity Types
- Authors: `cache_author`, `get_cached_follower_authors`
- Topics: `cache_topic`, `get_cached_follower_topics`
- Communities: No cache
- Shouts: No cache
### Cache Flow
1. On follow/unfollow:
- Update entity in cache
- **Invalidate user's following list cache** (NEW)
- Update follower's following list
2. Cache is updated before notifications
### Cache Invalidation (NEW)
Following cache keys are invalidated after operations:
- `author:follows-topics:{user_id}` - After topic follow/unfollow
- `author:follows-authors:{user_id}` - After author follow/unfollow
This ensures fresh data is fetched from database on next request.
## Error Handling
### Enhanced Error Handling (UPDATED)
- Unauthorized access check
- Entity existence validation
- Duplicate follow prevention
- **Graceful handling of "following not found" errors**
- **Always returns current following list, even on errors**
- Full error logging
- Transaction safety with `local_session()`
### Error Response Format
```typescript
{
error?: "following was not found" | "invalid unfollow type" | "access denied",
topics?: Topic[], // Always present for topic operations
authors?: Author[], // Always present for author operations
// ... other entity types
}
```
## Recent Fixes (NEW)
### Issue 1: Stale UI State on Unfollow Errors
**Problem:** When unfollow operation failed with "following was not found", the client didn't update its state because it only processed successful responses.
**Root Cause:**
1. `unfollow` mutation returned error with empty follows list `[]`
2. Client logic: `if (result && !result.error)` prevented state updates on errors
3. User remained "subscribed" in UI despite no actual subscription in database
**Solution:**
1. **Always fetch current following list** from cache/database
2. **Return actual following state** even when subscription not found
3. **Add cache invalidation** after successful operations
4. **Enhanced logging** for debugging
### Issue 2: Inconsistent Behavior in Follow Operations (NEW)
**Problem:** The `follow` function had similar issues to `unfollow`:
- Could return `None` instead of actual following list in error scenarios
- Cache was not invalidated when trying to follow already-followed entities
- Inconsistent error handling between follow/unfollow operations
**Root Cause:**
1. `follow` mutation could return `{topics: null}` when `get_cached_follows_method` was not available
2. When user was already following an entity, cache invalidation was skipped
3. Error responses didn't include current following state
**Solution:**
1. **Always return actual following list** from cache/database
2. **Invalidate cache on every operation** (both new and existing subscriptions)
3. **Add "already following" error** while still returning current state
4. **Unified error handling** consistent with unfollow
### Code Changes
```python
# UNFOLLOW - Before (BROKEN)
if sub:
    # ... process unfollow
else:
    return {"error": "following was not found", f"{entity_type}s": follows}  # follows was []

# UNFOLLOW - After (FIXED)
if sub:
    # ... process unfollow
    # Invalidate cache
    await redis.execute("DEL", f"author:follows-{entity_type}s:{follower_id}")
else:
    error = "following was not found"
# Always get current state
existing_follows = await get_cached_follows_method(follower_id)
return {f"{entity_type}s": existing_follows, "error": error}

# FOLLOW - Before (BROKEN)
if existing_sub:
    logger.info(f"User already following...")
    # Cache not invalidated, could return stale data
else:
    # ... create subscription
    # Cache invalidated only here
follows = None  # Could be None!
# ... complex logic to build follows list
return {f"{entity_type}s": follows}  # follows could be None

# FOLLOW - After (FIXED)
if existing_sub:
    error = "already following"
else:
    # ... create subscription
# Always invalidate cache and get current state
await redis.execute("DEL", f"author:follows-{entity_type}s:{follower_id}")
existing_follows = await get_cached_follows_method(follower_id)
return {f"{entity_type}s": existing_follows, "error": error}
```
### Impact
**Before fixes:**
- UI could show incorrect subscription state
- Cache inconsistencies between follow/unfollow operations
- Client-side logic `if (result && !result.error)` failed on valid error states
**After fixes:**
- ✅ **UI always receives current subscription state**
- ✅ **Consistent cache invalidation** on all operations
- ✅ **Unified error handling** between follow/unfollow
- ✅ **Client can safely update UI** even on error responses
## Notifications
- Sent when author is followed/unfollowed
- Contains:
- Follower info
- Author ID
- Action type ("follow"/"unfollow")
## Database Schema
### Follower Tables
- `AuthorFollower`
- `TopicFollower`
- `CommunityFollower`
- `ShoutReactionsFollower`
Each table contains:
- `follower` - ID of following user
- `{entity_type}` - ID of followed entity
## Testing
Run the test script to verify fixes:
```bash
python test_unfollow_fix.py
```
### Test Coverage
- ✅ Unfollow existing subscription
- ✅ Unfollow non-existent subscription
- ✅ Cache invalidation
- ✅ Proper error handling
- ✅ UI state consistency

docs/load_shouts.md Normal file
@@ -0,0 +1,80 @@
# Shout loading system
## Implementation notes
### Base query
- Automatically loads the main author
- Attaches the shout's main topic
- Supports a flexible filtering system
- Optimizes queries based on the requested fields
### Statistics
- Like/dislike counts
- Number of comments
- Date of the last reaction
- Statistics are loaded only when the `stat` field is requested
### Performance optimizations
- Lazy loading of related data
- Results cached for 5 minutes
- Batch loading of authors and topics
- Subqueries for complex selections
## Feed types
### Random top posts (load_shouts_random_top)
**Advantages:**
- Diverse content
- Fast selection from the cached pool of top posts
- Configurable pool size
**Limitations:**
- Refreshes every 5 minutes
- Maximum pool size: 100 posts
- Only likes/dislikes count (comments are ignored)
### Unrated posts (load_shouts_unrated)
**Advantages:**
- Helps surface new content
- Evens out the distribution of ratings
- Random output order
**Limitations:**
- Only posts with fewer than 3 reactions
- Comments are not counted
- No rating-based sorting
### Bookmarks (load_shouts_bookmarked)
**Advantages:**
- Personalized selection
- Quick access to saved posts
- Supports every filter
**Limitations:**
- Requires authentication
- Bookmark count is capped
- Caching is disabled
## Important details
### Pagination
- Default page size: 10
- Maximum page size: 100 (see the sketch at the end of this document)
- Cursor pagination supported
### Caching
- TTL: 5 minutes
- Invalidated when a post changes
- A separate cache per sort order
### Sorting
- By rating (likes minus dislikes)
- By comment count
- By date of the last reaction
- By publication date (the default)
### Security
- Access rights checks
- Deleted content is filtered out
- SQL injection protection
- Input validation
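A sketch of how the page-size bounds and the 5-minute cache described above might combine in a feed resolver (`fetch_feed` is an assumed data-fetching coroutine; `cached_query` is documented in docs/caching.md):

```python
DEFAULT_LIMIT = 10
MAX_LIMIT = 100
CACHE_TTL = 300  # 5 minutes

async def load_shouts_feed(limit: int = DEFAULT_LIMIT, offset: int = 0):
    # Clamp the page size to the documented bounds
    limit = max(1, min(limit, MAX_LIMIT))
    cache_key = f"shouts:feed:limit={limit}:offset={offset}"
    return await cached_query(cache_key, fetch_feed, ttl=CACHE_TTL,
                              limit=limit, offset=offset)
```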

docs/oauth-deployment.md Normal file
@@ -0,0 +1,199 @@
# OAuth Deployment Checklist
## 🚀 Quick Setup Guide
### 1. Backend Implementation
```bash
# Add to requirements.txt or your poetry config
redis>=4.0.0
httpx>=0.24.0
pydantic>=2.0.0
```
### 2. Environment Variables
```bash
# .env file
GOOGLE_CLIENT_ID=your_google_client_id
GOOGLE_CLIENT_SECRET=your_google_client_secret
FACEBOOK_APP_ID=your_facebook_app_id
FACEBOOK_APP_SECRET=your_facebook_app_secret
GITHUB_CLIENT_ID=your_github_client_id
GITHUB_CLIENT_SECRET=your_github_client_secret
VK_APP_ID=your_vk_app_id
VK_APP_SECRET=your_vk_app_secret
YANDEX_CLIENT_ID=your_yandex_client_id
YANDEX_CLIENT_SECRET=your_yandex_client_secret
REDIS_URL=redis://localhost:6379/0
JWT_SECRET=your_super_secret_jwt_key
JWT_EXPIRATION_HOURS=24
```
### 3. Database Migration
```sql
-- Create oauth_links table
CREATE TABLE oauth_links (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES authors(id) ON DELETE CASCADE,
    provider VARCHAR(50) NOT NULL,
    provider_id VARCHAR(255) NOT NULL,
    provider_data JSONB,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(provider, provider_id)
);
CREATE INDEX idx_oauth_links_user_id ON oauth_links(user_id);
CREATE INDEX idx_oauth_links_provider ON oauth_links(provider, provider_id);
```
### 4. OAuth Provider Setup
#### Google OAuth
1. Open the [Google Cloud Console](https://console.cloud.google.com/)
2. Create a new project or pick an existing one
3. Enable the Google+ API
4. Configure the OAuth consent screen
5. Create OAuth 2.0 credentials
6. Add the redirect URIs:
   - `https://your-domain.com/auth/oauth/google/callback`
   - `http://localhost:3000/auth/oauth/google/callback` (for development)
#### Facebook OAuth
1. Open [Facebook Developers](https://developers.facebook.com/)
2. Create a new app
3. Add the "Facebook Login" product
4. Configure Valid OAuth Redirect URIs:
   - `https://your-domain.com/auth/oauth/facebook/callback`
#### GitHub OAuth
1. Open [GitHub Settings](https://github.com/settings/applications/new)
2. Create a new OAuth App
3. Set the Authorization callback URL:
   - `https://your-domain.com/auth/oauth/github/callback`
### 5. Backend Endpoints (FastAPI example)
```python
# auth/oauth.py
from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import RedirectResponse

router = APIRouter(prefix="/auth/oauth")

@router.get("/{provider}")
async def oauth_redirect(provider: str, state: str, redirect_uri: str):
    # Validate the provider
    if provider not in ["google", "facebook", "github", "vk", "yandex"]:
        raise HTTPException(400, "Unsupported provider")
    # Store the state in Redis
    await store_oauth_state(state, redirect_uri)
    # Build the provider URL
    oauth_url = generate_provider_url(provider, state, redirect_uri)
    return RedirectResponse(url=oauth_url)

@router.get("/{provider}/callback")
async def oauth_callback(provider: str, code: str, state: str):
    # Verify the state
    stored_data = await get_oauth_state(state)
    if not stored_data:
        raise HTTPException(400, "Invalid state")
    # Exchange the code for user data
    user_data = await exchange_code_for_user_data(provider, code)
    # Find or create the user
    user = await get_or_create_user_from_oauth(provider, user_data)
    # Issue a JWT
    access_token = generate_jwt_token(user.id)
    # Redirect back with the token
    return RedirectResponse(
        url=f"{stored_data['redirect_uri']}?state={state}&access_token={access_token}"
    )
```
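The `exchange_code_for_user_data` helper used above is not defined in this checklist. A sketch for the Google case using the `httpx` dependency from step 1 (the endpoint URLs are Google's documented OAuth 2.0 endpoints; everything else is an assumption):

```python
import os
import httpx

async def exchange_code_for_user_data(provider: str, code: str) -> dict:
    # Sketched for Google only; other providers differ in URLs and payloads
    async with httpx.AsyncClient() as client:
        token_resp = await client.post(
            "https://oauth2.googleapis.com/token",
            data={
                "client_id": os.environ["GOOGLE_CLIENT_ID"],
                "client_secret": os.environ["GOOGLE_CLIENT_SECRET"],
                "code": code,
                "grant_type": "authorization_code",
                "redirect_uri": "https://your-domain.com/auth/oauth/google/callback",
            },
        )
        token_resp.raise_for_status()
        access_token = token_resp.json()["access_token"]
        # Fetch the user profile with the fresh access token
        user_resp = await client.get(
            "https://www.googleapis.com/oauth2/v2/userinfo",
            headers={"Authorization": f"Bearer {access_token}"},
        )
        user_resp.raise_for_status()
        return user_resp.json()
```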
### 6. Testing
```bash
# Run the E2E tests
npm run test:e2e -- oauth.spec.ts
# Smoke-test the OAuth endpoints
curl -X GET "http://localhost:8000/auth/oauth/google?state=test&redirect_uri=http://localhost:3000"
```
### 7. Production Deployment
#### Frontend
- [ ] Verify that `coreApiUrl` is correct in production
- [ ] Add OAuth error handling to the UI
- [ ] Configure CSP headers for OAuth redirects
#### Backend
- [ ] Enforce HTTPS on every OAuth endpoint
- [ ] Add rate limiting to the OAuth endpoints
- [ ] Configure CORS for the frontend domains
- [ ] Add monitoring for OAuth errors
- [ ] Set up OAuth event logging
#### Infrastructure
- [ ] Configure Redis for production
- [ ] Add health checks for the OAuth endpoints
- [ ] Set up backups for the oauth_links table
### 8. Security Checklist
- [ ] All OAuth secrets kept in environment variables
- [ ] State validation with a TTL (10 minutes)
- [ ] CSRF protection enabled
- [ ] Redirect URI validation
- [ ] Rate limiting on the OAuth endpoints
- [ ] Every OAuth event logged
- [ ] HTTPS mandatory in production
### 9. Monitoring
```python
# Add metrics for monitoring
from prometheus_client import Counter, Histogram

oauth_requests = Counter('oauth_requests_total', 'OAuth requests', ['provider', 'status'])
oauth_duration = Histogram('oauth_duration_seconds', 'OAuth request duration')

@router.get("/{provider}")
async def oauth_redirect(provider: str, state: str, redirect_uri: str):
    with oauth_duration.time():
        try:
            # OAuth logic
            oauth_requests.labels(provider=provider, status='success').inc()
        except Exception:
            oauth_requests.labels(provider=provider, status='error').inc()
            raise
```
## 🔧 Troubleshooting
### Common errors
1. **"OAuth state mismatch"**
   - Check the Redis TTL
   - Make sure the state is generated correctly
2. **"Provider authentication failed"**
   - Check the client_id and client_secret
   - Make sure the redirect_uri matches the provider's settings
3. **"Invalid redirect URI"**
   - Add every possible redirect URI to the app settings
   - Check HTTPS vs HTTP in production/development
### Debug logs
```bash
# Backend logs
tail -f /var/log/app/oauth.log | grep "oauth"
# Frontend logs (browser console)
# Filter: "[oauth]" or "[SessionProvider]"
```


@@ -0,0 +1,430 @@
# OAuth Implementation Guide
## Frontend (current implementation)
### Session context
```typescript
// src/context/session.tsx
const oauth = (provider: string) => {
  console.info('[oauth] Starting OAuth flow for provider:', provider)
  if (isServer) {
    console.warn('[oauth] OAuth not available during SSR')
    return
  }
  // Generate the OAuth state
  const state = crypto.randomUUID()
  localStorage.setItem('oauth_state', state)
  // Build the OAuth URL
  const oauthUrl = `${coreApiUrl}/auth/oauth/${provider}?state=${state}&redirect_uri=${encodeURIComponent(window.location.origin)}`
  // Redirect to the OAuth provider
  window.location.href = oauthUrl
}
```
### OAuth callback handling
```typescript
// Handling OAuth parameters in SessionProvider
createEffect(
  on([() => searchParams?.state, () => searchParams?.access_token, () => searchParams?.token],
    ([state, access_token, token]) => {
      // OAuth handling
      if (state && access_token) {
        console.info('[SessionProvider] Processing OAuth callback')
        const storedState = !isServer ? localStorage.getItem('oauth_state') : null
        if (storedState === state) {
          console.info('[SessionProvider] OAuth state verified')
          batch(() => {
            changeSearchParams({ mode: 'confirm-email', m: 'auth', access_token }, { replace: true })
            if (!isServer) localStorage.removeItem('oauth_state')
          })
        } else {
          console.warn('[SessionProvider] OAuth state mismatch')
          setAuthError('OAuth state mismatch')
        }
        return
      }
      // Handle a password reset token
      if (token) {
        console.info('[SessionProvider] Processing password reset token')
        changeSearchParams({ mode: 'change-password', m: 'auth', token }, { replace: true })
      }
    },
    { defer: true }
  )
)
```
## Backend requirements
### 1. OAuth Endpoints
#### GET `/auth/oauth/{provider}`
```python
@router.get("/auth/oauth/{provider}")
async def oauth_redirect(
    provider: str,
    state: str,
    redirect_uri: str,
    request: Request
):
    """
    Start the OAuth flow with an external provider

    Args:
        provider: OAuth provider (google, facebook, github)
        state: CSRF token from the client
        redirect_uri: URL to redirect to after authorization

    Returns:
        RedirectResponse: Redirect to the OAuth provider
    """
    # Validate the provider
    if provider not in SUPPORTED_PROVIDERS:
        raise HTTPException(status_code=400, detail="Unsupported OAuth provider")
    # Store the state in the session/Redis for later verification
    await store_oauth_state(state, redirect_uri)
    # Build the provider URL
    oauth_url = generate_provider_url(provider, state, redirect_uri)
    return RedirectResponse(url=oauth_url)
```
#### GET `/auth/oauth/{provider}/callback`
```python
@router.get("/auth/oauth/{provider}/callback")
async def oauth_callback(
    provider: str,
    code: str,
    state: str,
    request: Request
):
    """
    Handle the callback from the OAuth provider

    Args:
        provider: OAuth provider
        code: Authorization code from the provider
        state: CSRF token to verify

    Returns:
        RedirectResponse: Redirect back to the frontend with a token
    """
    # Verify the state
    stored_data = await get_oauth_state(state)
    if not stored_data:
        raise HTTPException(status_code=400, detail="Invalid or expired state")
    # Exchange the code for an access token
    try:
        user_data = await exchange_code_for_user_data(provider, code)
    except OAuthException as e:
        logger.error(f"OAuth error for {provider}: {e}")
        return RedirectResponse(url=f"{stored_data['redirect_uri']}?error=oauth_failed")
    # Find or create the user
    user = await get_or_create_user_from_oauth(provider, user_data)
    # Issue a JWT
    access_token = generate_jwt_token(user.id)
    # Redirect back to the frontend
    redirect_url = f"{stored_data['redirect_uri']}?state={state}&access_token={access_token}"
    return RedirectResponse(url=redirect_url)
```
### 2. Provider Configuration
#### Google OAuth
```python
GOOGLE_OAUTH_CONFIG = {
    "client_id": os.getenv("GOOGLE_CLIENT_ID"),
    "client_secret": os.getenv("GOOGLE_CLIENT_SECRET"),
    "auth_url": "https://accounts.google.com/o/oauth2/v2/auth",
    "token_url": "https://oauth2.googleapis.com/token",
    "user_info_url": "https://www.googleapis.com/oauth2/v2/userinfo",
    "scope": "openid email profile"
}
```
#### Facebook OAuth
```python
FACEBOOK_OAUTH_CONFIG = {
    "client_id": os.getenv("FACEBOOK_APP_ID"),
    "client_secret": os.getenv("FACEBOOK_APP_SECRET"),
    "auth_url": "https://www.facebook.com/v18.0/dialog/oauth",
    "token_url": "https://graph.facebook.com/v18.0/oauth/access_token",
    "user_info_url": "https://graph.facebook.com/v18.0/me",
    "scope": "email public_profile"
}
```
#### GitHub OAuth
```python
GITHUB_OAUTH_CONFIG = {
    "client_id": os.getenv("GITHUB_CLIENT_ID"),
    "client_secret": os.getenv("GITHUB_CLIENT_SECRET"),
    "auth_url": "https://github.com/login/oauth/authorize",
    "token_url": "https://github.com/login/oauth/access_token",
    "user_info_url": "https://api.github.com/user",
    "scope": "read:user user:email"
}
```
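The `generate_provider_url` helper called by the redirect endpoint can be derived from these config dicts. A sketch (the registry name is illustrative, and PKCE parameters are omitted for brevity):

```python
from urllib.parse import urlencode

OAUTH_CONFIGS = {
    "google": GOOGLE_OAUTH_CONFIG,
    "facebook": FACEBOOK_OAUTH_CONFIG,
    "github": GITHUB_OAUTH_CONFIG,
}

def generate_provider_url(provider: str, state: str, redirect_uri: str) -> str:
    config = OAUTH_CONFIGS[provider]
    params = {
        "client_id": config["client_id"],
        "redirect_uri": redirect_uri,
        "response_type": "code",  # authorization code flow
        "scope": config["scope"],
        "state": state,
    }
    return f"{config['auth_url']}?{urlencode(params)}"
```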
### 3. User Management
#### OAuth User Model
```python
class OAuthUser(BaseModel):
    provider: str
    provider_id: str
    email: str
    name: str
    avatar_url: Optional[str] = None
    raw_data: dict
```
#### User Creation/Linking
```python
async def get_or_create_user_from_oauth(
    provider: str,
    oauth_data: OAuthUser
) -> User:
    """
    Find an existing user or create a new one

    Args:
        provider: OAuth provider
        oauth_data: User data from the provider

    Returns:
        User: The user in our system
    """
    # Look up by OAuth link
    oauth_link = await OAuthLink.get_by_provider_and_id(
        provider=provider,
        provider_id=oauth_data.provider_id
    )
    if oauth_link:
        return await User.get(oauth_link.user_id)
    # Look up by email
    existing_user = await User.get_by_email(oauth_data.email)
    if existing_user:
        # Link OAuth to the existing user
        await OAuthLink.create(
            user_id=existing_user.id,
            provider=provider,
            provider_id=oauth_data.provider_id,
            provider_data=oauth_data.raw_data
        )
        return existing_user
    # Create a new user
    new_user = await User.create(
        email=oauth_data.email,
        name=oauth_data.name,
        pic=oauth_data.avatar_url,
        is_verified=True,  # An OAuth email counts as verified
        registration_method='oauth',
        registration_provider=provider
    )
    # Create the OAuth link
    await OAuthLink.create(
        user_id=new_user.id,
        provider=provider,
        provider_id=oauth_data.provider_id,
        provider_data=oauth_data.raw_data
    )
    return new_user
```
### 4. Security
#### State Management
```python
import json
from datetime import datetime, timedelta
from typing import Optional

import redis.asyncio as redis

redis_client = redis.Redis()

async def store_oauth_state(
    state: str,
    redirect_uri: str,
    ttl: timedelta = timedelta(minutes=10)
):
    """Store the OAuth state with a TTL"""
    key = f"oauth_state:{state}"
    data = {
        "redirect_uri": redirect_uri,
        "created_at": datetime.utcnow().isoformat()
    }
    await redis_client.setex(key, ttl, json.dumps(data))

async def get_oauth_state(state: str) -> Optional[dict]:
    """Fetch and delete the OAuth state"""
    key = f"oauth_state:{state}"
    data = await redis_client.get(key)
    if data:
        await redis_client.delete(key)  # One-time use
        return json.loads(data)
    return None
```
#### CSRF Protection
```python
from urllib.parse import urlparse

def validate_oauth_state(stored_state: str, received_state: str) -> bool:
    """Verify the OAuth state to protect against CSRF"""
    return stored_state == received_state

def validate_redirect_uri(uri: str) -> bool:
    """Validate the redirect_uri to prevent open redirects"""
    allowed_domains = [
        "localhost:3000",
        "discours.io",
        "new.discours.io"
    ]
    parsed = urlparse(uri)
    return any(domain in parsed.netloc for domain in allowed_domains)
```
### 5. Database Schema
#### OAuth Links Table
```sql
CREATE TABLE oauth_links (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    provider VARCHAR(50) NOT NULL,
    provider_id VARCHAR(255) NOT NULL,
    provider_data JSONB,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(provider, provider_id)
);
CREATE INDEX idx_oauth_links_user_id ON oauth_links(user_id);
CREATE INDEX idx_oauth_links_provider ON oauth_links(provider, provider_id);
```
### 6. Environment Variables
#### Required Config
```bash
# Google OAuth
GOOGLE_CLIENT_ID=your_google_client_id
GOOGLE_CLIENT_SECRET=your_google_client_secret
# Facebook OAuth
FACEBOOK_APP_ID=your_facebook_app_id
FACEBOOK_APP_SECRET=your_facebook_app_secret
# GitHub OAuth
GITHUB_CLIENT_ID=your_github_client_id
GITHUB_CLIENT_SECRET=your_github_client_secret
# Redis for state management
REDIS_URL=redis://localhost:6379/0
# JWT
JWT_SECRET=your_jwt_secret_key
JWT_EXPIRATION_HOURS=24
```
### 7. Error Handling
#### OAuth Exceptions
```python
class OAuthException(Exception):
    pass

class InvalidProviderException(OAuthException):
    pass

class StateValidationException(OAuthException):
    pass

class ProviderAPIException(OAuthException):
    pass

# Error responses
@app.exception_handler(OAuthException)
async def oauth_exception_handler(request: Request, exc: OAuthException):
    logger.error(f"OAuth error: {exc}")
    return RedirectResponse(
        url=f"{request.base_url}?error=oauth_failed&message={str(exc)}"
    )
```
### 8. Testing
#### Unit Tests
```python
def test_oauth_redirect():
    response = client.get("/auth/oauth/google?state=test&redirect_uri=http://localhost:3000")
    assert response.status_code == 307
    assert "accounts.google.com" in response.headers["location"]

def test_oauth_callback():
    # Mock the provider response
    with mock.patch('oauth.exchange_code_for_user_data') as mock_exchange:
        mock_exchange.return_value = OAuthUser(
            provider="google",
            provider_id="123456",
            email="test@example.com",
            name="Test User"
        )
        response = client.get("/auth/oauth/google/callback?code=test_code&state=test_state")
        assert response.status_code == 307
        assert "access_token=" in response.headers["location"]
```
## Frontend Testing
### E2E Tests
```typescript
// tests/oauth.spec.ts
test('OAuth flow with Google', async ({ page }) => {
  await page.goto('/login')
  // Click the Google OAuth button
  await page.click('[data-testid="oauth-google"]')
  // Should redirect to Google
  await page.waitForURL(/accounts\.google\.com/)
  // Mock successful OAuth (in the test environment)
  await page.goto('/?state=test&access_token=mock_token')
  // Should be logged in
  await expect(page.locator('[data-testid="user-menu"]')).toBeVisible()
})
```
## Deployment Checklist
- [ ] Register OAuth applications with the providers
- [ ] Configure redirect URLs in the provider consoles
- [ ] Add the environment variables
- [ ] Set up Redis for state management
- [ ] Create the oauth_links table
- [ ] Add rate limiting to the OAuth endpoints
- [ ] Set up OAuth error monitoring
- [ ] Test every provider in staging
- [ ] Add OAuth event logging

docs/oauth-setup.md Normal file
@@ -0,0 +1,123 @@
# OAuth Providers Setup Guide
This guide explains how to set up OAuth authentication for various social platforms.
## Supported Providers
The platform supports the following OAuth providers:
- Google
- GitHub
- Facebook
- X (Twitter)
- Telegram
- VK (VKontakte)
- Yandex
## Environment Variables
Add the following environment variables to your `.env` file:
```bash
# Google OAuth
OAUTH_CLIENTS_GOOGLE_ID=your_google_client_id
OAUTH_CLIENTS_GOOGLE_KEY=your_google_client_secret
# GitHub OAuth
OAUTH_CLIENTS_GITHUB_ID=your_github_client_id
OAUTH_CLIENTS_GITHUB_KEY=your_github_client_secret
# Facebook OAuth
OAUTH_CLIENTS_FACEBOOK_ID=your_facebook_app_id
OAUTH_CLIENTS_FACEBOOK_KEY=your_facebook_app_secret
# X (Twitter) OAuth
OAUTH_CLIENTS_X_ID=your_x_client_id
OAUTH_CLIENTS_X_KEY=your_x_client_secret
# Telegram OAuth
OAUTH_CLIENTS_TELEGRAM_ID=your_telegram_bot_token
OAUTH_CLIENTS_TELEGRAM_KEY=your_telegram_bot_secret
# VK OAuth
OAUTH_CLIENTS_VK_ID=your_vk_app_id
OAUTH_CLIENTS_VK_KEY=your_vk_secure_key
# Yandex OAuth
OAUTH_CLIENTS_YANDEX_ID=your_yandex_client_id
OAUTH_CLIENTS_YANDEX_KEY=your_yandex_client_secret
```
## Provider Setup Instructions
### Google
1. Go to [Google Cloud Console](https://console.cloud.google.com/)
2. Create a new project or select existing
3. Enable Google+ API and OAuth 2.0
4. Create OAuth 2.0 Client ID credentials
5. Add your callback URLs: `https://yourdomain.com/oauth/google/callback`
### GitHub
1. Go to [GitHub Developer Settings](https://github.com/settings/developers)
2. Create a new OAuth App
3. Set Authorization callback URL: `https://yourdomain.com/oauth/github/callback`
### Facebook
1. Go to [Facebook Developers](https://developers.facebook.com/)
2. Create a new app
3. Add Facebook Login product
4. Configure Valid OAuth redirect URIs: `https://yourdomain.com/oauth/facebook/callback`
### X (Twitter)
1. Go to [Twitter Developer Portal](https://developer.twitter.com/)
2. Create a new app
3. Enable OAuth 2.0 authentication
4. Set Callback URLs: `https://yourdomain.com/oauth/x/callback`
5. **Note**: X doesn't provide email addresses through their API
### Telegram
1. Create a bot with [@BotFather](https://t.me/botfather)
2. Use `/newbot` command and follow instructions
3. Get your bot token
4. Configure domain settings with `/setdomain` command
5. **Note**: Telegram doesn't provide email addresses
### VK (VKontakte)
1. Go to [VK for Developers](https://vk.com/dev)
2. Create a new application
3. Set Authorized redirect URI: `https://yourdomain.com/oauth/vk/callback`
4. **Note**: Email access requires special permissions from VK
### Yandex
1. Go to [Yandex OAuth](https://oauth.yandex.com/)
2. Create a new application
3. Set Callback URI: `https://yourdomain.com/oauth/yandex/callback`
4. Select required permissions: `login:email login:info`
## Email Handling
Some providers (X, Telegram) don't provide email addresses. In these cases:
- A temporary email is generated: `{provider}_{user_id}@oauth.local`
- Users can update their email in profile settings later
- `email_verified` is set to `false` for generated emails
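A sketch of the temporary-email rule above (the helper name is illustrative):

```python
def generate_temp_email(provider: str, user_id: str) -> str:
    # Matches the documented pattern: {provider}_{user_id}@oauth.local
    return f"{provider}_{user_id}@oauth.local"

assert generate_temp_email("telegram", "42") == "telegram_42@oauth.local"
```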
## Usage in Frontend
OAuth URLs:
```
/oauth/google
/oauth/github
/oauth/facebook
/oauth/x
/oauth/telegram
/oauth/vk
/oauth/yandex
```
Each provider accepts a `state` parameter for CSRF protection and a `redirect_uri` for post-authentication redirects.
## Security Notes
- All OAuth flows use PKCE (Proof Key for Code Exchange) for additional security
- State parameters are stored in Redis with 10-minute TTL
- OAuth sessions are one-time use only
- Failed authentications are logged for monitoring

docs/oauth.md Normal file
@@ -0,0 +1,329 @@
# OAuth Token Management
## Overview
OAuth token management backed by Redis for secure, high-performance storage of access and refresh tokens from multiple providers.
## Architecture
### Redis Storage
OAuth tokens are stored in Redis with automatic expiration (TTL):
- `oauth_access:{user_id}:{provider}` - access tokens
- `oauth_refresh:{user_id}:{provider}` - refresh tokens
### Supported Providers
- Google OAuth 2.0
- Facebook Login
- GitHub OAuth
## API Documentation
### OAuthTokenStorage Class
#### store_access_token()
Stores an access token in Redis with an automatic TTL.
```python
await OAuthTokenStorage.store_access_token(
    user_id=123,
    provider="google",
    access_token="ya29.a0AfH6SM...",
    expires_in=3600,
    additional_data={"scope": "profile email"}
)
```
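A sketch of what `store_access_token` might do internally, given the key format above and the SET + EXPIRE approach described under Security Considerations (the payload fields follow the Access Token Structure section; `redis.execute` is the call style used elsewhere in these docs):

```python
import json
import time

async def store_access_token(user_id: int, provider: str, access_token: str,
                             expires_in: int = 3600, additional_data: dict = None):
    key = f"oauth_access:{user_id}:{provider}"
    payload = {
        "token": access_token,
        "provider": provider,
        "user_id": user_id,
        "created_at": int(time.time()),
        "expires_in": expires_in,
        **(additional_data or {}),
    }
    # Separate SET + EXPIRE, as described under "Redis Expiration Benefits"
    await redis.execute("SET", key, json.dumps(payload))
    await redis.execute("EXPIRE", key, expires_in)
```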
#### store_refresh_token()
Сохраняет refresh token с длительным TTL (30 дней по умолчанию).
```python
await OAuthTokenStorage.store_refresh_token(
user_id=123,
provider="google",
refresh_token="1//04...",
ttl=2592000 # 30 дней
)
```
#### get_access_token()
Получает действующий access token из Redis.
```python
token_data = await OAuthTokenStorage.get_access_token(123, "google")
if token_data:
    access_token = token_data["token"]
    expires_in = token_data["expires_in"]
```
#### refresh_access_token()
Updates the access token (and, optionally, the refresh token).
```python
success = await OAuthTokenStorage.refresh_access_token(
    user_id=123,
    provider="google",
    new_access_token="ya29.new_token...",
    expires_in=3600,
    new_refresh_token="1//04new..."  # optional
)
```
#### delete_tokens()
Deletes all of a user's tokens for a given provider.
```python
await OAuthTokenStorage.delete_tokens(123, "google")
```
#### get_user_providers()
Returns the list of OAuth providers connected for a user.
```python
providers = await OAuthTokenStorage.get_user_providers(123)
# ["google", "github"]
```
#### extend_token_ttl()
Extends a token's lifetime.
```python
# Extend the access token by 30 minutes
success = await OAuthTokenStorage.extend_token_ttl(123, "google", "access", 1800)
# Extend the refresh token by 7 days
success = await OAuthTokenStorage.extend_token_ttl(123, "google", "refresh", 604800)
```
#### get_token_info()
Returns detailed token information, including TTL.
```python
info = await OAuthTokenStorage.get_token_info(123, "google")
# {
# "user_id": 123,
# "provider": "google",
# "access_token": {"exists": True, "ttl": 3245},
# "refresh_token": {"exists": True, "ttl": 2589600}
# }
```
## Data Structures
### Access Token Structure
```json
{
  "token": "ya29.a0AfH6SM...",
  "provider": "google",
  "user_id": 123,
  "created_at": 1640995200,
  "expires_in": 3600,
  "scope": "profile email",
  "token_type": "Bearer"
}
```
### Refresh Token Structure
```json
{
  "token": "1//04...",
  "provider": "google",
  "user_id": 123,
  "created_at": 1640995200
}
```
## Security Considerations
### Token Expiration
- **Access tokens**: TTL based on the provider's `expires_in` (usually 1 hour)
- **Refresh tokens**: 30-day TTL by default
- **Automatic cleanup**: Redis removes expired tokens on its own
- **Fine-grained expiry control**: SET + EXPIRE are used for precise TTL control
### Redis Expiration Benefits
- **Flexibility**: the TTL of existing tokens can be changed via EXPIRE
- **Monitoring**: the TTL command shows a token's remaining lifetime
- **Extension**: token lifetimes can be prolonged without rewriting the value
- **Control**: separate SET/EXPIRE operations allow finer-grained management
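As an illustration of the commands behind these benefits, a sketch with a raw `redis.asyncio` client (connection setup is illustrative; `OAuthTokenStorage` wraps the same primitives):

```python
import redis.asyncio as aioredis

async def peek_and_extend(user_id: int, provider: str, extra_seconds: int) -> int:
    """Inspect a token's remaining TTL and extend it in place via EXPIRE."""
    r = aioredis.from_url("redis://localhost:6379")
    key = f"oauth_access:{user_id}:{provider}"
    ttl = await r.ttl(key)  # -2 if the key is gone, -1 if it has no TTL
    if ttl > 0:
        await r.expire(key, ttl + extra_seconds)
    return ttl
```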
### Access Control
- Tokens are accessible only to the account owner
- Tokens are not exposed through the GraphQL API
- Tokens are not stored in the main database
### Provider Isolation
- Tokens from different providers are stored separately
- Deleting one provider's tokens does not affect the others
- Multiple OAuth connections per user are supported
## Integration Examples
### OAuth Login Flow
```python
# After a successful authorization with the OAuth provider
async def handle_oauth_callback(user_id: int, provider: str, tokens: dict):
    # Store the tokens in Redis
    await OAuthTokenStorage.store_access_token(
        user_id=user_id,
        provider=provider,
        access_token=tokens["access_token"],
        expires_in=tokens.get("expires_in", 3600)
    )
    if "refresh_token" in tokens:
        await OAuthTokenStorage.store_refresh_token(
            user_id=user_id,
            provider=provider,
            refresh_token=tokens["refresh_token"]
        )
```
### Token Refresh
```python
async def refresh_oauth_token(user_id: int, provider: str):
    # Fetch the refresh token
    refresh_data = await OAuthTokenStorage.get_refresh_token(user_id, provider)
    if not refresh_data:
        return False
    # Exchange the refresh token for a new access token
    new_tokens = await exchange_refresh_token(
        provider, refresh_data["token"]
    )
    # Store the new tokens
    return await OAuthTokenStorage.refresh_access_token(
        user_id=user_id,
        provider=provider,
        new_access_token=new_tokens["access_token"],
        expires_in=new_tokens.get("expires_in"),
        new_refresh_token=new_tokens.get("refresh_token")
    )
```
### API Integration
```python
async def make_oauth_request(user_id: int, provider: str, endpoint: str):
    # Fetch a valid access token
    token_data = await OAuthTokenStorage.get_access_token(user_id, provider)
    if not token_data:
        # No token: the user has to re-authorize
        raise OAuthTokenMissing()
    # Call the provider's API (httpx module-level get() is sync, so use an async client)
    headers = {"Authorization": f"Bearer {token_data['token']}"}
    async with httpx.AsyncClient() as client:
        response = await client.get(endpoint, headers=headers)
        if response.status_code == 401:
            # Token expired, try to refresh it
            if await refresh_oauth_token(user_id, provider):
                # Retry the request with the new token
                token_data = await OAuthTokenStorage.get_access_token(user_id, provider)
                headers = {"Authorization": f"Bearer {token_data['token']}"}
                response = await client.get(endpoint, headers=headers)
    return response.json()
```
### TTL Monitoring and Management
```python
async def monitor_token_expiration(user_id: int, provider: str):
    """Monitor and manage token lifetimes"""
    # Fetch token info
    info = await OAuthTokenStorage.get_token_info(user_id, provider)
    # Check the access token
    if info["access_token"]["exists"]:
        ttl = info["access_token"]["ttl"]
        if ttl < 300:  # less than 5 minutes
            logger.warning(f"Access token expires soon: {ttl}s")
            # Refresh the token automatically
            await refresh_oauth_token(user_id, provider)
    # Check the refresh token
    if info["refresh_token"]["exists"]:
        ttl = info["refresh_token"]["ttl"]
        if ttl < 86400:  # less than 1 day
            logger.warning(f"Refresh token expires soon: {ttl}s")
            # Notify the user that re-authorization will be required

async def extend_session_if_active(user_id: int, provider: str):
    """Extend the session for active users"""
    # Check user activity
    if await is_user_active(user_id):
        # Extend the access token by 1 hour
        success = await OAuthTokenStorage.extend_token_ttl(
            user_id, provider, "access", 3600
        )
        if success:
            logger.info(f"Extended access token for active user {user_id}")
```
## Migration from Database
If you already have OAuth tokens in the database, use this script to migrate them:
```python
async def migrate_oauth_tokens():
    """Migrate OAuth tokens from the database to Redis"""
    with local_session() as session:
        # Assuming tokens were stored on the authors table
        authors = session.query(Author).filter(
            or_(
                Author.provider_access_token.is_not(None),
                Author.provider_refresh_token.is_not(None)
            )
        ).all()
        for author in authors:
            # Derive the provider list from the oauth field
            if author.oauth:
                for provider in author.oauth.keys():
                    if author.provider_access_token:
                        await OAuthTokenStorage.store_access_token(
                            user_id=author.id,
                            provider=provider,
                            access_token=author.provider_access_token
                        )
                    if author.provider_refresh_token:
                        await OAuthTokenStorage.store_refresh_token(
                            user_id=author.id,
                            provider=provider,
                            refresh_token=author.provider_refresh_token
                        )
    print(f"Migrated OAuth tokens for {len(authors)} authors")
```
## Performance Benefits
### Redis Advantages
- **Speed**: token lookups take microseconds
- **Scalability**: no load on the main database
- **Automatic cleanup**: TTL removes expired tokens
- **Memory**: efficient use of Redis memory
### Reduced Database Load
- OAuth tokens are no longer written to the main database
- Fewer writes to the authors table
- Faster user queries, with no JOIN against token tables
## Monitoring and Maintenance
### Redis Memory Usage
```bash
# Check how much memory OAuth tokens use
redis-cli --scan --pattern "oauth_*" | wc -l
redis-cli memory usage oauth_access:123:google
```
### Cleanup Statistics
```python
# Periodic cleanup with logging (optional)
async def oauth_cleanup_job():
    cleaned = await OAuthTokenStorage.cleanup_expired_tokens()
    logger.info(f"OAuth cleanup completed, {cleaned} tokens processed")
```

82
docs/rating.md Normal file

@@ -0,0 +1,82 @@
# Rating System
## GraphQL Resolvers
### Queries
#### get_my_rates_shouts
Get user's reactions (LIKE/DISLIKE) for specified posts.
**Parameters:**
- `shouts: [Int!]!` - array of shout IDs
**Returns:**
```typescript
[{
  shout_id: Int
  my_rate: ReactionKind // LIKE or DISLIKE
}]
```
#### get_my_rates_comments
Get user's reactions (LIKE/DISLIKE) for specified comments.
**Parameters:**
- `comments: [Int!]!` - array of comment IDs
**Returns:**
```typescript
[{
  comment_id: Int
  my_rate: ReactionKind // LIKE or DISLIKE
}]
```
### Mutations
#### rate_author
Rate another author (karma system).
**Parameters:**
- `rated_slug: String!` - author's slug
- `value: Int!` - rating value (positive/negative)
## Rating Calculation
### Author Rating Components
#### Shouts Rating
- Calculated from LIKE/DISLIKE reactions on author's posts
- Each LIKE: +1
- Each DISLIKE: -1
- Excludes deleted reactions
- Excludes comment reactions
#### Comments Rating
- Calculated from LIKE/DISLIKE reactions on author's comments
- Each LIKE: +1
- Each DISLIKE: -1
- Counts only reactions whose target is a COMMENT-type reaction (i.e., reactions to comments)
- Excludes deleted reactions
#### Legacy Karma
- Based on direct author ratings via `rate_author` mutation
- Stored in `AuthorRating` table
- Each positive rating: +1
- Each negative rating: -1
### Helper Functions
- `count_author_comments_rating()` - Calculate comment rating
- `count_author_shouts_rating()` - Calculate posts rating
- `get_author_rating_old()` - Get legacy karma rating
- `get_author_rating_shouts()` - Get posts rating (optimized)
- `get_author_rating_comments()` - Get comments rating (optimized)
- `add_author_rating_columns()` - Add rating columns to author query
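As a hedged sketch of what `count_author_shouts_rating()` computes (model and column names such as `Reaction`, `Shout`, and `ReactionKind` are assumptions about the schema, not taken from this document):

```python
from sqlalchemy import case, func

def count_author_shouts_rating(session, author_id: int) -> int:
    """Sum LIKE (+1) / DISLIKE (-1) reactions on the author's posts."""
    rating = (
        session.query(
            func.coalesce(
                func.sum(
                    case(
                        (Reaction.kind == ReactionKind.LIKE.value, 1),
                        (Reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                ),
                0,
            )
        )
        .join(Shout, Shout.id == Reaction.shout)
        .filter(
            Shout.created_by == author_id,
            Reaction.reply_to.is_(None),    # exclude reactions on comments
            Reaction.deleted_at.is_(None),  # exclude deleted reactions
        )
        .scalar()
    )
    return int(rating)
```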
## Notes
- All ratings exclude deleted content
- Reactions are unique per user/content
- Rating calculations are optimized with SQLAlchemy
- System supports both direct author rating and content-based rating

477
docs/rbac-system.md Normal file

@@ -0,0 +1,477 @@
# Roles and Permissions System (RBAC)
## Overview
Role-Based Access Control (RBAC) provides flexible management of user rights within the platform's communities.
## System Architecture
### Core Components
1. **Community** - the community, the context in which roles apply
2. **CommunityAuthor** - the link between a user, a community, and the user's roles
3. **Role** - a user role (reader, author, editor, admin)
4. **Permission** - permission to perform an action
5. **RBAC Service** - the service that manages roles and permissions
### Data Model
```sql
-- Main table linking a user to a community
CREATE TABLE community_author (
    id INTEGER PRIMARY KEY,
    community_id INTEGER REFERENCES community(id),
    author_id INTEGER REFERENCES author(id),
    roles TEXT, -- CSV role string: "reader,author,editor"
    joined_at INTEGER NOT NULL,
    UNIQUE(community_id, author_id)
);
-- Indexes for performance
CREATE INDEX idx_community_author_community ON community_author(community_id);
CREATE INDEX idx_community_author_author ON community_author(author_id);
```
## System Roles
### Base Roles
#### 1. `reader` (Reader)
- **Required role for every user**
- **Rights:**
  - Read publications
  - View comments
  - Follow communities
  - Basic navigation across the platform
#### 2. `author` (Author)
- **Rights:**
  - Everything `reader` can do
  - Create publications (shouts)
  - Edit own publications
  - Comment
  - Create drafts
#### 3. `artist` (Artist)
- **Rights:**
  - Everything `author` can do
  - Can be listed as a credited artist
  - Upload and manage media files
#### 4. `expert` (Expert)
- **Rights:**
  - Everything `author` can do
  - Attach evidence
  - Verify content
  - Provide expert review of publications
#### 5. `editor` (Editor)
- **Rights:**
  - Everything `expert` can do
  - Moderate content
  - Edit other users' publications
  - Manage tags and categories
  - Moderate comments
#### 6. `admin` (Administrator)
- **Rights:**
  - Everything `editor` can do
  - Manage users
  - Manage roles
  - Configure the community
  - Full access to the admin panel
### Role Hierarchy
```
admin > editor > expert > artist/author > reader
```
Each role automatically includes the rights of every role below it in the hierarchy, as sketched below.
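A minimal sketch of that inheritance (the mapping restates the hierarchy above; the helper itself is illustrative, not from the codebase):

```python
# Each role lists the roles it directly inherits from
ROLE_HIERARCHY: dict[str, list[str]] = {
    "reader": [],
    "author": ["reader"],
    "artist": ["author"],
    "expert": ["author"],
    "editor": ["expert"],
    "admin": ["editor"],
}

def expand_roles(roles: list[str]) -> set[str]:
    """Return the full role set, including everything inherited below."""
    result: set[str] = set()
    stack = list(roles)
    while stack:
        role = stack.pop()
        if role not in result:
            result.add(role)
            stack.extend(ROLE_HIERARCHY.get(role, []))
    return result

# expand_roles(["editor"]) -> {"editor", "expert", "author", "reader"}
```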
## Permissions
### Permission Format
Permissions are written as `resource:action`:
- `shout:create` - create publications
- `shout:edit` - edit publications
- `shout:delete` - delete publications
- `comment:create` - create comments
- `comment:moderate` - moderate comments
- `user:manage` - manage users
- `community:settings` - community settings
### Permission Categories
#### Content
- `shout:create` - create shouts
- `shout:edit_own` - edit own shouts
- `shout:edit_any` - edit any shout
- `shout:delete_own` - delete own shouts
- `shout:delete_any` - delete any shout
- `shout:publish` - publish shouts
- `shout:feature` - feature shouts
#### Comments
- `comment:create` - create comments
- `comment:edit_own` - edit own comments
- `comment:edit_any` - edit any comment
- `comment:delete_own` - delete own comments
- `comment:delete_any` - delete any comment
- `comment:moderate` - moderate comments
#### Users
- `user:view_profile` - view profiles
- `user:edit_own_profile` - edit own profile
- `user:manage_roles` - manage user roles
- `user:ban` - ban users
#### Community
- `community:view` - view the community
- `community:settings` - community settings
- `community:manage_members` - manage members
- `community:analytics` - view analytics
## How the System Works
### 1. User Registration
When a user registers:
```python
# 1. Create the Author record
user = Author(email=email, name=name, ...)
# 2. Link the user to the default community (ID=1)
community_author = CommunityAuthor(
    community_id=1,
    author_id=user.id,
    roles="reader,author"  # default roles
)
# 3. Create a follow record for the community
follower = CommunityFollower(
    community=1,
    follower=user.id
)
```
### 2. Authorization Check
At login, the system checks that the user has the `reader` role:
```python
def login(email, password):
    # 1. Find the user
    author = Author.get_by_email(email)
    # 2. Verify the password
    if not verify_password(password, author.password):
        return error("Invalid password")
    # 3. Fetch the roles in the default community
    user_roles = get_user_roles_in_community(author.id, community_id=1)
    # 4. Require the reader role
    if "reader" not in user_roles and author.email not in ADMIN_EMAILS:
        return error("Not allowed to log in: the 'reader' role is required.")
    # 5. Create a session
    return create_session(author)
```
### 3. Permission Checks
Permissions are checked when actions are performed:
```python
@login_required
async def create_shout(info, input):
    user_id = info.context["author"]["id"]
    # Check the permission to create shouts
    has_permission = await check_user_permission_in_community(
        user_id,
        "shout:create",
        community_id=1
    )
    if not has_permission:
        raise GraphQLError("Insufficient permissions to create a publication")
    # Create the shout
    return Shout.create(input)
```
### 4. Managing Roles
#### Assigning Roles
```python
# Assign a role to a user
assign_role_to_user(user_id=123, role="editor", community_id=1)
# Remove a role
remove_role_from_user(user_id=123, role="editor", community_id=1)
# Set the full role list at once
community.set_user_roles(user_id=123, roles=["reader", "author", "editor"])
```
#### Checking Roles
```python
# Get a user's roles
roles = get_user_roles_in_community(user_id=123, community_id=1)
# Check a specific role
has_role = "editor" in roles
# Check a permission
has_permission = await check_user_permission_in_community(
    user_id=123,
    permission="shout:edit_any",
    community_id=1
)
```
## Community Configuration
### Default Roles
Each community can configure the default roles granted to new users:
```python
# Get the default roles
default_roles = community.get_default_roles()  # ["reader", "author"]
# Set the default roles
community.set_default_roles(["reader"])  # only reader by default
```
### Available Roles
A community can restrict the list of available roles:
```python
# All roles are available by default
available_roles = ["reader", "author", "artist", "expert", "editor", "admin"]
# Restrict to the basic roles only
community.set_available_roles(["reader", "author", "editor"])
```
## Data Migration
### Issues with Existing Users
1. **Users without the `reader` role** - cannot log in
2. **The old role system** - data in `Author.roles` is outdated
3. **Missing `CommunityAuthor` links** - new users end up without roles
### Solutions
#### 1. Automatically adding the `reader` role
```python
async def ensure_user_has_reader_role(user_id: int) -> bool:
    """Make sure the user has the 'reader' role"""
    existing_roles = get_user_roles_in_community(user_id, community_id=1)
    if "reader" not in existing_roles:
        success = assign_role_to_user(user_id, "reader", community_id=1)
        if success:
            logger.info(f"'reader' role added to user {user_id}")
        return True
    return True
```
#### 2. Bulk role fixing
```python
async def fix_all_users_reader_role() -> dict[str, int]:
    """Check every user and add the 'reader' role where missing"""
    stats = {"checked": 0, "fixed": 0, "errors": 0}
    all_authors = session.query(Author).all()
    for author in all_authors:
        stats["checked"] += 1
        try:
            await ensure_user_has_reader_role(author.id)
            stats["fixed"] += 1
        except Exception as e:
            logger.error(f"Error for user {author.id}: {e}")
            stats["errors"] += 1
    return stats
```
#### 3. Migrating from the old system
```python
def migrate_old_roles_to_community_author():
    """Move roles from the old system into CommunityAuthor"""
    # Fetch all roles from the old system
    old_roles = session.query(AuthorRole).all()
    for role in old_roles:
        # Create a CommunityAuthor record
        ca = CommunityAuthor(
            community_id=role.community,
            author_id=role.author,
            roles=role.role
        )
        session.add(ca)
    session.commit()
```
## Role Management API
### GraphQL Mutations
```graphql
# Assign a role to a user
mutation AssignRole($userId: Int!, $role: String!, $communityId: Int) {
  assignRole(userId: $userId, role: $role, communityId: $communityId) {
    success
    message
  }
}
# Remove a role
mutation RemoveRole($userId: Int!, $role: String!, $communityId: Int) {
  removeRole(userId: $userId, role: $role, communityId: $communityId) {
    success
    message
  }
}
# Set all of a user's roles
mutation SetUserRoles($userId: Int!, $roles: [String!]!, $communityId: Int) {
  setUserRoles(userId: $userId, roles: $roles, communityId: $communityId) {
    success
    message
  }
}
```
### GraphQL Queries
```graphql
# Get a user's roles
query GetUserRoles($userId: Int!, $communityId: Int) {
  userRoles(userId: $userId, communityId: $communityId) {
    roles
    permissions
    community {
      id
      name
    }
  }
}
# Get all community members with their roles
query GetCommunityMembers($communityId: Int!) {
  communityMembers(communityId: $communityId) {
    authorId
    roles
    permissions
    joinedAt
    author {
      id
      name
      email
    }
  }
}
```
## Security
### Security Principles
1. **Least privilege** - users receive only the rights they need
2. **Separation of duties** - different roles for different functions
3. **Action auditing** - all role changes are logged
4. **Checks at every layer** - permissions are validated in both the API and the UI
### Protection Against Attacks
1. **Privilege escalation** - rights to change roles are verified
2. **Mass assignment** - input data is validated
3. **CSRF** - tokens are required for role changes
4. **XSS** - role data is escaped in the UI
### Logging
```python
# Logging role changes
logger.info(f"Role {role} assigned to user {user_id} by admin {admin_id}")
logger.warning(f"Failed login attempt for user without reader role: {user_id}")
logger.error(f"Permission denied: user {user_id} tried to access {resource}")
```
## Testing
### Test Scenarios
1. **User registration** - default roles are assigned
2. **Login** - the `reader` role is required
3. **Role assignment** - administrator rights are checked
4. **Permission checks** - access to resources is validated
5. **Role hierarchy** - rights are inherited
### Example Tests
```python
def test_user_registration_assigns_default_roles():
    """Default roles are assigned at registration"""
    user = create_user(email="test@test.com")
    roles = get_user_roles_in_community(user.id, community_id=1)
    assert "reader" in roles
    assert "author" in roles

def test_login_requires_reader_role():
    """Login requires the reader role"""
    user = create_user_without_roles(email="test@test.com")
    result = login(email="test@test.com", password="password")
    assert result["success"] is False
    assert "reader" in result["error"]

def test_role_hierarchy():
    """The role hierarchy is respected"""
    user = create_user(email="admin@test.com")
    assign_role_to_user(user.id, "admin", community_id=1)
    # An admin must have all permissions
    assert check_permission(user.id, "shout:create")
    assert check_permission(user.id, "user:manage")
    assert check_permission(user.id, "community:settings")
```
## Performance
### Optimizations
1. **Role caching** - user roles are kept in Redis
2. **Database indexes** - fast lookups by `community_id` and `author_id`
3. **Batch operations** - bulk role assignment
4. **Lazy loading** - permissions are loaded on demand
### Monitoring
```python
# Prometheus metrics (prometheus_client Counters require a name and a description)
role_checks_total = Counter('rbac_role_checks_total', 'Total RBAC role checks')
permission_checks_total = Counter('rbac_permission_checks_total', 'Total RBAC permission checks')
role_assignments_total = Counter('rbac_role_assignments_total', 'Total RBAC role assignments')
```

378
docs/react-to-solidjs.md Normal file

@@ -0,0 +1,378 @@
# Migrating from React 18 to SolidStart: A Comprehensive Guide
## 1. Introduction
### 1.1 What is SolidStart?
SolidStart is a meta-framework for SolidJS that provides a full-featured solution for building web applications. Key features:
- Fully isomorphic applications (run on both client and server)
- Built-in support for SSR, SSG, and CSR
- Integration with Vite and Nitro
- Flexible routing
- Built-in server functions and actions
### 1.2 Key Differences Between React and SolidStart
| Feature | React 18 | SolidStart |
|---------------|----------|------------|
| Rendering | Virtual DOM | Compilation and direct DOM updates |
| Server rendering | Complex setup | Built-in support |
| Bundle size | ~40 KB | ~7.7 KB |
| Reactivity | Hooks with dependency lists | Signals without explicit dependencies |
| Routing | react-router | @solidjs/router |
## 2. Project Setup
### 2.1 Installing Dependencies
```bash
# Remove React dependencies
npm uninstall react react-dom react-router-dom
# Install SolidStart and related libraries
npm install @solidjs/start solid-js @solidjs/router
```
### 2.2 Updating the Configuration
#### Vite Configuration (`vite.config.ts`)
```typescript
import { defineConfig } from 'vite';
import solid from 'solid-start/vite';
export default defineConfig({
  plugins: [solid()],
  // Additional settings
});
```
#### TypeScript Configuration (`tsconfig.json`)
```json
{
  "compilerOptions": {
    "jsx": "preserve",
    "jsxImportSource": "solid-js",
    "types": ["solid-start/env"]
  }
}
```
#### SolidStart Configuration (`app.config.ts`)
```typescript
import { defineConfig } from "@solidjs/start/config";
export default defineConfig({
  server: {
    // Server settings, for example:
    preset: "netlify" // or another provider
  },
  // Additional settings
});
```
## 3. Migrating Components and Logic
### 3.1 State and Reactivity
#### React:
```typescript
const [count, setCount] = useState(0);
```
#### SolidJS:
```typescript
const [count, setCount] = createSignal(0);
// Usage: count(), setCount(newValue)
```
### 3.2 Server Functions and Data Loading
SolidStart offers several ways to work with data:
#### Server Function
```typescript
// server/api.ts
export function getUser(id: string) {
  return db.users.findUnique({ where: { id } });
}
// Component
export default function UserProfile() {
  const user = createAsync(() => getUser(params.id));
  return <div>{user()?.name}</div>;
}
```
#### Actions
```typescript
export function updateProfile(formData: FormData) {
  'use server';
  const name = formData.get('name');
  // Profile update logic
}
```
### 3.3 Routing
```typescript
// src/routes/index.tsx
import { A } from "@solidjs/router";
export default function HomePage() {
  return (
    <div>
      <A href="/about">About</A>
      <A href="/profile">Profile</A>
    </div>
  );
}
// src/routes/profile.tsx
export default function ProfilePage() {
  return <div>User profile</div>;
}
```
## 4. Optimization and Performance
### 4.1 Memoization
```typescript
// Cache expensive computations
const sortedUsers = createMemo(() =>
  users().sort((a, b) => a.name.localeCompare(b.name))
);
// Lazy loading
const UserList = lazy(() => import('./UserList'));
```
### 4.2 Server Rendering and Preloading
```typescript
// Preload data
export function routeData() {
  return {
    user: createAsync(() => fetchUser())
  };
}
export default function UserPage() {
  const user = useRouteData<typeof routeData>();
  return <div>{user().name}</div>;
}
```
## 5. Migration Specifics
### 5.1 Key Changes
- Replace `useState` with `createSignal`
- Use `createAsync` instead of `useEffect` for data loading
- Server functions with `'use server'`
- Routing via `@solidjs/router`
### 5.2 Potential Issues
- All React-specific hooks must be rewritten
- Component libraries need to be adapted
- Tests and CI/CD must be updated
## 6. Deployment
SolidStart supports many platforms:
- Netlify
- Vercel
- Cloudflare
- AWS
- Deno
- and others
```typescript
// app.config.ts
export default defineConfig({
  server: {
    preset: "netlify" // choose your platform
  }
});
```
## 7. Tooling and Ecosystem
### Recommended Libraries
- Routing: `@solidjs/router`
- State: built-in SolidJS primitives
- Data fetching: `@tanstack/solid-query`
- Devtools: `solid-devtools`
## 8. Migrating Specific Components
### 8.1 Registration Page (RegisterPage)
#### React Version
```typescript
import React from 'react'
import { Navigate } from 'react-router-dom'
import { RegisterForm } from '../components/auth/RegisterForm'
import { useAuthStore } from '../store/authStore'
export const RegisterPage: React.FC = () => {
  const { isAuthenticated } = useAuthStore()
  if (isAuthenticated) {
    return <Navigate to="/" replace />
  }
  return (
    <div className="min-h-screen ...">
      <RegisterForm />
    </div>
  )
}
```
#### SolidJS Version
```typescript
import { Navigate } from '@solidjs/router'
import { Show } from 'solid-js'
import { RegisterForm } from '../components/auth/RegisterForm'
import { useAuthStore } from '../store/authStore'
export default function RegisterPage() {
  const { isAuthenticated } = useAuthStore()
  return (
    <Show when={!isAuthenticated()} fallback={<Navigate href="/" />}>
      <div class="min-h-screen ...">
        <RegisterForm />
      </div>
    </Show>
  )
}
```
#### Key Changes
- The React import is removed
- `@solidjs/router` instead of `react-router-dom`
- `class` instead of `className`
- `Show` for conditional rendering
- `isAuthenticated()` is called as a function
- `href` instead of `to`
- Default export instead of a named export
### Recommendations
- Always use `Show` for conditional rendering
- Remember that signals in SolidJS are functions
- Keep imports and routing consistent
## 9. UI Component Migration
### 9.1 Key Differences in Component Structure
When migrating UI components from React to SolidJS, several key changes are necessary:
1. **Props Handling**
- Replace `React.FC<Props>` with function component syntax
- Use object destructuring for props instead of individual parameters
- Replace `className` with `class`
- Use `props.children` instead of `children` prop
2. **Type Annotations**
- Use TypeScript interfaces for props
- Explicitly type `children` as `any` or a more specific type
- Remove React-specific type imports
3. **Event Handling**
- Use SolidJS event types (e.g., `InputEvent`)
- Modify event handler signatures to match SolidJS conventions
### 9.2 Component Migration Example
#### React Component
```typescript
import React from 'react'
import { clsx } from 'clsx'
interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> {
  variant?: 'primary' | 'secondary'
  fullWidth?: boolean
}
export const Button: React.FC<ButtonProps> = ({
  variant = 'primary',
  fullWidth = false,
  className,
  children,
  ...props
}) => {
  const classes = clsx(
    'button',
    variant === 'primary' && 'bg-blue-500',
    fullWidth && 'w-full',
    className
  )
  return (
    <button className={classes} {...props}>
      {children}
    </button>
  )
}
```
#### SolidJS Component
```typescript
import { clsx } from 'clsx'
interface ButtonProps {
  variant?: 'primary' | 'secondary'
  fullWidth?: boolean
  class?: string
  children: any
  disabled?: boolean
  type?: 'button' | 'submit'
  onClick?: () => void
}
export const Button = (props: ButtonProps) => {
  const classes = clsx(
    'button',
    props.variant === 'primary' && 'bg-blue-500',
    props.fullWidth && 'w-full',
    props.class
  )
  return (
    <button
      class={classes}
      disabled={props.disabled}
      type={props.type || 'button'}
      onClick={props.onClick}
    >
      {props.children}
    </button>
  )
}
```
### 9.3 Key Migration Strategies
- Replace `React.FC` with standard function components
- Use `props` object instead of individual parameters
- Replace `className` with `class`
- Modify event handling to match SolidJS patterns
- Remove React-specific lifecycle methods
- Use SolidJS primitives like `createEffect` for side effects
## Conclusion
Migrating to SolidStart takes careful planning, but it delivers significant gains in performance, developer experience, and server-side capabilities.
### Recommendations
- Migrate incrementally
- Write tests at every stage
- Use compatibility tooling
---
This guide should help you migrate your project to SolidStart systematically and safely, preserving existing functionality while improving performance.

434
docs/redis-schema.md Normal file

@@ -0,0 +1,434 @@
# Redis Data Schema in Discours.io
## Overview
Redis serves as the primary store for caching, sessions, tokens, and transient data. All keys follow structured patterns for consistency and performance.
## Key Naming Principles
### General Rules
- A colon `:` is the hierarchy separator
- Format: `{category}:{type}:{identifier}` or `{entity}:{property}:{value}`
- Constant-time lookups via exact keys
- TTL on all transient data
### Data Categories
1. **Authentication**: `session:*`, `oauth_*`, `env_vars:*`
2. **Entity cache**: `author:*`, `topic:*`, `shout:*`
3. **Search**: `search_cache:*`
4. **Views**: `migrated_views_*`, `viewed_*`
5. **Notifications**: publish/subscribe channels
## 1. Authentication System
### 1.1 User Sessions
#### Key Structure
```
session:{user_id}:{jwt_token} # HASH - session data
user_sessions:{user_id} # SET - list of the user's active tokens
{user_id}-{username}-{token} # STRING - legacy format (deprecated)
```
#### Session Data (HASH)
```redis
HGETALL session:123:eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...
```
**Fields:**
- `user_id`: user ID (string)
- `username`: user name (string)
- `token_type`: "session" (string)
- `created_at`: creation Unix timestamp (string)
- `last_activity`: Unix timestamp of the last activity (string)
- `auth_data`: JSON string with authorization data (string, optional)
- `device_info`: JSON string with device information (string, optional)
**TTL**: 30 days (2592000 seconds)
#### User Token List (SET)
```redis
SMEMBERS user_sessions:123
```
**Contents**: JWT tokens of the user's active sessions
**TTL**: 30 days
### 1.2 OAuth Tokens
#### Key Structure
```
oauth_access:{user_id}:{provider} # STRING - access token
oauth_refresh:{user_id}:{provider} # STRING - refresh token
oauth_state:{state} # HASH - transient OAuth flow state
```
#### Access Tokens
**Providers**: `google`, `github`, `facebook`, `twitter`, `telegram`, `vk`, `yandex`
**TTL**: 1 hour (3600 seconds)
**Example**:
```redis
GET oauth_access:123:google
# Returns: access_token_string
```
#### Refresh Tokens
**TTL**: 30 days (2592000 seconds)
**Example**:
```redis
GET oauth_refresh:123:google
# Returns: refresh_token_string
```
#### OAuth State (transient)
```redis
HGETALL oauth_state:a1b2c3d4e5f6
```
**Fields:**
- `redirect_uri`: URL to redirect to after authorization
- `csrf_token`: CSRF protection
- `provider`: OAuth provider
- `created_at`: creation time
**TTL**: 10 minutes (600 seconds)
### 1.3 Verification Tokens
#### Key Structure
```
verification:{user_id}:{type}:{token} # HASH - verification token data
```
#### Verification Types
- `email_verification`: email confirmation
- `phone_verification`: phone confirmation
- `password_reset`: password reset
- `email_change`: email change
**Token fields**:
- `user_id`: user ID
- `token_type`: token type
- `verification_type`: verification type
- `created_at`: creation time
- `data`: JSON with additional data
**TTL**: 1 hour (3600 seconds)
## 2. Environment Variables
### Key Structure
```
env_vars:{variable_name} # STRING - variable value
```
### Example Variables
```redis
GET env_vars:JWT_SECRET # JWT secret key
GET env_vars:REDIS_URL # Redis URL
GET env_vars:OAUTH_GOOGLE_CLIENT_ID # Google OAuth Client ID
GET env_vars:FEATURE_REGISTRATION # registration feature flag
```
**Variable categories**:
- **database**: DB_URL, POSTGRES_*
- **auth**: JWT_SECRET, OAUTH_*
- **redis**: REDIS_URL, REDIS_HOST, REDIS_PORT
- **search**: SEARCH_API_KEY, ELASTICSEARCH_URL
- **integrations**: GOOGLE_ANALYTICS_ID, SENTRY_DSN, SMTP_*
- **security**: CORS_ORIGINS, ALLOWED_HOSTS
- **logging**: LOG_LEVEL, DEBUG
- **features**: FEATURE_*
**TTL**: none (persistent storage)
## 3. Entity Cache
### 3.1 Authors (users)
#### Key Structure
```
author:id:{author_id} # STRING - author JSON data
author:slug:{author_slug} # STRING - author ID by slug
author:followers:{author_id} # STRING - JSON array of followers
author:follows-topics:{author_id} # STRING - JSON array of followed topics
author:follows-authors:{author_id} # STRING - JSON array of followed authors
author:follows-shouts:{author_id} # STRING - JSON array of followed shouts
```
#### Author Data (JSON)
```json
{
  "id": 123,
  "email": "user@example.com",
  "name": "User Name",
  "slug": "username",
  "pic": "https://example.com/avatar.jpg",
  "bio": "Author bio",
  "email_verified": true,
  "created_at": 1640995200,
  "updated_at": 1640995200,
  "last_seen": 1640995200,
  "stat": {
    "topics": 15,
    "authors": 8,
    "shouts": 42
  }
}
```
#### Author Followers
```json
[123, 456, 789] // array of follower IDs
```
#### Author Follows
```json
// author:follows-topics:123
[1, 5, 10, 15] // followed topic IDs
// author:follows-authors:123
[45, 67, 89] // followed author IDs
// author:follows-shouts:123
[101, 102, 103] // followed shout IDs
```
**TTL**: none (invalidated on change)
### 3.2 Topics
#### Key Structure
```
topic:id:{topic_id} # STRING - topic JSON data
topic:slug:{topic_slug} # STRING - topic JSON data
topic:authors:{topic_id} # STRING - JSON array of the topic's authors
topic:followers:{topic_id} # STRING - JSON array of the topic's followers
topic_shouts_{topic_id} # STRING - JSON array of the topic's shouts (legacy)
```
#### Topic Data (JSON)
```json
{
  "id": 5,
  "title": "Topic title",
  "slug": "tema-slug",
  "description": "Topic description",
  "pic": "https://example.com/topic.jpg",
  "community": 1,
  "created_at": 1640995200,
  "updated_at": 1640995200,
  "stat": {
    "shouts": 150,
    "authors": 25,
    "followers": 89
  }
}
```
#### Topic Authors
```json
[123, 456, 789] // IDs of authors who wrote in the topic
```
#### Topic Followers
```json
[111, 222, 333, 444] // IDs of the topic's followers
```
**TTL**: none (invalidated on change)
### 3.3 Publications (Shouts)
#### Key Structure
```
shouts:{params_hash} # STRING - JSON array of shouts
topic_shouts_{topic_id} # STRING - JSON array of a topic's shouts
```
#### Example Shout Keys
```
shouts:limit=20:offset=0:sort=created_at # latest shouts
shouts:author=123:limit=10 # an author's shouts
shouts:topic=5:featured=true # a topic's featured shouts
```
**TTL**: 5 minutes (300 seconds)
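A sketch of how such keys could be assembled (the exact builder in the codebase may differ; sorting the parameters keeps equivalent queries on one key):

```python
def shouts_cache_key(**params) -> str:
    """Build a deterministic cache key like `shouts:limit=20:offset=0:...`."""
    parts = [f"{k}={params[k]}" for k in sorted(params)]
    return "shouts:" + ":".join(parts)

# shouts_cache_key(limit=20, offset=0, sort="created_at")
# -> "shouts:limit=20:offset=0:sort=created_at"
```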
## 4. Search Cache
### Key Structure
```
search_cache:{normalized_query} # STRING - JSON search results
```
### Query Normalization
- Lowercasing
- Collapsing extra whitespace
- Sorting parameters
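A minimal sketch of the first two steps (illustrative; parameter sorting happens when the key is built):

```python
def normalize_search_query(query: str) -> str:
    """Lowercase the query and collapse runs of whitespace."""
    return " ".join(query.lower().split())

# normalize_search_query("  Some   Query ") -> "some query"
```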
### Search Data (JSON)
```json
{
  "query": "search query",
  "results": [
    {
      "type": "shout",
      "id": 123,
      "title": "Publication title",
      "slug": "publication-slug",
      "score": 0.95
    }
  ],
  "total": 15,
  "cached_at": 1640995200
}
```
**TTL**: 10 minutes (600 seconds)
## 5. View Tracking
### Key Structure
```
migrated_views_{timestamp} # HASH - shout views
migrated_views_slugs # HASH - slug -> id mapping
viewed:{shout_id} # STRING - view counter
```
### Migrated Views (HASH)
```redis
HGETALL migrated_views_1640995200
```
**Fields**:
- `{shout_id}`: view count (string)
- `_timestamp`: record creation time
- `_total`: total number of records
### Slug -> ID Mapping
```redis
HGETALL migrated_views_slugs
```
**Fields**: `{shout_slug}` -> `{shout_id}`
**TTL**: none (analytics data)
## 6. Pub/Sub Channels
### Notification Channels
```
notifications:{user_id} # personal notifications
notifications:global # global notifications
notifications:topic:{topic_id} # topic notifications
notifications:shout:{shout_id} # shout notifications
```
### Message Structure (JSON)
```json
{
  "type": "notification_type",
  "user_id": 123,
  "entity_type": "shout",
  "entity_id": 456,
  "action": "created|updated|deleted",
  "data": {
    "title": "Title",
    "author": "Author"
  },
  "timestamp": 1640995200
}
```
## 7. Transient Data
### Lock Keys
```
lock:{operation}:{entity_id} # STRING - operation lock
```
**TTL**: 30 seconds (the lock is released automatically)
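The usual way to take such a lock is a single `SET` with `NX` and `EX`, which is atomic; a sketch with `redis.asyncio` (the connection setup is illustrative):

```python
import redis.asyncio as aioredis

async def acquire_lock(operation: str, entity_id: int, ttl: int = 30) -> bool:
    """Try to take `lock:{operation}:{entity_id}`; stale locks expire via TTL."""
    r = aioredis.from_url("redis://localhost:6379")
    return bool(await r.set(f"lock:{operation}:{entity_id}", "1", nx=True, ex=ttl))
```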
### State Keys
```
state:{process}:{identifier} # HASH - process state
```
**TTL**: 1 minute to 1 hour, depending on the process
## 8. Monitoring and Statistics
### Metric Keys
```
metrics:{metric_name}:{period} # STRING - metric value
stats:{entity}:{timeframe} # HASH - entity statistics
```
### Example Metrics
```
metrics:active_sessions:hourly # active session count
metrics:cache_hits:daily # cache hits per day
stats:topics:weekly # weekly topic statistics
```
**TTL**: 1 hour to 30 days, depending on the metric type
## 9. Optimization and Performance
### Batch Operations
Redis pipelines are used for atomic operations:
```python
# Example: creating a session
commands = [
    ("hset", (token_key, "user_id", user_id)),
    ("hset", (token_key, "created_at", timestamp)),
    ("expire", (token_key, ttl)),
    ("sadd", (user_tokens_key, token)),
]
await redis.execute_pipeline(commands)
```
### Caching Strategies
1. **Write-through**: the cache is updated immediately when data changes
2. **Cache-aside**: lazy loading, populated on a miss (see the sketch below)
3. **Write-behind**: deferred writes to the database
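A cache-aside sketch for the entity keys from section 3 (`load_topic_from_db` is a hypothetical loader, not a function from this codebase):

```python
import json

import redis.asyncio as aioredis

r = aioredis.from_url("redis://localhost:6379")

async def get_topic_cached(topic_id: int) -> dict:
    """Serve from Redis on a hit; otherwise load from the DB and populate."""
    key = f"topic:id:{topic_id}"
    cached = await r.get(key)
    if cached:
        return json.loads(cached)
    topic = await load_topic_from_db(topic_id)  # hypothetical loader
    await r.set(key, json.dumps(topic))  # no TTL: invalidated on change
    return topic
```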
### Cache Invalidation
- **Targeted**: specific keys are deleted on change
- **By prefix**: bulk deletion of related keys
- **TTL**: transient data expires automatically
## 10. Monitoring
### Diagnostic Commands
```bash
# Memory usage statistics
redis-cli info memory
# Key counts by type
redis-cli --scan --pattern "session:*" | wc -l
redis-cli --scan --pattern "author:*" | wc -l
redis-cli --scan --pattern "topic:*" | wc -l
# Size of a specific key
redis-cli memory usage session:123:token...
# Inspect key expiration
redis-cli --scan --pattern "*" | xargs -I {} redis-cli ttl {}
```
### Problems and Solutions
1. **Memory**: TTL on transient data
2. **Performance**: pipeline operations, connection pooling
3. **Consistency**: transactions for critical operations
4. **Scaling**: shard sessions by user_id
## 11. Security
### Principles
- TTL on all transient data prevents garbage build-up
- Secret data (tokens) and public data (cache) are stored separately
- Pipelines are used for atomic operations
- Expired keys are cleaned up regularly
### Recommendations
- Monitor Redis memory usage
- Back up critical data (environment variables)
- Cap value sizes to prevent out-of-memory conditions
- Use separate databases for different data types

212
docs/security.md Normal file

@@ -0,0 +1,212 @@
# Security System
## Overview
The security system manages user passwords and email addresses through dedicated GraphQL mutations, using Redis for token storage.
## GraphQL API
### Mutations
#### updateSecurity
A universal mutation for changing a user's password and/or email, with full validation and security checks.
**Parameters:**
- `email: String` - new email (optional)
- `old_password: String` - current password (required for any change)
- `new_password: String` - new password (optional)
**Returns:**
```typescript
type SecurityUpdateResult {
  success: Boolean!
  error: String
  author: Author
}
```
**Usage examples:**
```graphql
# Change password
mutation {
  updateSecurity(
    old_password: "current123"
    new_password: "newPassword456"
  ) {
    success
    error
    author {
      id
      name
      email
    }
  }
}
# Change email
mutation {
  updateSecurity(
    email: "newemail@example.com"
    old_password: "current123"
  ) {
    success
    error
    author {
      id
      name
      email
    }
  }
}
# Change password and email at the same time
mutation {
  updateSecurity(
    email: "newemail@example.com"
    old_password: "current123"
    new_password: "newPassword456"
  ) {
    success
    error
    author {
      id
      name
      email
    }
  }
}
```
#### confirmEmailChange
Confirms an email change using the token sent to the new email address.
**Parameters:**
- `token: String!` - confirmation token
**Returns:** `SecurityUpdateResult`
#### cancelEmailChange
Cancels a pending email change.
**Returns:** `SecurityUpdateResult`
### Validation and Errors
```typescript
const ERRORS = {
  NOT_AUTHENTICATED: "User not authenticated",
  INCORRECT_OLD_PASSWORD: "incorrect old password",
  PASSWORDS_NOT_MATCH: "New passwords do not match",
  EMAIL_ALREADY_EXISTS: "email already exists",
  INVALID_EMAIL: "Invalid email format",
  WEAK_PASSWORD: "Password too weak",
  SAME_PASSWORD: "New password must be different from current",
  VALIDATION_ERROR: "Validation failed",
  INVALID_TOKEN: "Invalid token",
  TOKEN_EXPIRED: "Token expired",
  NO_PENDING_EMAIL: "No pending email change"
}
```
## Email Change Logic
1. **Initiation:**
   - The user calls `updateSecurity` with a new email
   - A confirmation token is generated via `token_urlsafe(32)`
   - The email-change data is stored in Redis under the key `email_change:{user_id}`
   - The token is set to expire automatically after 1 hour
   - A confirmation email with the token is sent to the new address
2. **Confirmation:**
   - The user receives the email with the token
   - The user calls `confirmEmailChange` with the token
   - The system checks the token and its expiry in Redis
   - If the token is valid, the email is updated in the database
   - The email-change data is removed from Redis
3. **Cancellation:**
   - The user can cancel the change via `cancelEmailChange`
   - The email-change data is removed from Redis
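A minimal sketch of the initiation step (this assumes a thin async Redis wrapper exposing an `execute` method; mail delivery is out of scope here):

```python
import json
import time
from secrets import token_urlsafe

async def start_email_change(user_id: int, old_email: str, new_email: str) -> str:
    """Generate a confirmation token and stage the change in Redis for 1 hour."""
    token = token_urlsafe(32)
    payload = {
        "user_id": user_id,
        "old_email": old_email,
        "new_email": new_email,
        "token": token,
        "expires_at": int(time.time()) + 3600,
    }
    # Key and TTL match the Redis structure documented below
    await redis.execute("SET", f"email_change:{user_id}", json.dumps(payload), "EX", 3600)
    return token
```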
## Redis Storage
### Email Change Token Storage
```json
{
  "key": "email_change:{user_id}",
  "value": {
    "user_id": 123,
    "old_email": "old@example.com",
    "new_email": "new@example.com",
    "token": "random_token_32_chars",
    "expires_at": 1640995200
  },
  "ttl": 3600 // 1 hour
}
```
### OAuth Token Storage
```json
{
  "key": "oauth_access:{user_id}:{provider}",
  "value": {
    "token": "oauth_access_token",
    "provider": "google",
    "user_id": 123,
    "created_at": 1640995200,
    "expires_in": 3600,
    "scope": "profile email"
  },
  "ttl": 3600 // from expires_in, or 1 hour by default
}
```
```json
{
  "key": "oauth_refresh:{user_id}:{provider}",
  "value": {
    "token": "oauth_refresh_token",
    "provider": "google",
    "user_id": 123,
    "created_at": 1640995200
  },
  "ttl": 2592000 // 30 days by default
}
```
### Benefits of Redis Storage
- **Automatic expiry**: Redis TTL removes expired tokens on its own
- **Performance**: fast access to token data
- **Scalability**: no load on the main database
- **Security**: tokens are kept out of the main database
- **Simplicity**: no database schema migration required
- **OAuth tokens**: centralized management of tokens from all OAuth providers
## Security
### Password Requirements
- At least 8 characters
- Must differ from the current password
### Authentication
- All operations require a valid authentication token
- The old password is required to confirm identity
### Email Validation
- Email format is checked with a regular expression
- Email uniqueness is enforced system-wide
- Race conditions during email changes are guarded against
### Security Tokens
- Tokens are generated via `secrets.token_urlsafe(32)`
- Automatic expiry after 1 hour
- Tokens are deleted after use or cancellation
## Database Schema
No database schema changes are required: all tokens and transient data live in Redis.
### Protected Fields
The following fields are shown only to the account owner:
- `email`
- `password`

11
env.d.ts vendored Normal file

@@ -0,0 +1,11 @@
/// <reference types="vite/client" />
declare const __APP_VERSION__: string
interface ImportMetaEnv {
readonly VITE_API_URL: string;
}
interface ImportMeta {
readonly env: ImportMetaEnv;
}


@@ -1 +0,0 @@
python -m gql_schema_codegen -p ./schema.graphql -t ./schema_types.py

20
index.html Normal file

@@ -0,0 +1,20 @@
<!DOCTYPE html>
<html lang="ru">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="Admin Panel">
<title>Admin Panel</title>
<link rel="icon" type="image/x-icon" href="/static/favicon.ico">
<meta name="theme-color" content="#228be6">
</head>
<body>
<div id="root"></div>
<script type="module" src="/panel/index.tsx"></script>
<noscript>
<div style="text-align: center; padding: 20px;">
Для работы приложения необходим JavaScript
</div>
</noscript>
</body>
</html>

307
main.py

@@ -1,94 +1,279 @@
import asyncio
import os
from contextlib import asynccontextmanager
from importlib import import_module
from os.path import exists
from pathlib import Path
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import FileResponse, JSONResponse, Response
from starlette.routing import Mount, Route
from starlette.staticfiles import StaticFiles
from auth.authenticate import JWTAuthenticate
from auth.oauth import oauth_authorize, oauth_login
from base.redis import redis
from base.resolvers import resolvers
from orm import init_tables
from resolvers.upload import upload_handler
from services.main import storages_init
from services.notifications.notification_service import notification_service
from services.notifications.sse import sse_subscribe_handler
from services.stat.viewed import ViewedStorage
from auth.handler import EnhancedGraphQLHTTPHandler
from auth.middleware import AuthMiddleware, auth_middleware
from auth.oauth import oauth_callback, oauth_login
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import create_all_tables, resolvers
from services.search import check_search_service, initialize_search_index_background, search_service
from services.viewed import ViewedStorage
from settings import DEV_SERVER_PID_FILE_NAME
from utils.logger import root_logger as logger
# from services.zine.gittask import GitTask
from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY
DEVMODE = os.getenv("DOKKU_APP_TYPE", "false").lower() == "false"
DIST_DIR = Path(__file__).parent / "dist" # Директория для собранных файлов
INDEX_HTML = Path(__file__).parent / "index.html"
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
# Создаем middleware с правильным порядком
middleware = [
Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
Middleware(SessionMiddleware, secret_key=SESSION_SECRET_KEY),
# Начинаем с обработки ошибок
Middleware(ExceptionHandlerMiddleware),
# CORS должен быть перед другими middleware для корректной обработки preflight-запросов
Middleware(
CORSMiddleware,
allow_origins=[
"https://testing.discours.io",
"https://testing3.discours.io",
"https://v3.dscrs.site",
"https://session-daily.vercel.app",
"https://coretest.discours.io",
"https://new.discours.io",
],
allow_methods=["GET", "POST", "OPTIONS"], # Явно указываем OPTIONS
allow_headers=["*"],
allow_credentials=True,
),
# Аутентификация должна быть после CORS
Middleware(AuthMiddleware),
]
# Создаем экземпляр GraphQL с улучшенным обработчиком
graphql_app = GraphQL(schema, debug=DEVMODE, http_handler=EnhancedGraphQLHTTPHandler())
async def start_up():
init_tables()
await redis.connect()
await storages_init()
views_stat_task = asyncio.create_task(ViewedStorage().worker())
print(views_stat_task)
# git_task = asyncio.create_task(GitTask.git_task_worker())
# print(git_task)
notification_service_task = asyncio.create_task(notification_service.worker())
print(notification_service_task)
# Оборачиваем GraphQL-обработчик для лучшей обработки ошибок
async def graphql_handler(request: Request) -> Response:
"""
Обработчик GraphQL запросов с поддержкой middleware и обработкой ошибок.
Выполняет:
1. Проверку метода запроса (GET, POST, OPTIONS)
2. Обработку GraphQL запроса через ariadne
3. Применение middleware для корректной обработки cookie и авторизации
4. Обработку исключений и формирование ответа
Args:
request: Starlette Request объект
Returns:
Response: объект ответа (обычно JSONResponse)
"""
if request.method not in ["GET", "POST", "OPTIONS"]:
return JSONResponse({"error": "Method Not Allowed by main.py"}, status_code=405)
# Проверяем, что все необходимые middleware корректно отработали
if not hasattr(request, "scope") or "auth" not in request.scope:
logger.warning("[graphql] AuthMiddleware не обработал запрос перед GraphQL обработчиком")
try:
import sentry_sdk
# Обрабатываем запрос через GraphQL приложение
result = await graphql_app.handle_request(request)
sentry_sdk.init(SENTRY_DSN)
# Применяем middleware для установки cookie
# Используем метод process_result из auth_middleware для корректной обработки
# cookie на основе результатов операций login/logout
return await auth_middleware.process_result(request, result)
except asyncio.CancelledError:
return JSONResponse({"error": "Request cancelled"}, status_code=499)
except Exception as e:
print("[sentry] init error")
print(e)
logger.error(f"GraphQL error: {e!s}")
# Логируем более подробную информацию для отладки
import traceback
logger.debug(f"GraphQL error traceback: {traceback.format_exc()}")
return JSONResponse({"error": str(e)}, status_code=500)
async def dev_start_up():
if exists(DEV_SERVER_PID_FILE_NAME):
await redis.connect()
return
else:
with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
f.write(str(os.getpid()))
async def spa_handler(request: Request) -> Response:
"""
Обработчик для SPA (Single Page Application) fallback.
await start_up()
Возвращает index.html для всех маршрутов, которые не найдены,
чтобы клиентский роутер (SolidJS) мог обработать маршрутинг.
Args:
request: Starlette Request объект
Returns:
FileResponse: ответ с содержимым index.html
"""
index_path = DIST_DIR / "index.html"
if index_path.exists():
return FileResponse(index_path, media_type="text/html")
return JSONResponse({"error": "Admin panel not built"}, status_code=404)
async def shutdown():
async def shutdown() -> None:
"""Остановка сервера и освобождение ресурсов"""
logger.info("Остановка сервера")
# Закрываем соединение с Redis
await redis.disconnect()
# Останавливаем поисковый сервис
await search_service.close()
routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth-authorize", endpoint=oauth_authorize),
Route("/upload", endpoint=upload_handler, methods=["POST"]),
Route("/subscribe/{user_id}", endpoint=sse_subscribe_handler),
]
# Удаляем PID-файл, если он существует
from settings import DEV_SERVER_PID_FILE_NAME
pid_file = Path(DEV_SERVER_PID_FILE_NAME)
if pid_file.exists():
pid_file.unlink()
async def dev_start() -> None:
"""
Инициализация сервера в DEV режиме.
Функция:
1. Проверяет наличие DEV режима
2. Создает PID-файл для отслеживания процесса
3. Логирует информацию о старте сервера
Используется только при запуске сервера с флагом "dev".
"""
try:
pid_path = Path(DEV_SERVER_PID_FILE_NAME)
# Если PID-файл уже существует, проверяем, не запущен ли уже сервер с этим PID
if pid_path.exists():
try:
with pid_path.open(encoding="utf-8") as f:
old_pid = int(f.read().strip())
# Проверяем, существует ли процесс с таким PID
try:
os.kill(old_pid, 0) # Сигнал 0 только проверяет существование процесса
print(f"[warning] DEV server already running with PID {old_pid}")
except OSError:
print(f"[info] Stale PID file found, previous process {old_pid} not running")
except (ValueError, FileNotFoundError):
print("[warning] Invalid PID file found, recreating")
# Создаем или перезаписываем PID-файл
with pid_path.open("w", encoding="utf-8") as f:
f.write(str(os.getpid()))
print(f"[main] process started in DEV mode with PID {os.getpid()}")
except Exception as e:
logger.error(f"[main] Error during server startup: {e!s}")
# Не прерываем запуск сервера из-за ошибки в этой функции
print(f"[warning] Error during DEV mode initialization: {e!s}")
# Глобальная переменная для background tasks
background_tasks = []
@asynccontextmanager
async def lifespan(app: Starlette):
"""
Функция жизненного цикла приложения.
Обеспечивает:
1. Инициализацию всех необходимых сервисов и компонентов
2. Предзагрузку кеша данных
3. Подключение к Redis и поисковому сервису
4. Корректное завершение работы при остановке сервера
Args:
app: экземпляр Starlette приложения
Yields:
None: генератор для управления жизненным циклом
"""
try:
print("[lifespan] Starting application initialization")
create_all_tables()
await asyncio.gather(
redis.connect(),
precache_data(),
ViewedStorage.init(),
check_search_service(),
revalidation_manager.start(),
)
if DEVMODE:
await dev_start()
print("[lifespan] Basic initialization complete")
# Add a delay before starting the intensive search indexing
print("[lifespan] Waiting for system stabilization before search indexing...")
await asyncio.sleep(1) # 1-second delay to let the system stabilize
# Start search indexing as a background task with lower priority
search_task = asyncio.create_task(initialize_search_index_background())
background_tasks.append(search_task)
# Не ждем завершения задачи, позволяем ей выполняться в фоне
yield
finally:
print("[lifespan] Shutting down application services")
# Отменяем все background tasks
for task in background_tasks:
if not task.done():
task.cancel()
# Ждем завершения отмены tasks
if background_tasks:
await asyncio.gather(*background_tasks, return_exceptions=True)
tasks = [redis.disconnect(), ViewedStorage.stop(), revalidation_manager.stop()]
await asyncio.gather(*tasks, return_exceptions=True)
print("[lifespan] Shutdown complete")
# Обновляем маршрут в Starlette
app = Starlette(
on_startup=[start_up],
on_shutdown=[shutdown],
middleware=middleware,
routes=routes,
)
app.mount("/", GraphQL(schema))
dev_app = Starlette(
routes=[
Route("/graphql", graphql_handler, methods=["GET", "POST", "OPTIONS"]),
# OAuth маршруты
Route("/oauth/{provider}", oauth_login, methods=["GET"]),
Route("/oauth/{provider}/callback", oauth_callback, methods=["GET"]),
# Статические файлы (CSS, JS, изображения)
Mount("/assets", app=StaticFiles(directory=str(DIST_DIR / "assets"))),
# Корневой маршрут для админ-панели
Route("/", spa_handler, methods=["GET"]),
# SPA fallback для всех остальных маршрутов
Route("/{path:path}", spa_handler, methods=["GET"]),
],
middleware=middleware, # Используем единый список middleware
lifespan=lifespan,
debug=True,
on_startup=[dev_start_up],
on_shutdown=[shutdown],
middleware=middleware,
routes=routes,
)
dev_app.mount("/", GraphQL(schema, debug=True))
if DEVMODE:
# Для DEV режима регистрируем дополнительный CORS middleware только для localhost
app.add_middleware(
CORSMiddleware,
allow_origins=[
"https://localhost:3000",
"https://localhost:3001",
"https://localhost:3002",
"http://localhost:3000",
"http://localhost:3001",
"http://localhost:3002",
],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
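# A minimal sketch of how these two apps are typically served; uvicorn is an
# assumption here (the actual server entrypoint is not part of this diff):
#
# import uvicorn
# uvicorn.run(dev_app if DEVMODE else app, host="0.0.0.0", port=8000)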


@@ -1,18 +0,0 @@
database_name="discoursio"
echo "DATABASE MIGRATION STARTED"
echo "Dropping database $database_name"
dropdb $database_name --force
if [ $? -ne 0 ]; then { echo "Failed to drop database, aborting." ; exit 1; } fi
echo "Database $database_name dropped"
echo "Creating database $database_name"
createdb $database_name
if [ $? -ne 0 ]; then { echo "Failed to create database, aborting." ; exit 1; } fi
echo "Database $database_name successfully created"
echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'


@@ -1,279 +0,0 @@
""" cmd managed migration """
import asyncio
import gc
import json
import re
import sys
from datetime import datetime, timezone
import bs4
from migration.export import export_mdx
from migration.tables.comments import migrate as migrateComment
from migration.tables.comments import migrate_2stage as migrateComment_2stage
from migration.tables.content_items import get_shout_slug
from migration.tables.content_items import migrate as migrateShout
# from migration.tables.remarks import migrate as migrateRemark
from migration.tables.topics import migrate as migrateTopic
from migration.tables.users import migrate as migrateUser
from migration.tables.users import migrate_2stage as migrateUser_2stage
from migration.tables.users import post_migrate as users_post_migrate
from orm import init_tables
from orm.reaction import Reaction
TODAY = datetime.strftime(datetime.now(tz=timezone.utc), "%Y%m%d")
OLD_DATE = "2016-03-05 22:22:00.350000"
async def users_handle(storage):
"""migrating users first"""
counter = 0
id_map = {}
print("[migration] migrating %d users" % (len(storage["users"]["data"])))
for entry in storage["users"]["data"]:
oid = entry["_id"]
user = migrateUser(entry)
storage["users"]["by_oid"][oid] = user # full
del user["password"]
del user["emailConfirmed"]
del user["username"]
del user["email"]
storage["users"]["by_slug"][user["slug"]] = user # public
id_map[user["oid"]] = user["slug"]
counter += 1
ce = 0
for entry in storage["users"]["data"]:
ce += migrateUser_2stage(entry, id_map)
users_post_migrate()
async def topics_handle(storage):
"""topics from categories and tags"""
counter = 0
for t in storage["topics"]["tags"] + storage["topics"]["cats"]:
if t["slug"] in storage["replacements"]:
t["slug"] = storage["replacements"][t["slug"]]
topic = migrateTopic(t)
storage["topics"]["by_oid"][t["_id"]] = topic
storage["topics"]["by_slug"][t["slug"]] = topic
counter += 1
else:
print("[migration] topic " + t["slug"] + " ignored")
for oldslug, newslug in storage["replacements"].items():
if oldslug != newslug and oldslug in storage["topics"]["by_slug"]:
oid = storage["topics"]["by_slug"][oldslug]["_id"]
del storage["topics"]["by_slug"][oldslug]
storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
print("[migration] " + str(counter) + " topics migrated")
print("[migration] " + str(len(storage["topics"]["by_oid"].values())) + " topics by oid")
print("[migration] " + str(len(storage["topics"]["by_slug"].values())) + " topics by slug")
async def shouts_handle(storage, args):
"""migrating content items one by one"""
counter = 0
discours_author = 0
anonymous_author = 0
pub_counter = 0
ignored = 0
topics_dataset_bodies = []
topics_dataset_tlist = []
for entry in storage["shouts"]["data"]:
gc.collect()
# slug
slug = get_shout_slug(entry)
# single slug mode
if "-" in args and slug not in args:
continue
# migrate
shout_dict = await migrateShout(entry, storage)
if shout_dict:
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
storage["shouts"]["by_slug"][shout_dict["slug"]] = shout_dict
# shouts.topics
if not shout_dict["topics"]:
print("[migration] no topics!")
# with author
author = shout_dict["authors"][0]
if author["slug"] == "discours":
discours_author += 1
if author["slug"] == "anonymous":
anonymous_author += 1
# print('[migration] ' + shout['slug'] + ' with author ' + author)
if entry.get("published"):
if "mdx" in args:
export_mdx(shout_dict)
pub_counter += 1
# print main counter
counter += 1
print(
"[migration] shouts_handle %d: %s @%s"
% (counter, shout_dict["slug"], author["slug"])
)
b = bs4.BeautifulSoup(shout_dict["body"], "html.parser")
texts = [re.sub(r"[^а-яА-Яa-zA-Z]", "", shout_dict["title"].lower())]
texts = texts + b.findAll(text=True)
topics_dataset_bodies.append(" ".join([x.strip().lower() for x in texts]))
topics_dataset_tlist.append(shout_dict["topics"])
else:
ignored += 1
# np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=', ', fmt='%s')
print("[migration] " + str(counter) + " content items were migrated")
print("[migration] " + str(pub_counter) + " have been published")
print("[migration] " + str(discours_author) + " authored by @discours")
print("[migration] " + str(anonymous_author) + " authored by @anonymous")
# async def remarks_handle(storage):
# print("[migration] comments")
# c = 0
# for entry_remark in storage["remarks"]["data"]:
# remark = await migrateRemark(entry_remark, storage)
# c += 1
# print("[migration] " + str(c) + " remarks migrated")
async def comments_handle(storage):
print("[migration] comments")
id_map = {}
ignored_counter = 0
missed_shouts = {}
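# First pass: create Reaction rows and remember each old oid -> new id;
# the second pass below uses this map to rewire replyTo references.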
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
if isinstance(reaction, str):
missed_shouts[reaction] = oldcomment
elif isinstance(reaction, Reaction):
reaction = reaction.dict()
rid = reaction["id"]
oid = reaction["oid"]
id_map[oid] = rid
else:
ignored_counter += 1
for reaction in storage["reactions"]["data"]:
migrateComment_2stage(reaction, id_map)
print("[migration] " + str(len(id_map)) + " comments migrated")
print("[migration] " + str(ignored_counter) + " comments ignored")
print("[migration] " + str(len(missed_shouts.keys())) + " commented shouts missed")
missed_counter = 0
for missed in missed_shouts.values():
missed_counter += len(missed)
print("[migration] " + str(missed_counter) + " comments dropped")
async def all_handle(storage, args):
print("[migration] handle everything")
await users_handle(storage)
await topics_handle(storage)
print("[migration] users and topics are migrated")
await shouts_handle(storage, args)
# print("[migration] remarks...")
# await remarks_handle(storage)
print("[migration] migrating comments")
await comments_handle(storage)
# export_email_subscriptions()
print("[migration] done!")
def data_load():
storage = {
"content_items": {
"by_oid": {},
"by_slug": {},
},
"shouts": {"by_oid": {}, "by_slug": {}, "data": []},
"reactions": {"by_oid": {}, "by_slug": {}, "by_content": {}, "data": []},
"topics": {
"by_oid": {},
"by_slug": {},
"cats": [],
"tags": [],
},
"remarks": {"data": []},
"users": {"by_oid": {}, "by_slug": {}, "data": []},
"replacements": json.loads(open("migration/tables/replacements.json").read()),
}
try:
users_data = json.loads(open("migration/data/users.json").read())
print("[migration.load] " + str(len(users_data)) + " users ")
tags_data = json.loads(open("migration/data/tags.json").read())
storage["topics"]["tags"] = tags_data
print("[migration.load] " + str(len(tags_data)) + " tags ")
cats_data = json.loads(open("migration/data/content_item_categories.json").read())
storage["topics"]["cats"] = cats_data
print("[migration.load] " + str(len(cats_data)) + " cats ")
comments_data = json.loads(open("migration/data/comments.json").read())
storage["reactions"]["data"] = comments_data
print("[migration.load] " + str(len(comments_data)) + " comments ")
content_data = json.loads(open("migration/data/content_items.json").read())
storage["shouts"]["data"] = content_data
print("[migration.load] " + str(len(content_data)) + " content items ")
remarks_data = json.loads(open("migration/data/remarks.json").read())
storage["remarks"]["data"] = remarks_data
print("[migration.load] " + str(len(remarks_data)) + " remarks data ")
# fill out storage
for x in users_data:
storage["users"]["by_oid"][x["_id"]] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
print("[migration.load] " + str(len(storage["users"]["by_oid"].keys())) + " users by oid")
for x in tags_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
for x in cats_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
print(
"[migration.load] " + str(len(storage["topics"]["by_slug"].keys())) + " topics by slug"
)
for item in content_data:
slug = get_shout_slug(item)
storage["content_items"]["by_slug"][slug] = item
storage["content_items"]["by_oid"][item["_id"]] = item
print("[migration.load] " + str(len(content_data)) + " content items")
for x in comments_data:
storage["reactions"]["by_oid"][x["_id"]] = x
cid = x["contentItem"]
storage["reactions"]["by_content"][cid] = x
ci = storage["content_items"]["by_oid"].get(cid, {})
if "slug" in ci:
storage["reactions"]["by_slug"][ci["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["reactions"]["by_content"].keys()))
+ " with comments"
)
storage["users"]["data"] = users_data
storage["topics"]["tags"] = tags_data
storage["topics"]["cats"] = cats_data
storage["shouts"]["data"] = content_data
storage["reactions"]["data"] = comments_data
except Exception as e:
raise e
return storage
async def handling_migration():
init_tables()
await all_handle(data_load(), sys.argv)
def process():
loop = asyncio.get_event_loop()
loop.run_until_complete(handling_migration())
if __name__ == "__main__":
process()


@@ -1,33 +0,0 @@
import gc
import json
import os
import bson
from .utils import DateTimeEncoder
def json_tables():
print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
data = {
"content_items": [],
"content_item_categories": [],
"tags": [],
"email_subscriptions": [],
"users": [],
"comments": [],
"remarks": [],
}
for table in data.keys():
print("[migration] bson2json for " + table)
gc.collect()
lc = []
bs = open("dump/discours/" + table + ".bson", "rb").read()
base = 0
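# bson.decode_document consumes one document starting at `base` and returns
# (next_offset, document); advancing the offset walks the entire dump file.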
while base < len(bs):
base, d = bson.decode_document(bs, base)
lc.append(d)
data[table] = lc
open(os.getcwd() + "/migration/data/" + table + ".json", "w").write(
json.dumps(lc, cls=DateTimeEncoder)
)


@@ -1,137 +0,0 @@
import json
import os
from datetime import datetime, timezone
import frontmatter
from .extract import extract_html, extract_media
from .utils import DateTimeEncoder
OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
parentDir = "/".join(os.getcwd().split("/")[:-1])
contentDir = parentDir + "/discoursio-web/content/"
ts = datetime.now(tz=timezone.utc)
def get_metadata(r):
authors = []
for a in r["authors"]:
authors.append(
{ # a short version for public listings
"slug": a.slug or "discours",
"name": a.name or "Дискурс",
"userpic": a.userpic or "https://discours.io/static/img/discours.png",
}
)
metadata = {}
metadata["title"] = r.get("title", "").replace("{", "(").replace("}", ")")
metadata["authors"] = authors
metadata["createdAt"] = r.get("createdAt", ts)
metadata["layout"] = r["layout"]
metadata["topics"] = [topic for topic in r["topics"]]
metadata["topics"].sort()
if r.get("cover", False):
metadata["cover"] = r.get("cover")
return metadata
def export_mdx(r):
# print('[export] mdx %s' % r['slug'])
content = ""
metadata = get_metadata(r)
content = frontmatter.dumps(frontmatter.Post(r["body"], **metadata))
ext = "mdx"
filepath = contentDir + r["slug"]
bc = bytes(content, "utf-8").decode("utf-8", "ignore")
open(filepath + "." + ext, "w").write(bc)
def export_body(shout, storage):
entry = storage["content_items"]["by_oid"][shout["oid"]]
if entry:
body = extract_html(entry)
media = extract_media(entry)
shout["body"] = body # prepare_html_body(entry) # prepare_md_body(entry)
shout["media"] = media
export_mdx(shout)
print("[export] html for %s" % shout["slug"])
open(contentDir + shout["slug"] + ".html", "w").write(body)
else:
raise Exception("no content_items entry found")
def export_slug(slug, storage):
shout = storage["shouts"]["by_slug"][slug]
shout = storage["shouts"]["by_slug"].get(slug)
assert shout, "[export] no shout found by slug: %s " % slug
author = shout["authors"][0]
assert author, "[export] no author error"
export_body(shout, storage)
def export_email_subscriptions():
email_subscriptions_data = json.loads(open("migration/data/email_subscriptions.json").read())
for data in email_subscriptions_data:
# TODO: migrate to mailgun list manually
# migrate_email_subscription(data)
pass
print("[migration] " + str(len(email_subscriptions_data)) + " email subscriptions exported")
def export_shouts(storage):
# update what was just migrated or load json again
if len(storage["users"]["by_slugs"].keys()) == 0:
storage["users"]["by_slugs"] = json.loads(open(EXPORT_DEST + "authors.json").read())
print("[migration] " + str(len(storage["users"]["by_slugs"].keys())) + " exported authors ")
if len(storage["shouts"]["by_slugs"].keys()) == 0:
storage["shouts"]["by_slugs"] = json.loads(open(EXPORT_DEST + "articles.json").read())
print(
"[migration] " + str(len(storage["shouts"]["by_slugs"].keys())) + " exported articles "
)
for slug in storage["shouts"]["by_slugs"].keys():
export_slug(slug, storage)
def export_json(export_articles={}, export_authors={}, export_topics={}, export_comments={}):
open(EXPORT_DEST + "authors.json", "w").write(
json.dumps(
export_authors,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_authors.items())) + " authors exported")
open(EXPORT_DEST + "topics.json", "w").write(
json.dumps(
export_topics,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_topics.keys())) + " topics exported")
open(EXPORT_DEST + "articles.json", "w").write(
json.dumps(
export_articles,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_articles.items())) + " articles exported")
open(EXPORT_DEST + "comments.json", "w").write(
json.dumps(
export_comments,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_comments.items())) + " exported articles with comments")


@@ -1,276 +0,0 @@
import os
import re
from bs4 import BeautifulSoup
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
)
cdn = "https://images.discours.io"
def replace_tooltips(body):
# change if you prefer regexp
newbody = body
matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
for match in matches:
newbody = newbody.replace(
match.group(1), '<Tooltip text="' + match.group(2) + '" />'
)
if len(matches) > 0:
print("[extract] found %d tooltips" % len(matches))
return newbody
# def extract_footnotes(body, shout_dict):
# parts = body.split("&&&")
# lll = len(parts)
# newparts = list(parts)
# placed = False
# if lll & 1:
# if lll > 1:
# i = 1
# print("[extract] found %d footnotes in body" % (lll - 1))
# for part in parts[1:]:
# if i & 1:
# placed = True
# if 'a class="footnote-url" href=' in part:
# print("[extract] footnote: " + part)
# fn = 'a class="footnote-url" href="'
# exxtracted_link = part.split(fn, 1)[1].split('"', 1)[0]
# extracted_body = part.split(fn, 1)[1].split(">", 1)[1].split("</a>", 1)[0]
# print("[extract] footnote link: " + extracted_link)
# with local_session() as session:
# Reaction.create(
# {
# "shout": shout_dict["id"],
# "kind": ReactionKind.FOOTNOTE,
# "body": extracted_body,
# "range": str(body.index(fn + link) - len("<"))
# + ":"
# + str(body.index(extracted_body) + len("</a>")),
# }
# )
# newparts[i] = "<a href='#'></a>"
# else:
# newparts[i] = part
# i += 1
# return ("".join(newparts), placed)
# def place_tooltips(body):
# parts = body.split("&&&")
# lll = len(parts)
# newparts = list(parts)
# placed = False
# if lll & 1:
# if lll > 1:
# i = 1
# print("[extract] found %d tooltips" % (lll - 1))
# for part in parts[1:]:
# if i & 1:
# placed = True
# if 'a class="footnote-url" href=' in part:
# print("[extract] footnote: " + part)
# fn = 'a class="footnote-url" href="'
# link = part.split(fn, 1)[1].split('"', 1)[0]
# extracted_part = part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
# newparts[i] = (
# "<Tooltip"
# + (' link="' + link + '" ' if link else "")
# + ">"
# + extracted_part
# + "</Tooltip>"
# )
# else:
# newparts[i] = "<Tooltip>%s</Tooltip>" % part
# # print('[extract] ' + newparts[i])
# else:
# # print('[extract] ' + part[:10] + '..')
# newparts[i] = part
# i += 1
# return ("".join(newparts), placed)
IMG_REGEX = (
r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}="
)
IMG_REGEX += r"|[A-Za-z\d+\/]{2}==)))\)"
parentDir = "/".join(os.getcwd().split("/")[:-1])
public = parentDir + "/discoursio-web/public"
cache = {}
# def reextract_images(body, oid):
# # change if you prefer regexp
# matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
# i = 0
# for match in matches:
# print("[extract] image " + match.group(1))
# ext = match.group(3)
# name = oid + str(i)
# link = public + "/upload/image-" + name + "." + ext
# img = match.group(4)
# title = match.group(1) # NOTE: this is not the title
# if img not in cache:
# content = base64.b64decode(img + "==")
# print(str(len(img)) + " image bytes been written")
# open("../" + link, "wb").write(content)
# cache[img] = name
# i += 1
# else:
# print("[extract] image cached " + cache[img])
# body.replace(
# str(match), "![" + title + "](" + cdn + link + ")"
# ) # WARNING: this does not work
# return body
IMAGES = {
"data:image/png": "png",
"data:image/jpg": "jpg",
"data:image/jpeg": "jpg",
}
b64 = ";base64,"
di = "data:image"
def extract_media(entry):
"""normalized media extraction method"""
# media: [ { title, pic, url, body } ]
kind = entry.get("type")
if not kind:
print(entry)
raise Exception("shout no layout")
media = []
for m in entry.get("media") or []:
# title
title = m.get("title", "").replace("\n", " ").replace("&nbsp;", " ")
artist = m.get("performer") or m.get("artist")
if artist:
title = artist + " - " + title
# pic
url = m.get("fileUrl") or m.get("url", "")
pic = ""
if m.get("thumborId"):
pic = cdn + "/unsafe/" + m["thumborId"]
# url
if not url:
if kind == "Image":
url = pic
elif "youtubeId" in m:
url = "https://youtube.com/?watch=" + m["youtubeId"]
elif "vimeoId" in m:
url = "https://vimeo.com/" + m["vimeoId"]
# body
body = m.get("body") or m.get("literatureBody") or ""
media.append({"url": url, "pic": pic, "title": title, "body": body})
return media
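# Illustrative input/output (hypothetical entry; field shapes inferred from
# the code above):
# entry = {"type": "Music", "media": [{"title": "Track", "performer": "Artist",
# "fileUrl": "https://cdn.example/track.mp3"}]}
# extract_media(entry) ->
# [{"url": "https://cdn.example/track.mp3", "pic": "", "title": "Artist - Track", "body": ""}]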
def prepare_html_body(entry):
# body modifications
body = ""
kind = entry.get("type")
addon = ""
if kind == "Video":
addon = ""
for m in entry.get("media") or []:
if "youtubeId" in m:
addon += '<iframe width="420" height="345" src="https://www.youtube.com/embed/'
addon += m["youtubeId"]
addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
elif "vimeoId" in m:
addon += '<iframe src="https://player.vimeo.com/video/'
addon += m["vimeoId"]
addon += ' width="420" height="345" frameborder="0" allow="autoplay; fullscreen"'
addon += " allowfullscreen></iframe>"
else:
print("[extract] media is not supported")
print(m)
body += addon
elif kind == "Music":
addon = ""
for m in entry.get("media") or []:
artist = m.get("performer")
trackname = ""
if artist:
trackname += artist + " - "
if "title" in m:
trackname += m.get("title", "")
addon += "<figure><figcaption>"
addon += trackname
addon += '</figcaption><audio controls src="'
addon += m.get("fileUrl", "")
addon += '"></audio></figure>'
body += addon
body = extract_html(entry)
# if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
return body
def cleanup_html(body: str) -> str:
new_body = body
regex_remove = [
r"style=\"width:\s*\d+px;height:\s*\d+px;\"",
r"style=\"width:\s*\d+px;\"",
r"style=\"color: #000000;\"",
r"style=\"float: none;\"",
r"style=\"background: white;\"",
r"class=\"Apple-interchange-newline\"",
r"class=\"MsoNormalCxSpMiddle\"",
r"class=\"MsoNormal\"",
r"lang=\"EN-US\"",
r"id=\"docs-internal-guid-[\w-]+\"",
r"<p>\s*</p>",
r"<span></span>",
r"<i>\s*</i>",
r"<b>\s*</b>",
r"<h1>\s*</h1>",
r"<h2>\s*</h2>",
r"<h3>\s*</h3>",
r"<h4>\s*</h4>",
r"<div>\s*</div>",
]
regex_replace = {r"<br>\s*</p>": "</p>"}
changed = True
while changed:
# we need several iterations to clean nested tags this way
changed = False
new_body_iteration = new_body
for regex in regex_remove:
new_body = re.sub(regex, "", new_body)
for regex, replace in regex_replace.items():
new_body = re.sub(regex, replace, new_body)
if new_body_iteration != new_body:
changed = True
return new_body
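# Why the while-loop is needed: removing an inner empty tag can expose a newly
# empty outer tag. For example, cleanup_html("<p><span></span></p>") takes two
# iterations: the first drops "<span></span>", the second drops the now-empty
# "<p></p>", yielding "".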
def extract_html(entry, shout_id=None, cleanup=False):
body_orig = (entry.get("body") or "").replace(r"\(", "(").replace(r"\)", ")")
if cleanup:
# we do that before bs parsing to catch the invalid html
body_clean = cleanup_html(body_orig)
if body_clean != body_orig:
print(f"[migration] html cleaned for slug {entry.get('slug', None)}")
body_orig = body_clean
# if shout_id:
# extract_footnotes(body_orig, shout_id)
body_html = str(BeautifulSoup(body_orig, features="html.parser"))
if cleanup:
# we do that after bs parsing because it can add dummy tags
body_clean_html = cleanup_html(body_html)
if body_clean_html != body_html:
print(f"[migration] html cleaned after bs4 for slug {entry.get('slug', None)}")
body_html = body_clean_html
return body_html

File diff suppressed because it is too large


@@ -1,3 +0,0 @@
from .cli import main
main()


@@ -1,318 +0,0 @@
import argparse
import sys
from . import HTML2Text, __version__, config
# noinspection DuplicatedCode
def main() -> None:
baseurl = ""
class bcolors:
HEADER = "\033[95m"
OKBLUE = "\033[94m"
OKGREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
ENDC = "\033[0m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
p = argparse.ArgumentParser()
p.add_argument(
"--default-image-alt",
dest="default_image_alt",
default=config.DEFAULT_IMAGE_ALT,
help="The default alt string for images with missing ones",
)
p.add_argument(
"--pad-tables",
dest="pad_tables",
action="store_true",
default=config.PAD_TABLES,
help="pad the cells to equal column width in tables",
)
p.add_argument(
"--no-wrap-links",
dest="wrap_links",
action="store_false",
default=config.WRAP_LINKS,
help="don't wrap links during conversion",
)
p.add_argument(
"--wrap-list-items",
dest="wrap_list_items",
action="store_true",
default=config.WRAP_LIST_ITEMS,
help="wrap list items during conversion",
)
p.add_argument(
"--wrap-tables",
dest="wrap_tables",
action="store_true",
default=config.WRAP_TABLES,
help="wrap tables",
)
p.add_argument(
"--ignore-emphasis",
dest="ignore_emphasis",
action="store_true",
default=config.IGNORE_EMPHASIS,
help="don't include any formatting for emphasis",
)
p.add_argument(
"--reference-links",
dest="inline_links",
action="store_false",
default=config.INLINE_LINKS,
help="use reference style links instead of inline links",
)
p.add_argument(
"--ignore-links",
dest="ignore_links",
action="store_true",
default=config.IGNORE_ANCHORS,
help="don't include any formatting for links",
)
p.add_argument(
"--ignore-mailto-links",
action="store_true",
dest="ignore_mailto_links",
default=config.IGNORE_MAILTO_LINKS,
help="don't include mailto: links",
)
p.add_argument(
"--protect-links",
dest="protect_links",
action="store_true",
default=config.PROTECT_LINKS,
help="protect links from line breaks surrounding them with angle brackets",
)
p.add_argument(
"--ignore-images",
dest="ignore_images",
action="store_true",
default=config.IGNORE_IMAGES,
help="don't include any formatting for images",
)
p.add_argument(
"--images-as-html",
dest="images_as_html",
action="store_true",
default=config.IMAGES_AS_HTML,
help=(
"Always write image tags as raw html; preserves `height`, `width` and "
"`alt` if possible."
),
)
p.add_argument(
"--images-to-alt",
dest="images_to_alt",
action="store_true",
default=config.IMAGES_TO_ALT,
help="Discard image data, only keep alt text",
)
p.add_argument(
"--images-with-size",
dest="images_with_size",
action="store_true",
default=config.IMAGES_WITH_SIZE,
help=("Write image tags with height and width attrs as raw html to retain " "dimensions"),
)
p.add_argument(
"-g",
"--google-doc",
action="store_true",
dest="google_doc",
default=False,
help="convert an html-exported Google Document",
)
p.add_argument(
"-d",
"--dash-unordered-list",
action="store_true",
dest="ul_style_dash",
default=False,
help="use a dash rather than a star for unordered list items",
)
p.add_argument(
"-e",
"--asterisk-emphasis",
action="store_true",
dest="em_style_asterisk",
default=False,
help="use an asterisk rather than an underscore for emphasized text",
)
p.add_argument(
"-b",
"--body-width",
dest="body_width",
type=int,
default=config.BODY_WIDTH,
help="number of characters per output line, 0 for no wrap",
)
p.add_argument(
"-i",
"--google-list-indent",
dest="list_indent",
type=int,
default=config.GOOGLE_LIST_INDENT,
help="number of pixels Google indents nested lists",
)
p.add_argument(
"-s",
"--hide-strikethrough",
action="store_true",
dest="hide_strikethrough",
default=False,
help="hide strike-through text. only relevant when -g is " "specified as well",
)
p.add_argument(
"--escape-all",
action="store_true",
dest="escape_snob",
default=False,
help=(
"Escape all special characters. Output is less readable, but avoids "
"corner case formatting issues."
),
)
p.add_argument(
"--bypass-tables",
action="store_true",
dest="bypass_tables",
default=config.BYPASS_TABLES,
help="Format tables in HTML rather than Markdown syntax.",
)
p.add_argument(
"--ignore-tables",
action="store_true",
dest="ignore_tables",
default=config.IGNORE_TABLES,
help="Ignore table-related tags (table, th, td, tr) " "while keeping rows.",
)
p.add_argument(
"--single-line-break",
action="store_true",
dest="single_line_break",
default=config.SINGLE_LINE_BREAK,
help=(
"Use a single line break after a block element rather than two line "
"breaks. NOTE: Requires --body-width=0"
),
)
p.add_argument(
"--unicode-snob",
action="store_true",
dest="unicode_snob",
default=config.UNICODE_SNOB,
help="Use unicode throughout document",
)
p.add_argument(
"--no-automatic-links",
action="store_false",
dest="use_automatic_links",
default=config.USE_AUTOMATIC_LINKS,
help="Do not use automatic links wherever applicable",
)
p.add_argument(
"--no-skip-internal-links",
action="store_false",
dest="skip_internal_links",
default=config.SKIP_INTERNAL_LINKS,
help="Do not skip internal links",
)
p.add_argument(
"--links-after-para",
action="store_true",
dest="links_each_paragraph",
default=config.LINKS_EACH_PARAGRAPH,
help="Put links after each paragraph instead of document",
)
p.add_argument(
"--mark-code",
action="store_true",
dest="mark_code",
default=config.MARK_CODE,
help="Mark program code blocks with [code]...[/code]",
)
p.add_argument(
"--decode-errors",
dest="decode_errors",
default=config.DECODE_ERRORS,
help=(
"What to do in case of decode errors.'ignore', 'strict' and 'replace' are "
"acceptable values"
),
)
p.add_argument(
"--open-quote",
dest="open_quote",
default=config.OPEN_QUOTE,
help="The character used to open quotes",
)
p.add_argument(
"--close-quote",
dest="close_quote",
default=config.CLOSE_QUOTE,
help="The character used to close quotes",
)
p.add_argument("--version", action="version", version=".".join(map(str, __version__)))
p.add_argument("filename", nargs="?")
p.add_argument("encoding", nargs="?", default="utf-8")
args = p.parse_args()
if args.filename and args.filename != "-":
with open(args.filename, "rb") as fp:
data = fp.read()
else:
data = sys.stdin.buffer.read()
try:
html = data.decode(args.encoding, args.decode_errors)
except UnicodeDecodeError as err:
warning = bcolors.WARNING + "Warning:" + bcolors.ENDC
warning += " Use the " + bcolors.OKGREEN
warning += "--decode-errors=ignore" + bcolors.ENDC + " flag."
print(warning)
raise err
h = HTML2Text(baseurl=baseurl)
# handle options
if args.ul_style_dash:
h.ul_item_mark = "-"
if args.em_style_asterisk:
h.emphasis_mark = "*"
h.strong_mark = "__"
h.body_width = args.body_width
h.google_list_indent = args.list_indent
h.ignore_emphasis = args.ignore_emphasis
h.ignore_links = args.ignore_links
h.ignore_mailto_links = args.ignore_mailto_links
h.protect_links = args.protect_links
h.ignore_images = args.ignore_images
h.images_as_html = args.images_as_html
h.images_to_alt = args.images_to_alt
h.images_with_size = args.images_with_size
h.google_doc = args.google_doc
h.hide_strikethrough = args.hide_strikethrough
h.escape_snob = args.escape_snob
h.bypass_tables = args.bypass_tables
h.ignore_tables = args.ignore_tables
h.single_line_break = args.single_line_break
h.inline_links = args.inline_links
h.unicode_snob = args.unicode_snob
h.use_automatic_links = args.use_automatic_links
h.skip_internal_links = args.skip_internal_links
h.links_each_paragraph = args.links_each_paragraph
h.mark_code = args.mark_code
h.wrap_links = args.wrap_links
h.wrap_list_items = args.wrap_list_items
h.wrap_tables = args.wrap_tables
h.pad_tables = args.pad_tables
h.default_image_alt = args.default_image_alt
h.open_quote = args.open_quote
h.close_quote = args.close_quote
sys.stdout.write(h.handle(html))
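# A minimal programmatic sketch of the same conversion (hypothetical usage,
# mirroring the option wiring above):
#
# h = HTML2Text(baseurl="")
# h.body_width = 0
# print(h.handle("<p>Hello <b>world</b></p>")) # -> "Hello **world**"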


@@ -1,164 +0,0 @@
import re
# Use Unicode characters instead of their ascii pseudo-replacements
UNICODE_SNOB = True
# Marker to use for marking tables for padding post processing
TABLE_MARKER_FOR_PAD = "special_marker_for_table_padding"
# Escape all special characters. Output is less readable, but avoids
# corner case formatting issues.
ESCAPE_SNOB = True
# Put the links after each paragraph instead of at the end.
LINKS_EACH_PARAGRAPH = False
# Wrap long lines at position. 0 for no wrapping.
BODY_WIDTH = 0
# Don't show internal links (href="#local-anchor") -- corresponding link
# targets won't be visible in the plain text file anyway.
SKIP_INTERNAL_LINKS = False
# Use inline, rather than reference, formatting for images and links
INLINE_LINKS = True
# Protect links from line breaks surrounding them with angle brackets (in
# addition to their square brackets)
PROTECT_LINKS = True
WRAP_LINKS = True
# Wrap list items.
WRAP_LIST_ITEMS = False
# Wrap tables
WRAP_TABLES = False
# Number of pixels Google indents nested lists
GOOGLE_LIST_INDENT = 36
# Values Google and others may use to indicate bold text
BOLD_TEXT_STYLE_VALUES = ("bold", "700", "800", "900")
IGNORE_ANCHORS = False
IGNORE_MAILTO_LINKS = False
IGNORE_IMAGES = False
IMAGES_AS_HTML = False
IMAGES_TO_ALT = False
IMAGES_WITH_SIZE = False
IGNORE_EMPHASIS = False
MARK_CODE = True
DECODE_ERRORS = "strict"
DEFAULT_IMAGE_ALT = ""
PAD_TABLES = True
# Convert links with same href and text to <href> format
# if they are absolute links
USE_AUTOMATIC_LINKS = True
# For checking space-only lines on line 771
RE_SPACE = re.compile(r"\s\+")
RE_ORDERED_LIST_MATCHER = re.compile(r"\d+\.\s")
RE_UNORDERED_LIST_MATCHER = re.compile(r"[-\*\+]\s")
RE_MD_CHARS_MATCHER = re.compile(r"([\\\[\]\(\)])")
RE_MD_CHARS_MATCHER_ALL = re.compile(r"([`\*_{}\[\]\(\)#!])")
# to find links in the text
RE_LINK = re.compile(r"(\[.*?\] ?\(.*?\))|(\[.*?\]:.*?)")
# to find table separators
RE_TABLE = re.compile(r" \| ")
RE_MD_DOT_MATCHER = re.compile(
r"""
^ # start of line
(\s*\d+) # optional whitespace and a number
(\.) # dot
(?=\s) # lookahead assert whitespace
""",
re.MULTILINE | re.VERBOSE,
)
RE_MD_PLUS_MATCHER = re.compile(
r"""
^
(\s*)
(\+)
(?=\s)
""",
flags=re.MULTILINE | re.VERBOSE,
)
RE_MD_DASH_MATCHER = re.compile(
r"""
^
(\s*)
(-)
(?=\s|\-) # followed by whitespace (bullet list, or spaced out hr)
# or another dash (header or hr)
""",
flags=re.MULTILINE | re.VERBOSE,
)
RE_SLASH_CHARS = r"\`*_{}[]()#+-.!"
RE_MD_BACKSLASH_MATCHER = re.compile(
r"""
(\\) # match one slash
(?=[%s]) # followed by a char that requires escaping
"""
% re.escape(RE_SLASH_CHARS),
flags=re.VERBOSE,
)
UNIFIABLE = {
"rsquo": "'",
"lsquo": "'",
"rdquo": '"',
"ldquo": '"',
"copy": "(C)",
"mdash": "--",
"nbsp": " ",
"rarr": "->",
"larr": "<-",
"middot": "*",
"ndash": "-",
"oelig": "oe",
"aelig": "ae",
"agrave": "a",
"aacute": "a",
"acirc": "a",
"atilde": "a",
"auml": "a",
"aring": "a",
"egrave": "e",
"eacute": "e",
"ecirc": "e",
"euml": "e",
"igrave": "i",
"iacute": "i",
"icirc": "i",
"iuml": "i",
"ograve": "o",
"oacute": "o",
"ocirc": "o",
"otilde": "o",
"ouml": "o",
"ugrave": "u",
"uacute": "u",
"ucirc": "u",
"uuml": "u",
"lrm": "",
"rlm": "",
}
# Format tables in HTML rather than Markdown syntax
BYPASS_TABLES = False
# Ignore table-related tags (table, th, td, tr) while keeping rows
IGNORE_TABLES = False
# Use a single line break after a block element rather than two line breaks.
# NOTE: Requires body width setting to be 0.
SINGLE_LINE_BREAK = False
# Use double quotation marks when converting the <q> tag.
OPEN_QUOTE = '"'
CLOSE_QUOTE = '"'


@@ -1,18 +0,0 @@
from typing import Dict, Optional
class AnchorElement:
__slots__ = ["attrs", "count", "outcount"]
def __init__(self, attrs: Dict[str, Optional[str]], count: int, outcount: int):
self.attrs = attrs
self.count = count
self.outcount = outcount
class ListElement:
__slots__ = ["name", "num"]
def __init__(self, name: str, num: int):
self.name = name
self.num = num


@@ -1,3 +0,0 @@
class OutCallback:
def __call__(self, s: str) -> None:
...


@@ -1,282 +0,0 @@
import html.entities
from typing import Dict, List, Optional
from . import config
unifiable_n = {
html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
}
def hn(tag: str) -> int:
if tag[0] == "h" and len(tag) == 2:
n = tag[1]
if "0" < n <= "9":
return int(n)
return 0
def dumb_property_dict(style: str) -> Dict[str, str]:
"""
:returns: A hash of css attributes
"""
return {
x.strip().lower(): y.strip().lower()
for x, y in [z.split(":", 1) for z in style.split(";") if ":" in z]
}
def dumb_css_parser(data: str) -> Dict[str, Dict[str, str]]:
"""
:type data: str
:returns: A hash of css selectors, each of which contains a hash of
css attributes.
:rtype: dict
"""
# remove @import sentences
data += ";"
importIndex = data.find("@import")
while importIndex != -1:
data = data[0:importIndex] + data[data.find(";", importIndex) + 1 :]
importIndex = data.find("@import")
# parse the css. reverted from dictionary comprehension in order to
# support older pythons
pairs = [x.split("{") for x in data.split("}") if "{" in x.strip()]
try:
elements = {a.strip(): dumb_property_dict(b) for a, b in pairs}
except ValueError:
elements = {} # not that important
return elements
def element_style(
attrs: Dict[str, Optional[str]],
style_def: Dict[str, Dict[str, str]],
parent_style: Dict[str, str],
) -> Dict[str, str]:
"""
:type attrs: dict
:type style_def: dict
:type parent_style: dict
:returns: A hash of the 'final' style attributes of the element
:rtype: dict
"""
style = parent_style.copy()
attrs_class = attrs.get("class")
if attrs_class:
for css_class in attrs_class.split():
css_style = style_def.get("." + css_class, {})
style.update(css_style)
attrs_style = attrs.get("style")
if attrs_style:
immediate_style = dumb_property_dict(attrs_style)
style.update(immediate_style)
return style
def google_list_style(style: Dict[str, str]) -> str:
"""
Finds out whether this is an ordered or unordered list
:type style: dict
:rtype: str
"""
if "list-style-type" in style:
list_style = style["list-style-type"]
if list_style in ["disc", "circle", "square", "none"]:
return "ul"
return "ol"
def google_has_height(style: Dict[str, str]) -> bool:
"""
Check if the style of the element has the 'height' attribute
explicitly defined
:type style: dict
:rtype: bool
"""
return "height" in style
def google_text_emphasis(style: Dict[str, str]) -> List[str]:
"""
:type style: dict
:returns: A list of all emphasis modifiers of the element
:rtype: list
"""
emphasis = []
if "text-decoration" in style:
emphasis.append(style["text-decoration"])
if "font-style" in style:
emphasis.append(style["font-style"])
if "font-weight" in style:
emphasis.append(style["font-weight"])
return emphasis
def google_fixed_width_font(style: Dict[str, str]) -> bool:
"""
Check if the css of the current element defines a fixed width font
:type style: dict
:rtype: bool
"""
font_family = ""
if "font-family" in style:
font_family = style["font-family"]
return "courier new" == font_family or "consolas" == font_family
def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
"""
Extract numbering from list element attributes
:type attrs: dict
:rtype: int or None
"""
attrs_start = attrs.get("start")
if attrs_start:
try:
return int(attrs_start) - 1
except ValueError:
pass
return 0
def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool:
# If it appears to contain a link
# don't wrap
if not wrap_links and config.RE_LINK.search(para):
return True
# If the text begins with four spaces or one tab, it's a code block;
# don't wrap
if para[0:4] == "    " or para[0] == "\t":
return True
# If the text begins with only two "--", possibly preceded by
# whitespace, that's an emdash; so wrap.
stripped = para.lstrip()
if stripped[0:2] == "--" and len(stripped) > 2 and stripped[2] != "-":
return False
# I'm not sure what this is for; I thought it was to detect lists,
# but there's a <br>-inside-<span> case in one of the tests that
# also depends upon it.
if stripped[0:1] in ("-", "*") and not stripped[0:2] == "**":
return not wrap_list_items
# If text contains a pipe character it is likely a table
if not wrap_tables and config.RE_TABLE.search(para):
return True
# If the text begins with a single -, *, or +, followed by a space,
# or an integer, followed by a ., followed by a space (in either
# case optionally preceded by whitespace), it's a list; don't wrap.
return bool(
config.RE_ORDERED_LIST_MATCHER.match(stripped)
or config.RE_UNORDERED_LIST_MATCHER.match(stripped)
)
def escape_md(text: str) -> str:
"""
Escapes markdown-sensitive characters within other markdown
constructs.
"""
return config.RE_MD_CHARS_MATCHER.sub(r"\\\1", text)
def escape_md_section(text: str, snob: bool = False) -> str:
"""
Escapes markdown-sensitive characters across whole document sections.
"""
text = config.RE_MD_BACKSLASH_MATCHER.sub(r"\\\1", text)
if snob:
text = config.RE_MD_CHARS_MATCHER_ALL.sub(r"\\\1", text)
text = config.RE_MD_DOT_MATCHER.sub(r"\1\\\2", text)
text = config.RE_MD_PLUS_MATCHER.sub(r"\1\\\2", text)
text = config.RE_MD_DASH_MATCHER.sub(r"\1\\\2", text)
return text
def reformat_table(lines: List[str], right_margin: int) -> List[str]:
"""
Given the lines of a table
pads the cells and returns the new lines
"""
# find the maximum width of the columns
max_width = [len(x.rstrip()) + right_margin for x in lines[0].split("|")]
max_cols = len(max_width)
for line in lines:
cols = [x.rstrip() for x in line.split("|")]
num_cols = len(cols)
# don't drop any data if colspan attributes result in unequal lengths
if num_cols < max_cols:
cols += [""] * (max_cols - num_cols)
elif max_cols < num_cols:
max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
max_cols = num_cols
max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)]
# reformat
new_lines = []
for line in lines:
cols = [x.rstrip() for x in line.split("|")]
if set(line.strip()) == set("-|"):
filler = "-"
new_cols = [
x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
]
new_lines.append("|-" + "|".join(new_cols) + "|")
else:
filler = " "
new_cols = [
x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
]
new_lines.append("| " + "|".join(new_cols) + "|")
return new_lines
def pad_tables_in_text(text: str, right_margin: int = 1) -> str:
"""
Provide padding for tables in the text
"""
lines = text.split("\n")
table_buffer = [] # type: List[str]
table_started = False
new_lines = []
for line in lines:
# Toggle table started
if config.TABLE_MARKER_FOR_PAD in line:
table_started = not table_started
if not table_started:
table = reformat_table(table_buffer, right_margin)
new_lines.extend(table)
table_buffer = []
new_lines.append("")
continue
# Process lines
if table_started:
table_buffer.append(line)
else:
new_lines.append(line)
return "\n".join(new_lines)

View File

@@ -1,196 +0,0 @@
from datetime import datetime, timezone
from dateutil.parser import parse as date_parse
from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User
ts = datetime.now(tz=timezone.utc)
def auto_followers(session, topics, reaction_dict):
# creating shout's reactions following for reaction author
following1 = (
session.query(ShoutReactionsFollower)
.where(ShoutReactionsFollower.follower == reaction_dict["createdBy"])
.filter(ShoutReactionsFollower.shout == reaction_dict["shout"])
.first()
)
if not following1:
following1 = ShoutReactionsFollower.create(
follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True
)
session.add(following1)
# creating topics followings for reaction author
for t in topics:
tf = (
session.query(TopicFollower)
.where(TopicFollower.follower == reaction_dict["createdBy"])
.filter(TopicFollower.topic == t["id"])
.first()
)
if not tf:
topic_following = TopicFollower.create(
follower=reaction_dict["createdBy"], topic=t["id"], auto=True
)
session.add(topic_following)
def migrate_ratings(session, entry, reaction_dict):
for comment_rating_old in entry.get("ratings", []):
rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first()
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction_dict["id"],
"kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE,
"createdBy": rater.id if rater else 1,
}
cts = comment_rating_old.get("createdAt")
if cts:
re_reaction_dict["createdAt"] = date_parse(cts)
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
following2 = (
session.query(ShoutReactionsFollower)
.where(ShoutReactionsFollower.follower == re_reaction_dict["createdBy"])
.filter(ShoutReactionsFollower.shout == rr.shout)
.first()
)
if not following2:
following2 = ShoutReactionsFollower.create(
follower=re_reaction_dict["createdBy"], shout=rr.shout, auto=True
)
session.add(following2)
session.add(rr)
except Exception as e:
print("[migration] comment rating error: %r" % re_reaction_dict)
raise e
session.commit()
async def migrate(entry, storage):
"""
{
"_id": "hdtwS8fSyFLxXCgSC",
"body": "<p>",
"contentItem": "mnK8KsJHPRi8DrybQ",
"createdBy": "bMFPuyNg6qAD2mhXe",
"thread": "01/",
"createdAt": "2016-04-19 04:33:53+00:00",
"ratings": [
{ "createdBy": "AqmRukvRiExNpAe8C", "value": 1 },
{ "createdBy": "YdE76Wth3yqymKEu5", "value": 1 }
],
"rating": 2,
"updatedAt": "2020-05-27 19:22:57.091000+00:00",
"updatedBy": "0"
}
->
type Reaction {
id: Int!
shout: Shout!
createdAt: DateTime!
createdBy: User!
updatedAt: DateTime
deletedAt: DateTime
deletedBy: User
range: String # full / 0:2340
kind: ReactionKind!
body: String
replyTo: Reaction
stat: Stat
old_id: String
old_thread: String
}
"""
old_ts = entry.get("createdAt")
reaction_dict = {
"createdAt": (ts if not old_ts else date_parse(old_ts)),
"body": html2text(entry.get("body", "")),
"oid": entry["_id"],
}
shout_oid = entry.get("contentItem")
if shout_oid not in storage["shouts"]["by_oid"]:
if len(storage["shouts"]["by_oid"]) > 0:
return shout_oid
else:
print("[migration] no shouts migrated yet")
raise Exception("no shouts migrated yet")
else:
stage = "started"
reaction = None
with local_session() as session:
author = session.query(User).filter(User.oid == entry["createdBy"]).first()
old_shout = storage["shouts"]["by_oid"].get(shout_oid)
if not old_shout:
raise Exception("no old shout in storage")
else:
stage = "author and old id found"
try:
shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one()
if shout:
reaction_dict["shout"] = shout.id
reaction_dict["createdBy"] = author.id if author else 1
reaction_dict["kind"] = ReactionKind.COMMENT
# creating reaction from old comment
reaction = Reaction.create(**reaction_dict)
session.add(reaction)
# session.commit()
stage = "new reaction commited"
reaction_dict = reaction.dict()
topics = [t.dict() for t in shout.topics]
auto_followers(session, topics, reaction_dict)
migrate_ratings(session, entry, reaction_dict)
return reaction
except Exception as e:
print(e)
print(reaction)
raise Exception(stage)
return
def migrate_2stage(old_comment, idmap):
if old_comment.get("body"):
new_id = idmap.get(old_comment.get("_id"))
if new_id:
new_replyto_id = None
old_replyto_id = old_comment.get("replyTo")
if old_replyto_id:
new_replyto_id = int(idmap.get(old_replyto_id, "0"))
with local_session() as session:
comment = session.query(Reaction).where(Reaction.id == new_id).first()
try:
if new_replyto_id:
new_reply = (
session.query(Reaction).where(Reaction.id == new_replyto_id).first()
)
if not new_reply:
print(new_replyto_id)
raise Exception("cannot find reply by id!")
comment.replyTo = new_reply.id
session.add(comment)
srf = (
session.query(ShoutReactionsFollower)
.where(ShoutReactionsFollower.shout == comment.shout)
.filter(ShoutReactionsFollower.follower == comment.createdBy)
.first()
)
if not srf:
srf = ShoutReactionsFollower.create(
shout=comment.shout, follower=comment.createdBy, auto=True
)
session.add(srf)
session.commit()
except Exception:
raise Exception("cannot find a comment by oldid")


@@ -1,399 +0,0 @@
import json
import re
from datetime import datetime, timezone
from dateutil.parser import parse as date_parse
from sqlalchemy.exc import IntegrityError
from transliterate import translit
from base.orm import local_session
from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from orm.user import User
from services.stat.viewed import ViewedStorage
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now(tz=timezone.utc)
type2layout = {
"Article": "article",
"Literature": "literature",
"Music": "music",
"Video": "video",
"Image": "image",
}
anondict = {"slug": "anonymous", "id": 1, "name": "Аноним"}
discours = {"slug": "discours", "id": 2, "name": "Дискурс"}
def get_shout_slug(entry):
slug = entry.get("slug", "")
if not slug:
for friend in entry.get("friendlySlugs", []):
slug = friend.get("slug", "")
if slug:
break
slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
return slug
def create_author_from_app(app):
user = None
userdata = None
# check if email is used
if app["email"]:
with local_session() as session:
user = session.query(User).where(User.email == app["email"]).first()
if not user:
# print('[migration] app %r' % app)
name = app.get("name")
if name:
slug = translit(name, "ru", reversed=True).lower()
slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
print("[migration] created slug %s" % slug)
# check if slug is used
if slug:
user = session.query(User).where(User.slug == slug).first()
# get slug from email
if user:
slug = app["email"].split("@")[0]
user = session.query(User).where(User.slug == slug).first()
# one more try
if user:
slug += "-author"
user = session.query(User).where(User.slug == slug).first()
# create user with application data
if not user:
userdata = {
"username": app["email"],
"email": app["email"],
"name": app.get("name", ""),
"emailConfirmed": False,
"slug": slug,
"createdAt": ts,
"lastSeen": ts,
}
# print('[migration] userdata %r' % userdata)
user = User.create(**userdata)
session.add(user)
session.commit()
userdata["id"] = user.id
userdata = user.dict()
return userdata
else:
raise Exception("app is not ok", app)
async def create_shout(shout_dict):
s = Shout.create(**shout_dict)
author = s.authors[0]
with local_session() as session:
srf = (
session.query(ShoutReactionsFollower)
.where(ShoutReactionsFollower.shout == s.id)
.filter(ShoutReactionsFollower.follower == author.id)
.first()
)
if not srf:
srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
session.add(srf)
session.commit()
return s
async def get_user(entry, storage):
app = entry.get("application")
userdata = None
user_oid = None
if app:
userdata = create_author_from_app(app)
else:
user_oid = entry.get("createdBy")
if user_oid == "0":
userdata = discours
elif user_oid:
userdata = storage["users"]["by_oid"].get(user_oid)
if not userdata:
print("no userdata by oid, anonymous")
userdata = anondict
print(app)
# cleanup slug
if userdata:
slug = userdata.get("slug", "")
if slug:
slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
userdata["slug"] = slug
else:
userdata = anondict
user = await process_user(userdata, storage, user_oid)
return user, user_oid
async def migrate(entry, storage):
author, user_oid = await get_user(entry, storage)
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
"authors": [
author,
],
"slug": get_shout_slug(entry),
"cover": (
"https://images.discours.io/unsafe/" + entry["thumborId"]
if entry.get("thumborId")
else entry.get("image", {}).get("url")
),
"visibility": "public" if entry.get("published") else "community",
"publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
"deletedAt": date_parse(entry.get("deletedAt")) if entry.get("deletedAt") else None,
"createdAt": date_parse(entry.get("createdAt", OLD_DATE)),
"updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
"createdBy": author.id,
"topics": await add_topics_follower(entry, storage, author),
"body": extract_html(entry, cleanup=True),
}
# main topic patch
r["mainTopic"] = r["topics"][0]
# published author auto-confirm
if entry.get("published"):
with local_session() as session:
# update user.emailConfirmed if published
author.emailConfirmed = True
session.add(author)
session.commit()
# media
media = extract_media(entry)
r["media"] = json.dumps(media, ensure_ascii=True) if media else None
# ----------------------------------- copy
shout_dict = r.copy()
del shout_dict["topics"]
try:
# save shout to db
shout_dict["oid"] = entry.get("_id", "")
shout = await create_shout(shout_dict)
except IntegrityError as e:
print("[migration] create_shout integrity error", e)
shout = await resolve_create_shout(shout_dict)
except Exception as e:
raise Exception(e)
# update data
shout_dict = shout.dict()
shout_dict["authors"] = [
author.dict(),
]
# shout topics aftermath
shout_dict["topics"] = await topics_aftermath(r, storage)
# content_item ratings to reactions
await content_ratings_to_reactions(entry, shout_dict["slug"])
# shout views
await ViewedStorage.increment(
shout_dict["slug"], amount=entry.get("views", 1), viewer="old-discours"
)
# del shout_dict['ratings']
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
storage["shouts"]["by_slug"][shout_dict["slug"]] = shout_dict
return shout_dict
async def add_topics_follower(entry, storage, user):
topics = set([])
category = entry.get("category")
topics_by_oid = storage["topics"]["by_oid"]
oids = [
category,
] + entry.get("tags", [])
for toid in oids:
tslug = topics_by_oid.get(toid, {}).get("slug")
if tslug:
topics.add(tslug)
ttt = list(topics)
# add author as TopicFollower
with local_session() as session:
for tpcslug in topics:
try:
tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
if tpc:
tf = (
session.query(TopicFollower)
.where(TopicFollower.follower == user.id)
.filter(TopicFollower.topic == tpc.id)
.first()
)
if not tf:
tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True)
session.add(tf)
session.commit()
except IntegrityError:
print("[migration.shout] hidden by topic " + tpc.slug)
# main topic
maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
if maintopic in ttt:
ttt.remove(maintopic)
ttt.insert(0, maintopic)
return ttt
async def process_user(userdata, storage, oid):
with local_session() as session:
uid = userdata.get("id") # anonymous as
if not uid:
print(userdata)
print("has no id field, set it @anonymous")
userdata = anondict
uid = 1
user = session.query(User).filter(User.id == uid).first()
if not user:
try:
slug = userdata["slug"].lower().strip()
slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
userdata["slug"] = slug
user = User.create(**userdata)
session.add(user)
session.commit()
except IntegrityError:
print(f"[migration] user creating with slug {userdata['slug']}")
print("[migration] from userdata")
print(userdata)
raise Exception("[migration] cannot create user in content_items.get_user()")
if user.id == 946:
print("[migration] ***************** ALPINA")
if user.id == 2:
print("[migration] +++++++++++++++++ DISCOURS")
userdata["id"] = user.id
userdata["createdAt"] = user.createdAt
storage["users"]["by_slug"][userdata["slug"]] = userdata
storage["users"]["by_oid"][oid] = userdata
if not user:
raise Exception("could not get a user")
return user
async def resolve_create_shout(shout_dict):
with local_session() as session:
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
if s.createdAt != shout_dict["createdAt"]:
# create new with different slug
shout_dict["slug"] += "-" + shout_dict["layout"]
try:
await create_shout(shout_dict)
except IntegrityError as e:
print(e)
bump = True
else:
# update old
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
print("[migration] shout already exists, but differs in %s" % key)
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
bump = True
if bump:
s.update(shout_dict)
else:
print("[migration] something went wrong with shout: \n%r" % shout_dict)
raise Exception("")
session.commit()
return s
async def topics_aftermath(entry, storage):
r = []
for tpc in filter(lambda x: bool(x), entry["topics"]):
oldslug = tpc
newslug = storage["replacements"].get(oldslug, oldslug)
if newslug:
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == entry["slug"]).first()
new_topic = session.query(Topic).where(Topic.slug == newslug).first()
shout_topic_old = (
session.query(ShoutTopic)
.join(Shout)
.join(Topic)
.filter(Shout.slug == entry["slug"])
.filter(Topic.slug == oldslug)
.first()
)
if shout_topic_old:
shout_topic_old.update({"topic": new_topic.id})
else:
shout_topic_new = (
session.query(ShoutTopic)
.join(Shout)
.join(Topic)
.filter(Shout.slug == entry["slug"])
.filter(Topic.slug == newslug)
.first()
)
if not shout_topic_new:
try:
ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id})
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
if newslug not in r:
r.append(newslug)
else:
print("[migration] ignored topic slug: \n%r" % tpc["slug"])
# raise Exception
return r
async def content_ratings_to_reactions(entry, slug):
try:
with local_session() as session:
for content_rating in entry.get("ratings", []):
rater = (
session.query(User).filter(User.oid == content_rating["createdBy"]).first()
) or User.default_user
shout = session.query(Shout).where(Shout.slug == slug).first()
cts = content_rating.get("createdAt")
reaction_dict = {
"createdAt": date_parse(cts) if cts else None,
"kind": ReactionKind.LIKE
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id,
"shout": shout.id,
}
reaction = (
session.query(Reaction)
.filter(Reaction.shout == reaction_dict["shout"])
.filter(Reaction.createdBy == reaction_dict["createdBy"])
.filter(Reaction.kind == reaction_dict["kind"])
.first()
)
if reaction:
k = ReactionKind.AGREE if content_rating["value"] > 0 else ReactionKind.DISAGREE
reaction_dict["kind"] = k
reaction.update(reaction_dict)
session.add(reaction)
else:
rea = Reaction.create(**reaction_dict)
session.add(rea)
# shout_dict['ratings'].append(reaction_dict)
session.commit()
except Exception:
print("[migration] content_item.ratings error: \n%r" % content_rating)


@@ -1,35 +0,0 @@
# from base.orm import local_session
# from migration.extract import extract_md
# from migration.html2text import html2text
# from orm.reaction import Reaction, ReactionKind


# def migrate(entry, storage):
#     post_oid = entry["contentItem"]
#     print(post_oid)
#     shout_dict = storage["shouts"]["by_oid"].get(post_oid)
#     if shout_dict:
#         print(shout_dict["body"])
#         remark = {
#             "shout": shout_dict["id"],
#             "body": extract_md(html2text(entry["body"]), shout_dict),
#             "kind": ReactionKind.REMARK,
#         }
#
#         if entry.get("textBefore"):
#             remark["range"] = (
#                 str(shout_dict["body"].index(entry["textBefore"] or ""))
#                 + ":"
#                 + str(
#                     shout_dict["body"].index(entry["textAfter"] or "")
#                     + len(entry["textAfter"] or "")
#                 )
#             )
#
#         with local_session() as session:
#             rmrk = Reaction.create(**remark)
#             session.commit()
#             del rmrk["_sa_instance_state"]
#             return rmrk
#     return


@@ -1,828 +0,0 @@
{
"207": "207",
"1990-e": "90s",
"2000-e": "2000s",
"90-e": "90s",
"Georgia": "georgia",
"Japan": "japan",
"Sweden": "sweden",
"abstraktsiya": "abstract",
"absurdism": "absurdism",
"acclimatization": "acclimatisation",
"activism": "activism",
"adolf-gitler": "adolf-hitler",
"afrika": "africa",
"agata-kristi": "agatha-christie",
"agressivnoe-povedenie": "agression",
"agressiya": "agression",
"aktsii": "actions",
"aktsionizm": "actionism",
"alber-kamyu": "albert-kamus",
"albomy": "albums",
"aleksandr-griboedov": "aleksander-griboedov",
"aleksandr-pushkin": "aleksander-pushkin",
"aleksandr-solzhenitsyn": "aleksander-solzhenitsyn",
"aleksandr-vvedenskiy": "aleksander-vvedensky",
"aleksey-navalnyy": "alexey-navalny",
"alfavit": "alphabet",
"alkogol": "alcohol",
"alternativa": "alternative",
"alternative": "alternative",
"alternativnaya-istoriya": "alternative-history",
"amerika": "america",
"anarhizm": "anarchism",
"anatoliy-mariengof": "anatoly-mariengof",
"ancient-russia": "ancient-russia",
"andegraund": "underground",
"andrey-platonov": "andrey-platonov",
"andrey-rodionov": "andrey-rodionov",
"andrey-tarkovskiy": "andrey-tarkovsky",
"angliyskie-istorii": "english-stories",
"angliyskiy-yazyk": "english-langugae",
"ango": "ango",
"animation": "animation",
"animatsiya": "animation",
"anime": "anime",
"anri-volohonskiy": "anri-volohonsky",
"antifashizm": "anti-faschism",
"antiquity": "antiquity",
"antiutopiya": "dystopia",
"anton-dolin": "anton-dolin",
"antropology": "antropology",
"antropotsen": "antropocenus",
"architecture": "architecture",
"arheologiya": "archeology",
"arhetipy": "archetypes",
"arhiv": "archive",
"aristokraty": "aristocracy",
"aristotel": "aristotle",
"arktika": "arctic",
"armiya": "army",
"armiya-1": "army",
"art": "art",
"art-is": "art-is",
"artists": "artists",
"ateizm": "atheism",
"audio-poetry": "audio-poetry",
"audiopoeziya": "audio-poetry",
"audiospektakl": "audio-spectacles",
"auktsyon": "auktsyon",
"avangard": "avantgarde",
"avtofikshn": "autofiction",
"avtorskaya-pesnya": "bardsongs",
"azbuka-immigratsii": "immigration-basics",
"aziatskiy-kinematograf": "asian-cinema",
"b-movie": "b-movie",
"bannye-chteniya": "sauna-reading",
"bardsongs": "bardsongs",
"bdsm": "bdsm",
"beecake": "beecake",
"belarus": "belarus",
"belgiya": "belgium",
"bertold-breht": "berttold-brecht",
"bezumie": "madness",
"biography": "biography",
"biologiya": "biology",
"bipolyarnoe-rasstroystvo": "bipolar-disorder",
"bitniki": "beatnics",
"biznes": "business",
"blizhniy-vostok": "middle-east",
"blizost": "closeness",
"blocked-in-russia": "blocked-in-russia",
"blokada": "blockade",
"bob-dilan": "bob-dylan",
"bog": "god",
"bol": "pain",
"bolotnoe-delo": "bolotnaya-case",
"books": "books",
"boris-eltsin": "boris-eltsin",
"boris-godunov": "boris-godunov",
"boris-grebenschikov": "boris-grebenschikov",
"boris-nemtsov": "boris-nemtsov",
"boris-pasternak": "boris-pasternak",
"brak": "marriage",
"bret-iston-ellis": "bret-iston-ellis",
"buddizm": "buddhism",
"bullying": "bullying",
"bunt": "riot",
"burning-man": "burning-man",
"bytie": "being",
"byurokratiya": "bureaucracy",
"capitalism": "capitalism",
"censored-in-russia": "censored-in-russia",
"ch-rno-beloe": "black-and-white",
"ch-rnyy-yumor": "black-humour",
"chapters": "chapters",
"charity": "charity",
"chayldfri": "childfree",
"chechenskaya-voyna": "chechen-war",
"chechnya": "chechnya",
"chelovek": "male",
"chernobyl": "chernobyl",
"chernyy-yumor": "black-humour",
"children": "children",
"china": "china",
"chinovniki": "bureaucracy",
"chukotka": "chukotka",
"chuma": "plague",
"church": "church",
"cinema": "cinema",
"city": "city",
"civil-position": "civil-position",
"clips": "clips",
"collage": "collage",
"comics": "comics",
"conspiracy-theory": "conspiracy-theory",
"contemporary-art": "contemporary-art",
"contemporary-poetry": "poetry",
"contemporary-prose": "prose",
"coronavirus": "coronavirus",
"corruption": "corruption",
"creative-writing-school": "creative-writing-school",
"crime": "crime",
"criticism": "criticism",
"critiques": "reviews",
"culture": "culture",
"dadaizm": "dadaism",
"daniel-defo": "daniel-defoe",
"daniil-harms": "daniil-kharms",
"dante-aligeri": "dante-alighieri",
"darkveyv": "darkwave",
"death": "death",
"debaty": "debats",
"delo-seti": "seti-case",
"democracy": "democracy",
"demografiya": "demographics",
"demonstrations": "demonstrations",
"depression": "depression",
"derevnya": "village",
"derrida": "derrida",
"design": "design",
"detskie-doma": "orphanages",
"detstvo": "childhood",
"devid-linch": "david-linch",
"devyanostye": "90s",
"dialog": "dialogue",
"digital": "digital",
"digital-art": "digital-art",
"dinozavry": "dinosaurs",
"directing": "directing",
"diskurs": "discours",
"diskurs-1": "discourse",
"diskurs-analiz": "discourse-analytics",
"dissidenty": "dissidents",
"diy": "diy",
"dmitriy-donskoy": "dmitriy-donskoy",
"dmitriy-prigov": "dmitriy-prigov",
"dnevnik-1": "dairy",
"dnevniki": "dairies",
"documentary": "documentary",
"dokumentalnaya-poema": "documentary-poem",
"dokumentalnaya-poeziya": "documentary-poetry",
"dokumenty": "doсuments",
"domashnee-nasilie": "home-terror",
"donald-tramp": "donald-trump",
"donbass": "donbass",
"donbass-diary": "donbass-diary",
"donorstvo": "donation",
"dozhd": "rain",
"drama": "drama",
"dramaturgy": "dramaturgy",
"drawing": "drawing",
"drevo-zhizni": "tree-of-life",
"drugs": "drugs",
"duh": "spirit",
"dzhaz": "jazz",
"dzhek-keruak": "jack-keruak",
"dzhim-morrison": "jim-morrison",
"dzhordzh-romero": "george-romero",
"dzhordzho-agamben": "giorgio-agamben",
"ecology": "ecology",
"economics": "economics",
"eda": "food",
"editorial-statements": "editorial-statements",
"eduard-limonov": "eduard-limonov",
"education": "education",
"egor-letov": "egor-letov",
"ekspat": "expat",
"eksperiment": "experiments",
"eksperimentalnaya-muzyka": "experimental-music",
"ekspressionizm": "expressionism",
"ekstremizm": "extremism",
"ekzistentsializm-1": "existentialism",
"ekzistentsiya": "existence",
"elections": "elections",
"electronic": "electronics",
"electronics": "electronics",
"elena-glinskaya": "elena-glinskaya",
"elena-guro": "elena-guro",
"elizaveta-mnatsakanova": "elizaveta-mnatsakanova",
"embient": "ambient",
"emigration": "emigration",
"emil-dyurkgeym": "emile-durkheim",
"emotsii": "emotions",
"empiric": "empiric",
"epidemiya": "pandemic",
"erich-von-neff": "erich-von-neff",
"erotika": "erotics",
"essay": "essay",
"estetika": "aestetics",
"etika": "ethics",
"etno": "ethno",
"etnos": "ethnics",
"everyday-life": "everyday-life",
"evgeniy-onegin": "eugene-onegin",
"evolyutsiya": "evolution",
"exhibitions": "exhibitions",
"experience": "experiences",
"experimental": "experimental",
"experimental-music": "experimental-music",
"explanation": "explanation",
"faktcheking": "fact-checking",
"falsifikatsii": "falsifications",
"family": "family",
"fanfiki": "fan-fiction",
"fantastika": "sci-fi",
"fatalizm": "fatalism",
"fedor-dostoevskiy": "fedor-dostoevsky",
"fedor-ioannovich": "fedor-ioannovich",
"feleton": "feuilleton",
"feminism": "feminism",
"fenomenologiya": "phenomenology",
"fentezi": "fantasy",
"festival": "festival",
"festival-territoriya": "festival-territory",
"folk": "folk",
"folklor": "folklore",
"fotoreportazh": "photoreports",
"france": "france",
"frants-kafka": "franz-kafka",
"frederik-begbeder": "frederick-begbeder",
"freedom": "freedom",
"friendship": "friendship",
"fsb": "fsb",
"futbol": "footbool",
"future": "future",
"futuristy": "futurists",
"futurizm": "futurism",
"galereya": "gallery",
"galereya-anna-nova": "gallery-anna-nova",
"gdr": "gdr",
"gender": "gender",
"gendernyy-diskurs": "gender",
"gennadiy-aygi": "gennadiy-aygi",
"gerhard-rihter": "gerhard-rihter",
"germaniya": "germany",
"germenevtika": "hermeneutics",
"geroi": "heroes",
"girls": "girls",
"gkchp": "gkchp",
"glitch": "glitch",
"globalizatsiya": "globalisation",
"gollivud": "hollywood",
"gonzo": "gonzo",
"gore-ot-uma": "woe-from-wit",
"graffiti": "graffiti",
"graficheskaya-novella": "graphic-novell",
"graphics": "graphics",
"gravyura": "engraving",
"grazhdanskaya-oborona": "grazhdanskaya-oborona",
"gretsiya": "greece",
"griby": "mushrooms",
"gruziya-2": "georgia",
"gulag": "gulag",
"han-batyy": "khan-batyy",
"hayku": "haiku",
"health": "health",
"himiya": "chemistry",
"hip-hop": "hip-hop",
"history": "history",
"history-of-russia": "history-of-russia",
"holokost": "holocaust",
"horeografiya": "choreography",
"horror": "horror",
"hospis": "hospice",
"hristianstvo": "christianity",
"humans": "humans",
"humour": "humour",
"ideologiya": "ideology",
"idm": "idm",
"igil": "isis",
"igor-pomerantsev": "igor-pomerantsev",
"igra": "game",
"igra-prestolov": "game-of-throne",
"igry": "games",
"iisus-hristos": "jesus-christ",
"illness": "illness",
"illustration-history": "illustration-history",
"illustrations": "illustrations",
"imazhinizm": "imagism",
"immanuil-kant": "immanuel-kant",
"impressionizm": "impressionism",
"improvizatsiya": "improvisation",
"indi": "indie",
"individualizm": "individualism",
"infografika": "infographics",
"informatsiya": "information",
"ingmar-bergman": "ingmar-bergman",
"inklyuziya": "inclusion",
"installyatsiya": "installation",
"internet": "internet",
"interview": "interview",
"invalidnost": "disability",
"investigations": "investigations",
"iosif-brodskiy": "joseph-brodsky",
"iosif-stalin": "joseph-stalin",
"iskusstvennyy-intellekt": "artificial-intelligence",
"islam": "islam",
"istoriya-moskvy": "moscow-history",
"istoriya-nauki": "history-of-sceince",
"istoriya-o-medsestre": "nurse-story",
"istoriya-teatra": "theatre-history",
"italiya": "italy",
"italyanskiy-yazyk": "italian-language",
"iudaika": "judaica",
"ivan-groznyy": "ivan-grozny",
"ivan-iii-gorbatyy": "ivan-iii-gorbaty",
"ivan-kalita": "ivan-kalita",
"ivan-krylov": "ivan-krylov",
"izobreteniya": "inventions",
"izrail-1": "israel",
"jazz": "jazz",
"john-lennon": "john-lennon",
"journalism": "journalism",
"justice": "justice",
"k-pop": "k-pop",
"kalligrafiya": "calligraphy",
"karikatura": "caricatures",
"kartochki-rubinshteyna": "rubinstein-cards",
"katrin-nenasheva": "katrin-nenasheva",
"kavarga": "kavarga",
"kavkaz": "caucasus",
"kazan": "kazan",
"kiberbezopasnost": "cybersecurity",
"kinoklub": "cinema-club",
"kinokritika": "film-criticism",
"kirill-serebrennikov": "kirill-serebrennikov",
"kladbische": "cemetery",
"klassika": "classic",
"kollektivnoe-bessoznatelnoe": "сollective-unconscious",
"komediya": "comedy",
"kommunikatsii": "communications",
"kommunizm": "communism",
"kommuny": "communes",
"kompyuternye-igry": "computer-games",
"konets-vesny": "end-of-spring",
"konservatizm": "conservatism",
"kontrkultura": "counter-culture",
"kontseptualizm": "conceptualism",
"korotkometrazhka": "cinema-shorts",
"kosmos": "cosmos",
"kraudfanding": "crowdfunding",
"kriptovalyuty": "cryptocurrencies",
"krizis": "crisis",
"krov": "blood",
"krym": "crimea",
"kulturologiya": "culturology",
"kulty": "cults",
"kurdistan": "kurdistan",
"kurt-kobeyn": "kurt-cobain",
"kurt-vonnegut": "kurt-vonnegut",
"kvir": "queer",
"laboratoriya": "lab",
"language": "languages",
"lars-fon-trier": "lars-fon-trier",
"laws": "laws",
"lectures": "lectures",
"leto": "summer",
"lev-tolstoy": "leo-tolstoy",
"lgbt": "lgbt",
"liberalizm": "liberalism",
"libertarianstvo": "libertarianism",
"life": "life",
"likbez": "likbez",
"lingvistika": "linguistics",
"lirika": "lirics",
"literary-studies": "literary-studies",
"literature": "literature",
"literaturnyykaver": "literature-cover",
"lo-fi": "lo-fi",
"lomonosov": "lomonosov",
"love": "love",
"luzha-goluboy-krovi": "luzha-goluboy-krovi",
"lyudvig-vitgenshteyn": "ludwig-wittgenstein",
"lzhedmitriy": "false-dmitry",
"lzhenauka": "pseudoscience",
"magiya": "magic",
"maks-veber": "max-weber",
"manifests": "manifests",
"manipulyatsii-soznaniem": "mind-manipulation",
"marina-abramovich": "marina-abramovich",
"marketing": "marketing",
"marksizm": "marxism",
"marsel-dyushan": "marchel-duchamp",
"marsel-prust": "marcel-proust",
"martin-haydegger": "martin-hidegger",
"matematika": "maths",
"mayakovskiy": "vladimir-mayakovsky",
"media": "media",
"medicine": "medicine",
"memuary": "memoirs",
"menedzhment": "management",
"menty": "police",
"merab-mamardashvili": "merab-mamardashvili",
"mest": "revenge",
"metamodernizm": "metamodern",
"metavselennaya": "metaverse",
"metro": "metro",
"mifologiya": "mythology",
"mify": "myth",
"mihael-haneke": "michael-haneke",
"mihail-baryshnikov": "mihail-baryshnikov",
"mihail-bulgakov": "mihail-bulgakov",
"mikrotonalnaya-muzyka": "mikrotone-muzyka",
"minimalizm": "minimalism",
"minkult-privet": "minkult-privet",
"mir": "world",
"mirovozzrenie": "mindsets",
"mishel-fuko": "michel-foucault",
"mistika": "mystics",
"mitropolit-makariy": "mitropolit-makariy",
"mlm": "mlm",
"mobilizatsiya": "mobilisation",
"moda": "fashion",
"modernizm": "modernism",
"mokyumentari": "mockumentary",
"molodezh": "youth",
"moloko-plus": "moloko-plus",
"money": "money",
"monologs": "monologues",
"monstratsiya": "monstration",
"moralnaya-otvetstvennost": "moral-responsibility",
"more": "sea",
"moscow": "moscow",
"moshennichestvo": "frauds",
"moskovskiy-romanticheskiy-kontseptualizm": "moscow-romantic-conceptualism",
"moskovskoe-delo": "moscow-case",
"movies": "movies",
"mozg": "brain",
"multiplikatsiya": "animation",
"music": "music",
"musulmanstvo": "islam",
"muzei": "museum",
"muzey": "museum",
"muzhchiny": "man",
"myshlenie": "thinking",
"nagornyy-karabah": "nagorno-karabakh",
"nasilie-1": "violence",
"natsionalizm": "nationalism",
"natsionalnaya-ideya": "national-idea",
"natsizm": "nazism",
"natyurmort": "nature-morte",
"nauchpop": "pop-science",
"nbp": "nbp",
"nenavist": "hate",
"neofitsialnaya-literatura": "unofficial-literature",
"neoklassika": "neoclassic",
"neprozrachnye-smysly": "hidden-meanings",
"neravenstvo": "inequality",
"net-voyne": "no-war",
"new-year": "new-year",
"neyronauka": "neuro-science",
"neyroseti": "neural-networks",
"niu-vshe": "hse",
"nizhniy-novgorod": "nizhny-novgorod",
"nko": "nonprofits",
"nlo": "ufo",
"nobelevskaya-premiya": "nobel-prize",
"noize-mc": "noize-mc",
"nonkonformizm": "nonconformism",
"notforall": "notforall",
"novaya-drama": "new-drama",
"novosti": "news",
"noyz": "noise",
"nuar": "noir",
"oberiu": "oberiu",
"ocherk": "etudes",
"ochevidnyy-nuar": "ochevidnyy-nuar",
"odinochestvo": "loneliness",
"odna-kniga-odna-istoriya": "one-book-one-story",
"okrainy": "outskirts",
"omon": "swat",
"opinions": "opinions",
"oppozitsiya": "opposition",
"orhan-pamuk": "orhan-pamuk",
"ornitologiya": "ornitology",
"osen": "autumn",
"osip-mandelshtam": "osip-mandelshtam",
"oskar-uayld": "oscar-wilde",
"osoznanie": "awareness",
"otnosheniya": "relationship",
"pablo-pikasso": "pablo-picasso",
"painting": "painting",
"paintings": "painting",
"pamyat": "memory",
"pandemiya": "pandemic",
"parizh": "paris",
"patriotizm": "patriotism",
"patsifizm": "pacifism",
"paul-tselan": "paul-tselan",
"per-burd": "pierre-bourdieu",
"perezhivaniya": "worries",
"performance": "performance",
"peyzazh": "landscape",
"philology": "philology",
"philosophy": "philosophy",
"photo": "photography",
"photography": "photography",
"photoprojects": "photoprojects",
"plakaty": "posters",
"plastilin": "plasticine",
"plays": "plays",
"podrostki": "teenagers",
"poema": "poem",
"poems": "poems",
"poeticheskaya-proza": "poetic-prose",
"poetry": "poetry",
"poetry-of-squares": "poetry-of-squares",
"poetry-slam": "poetry-slam",
"pokoy": "peace",
"police": "police",
"politicheskoe-fentezi": "political-fantasy",
"politics": "politics",
"politzaklyuchennye": "political-prisoners",
"polsha": "poland",
"pomosch": "help",
"pop-art": "pop-art",
"pop-culture": "pop-culture",
"populyarnaya-psihologiya": "popular-psychology",
"pornografiya": "pornography",
"portret": "portrait",
"poslovitsy": "proverbs",
"post-pank": "post-punk",
"post-rok": "post-rock",
"postmodernism": "postmodernism",
"povest": "novells",
"povsednevnost": "everyday-life",
"power": "power",
"pravo": "right",
"pravoslavie": "orthodox",
"pravozaschitniki": "human-rights-activism",
"prazdnik": "holidays",
"predatelstvo": "betrayal",
"predprinimatelstvo": "entrepreneurship",
"premera": "premier",
"premiya-oskar": "oscar-prize",
"pribaltika-1": "baltic",
"priroda": "nature",
"prison": "prison",
"pritcha": "parable",
"privatnost": "privacy",
"progress": "progress",
"projects": "projects",
"prokrastinatsiya": "procrastination",
"propaganda": "propaganda",
"proschenie": "forgiveness",
"prose": "prose",
"proshloe": "past",
"prostitutsiya": "prostitution",
"prosveschenie": "enlightenment",
"protests": "protests",
"psalmy": "psalms",
"psihoanaliz": "psychoanalysis",
"psihodeliki": "psychodelics",
"pskov": "pskov",
"psychiatry": "psychiatry",
"psychology": "psychology",
"ptitsy": "birds",
"punk": "punk",
"r-b": "rnb",
"rasizm": "racism",
"realizm": "realism",
"redaktura": "editing",
"refleksiya": "reflection",
"reggi": "reggae",
"religion": "religion",
"rene-zhirar": "rene-girard",
"renesanss": "renessance",
"renovatsiya": "renovation",
"rep": "rap",
"reportage": "reportage",
"reportazh-1": "reportage",
"repressions": "repressions",
"research": "research",
"retroveyv": "retrowave",
"review": "review",
"revolution": "revolution",
"rezo-gabriadze": "rezo-gabriadze",
"risunki": "painting",
"roboty": "robots",
"rock": "rock",
"roditeli": "parents",
"romantizm": "romantism",
"romany": "novell",
"ronald-reygan": "ronald-reygan",
"roskomnadzor": "roskomnadzor",
"rossiyskoe-kino": "russian-cinema",
"rouling": "rowling",
"rozhava": "rojava",
"rpts": "rpts",
"rus-na-grani-sryva": "rus-na-grani-sryva",
"russia": "russia",
"russian-language": "russian-language",
"russian-literature": "russian-literature",
"russkaya-toska": "russian-toska",
"russkiy-mir": "russkiy-mir",
"salo": "lard",
"salvador-dali": "salvador-dali",
"samoidentifikatsiya": "self-identity",
"samoopredelenie": "self-definition",
"sankt-peterburg": "saint-petersburg",
"sasha-skochilenko": "sasha-skochilenko",
"satira": "satiric",
"saund-art": "sound-art",
"schaste": "happiness",
"school": "school",
"science": "science",
"sculpture": "sculpture",
"second-world-war": "second-world-war",
"sekond-hend": "second-hand",
"seksprosvet": "sex-education",
"seksualizirovannoe-nasilie": "sexualized-violence",
"seksualnoe-nasilie": "sexualized-violence",
"sekty": "sects",
"semi": "semi",
"semiotics": "semiotics",
"serbiya": "serbia",
"sergey-bodrov-mladshiy": "sergey-bodrov-junior",
"sergey-solov-v": "sergey-solovyov",
"serialy": "series",
"sever": "north",
"severnaya-koreya": "north-korea",
"sex": "sex",
"shotlandiya": "scotland",
"shugeyz": "shoegaze",
"siloviki": "siloviki",
"simeon-bekbulatovich": "simeon-bekbulatovich",
"simvolizm": "simbolism",
"siriya": "siria",
"skulptura": "sculpture",
"slavoy-zhizhek": "slavoj-zizek",
"smert-1": "death",
"smysl": "meaning",
"sny": "dreams",
"sobytiya": "events",
"social": "society",
"society": "society",
"sociology": "sociology",
"sofya-paleolog": "sofya-paleolog",
"sofya-vitovtovna": "sofya-vitovtovna",
"soobschestva": "communities",
"soprotivlenie": "resistence",
"sotsializm": "socialism",
"sotsialnaya-filosofiya": "social-philosophy",
"sotsiologiya-1": "sociology",
"sotsseti": "social-networks",
"sotvorenie-tretego-rima": "third-rome",
"sovremennost": "modernity",
"spaces": "spaces",
"spektakl": "spectacles",
"spetseffekty": "special-fx",
"spetsoperatsiya": "special-operation",
"spetssluzhby": "special-services",
"sport": "sport",
"srednevekove": "middle-age",
"state": "state",
"statistika": "statistics",
"stendap": "stand-up",
"stihi": "poetry",
"stoitsizm": "stoicism",
"stories": "stories",
"stoyanie-na-ugre": "stoyanie-na-ugre",
"strah": "fear",
"street-art": "street-art",
"stsenarii": "scenarios",
"sud": "court",
"summary": "summary",
"supergeroi": "superheroes",
"svetlana-aleksievich": "svetlana-aleksievich",
"svobodu-ivanu-golunovu": "free-ivan-golunov",
"syurrealizm": "surrealism",
"tales": "tales",
"tanets": "dance",
"tataro-mongolskoe-igo": "mongol-tatar-yoke",
"tatuirovki": "tattoo",
"technology": "technology",
"televidenie": "television",
"telo": "body",
"telo-kak-iskusstvo": "body-as-art",
"terrorizm": "terrorism",
"tests": "tests",
"text": "texts",
"the-beatles": "the-beatles",
"theater": "theater",
"theory": "theory",
"tokio": "tokio",
"torture": "torture",
"totalitarizm": "totalitarism",
"traditions": "traditions",
"tragicomedy": "tragicomedy",
"transgendernost": "transgender",
"translation": "translation",
"transport": "transport",
"travel": "travel",
"travma": "trauma",
"trendy": "trends",
"tretiy-reyh": "third-reich",
"triller": "thriller",
"tsar": "central-african-republic",
"tsar-edip": "oedipus",
"tsarevich-dmitriy": "tsarevich-dmitry",
"tsennosti": "values",
"tsenzura": "censorship",
"tseremonii": "ceremonies",
"turizm": "tourism",
"tvorchestvo": "creativity",
"ugnetennyy-zhilischnyy-klass": "oppressed-housing-class",
"uilyam-shekspir": "william-shakespeare",
"ukraina-2": "ukraine",
"ukraine": "ukraine",
"university": "university",
"urban-studies": "urban-studies",
"uroki-literatury": "literature-lessons",
"usa": "usa",
"ussr": "ussr",
"utopiya": "utopia",
"utrata": "loss",
"valter-benyamin": "valter-benyamin",
"varlam-shalamov": "varlam-shalamov",
"vasiliy-ii-temnyy": "basil-ii-temnyy",
"vasiliy-iii": "basil-iii",
"vdnh": "vdnh",
"vechnost": "ethernety",
"velikobritaniya": "great-britain",
"velimir-hlebnikov": "velimir-hlebnikov",
"velkom-tu-greyt-britn": "welcome-to-great-britain",
"venedikt-erofeev": "venedikt-erofeev",
"venetsiya": "veneece",
"vengriya": "hungary",
"verlibry": "free-verse",
"veschi": "things",
"vessels": "vessels",
"veterany": "veterans",
"video": "video",
"videoart": "videoart",
"videoklip": "clips",
"videopoeziya": "video-poetry",
"viktor-astafev": "viktor-astafev",
"viktor-pelevin": "viktor-pelevin",
"vilgelm-rayh": "wilhelm-reich",
"vinzavod": "vinzavod",
"violence": "violence",
"visual-culture": "visual-culture",
"vizualnaya-poeziya": "visual-poetry",
"vladimir-lenin": "vladimir-lenin",
"vladimir-mayakovskiy": "vladimir-mayakovsky",
"vladimir-nabokov": "vladimir-nabokov",
"vladimir-putin": "vladimir-putin",
"vladimir-sorokin": "vladimir-sorokin",
"vladimir-voynovich": "vladimir-voynovich",
"vnutrenniy-opyt": "inner-expirience",
"volga": "volga",
"volontery": "volonteurs",
"vong-karvay": "wong-karwai",
"vospominaniya": "memories",
"vostok": "east",
"voyna-na-ukraine": "war-in-ukraine",
"voyna-v-ukraine": "war-in-ukraine",
"vremya": "time",
"vudi-allen": "woody-allen",
"vynuzhdennye-otnosheniya": "forced-relationship",
"war": "war",
"war-in-ukraine-images": "war-in-ukrahine-images",
"women": "women",
"work": "work",
"writers": "writers",
"xx-century": "xx-century",
"yakob-yordans": "yakob-yordans",
"yan-vermeer": "yan-vermeer",
"yanka-dyagileva": "yanka-dyagileva",
"yaponskaya-literatura": "japan-literature",
"yazychestvo": "paganism",
"youth": "youth",
"yozef-rot": "yozef-rot",
"yurgen-habermas": "jorgen-habermas",
"za-liniey-mannergeyma": "behind-mannerheim-line",
"zabota": "care",
"zahar-prilepin": "zahar-prilepin",
"zakonodatelstvo": "laws",
"zakony-mira": "world-laws",
"zametki": "notes",
"zhelanie": "wish",
"zhivotnye": "animals",
"zhoze-saramago": "jose-saramago",
"zigmund-freyd": "sigmund-freud",
"zolotaya-orda": "golden-horde",
"zombi": "zombie",
"zombi-simpsony": "zombie-simpsons"
}
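This map looks like the topic-slug replacement table consumed earlier as
storage["replacements"]. A minimal loading sketch, assuming the file lives at
migration/tables/replacements.json (the path is a guess):

    import json

    with open("migration/tables/replacements.json") as f:
        storage = {"replacements": json.load(f)}
    # storage["replacements"].get("stihi", "stihi")  ->  "poetry"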


@@ -1,31 +0,0 @@
from base.orm import local_session
from migration.html2text import html2text
from orm import Topic


def migrate(entry):
    body_orig = entry.get("description", "").replace("&nbsp;", " ")
    topic_dict = {
        "slug": entry["slug"],
        "oid": entry["_id"],
        "title": entry["title"].replace("&nbsp;", " "),
        "body": html2text(body_orig),
    }
    with local_session() as session:
        slug = topic_dict["slug"]
        topic = session.query(Topic).filter(Topic.slug == slug).first() or Topic.create(
            **topic_dict
        )
        if not topic:
            raise Exception("no topic!")
        if topic:
            # keep the shorter title and the longer body of the two versions
            if len(topic.title) > len(topic_dict["title"]):
                Topic.update(topic, {"title": topic_dict["title"]})
            if len(topic.body) < len(topic_dict["body"]):
                Topic.update(topic, {"body": topic_dict["body"]})
            session.commit()
        # print(topic.__dict__)
        rt = topic.__dict__.copy()
        del rt["_sa_instance_state"]
        return rt
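A hedged usage sketch for the topic migrator above; the entry fields mirror
the ones the function reads, while the values are invented:

    entry = {
        "_id": "5bc61d50c67f0f03c0b8a1e2",
        "slug": "poetry",
        "title": "Poetry",
        "description": "<p>Poems&nbsp;and verse</p>",
    }
    # migrate(entry)  # upserts the Topic, returns its dict minus ORM state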


@@ -1,156 +0,0 @@
import re

from bs4 import BeautifulSoup
from dateutil.parser import parse
from sqlalchemy.exc import IntegrityError

from base.orm import local_session
from orm.user import AuthorFollower, User, UserRating


def migrate(entry):  # noqa: C901
    if "subscribedTo" in entry:
        del entry["subscribedTo"]
    email = entry["emails"][0]["address"]
    user_dict = {
        "oid": entry["_id"],
        "roles": [],
        "ratings": [],
        "username": email,
        "email": email,
        "createdAt": parse(entry["createdAt"]),
        "emailConfirmed": ("@discours.io" in email) or bool(entry["emails"][0]["verified"]),
        "muted": False,  # amnesty
        "links": [],
        "name": "anonymous",
        "password": entry["services"]["password"].get("bcrypt"),
    }
    if "updatedAt" in entry:
        user_dict["updatedAt"] = parse(entry["updatedAt"])
    if "wasOnlineAt" in entry:
        user_dict["lastSeen"] = parse(entry["wasOnlineAt"])
    if entry.get("profile"):
        # slug
        slug = entry["profile"].get("path").lower()
        slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip()
        user_dict["slug"] = slug
        bio = (
            (entry.get("profile", {"bio": ""}).get("bio") or "")
            .replace(r"\(", "(")
            .replace(r"\)", ")")
        )
        bio_text = BeautifulSoup(bio, features="lxml").text
        if len(bio_text) > 120:
            user_dict["about"] = bio_text
        else:
            user_dict["bio"] = bio_text
        # userpic
        try:
            user_dict["userpic"] = (
                "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"]
            )
        except KeyError:
            try:
                user_dict["userpic"] = entry["profile"]["image"]["url"]
            except KeyError:
                user_dict["userpic"] = ""
        # name
        fn = entry["profile"].get("firstName", "")
        ln = entry["profile"].get("lastName", "")
        name = fn if fn else ""
        name = (name + " " + ln) if ln else name
        if not name:
            name = slug if slug else "anonymous"
        name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name
        user_dict["name"] = name
        # links
        fb = entry["profile"].get("facebook", False)
        if fb:
            user_dict["links"].append(fb)
        vk = entry["profile"].get("vkontakte", False)
        if vk:
            user_dict["links"].append(vk)
        tr = entry["profile"].get("twitter", False)
        if tr:
            user_dict["links"].append(tr)
        ws = entry["profile"].get("website", False)
        if ws:
            user_dict["links"].append(ws)
    # some checks; "slug" may be absent when the entry has no profile, hence .get()
    if not user_dict.get("slug") and len(user_dict["links"]) > 0:
        user_dict["slug"] = user_dict["links"][0].split("/")[-1]
    user_dict["slug"] = user_dict.get("slug", user_dict["email"].split("@")[0])
    oid = user_dict["oid"]
    user_dict["slug"] = user_dict["slug"].lower().strip().replace(" ", "-")
    try:
        user = User.create(**user_dict.copy())
    except IntegrityError:
        print("[migration] cannot create user " + user_dict["slug"])
        with local_session() as session:
            old_user = session.query(User).filter(User.slug == user_dict["slug"]).first()
            old_user.oid = oid
            old_user.password = user_dict["password"]
            session.commit()
            user = old_user
    if not user:
        print("[migration] ERROR: cannot find user " + user_dict["slug"])
        raise Exception
    user_dict["id"] = user.id
    return user_dict


def post_migrate():
    old_discours_dict = {
        "slug": "old-discours",
        "username": "old-discours",
        "email": "old@discours.io",
        "name": "Просмотры на старой версии сайта",  # "Views on the old site version"
    }
    with local_session() as session:
        old_discours_user = User.create(**old_discours_dict)
        session.add(old_discours_user)
        session.commit()


def migrate_2stage(entry, id_map):
    ce = 0
    for rating_entry in entry.get("ratings", []):
        rater_oid = rating_entry["createdBy"]
        rater_slug = id_map.get(rater_oid)
        if not rater_slug:
            ce += 1
            # print(rating_entry)
            continue
        oid = entry["_id"]
        author_slug = id_map.get(oid)
        with local_session() as session:
            try:
                rater = session.query(User).where(User.slug == rater_slug).one()
                user = session.query(User).where(User.slug == author_slug).one()
                user_rating_dict = {
                    "value": rating_entry["value"],
                    "rater": rater.id,
                    "user": user.id,
                }
                user_rating = UserRating.create(**user_rating_dict)
                if user_rating_dict["value"] > 0:
                    # a positive rating also auto-subscribes the rater to the author
                    af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
                    session.add(af)
                session.add(user_rating)
                session.commit()
            except IntegrityError:
                print("[migration] cannot rate " + author_slug + "'s by " + rater_slug)
            except Exception as e:
                print(e)
    return ce
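The user migration appears to run in two passes: migrate() upserts each user
and returns a dict from which an oid -> slug id_map can be built, after which
migrate_2stage() replays the ratings once every rater exists. A minimal driver
sketch under that assumption (users_data is a hypothetical list of raw entries):

    id_map = {}
    for entry in users_data:
        user_dict = migrate(entry)
        id_map[user_dict["oid"]] = user_dict["slug"]
    for entry in users_data:
        missing = migrate_2stage(entry, id_map)  # count of unknown raters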

Some files were not shown because too many files have changed in this diff.