This commit is contained in:
tonyrewin 2022-06-25 17:52:55 +03:00
commit e14a2c4d03
8 changed files with 1059 additions and 1012 deletions

View File

@ -4,25 +4,35 @@ from starlette.exceptions import HTTPException
from auth.authenticate import EmailAuthenticate, ResetPassword from auth.authenticate import EmailAuthenticate, ResetPassword
from settings import BACKEND_URL, MAILGUN_API_KEY, MAILGUN_DOMAIN, RESET_PWD_URL, CONFIRM_EMAIL_URL from settings import BACKEND_URL, MAILGUN_API_KEY, MAILGUN_DOMAIN, RESET_PWD_URL, \
CONFIRM_EMAIL_URL, ERROR_URL_ON_FRONTEND
MAILGUN_API_URL = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN) MAILGUN_API_URL = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN)
MAILGUN_FROM = "postmaster <postmaster@%s>" % (MAILGUN_DOMAIN) MAILGUN_FROM = "postmaster <postmaster@%s>" % (MAILGUN_DOMAIN)
AUTH_URL = "%s/email_authorize" % (BACKEND_URL) AUTH_URL = "%s/email_authorize" % (BACKEND_URL)
email_templates = {"confirm_email" : "", "auth_email" : "", "reset_password_email" : ""}
def load_email_templates():
for name in email_templates:
filename = "templates/%s.tmpl" % name
with open(filename) as f:
email_templates[name] = f.read()
print("all email templates loaded")
async def send_confirm_email(user): async def send_confirm_email(user):
text = "<html><body>To confirm registration follow the <a href='%s'>link</link></body></html>" text = email_templates["confirm_email"]
token = await EmailAuthenticate.get_email_token(user) token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token) await send_email(user, AUTH_URL, text, token)
async def send_auth_email(user): async def send_auth_email(user):
text = "<html><body>To enter the site follow the <a href='%s'>link</link></body></html>" text = email_templates["auth_email"]
token = await EmailAuthenticate.get_email_token(user) token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token) await send_email(user, AUTH_URL, text, token)
async def send_reset_password_email(user): async def send_reset_password_email(user):
text = "<html><body>To reset password follow the <a href='%s'>link</link></body></html>" text = email_templates["reset_password_email"]
token = await ResetPassword.get_reset_token(user) token = await ResetPassword.get_reset_token(user)
await send_email(user, RESET_PWD_URL, text, token) await send_email(user, RESET_PWD_URL, text, token)
@ -45,9 +55,14 @@ async def send_email(user, url, text, token):
async def email_authorize(request): async def email_authorize(request):
token = request.query_params.get('token') token = request.query_params.get('token')
if not token: if not token:
raise HTTPException(500, "invalid url") url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url = url_with_error)
auth_token, user = await EmailAuthenticate.authenticate(token) try:
auth_token, user = await EmailAuthenticate.authenticate(token)
except:
url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url = url_with_error)
if not user.emailConfirmed: if not user.emailConfirmed:
with local_session() as session: with local_session() as session:

File diff suppressed because it is too large Load Diff

View File

@ -39,6 +39,7 @@ def get_metadata(r):
metadata['createdAt'] = r.get('createdAt', ts) metadata['createdAt'] = r.get('createdAt', ts)
metadata['layout'] = r['layout'] metadata['layout'] = r['layout']
metadata['topics'] = [topic['slug'] for topic in r['topics']] metadata['topics'] = [topic['slug'] for topic in r['topics']]
metadata['topics'].sort()
if r.get('cover', False): if r.get('cover', False):
metadata['cover'] = r.get('cover') metadata['cover'] = r.get('cover')
return metadata return metadata
@ -80,7 +81,6 @@ def migrate(entry, users_by_oid, topics_by_oid):
'createdAt': entry.get('createdAt', '2016-03-05 22:22:00.350000') 'createdAt': entry.get('createdAt', '2016-03-05 22:22:00.350000')
} }
r['slug'] = entry.get('slug', '') r['slug'] = entry.get('slug', '')
body_orig = entry.get('body', '')
if not r['slug'] and entry.get('friendlySlugs') is not None: if not r['slug'] and entry.get('friendlySlugs') is not None:
r['slug'] = entry['friendlySlugs']['slug'][0]['slug'] r['slug'] = entry['friendlySlugs']['slug'][0]['slug']
if(r['slug'] is None): if(r['slug'] is None):
@ -94,12 +94,12 @@ def migrate(entry, users_by_oid, topics_by_oid):
mainTopic = topics_by_oid.get(category) mainTopic = topics_by_oid.get(category)
if mainTopic: if mainTopic:
r['mainTopic'] = mainTopic["slug"] r['mainTopic'] = mainTopic["slug"]
topic_oids = set([category]) topic_oids = [category, ]
topic_oids.update(entry.get("tags", [])) taglist = entry.get("tags", [])
topic_oids.extend(taglist)
for oid in topic_oids: for oid in topic_oids:
if oid in topics_by_oid: if oid in topics_by_oid:
r['topics'].append(topics_by_oid[oid]) r['topics'].append(topics_by_oid[oid])
if entry.get('image') is not None: if entry.get('image') is not None:
r['cover'] = entry['image']['url'] r['cover'] = entry['image']['url']
if entry.get('thumborId') is not None: if entry.get('thumborId') is not None:
@ -116,7 +116,7 @@ def migrate(entry, users_by_oid, topics_by_oid):
else: else:
body_html = str(BeautifulSoup( body_html = str(BeautifulSoup(
body_orig, features="html.parser")) body_orig, features="html.parser"))
r['body'] = body_html # html2text(body_html) r['body'] = html2text(body_html)
else: else:
print(r['slug'] + ': literature has no media') print(r['slug'] + ': literature has no media')
elif entry.get('type') == 'Video': elif entry.get('type') == 'Video':
@ -127,17 +127,31 @@ def migrate(entry, users_by_oid, topics_by_oid):
if video_url == '#': if video_url == '#':
video_url = 'https://vimeo.com/' + vm if vm else '#' video_url = 'https://vimeo.com/' + vm if vm else '#'
if video_url == '#': if video_url == '#':
print(entry.get('media', 'NO MEDIA!')) print(entry.get('media', 'UNKNOWN MEDIA PROVIDER!'))
# raise Exception # raise Exception
r['body'] = '<ShoutVideo src=\"' + video_url + \ therestof = html2text(m.get('body', ''))
'\" />' + html2text(m.get('body', '')) # FIXME r['body'] = 'import VideoPlayer from \"src/components/Article/VideoPlayer\"\n' + \
'<VideoPlayer src=\"''' + video_url + '\" />\n\n' + therestof
elif entry.get('type') == 'Music': elif entry.get('type') == 'Music':
r['body'] = '<ShoutMusic media={\"' + \ r['body'] = 'import MusicPlayer from \"src/components/MusicPlayer\"\n'
json.dumps(entry['media']) + '\"} />' # FIXME for m in entry['media']:
if m == { 'main': 'true' } or m == { 'main': True } or m == {}:
continue
# TODO: mark highlighted track isMain == True
try: r['body'] += '<MusicPlayer src=\"' + m['fileUrl'] + '\"'
except: print(m)
try: r['body'] += ' title=\"' + m['title'] + '\"'
except: print(m)
r['body'] += ' />\n\n'
r['body'] += html2text(m.get('body', ''))
elif entry.get('type') == 'Image':
m = r.get('media')
try: r['body'] = '<img src=\"' + r['cover'] + '\" />'
except: print(entry)
if r.get('body') is None: if r.get('body') is None:
body_orig = entry.get('body', '') body_orig = entry.get('body', '')
body_html = str(BeautifulSoup(body_orig, features="html.parser")) body_html = str(BeautifulSoup(body_orig, features="html.parser"))
r['body'] = body_html # html2text(body_html) r['body'] = html2text(body_html)
body = r.get('body', '') body = r.get('body', '')
# get author data # get author data
@ -176,8 +190,8 @@ def migrate(entry, users_by_oid, topics_by_oid):
if entry['published']: if entry['published']:
metadata = get_metadata(shout_dict) metadata = get_metadata(shout_dict)
content = frontmatter.dumps(frontmatter.Post(body, **metadata)) content = frontmatter.dumps(frontmatter.Post(body, **metadata))
ext = 'md' ext = 'mdx'
open('migration/content/' + r['layout'] + '/' + r['slug'] + '.' + ext, 'w').write(content) open('../discoursio-web/content/' + r['layout'] + '/' + r['slug'] + '.' + ext, 'w').write(content)
try: try:
shout_dict['createdAt'] = date_parse(r.get('createdAt')) if entry.get('createdAt') else ts shout_dict['createdAt'] = date_parse(r.get('createdAt')) if entry.get('createdAt') else ts
shout_dict['publishedAt'] = date_parse(entry.get('publishedAt')) if entry.get('published') else None shout_dict['publishedAt'] = date_parse(entry.get('publishedAt')) if entry.get('published') else None

View File

@ -219,7 +219,7 @@ async def invite_author(_, author_slug, shout):
@mutation.field("removeAuthor") @mutation.field("removeAuthor")
@login_required @login_required
async def invite_author(_, author_slug, shout): async def remove_author(_, author_slug, shout):
auth = info.context["request"].auth auth = info.context["request"].auth
user_id = auth.user_id user_id = auth.user_id

View File

@ -8,6 +8,7 @@ BACKEND_URL = environ.get("BACKEND_URL") or "https://localhost:8080"
OAUTH_CALLBACK_URL = environ.get("OAUTH_CALLBACK_URL") or "https://localhost:8080" OAUTH_CALLBACK_URL = environ.get("OAUTH_CALLBACK_URL") or "https://localhost:8080"
RESET_PWD_URL = environ.get("RESET_PWD_URL") or "https://localhost:8080/reset_pwd" RESET_PWD_URL = environ.get("RESET_PWD_URL") or "https://localhost:8080/reset_pwd"
CONFIRM_EMAIL_URL = environ.get("CONFIRM_EMAIL_URL") or "https://new.discours.io" CONFIRM_EMAIL_URL = environ.get("CONFIRM_EMAIL_URL") or "https://new.discours.io"
ERROR_URL_ON_FRONTEND = environ.get("ERROR_URL_ON_FRONTEND") or "https://new.discours.io"
DB_URL = environ.get("DATABASE_URL") or environ.get("DB_URL") or "sqlite:///db.sqlite3" DB_URL = environ.get("DATABASE_URL") or environ.get("DB_URL") or "sqlite:///db.sqlite3"
JWT_ALGORITHM = "HS256" JWT_ALGORITHM = "HS256"

View File

@ -0,0 +1 @@
<html><body>To enter the site follow the <a href='%s'>link</a></body></html>

View File

@ -0,0 +1 @@
<html><body>To confirm registration follow the <a href='%s'>link</a></body></html>

View File

@ -0,0 +1 @@
<html><body>To reset password follow the <a href='%s'>link</a></body></html>