Compare commits


No commits in common. "master" and "v4.41.0" have entirely different histories.

82 changed files with 890 additions and 3369 deletions

View File

@ -1,6 +1,7 @@
name: Test and lint
-on: [push, pull_request]
+on:
+  push:
jobs:
  lint:
@ -138,12 +139,6 @@ jobs:
        with:
          fetch-depth: 0
-     - name: Set up QEMU
-       uses: docker/setup-qemu-action@v2
-     - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@v2
      - name: Create Sentry release
        uses: getsentry/action-release@v1
        env:
@ -163,7 +158,6 @@ jobs:
        uses: docker/build-push-action@v3
        with:
          context: .
-         platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}

View File

@ -68,12 +68,6 @@ For most tests, you will need to have ``redis`` installed and started on your ma
sh scripts/run-test.sh
```
-You can also run tests using a local Postgres DB to speed things up. This can be done by
-- creating an empty test DB and running the database migration by `dropdb test && createdb test && DB_URI=postgresql://localhost:5432/test alembic upgrade head`
-- replacing the `DB_URI` in `test.env` file by `DB_URI=postgresql://localhost:5432/test`

## Run the code locally
Install npm packages
@ -157,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
## Pull request
-The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
+The code is formatted using https://github.com/psf/black, to format the code, simply run
```
-poetry run ruff format .
+poetry run black .
```
The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

View File

@ -46,8 +46,7 @@ class SLModelView(sqla.ModelView):
    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
-       flash("You don't have access to the admin page", "error")
-       return redirect(url_for("dashboard.index", next=request.url))
+       return redirect(url_for("auth.login", next=request.url))

    def on_model_change(self, form, model, is_created):
        changes = {}

View File

@ -25,12 +25,6 @@ from app.email_utils import (
    render,
)
from app.errors import AliasInTrashError
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import (
-    AliasDeleted,
-    AliasStatusChanged,
-    EventContent,
-)
from app.log import LOG
from app.models import (
    Alias,
@ -314,36 +308,31 @@ def delete_alias(alias: Alias, user: User):
    Delete an alias and add it to either global or domain trash
    Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
    """
-   LOG.i(f"User {user} has deleted alias {alias}")
    # save deleted alias to either global or domain trash
    if alias.custom_domain_id:
        if not DomainDeletedAlias.get_by(
            email=alias.email, domain_id=alias.custom_domain_id
        ):
-           domain_deleted_alias = DomainDeletedAlias(
-               user_id=user.id,
-               email=alias.email,
-               domain_id=alias.custom_domain_id,
-           )
-           Session.add(domain_deleted_alias)
+           LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
+           Session.add(
+               DomainDeletedAlias(
+                   user_id=user.id,
+                   email=alias.email,
+                   domain_id=alias.custom_domain_id,
+               )
+           )
            Session.commit()
-           LOG.i(
-               f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
-           )
    else:
        if not DeletedAlias.get_by(email=alias.email):
-           deleted_alias = DeletedAlias(email=alias.email)
-           Session.add(deleted_alias)
+           LOG.d("add %s to global trash", alias)
+           Session.add(DeletedAlias(email=alias.email))
            Session.commit()
-           LOG.i(f"Moving {alias} to global trash {deleted_alias}")

+   LOG.i("delete alias %s", alias)
    Alias.filter(Alias.id == alias.id).delete()
    Session.commit()

-   EventDispatcher.send_event(
-       user, EventContent(alias_deleted=AliasDeleted(alias_id=alias.id))
-   )

def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
    """
@ -469,16 +458,3 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
    alias.pinned = False
    Session.commit()

-def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
-    LOG.i(f"Changing alias {alias} enabled to {enabled}")
-    alias.enabled = enabled
-
-    event = AliasStatusChanged(
-        alias_id=alias.id, alias_email=alias.email, enabled=enabled
-    )
-    EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
-
-    if commit:
-        Session.commit()

View File

@ -25,7 +25,6 @@ from app.errors import (
    ErrAddressInvalid,
)
from app.extensions import limiter
-from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasMailbox
@ -185,8 +184,7 @@ def toggle_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

-   alias_utils.change_alias_status(alias, enabled=not alias.enabled)
-   LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
+   alias.enabled = not alias.enabled
    Session.commit()

    return jsonify(enabled=alias.enabled), 200

View File

@ -3,13 +3,11 @@ from flask_login import login_user
from app.auth.base import auth_bp
from app.db import Session
-from app.extensions import limiter
from app.log import LOG
from app.models import EmailChange, ResetPasswordCode


@auth_bp.route("/change_email", methods=["GET", "POST"])
-@limiter.limit("3/hour")
def change_email():
    code = request.args.get("code")

View File

@ -1,13 +1,14 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session
-import requests

from app import config
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import (
    URL,
+   OIDC_AUTHORIZATION_URL,
+   OIDC_USER_INFO_URL,
+   OIDC_TOKEN_URL,
    OIDC_SCOPES,
    OIDC_NAME_FIELD,
)
@ -15,15 +16,14 @@ from app.db import Session
from app.email_utils import send_welcome_email
from app.log import LOG
from app.models import User, SocialAuth
-from app.utils import sanitize_email, sanitize_next_url
+from app.utils import encode_url, sanitize_email, sanitize_next_url

# need to set explicitly redirect_uri instead of leaving the lib to pre-fill redirect_uri
# when served behind nginx, the redirect_uri is localhost... and not the real url
-redirect_uri = URL + "/auth/oidc/callback"
+_redirect_uri = URL + "/auth/oidc/callback"

SESSION_STATE_KEY = "oauth_state"
-SESSION_NEXT_KEY = "oauth_redirect_next"

@auth_bp.route("/oidc/login")
@ -32,17 +32,18 @@ def oidc_login():
return redirect(url_for("auth.login")) return redirect(url_for("auth.login"))
next_url = sanitize_next_url(request.args.get("next")) next_url = sanitize_next_url(request.args.get("next"))
if next_url:
auth_url = requests.get(config.OIDC_WELL_KNOWN_URL).json()["authorization_endpoint"] redirect_uri = _redirect_uri + "?next=" + encode_url(next_url)
else:
redirect_uri = _redirect_uri
oidc = OAuth2Session( oidc = OAuth2Session(
config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri
) )
authorization_url, state = oidc.authorization_url(auth_url) authorization_url, state = oidc.authorization_url(OIDC_AUTHORIZATION_URL)
# State is used to prevent CSRF, keep this for later. # State is used to prevent CSRF, keep this for later.
session[SESSION_STATE_KEY] = state session[SESSION_STATE_KEY] = state
session[SESSION_NEXT_KEY] = next_url
return redirect(authorization_url) return redirect(authorization_url)
@ -59,23 +60,19 @@ def oidc_callback():
flash("Please use another sign in method then", "warning") flash("Please use another sign in method then", "warning")
return redirect("/") return redirect("/")
oidc_configuration = requests.get(config.OIDC_WELL_KNOWN_URL).json()
user_info_url = oidc_configuration["userinfo_endpoint"]
token_url = oidc_configuration["token_endpoint"]
oidc = OAuth2Session( oidc = OAuth2Session(
config.OIDC_CLIENT_ID, config.OIDC_CLIENT_ID,
state=session[SESSION_STATE_KEY], state=session[SESSION_STATE_KEY],
scope=[OIDC_SCOPES], scope=[OIDC_SCOPES],
redirect_uri=redirect_uri, redirect_uri=_redirect_uri,
) )
oidc.fetch_token( oidc.fetch_token(
token_url, OIDC_TOKEN_URL,
client_secret=config.OIDC_CLIENT_SECRET, client_secret=config.OIDC_CLIENT_SECRET,
authorization_response=request.url, authorization_response=request.url,
) )
oidc_user_data = oidc.get(user_info_url) oidc_user_data = oidc.get(OIDC_USER_INFO_URL)
if oidc_user_data.status_code != 200: if oidc_user_data.status_code != 200:
LOG.e( LOG.e(
f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}" f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}"
@ -114,8 +111,7 @@ def oidc_callback():
    Session.commit()

    # The activation link contains the original page, for ex authorize page
-   next_url = session[SESSION_NEXT_KEY]
-   session[SESSION_NEXT_KEY] = None
+   next_url = sanitize_next_url(request.args.get("next")) if request.args else None

    return after_login(user, next_url)
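
For context, the master side resolves all three OIDC endpoints at runtime from the provider's discovery document instead of the separate OIDC_*_URL settings used in v4.41.0. A minimal sketch of that discovery step (the provider URL below is illustrative, not from the repo):

```python
import requests

# Hypothetical issuer; OIDC_WELL_KNOWN_URL normally points at such a document.
well_known_url = "https://provider.example/.well-known/openid-configuration"
oidc_configuration = requests.get(well_known_url, timeout=10).json()

authorization_endpoint = oidc_configuration["authorization_endpoint"]  # used by oidc_login()
token_endpoint = oidc_configuration["token_endpoint"]                   # used by oidc.fetch_token()
userinfo_endpoint = oidc_configuration["userinfo_endpoint"]             # used by oidc.get()
```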

View File

@ -245,7 +245,9 @@ FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")
CONNECT_WITH_OIDC_ICON = os.environ.get("CONNECT_WITH_OIDC_ICON")
-OIDC_WELL_KNOWN_URL = os.environ.get("OIDC_WELL_KNOWN_URL")
+OIDC_AUTHORIZATION_URL = os.environ.get("OIDC_AUTHORIZATION_URL")
+OIDC_USER_INFO_URL = os.environ.get("OIDC_USER_INFO_URL")
+OIDC_TOKEN_URL = os.environ.get("OIDC_TOKEN_URL")
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID")
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET")
OIDC_SCOPES = os.environ.get("OIDC_SCOPES")
@ -281,7 +283,6 @@ JOB_DELETE_MAILBOX = "delete-mailbox"
JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
-JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"

# for pagination
PAGE_LIMIT = 20
@ -430,11 +431,9 @@ except Exception:
HIBP_SCAN_INTERVAL_DAYS = 7
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
HIBP_MAX_ALIAS_CHECK = 10_000
-HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
+HIBP_RPM = 100
HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
-KEEP_OLD_DATA_DAYS = 30

POSTMASTER = os.environ.get("POSTMASTER")

# store temporary files, especially for debugging
@ -582,9 +581,3 @@ UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ

-EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
-# We want it disabled by default, so only skip if defined
-EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
-EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ

View File

@ -2,12 +2,10 @@ from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
from app.dashboard.views.enter_sudo import sudo_required
-from app.extensions import limiter


@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
@sudo_required
-@limiter.limit("2/minute")
def alias_export_route():
    return alias_export_csv(current_user)

View File

@ -7,7 +7,6 @@ from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
-from app.extensions import limiter
from app.log import LOG
from app.models import File, BatchImport, Job
from app.utils import random_string, CSRFValidationForm
@ -16,7 +15,6 @@ from app.utils import random_string, CSRFValidationForm
@dashboard_bp.route("/batch_import", methods=["GET", "POST"]) @dashboard_bp.route("/batch_import", methods=["GET", "POST"])
@login_required @login_required
@sudo_required @sudo_required
@limiter.limit("10/minute", methods=["POST"])
def batch_import_route(): def batch_import_route():
# only for users who have custom domains # only for users who have custom domains
if not current_user.verified_custom_domains(): if not current_user.verified_custom_domains():
@ -41,7 +39,7 @@ def batch_import_route():
        return redirect(request.url)
    if len(batch_imports) > 10:
        flash(
-           "You have too many imports already. Please wait until some get cleaned up",
+           "You have too many imports already. Wait until some get cleaned up",
            "error",
        )
        return render_template(

View File

@ -14,7 +14,7 @@ from app.models import PartnerUser, SocialAuth
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url

-_SUDO_GAP = 120
+_SUDO_GAP = 900


class LoginForm(FlaskForm):

View File

@ -141,12 +141,12 @@ def index():
            )
        if request.form.get("form-name") == "delete-alias":
-           LOG.i(f"User {current_user} requested deletion of alias {alias}")
+           LOG.d("delete alias %s", alias)
            email = alias.email
            alias_utils.delete_alias(alias, current_user)
            flash(f"Alias {email} has been deleted", "success")
        elif request.form.get("form-name") == "disable-alias":
-           alias_utils.change_alias_status(alias, enabled=False)
+           alias.enabled = False
            Session.commit()
            flash(f"Alias {alias.email} has been disabled", "success")

View File

@ -11,11 +11,9 @@ from wtforms.fields.html5 import EmailField
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
-from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
-from app.extensions import limiter
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.models import Mailbox
@ -31,8 +29,6 @@ class ChangeEmailForm(FlaskForm):
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"]) @dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required @login_required
@sudo_required
@limiter.limit("20/minute", methods=["POST"])
def mailbox_detail_route(mailbox_id): def mailbox_detail_route(mailbox_id):
mailbox: Mailbox = Mailbox.get(mailbox_id) mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id: if not mailbox or mailbox.user_id != current_user.id:
@ -183,15 +179,8 @@ def mailbox_detail_route(mailbox_id):
        elif request.form.get("form-name") == "toggle-pgp":
            if request.form.get("pgp-enabled") == "on":
-               if mailbox.is_proton():
-                   mailbox.disable_pgp = True
-                   flash(
-                       "Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
-                       "info",
-                   )
-               else:
-                   mailbox.disable_pgp = False
-                   flash(f"PGP is enabled on {mailbox.email}", "info")
+               mailbox.disable_pgp = False
+               flash(f"PGP is enabled on {mailbox.email}", "success")
            else:
                mailbox.disable_pgp = True
                flash(f"PGP is disabled on {mailbox.email}", "info")

View File

@ -227,21 +227,6 @@ def setting():
            Session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))
-       elif request.form.get("form-name") == "enable_data_breach_check":
-           if not current_user.is_premium():
-               flash("Only premium plan can enable data breach monitoring", "warning")
-               return redirect(url_for("dashboard.setting"))
-           choose = request.form.get("enable_data_breach_check")
-           if choose == "on":
-               LOG.i("User {current_user} has enabled data breach monitoring")
-               current_user.enable_data_breach_check = True
-               flash("Data breach monitoring is enabled", "success")
-           else:
-               LOG.i("User {current_user} has disabled data breach monitoring")
-               current_user.enable_data_breach_check = False
-               flash("Data breach monitoring is disabled", "info")
-           Session.commit()
-           return redirect(url_for("dashboard.setting"))
        elif request.form.get("form-name") == "sender-in-ra":
            choose = request.form.get("enable")
            if choose == "on":

View File

@ -8,7 +8,6 @@ from app.db import Session
from flask import redirect, url_for, flash, request, render_template
from flask_login import login_required, current_user

-from app import alias_utils
from app.dashboard.base import dashboard_bp
from app.handler.unsubscribe_encoder import UnsubscribeAction
from app.handler.unsubscribe_handler import UnsubscribeHandler
@ -32,7 +31,7 @@ def unsubscribe(alias_id):
    # automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
    if request.method == "POST":
-       alias_utils.change_alias_status(alias, False)
+       alias.enabled = False
        flash(f"Alias {alias.email} has been blocked", "success")
        Session.commit()

View File

@ -21,7 +21,6 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
RETURN_PATH = "Return-Path"
AUTHENTICATION_RESULTS = "Authentication-Results"
-SL_QUEUE_ID = "X-SL-Queue-Id"

# headers used to DKIM sign in order of preference
DKIM_HEADERS = [

View File

@ -33,7 +33,6 @@ from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from jinja2 import Environment, FileSystemLoader
from sqlalchemy import func
-from flask_login import current_user

from app import config
from app.db import Session
@ -75,16 +74,11 @@ def render(template_name, **kwargs) -> str:
    template = env.get_template(template_name)

-   use_partner_template = False
-   if current_user and current_user.is_authenticated:
-       use_partner_template = current_user.has_used_alias_from_partner()

    return template.render(
        MAX_NB_EMAIL_FREE_PLAN=config.MAX_NB_EMAIL_FREE_PLAN,
        URL=config.URL,
        LANDING_PAGE_URL=config.LANDING_PAGE_URL,
        YEAR=arrow.now().year,
-       USE_PARTNER_TEMPLATE=use_partner_template,
        **kwargs,
    )
@ -500,10 +494,9 @@ def delete_header(msg: Message, header: str):
def sanitize_header(msg: Message, header: str):
    """remove trailing space and remove linebreak from a header"""
-   header_lowercase = header.lower()
    for i in reversed(range(len(msg._headers))):
        header_name = msg._headers[i][0].lower()
-       if header_name == header_lowercase:
+       if header_name == header.lower():
            # msg._headers[i] is a tuple like ('From', 'hey@google.com')
            if msg._headers[i][1]:
                msg._headers[i] = (

View File

View File

@ -1,66 +0,0 @@
from abc import ABC, abstractmethod
from app import config
from app.db import Session
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.models import User, PartnerUser, SyncEvent
from app.proton.utils import get_proton_partner
from typing import Optional
NOTIFICATION_CHANNEL = "simplelogin_sync_events"
class Dispatcher(ABC):
@abstractmethod
def send(self, event: bytes):
pass
class PostgresDispatcher(Dispatcher):
def send(self, event: bytes):
instance = SyncEvent.create(content=event, flush=True)
Session.execute(f"NOTIFY {NOTIFICATION_CHANNEL}, '{instance.id}';")
@staticmethod
def get():
return PostgresDispatcher()
class EventDispatcher:
@staticmethod
def send_event(
user: User,
content: event_pb2.EventContent,
dispatcher: Dispatcher = PostgresDispatcher.get(),
skip_if_webhook_missing: bool = True,
):
if config.EVENT_WEBHOOK_DISABLE:
return
if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
return
partner_user = EventDispatcher.__partner_user(user.id)
if not partner_user:
return
event = event_pb2.Event(
user_id=user.id,
external_user_id=partner_user.external_user_id,
partner_id=partner_user.partner_id,
content=content,
)
serialized = event.SerializeToString()
dispatcher.send(serialized)
@staticmethod
def __partner_user(user_id: int) -> Optional[PartnerUser]:
# Check if the current user has a partner_id
try:
proton_partner_id = get_proton_partner().id
except ProtonPartnerNotSetUp:
return None
# It has. Retrieve the information for the PartnerUser
return PartnerUser.get_by(user_id=user_id, partner_id=proton_partner_id)
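
A small usage sketch of the removed dispatcher. The ConsoleDispatcher below is hypothetical, standing in for PostgresDispatcher in a test setting; the send_event call shape matches the callers shown elsewhere in this diff:

```python
from app.events.event_dispatcher import Dispatcher, EventDispatcher
from app.events.generated.event_pb2 import AliasDeleted, EventContent


class ConsoleDispatcher(Dispatcher):
    """Hypothetical stand-in for PostgresDispatcher: print instead of NOTIFY."""

    def send(self, event: bytes):
        print(f"would store {len(event)} bytes and NOTIFY simplelogin_sync_events")


def notify_alias_deleted(user, alias_id: int) -> None:
    # Skipped automatically when EVENT_WEBHOOK is unset or the user
    # has no Proton partner link, per EventDispatcher.send_event above.
    EventDispatcher.send_event(
        user,
        EventContent(alias_deleted=AliasDeleted(alias_id=alias_id)),
        dispatcher=ConsoleDispatcher(),
    )
```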

View File

@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: event.proto
# Protobuf Python Version: 5.27.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
_runtime_version.Domain.PUBLIC,
5,
27,
0,
'',
'event.proto'
)
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
DESCRIPTOR._loaded_options = None
_globals['_USERPLANCHANGED']._serialized_start=35
_globals['_USERPLANCHANGED']._serialized_end=75
_globals['_USERDELETED']._serialized_start=77
_globals['_USERDELETED']._serialized_end=90
_globals['_ALIASCREATED']._serialized_start=92
_globals['_ALIASCREATED']._serialized_end=182
_globals['_ALIASSTATUSCHANGED']._serialized_start=184
_globals['_ALIASSTATUSCHANGED']._serialized_end=260
_globals['_ALIASDELETED']._serialized_start=262
_globals['_ALIASDELETED']._serialized_end=315
_globals['_ALIASCREATEDLIST']._serialized_start=317
_globals['_ALIASCREATEDLIST']._serialized_end=385
_globals['_EVENTCONTENT']._serialized_start=388
_globals['_EVENTCONTENT']._serialized_end=791
_globals['_EVENT']._serialized_start=793
_globals['_EVENT']._serialized_end=914
# @@protoc_insertion_point(module_scope)

View File

@ -1,80 +0,0 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
DESCRIPTOR: _descriptor.FileDescriptor
class UserPlanChanged(_message.Message):
__slots__ = ("plan_end_time",)
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
plan_end_time: int
def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
class UserDeleted(_message.Message):
__slots__ = ()
def __init__(self) -> None: ...
class AliasCreated(_message.Message):
__slots__ = ("alias_id", "alias_email", "alias_note", "enabled")
ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
ALIAS_NOTE_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
alias_id: int
alias_email: str
alias_note: str
enabled: bool
def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...
class AliasStatusChanged(_message.Message):
__slots__ = ("alias_id", "alias_email", "enabled")
ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
alias_id: int
alias_email: str
enabled: bool
def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., enabled: bool = ...) -> None: ...
class AliasDeleted(_message.Message):
__slots__ = ("alias_id", "alias_email")
ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
alias_id: int
alias_email: str
def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...
class AliasCreatedList(_message.Message):
__slots__ = ("events",)
EVENTS_FIELD_NUMBER: _ClassVar[int]
events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
class EventContent(_message.Message):
__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
USER_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
user_plan_change: UserPlanChanged
user_deleted: UserDeleted
alias_created: AliasCreated
alias_status_change: AliasStatusChanged
alias_deleted: AliasDeleted
alias_create_list: AliasCreatedList
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...
class Event(_message.Message):
__slots__ = ("user_id", "external_user_id", "partner_id", "content")
USER_ID_FIELD_NUMBER: _ClassVar[int]
EXTERNAL_USER_ID_FIELD_NUMBER: _ClassVar[int]
PARTNER_ID_FIELD_NUMBER: _ClassVar[int]
CONTENT_FIELD_NUMBER: _ClassVar[int]
user_id: int
external_user_id: str
partner_id: int
content: EventContent
def __init__(self, user_id: _Optional[int] = ..., external_user_id: _Optional[str] = ..., partner_id: _Optional[int] = ..., content: _Optional[_Union[EventContent, _Mapping]] = ...) -> None: ...
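
For reference, a short sketch of how these generated classes fit together. The values are made up; the serialize/parse round trip is standard protobuf behaviour and matches the bytes handed to Dispatcher.send() and stored in SyncEvent.content later in this diff:

```python
from app.events.generated.event_pb2 import AliasCreated, EventContent

content = EventContent(
    alias_created=AliasCreated(
        alias_id=123,                      # hypothetical values
        alias_email="hello@example.com",
        alias_note="demo",
        enabled=True,
    )
)

payload = content.SerializeToString()       # bytes passed to the dispatcher
decoded = EventContent.FromString(payload)  # parse them back
assert decoded.WhichOneof("content") == "alias_created"
```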

View File

@ -30,9 +30,7 @@ def apply_dmarc_policy_for_forward_phase(
) -> Tuple[Message, Optional[str]]:
    spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
    if not DMARC_CHECK_ENABLED or not spam_result:
-       LOG.i("DMARC check disabled")
        return msg, None

-   LOG.i(f"Spam check result in {spam_result}")
    from_header = get_header_unicode(msg[headers.FROM])
@ -152,10 +150,8 @@ def apply_dmarc_policy_for_reply_phase(
) -> Optional[str]:
    spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
    if not DMARC_CHECK_ENABLED or not spam_result:
-       LOG.i("DMARC check disabled")
        return None

-   LOG.i(f"Spam check result is {spam_result}")
    if spam_result.dmarc not in (
        DmarcCheckResult.quarantine,
        DmarcCheckResult.reject,

View File

@ -5,7 +5,6 @@ from typing import Optional
from aiosmtpd.smtp import Envelope

from app import config
-from app import alias_utils
from app.db import Session
from app.email import headers, status
from app.email_utils import (
@ -102,8 +101,7 @@ class UnsubscribeHandler:
                mailbox.email, alias
            ):
                return status.E509
-           LOG.i(f"User disabled alias {alias} via unsubscribe header")
-           alias_utils.change_alias_status(alias, enabled=False)
+           alias.enabled = False
            Session.commit()
            enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
            for mailbox in alias.mailboxes:

View File

@ -30,10 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
LOG.d("Download file %s from %s", batch_import.file, file_url) LOG.d("Download file %s from %s", batch_import.file, file_url)
r = requests.get(file_url) r = requests.get(file_url)
# Replace invisible character lines = [line.decode("utf-8") for line in r.iter_lines()]
lines = [
line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
]
import_from_csv(batch_import, user, lines) import_from_csv(batch_import, user, lines)

View File

@ -1,40 +0,0 @@
from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
from app.log import LOG
from app.models import User, Alias
def send_alias_creation_events_for_user(
user: User, dispatcher: Dispatcher, chunk_size=50
):
if user.disabled:
LOG.i("User {user} is disabled. Skipping sending events for that user")
return
chunk_size = min(chunk_size, 50)
event_list = []
for alias in (
Alias.yield_per_query(chunk_size)
.filter_by(user_id=user.id)
.order_by(Alias.id.asc())
):
event_list.append(
AliasCreated(
alias_id=alias.id,
alias_email=alias.email,
alias_note=alias.note,
enabled=alias.enabled,
)
)
if len(event_list) >= chunk_size:
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
event_list = []
if len(event_list) > 0:
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
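
A hedged sketch of how the removed "send-alias-creation-events" job (JOB_SEND_ALIAS_CREATION_EVENTS in config.py above) would presumably call this helper; the job-handler wiring here is assumed rather than taken from the repo:

```python
from app.events.event_dispatcher import PostgresDispatcher
from app.models import User


def handle_send_alias_creation_events_job(user_id: int) -> None:
    # Hypothetical job handler: replay all existing aliases of one user as
    # AliasCreatedList events, in chunks of at most 50.
    user = User.get(user_id)
    if user:
        send_alias_creation_events_for_user(
            user, dispatcher=PostgresDispatcher.get(), chunk_size=50
        )
```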

View File

@ -330,7 +330,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
    FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
    FLAG_CREATED_FROM_PARTNER = 1 << 1
    FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
-   FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3

    email = sa.Column(sa.String(256), unique=True, nullable=False)
@ -526,11 +525,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
        sa.Boolean, default=True, nullable=False, server_default="1"
    )

-   # user opted in for data breach check
-   enable_data_breach_check = sa.Column(
-       sa.Boolean, default=False, nullable=False, server_default="0"
-   )

    # bitwise flags. Allow for future expansion
    flags = sa.Column(
        sa.BigInteger,
@ -658,21 +652,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
        return user

-   @classmethod
-   def delete(cls, obj_id, commit=False):
-       # Internal import to avoid global import cycles
-       from app.events.event_dispatcher import EventDispatcher
-       from app.events.generated.event_pb2 import UserDeleted, EventContent
-
-       user: User = cls.get(obj_id)
-       EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()))
-
-       res = super(User, cls).delete(obj_id)
-       if commit:
-           Session.commit()
-
-       return res

    def get_active_subscription(
        self, include_partner_subscription: bool = True
    ) -> Optional[
@ -1154,13 +1133,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
            return True
        return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS

-   def has_used_alias_from_partner(self) -> bool:
-       return (
-           self.flags
-           & (User.FLAG_CREATED_ALIAS_FROM_PARTNER | User.FLAG_CREATED_FROM_PARTNER)
-           > 0
-       )

    def __repr__(self):
        return f"<User {self.id} {self.name} {self.email}>"
@ -1642,24 +1614,6 @@ class Alias(Base, ModelMixin):
        Session.add(new_alias)
        DailyMetric.get_or_create_today_metric().nb_alias += 1

-       # Internal import to avoid global import cycles
-       from app.events.event_dispatcher import EventDispatcher
-       from app.events.generated.event_pb2 import AliasCreated, EventContent
-
-       event = AliasCreated(
-           alias_id=new_alias.id,
-           alias_email=new_alias.email,
-           alias_note=new_alias.note,
-           enabled=True,
-       )
-       EventDispatcher.send_event(user, EventContent(alias_created=event))
-
-       if (
-           new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
-           and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
-       ):
-           user.flags = user.flags | User.FLAG_CREATED_ALIAS_FROM_PARTNER

        if commit:
            Session.commit()
@ -2690,15 +2644,10 @@ class Mailbox(Base, ModelMixin):
        return False

    def nb_alias(self):
-       alias_ids = set(
-           am.alias_id
-           for am in AliasMailbox.filter_by(mailbox_id=self.id).values(
-               AliasMailbox.alias_id
-           )
+       return (
+           AliasMailbox.filter_by(mailbox_id=self.id).count()
+           + Alias.filter_by(mailbox_id=self.id).count()
        )
-       for alias in Alias.filter_by(mailbox_id=self.id).values(Alias.id):
-           alias_ids.add(alias.id)
-       return len(alias_ids)

    def is_proton(self) -> bool:
        if (
@ -2747,15 +2696,12 @@ class Mailbox(Base, ModelMixin):
    @property
    def aliases(self) -> [Alias]:
-       ret = dict(
-           (alias.id, alias) for alias in Alias.filter_by(mailbox_id=self.id).all()
-       )
+       ret = Alias.filter_by(mailbox_id=self.id).all()

        for am in AliasMailbox.filter_by(mailbox_id=self.id):
-           if am.alias_id not in ret:
-               ret[am.alias_id] = am.alias
+           ret.append(am.alias)

-       return list(ret.values())
+       return ret

    @classmethod
    def create(cls, **kw):
@ -2985,7 +2931,11 @@ class RecoveryCode(Base, ModelMixin):
    @classmethod
    def find_by_user_code(cls, user: User, code: str):
        hashed_code = cls._hash_code(code)
-       return cls.get_by(user_id=user.id, code=hashed_code)
+       # TODO: Only return hashed codes once there aren't unhashed codes in the db.
+       found_code = cls.get_by(user_id=user.id, code=hashed_code)
+       if found_code:
+           return found_code
+       return cls.get_by(user_id=user.id, code=code)

    @classmethod
    def empty(cls, user):
@ -3685,52 +3635,3 @@ class ApiToCookieToken(Base, ModelMixin):
        code = secrets.token_urlsafe(32)
        return super().create(code=code, **kwargs)
class SyncEvent(Base, ModelMixin):
"""This model holds the events that need to be sent to the webhook"""
__tablename__ = "sync_event"
content = sa.Column(sa.LargeBinary, unique=False, nullable=False)
taken_time = sa.Column(
ArrowType, default=None, nullable=True, server_default=None, index=True
)
__table_args__ = (
sa.Index("ix_sync_event_created_at", "created_at"),
sa.Index("ix_sync_event_taken_time", "taken_time"),
)
def mark_as_taken(self) -> bool:
sql = """
UPDATE sync_event
SET taken_time = :taken_time
WHERE id = :sync_event_id
AND taken_time IS NULL
"""
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
res = Session.execute(sql, args)
Session.commit()
return res.rowcount > 0
@classmethod
def get_dead_letter(cls, older_than: Arrow) -> [SyncEvent]:
return (
SyncEvent.filter(
(
(
SyncEvent.taken_time.isnot(None)
& (SyncEvent.taken_time < older_than)
)
| (
SyncEvent.taken_time.is_(None)
& (SyncEvent.created_at < older_than)
)
)
)
.order_by(SyncEvent.id)
.limit(100)
.all()
)

View File

@ -2,7 +2,6 @@ from newrelic import agent
from typing import Optional

from app.db import Session
-from app.log import LOG
from app.errors import ProtonPartnerNotSetUp
from app.models import Partner, PartnerUser, User
@ -31,7 +30,6 @@ def perform_proton_account_unlink(current_user: User):
        user_id=current_user.id, partner_id=proton_partner.id
    )
    if partner_user is not None:
-       LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
        PartnerUser.delete(partner_user.id)
    Session.commit()

    agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})

View File

@ -30,9 +30,7 @@ def check_bucket_limit(
    try:
        value = lock_redis.incr(bucket_lock_name, bucket_seconds)
        if value > max_hits:
-           LOG.i(
-               f"Rate limit hit for {lock_name} (bucket id {bucket_id}) -> {value}/{max_hits}"
-           )
+           LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
            newrelic.agent.record_custom_event(
                "BucketRateLimit",
                {"lock_name": lock_name, "bucket_seconds": bucket_seconds},

View File

@ -5,9 +5,19 @@ from typing import Optional
import boto3
import requests

-from app import config
+from app.config import (
+    AWS_REGION,
+    BUCKET,
+    AWS_ACCESS_KEY_ID,
+    AWS_SECRET_ACCESS_KEY,
+    LOCAL_FILE_UPLOAD,
+    UPLOAD_DIR,
+    URL,
+    AWS_ENDPOINT_URL,
+)
from app.log import LOG

_s3_client = None
@ -15,12 +25,12 @@ def _get_s3client():
    global _s3_client
    if _s3_client is None:
        args = {
-           "aws_access_key_id": config.AWS_ACCESS_KEY_ID,
-           "aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
-           "region_name": config.AWS_REGION,
+           "aws_access_key_id": AWS_ACCESS_KEY_ID,
+           "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
+           "region_name": AWS_REGION,
        }
-       if config.AWS_ENDPOINT_URL:
-           args["endpoint_url"] = config.AWS_ENDPOINT_URL
+       if AWS_ENDPOINT_URL:
+           args["endpoint_url"] = AWS_ENDPOINT_URL
        _s3_client = boto3.client("s3", **args)
    return _s3_client
@ -28,8 +38,8 @@ def _get_s3client():
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
    bs.seek(0)

-   if config.LOCAL_FILE_UPLOAD:
-       file_path = os.path.join(config.UPLOAD_DIR, key)
+   if LOCAL_FILE_UPLOAD:
+       file_path = os.path.join(UPLOAD_DIR, key)
        file_dir = os.path.dirname(file_path)
        os.makedirs(file_dir, exist_ok=True)
        with open(file_path, "wb") as f:
@ -37,7 +47,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
    else:
        _get_s3client().put_object(
-           Bucket=config.BUCKET,
+           Bucket=BUCKET,
            Key=key,
            Body=bs,
            ContentType=content_type,
@ -47,8 +57,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
    bs.seek(0)

-   if config.LOCAL_FILE_UPLOAD:
-       file_path = os.path.join(config.UPLOAD_DIR, path)
+   if LOCAL_FILE_UPLOAD:
+       file_path = os.path.join(UPLOAD_DIR, path)
        file_dir = os.path.dirname(file_path)
        os.makedirs(file_dir, exist_ok=True)
        with open(file_path, "wb") as f:
@ -56,7 +66,7 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
    else:
        _get_s3client().put_object(
-           Bucket=config.BUCKET,
+           Bucket=BUCKET,
            Key=path,
            Body=bs,
            # Support saving a remote file using Http header
@ -67,12 +77,12 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
def download_email(path: str) -> Optional[str]:
-   if config.LOCAL_FILE_UPLOAD:
-       file_path = os.path.join(config.UPLOAD_DIR, path)
+   if LOCAL_FILE_UPLOAD:
+       file_path = os.path.join(UPLOAD_DIR, path)
        with open(file_path, "rb") as f:
            return f.read()
    resp = _get_s3client().get_object(
-       Bucket=config.BUCKET,
+       Bucket=BUCKET,
        Key=path,
    )
    if not resp or "Body" not in resp:
@ -86,30 +96,29 @@ def upload_from_url(url: str, upload_path):
def get_url(key: str, expires_in=3600) -> str:
-   if config.LOCAL_FILE_UPLOAD:
-       return config.URL + "/static/upload/" + key
+   if LOCAL_FILE_UPLOAD:
+       return URL + "/static/upload/" + key
    else:
        return _get_s3client().generate_presigned_url(
            ExpiresIn=expires_in,
            ClientMethod="get_object",
-           Params={"Bucket": config.BUCKET, "Key": key},
+           Params={"Bucket": BUCKET, "Key": key},
        )


def delete(path: str):
-   if config.LOCAL_FILE_UPLOAD:
-       file_path = os.path.join(config.UPLOAD_DIR, path)
-       os.remove(file_path)
+   if LOCAL_FILE_UPLOAD:
+       os.remove(os.path.join(UPLOAD_DIR, path))
    else:
-       _get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
+       _get_s3client().delete_object(Bucket=BUCKET, Key=path)


def create_bucket_if_not_exists():
    s3client = _get_s3client()
    buckets = s3client.list_buckets()
    for bucket in buckets["Buckets"]:
-       if bucket["Name"] == config.BUCKET:
+       if bucket["Name"] == BUCKET:
            LOG.i("Bucket already exists")
            return

-   s3client.create_bucket(Bucket=config.BUCKET)
-   LOG.i(f"Bucket {config.BUCKET} created")
+   s3client.create_bucket(Bucket=BUCKET)
+   LOG.i(f"Bucket {BUCKET} created")

View File

@ -2,8 +2,6 @@ import requests
from requests import RequestException

from app import config
-from app.events.event_dispatcher import EventDispatcher
-from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User
@ -33,6 +31,3 @@ def execute_subscription_webhook(user: User):
        )
    except RequestException as e:
        LOG.error(f"Subscription request exception: {e}")

-   event = UserPlanChanged(plan_end_time=sl_subscription_end)
-   EventDispatcher.send_event(user, EventContent(user_plan_change=event))

cron.py
View File

@ -5,7 +5,7 @@ from typing import List, Tuple
import arrow
import requests
-from sqlalchemy import func, desc, or_, and_
+from sqlalchemy import func, desc, or_, and_, nullsfirst
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import joinedload
from sqlalchemy.orm.exc import ObjectDeletedError
@ -61,9 +61,6 @@ from app.pgp_utils import load_public_key_and_check, PGPException
from app.proton.utils import get_proton_partner
from app.utils import sanitize_email
from server import create_light_app
-from tasks.cleanup_old_imports import cleanup_old_imports
-from tasks.cleanup_old_jobs import cleanup_old_jobs
-from tasks.cleanup_old_notifications import cleanup_old_notifications

DELETE_GRACE_DAYS = 30
@ -979,9 +976,6 @@ async def _hibp_check(api_key, queue):
            continue
        user = alias.user
        if user.disabled or not user.is_paid():
-           # Mark it as hibp done to skip it as if it had been checked
-           alias.hibp_last_check = arrow.utcnow()
-           Session.commit()
            continue

        LOG.d("Checking HIBP for %s", alias)
@ -1036,61 +1030,6 @@ async def _hibp_check(api_key, queue):
        await asyncio.sleep(rate_sleep)
def get_alias_to_check_hibp(
oldest_hibp_allowed: arrow.Arrow,
user_ids_to_skip: list[int],
min_alias_id: int,
max_alias_id: int,
):
now = arrow.now()
alias_query = (
Session.query(Alias)
.join(User, User.id == Alias.user_id)
.join(Subscription, User.id == Subscription.user_id, isouter=True)
.join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
.join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
.join(
CoinbaseSubscription,
User.id == CoinbaseSubscription.user_id,
isouter=True,
)
.join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
.join(
PartnerSubscription,
PartnerSubscription.partner_user_id == PartnerUser.id,
isouter=True,
)
.filter(
or_(
Alias.hibp_last_check.is_(None),
Alias.hibp_last_check < oldest_hibp_allowed,
),
Alias.user_id.notin_(user_ids_to_skip),
Alias.enabled,
Alias.id >= min_alias_id,
Alias.id < max_alias_id,
User.disabled == False, # noqa: E712
User.enable_data_breach_check,
or_(
User.lifetime,
ManualSubscription.end_at > now,
Subscription.next_bill_date > now.date(),
AppleSubscription.expires_date > now,
CoinbaseSubscription.end_at > now,
PartnerSubscription.end_at > now,
),
)
)
if config.HIBP_SKIP_PARTNER_ALIAS:
alias_query = alias_query.filter(
Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
)
for alias in (
alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
):
yield alias
async def check_hibp():
    """
    Check all aliases on the HIBP (Have I Been Pwned) API
@ -1117,43 +1056,43 @@ async def check_hibp():
    user_ids = [row[0] for row in rows]
    LOG.d("Got %d users to skip" % len(user_ids))

-   LOG.d("Checking aliases")
+   LOG.d("Preparing list of aliases to check")
    queue = asyncio.Queue()
-   min_alias_id = 0
-   max_alias_id = Session.query(func.max(Alias.id)).scalar()
-   step = 10000
-   now = arrow.now()
-   oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
-   alias_checked = 0
-   for alias_batch_id in range(min_alias_id, max_alias_id, step):
-       for alias in get_alias_to_check_hibp(
-           oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
-       ):
-           await queue.put(alias.id)
-
-       alias_checked += queue.qsize()
-       LOG.d(
-           f"Need to check about {queue.qsize()} aliases in this loop {alias_batch_id}/{max_alias_id}"
-       )
+   max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
+   alias_query = Alias.filter(
+       or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date),
+       Alias.user_id.notin_(user_ids),
+       Alias.enabled,
+   )
+   if config.HIBP_SKIP_PARTNER_ALIAS:
+       alias_query = alias_query(Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0)
+   for alias in (
+       alias_query.order_by(nullsfirst(Alias.hibp_last_check.asc()), Alias.id.asc())
+       .yield_per(500)
+       .enable_eagerloads(False)
+   ):
+       await queue.put(alias.id)
+
+   LOG.d("Need to check about %s aliases", queue.qsize())

    # Start one checking process per API key
    # Each checking process will take one alias from the queue, get the info
    # and then sleep for 1.5 seconds (due to HIBP API request limits)
    checkers = []
    for i in range(len(config.HIBP_API_KEYS)):
        checker = asyncio.create_task(
            _hibp_check(
                config.HIBP_API_KEYS[i],
                queue,
-               )
-           )
-           checkers.append(checker)
+           )
+       )
+       checkers.append(checker)

    # Wait until all checking processes are done
    for checker in checkers:
        await checker

-   LOG.d(f"Done checking {alias_checked} HIBP API for aliases in breaches")
+   LOG.d("Done checking HIBP API for aliases in breaches")
def notify_hibp():
@ -1225,13 +1164,6 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
        Session.commit()

-def delete_old_data():
-    oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
-    cleanup_old_imports(oldest_valid)
-    cleanup_old_jobs(oldest_valid)
-    cleanup_old_notifications(oldest_valid)
if __name__ == "__main__":
    LOG.d("Start running cronjob")
    parser = argparse.ArgumentParser()
@ -1246,7 +1178,6 @@ if __name__ == "__main__":
"notify_manual_subscription_end", "notify_manual_subscription_end",
"notify_premium_end", "notify_premium_end",
"delete_logs", "delete_logs",
"delete_old_data",
"poll_apple_subscription", "poll_apple_subscription",
"sanity_check", "sanity_check",
"delete_old_monitoring", "delete_old_monitoring",
@ -1275,9 +1206,6 @@ if __name__ == "__main__":
elif args.job == "delete_logs": elif args.job == "delete_logs":
LOG.d("Deleted Logs") LOG.d("Deleted Logs")
delete_logs() delete_logs()
elif args.job == "delete_old_data":
LOG.d("Delete old data")
delete_old_data()
elif args.job == "poll_apple_subscription": elif args.job == "poll_apple_subscription":
LOG.d("Poll Apple Subscriptions") LOG.d("Poll Apple Subscriptions")
poll_apple_subscription() poll_apple_subscription()

View File

@ -37,12 +37,6 @@ jobs:
schedule: "15 5 * * *" schedule: "15 5 * * *"
captureStderr: true captureStderr: true
- name: SimpleLogin Delete Old data
command: python /code/cron.py -j delete_old_data
shell: /bin/bash
schedule: "30 5 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions - name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash shell: /bin/bash

View File

@@ -53,7 +53,7 @@ from flanker.addresslib.address import EmailAddress
from sqlalchemy.exc import IntegrityError

from app import pgp_utils, s3, config
-from app.alias_utils import try_auto_create, change_alias_status
+from app.alias_utils import try_auto_create
from app.config import (
    EMAIL_DOMAIN,
    URL,

@@ -875,7 +875,6 @@ def forward_email_to_mailbox(
            # References and In-Reply-To are used for keeping the email thread
            headers.REFERENCES,
            headers.IN_REPLY_TO,
-            headers.SL_QUEUE_ID,
            headers.LIST_UNSUBSCRIBE,
            headers.LIST_UNSUBSCRIBE_POST,
        ] + headers.MIME_HEADERS

@@ -1180,7 +1179,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
            # References and In-Reply-To are used for keeping the email thread
            headers.REFERENCES,
            headers.IN_REPLY_TO,
-            headers.SL_QUEUE_ID,
        ]
        + headers.MIME_HEADERS,
    )

@@ -1585,7 +1583,7 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
        LOG.w(
            f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
        )
-        change_alias_status(alias, enabled=False)
+        alias.enabled = False

        Notification.create(
            user_id=user.id,

@@ -2042,11 +2040,10 @@ def handle(envelope: Envelope, msg: Message) -> str:
        return status.E204

    # sanitize email headers
-    sanitize_header(msg, headers.FROM)
-    sanitize_header(msg, headers.TO)
-    sanitize_header(msg, headers.CC)
-    sanitize_header(msg, headers.REPLY_TO)
-    sanitize_header(msg, headers.MESSAGE_ID)
+    sanitize_header(msg, "from")
+    sanitize_header(msg, "to")
+    sanitize_header(msg, "cc")
+    sanitize_header(msg, "reply-to")

    LOG.d(
        "==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "

View File

@ -1,64 +0,0 @@
import argparse
from enum import Enum
from sys import argv, exit
from app.config import DB_URI
from app.log import LOG
from events.runner import Runner
from events.event_source import DeadLetterEventSource, PostgresEventSource
from events.event_sink import ConsoleEventSink, HttpEventSink
class Mode(Enum):
DEAD_LETTER = "dead_letter"
LISTENER = "listener"
@staticmethod
def from_str(value: str):
if value == Mode.DEAD_LETTER.value:
return Mode.DEAD_LETTER
elif value == Mode.LISTENER.value:
return Mode.LISTENER
else:
raise ValueError(f"Invalid mode: {value}")
def main(mode: Mode, dry_run: bool):
if mode == Mode.DEAD_LETTER:
LOG.i("Using DeadLetterEventSource")
source = DeadLetterEventSource()
elif mode == Mode.LISTENER:
LOG.i("Using PostgresEventSource")
source = PostgresEventSource(DB_URI)
else:
raise ValueError(f"Invalid mode: {mode}")
if dry_run:
LOG.i("Starting with ConsoleEventSink")
sink = ConsoleEventSink()
else:
LOG.i("Starting with HttpEventSink")
sink = HttpEventSink()
runner = Runner(source=source, sink=sink)
runner.run()
def args():
parser = argparse.ArgumentParser(description="Run event listener")
parser.add_argument(
"mode",
help="Mode to run",
choices=[Mode.DEAD_LETTER.value, Mode.LISTENER.value],
)
parser.add_argument("--dry-run", help="Dry run mode", action="store_true")
return parser.parse_args()
if __name__ == "__main__":
if len(argv) < 2:
print("Invalid usage. Pass 'listener' or 'dead_letter' as argument")
exit(1)
args = args()
main(Mode.from_str(args.mode), args.dry_run)
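For reference, the removed entry point above is driven from the command line; a minimal sketch of invoking its two modes, assuming the module is saved as event_listener.py (this diff does not show the actual file name):

# Hypothetical invocation of the listener entry point shown above;
# "event_listener.py" is an assumed path, not confirmed by this diff.
import subprocess
import sys

# LISTEN on Postgres and forward each sync_event to the configured sink:
subprocess.run([sys.executable, "event_listener.py", "listener"], check=True)

# Re-process events stuck past the dead-letter threshold, logging only
# (--dry-run selects ConsoleEventSink instead of HttpEventSink):
subprocess.run([sys.executable, "event_listener.py", "dead_letter", "--dry-run"], check=True)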

View File

View File

@ -1,42 +0,0 @@
import requests
from abc import ABC, abstractmethod
from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
from app.log import LOG
from app.models import SyncEvent
class EventSink(ABC):
@abstractmethod
def process(self, event: SyncEvent) -> bool:
pass
class HttpEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
if not EVENT_WEBHOOK:
LOG.warning("Skipping sending event because there is no webhook configured")
return False
LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")
res = requests.post(
url=EVENT_WEBHOOK,
data=event.content,
headers={"Content-Type": "application/x-protobuf"},
verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
)
if res.status_code != 200:
LOG.warning(
f"Failed to send event to webhook: {res.status_code} {res.text}"
)
return False
else:
LOG.info(f"Event {event.id} sent successfully to webhook")
return True
class ConsoleEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
LOG.info(f"Handling event {event.id}")
return True

View File

@ -1,100 +0,0 @@
import arrow
import newrelic.agent
import psycopg2
import select
from abc import ABC, abstractmethod
from app.log import LOG
from app.models import SyncEvent
from app.events.event_dispatcher import NOTIFICATION_CHANNEL
from time import sleep
from typing import Callable, NoReturn
_DEAD_LETTER_THRESHOLD_MINUTES = 10
_DEAD_LETTER_INTERVAL_SECONDS = 30
_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5
class EventSource(ABC):
@abstractmethod
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
pass
class PostgresEventSource(EventSource):
def __init__(self, connection_string: str):
self.__connection_string = connection_string
self.__connect()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
self.__listen(on_event)
except Exception as e:
LOG.warn(f"Error listening to events: {e}")
sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
self.__connect()
def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
self.__connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
)
cursor = self.__connection.cursor()
cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")
while True:
if select.select([self.__connection], [], [], 5) != ([], [], []):
self.__connection.poll()
while self.__connection.notifies:
notify = self.__connection.notifies.pop(0)
LOG.debug(
f"Got NOTIFY: pid={notify.pid} channel={notify.channel} payload={notify.payload}"
)
try:
webhook_id = int(notify.payload)
event = SyncEvent.get_by(id=webhook_id)
if event is not None:
if event.mark_as_taken():
on_event(event)
else:
LOG.info(
f"Event {event.id} was handled by another runner"
)
else:
LOG.info(f"Could not find event with id={notify.payload}")
except Exception as e:
LOG.warn(f"Error getting event: {e}")
def __connect(self):
self.__connection = psycopg2.connect(self.__connection_string)
from app.db import Session
Session.close()
class DeadLetterEventSource(EventSource):
@newrelic.agent.background_task()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
threshold = arrow.utcnow().shift(
minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
)
events = SyncEvent.get_dead_letter(older_than=threshold)
if events:
LOG.info(f"Got {len(events)} dead letter events")
if events:
newrelic.agent.record_custom_metric(
"Custom/dead_letter_events_to_process", len(events)
)
for event in events:
on_event(event)
else:
LOG.debug("No dead letter events")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
except Exception as e:
LOG.warn(f"Error getting dead letter event: {e}")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
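To exercise the LISTEN loop above locally, something has to NOTIFY on the same channel with a sync_event id as payload (that is how the handler parses it). A minimal sketch, assuming a local connection string, an existing sync_event row with id 42, and "simplelogin_sync_events" standing in for NOTIFICATION_CHANNEL, whose real value lives in app.events.event_dispatcher and is not shown in this diff:

import psycopg2

# Assumptions: connection string, channel name, and row id are illustrative only.
conn = psycopg2.connect("postgresql://localhost:5432/simplelogin")
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
with conn.cursor() as cur:
    cur.execute("NOTIFY simplelogin_sync_events, '42';")
conn.close()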

View File

@ -1,42 +0,0 @@
import arrow
import newrelic.agent
from app.log import LOG
from app.models import SyncEvent
from events.event_sink import EventSink
from events.event_source import EventSource
class Runner:
def __init__(self, source: EventSource, sink: EventSink):
self.__source = source
self.__sink = sink
def run(self):
self.__source.run(self.__on_event)
@newrelic.agent.background_task()
def __on_event(self, event: SyncEvent):
try:
event_created_at = event.created_at
start_time = arrow.now()
success = self.__sink.process(event)
if success:
event_id = event.id
SyncEvent.delete(event.id, commit=True)
LOG.info(f"Marked {event_id} as done")
end_time = arrow.now() - start_time
time_between_taken_and_created = start_time - event_created_at
newrelic.agent.record_custom_metric("Custom/sync_event_processed", 1)
newrelic.agent.record_custom_metric(
"Custom/sync_event_process_time", end_time.total_seconds()
)
newrelic.agent.record_custom_metric(
"Custom/sync_event_elapsed_time",
time_between_taken_and_created.total_seconds(),
)
except Exception as e:
LOG.warn(f"Exception processing event [id={event.id}]: {e}")
newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)

View File

@@ -118,7 +118,9 @@ WORDS_FILE_PATH=local_data/test_words.txt

# Login with OIDC
# CONNECT_WITH_OIDC_ICON=fa-github
-# OIDC_WELL_KNOWN_URL=to_fill
+# OIDC_AUTHORIZATION_URL=to_fill
+# OIDC_USER_INFO_URL=to_fill
+# OIDC_TOKEN_URL=to_fill
# OIDC_SCOPES=openid email profile
# OIDC_NAME_FIELD=name
# OIDC_CLIENT_ID=to_fill

View File

@@ -15,7 +15,6 @@ from app.email_utils import (
    render,
)
from app.import_utils import handle_batch_import
-from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState

@@ -198,18 +197,13 @@ def process_job(job: Job):
        onboarding_mailbox(user)
    elif job.name == config.JOB_ONBOARDING_4:
        user_id = job.payload.get("user_id")
-        user: User = User.get(user_id)
+        user = User.get(user_id)
        # user might delete their account in the meantime
        # or disable the notification
        if user and user.notification and user.activated:
-            # if user only has 1 mailbox which is Proton then do not send PGP onboarding email
-            mailboxes = user.mailboxes()
-            if len(mailboxes) == 1 and mailboxes[0].is_proton():
-                LOG.d("Do not send onboarding PGP email to Proton mailbox")
-            else:
-                LOG.d("send onboarding pgp email to user %s", user)
-                onboarding_pgp(user)
+            LOG.d("send onboarding pgp email to user %s", user)
+            onboarding_pgp(user)

    elif job.name == config.JOB_BATCH_IMPORT:
        batch_import_id = job.payload.get("batch_import_id")

@@ -270,14 +264,8 @@ SimpleLogin team.
        user_id = job.payload.get("user_id")
        user = User.get(user_id)
        if user and user.activated:
-            LOG.d("Send proton welcome email to user %s", user)
+            LOG.d("send proton welcome email to user %s", user)
            welcome_proton(user)

-    elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
-        user_id = job.payload.get("user_id")
-        user = User.get(user_id)
-        if user and user.activated:
-            LOG.d(f"Sending alias creation events for {user}")
-            send_alias_creation_events_for_user(user)
    else:
        LOG.e("Unknown job name %s", job.name)

View File

@ -1,29 +0,0 @@
"""empty message
Revision ID: fa2f19bb4e5a
Revises: 52510a633d6f
Create Date: 2024-04-09 13:12:26.305340
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fa2f19bb4e5a'
down_revision = '52510a633d6f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('enable_data_breach_check', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'enable_data_breach_check')
# ### end Alembic commands ###

View File

@ -1,38 +0,0 @@
"""Create sync_event table
Revision ID: 06a9a7133445
Revises: fa2f19bb4e5a
Create Date: 2024-05-17 13:11:20.402259
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '06a9a7133445'
down_revision = 'fa2f19bb4e5a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('sync_event',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.Column('content', sa.LargeBinary(), nullable=False),
sa.Column('taken_time', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_sync_event_created_at'), 'sync_event', ['created_at'], unique=False)
op.create_index(op.f('ix_sync_event_taken_time'), 'sync_event', ['taken_time'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sync_event')
# ### end Alembic commands ###

View File

@@ -4,7 +4,6 @@ import subprocess
from time import sleep
from typing import List, Dict

-import arrow
import newrelic.agent

from app.db import Session

@@ -94,44 +93,11 @@ def log_nb_db_connection():
    newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)
@newrelic.agent.background_task()
def log_pending_to_process_events():
r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
events_pending = list(r)[0][0]
LOG.d("number of events pending to process %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_to_process", events_pending
)
@newrelic.agent.background_task()
def log_events_pending_dead_letter():
since = arrow.now().shift(minutes=-10).datetime
r = Session.execute(
"""
SELECT COUNT(*)
FROM sync_event
WHERE (taken_time IS NOT NULL AND taken_time < :since)
OR (taken_time IS NULL AND created_at < :since)
""",
{"since": since},
)
events_pending = list(r)[0][0]
LOG.d("number of events pending dead letter %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_dead_letter", events_pending
)
if __name__ == "__main__":
    exporter = MetricExporter(get_newrelic_license())
    while True:
        log_postfix_metrics()
        log_nb_db_connection()
-        log_pending_to_process_events()
-        log_events_pending_dead_letter()
        Session.close()

        exporter.run()

View File

@ -1,37 +0,0 @@
#!/usr/bin/env python3
import argparse
import random
import time
from sqlalchemy import func
from app import config
from app.models import Alias, Contact
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
max_alias_id: int = Session.query(func.max(Alias.id)).scalar()
start = time.time()
tests = 1000
for i in range(tests):
alias = (
Alias.filter(Alias.id > int(random.random() * max_alias_id))
.order_by(Alias.id.asc())
.limit(1)
.first()
)
contact = Contact.filter_by(alias_id=alias.id).order_by(Contact.id.asc()).first()
mailboxes = alias.mailboxes
user = alias.user
if i % 10:
print("{i} -> {alias.id}")
end = time.time()
time_taken = end - start
print(f"Took {time_taken} -> {time_taken/tests} per test")

View File

@@ -1,56 +1,29 @@
#!/usr/bin/env python3
import argparse
-import time
-
-from sqlalchemy import func
+from app.log import LOG
from app.models import Alias, SLDomain
from app.db import Session

parser = argparse.ArgumentParser(
    prog="Mark partner created aliases with the PARTNER_CREATED flag",
)
-parser.add_argument(
-    "-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
-)
-parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")

args = parser.parse_args()

-alias_id_start = args.start_alias_id
-max_alias_id = args.end_alias_id
-if max_alias_id == 0:
-    max_alias_id = Session.query(func.max(Alias.id)).scalar()
-
-print(f"Updating aliases from {alias_id_start} to {max_alias_id}")

domains = SLDomain.filter(SLDomain.partner_id.isnot(None)).all()
-cond = [f"email like '%{domain.domain}'" for domain in domains]
-sql_or_cond = " OR ".join(cond)
-sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})"
-print(sql)
-
-step = 1000
-updated = 0
-start_time = time.time()
-for batch_start in range(alias_id_start, max_alias_id, step):
-    updated += Session.execute(
-        sql,
-        {
-            "start": batch_start,
-            "end": batch_start + step,
-            "flag": Alias.FLAG_PARTNER_CREATED,
-        },
-    ).rowcount
-    elapsed = time.time() - start_time
-    time_per_alias = elapsed / (batch_start - alias_id_start + step)
-    last_batch_id = batch_start + step
-    remaining = max_alias_id - last_batch_id
-    time_remaining = (max_alias_id - last_batch_id) * time_per_alias
-    hours_remaining = time_remaining / 3600.0
-    percent = int(
-        ((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start)
-    )
-    print(
-        f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining"
-    )
-print(f"Updated aliases up to {max_alias_id}")
+for domain in domains:
+    LOG.i(f"Checking aliases for domain {domain.domain}")
+    for alias in (
+        Alias.filter(
+            Alias.email.like(f"%{domain.domain}"),
+            Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0,
+        )
+        .enable_eagerloads(False)
+        .yield_per(100)
+        .all()
+    ):
+        alias.flags = alias.flags | Alias.FLAG_PARTNER_CREATED
+        LOG.i(f"  * Updating {alias.email} to {alias.flags}")
+        Session.commit()

View File

@ -1,55 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias, User
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Backfill user flags for partner alias created"
)
parser.add_argument(
"-s", "--start_user_id", default=0, type=int, help="Initial user_id"
)
parser.add_argument("-e", "--end_user_id", default=0, type=int, help="Last user_id")
args = parser.parse_args()
user_id_start = args.start_user_id
max_user_id = args.end_user_id
if max_user_id == 0:
max_user_id = Session.query(func.max(User.id)).scalar()
print(f"Checking user {user_id_start} to {max_user_id}")
step = 1000
el_query = "SELECT user_id, count(id) from alias where user_id>=:start AND user_id < :end AND flags & :alias_flag > 0 GROUP BY user_id"
user_update_query = "UPDATE users set flags = flags | :user_flag where id = :user_id"
updated = 0
start_time = time.time()
for batch_start in range(user_id_start, max_user_id, step):
rows = Session.execute(
el_query,
{
"start": batch_start,
"end": batch_start + step,
"alias_flag": Alias.FLAG_PARTNER_CREATED,
},
)
for row in rows:
if row[1] > 0:
Session.execute(
user_update_query,
{"user_id": row[0], "user_flag": User.FLAG_CREATED_ALIAS_FROM_PARTNER},
)
Session.commit()
updated += 1
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_user_id - last_batch_id
time_remaining = (max_user_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rUser {batch_start}/{max_user_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")

28
poetry.lock generated
View File

@ -2150,22 +2150,24 @@ wcwidth = "*"
[[package]] [[package]]
name = "protobuf" name = "protobuf"
version = "5.27.1" version = "4.24.3"
description = "" description = ""
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.7"
files = [ files = [
{file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"}, {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
{file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"}, {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
{file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"}, {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
{file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"}, {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
{file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"}, {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
{file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"}, {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
{file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"}, {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
{file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"}, {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
{file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"}, {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
{file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"}, {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
{file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"}, {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
{file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
{file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
] ]
[[package]] [[package]]

View File

@ -1,50 +0,0 @@
syntax = "proto3";
package simplelogin_events;
message UserPlanChanged {
uint32 plan_end_time = 1;
}
message UserDeleted {
}
message AliasCreated {
uint32 alias_id = 1;
string alias_email = 2;
string alias_note = 3;
bool enabled = 4;
}
message AliasStatusChanged {
uint32 alias_id = 1;
string alias_email = 2;
bool enabled = 3;
}
message AliasDeleted {
uint32 alias_id = 1;
string alias_email = 2;
}
message AliasCreatedList {
repeated AliasCreated events = 1;
}
message EventContent {
oneof content {
UserPlanChanged user_plan_change = 1;
UserDeleted user_deleted = 2;
AliasCreated alias_created = 3;
AliasStatusChanged alias_status_change = 4;
AliasDeleted alias_deleted = 5;
AliasCreatedList alias_create_list = 6;
}
}
message Event {
uint32 user_id = 1;
string external_user_id = 2;
uint32 partner_id = 3;
EventContent content = 4;
}
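On the receiving end of HttpEventSink, the POST body is raw protobuf bytes. A minimal sketch of a webhook that decodes them, assuming the payload is a serialized Event message (the outermost message above) and that the bindings were generated into an importable event_pb2 module, as the protoc helper script later in this diff does:

from flask import Flask, request

import event_pb2  # assumed import path for the protoc-generated bindings

app = Flask(__name__)

@app.post("/simplelogin/events")
def receive_event():
    # HttpEventSink posts the bytes with Content-Type: application/x-protobuf
    event = event_pb2.Event()
    event.ParseFromString(request.get_data())
    kind = event.content.WhichOneof("content")  # e.g. "alias_created", "user_deleted"
    print(f"user_id={event.user_id} partner_id={event.partner_id} kind={kind}")
    return "", 200

if __name__ == "__main__":
    app.run(port=8000)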

View File

@@ -14,14 +14,13 @@ exclude = '''
  | build
  | dist
  | migrations # migrations/ is generated by alembic
-  | app/events/generated
)/
)
'''

[tool.ruff]
ignore-init-module-imports = true
-exclude = [".venv", "migrations", "app/events/generated"]
+exclude = [".venv", "migrations"]

[tool.djlint]
indent = 2

View File

@ -1,24 +0,0 @@
#!/bin/bash
set -euxo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" || exit 1; pwd -P)"
REPO_ROOT=$(echo "${SCRIPT_DIR}" | sed 's:scripts::g')
DEST_DIR="${REPO_ROOT}/app/events/generated"
PROTOC=${PROTOC:-"protoc"}
if ! eval "${PROTOC} --version" &> /dev/null ; then
echo "Cannot find $PROTOC"
exit 1
fi
rm -rf "${DEST_DIR}"
mkdir -p "${DEST_DIR}"
pushd $REPO_ROOT || exit 1
eval "${PROTOC} --proto_path=proto --python_out=\"${DEST_DIR}\" --pyi_out=\"${DEST_DIR}\" proto/event.proto"
popd || exit 1

static/logo-proton.png (vendored binary, 3.2 KiB): deleted; binary content not shown.

View File

View File

@ -1,19 +0,0 @@
import arrow
from app import s3
from app.log import LOG
from app.models import BatchImport
def cleanup_old_imports(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting imports older than {oldest_allowed}")
for batch_import in (
BatchImport.filter(BatchImport.created_at < oldest_allowed).yield_per(500).all()
):
LOG.i(
f"Deleting batch import {batch_import} with file {batch_import.file.path}"
)
file = batch_import.file
if file is not None:
s3.delete(file.path)
BatchImport.delete(batch_import.id, commit=True)

View File

@ -1,24 +0,0 @@
import arrow
from sqlalchemy import or_, and_
from app import config
from app.db import Session
from app.log import LOG
from app.models import Job, JobState
def cleanup_old_jobs(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting jobs older than {oldest_allowed}")
count = Job.filter(
or_(
Job.state == JobState.done.value,
Job.state == JobState.error.value,
and_(
Job.state == JobState.taken.value,
Job.attempts >= config.JOB_MAX_ATTEMPTS,
),
),
Job.updated_at < oldest_allowed,
).delete()
Session.commit()
LOG.i(f"Deleted {count} jobs")

View File

@ -1,12 +0,0 @@
import arrow
from app.db import Session
from app.log import LOG
from app.models import Notification
def cleanup_old_notifications(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting notifications older than {oldest_allowed}")
count = Notification.filter(Notification.created_at < oldest_allowed).delete()
Session.commit()
LOG.i(f"Deleted {count} notifications")

View File

@ -120,6 +120,21 @@
</div> </div>
</div> </div>
<!-- END WebAuthn --> <!-- END WebAuthn -->
<!-- Alias import/export -->
<div class="card">
<div class="card-body">
<div class="card-title">Alias import/export</div>
<div class="mb-3">
You can import your aliases created on other platforms into SimpleLogin.
You can also export your aliases to a readable csv format for a future batch import.
</div>
<a href="{{ url_for('dashboard.batch_import_route') }}"
class="btn btn-outline-primary">Batch Import</a>
<a href="{{ url_for('dashboard.alias_export_route') }}"
class="btn btn-outline-secondary">Export Aliases</a>
</div>
</div>
<!-- END Alias import/export -->
<!-- data export --> <!-- data export -->
<div class="card"> <div class="card">
<div class="card-body"> <div class="card-body">

View File

@ -249,42 +249,6 @@
</div> </div>
</div> </div>
<!-- END Random alias --> <!-- END Random alias -->
<!-- Data breach check -->
<div class="card" id="data-breach">
<div class="card-body">
<div class="card-title">Data breach monitoring</div>
<div class="mt-1 mb-3">
{% if not current_user.is_premium() %}
<div class="alert alert-info" role="alert">
This feature is only available on Premium plan.
<a href="{{ url_for('dashboard.pricing') }}"
target="_blank"
rel="noopener noreferrer">
Upgrade<i class="fe fe-external-link"></i>
</a>
</div>
{% endif %}
If enabled, we will inform you via email if one of your aliases appears in a data breach.
<br>
SimpleLogin uses <a href="https://haveibeenpwned.com/">HaveIBeenPwned</a> API for checking for data breaches.
</div>
<form method="post" action="#data-breach">
{{ csrf_form.csrf_token }}
<input type="hidden" name="form-name" value="enable_data_breach_check">
<div class="form-check">
<input type="checkbox"
id="enable_data_breach_check"
name="enable_data_breach_check"
{% if current_user.enable_data_breach_check %} checked{% endif %}
class="form-check-input">
<label for="enable_data_breach_check">Enable data breach monitoring</label>
</div>
<button type="submit" class="btn btn-outline-primary">Update</button>
</form>
</div>
</div>
<!-- END Data breach check -->
<!-- Sender Format --> <!-- Sender Format -->
<div class="card" id="sender-format"> <div class="card" id="sender-format">
<div class="card-body"> <div class="card-body">
@ -321,9 +285,7 @@
                  No Name (i.e. only reverse-alias)
                </option>
              </select>
-              <button class="btn btn-outline-primary mt-3">
-                Update
-              </button>
+              <button class="btn btn-outline-primary mt-3">Update</button>
            </form>
          </div>
        </div>
@ -333,9 +295,7 @@
<div class="card-body"> <div class="card-body">
<div class="card-title"> <div class="card-title">
Reverse Alias Replacement Reverse Alias Replacement
<div class="badge badge-warning"> <div class="badge badge-warning">Experimental</div>
Experimental
</div>
</div> </div>
<div class="mb-3"> <div class="mb-3">
When replying to a forwarded email, the <b>reverse-alias</b> can be automatically included When replying to a forwarded email, the <b>reverse-alias</b> can be automatically included
@ -352,13 +312,9 @@
name="replace-ra" name="replace-ra"
{% if current_user.replace_reverse_alias %} checked{% endif %} {% if current_user.replace_reverse_alias %} checked{% endif %}
class="form-check-input"> class="form-check-input">
<label for="replace-ra"> <label for="replace-ra">Enable replacing reverse alias</label>
Enable replacing reverse alias
</label>
</div> </div>
<button type="submit" class="btn btn-outline-primary"> <button type="submit" class="btn btn-outline-primary">Update</button>
Update
</button>
</form> </form>
</div> </div>
</div> </div>
@ -603,7 +559,7 @@
            sender address.
            <br />
            If this option is enabled, the original sender addresses is stored in the email header <b>X-SimpleLogin-Envelope-From</b>
-            and the original From header is stored in <b>X-SimpleLogin-Original-From</b>.
+            and the original From header is stored in <b>X-SimpleLogin-Original-From<b>.
            You can choose to display this header in your email client.
            <br />
            As email headers aren't encrypted, your mailbox service can know the sender address via this header.
@ -627,23 +583,6 @@
</form> </form>
</div> </div>
</div> </div>
<!-- Alias import/export -->
<div class="card">
<div class="card-body">
<div class="card-title">
Alias import/export
</div>
<div class="mb-3">
You can import your aliases created on other platforms into SimpleLogin.
You can also export your aliases to a readable csv format for a future batch import.
</div>
<a href="{{ url_for('dashboard.batch_import_route') }}"
class="btn btn-outline-primary">Batch Import</a>
<a href="{{ url_for('dashboard.alias_export_route') }}"
class="btn btn-outline-secondary">Export Aliases</a>
</div>
</div>
<!-- END Alias import/export -->
</div> </div>
{% endblock %} {% endblock %}
{% block script %} {% block script %}

View File

@@ -1,8 +1,623 @@
-{% if USE_PARTNER_TEMPLATE %}
-  {% extends "base_partner.html" %}
-{% else %}
-  {% extends "base_sl.html" %}
-{% endif %}
{% from "_emailhelpers.html" import render_text, text, render_button, raw_url, grey_section, section %}
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en">
  <head>
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta name="x-apple-disable-message-reformatting" />
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
    <style type="text/css" rel="stylesheet" media="all">
    /* Base ------------------------------ */
    body {
      width: 100% !important;
      height: 100%;
      margin: 0;
      -webkit-text-size-adjust: none;
      line-height: 1.6;
    }
    img {
      max-width: 100%;
    }
    a {
      color: #3869D4;
    }
    a img {
border: none;
}
td {
word-break: break-word;
}
.preheader {
display: none !important;
visibility: hidden;
mso-hide: all;
font-size: 1px;
line-height: 1px;
max-height: 0;
max-width: 0;
opacity: 0;
overflow: hidden;
}
/* Type ------------------------------ */
body,
td,
th {
font-family: "Nunito Sans", Helvetica, Arial, sans-serif;
}
h1 {
margin-top: 0;
color: #333333;
font-size: 22px;
font-weight: bold;
text-align: left;
}
h2 {
margin-top: 0;
color: #333333;
font-size: 16px;
font-weight: bold;
text-align: left;
}
h3 {
margin-top: 0;
color: #333333;
font-size: 14px;
font-weight: bold;
text-align: left;
}
td,
th {
font-size: 16px;
}
p,
ul,
ol,
blockquote {
margin: .4em 0 1.1875em;
font-size: 16px;
line-height: 1.625;
}
p.sub {
font-size: 13px;
}
/* Utilities ------------------------------ */
.align-right {
text-align: right;
}
.align-left {
text-align: left;
}
.align-center {
text-align: center;
}
/* Buttons ------------------------------ */
.button {
background-color: #3869D4;
border-top: 10px solid #3869D4;
border-right: 18px solid #3869D4;
border-bottom: 10px solid #3869D4;
border-left: 18px solid #3869D4;
display: inline-block;
color: #FFF;
text-decoration: none;
border-radius: 3px;
box-shadow: 0 2px 3px rgba(0, 0, 0, 0.16);
-webkit-text-size-adjust: none;
box-sizing: border-box;
}
.button--green {
background-color: #22BC66;
border-top: 10px solid #22BC66;
border-right: 18px solid #22BC66;
border-bottom: 10px solid #22BC66;
border-left: 18px solid #22BC66;
}
.button--red {
background-color: #FF6136;
border-top: 10px solid #FF6136;
border-right: 18px solid #FF6136;
border-bottom: 10px solid #FF6136;
border-left: 18px solid #FF6136;
}
@media only screen and (max-width: 500px) {
.button {
width: 100% !important;
text-align: center !important;
}
}
/* Attribute list ------------------------------ */
.attributes {
margin: 0 0 21px;
}
.attributes_content {
background-color: #F4F4F7;
padding: 16px;
}
.attributes_item {
padding: 0;
}
/* Related Items ------------------------------ */
.related {
width: 100%;
margin: 0;
padding: 25px 0 0 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.related_item {
padding: 10px 0;
color: #CBCCCF;
font-size: 15px;
line-height: 18px;
}
.related_item-title {
display: block;
margin: .5em 0 0;
}
.related_item-thumb {
display: block;
padding-bottom: 10px;
}
.related_heading {
border-top: 1px solid #CBCCCF;
text-align: center;
padding: 25px 0 10px;
}
/* Discount Code ------------------------------ */
.discount {
width: 100%;
margin: 0;
padding: 24px;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #F4F4F7;
border: 2px dashed #CBCCCF;
}
.discount_heading {
text-align: center;
}
.discount_body {
text-align: center;
font-size: 15px;
}
/* Social Icons ------------------------------ */
.social {
width: auto;
}
.social td {
padding: 0;
width: auto;
}
.social_icon {
height: 20px;
margin: 0 8px 10px 8px;
padding: 0;
}
/* Data table ------------------------------ */
.purchase {
width: 100%;
margin: 0;
padding: 35px 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.purchase_content {
width: 100%;
margin: 0;
padding: 25px 0 0 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.purchase_item {
padding: 10px 0;
color: #51545E;
font-size: 15px;
line-height: 18px;
}
.purchase_heading {
padding-bottom: 8px;
border-bottom: 1px solid #EAEAEC;
}
.purchase_heading p {
margin: 0;
color: #85878E;
font-size: 12px;
}
.purchase_footer {
padding-top: 15px;
border-top: 1px solid #EAEAEC;
}
.purchase_total {
margin: 0;
text-align: right;
font-weight: bold;
color: #333333;
}
.purchase_total--label {
padding: 0 15px 0 0;
}
body {
background-color: #F2F4F6;
color: #51545E;
}
p {
color: #51545E;
}
.email-wrapper {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #F2F4F6;
}
.email-content {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
/* Masthead ----------------------- */
.email-masthead {
padding: 25px 0;
text-align: center;
}
.email-masthead_logo {
width: 94px;
}
.email-masthead_name {
font-size: 16px;
font-weight: bold;
color: #A8AAAF;
text-decoration: none;
text-shadow: 0 1px 0 white;
}
/* Body ------------------------------ */
.email-body {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.email-body_inner {
width: 750px;
margin: 0 auto;
padding: 0;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #FFFFFF;
}
.email-footer {
width: 750px;
margin: 0 auto;
padding: 0;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
}
.email-footer p {
color: #A8AAAF;
}
.body-action {
width: 100%;
margin: 30px auto;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
}
.body-sub {
margin-top: 25px;
padding-top: 25px;
border-top: 1px solid #EAEAEC;
}
.content-cell {
padding: 30px;
}
/*Media Queries ------------------------------ */
@media only screen and (max-width: 600px) {
.email-body_inner,
.email-footer {
width: 100% !important;
}
}
@media (prefers-color-scheme: dark) {
body,
.email-body,
.email-body_inner,
.email-content,
.email-wrapper,
.email-masthead,
.email-footer {
background-color: #333333 !important;
color: #FFF !important;
}
p,
ul,
ol,
blockquote,
h1,
h2,
h3 {
color: #FFF !important;
}
.attributes_content,
.discount {
background-color: #222 !important;
}
.email-masthead_name {
text-shadow: none !important;
}
}
</style>
<!--[if mso]>
<style type="text/css">
.f-fallback {
font-family: Arial, sans-serif;
}
</style>
<![endif]-->
<style type="text/css" rel="stylesheet" media="all">
body {
width: 100% !important;
height: 100%;
margin: 0;
-webkit-text-size-adjust: none;
}
body {
font-family: "Nunito Sans", Helvetica, Arial, sans-serif;
}
body {
background-color: #F2F4F6;
color: #51545E;
}
</style>
</head>
<body style="width: 100% !important;
height: 100%;
-webkit-text-size-adjust: none;
font-family: Helvetica, Arial, sans-serif;
background-color: #F2F4F6;
color: #51545E;
margin: 0;"
bgcolor="#F2F4F6">
<span class="preheader"
style="display: none !important;
visibility: hidden;
mso-hide: all;
font-size: 1px;
line-height: 1px;
max-height: 0;
max-width: 0;
opacity: 0;
overflow: hidden;">{{ pre_header }}</span>
<table class="email-wrapper"
width="100%"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 100%;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #F2F4F6;
margin: 0;
padding: 0;"
bgcolor="#F2F4F6">
<tr>
<td align="center"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;">
<table class="email-content"
width="100%"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 100%;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
margin: 0;
padding: 0;">
<tr>
<td class="email-masthead"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
text-align: center;
padding: 25px 0;"
align="center">
<a href="{{ LANDING_PAGE_URL }}"
class="f-fallback email-masthead_name"
style="color: #A8AAAF;
font-size: 16px;
font-weight: bold;
text-decoration: none;
text-shadow: 0 1px 0 white;">
{% block logo %}<img src="{{ URL }}/static/logo.png" style="width: 150px; margin: auto">{% endblock %}
</a>
</td>
</tr>
<!-- Email Body -->
<tr>
<td class="email-body"
width="750"
cellpadding="0"
cellspacing="0"
style="word-break: break-word;
margin: 0;
padding: 0;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
width: 100%;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;">
<table class="email-body_inner"
align="center"
width="750"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 750px;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #FFFFFF;
margin: 0 auto;
padding: 0;"
bgcolor="#FFFFFF">
<!-- Body content -->
<tr>
<td class="content-cell"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
padding: 30px;">
<div class="f-fallback">
{% block greeting %}{% endblock %}
{% block content %}{% endblock %}
<!-- Sub copy -->
{% block sub_copy %}{% endblock %}
</div>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;">
<table class="email-footer"
align="center"
width="750"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 750px;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
margin: 0 auto;
padding: 0;">
<tr>
<td class="content-cell"
align="center"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
padding: 30px;">
<p class="f-fallback sub align-center"
style="font-size: 13px;
line-height: 1.625;
text-align: center;
color: #A8AAAF;
margin: .4em 0 1.1875em;"
align="center">
© {{ YEAR }} SimpleLogin - a Proton product. All rights reserved.
<br />
{% block footer %}{% endblock %}
</p>
{% if unsubscribe_oneclick is defined %}
<p class="f-fallback sub align-center"
style="font-size: 13px;
line-height: 1.625;
text-align: center;
margin: .4em 0 1.1875em;">
<a href="{{ unsubscribe_oneclick }}">Unsubscribe from our newsletter</a>
</p>
{% endif %}
<p class="f-fallback sub align-center"
style="font-size: 13px;
line-height: 1.625;
text-align: center;
color: #A8AAAF;
margin: .4em 0 1.1875em;"
align="center">
<a href="https://app.simplelogin.io/dashboard/support">Do you have a question?</a>
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@ -1,646 +0,0 @@
{% from "_emailhelpers.html" import render_text, text, render_button, raw_url, grey_section, section %}
<!doctype html>
<html xmlns="http://www.w3.org/1999/xhtml"
xmlns:v="urn:schemas-microsoft-com:vml"
xmlns:o="urn:schemas-microsoft-com:office:office">
<head>
<!-- NAME: 1 COLUMN -->
<!--[if gte mso 15]>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml>
<![endif]-->
<meta charset="UTF-8">
<meta name="x-apple-disable-message-reformatting">
<meta name="format-detection"
content="telephone=no, date=no, address=no, email=no, url=no">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="author" content="Proton">
<style type="text/css">
p {
margin: 12px 0;
padding: 0;
}
table {
border-collapse: collapse;
}
h1,
h2,
h3,
h4,
h5,
h6 {
display: block;
margin: 0;
padding: 0;
}
img,
a img {
border: 0;
height: auto;
outline: none;
text-decoration: none;
}
body,
#bodyTable,
#bodyCell {
height: 100%;
margin: 0;
padding: 0;
width: 100%;
}
.mcnPreviewText {
display: none !important;
}
#outlook a {
padding: 0;
}
img {
-ms-interpolation-mode: bicubic;
}
table {
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
}
.ReadMsgBody {
width: 100%;
}
.ExternalClass {
width: 100%;
}
p,
a,
li,
td,
blockquote {
mso-line-height-rule: exactly;
}
a[href^=tel],
a[href^=sms] {
color: inherit;
cursor: default;
text-decoration: none;
}
p,
a,
li,
td,
body,
table,
blockquote {
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
}
.ExternalClass,
.ExternalClass p,
.ExternalClass td,
.ExternalClass div,
.ExternalClass span,
.ExternalClass font {
line-height: 100%;
}
.no-link a,
a[x-apple-data-detectors],
a[href^="x-apple-data-detectors:"] {
color: inherit !important;
text-decoration: none !important;
font-size: inherit !important;
font-family: inherit !important;
font-weight: inherit !important;
line-height: inherit !important;
}
#bodyCell {
padding: 10px;
}
.templateContainer {
max-width: 600px !important;
}
a.mcnButton {
display: block;
}
.mcnImage,
.mcnRetinaImage {
vertical-align: bottom;
}
.mcnTextContent {
word-break: break-word;
}
.mcnTextContent img {
height: auto !important;
}
.mcnDividerBlock {
table-layout: fixed !important;
}
.mcnHalfTextRight {
border: 1px solid red;
}
@media only screen and (min-width:768px) {
.templateContainer {
width: 600px !important;
}
}
@media only screen and (max-width: 480px) {
body,
table,
td,
p,
a,
li,
blockquote {
-webkit-text-size-adjust: none !important;
}
body {
width: 100% !important;
min-width: 100% !important;
}
.mcnRetinaImage {
max-width: 100% !important;
}
.mcnImage {
width: 100% !important;
}
.mcnCaptionLeftImageContent .mcnImage,
.mcnCaptionRightImageContent .mcnImage {
width: 176px !important;
}
.mcnHalfCaptionLeftImageContent .mcnImage,
.mcnHalfCaptionRightImageContent .mcnImage {
width: 268px !important;
}
.mcnBoxContentColumnBoxed {
padding: 8px !important;
margin: 0 !important;
}
.mcnButtonContentContainer,
.mcnCartContainer,
.mcnCaptionTopContent,
.mcnRecContentContainer,
.mcnCaptionBottomContent,
.mcnTextContentContainer,
.mcnBoxedTextContentContainer,
.mcnImageGroupContentContainer,
.mcnCaptionLeftTextContentContainer,
.mcnCaptionRightTextContentContainer,
.mcnCaptionLeftImageContentContainer,
.mcnCaptionRightImageContentContainer,
.mcnImageCardLeftTextContentContainer,
.mcnImageCardRightTextContentContainer,
.mcnImageCardLeftImageContentContainer,
.mcnImageCardRightImageContentContainer {
max-width: 100% !important;
width: 100% !important;
}
.mcnBoxedTextContentContainer {
min-width: 100% !important;
}
.mcnImageGroupContent {
padding: 16px !important;
}
.mcnCaptionLeftContentOuter .mcnTextContent,
.mcnCaptionRightContentOuter .mcnTextContent {
padding-top: 16px !important;
}
.mcnImageCardTopImageContent,
.mcnCaptionBottomContent:last-child .mcnCaptionBottomImageContent,
.mcnCaptionBlockInner .mcnCaptionTopContent:last-child .mcnTextContent {
padding-top: 32px !important;
}
.mcnHalfCaptionLeftImageContent,
.mcnHalfCaptionRightImageContent {
text-align: center;
}
.mcnImageCardBottomImageContent {
padding-bottom: 16px !important;
}
.mcnImageGroupBlockInner {
padding-top: 0 !important;
padding-bottom: 0 !important;
}
.mcnImageGroupBlockOuter {
padding-top: 16px !important;
padding-bottom: 16px !important;
}
.mcnTextContent,
.mcnBoxedTextContentColumn {
padding-right: 32px !important;
padding-left: 32px !important;
}
.mcnCaptionBottomContent .mcnTextContent {
padding-left: 16px !important;
padding-right: 16px !important;
}
.mcnCaptionLeftTextContentContainer .mcnTextContent,
.mcnCaptionRightTextContentContainer .mcnTextContent {
padding-right: 0 !important;
padding-left: 0 !important;
}
.mcnImageCardLeftImageContent,
.mcnImageCardRightImageContent {
padding-right: 32px !important;
padding-bottom: 0 !important;
padding-left: 32px !important;
}
.mcnTextContent ul,
.mcnTextContent ol {
padding-inline-start: 24px !important;
}
.mcnButtonContent {
padding-left: 24px !important;
padding-right: 24px !important;
}
.mcnButtonHint {
padding-left: 16px !important;
padding-right: 16px !important;
}
.mcnButtonHint,
.mcnButtonHint * {
text-align: center !important;
}
.mcpreview-image-uploader {
display: none !important;
width: 100% !important;
}
.hide-on-mobile {
display: none !important;
}
.flex-stack-on-mobile {
flex-direction: column;
}
.flex-stack-on-mobile .mcnCaptionBottomContent,
.flex-stack-on-mobile .mcnBoxContentContainer {
height: auto !important;
}
/*
@tab Mobile Styles
@section Heading 1
@tip Make the first-level headings larger in size for better readability on small screens.
*/
h1 {
font-size: 22px !important;
line-height: 1.25em !important;
}
/*
@tab Mobile Styles
@section Heading 2
@tip Make the second-level headings larger in size for better readability on small screens.
*/
h2 {
font-size: 20px !important;
line-height: 1.25em !important;
}
/*
@tab Mobile Styles
@section Heading 3
@tip Make the third-level headings larger in size for better readability on small screens.
*/
h3 {
font-size: 18px !important;
line-height: 1.25em !important;
}
/*
@tab Mobile Styles
@section Heading 4
@tip Make the fourth-level headings larger in size for better readability on small screens.
*/
h4 {
font-size: 16px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Boxed Text
@tip Make the boxed text larger in size for better readability on small screens. We recommend a font size of at least 16px.
*/
.mcnBoxedTextContentContainer .mcnTextContent,
.mcnBoxedTextContentContainer .mcnTextContent p {
font-size: 14px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Preheader Visibility
@tip Set the visibility of the email's preheader on small screens. You can hide it to save space.
*/
#templatePreheader {
display: block !important;
}
/*
@tab Mobile Styles
@section Preheader Text
@tip Make the preheader text larger in size for better readability on small screens.
*/
#templatePreheader .mcnTextContent,
#templatePreheader .mcnTextContent p,
#templateBody .templatePreheader .mcnTextContent,
#templateBody .templatePreheader .mcnTextContent p {
font-size: 13px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Header Text
@tip Make the header text larger in size for better readability on small screens.
*/
#templateHeader .mcnTextContent,
#templateHeader .mcnTextContent p {
font-size: 16px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Body Text
@tip Make the body text larger in size for better readability on small screens. We recommend a font size of at least 16px.
*/
#templateBody .mcnTextContent,
#templateBody .mcnTextContent p {
font-size: 16px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Body Caption Text
@tip Make the body text larger in size for better readability on small screens. We recommend a font size of at least 16px.
*/
#templateBody .templateBodyCaption,
#templateBody .templateBodyCaption p {
font-size: 14px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Footer Text
@tip Make the footer content text larger in size for better readability on small screens.
*/
#templateFooter .mcnTextContent,
#templateFooter .mcnTextContent p {
font-size: 14px !important;
line-height: 1.5em !important;
}
/*
@tab Mobile Styles
@section Footer Follow icons
@tip Reduce the spacing between the footer icons to avoid a line-break them on small screens.
*/
#templateFooter .mcnFollowContentItemContainer {
padding-left: 2px !important;
padding-right: 2px !important;
}
/*
@tab Mobile Styles
@section Footer Follow icons
@tip Reduce the spacing between the footer icons to avoid a line-break them on small screens.
*/
#templateFooter .mcnFollowContentItemContainerSmall {
padding-left: 8px !important;
padding-right: 8px !important;
}
/*
@text Mobile Style
*/
.mcnCaptionRightImageContent,
.mcnCaptionLeftImageContent {
text-align: center;
}
}
@media only screen and (max-width: 352px) {
/*
@tab Mobile Styles
@section Footer Follow icons
@tip Reduce the icon size on very small screens.
*/
.mcnFollowIconContent,
.mcnFollowIconContent img.social-icon {
width: 38px !important;
height: 38px !important;
}
/*
@tab Mobile Styles
@section Footer Follow icons
@tip Remove the spacing between the footer icons to avoid a line-break them on very small screens.
*/
#templateFooter .mcnFollowContentItemContainer {
padding-left: 0 !important;
padding-right: 0 !important;
}
}
</style>
</head>
<body style="height: 100%;
margin: 0;
padding: 0;
width: 100%;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;">
<!--[if !gte mso 9]><!----><span class="mcnPreviewText"
style="display:none;
font-size:0px;
line-height:0px;
max-height:0px;
max-width:0px;
opacity:0;
overflow:hidden;
visibility:hidden;
mso-hide:all;"></span><!--<![endif]-->
<center>
<table align="center"
border="0"
cellpadding="0"
cellspacing="0"
height="100%"
width="100%"
id="bodyTable"
style="border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
height: 100%;
margin: 0;
padding: 0;
width: 100%;">
<tr>
<td align="center"
valign="top"
id="bodyCell"
style="mso-line-height-rule: exactly;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
height: 100%;
margin: 0;
padding: 8px;
width: 100%;">
<!-- BEGIN TEMPLATE // -->
<!--[if (gte mso 9)|(IE)]>
<table align="center" border="0" cellspacing="0" cellpadding="0" width="600" style="width:600px;"><tr><td align="center" valign="top" width="600" style="width:600px;">
<![endif]-->
<table border="0"
cellpadding="0"
cellspacing="0"
width="100%"
class="templateContainer"
style="border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
max-width: 600px !important;">
<tr>
<td valign="top"
id="templateHeader"
style="mso-line-height-rule: exactly;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;">
<table border="0"
cellpadding="0"
cellspacing="0"
width="100%"
class="mcnImageBlock"
style="min-width: 100%;
border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;">
<tbody class="mcnImageBlockOuter">
<tr>
<td valign="top"
style="padding: 16px;
mso-line-height-rule: exactly;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;"
class="mcnImageBlockInner">
<table align="left"
width="100%"
border="0"
cellpadding="0"
cellspacing="0"
class="mcnImageContentContainer"
style="min-width: 100%;
border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;">
<tbody>
<tr>
<td class="mcnImageContent"
valign="top"
style="padding: 16px;
text-align: center;
mso-line-height-rule: exactly;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;">
<a href="https://proton.me/" target="_blank" style="">
<img align="center"
alt="Proton"
src="{{ URL }}/static/logo-proton.png"
width="190"
style="width:35.4477%; max-width: 380px; padding-bottom: 0; display: inline !important; vertical-align: bottom; border: 0; height: auto; outline: none; text-decoration: none; -ms-interpolation-mode: bicubic; ">
</a>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
<tr>
<td valign="top"
id="templateBody"
style="mso-line-height-rule: exactly; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; ">
{% block greeting %}{% endblock %}
{% block content %}{% endblock %}
<!-- Sub copy -->
{% block sub_copy %}{% endblock %}
</td>
</tr>
</table>
<!--[if (gte mso 9)|(IE)]>
</td>
</tr>
</table>
<![endif]-->
<!-- // END TEMPLATE -->
</td>
</tr>
</table>
</center>
</body>
</html>

View File

@ -1,623 +0,0 @@
{% from "_emailhelpers.html" import render_text, text, render_button, raw_url, grey_section, section %}
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="x-apple-disable-message-reformatting" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<style type="text/css" rel="stylesheet" media="all">
/* Base ------------------------------ */
body {
width: 100% !important;
height: 100%;
margin: 0;
-webkit-text-size-adjust: none;
line-height: 1.6;
}
img {
max-width: 100%;
}
a {
color: #3869D4;
}
a img {
border: none;
}
td {
word-break: break-word;
}
.preheader {
display: none !important;
visibility: hidden;
mso-hide: all;
font-size: 1px;
line-height: 1px;
max-height: 0;
max-width: 0;
opacity: 0;
overflow: hidden;
}
/* Type ------------------------------ */
body,
td,
th {
font-family: "Nunito Sans", Helvetica, Arial, sans-serif;
}
h1 {
margin-top: 0;
color: #333333;
font-size: 22px;
font-weight: bold;
text-align: left;
}
h2 {
margin-top: 0;
color: #333333;
font-size: 16px;
font-weight: bold;
text-align: left;
}
h3 {
margin-top: 0;
color: #333333;
font-size: 14px;
font-weight: bold;
text-align: left;
}
td,
th {
font-size: 16px;
}
p,
ul,
ol,
blockquote {
margin: .4em 0 1.1875em;
font-size: 16px;
line-height: 1.625;
}
p.sub {
font-size: 13px;
}
/* Utilities ------------------------------ */
.align-right {
text-align: right;
}
.align-left {
text-align: left;
}
.align-center {
text-align: center;
}
/* Buttons ------------------------------ */
.button {
background-color: #3869D4;
border-top: 10px solid #3869D4;
border-right: 18px solid #3869D4;
border-bottom: 10px solid #3869D4;
border-left: 18px solid #3869D4;
display: inline-block;
color: #FFF;
text-decoration: none;
border-radius: 3px;
box-shadow: 0 2px 3px rgba(0, 0, 0, 0.16);
-webkit-text-size-adjust: none;
box-sizing: border-box;
}
.button--green {
background-color: #22BC66;
border-top: 10px solid #22BC66;
border-right: 18px solid #22BC66;
border-bottom: 10px solid #22BC66;
border-left: 18px solid #22BC66;
}
.button--red {
background-color: #FF6136;
border-top: 10px solid #FF6136;
border-right: 18px solid #FF6136;
border-bottom: 10px solid #FF6136;
border-left: 18px solid #FF6136;
}
@media only screen and (max-width: 500px) {
.button {
width: 100% !important;
text-align: center !important;
}
}
/* Attribute list ------------------------------ */
.attributes {
margin: 0 0 21px;
}
.attributes_content {
background-color: #F4F4F7;
padding: 16px;
}
.attributes_item {
padding: 0;
}
/* Related Items ------------------------------ */
.related {
width: 100%;
margin: 0;
padding: 25px 0 0 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.related_item {
padding: 10px 0;
color: #CBCCCF;
font-size: 15px;
line-height: 18px;
}
.related_item-title {
display: block;
margin: .5em 0 0;
}
.related_item-thumb {
display: block;
padding-bottom: 10px;
}
.related_heading {
border-top: 1px solid #CBCCCF;
text-align: center;
padding: 25px 0 10px;
}
/* Discount Code ------------------------------ */
.discount {
width: 100%;
margin: 0;
padding: 24px;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #F4F4F7;
border: 2px dashed #CBCCCF;
}
.discount_heading {
text-align: center;
}
.discount_body {
text-align: center;
font-size: 15px;
}
/* Social Icons ------------------------------ */
.social {
width: auto;
}
.social td {
padding: 0;
width: auto;
}
.social_icon {
height: 20px;
margin: 0 8px 10px 8px;
padding: 0;
}
/* Data table ------------------------------ */
.purchase {
width: 100%;
margin: 0;
padding: 35px 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.purchase_content {
width: 100%;
margin: 0;
padding: 25px 0 0 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.purchase_item {
padding: 10px 0;
color: #51545E;
font-size: 15px;
line-height: 18px;
}
.purchase_heading {
padding-bottom: 8px;
border-bottom: 1px solid #EAEAEC;
}
.purchase_heading p {
margin: 0;
color: #85878E;
font-size: 12px;
}
.purchase_footer {
padding-top: 15px;
border-top: 1px solid #EAEAEC;
}
.purchase_total {
margin: 0;
text-align: right;
font-weight: bold;
color: #333333;
}
.purchase_total--label {
padding: 0 15px 0 0;
}
body {
background-color: #F2F4F6;
color: #51545E;
}
p {
color: #51545E;
}
.email-wrapper {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #F2F4F6;
}
.email-content {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
/* Masthead ----------------------- */
.email-masthead {
padding: 25px 0;
text-align: center;
}
.email-masthead_logo {
width: 94px;
}
.email-masthead_name {
font-size: 16px;
font-weight: bold;
color: #A8AAAF;
text-decoration: none;
text-shadow: 0 1px 0 white;
}
/* Body ------------------------------ */
.email-body {
width: 100%;
margin: 0;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
}
.email-body_inner {
width: 750px;
margin: 0 auto;
padding: 0;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #FFFFFF;
}
.email-footer {
width: 750px;
margin: 0 auto;
padding: 0;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
}
.email-footer p {
color: #A8AAAF;
}
.body-action {
width: 100%;
margin: 30px auto;
padding: 0;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
}
.body-sub {
margin-top: 25px;
padding-top: 25px;
border-top: 1px solid #EAEAEC;
}
.content-cell {
padding: 30px;
}
/*Media Queries ------------------------------ */
@media only screen and (max-width: 600px) {
.email-body_inner,
.email-footer {
width: 100% !important;
}
}
@media (prefers-color-scheme: dark) {
body,
.email-body,
.email-body_inner,
.email-content,
.email-wrapper,
.email-masthead,
.email-footer {
background-color: #333333 !important;
color: #FFF !important;
}
p,
ul,
ol,
blockquote,
h1,
h2,
h3 {
color: #FFF !important;
}
.attributes_content,
.discount {
background-color: #222 !important;
}
.email-masthead_name {
text-shadow: none !important;
}
}
</style>
<!--[if mso]>
<style type="text/css">
.f-fallback {
font-family: Arial, sans-serif;
}
</style>
<![endif]-->
<style type="text/css" rel="stylesheet" media="all">
body {
width: 100% !important;
height: 100%;
margin: 0;
-webkit-text-size-adjust: none;
}
body {
font-family: "Nunito Sans", Helvetica, Arial, sans-serif;
}
body {
background-color: #F2F4F6;
color: #51545E;
}
</style>
</head>
<body style="width: 100% !important;
height: 100%;
-webkit-text-size-adjust: none;
font-family: Helvetica, Arial, sans-serif;
background-color: #F2F4F6;
color: #51545E;
margin: 0;"
bgcolor="#F2F4F6">
<span class="preheader"
style="display: none !important;
visibility: hidden;
mso-hide: all;
font-size: 1px;
line-height: 1px;
max-height: 0;
max-width: 0;
opacity: 0;
overflow: hidden;">{{ pre_header }}</span>
<table class="email-wrapper"
width="100%"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 100%;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #F2F4F6;
margin: 0;
padding: 0;"
bgcolor="#F2F4F6">
<tr>
<td align="center"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;">
<table class="email-content"
width="100%"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 100%;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
margin: 0;
padding: 0;">
<tr>
<td class="email-masthead"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
text-align: center;
padding: 25px 0;"
align="center">
<a href="{{ LANDING_PAGE_URL }}"
class="f-fallback email-masthead_name"
style="color: #A8AAAF;
font-size: 16px;
font-weight: bold;
text-decoration: none;
text-shadow: 0 1px 0 white;">
{% block logo %}<img src="{{ URL }}/static/logo.png" style="width: 150px; margin: auto">{% endblock %}
</a>
</td>
</tr>
<!-- Email Body -->
<tr>
<td class="email-body"
width="750"
cellpadding="0"
cellspacing="0"
style="word-break: break-word;
margin: 0;
padding: 0;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
width: 100%;
-premailer-width: 100%;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;">
<table class="email-body_inner"
align="center"
width="750"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 750px;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
background-color: #FFFFFF;
margin: 0 auto;
padding: 0;"
bgcolor="#FFFFFF">
<!-- Body content -->
<tr>
<td class="content-cell"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
padding: 30px;">
<div class="f-fallback">
{% block greeting %}{% endblock %}
{% block content %}{% endblock %}
<!-- Sub copy -->
{% block sub_copy %}{% endblock %}
</div>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;">
<table class="email-footer"
align="center"
width="750"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 750px;
-premailer-width: 750px;
-premailer-cellpadding: 0;
-premailer-cellspacing: 0;
text-align: center;
margin: 0 auto;
padding: 0;">
<tr>
<td class="content-cell"
align="center"
style="word-break: break-word;
font-family: Helvetica, Arial, sans-serif;
font-size: 16px;
padding: 30px;">
<p class="f-fallback sub align-center"
style="font-size: 13px;
line-height: 1.625;
text-align: center;
color: #A8AAAF;
margin: .4em 0 1.1875em;"
align="center">
© {{ YEAR }} SimpleLogin - a Proton product. All rights reserved.
<br />
{% block footer %}{% endblock %}
</p>
{% if unsubscribe_oneclick is defined %}
<p class="f-fallback sub align-center"
style="font-size: 13px;
line-height: 1.625;
text-align: center;
margin: .4em 0 1.1875em;">
<a href="{{ unsubscribe_oneclick }}">Unsubscribe from our newsletter</a>
</p>
{% endif %}
<p class="f-fallback sub align-center"
style="font-size: 13px;
line-height: 1.625;
text-align: center;
color: #A8AAAF;
margin: .4em 0 1.1875em;"
align="center">
<a href="https://app.simplelogin.io/dashboard/support">Do you have a question?</a>
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@ -6,7 +6,6 @@
{{ render_text("Your subscription will end on " + next_bill_date + ".") }} {{ render_text("Your subscription will end on " + next_bill_date + ".") }}
{{ render_text("When the subscription ends:") }} {{ render_text("When the subscription ends:") }}
{{ render_text("- All aliases/domains/directories you have created are <b>kept</b> and continue working normally.") }} {{ render_text("- All aliases/domains/directories you have created are <b>kept</b> and continue working normally.") }}
{{ render_text("- You cannot create new reverse aliases.") }}
{% call text() %} {% call text() %}
- You cannot create new aliases if you exceed the free plan limit, i.e. have more than {{ MAX_NB_EMAIL_FREE_PLAN }} aliases. - You cannot create new aliases if you exceed the free plan limit, i.e. have more than {{ MAX_NB_EMAIL_FREE_PLAN }} aliases.
{% endcall %} {% endcall %}

View File

@ -9,7 +9,6 @@ When the subscription ends:
 - All aliases/domains/directories you have created are kept and continue working.
 - You cannot create new aliases if you exceed the free plan limit, i.e. have more than {{MAX_NB_EMAIL_FREE_PLAN}} aliases.
-- You cannot create new reverse aliases.
 - As features like "catch-all" or "directory" allow you to create aliases on-the-fly,
 those aliases cannot be automatically created if you have more than {{MAX_NB_EMAIL_FREE_PLAN}} aliases.
 - You cannot add new domain or directory.

View File

@ -14,7 +14,6 @@
{{ render_text("- You cannot add new domain or directory.") }} {{ render_text("- You cannot add new domain or directory.") }}
{{ render_text("- You cannot add new mailbox.") }} {{ render_text("- You cannot add new mailbox.") }}
{{ render_text("- You cannot create new reverse aliases.") }}
{{ render_text("- If you enable PGP Encryption, forwarded emails are not encrypted anymore.") }} {{ render_text("- If you enable PGP Encryption, forwarded emails are not encrypted anymore.") }}
{{ render_text('You can upgrade today to continue using all these Premium features (and much more coming).') }} {{ render_text('You can upgrade today to continue using all these Premium features (and much more coming).') }}
{{ render_button("Upgrade your account", URL ~ "/dashboard/pricing") }} {{ render_button("Upgrade your account", URL ~ "/dashboard/pricing") }}

View File

@ -8,7 +8,6 @@ When the trial ends:
 - All aliases/domains/directories you have created are kept and continue working.
 - You cannot create new aliases if you exceed the free plan limit, i.e. have more than {{MAX_NB_EMAIL_FREE_PLAN}} aliases.
 - You cannot add new domain or directory.
-- You cannot create new reverse aliases.
 - You cannot add new mailbox.
 - If you enable PGP Encryption, forwarded emails are not encrypted anymore.

View File

@ -48,16 +48,15 @@
 {# SIWSL#}
 {# </a>#}
 {# </li>#}
-{% if current_user.should_show_app_page() %}
-  <li class="nav-item">
-    <a href="{{ url_for('dashboard.app_route') }}"
-       class="nav-link {{ 'active' if active_page == 'app' }}">
-      <i class="fe fe-grid"></i>
-      Apps
-    </a>
-  </li>
-{% endif %}
+{# {% if current_user.should_show_app_page() %}#}
+{# <li class="nav-item">#}
+{# <a href="{{ url_for('dashboard.app_route') }}"#}
+{# class="nav-link {{ 'active' if active_page == 'app' }}">#}
+{# <i class="fe fe-grid"></i>#}
+{# Apps#}
+{# </a>#}
+{# </li>#}
+{# {% endif %}#}
 <li class="nav-item">
   <a href="{{ url_for('dashboard.setting') }}"
      class="nav-link {{ 'active' if active_page == 'setting' }}">

View File

@ -1,5 +1,5 @@
from app import config from app import config
from flask import url_for, session from flask import url_for
from urllib.parse import parse_qs from urllib.parse import parse_qs
from urllib3.util import parse_url from urllib3.util import parse_url
from app.auth.views.oidc import create_user from app.auth.views.oidc import create_user
@ -10,21 +10,7 @@ from app.models import User
from app.config import URL, OIDC_CLIENT_ID from app.config import URL, OIDC_CLIENT_ID
mock_well_known_response = { def test_oidc_login(flask_client):
"authorization_endpoint": "http://localhost:7777/authorization-endpoint",
"userinfo_endpoint": "http://localhost:7777/userinfo-endpoint",
"token_endpoint": "http://localhost:7777/token-endpoint",
}
@patch("requests.get")
def test_oidc_login(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_login"), url_for("auth.oidc_login"),
follow_redirects=False, follow_redirects=False,
@ -42,41 +28,8 @@ def test_oidc_login(mock_get, flask_client):
assert expected_redirect_url == query["redirect_uri"][0] assert expected_redirect_url == query["redirect_uri"][0]
@patch("requests.get") def test_oidc_login_no_client_id(flask_client):
def test_oidc_login_next_url(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
with flask_client:
r = flask_client.get(
url_for("auth.oidc_login", next="/dashboard/settings/"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
query = parse_qs(parsed.query)
expected_redirect_url = f"{URL}/auth/oidc/callback"
assert "code" == query["response_type"][0]
assert OIDC_CLIENT_ID == query["client_id"][0]
assert expected_redirect_url == query["redirect_uri"][0]
assert session["oauth_redirect_next"] == "/dashboard/settings/"
@patch("requests.get")
def test_oidc_login_no_client_id(mock_get, flask_client):
config.OIDC_CLIENT_ID = None config.OIDC_CLIENT_ID = None
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_login"), url_for("auth.oidc_login"),
@ -94,14 +47,8 @@ def test_oidc_login_no_client_id(mock_get, flask_client):
config.OIDC_CLIENT_ID = "to_fill" config.OIDC_CLIENT_ID = "to_fill"
@patch("requests.get") def test_oidc_login_no_client_secret(flask_client):
def test_oidc_login_no_client_secret(mock_get, flask_client):
config.OIDC_CLIENT_SECRET = None config.OIDC_CLIENT_SECRET = None
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_login"), url_for("auth.oidc_login"),
@ -119,14 +66,9 @@ def test_oidc_login_no_client_secret(mock_get, flask_client):
config.OIDC_CLIENT_SECRET = "to_fill" config.OIDC_CLIENT_SECRET = "to_fill"
@patch("requests.get") def test_oidc_callback_no_oauth_state(flask_client):
def test_oidc_callback_no_oauth_state(mock_get, flask_client): with flask_client.session_transaction() as session:
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url" session["oauth_state"] = None
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_callback"), url_for("auth.oidc_callback"),
@ -136,16 +78,11 @@ def test_oidc_callback_no_oauth_state(mock_get, flask_client):
assert location is None assert location is None
@patch("requests.get") def test_oidc_callback_no_client_id(flask_client):
def test_oidc_callback_no_client_id(mock_get, flask_client): with flask_client.session_transaction() as session:
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url" session["oauth_state"] = "state"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
config.OIDC_CLIENT_ID = None config.OIDC_CLIENT_ID = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_callback"), url_for("auth.oidc_callback"),
follow_redirects=False, follow_redirects=False,
@ -160,20 +97,15 @@ def test_oidc_callback_no_client_id(mock_get, flask_client):
assert expected_redirect_url == parsed.path assert expected_redirect_url == parsed.path
config.OIDC_CLIENT_ID = "to_fill" config.OIDC_CLIENT_ID = "to_fill"
with flask_client.session_transaction() as sess: with flask_client.session_transaction() as session:
sess["oauth_state"] = None session["oauth_state"] = None
@patch("requests.get") def test_oidc_callback_no_client_secret(flask_client):
def test_oidc_callback_no_client_secret(mock_get, flask_client): with flask_client.session_transaction() as session:
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url" session["oauth_state"] = "state"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
config.OIDC_CLIENT_SECRET = None config.OIDC_CLIENT_SECRET = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_callback"), url_for("auth.oidc_callback"),
follow_redirects=False, follow_redirects=False,
@ -188,23 +120,16 @@ def test_oidc_callback_no_client_secret(mock_get, flask_client):
assert expected_redirect_url == parsed.path assert expected_redirect_url == parsed.path
config.OIDC_CLIENT_SECRET = "to_fill" config.OIDC_CLIENT_SECRET = "to_fill"
with flask_client.session_transaction() as sess: with flask_client.session_transaction() as session:
sess["oauth_state"] = None session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token") @patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get") @patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_invalid_user( def test_oidc_callback_invalid_user(mock_get, mock_fetch_token, flask_client):
mock_oauth_get, mock_fetch_token, mock_get, flask_client mock_get.return_value = MockResponse(400, {})
): with flask_client.session_transaction() as session:
mock_oauth_get.return_value = MockResponse(400, {}) session["oauth_state"] = "state"
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_callback"), url_for("auth.oidc_callback"),
@ -218,59 +143,45 @@ def test_oidc_callback_invalid_user(
expected_redirect_url = "/auth/login" expected_redirect_url = "/auth/login"
assert expected_redirect_url == parsed.path assert expected_redirect_url == parsed.path
assert mock_oauth_get.called assert mock_get.called
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_no_email(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
mock_oauth_get.return_value = MockResponse(200, {})
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
expected_redirect_url = "/auth/login"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
with flask_client.session_transaction() as session: with flask_client.session_transaction() as session:
session["oauth_state"] = None session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token") @patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get") @patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_disabled_registration( def test_oidc_callback_no_email(mock_get, mock_fetch_token, flask_client):
mock_oauth_get, mock_fetch_token, mock_get, flask_client mock_get.return_value = MockResponse(200, {})
): with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
expected_redirect_url = "/auth/login"
assert expected_redirect_url == parsed.path
assert mock_get.called
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_disabled_registration(mock_get, mock_fetch_token, flask_client):
config.DISABLE_REGISTRATION = True config.DISABLE_REGISTRATION = True
email = random_string() email = random_string()
mock_oauth_get.return_value = MockResponse(200, {"email": email}) mock_get.return_value = MockResponse(200, {"email": email})
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url" with flask_client.session_transaction() as session:
with flask_client.session_transaction() as sess: session["oauth_state"] = "state"
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get( r = flask_client.get(
url_for("auth.oidc_callback"), url_for("auth.oidc_callback"),
@ -284,33 +195,26 @@ def test_oidc_callback_disabled_registration(
expected_redirect_url = "/auth/register" expected_redirect_url = "/auth/register"
assert expected_redirect_url == parsed.path assert expected_redirect_url == parsed.path
assert mock_oauth_get.called assert mock_get.called
config.DISABLE_REGISTRATION = False config.DISABLE_REGISTRATION = False
with flask_client.session_transaction() as sess: with flask_client.session_transaction() as session:
sess["oauth_state"] = None session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token") @patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get") @patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_registration( def test_oidc_callback_registration(mock_get, mock_fetch_token, flask_client):
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
email = random_string() email = random_string()
mock_oauth_get.return_value = MockResponse( mock_get.return_value = MockResponse(
200, 200,
{ {
"email": email, "email": email,
config.OIDC_NAME_FIELD: "name", config.OIDC_NAME_FIELD: "name",
}, },
) )
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url" with flask_client.session_transaction() as session:
with flask_client.session_transaction() as sess: session["oauth_state"] = "state"
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
user = User.get_by(email=email) user = User.get_by(email=email)
assert user is None assert user is None
@ -327,33 +231,28 @@ def test_oidc_callback_registration(
expected_redirect_url = "/dashboard/" expected_redirect_url = "/dashboard/"
assert expected_redirect_url == parsed.path assert expected_redirect_url == parsed.path
assert mock_oauth_get.called assert mock_get.called
user = User.get_by(email=email) user = User.get_by(email=email)
assert user is not None assert user is not None
assert user.email == email assert user.email == email
with flask_client.session_transaction() as sess: with flask_client.session_transaction() as session:
sess["oauth_state"] = None session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token") @patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get") @patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_login(mock_oauth_get, mock_fetch_token, mock_get, flask_client): def test_oidc_callback_login(mock_get, mock_fetch_token, flask_client):
email = random_string() email = random_string()
mock_oauth_get.return_value = MockResponse( mock_get.return_value = MockResponse(
200, 200,
{ {
"email": email, "email": email,
}, },
) )
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url" with flask_client.session_transaction() as session:
with flask_client.session_transaction() as sess: session["oauth_state"] = "state"
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
user = User.create( user = User.create(
email=email, email=email,
@ -376,57 +275,10 @@ def test_oidc_callback_login(mock_oauth_get, mock_fetch_token, mock_get, flask_c
expected_redirect_url = "/dashboard/" expected_redirect_url = "/dashboard/"
assert expected_redirect_url == parsed.path assert expected_redirect_url == parsed.path
assert mock_oauth_get.called assert mock_get.called
with flask_client.session_transaction() as sess: with flask_client.session_transaction() as session:
sess["oauth_state"] = None session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_login_with_next_url(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
email = random_string()
mock_oauth_get.return_value = MockResponse(
200,
{
"email": email,
},
)
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = "/dashboard/settings/"
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
user = User.create(
email=email,
name="name",
password="",
activated=True,
)
user = User.get_by(email=email)
assert user is not None
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
expected_redirect_url = "/dashboard/settings/"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
def test_create_user(): def test_create_user():

View File

View File

@ -1,168 +0,0 @@
import arrow
import pytest
import cron
from app.db import Session
from app.models import (
Alias,
AppleSubscription,
PlanEnum,
CoinbaseSubscription,
ManualSubscription,
Subscription,
PartnerUser,
PartnerSubscription,
User,
)
from app.proton.utils import get_proton_partner
from tests.utils import create_new_user, random_token
def test_get_alias_for_free_user_has_no_alias():
user = create_new_user()
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_get_alias_for_lifetime_with_null_hibp_date():
user = create_new_user()
user.lifetime = True
user.enable_data_breach_check = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def test_get_alias_for_lifetime_with_old_hibp_date():
user = create_new_user()
user.lifetime = True
user.enable_data_breach_check = True
alias = Alias.create_new_random(user)
alias.hibp_last_check = arrow.now().shift(days=-1)
alias_id = alias.id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def create_partner_sub(user: User):
pu = PartnerUser.create(
partner_id=get_proton_partner().id,
partner_email=user.email,
external_user_id=random_token(10),
user_id=user.id,
flush=True,
)
PartnerSubscription.create(
partner_user_id=pu.id, end_at=arrow.utcnow().shift(days=15)
)
sub_generator_list = [
lambda u: AppleSubscription.create(
user_id=u.id,
expires_date=arrow.now().shift(days=15),
original_transaction_id=random_token(10),
receipt_data=random_token(10),
plan=PlanEnum.monthly,
),
lambda u: CoinbaseSubscription.create(
user_id=u.id,
end_at=arrow.now().shift(days=15),
),
lambda u: ManualSubscription.create(
user_id=u.id,
end_at=arrow.now().shift(days=15),
),
lambda u: Subscription.create(
user_id=u.id,
cancel_url="",
update_url="",
subscription_id=random_token(10),
event_time=arrow.now(),
next_bill_date=arrow.now().shift(days=15).date(),
plan=PlanEnum.monthly,
),
create_partner_sub,
]
@pytest.mark.parametrize("sub_generator", sub_generator_list)
def test_get_alias_for_sub(sub_generator):
user = create_new_user()
user.enable_data_breach_check = True
sub_generator(user)
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def test_disabled_user_is_not_checked():
user = create_new_user()
user.lifetime = True
user.disabled = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_skipped_user_is_not_checked():
user = create_new_user()
user.lifetime = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_already_checked_is_not_checked():
user = create_new_user()
user.lifetime = True
alias = Alias.create_new_random(user)
alias.hibp_last_check = arrow.now().shift(days=1)
alias_id = alias.id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_outed_in_user_is_checked():
user = create_new_user()
user.lifetime = True
user.enable_data_breach_check = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 1
def test_outed_out_user_is_not_checked():
user = create_new_user()
user.lifetime = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 0
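
The removed tests above pin down which aliases the HIBP cron may check. As a rough guide only (this is not the real `cron.get_alias_to_check_hibp` query), the filtering they assert can be summarised as a plain-Python predicate; the `has_premium` flag is a stand-in for the Apple/Coinbase/Manual/Paddle/Partner subscriptions covered by `sub_generator_list`.

```
# Hypothetical restatement of the selection rules the deleted tests assert;
# NOT the actual implementation in cron.py.
from typing import Iterable
import arrow


def should_check_alias_for_hibp(
    alias,                      # object with .hibp_last_check (Arrow or None)
    user,                       # object with .id, .disabled, .lifetime, .enable_data_breach_check
    has_premium: bool,          # stands in for any active paid subscription
    check_time: arrow.Arrow,
    skip_user_ids: Iterable[int],
) -> bool:
    if user.disabled or user.id in skip_user_ids:
        return False
    if not user.enable_data_breach_check:
        return False
    if not (user.lifetime or has_premium):
        return False
    # only pick aliases never checked, or not checked since check_time
    return alias.hibp_last_check is None or alias.hibp_last_check < check_time
```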

View File

@ -1,56 +0,0 @@
from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, UserDeleted
from app.models import PartnerUser, User
from app.proton.utils import get_proton_partner
from tests.utils import create_new_user, random_token
from typing import Tuple
class OnMemoryDispatcher(Dispatcher):
def __init__(self):
self.memory = []
def send(self, event: bytes):
self.memory.append(event)
def _create_unlinked_user() -> User:
return create_new_user()
def _create_linked_user() -> Tuple[User, PartnerUser]:
user = _create_unlinked_user()
partner_user = PartnerUser.create(
partner_id=get_proton_partner().id,
user_id=user.id,
external_user_id=random_token(10),
flush=True,
)
return user, partner_user
def test_event_dispatcher_stores_events():
dispatcher = OnMemoryDispatcher()
(user, partner) = _create_linked_user()
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 1
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 2
def test_event_dispatcher_does_not_send_event_if_user_not_linked():
dispatcher = OnMemoryDispatcher()
user = _create_unlinked_user()
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 0
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 0

View File

@ -1,46 +0,0 @@
from app import config
from app.db import Session
from app.events.event_dispatcher import Dispatcher
from app.events.generated import event_pb2
from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.models import Alias
from tests.utils import create_partner_linked_user
class MemStoreDispatcher(Dispatcher):
def __init__(self):
self.events = []
def send(self, event: bytes):
self.events.append(event)
def setup_module():
config.EVENT_WEBHOOK = True
def teardown_module():
config.EVENT_WEBHOOK = False
def test_send_alias_creation_events():
[user, partner_user] = create_partner_linked_user()
aliases = [Alias.create_new_random(user) for i in range(2)]
Session.flush()
dispatcher = MemStoreDispatcher()
send_alias_creation_events_for_user(user, dispatcher=dispatcher, chunk_size=2)
# 2 batches. 1st newsletter + first alias. 2nd last alias
assert len(dispatcher.events) == 2
decoded_event = event_pb2.Event.FromString(dispatcher.events[0])
assert decoded_event.user_id == user.id
assert decoded_event.external_user_id == partner_user.external_user_id
event_list = decoded_event.content.alias_create_list.events
assert len(event_list) == 2
# 0 is newsletter alias
assert event_list[1].alias_id == aliases[0].id
decoded_event = event_pb2.Event.FromString(dispatcher.events[1])
assert decoded_event.user_id == user.id
assert decoded_event.external_user_id == partner_user.external_user_id
event_list = decoded_event.content.alias_create_list.events
assert len(event_list) == 1
assert event_list[0].alias_id == aliases[1].id

View File

@ -1,5 +1,5 @@
 from app.db import Session
-from app.models import Alias, Mailbox, AliasMailbox, User
+from app.models import Alias, Mailbox, AliasMailbox
 from tests.utils import create_new_user, random_email
@ -15,17 +15,3 @@ def test_duplicated_mailbox_is_returned_only_once():
     alias_mailbox_id = [mailbox.id for mailbox in alias_mailboxes]
     assert user.default_mailbox_id in alias_mailbox_id
     assert other_mailbox.id in alias_mailbox_id
-def test_alias_create_from_partner_flags_also_the_user():
-    user = create_new_user()
-    Session.flush()
-    email = random_email()
-    alias = Alias.create(
-        user_id=user.id,
-        email=email,
-        mailbox_id=user.default_mailbox_id,
-        flags=Alias.FLAG_PARTNER_CREATED,
-        flush=True,
-    )
-    assert alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER > 0

View File

@ -1,35 +0,0 @@
import tempfile
from io import BytesIO
import arrow
from app import s3, config
from app.models import File, BatchImport
from tasks.cleanup_old_imports import cleanup_old_imports
from tests.utils import random_token, create_new_user
def test_cleanup_old_imports():
BatchImport.filter().delete()
with tempfile.TemporaryDirectory() as tmpdir:
config.UPLOAD_DIR = tmpdir
user = create_new_user()
path = random_token()
s3.upload_from_bytesio(path, BytesIO("data".encode("utf-8")))
file = File.create(path=path, commit=True) # noqa: F821
now = arrow.now()
delete_batch_import_id = BatchImport.create(
user_id=user.id,
file_id=file.id,
created_at=now.shift(minutes=-1),
flush=True,
).id
keep_batch_import_id = BatchImport.create(
user_id=user.id,
file_id=file.id,
created_at=now.shift(minutes=+1),
commit=True,
).id
cleanup_old_imports(now)
assert BatchImport.get(id=delete_batch_import_id) is None
assert BatchImport.get(id=keep_batch_import_id) is not None

View File

@ -1,72 +0,0 @@
import arrow
from app import config
from app.models import Job, JobState
from tasks.cleanup_old_jobs import cleanup_old_jobs
def test_cleanup_old_jobs():
Job.filter().delete()
now = arrow.now()
delete_ids = [
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.done.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.error.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS,
name="",
payload="",
flush=True,
).id,
]
keep_ids = [
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.done.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.error.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS - 1,
name="",
payload="",
flush=True,
).id,
]
cleanup_old_jobs(now)
for delete_id in delete_ids:
assert Job.get(id=delete_id) is None
for keep_id in keep_ids:
assert Job.get(id=keep_id) is not None

View File

@ -1,26 +0,0 @@
import arrow
from app.models import Notification
from tasks.cleanup_old_notifications import cleanup_old_notifications
from tests.utils import create_new_user
def test_cleanup_old_notifications():
Notification.filter().delete()
user = create_new_user()
now = arrow.now()
delete_id = Notification.create(
user_id=user.id,
created_at=now.shift(minutes=-1),
message="",
flush=True,
).id
keep_id = Notification.create(
user_id=user.id,
created_at=now.shift(minutes=+1),
message="",
flush=True,
).id
cleanup_old_notifications(now)
assert Notification.get(id=delete_id) is None
assert Notification.get(id=keep_id) is not None

View File

@ -51,7 +51,9 @@ FACEBOOK_CLIENT_SECRET=to_fill
 # Login with OIDC
 CONNECT_WITH_OIDC_ICON=fa-github
-OIDC_WELL_KNOWN_URL=to_fill
+OIDC_AUTHORIZATION_URL=to_fill
+OIDC_USER_INFO_URL=to_fill
+OIDC_TOKEN_URL=to_fill
 OIDC_SCOPES=openid email profile
 OIDC_NAME_FIELD=name
 OIDC_CLIENT_ID=to_fill
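
For context on this hunk: on the master side the three OIDC endpoints are no longer configured individually but are discovered from a single well-known URL. Below is a minimal sketch of that discovery step, not SimpleLogin's actual code; it only assumes the standard discovery keys that `mock_well_known_response` uses in the OIDC tests above.

```
# Minimal sketch (hypothetical helper, not the app's implementation) of OIDC
# endpoint discovery from a well-known URL, using the same keys as the mocked
# discovery document in the tests: authorization_endpoint, token_endpoint,
# userinfo_endpoint.
import requests


def discover_oidc_endpoints(well_known_url: str) -> dict:
    doc = requests.get(well_known_url, timeout=10).json()
    return {
        "authorization_url": doc["authorization_endpoint"],
        "token_url": doc["token_endpoint"],
        "user_info_url": doc["userinfo_endpoint"],
    }
```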

View File

@ -384,30 +384,3 @@ def test_break_loop_alias_as_mailbox(flask_client):
     msg[headers.SUBJECT] = random_string()
     result = email_handler.handle(envelope, msg)
     assert result == status.E525
-@mail_sender.store_emails_test_decorator
-def test_preserve_headers(flask_client):
-    headers_to_keep = [
-        headers.SUBJECT,
-        headers.DATE,
-        headers.MESSAGE_ID,
-        headers.REFERENCES,
-        headers.IN_REPLY_TO,
-        headers.SL_QUEUE_ID,
-    ] + headers.MIME_HEADERS
-    user = create_new_user()
-    alias = Alias.create_new_random(user)
-    envelope = Envelope()
-    envelope.mail_from = "somewhere@lo.cal"
-    envelope.rcpt_tos = [alias.email]
-    msg = EmailMessage()
-    for header in headers_to_keep:
-        msg[header] = header + "keep"
-    result = email_handler.handle(envelope, msg)
-    assert result == status.E200
-    sent_mails = mail_sender.get_stored_emails()
-    assert len(sent_mails) == 1
-    msg = sent_mails[0].msg
-    for header in headers_to_keep:
-        assert msg[header] == header + "keep"

View File

@ -17,7 +17,6 @@ from app.models import (
     Subscription,
     PlanEnum,
     PADDLE_SUBSCRIPTION_GRACE_DAYS,
-    SyncEvent,
 )
 from tests.utils import login, create_new_user, random_token
@ -326,51 +325,3 @@ def test_user_can_send_receive():
     user.disabled = False
     user.delete_on = arrow.now()
     assert not user.can_send_or_receive()
-def test_sync_event_dead_letter():
-    # remove all SyncEvents before the test
-    all_events = SyncEvent.all()
-    for event in all_events:
-        SyncEvent.delete(event.id, commit=True)
-    # create an expired not taken event
-    e1 = SyncEvent.create(
-        content=b"content",
-        created_at=arrow.now().shift(minutes=-15),
-        taken_time=None,
-        commit=True,
-    )
-    # create an expired taken event (but too long ago)
-    e2 = SyncEvent.create(
-        content=b"content",
-        created_at=arrow.now().shift(minutes=-15),
-        taken_time=arrow.now().shift(minutes=-14),
-        commit=True,
-    )
-    # create an expired taken event (but recently)
-    e3 = SyncEvent.create(
-        content=b"content",
-        created_at=arrow.now().shift(minutes=-15),
-        taken_time=arrow.now().shift(minutes=-1),
-        commit=True,
-    )
-    # create a normal event
-    e4 = SyncEvent.create(
-        content=b"content",
-        created_at=arrow.now(),
-        commit=True,
-    )
-    # get dead letter events
-    dead_letter_events = SyncEvent.get_dead_letter(
-        older_than=arrow.now().shift(minutes=-10)
-    )
-    assert len(dead_letter_events) == 2
-    assert e1 in dead_letter_events
-    assert e2 in dead_letter_events
-    assert e3 not in dead_letter_events
-    assert e4 not in dead_letter_events
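
The removed test above pins down the dead-letter semantics: an event is reclaimable when it is older than the cutoff and either was never taken, or was taken too long ago without completing. Below is a speculative SQLAlchemy sketch of such a filter; the real `SyncEvent.get_dead_letter` is not shown in this diff and may differ.

```
# Speculative sketch only, not the actual SyncEvent.get_dead_letter. Assumes a
# SQLAlchemy session and a SyncEvent model with created_at / taken_time columns,
# as exercised by the test above.
from sqlalchemy import and_, or_


def get_dead_letter_events(session, SyncEvent, older_than):
    return (
        session.query(SyncEvent)
        .filter(
            or_(
                # taken long ago but never processed to completion
                and_(SyncEvent.taken_time.isnot(None), SyncEvent.taken_time < older_than),
                # never taken and already older than the cutoff
                and_(SyncEvent.taken_time.is_(None), SyncEvent.created_at < older_than),
            )
        )
        .all()
    )
```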

View File

@ -9,8 +9,7 @@ from typing import Optional, Dict
 import jinja2
 from flask import url_for
-from app.models import User, PartnerUser
-from app.proton.utils import get_proton_partner
+from app.models import User
 from app.utils import random_string
@ -31,18 +30,6 @@ def create_new_user(email: Optional[str] = None, name: Optional[str] = None) ->
     return user
-def create_partner_linked_user() -> tuple[User, PartnerUser]:
-    user = create_new_user()
-    partner_user = PartnerUser.create(
-        partner_id=get_proton_partner().id,
-        user_id=user.id,
-        external_user_id=random_token(10),
-        flush=True,
-    )
-    return user, partner_user
 def login(flask_client, user: Optional[User] = None) -> User:
     if not user:
         user = create_new_user()