Compare commits

master..v4.37.2

No commits in common. "master" and "v4.37.2" have entirely different histories.

268 changed files with 4222 additions and 10409 deletions


@ -1,6 +1,7 @@
name: Test and lint
on: [push, pull_request]
on:
push:
jobs:
lint:
@ -109,7 +110,7 @@ jobs:
GITHUB_ACTIONS_TEST: true
- name: Archive code coverage results
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: code-coverage-report
path: htmlcov
@ -138,12 +139,6 @@ jobs:
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Create Sentry release
uses: getsentry/action-release@v1
env:
@ -163,7 +158,6 @@ jobs:
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}

.gitignore vendored

@ -11,6 +11,7 @@ db.sqlite-journal
static/upload
venv/
.venv
.python-version
.coverage
htmlcov
adhoc


@ -8,7 +8,7 @@ repos:
- id: check-yaml
- id: trailing-whitespace
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.34.1
rev: v1.3.0
hooks:
- id: djlint-jinja
files: '.*\.html'
@ -22,3 +22,4 @@ repos:
args: [ --fix ]
# Run the formatter.
- id: ruff-format


@ -20,15 +20,15 @@ SimpleLogin backend consists of 2 main components:
## Install dependencies
The project requires:
- Python 3.10 and poetry to manage dependencies
- Python 3.7+ and [poetry](https://python-poetry.org/) to manage dependencies
- Node v10 for front-end.
- Postgres 13+
- Postgres 12+
First, install all dependencies by running the following command.
Feel free to use `virtualenv` or similar tools to isolate development environment.
```bash
poetry sync
poetry install
```
On Mac, sometimes you might need to install some other packages via `brew`:
@ -68,12 +68,6 @@ For most tests, you will need to have ``redis`` installed and started on your ma
sh scripts/run-test.sh
```
You can also run tests using a local Postgres DB to speed things up. This can be done by
- creating an empty test DB and running the database migration by `dropdb test && createdb test && DB_URI=postgresql://localhost:5432/test alembic upgrade head`
- replacing the `DB_URI` in `test.env` file by `DB_URI=postgresql://localhost:5432/test`
## Run the code locally
Install npm packages
@ -157,10 +151,10 @@ Here are the small sum-ups of the directory structures and their roles:
## Pull request
The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
The code is formatted using https://github.com/psf/black, to format the code, simply run
```
poetry run ruff format .
poetry run black .
```
The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by
@ -223,31 +217,6 @@ Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you sho
## Job runner
Some features require a job handler (such as GDPR data export). To test such feature you need to run the job_runner
```bash
python job_runner.py
```
# Setup for Mac
There are several ways to setup Python and manage the project dependencies on Mac. For info we have successfully used this setup on a Mac silicon:
```bash
# we haven't managed to make python 3.12 work
brew install python3.10
# make sure to update the PATH so python, pip point to Python3
# for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile
# Although pipx is the recommended way to install poetry,
# install pipx via brew will automatically install python 3.12
# and poetry will then use python 3.12
# so we recommend using poetry this way instead
curl -sSL https://install.python-poetry.org | python3 -
poetry install
# activate the virtualenv and you should be good to go!
source .venv/bin/activate
```


@ -541,7 +541,7 @@ exit
Once you've created all your desired login accounts, add these lines to `/simplelogin.env` to disable further registrations:
```.env
```
DISABLE_REGISTRATION=1
DISABLE_ONBOARDING=true
```


@ -168,8 +168,6 @@ class NewUserStrategy(ClientMergeStrategy):
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
# IF it was scheduled to be deleted. Unschedule it.
self.user.delete_on = None
partner_user = ensure_partner_user_exists_for_user(
self.link_request, self.user, self.partner
)
@ -248,8 +246,6 @@ def link_user(
) -> LinkResult:
# Sanitize email just in case
link_request.email = sanitize_email(link_request.email)
# If it was scheduled to be deleted. Unschedule it.
current_user.delete_on = None
partner_user = ensure_partner_user_exists_for_user(
link_request, current_user, partner
)


@ -1,10 +1,7 @@
from __future__ import annotations
from typing import Optional
import arrow
import sqlalchemy
from flask_admin import BaseView
from flask_admin.form import SecureForm
from flask_admin.model.template import EndpointLinkRowAction
from markupsafe import Markup
@ -30,27 +27,10 @@ from app.models import (
Alias,
Newsletter,
PADDLE_SUBSCRIPTION_GRACE_DAYS,
Mailbox,
DeletedAlias,
DomainDeletedAlias,
PartnerUser,
)
from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_date_formatter(view, context, model, name):
return model.created_at.format()
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
class SLModelView(sqla.ModelView):
column_default_sort = ("id", True)
column_display_pk = True
@ -66,8 +46,7 @@ class SLModelView(sqla.ModelView):
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
return redirect(url_for("auth.login", next=request.url))
def on_model_change(self, form, model, is_created):
changes = {}
@ -115,8 +94,11 @@ class SLAdminIndexView(AdminIndexView):
return redirect("/admin/user")
def _user_upgrade_channel_formatter(view, context, model, name):
return Markup(model.upgrade_channel)
class UserAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["email", "id"]
column_exclude_list = [
"salt",
@ -135,8 +117,6 @@ class UserAdmin(SLModelView):
column_formatters = {
"upgrade_channel": _user_upgrade_channel_formatter,
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action(
@ -234,20 +214,6 @@ class UserAdmin(SLModelView):
Session.commit()
@action(
"remove trial",
"Stop trial period",
"Remove trial for this user?",
)
def stop_trial(self, ids):
for user in User.filter(User.id.in_(ids)):
user.trial_end = None
flash(f"Stopped trial for {user}", "success")
AdminAuditLog.stop_trial(current_user.id, user.id)
Session.commit()
@action(
"disable_otp_fido",
"Disable OTP & FIDO",
@ -363,29 +329,17 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
class EmailLogAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id"]
column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"]
can_edit = False
can_create = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class AliasAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "email", "mailbox.email"]
column_filters = ["id", "user.email", "email", "mailbox.email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action(
"disable_email_spoofing_check",
"Disable email spoofing protection",
@ -408,15 +362,9 @@ class AliasAdmin(SLModelView):
class MailboxAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "email"]
column_filters = ["id", "user.email", "email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
# class LifetimeCouponAdmin(SLModelView):
# can_edit = True
@ -424,26 +372,14 @@ class MailboxAdmin(SLModelView):
class CouponAdmin(SLModelView):
form_base_class = SecureForm
can_edit = False
can_create = True
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class ManualSubscriptionAdmin(SLModelView):
form_base_class = SecureForm
can_edit = True
column_searchable_list = ["id", "user.email"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
@action(
"extend_1y",
"Extend for 1 year",
@ -482,27 +418,15 @@ class ManualSubscriptionAdmin(SLModelView):
class CustomDomainAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["domain", "user.email", "user.id"]
column_exclude_list = ["ownership_txt_token"]
can_edit = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
class ReferralAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.email", "code", "name"]
column_filters = ["id", "user.email", "code", "name"]
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
}
def scaffold_list_columns(self):
ret = super().scaffold_list_columns()
ret.insert(0, "nb_user")
@ -518,8 +442,16 @@ class ReferralAdmin(SLModelView):
# can_delete = True
def _admin_action_formatter(view, context, model, name):
action_name = AuditLogActionEnum.get_name(model.action)
return "{} ({})".format(action_name, model.action)
def _admin_created_at_formatter(view, context, model, name):
return model.created_at.format()
class AdminAuditLogAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"]
column_filters = ["admin.id", "admin.email", "model_id", "created_at"]
column_exclude_list = ["id"]
@ -530,8 +462,7 @@ class AdminAuditLogAdmin(SLModelView):
column_formatters = {
"action": _admin_action_formatter,
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
"created_at": _admin_created_at_formatter,
}
@ -551,7 +482,6 @@ def _transactionalcomplaint_refused_email_id_formatter(view, context, model, nam
class ProviderComplaintAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id", "user.id", "created_at"]
column_filters = ["user.id", "state"]
column_hide_backrefs = False
@ -560,8 +490,8 @@ class ProviderComplaintAdmin(SLModelView):
can_delete = False
column_formatters = {
"created_at": _admin_date_formatter,
"updated_at": _admin_date_formatter,
"created_at": _admin_created_at_formatter,
"updated_at": _admin_created_at_formatter,
"state": _transactionalcomplaint_state_formatter,
"phase": _transactionalcomplaint_phase_formatter,
"refused_email": _transactionalcomplaint_refused_email_id_formatter,
@ -622,7 +552,6 @@ def _newsletter_html_formatter(view, context, model: Newsletter, name):
class NewsletterAdmin(SLModelView):
form_base_class = SecureForm
list_template = "admin/model/newsletter-list.html"
edit_template = "admin/model/newsletter-edit.html"
edit_modal = False
@ -704,7 +633,6 @@ class NewsletterAdmin(SLModelView):
class NewsletterUserAdmin(SLModelView):
form_base_class = SecureForm
column_searchable_list = ["id"]
column_filters = ["id", "user.email", "newsletter.subject"]
column_exclude_list = ["created_at", "updated_at", "id"]
@ -714,112 +642,17 @@ class NewsletterUserAdmin(SLModelView):
class DailyMetricAdmin(SLModelView):
form_base_class = SecureForm
column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True
class MetricAdmin(SLModelView):
form_base_class = SecureForm
column_exclude_list = ["created_at", "updated_at", "id"]
can_export = True
class InvalidMailboxDomainAdmin(SLModelView):
form_base_class = SecureForm
can_create = True
can_delete = True
class EmailSearchResult:
no_match: bool = True
alias: Optional[Alias] = None
mailbox: list[Mailbox] = []
mailbox_count: int = 0
deleted_alias: Optional[DeletedAlias] = None
deleted_custom_alias: Optional[DomainDeletedAlias] = None
user: Optional[User] = None
@staticmethod
def from_email(email: str) -> EmailSearchResult:
output = EmailSearchResult()
alias = Alias.get_by(email=email)
if alias:
output.alias = alias
output.no_match = False
user = User.get_by(email=email)
if user:
output.user = user
output.no_match = False
mailboxes = (
Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
)
if mailboxes:
output.mailbox = mailboxes
output.mailbox_count = Mailbox.filter_by(email=email).count()
output.no_match = False
deleted_alias = DeletedAlias.get_by(email=email)
if deleted_alias:
output.deleted_alias = deleted_alias
output.no_match = False
domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
if domain_deleted_alias:
output.domain_deleted_alias = domain_deleted_alias
output.no_match = False
return output
class EmailSearchHelpers:
@staticmethod
def mailbox_list(user: User) -> list[Mailbox]:
return (
Mailbox.filter_by(user_id=user.id)
.order_by(Mailbox.id.asc())
.limit(10)
.all()
)
@staticmethod
def mailbox_count(user: User) -> int:
return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()
@staticmethod
def alias_list(user: User) -> list[Alias]:
return (
Alias.filter_by(user_id=user.id).order_by(Alias.id.desc()).limit(10).all()
)
@staticmethod
def alias_count(user: User) -> int:
return Alias.filter_by(user_id=user.id).count()
@staticmethod
def partner_user(user: User) -> Optional[PartnerUser]:
return PartnerUser.get_by(user_id=user.id)
class EmailSearchAdmin(BaseView):
def is_accessible(self):
return current_user.is_authenticated and current_user.is_admin
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
@expose("/", methods=["GET", "POST"])
def index(self):
search = EmailSearchResult()
email = request.args.get("email")
if email is not None and len(email) > 0:
email = email.strip()
search = EmailSearchResult.from_email(email)
return self.render(
"admin/email_search.html",
email=email,
data=search,
helper=EmailSearchHelpers,
)


@ -64,12 +64,8 @@ def verify_prefix_suffix(
# SimpleLogin domain case:
# 1) alias_suffix must start with "." and
# 2) alias_domain_prefix must come from the word list
available_sl_domains = [
sl_domain.domain
for sl_domain in user.get_sl_domains(alias_options=alias_options)
]
if (
alias_domain in available_sl_domains
alias_domain in user.available_sl_domains(alias_options=alias_options)
and alias_domain not in user_custom_domains
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
and not config.DISABLE_ALIAS_SUFFIX
@ -84,7 +80,9 @@ def verify_prefix_suffix(
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False
if alias_domain not in available_sl_domains:
if alias_domain not in user.available_sl_domains(
alias_options=alias_options
):
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False


@ -25,16 +25,9 @@ from app.email_utils import (
render,
)
from app.errors import AliasInTrashError
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import (
AliasDeleted,
AliasStatusChanged,
EventContent,
)
from app.log import LOG
from app.models import (
Alias,
AliasDeleteReason,
CustomDomain,
Directory,
User,
@ -63,16 +56,12 @@ def get_user_if_alias_would_auto_create(
# Prevent addresses with unicode characters (🤯) in them for now.
validate_email(address, check_deliverability=False, allow_smtputf8=False)
except EmailNotValidError:
LOG.i(f"Not creating alias for {address} because email is invalid")
return None
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
address, notify_user=notify_user
)
if DomainDeletedAlias.get_by(email=address):
LOG.i(
f"Not creating alias for {address} because it was previously deleted for this domain"
)
return None
if domain_and_rule:
return domain_and_rule[0].user
@ -97,9 +86,6 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain)
if not custom_domain:
LOG.i(
f"Cannot auto-create custom domain alias for {address} because there's no custom domain for {alias_domain}"
)
return None
user: User = custom_domain.user
@ -115,9 +101,6 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
if not custom_domain.catch_all:
if len(custom_domain.auto_create_rules) == 0:
LOG.i(
f"Cannot create alias {address} for domain {custom_domain} because it has no catch-all and no rules"
)
return None
local = get_email_local_part(address)
@ -131,7 +114,7 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
)
return custom_domain, rule
else: # no rule passes
LOG.d(f"No rule matches auto-create {address} for domain {custom_domain}")
LOG.d("no rule passed to create %s", local)
return None
LOG.d("Create alias via catchall")
@ -158,7 +141,6 @@ def check_if_alias_can_be_auto_created_for_a_directory(
sep = "#"
else:
# if there's no directory separator in the alias, no way to auto-create it
LOG.info(f"Cannot auto-create {address} since it has no directory separator")
return None
directory_name = address[: address.find(sep)]
@ -166,9 +148,6 @@ def check_if_alias_can_be_auto_created_for_a_directory(
directory = Directory.get_by(name=directory_name)
if not directory:
LOG.info(
f"Cannot auto-create {address} because there is no directory for {directory_name}"
)
return None
user: User = directory.user
@ -177,17 +156,12 @@ def check_if_alias_can_be_auto_created_for_a_directory(
return None
if not user.can_create_new_alias():
LOG.d(
f"{user} can't create new directory alias {address} because user cannot create aliases"
)
LOG.d(f"{user} can't create new directory alias {address}")
if notify_user:
send_cannot_create_directory_alias(user, address, directory_name)
return None
if directory.disabled:
LOG.d(
f"{user} can't create new directory alias {address} bcause directory is disabled"
)
if notify_user:
send_cannot_create_directory_alias_disabled(user, address, directory_name)
return None
@ -329,52 +303,36 @@ def try_auto_create_via_domain(address: str) -> Optional[Alias]:
return None
def delete_alias(
alias: Alias,
user: User,
reason: AliasDeleteReason = AliasDeleteReason.Unspecified,
commit: bool = False,
):
def delete_alias(alias: Alias, user: User):
"""
Delete an alias and add it to either global or domain trash
Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
"""
LOG.i(f"User {user} has deleted alias {alias}")
# save deleted alias to either global or domain tra
# save deleted alias to either global or domain trash
if alias.custom_domain_id:
if not DomainDeletedAlias.get_by(
email=alias.email, domain_id=alias.custom_domain_id
):
domain_deleted_alias = DomainDeletedAlias(
LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
Session.add(
DomainDeletedAlias(
user_id=user.id,
email=alias.email,
domain_id=alias.custom_domain_id,
reason=reason,
)
Session.add(domain_deleted_alias)
)
Session.commit()
LOG.i(
f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
)
else:
if not DeletedAlias.get_by(email=alias.email):
deleted_alias = DeletedAlias(email=alias.email, reason=reason)
Session.add(deleted_alias)
LOG.d("add %s to global trash", alias)
Session.add(DeletedAlias(email=alias.email))
Session.commit()
LOG.i(f"Moving {alias} to global trash {deleted_alias}")
alias_id = alias.id
alias_email = alias.email
LOG.i("delete alias %s", alias)
Alias.filter(Alias.id == alias.id).delete()
Session.commit()
EventDispatcher.send_event(
user,
EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
)
if commit:
Session.commit()
def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
"""
@ -484,12 +442,10 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
user=old_user,
alias=alias,
),
render(
"transactional/alias-transferred.html",
user=old_user,
alias=alias,
),
)
@ -502,19 +458,3 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
alias.pinned = False
Session.commit()
def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
LOG.i(f"Changing alias {alias} enabled to {enabled}")
alias.enabled = enabled
event = AliasStatusChanged(
id=alias.id,
email=alias.email,
enabled=enabled,
created_at=int(alias.created_at.timestamp),
)
EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
if commit:
Session.commit()


@ -19,9 +19,6 @@ def authorize_request() -> Optional[Tuple[str, int]]:
if not api_key:
if current_user.is_authenticated:
# if current_user.is_authenticated and request.headers.get(
# constants.HEADER_ALLOW_API_COOKIES
# ):
g.user = current_user
else:
return jsonify(error="Wrong api key"), 401
@ -36,9 +33,6 @@ def authorize_request() -> Optional[Tuple[str, int]]:
if g.user.disabled:
return jsonify(error="Disabled account"), 403
if not g.user.is_active():
return jsonify(error="Account does not exist"), 401
g.api_key = api_key
return None


@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
q = q.order_by(Alias.pinned.desc())
q = q.order_by(latest_activity.desc())
q = q.limit(page_limit).offset(page_id * page_size)
q = list(q.limit(page_limit).offset(page_id * page_size))
ret = []
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
ret.append(
AliasInfo(
alias=alias,
@ -358,6 +358,7 @@ def construct_alias_query(user: User):
else_=0,
)
).label("nb_forward"),
func.max(EmailLog.created_at).label("latest_email_log_created_at"),
)
.join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
.filter(Alias.user_id == user.id)
@ -365,6 +366,14 @@ def construct_alias_query(user: User):
.subquery()
)
alias_contact_subquery = (
Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
.filter(Alias.user_id == user.id)
.group_by(Alias.id)
.subquery()
)
return (
Session.query(
Alias,
@ -376,7 +385,23 @@ def construct_alias_query(user: User):
)
.options(joinedload(Alias.hibp_breaches))
.options(joinedload(Alias.custom_domain))
.join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
.join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
.join(Contact, Alias.id == Contact.alias_id, isouter=True)
.join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
.filter(Alias.id == alias_activity_subquery.c.id)
.filter(Alias.id == alias_contact_subquery.c.id)
.filter(
or_(
EmailLog.created_at
== alias_activity_subquery.c.latest_email_log_created_at,
and_(
# no email log yet for this alias
alias_activity_subquery.c.latest_email_log_created_at.is_(None),
# to make sure only 1 contact is returned in this case
or_(
Contact.id == alias_contact_subquery.c.max_contact_id,
alias_contact_subquery.c.max_contact_id.is_(None),
),
),
)
)
)


@ -25,8 +25,7 @@ from app.errors import (
ErrAddressInvalid,
)
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasMailbox, AliasDeleteReason
from app.models import Alias, Contact, Mailbox, AliasMailbox
@deprecated
@ -161,7 +160,7 @@ def delete_alias(alias_id):
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_utils.delete_alias(alias, user, AliasDeleteReason.ManualAction)
alias_utils.delete_alias(alias, user)
return jsonify(deleted=True), 200
@ -185,8 +184,7 @@ def toggle_alias(alias_id):
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_utils.change_alias_status(alias, enabled=not alias.enabled)
LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
alias.enabled = not alias.enabled
Session.commit()
return jsonify(enabled=alias.enabled), 200
@ -424,7 +422,7 @@ def create_contact_route(alias_id):
contact_address = data.get("contact")
try:
contact = create_contact(alias, contact_address)
contact = create_contact(g.user, alias, contact_address)
except ErrContactErrorUpgradeNeeded as err:
return jsonify(error=err.error_for_user()), 403
except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:


@ -17,14 +17,9 @@ from app.models import PlanEnum, AppleSubscription
_MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
_YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"
# SL Mac app used to be in SL account
_MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
_MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"
# SL Mac app is moved to Proton account
_MACAPP_MONTHLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.monthly"
_MACAPP_YEARLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.yearly"
# Apple API URL
_SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
_PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
@ -268,11 +263,7 @@ def apple_update_notification():
plan = (
PlanEnum.monthly
if transaction["product_id"]
in (
_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
)
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
else PlanEnum.yearly
)
@ -526,11 +517,7 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
plan = (
PlanEnum.monthly
if latest_transaction["product_id"]
in (
_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID,
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
)
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
else PlanEnum.yearly
)


@ -11,7 +11,7 @@ from itsdangerous import Signer
from app import email_utils
from app.api.base import api_bp
from app.config import FLASK_SECRET, DISABLE_REGISTRATION
from app.dashboard.views.account_setting import send_reset_password_email
from app.dashboard.views.setting import send_reset_password_email
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
@ -129,8 +129,8 @@ def auth_register():
send_email(
email,
"Just one more step to join SimpleLogin",
render("transactional/code-activation.txt.jinja2", user=user, code=code),
render("transactional/code-activation.html", user=user, code=code),
render("transactional/code-activation.txt.jinja2", code=code),
render("transactional/code-activation.html", code=code),
)
RegisterEvent(RegisterEvent.ActionType.success, RegisterEvent.Source.api).send()
@ -226,8 +226,8 @@ def auth_reactivate():
send_email(
email,
"Just one more step to join SimpleLogin",
render("transactional/code-activation.txt.jinja2", user=user, code=code),
render("transactional/code-activation.html", user=user, code=code),
render("transactional/code-activation.txt.jinja2", code=code),
render("transactional/code-activation.html", code=code),
)
return jsonify(msg="User needs to confirm their account"), 200


@ -1,18 +1,22 @@
from smtplib import SMTPRecipientsRefused
import arrow
from flask import g
from flask import jsonify
from flask import request
from app import mailbox_utils
from app.api.base import api_bp, require_api_auth
from app.config import JOB_DELETE_MAILBOX
from app.dashboard.views.mailbox import send_verification_email
from app.dashboard.views.mailbox_detail import verify_mailbox_change
from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
)
from app.models import Mailbox
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email
@ -40,10 +44,26 @@ def create_mailbox():
user = g.user
mailbox_email = sanitize_email(request.get_json().get("email"))
try:
new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
except mailbox_utils.MailboxError as e:
return jsonify(error=e.msg), 400
if not user.is_premium():
return jsonify(error="Only premium plan can add additional mailbox"), 400
if not is_valid_email(mailbox_email):
return jsonify(error=f"{mailbox_email} invalid"), 400
elif mailbox_already_used(mailbox_email, user):
return jsonify(error=f"{mailbox_email} already used"), 400
elif not email_can_be_used_as_mailbox(mailbox_email):
return (
jsonify(
error=f"{mailbox_email} cannot be used. Please note a mailbox cannot "
f"be a disposable email address"
),
400,
)
else:
new_mailbox = Mailbox.create(email=mailbox_email, user_id=user.id)
Session.commit()
send_verification_email(user, new_mailbox)
return (
jsonify(mailbox_to_dict(new_mailbox)),
@ -66,17 +86,47 @@ def delete_mailbox(mailbox_id):
"""
user = g.user
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
return jsonify(error="Forbidden"), 403
if mailbox.id == user.default_mailbox_id:
return jsonify(error="You cannot delete the default mailbox"), 400
data = request.get_json() or {}
transfer_mailbox_id = data.get("transfer_aliases_to")
if transfer_mailbox_id and int(transfer_mailbox_id) >= 0:
transfer_mailbox_id = int(transfer_mailbox_id)
else:
transfer_mailbox_id = None
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
try:
mailbox_utils.delete_mailbox(user, mailbox_id, transfer_mailbox_id)
except mailbox_utils.MailboxError as e:
return jsonify(error=e.msg), 400
if not transfer_mailbox or transfer_mailbox.user_id != user.id:
return (
jsonify(error="You must transfer the aliases to a mailbox you own."),
403,
)
if transfer_mailbox_id == mailbox_id:
return (
jsonify(
error="You can not transfer the aliases to the mailbox you want to delete."
),
400,
)
if not transfer_mailbox.verified:
return jsonify(error="Your new mailbox is not verified"), 400
# Schedule delete account job
LOG.w("schedule delete mailbox job for %s", mailbox)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id,
},
run_at=arrow.now(),
commit=True,
)
return jsonify(deleted=True), 200


@ -10,7 +10,6 @@ from app.api.base import api_bp, require_api_auth
from app.config import SESSION_COOKIE_NAME
from app.dashboard.views.index import get_stats
from app.db import Session
from app.image_validation import detect_image_format, ImageFormat
from app.models import ApiKey, File, PartnerUser, User
from app.proton.utils import get_proton_partner
from app.session import logout_session
@ -79,6 +78,7 @@ def update_user_info():
data = request.get_json() or {}
if "profile_picture" in data:
if data["profile_picture"] is None:
if user.profile_picture_id:
file = user.profile_picture
user.profile_picture_id = None
@ -89,8 +89,6 @@ def update_user_info():
Session.flush()
else:
raw_data = base64.decodebytes(data["profile_picture"].encode())
if detect_image_format(raw_data) == ImageFormat.Unknown:
return jsonify(error="Unsupported image format"), 400
file_path = random_string(30)
file = File.create(user_id=user.id, path=file_path)
Session.flush()


@ -16,7 +16,6 @@ from .views import (
social,
recovery,
api_to_cookie,
oidc,
)
__all__ = [
@ -37,5 +36,4 @@ __all__ = [
"social",
"recovery",
"api_to_cookie",
"oidc",
]


@ -3,13 +3,10 @@ from flask_login import login_user
from app.auth.base import auth_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import EmailChange, ResetPasswordCode
@auth_bp.route("/change_email", methods=["GET", "POST"])
@limiter.limit("3/hour")
def change_email():
code = request.args.get("code")
@ -25,14 +22,12 @@ def change_email():
return render_template("auth/change_email.html")
user = email_change.user
old_email = user.email
user.email = email_change.new_email
EmailChange.delete(email_change.id)
ResetPasswordCode.filter_by(user_id=user.id).delete()
Session.commit()
LOG.i(f"User {user} has changed their email from {old_email} to {user.email}")
flash("Your new email has been updated", "success")
login_user(user)


@ -3,7 +3,7 @@ from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.dashboard.views.account_setting import send_reset_password_email
from app.dashboard.views.setting import send_reset_password_email
from app.extensions import limiter
from app.log import LOG
from app.models import User


@ -7,7 +7,7 @@ from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
from app.db import Session
from app.log import LOG
from app.models import User, File, SocialAuth
from app.utils import random_string, sanitize_email, sanitize_next_url
from app.utils import random_string, sanitize_email
from .login_utils import after_login
_authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
@ -29,7 +29,7 @@ def google_login():
# to avoid flask-login displaying the login error message
session.pop("_flashes", None)
next_url = sanitize_next_url(request.args.get("next"))
next_url = request.args.get("next")
# Google does not allow to append param to redirect_url
# we need to pass the next url by session


@ -5,7 +5,7 @@ from wtforms import StringField, validators
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON, OIDC_CLIENT_ID
from app.config import CONNECT_WITH_PROTON
from app.events.auth_event import LoginEvent
from app.extensions import limiter
from app.log import LOG
@ -77,6 +77,4 @@ def login():
next_url=next_url,
show_resend_activation=show_resend_activation,
connect_with_proton=CONNECT_WITH_PROTON,
connect_with_oidc=OIDC_CLIENT_ID is not None,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)


@ -1,135 +0,0 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session
import requests
from app import config
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import (
URL,
OIDC_SCOPES,
OIDC_NAME_FIELD,
)
from app.db import Session
from app.email_utils import send_welcome_email
from app.log import LOG
from app.models import User, SocialAuth
from app.utils import sanitize_email, sanitize_next_url
# need to set explicitly redirect_uri instead of leaving the lib to pre-fill redirect_uri
# when served behind nginx, the redirect_uri is localhost... and not the real url
redirect_uri = URL + "/auth/oidc/callback"
SESSION_STATE_KEY = "oauth_state"
SESSION_NEXT_KEY = "oauth_redirect_next"
@auth_bp.route("/oidc/login")
def oidc_login():
if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
return redirect(url_for("auth.login"))
next_url = sanitize_next_url(request.args.get("next"))
auth_url = requests.get(config.OIDC_WELL_KNOWN_URL).json()["authorization_endpoint"]
oidc = OAuth2Session(
config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri
)
authorization_url, state = oidc.authorization_url(auth_url)
# State is used to prevent CSRF, keep this for later.
session[SESSION_STATE_KEY] = state
session[SESSION_NEXT_KEY] = next_url
return redirect(authorization_url)
@auth_bp.route("/oidc/callback")
def oidc_callback():
if SESSION_STATE_KEY not in session:
flash("Invalid state, please retry", "error")
return redirect(url_for("auth.login"))
if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
return redirect(url_for("auth.login"))
# user clicks on cancel
if "error" in request.args:
flash("Please use another sign in method then", "warning")
return redirect("/")
oidc_configuration = requests.get(config.OIDC_WELL_KNOWN_URL).json()
user_info_url = oidc_configuration["userinfo_endpoint"]
token_url = oidc_configuration["token_endpoint"]
oidc = OAuth2Session(
config.OIDC_CLIENT_ID,
state=session[SESSION_STATE_KEY],
scope=[OIDC_SCOPES],
redirect_uri=redirect_uri,
)
oidc.fetch_token(
token_url,
client_secret=config.OIDC_CLIENT_SECRET,
authorization_response=request.url,
)
oidc_user_data = oidc.get(user_info_url)
if oidc_user_data.status_code != 200:
LOG.e(
f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}"
)
flash(
"Cannot get user data from OIDC, please use another way to login/sign up",
"error",
)
return redirect(url_for("auth.login"))
oidc_user_data = oidc_user_data.json()
email = oidc_user_data.get("email")
if not email:
LOG.e(f"cannot get email for OIDC user {oidc_user_data} {email}")
flash(
"Cannot get a valid email from OIDC, please another way to login/sign up",
"error",
)
return redirect(url_for("auth.login"))
email = sanitize_email(email)
user = User.get_by(email=email)
if not user and config.DISABLE_REGISTRATION:
flash(
"Sorry you cannot sign up via the OIDC provider. Please sign-up first with your email.",
"error",
)
return redirect(url_for("auth.register"))
elif not user:
user = create_user(email, oidc_user_data)
if not SocialAuth.get_by(user_id=user.id, social="oidc"):
SocialAuth.create(user_id=user.id, social="oidc")
Session.commit()
# The activation link contains the original page, for ex authorize page
next_url = session[SESSION_NEXT_KEY]
session[SESSION_NEXT_KEY] = None
return after_login(user, next_url)
def create_user(email, oidc_user_data):
new_user = User.create(
email=email,
name=oidc_user_data.get(OIDC_NAME_FIELD),
password="",
activated=True,
)
LOG.i(f"Created new user for login request from OIDC. New user {new_user.id}")
Session.commit()
send_welcome_email(new_user)
return new_user


@ -6,7 +6,7 @@ from wtforms import StringField, validators
from app import email_utils, config
from app.auth.base import auth_bp
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON
from app.config import CONNECT_WITH_PROTON
from app.auth.views.login_utils import get_referral
from app.config import URL, HCAPTCHA_SECRET, HCAPTCHA_SITEKEY
from app.db import Session
@ -109,14 +109,11 @@ def register():
next_url=next_url,
HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
connect_with_proton=CONNECT_WITH_PROTON,
connect_with_oidc=config.OIDC_CLIENT_ID is not None,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)
def send_activation_email(user, next_url):
# the activation code is valid for 1h and delete all previous codes
Session.query(ActivationCode).filter(ActivationCode.user_id == user.id).delete()
# the activation code is valid for 1h
activation = ActivationCode.create(user_id=user.id, code=random_string(30))
Session.commit()
@ -126,4 +123,4 @@ def send_activation_email(user, next_url):
LOG.d("redirect user to %s after activation", next_url)
activation_link = activation_link + "&next=" + encode_url(next_url)
email_utils.send_activation_email(user, activation_link)
email_utils.send_activation_email(user.email, activation_link)


@ -3,7 +3,7 @@ import random
import socket
import string
from ast import literal_eval
from typing import Callable, List, Optional
from typing import Callable, List
from urllib.parse import urlparse
from dotenv import load_dotenv
@ -35,33 +35,6 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
return literal_eval(value)
def get_env_dict(env_var: str) -> dict[str, str]:
"""
Get an env variable and convert it into a python dictionary with keys and values as strings.
Args:
env_var (str): env var, example: SL_DB
Syntax is: key1=value1;key2=value2
Components separated by ;
key and value separated by =
"""
value = os.getenv(env_var)
if not value:
return {}
components = value.split(";")
result = {}
for component in components:
if component == "":
continue
parts = component.split("=")
if len(parts) != 2:
raise Exception(f"Invalid config for env var {env_var}")
result[parts[0].strip()] = parts[1].strip()
return result
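For illustration, here is a standalone sketch of the `key1=value1;key2=value2` parsing described in the docstring above; the variable name and value are hypothetical, not part of the project config:
```python
import os

# Hypothetical env var following the documented syntax: key1=value1;key2=value2
os.environ["SL_PARTNER_DOMAINS_EXAMPLE"] = "1=partner1.example; 2=partner2.example"


def parse_env_dict(env_var: str) -> dict[str, str]:
    # Mirrors the rules above: split components on ";", split key/value on "=",
    # strip whitespace, skip empty components, reject malformed entries.
    value = os.getenv(env_var)
    if not value:
        return {}
    result: dict[str, str] = {}
    for component in value.split(";"):
        if component == "":
            continue
        parts = component.split("=")
        if len(parts) != 2:
            raise Exception(f"Invalid config for env var {env_var}")
        result[parts[0].strip()] = parts[1].strip()
    return result


assert parse_env_dict("SL_PARTNER_DOMAINS_EXAMPLE") == {
    "1": "partner1.example",
    "2": "partner2.example",
}
```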
config_file = os.environ.get("CONFIG")
if config_file:
config_file = get_abs_path(config_file)
@ -147,7 +120,7 @@ if POSTFIX_SUBMISSION_TLS:
else:
default_postfix_port = 25
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", default_postfix_port))
POSTFIX_TIMEOUT = int(os.environ.get("POSTFIX_TIMEOUT", 3))
POSTFIX_TIMEOUT = os.environ.get("POSTFIX_TIMEOUT", 3)
# ["domain1.com", "domain2.com"]
OTHER_ALIAS_DOMAINS = sl_getenv("OTHER_ALIAS_DOMAINS", list)
@ -261,7 +234,7 @@ else:
print("WARNING: Use a temp directory for GNUPGHOME", GNUPGHOME)
# Github, Google, Facebook, OIDC client id and secrets
# Github, Google, Facebook client id and secrets
GITHUB_CLIENT_ID = os.environ.get("GITHUB_CLIENT_ID")
GITHUB_CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")
@ -271,13 +244,6 @@ GOOGLE_CLIENT_SECRET = os.environ.get("GOOGLE_CLIENT_SECRET")
FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")
CONNECT_WITH_OIDC_ICON = os.environ.get("CONNECT_WITH_OIDC_ICON")
OIDC_WELL_KNOWN_URL = os.environ.get("OIDC_WELL_KNOWN_URL")
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID")
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET")
OIDC_SCOPES = os.environ.get("OIDC_SCOPES")
OIDC_NAME_FIELD = os.environ.get("OIDC_NAME_FIELD", "name")
PROTON_CLIENT_ID = os.environ.get("PROTON_CLIENT_ID")
PROTON_CLIENT_SECRET = os.environ.get("PROTON_CLIENT_SECRET")
PROTON_BASE_URL = os.environ.get(
@ -308,7 +274,6 @@ JOB_DELETE_MAILBOX = "delete-mailbox"
JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
# for pagination
PAGE_LIMIT = 20
@ -456,11 +421,6 @@ try:
except Exception:
HIBP_SCAN_INTERVAL_DAYS = 7
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
HIBP_MAX_ALIAS_CHECK = 10_000
HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
KEEP_OLD_DATA_DAYS = 30
POSTMASTER = os.environ.get("POSTMASTER")
@ -532,31 +492,6 @@ NAMESERVERS = setup_nameservers()
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = os.environ.get(
"DISABLE_CREATE_CONTACTS_FOR_FREE_USERS", False
)
# Expect format hits,seconds:hits,seconds...
# Example 1,10:4,60 means 1 in the last 10 secs or 4 in the last 60 secs
def getRateLimitFromConfig(
env_var: string, default: string = ""
) -> list[tuple[int, int]]:
value = os.environ.get(env_var, default)
if not value:
return []
entries = [entry for entry in value.split(":")]
limits = []
for entry in entries:
fields = entry.split(",")
limit = (int(fields[0]), int(fields[1]))
limits.append(limit)
return limits
ALIAS_CREATE_RATE_LIMIT_FREE = getRateLimitFromConfig(
"ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600"
)
ALIAS_CREATE_RATE_LIMIT_PAID = getRateLimitFromConfig(
"ALIAS_CREATE_RATE_LIMIT_PAID", "50,900:200,3600"
)
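As a quick, hedged illustration of the `hits,seconds:hits,seconds` syntax above, a standalone sketch that mirrors `getRateLimitFromConfig` (the sample value is the default shown for `ALIAS_CREATE_RATE_LIMIT_FREE`):
```python
def parse_rate_limit(value: str) -> list[tuple[int, int]]:
    # "hits,seconds:hits,seconds" -> [(hits, seconds), ...]
    if not value:
        return []
    limits = []
    for entry in value.split(":"):
        hits, seconds = entry.split(",")
        limits.append((int(hits), int(seconds)))
    return limits


# "10,900:50,3600" means at most 10 in the last 900 seconds or 50 in the last 3600 seconds
assert parse_rate_limit("10,900:50,3600") == [(10, 900), (50, 3600)]
```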
PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
FLASK_SECRET + "partnerapitoken"
)
@ -607,53 +542,3 @@ MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ
EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
# We want it disabled by default, so only skip if defined
EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ
def read_webhook_enabled_user_ids() -> Optional[List[int]]:
user_ids = os.environ.get("EVENT_WEBHOOK_ENABLED_USER_IDS", None)
if user_ids is None:
return None
ids = []
for user_id in user_ids.split(","):
try:
ids.append(int(user_id.strip()))
except ValueError:
pass
return ids
EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_ids()
# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
# It defaults to the regular DB_URI in case it's needed
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)
def read_partner_dict(var: str) -> dict[int, str]:
partner_value = get_env_dict(var)
if len(partner_value) == 0:
return {}
res: dict[int, str] = {}
for partner_id in partner_value.keys():
try:
partner_id_int = int(partner_id.strip())
res[partner_id_int] = partner_value[partner_id]
except ValueError:
pass
return res
PARTNER_DOMAINS: dict[int, str] = read_partner_dict("PARTNER_DOMAINS")
PARTNER_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
"PARTNER_DOMAIN_VALIDATION_PREFIXES"
)


@ -1,2 +0,0 @@
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"


@ -1,113 +0,0 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from sqlalchemy.exc import IntegrityError
from app.db import Session
from app.email_utils import generate_reply_email, parse_full_address
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Contact, Alias
from app.utils import sanitize_email
class ContactCreateError(Enum):
InvalidEmail = "Invalid email"
NotAllowed = "Your plan does not allow to create contacts"
@dataclass
class ContactCreateResult:
contact: Optional[Contact]
created: bool
error: Optional[ContactCreateError]
def __update_contact_if_needed(
contact: Contact, name: Optional[str], mail_from: Optional[str]
) -> ContactCreateResult:
if name and contact.name != name:
LOG.d(f"Setting {contact} name to {name}")
contact.name = name
Session.commit()
if mail_from and contact.mail_from is None:
LOG.d(f"Setting {contact} mail_from to {mail_from}")
contact.mail_from = mail_from
Session.commit()
return ContactCreateResult(contact, created=False, error=None)
def create_contact(
email: str,
alias: Alias,
name: Optional[str] = None,
mail_from: Optional[str] = None,
allow_empty_email: bool = False,
automatic_created: bool = False,
from_partner: bool = False,
) -> ContactCreateResult:
# If user cannot create contacts, they still need to be created when receiving an email for an alias
if not automatic_created and not alias.user.can_create_contacts():
return ContactCreateResult(
None, created=False, error=ContactCreateError.NotAllowed
)
# Parse emails with form 'name <email>'
try:
email_name, email = parse_full_address(email)
except ValueError:
email = ""
email_name = ""
# If no name is explicitly given try to get it from the parsed email
if name is None:
name = email_name[: Contact.MAX_NAME_LENGTH]
else:
name = name[: Contact.MAX_NAME_LENGTH]
# If still no name is there, make sure the name is None instead of empty string
if not name:
name = None
if name is not None and "\x00" in name:
LOG.w("Cannot use contact name because has \\x00")
name = ""
# Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
email = sanitize_email(email, not_lower=True)
if not is_valid_email(email):
LOG.w(f"invalid contact email {email}")
if not allow_empty_email:
return ContactCreateResult(
None, created=False, error=ContactCreateError.InvalidEmail
)
LOG.d("Create a contact with invalid email for %s", alias)
# either reuse a contact with empty email or create a new contact with empty email
email = ""
# If contact exists, update name and mail_from if needed
contact = Contact.get_by(alias_id=alias.id, website_email=email)
if contact is not None:
return __update_contact_if_needed(contact, name, mail_from)
# Create the contact
reply_email = generate_reply_email(email, alias)
try:
flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=email,
name=name,
reply_email=reply_email,
mail_from=mail_from,
automatic_created=automatic_created,
flags=flags,
invalid_email=email == "",
commit=True,
)
LOG.d(
f"Created contact {contact} for alias {alias} with email {email} invalid_email={contact.invalid_email}"
)
except IntegrityError:
Session.rollback()
LOG.info(
f"Contact with email {email} for alias_id {alias.id} already existed, fetching from DB"
)
contact = Contact.get_by(alias_id=alias.id, website_email=email)
return __update_contact_if_needed(contact, name, mail_from)
return ContactCreateResult(contact, created=True, error=None)


@ -1,142 +0,0 @@
import arrow
import re
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from app.config import JOB_DELETE_DOMAIN
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import User, CustomDomain, SLDomain, Mailbox, Job
_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
@dataclass
class CreateCustomDomainResult:
message: str = ""
message_category: str = ""
success: bool = False
instance: Optional[CustomDomain] = None
redirect: Optional[str] = None
class CannotUseDomainReason(Enum):
InvalidDomain = 1
BuiltinDomain = 2
DomainAlreadyUsed = 3
DomainPartOfUserEmail = 4
DomainUserInMailbox = 5
def message(self, domain: str) -> str:
if self == CannotUseDomainReason.InvalidDomain:
return "This is not a valid domain"
elif self == CannotUseDomainReason.BuiltinDomain:
return "A custom domain cannot be a built-in domain."
elif self == CannotUseDomainReason.DomainAlreadyUsed:
return f"{domain} already used"
elif self == CannotUseDomainReason.DomainPartOfUserEmail:
return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
elif self == CannotUseDomainReason.DomainUserInMailbox:
return f"{domain} already used in a SimpleLogin mailbox"
else:
raise Exception("Invalid CannotUseDomainReason")
def is_valid_domain(domain: str) -> bool:
"""
Checks that a domain is valid according to RFC 1035
"""
if len(domain) > 255:
return False
if domain.endswith("."):
domain = domain[:-1] # Strip the trailing dot
labels = domain.split(".")
if not labels:
return False
for label in labels:
if not _ALLOWED_DOMAIN_REGEX.match(label):
return False
return True
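A few concrete cases the RFC 1035 check above accepts and rejects, as a standalone sketch reusing the same per-label regex (the sample domains are made up):
```python
import re

_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")


def is_valid_domain(domain: str) -> bool:
    # Same rules as above: at most 255 chars overall, optional trailing dot,
    # each label 1-63 chars of [A-Za-z0-9-] that does not start or end with "-".
    if len(domain) > 255:
        return False
    if domain.endswith("."):
        domain = domain[:-1]
    labels = domain.split(".")
    if not labels:
        return False
    return all(_ALLOWED_DOMAIN_REGEX.match(label) for label in labels)


assert is_valid_domain("example.com")
assert is_valid_domain("sub.example.com.")  # trailing dot is stripped
assert not is_valid_domain("-bad.example.com")  # label starts with "-"
assert not is_valid_domain(("a" * 64) + ".com")  # label longer than 63 chars
```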
def sanitize_domain(domain: str) -> str:
new_domain = domain.lower().strip()
if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]
if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]
return new_domain
def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
if not is_valid_domain(domain):
return CannotUseDomainReason.InvalidDomain
elif SLDomain.get_by(domain=domain):
return CannotUseDomainReason.BuiltinDomain
elif CustomDomain.get_by(domain=domain):
return CannotUseDomainReason.DomainAlreadyUsed
elif get_email_domain_part(user.email) == domain:
return CannotUseDomainReason.DomainPartOfUserEmail
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
).first():
return CannotUseDomainReason.DomainUserInMailbox
else:
return None
def create_custom_domain(
user: User, domain: str, partner_id: Optional[int] = None
) -> CreateCustomDomainResult:
if not user.is_premium():
return CreateCustomDomainResult(
message="Only premium plan can add custom domain",
message_category="warning",
)
new_domain = sanitize_domain(domain)
domain_forbidden_cause = can_domain_be_used(user, new_domain)
if domain_forbidden_cause:
return CreateCustomDomainResult(
message=domain_forbidden_cause.message(new_domain), message_category="error"
)
new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)
# new domain has ownership verified if its parent has the ownership verified
for root_cd in user.custom_domains:
if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True
# Add the partner_id in case it's passed
if partner_id is not None:
new_custom_domain.partner_id = partner_id
Session.commit()
return CreateCustomDomainResult(
success=True,
instance=new_custom_domain,
)
def delete_custom_domain(domain: CustomDomain):
# Schedule delete domain job
LOG.w("schedule delete domain job for %s", domain)
domain.pending_deletion = True
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": domain.id},
run_at=arrow.now(),
commit=True,
)


@ -1,157 +1,37 @@
from dataclasses import dataclass
from typing import Optional
from app import config
from app.constants import DMARC_RECORD
from app.db import Session
from app.dns_utils import (
DNSClient,
is_mx_equivalent,
get_network_dns_client,
)
from app.dns_utils import get_cname_record
from app.models import CustomDomain
@dataclass
class DomainValidationResult:
success: bool
errors: [str]
class CustomDomainValidation:
def __init__(
self,
dkim_domain: str,
dns_client: DNSClient = get_network_dns_client(),
partner_domains: Optional[dict[int, str]] = None,
partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
):
def __init__(self, dkim_domain: str):
self.dkim_domain = dkim_domain
self._dns_client = dns_client
self._partner_domains = partner_domains or config.PARTNER_DOMAINS
self._partner_domain_validation_prefixes = (
partner_domains_validation_prefixes
or config.PARTNER_DOMAIN_VALIDATION_PREFIXES
)
def get_ownership_verification_record(self, domain: CustomDomain) -> str:
prefix = "sl"
if (
domain.partner_id is not None
and domain.partner_id in self._partner_domain_validation_prefixes
):
prefix = self._partner_domain_validation_prefixes[domain.partner_id]
return f"{prefix}-verification={domain.ownership_txt_token}"
def get_dkim_records(self, domain: CustomDomain) -> {str: str}:
"""
Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
it will return the default ones or the partner ones.
"""
# By default use the default domain
dkim_domain = self.dkim_domain
if domain.partner_id is not None:
# Domain is from a partner. Retrieve the partner config and use that domain if exists
dkim_domain = self._partner_domains.get(domain.partner_id, dkim_domain)
return {
f"{key}._domainkey": f"{key}._domainkey.{dkim_domain}"
self._dkim_records = {
(f"{key}._domainkey", f"{key}._domainkey.{self.dkim_domain}")
for key in ("dkim", "dkim02", "dkim03")
}
def get_dkim_records(self) -> {str: str}:
"""
Get a list of dkim records to set up. It will be
"""
return self._dkim_records
def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
"""
Check if dkim records are properly set for this custom domain.
Returns empty list if all records are ok. Other-wise return the records that aren't properly configured
"""
correct_records = {}
invalid_records = {}
expected_records = self.get_dkim_records(custom_domain)
for prefix, expected_record in expected_records.items():
for prefix, expected_record in self.get_dkim_records():
custom_record = f"{prefix}.{custom_domain.domain}"
dkim_record = self._dns_client.get_cname_record(custom_record)
if dkim_record == expected_record:
correct_records[prefix] = custom_record
else:
dkim_record = get_cname_record(custom_record)
if dkim_record != expected_record:
invalid_records[custom_record] = dkim_record or "empty"
# HACK
# As initially we only had one dkim record, we want to allow users that had only the original dkim record and
# the domain validated to continue seeing it as validated (although showing them the missing records).
# However, if not even the original dkim record is right, even if the domain was dkim_verified in the past,
# we will remove the dkim_verified flag.
# This is done in order to give users with the old dkim config (only one) to update their CNAMEs
# HACK: If dkim is enabled, don't disable it to give users time to update their CNAMES
if custom_domain.dkim_verified:
# Check if at least the original dkim is there
if correct_records.get("dkim._domainkey") is not None:
# Original dkim record is there. Return the missing records (if any) and don't clear the flag
return invalid_records
# Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
# rest of the code path, returning the invalid records and clearing the flag
custom_domain.dkim_verified = len(invalid_records) == 0
Session.commit()
return invalid_records
def validate_domain_ownership(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
"""
Check if the custom_domain has added the ownership verification records
"""
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
expected_verification_record = self.get_ownership_verification_record(
custom_domain
)
if expected_verification_record in txt_records:
custom_domain.ownership_verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
return DomainValidationResult(success=False, errors=txt_records)
def validate_mx_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
if not is_mx_equivalent(mx_domains, config.EMAIL_SERVERS_WITH_PRIORITY):
return DomainValidationResult(
success=False,
errors=[f"{priority} {domain}" for (priority, domain) in mx_domains],
)
else:
custom_domain.verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
def validate_spf_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
if config.EMAIL_DOMAIN in spf_domains:
custom_domain.spf_verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
custom_domain.spf_verified = False
Session.commit()
return DomainValidationResult(
success=False,
errors=self._dns_client.get_txt_record(custom_domain.domain),
)
def validate_dmarc_records(
self, custom_domain: CustomDomain
) -> DomainValidationResult:
txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
if DMARC_RECORD in txt_records:
custom_domain.dmarc_verified = True
Session.commit()
return DomainValidationResult(success=True, errors=[])
else:
custom_domain.dmarc_verified = False
Session.commit()
return DomainValidationResult(success=False, errors=txt_records)
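A hedged sketch of how this validation flow could be exercised in a test, pairing it with the InMemoryDNSClient added to dns_utils later in this diff. The dns_client keyword argument and the custom_domain instance are assumptions, since the class constructor is not shown in this hunk:

from app.config import EMAIL_DOMAIN
from app.custom_domain_validation import CustomDomainValidation
from app.dns_utils import InMemoryDNSClient

dns_client = InMemoryDNSClient()
# Assumption: the constructor accepts an injectable dns_client; only
# CustomDomainValidation(EMAIL_DOMAIN) is visible elsewhere in this diff.
validator = CustomDomainValidation(EMAIL_DOMAIN, dns_client=dns_client)

# custom_domain is assumed to be a persisted CustomDomain owned by a test user.
expected_txt = validator.get_ownership_verification_record(custom_domain)
dns_client.set_txt_record(custom_domain.domain, [expected_txt])

result = validator.validate_domain_ownership(custom_domain)
assert result.success and result.errors == []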

View File

@ -32,7 +32,6 @@ from .views import (
delete_account,
notification,
support,
account_setting,
)
__all__ = [
@ -69,5 +68,4 @@ __all__ = [
"delete_account",
"notification",
"support",
"account_setting",
]

View File

@ -1,242 +0,0 @@
import arrow
from flask import (
render_template,
request,
redirect,
url_for,
flash,
)
from flask_login import login_required, current_user
from app import email_utils
from app.config import (
URL,
FIRST_ALIAS_DOMAIN,
ALIAS_RANDOM_SUFFIX_LENGTH,
CONNECT_WITH_PROTON,
)
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.dashboard.views.mailbox_detail import ChangeEmailForm
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
)
from app.extensions import limiter
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import (
BlockBehaviourEnum,
PlanEnum,
ResetPasswordCode,
EmailChange,
User,
Alias,
AliasGeneratorEnum,
SenderFormatEnum,
UnsubscribeBehaviourEnum,
)
from app.proton.utils import perform_proton_account_unlink
from app.utils import (
random_string,
CSRFValidationForm,
canonicalize_email,
)
@dashboard_bp.route("/account_setting", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("5/minute", methods=["POST"])
def account_setting():
change_email_form = ChangeEmailForm()
csrf_form = CSRFValidationForm()
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
pending_email = email_change.new_email
else:
pending_email = None
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-email":
if change_email_form.validate():
# whether user can proceed with the email update
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:
# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
new_email_valid = False
elif not email_can_be_used_as_mailbox(new_email):
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
# a pending email change with the same email exists from another user
elif EmailChange.get_by(new_email=new_email):
other_email_change: EmailChange = EmailChange.get_by(
new_email=new_email
)
LOG.w(
"Another user has a pending %s with the same email address. Current user:%s",
other_email_change,
current_user,
)
if other_email_change.is_expired():
LOG.d(
"delete the expired email change %s", other_email_change
)
EmailChange.delete(other_email_change.id)
Session.commit()
else:
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
if new_email_valid:
email_change = EmailChange.create(
user_id=current_user.id,
code=random_string(
60
), # todo: make sure the code is unique
new_email=new_email,
)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash(
"A confirmation email is on the way, please check your inbox",
"success",
)
return redirect(url_for("dashboard.account_setting"))
elif request.form.get("form-name") == "change-password":
flash(
"You are going to receive an email containing instructions to change your password",
"success",
)
send_reset_password_email(current_user)
return redirect(url_for("dashboard.account_setting"))
elif request.form.get("form-name") == "send-full-user-report":
if ExportUserDataJob(current_user).store_job_in_db():
flash(
"You will receive your SimpleLogin data via email shortly",
"success",
)
else:
flash("An export of your data is currently in progress", "error")
partner_sub = None
partner_name = None
return render_template(
"dashboard/account_setting.html",
csrf_form=csrf_form,
PlanEnum=PlanEnum,
SenderFormatEnum=SenderFormatEnum,
BlockBehaviourEnum=BlockBehaviourEnum,
change_email_form=change_email_form,
pending_email=pending_email,
AliasGeneratorEnum=AliasGeneratorEnum,
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
partner_sub=partner_sub,
partner_name=partner_name,
FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
connect_with_proton=CONNECT_WITH_PROTON,
)
def send_reset_password_email(user):
"""
generate a new ResetPasswordCode and send it over email to user
"""
# the reset password code is valid for 1h
reset_password_code = ResetPasswordCode.create(
user_id=user.id, code=random_string(60)
)
Session.commit()
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
email_utils.send_reset_password_email(user, reset_password_link)
def send_change_email_confirmation(user: User, email_change: EmailChange):
"""
send confirmation email to the new email address
"""
link = f"{URL}/auth/change_email?code={email_change.code}"
email_utils.send_change_email(user, email_change.new_email, link)
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
@login_required
@sudo_required
def resend_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
# extend email change expiration
email_change.expired = arrow.now().shift(hours=12)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash("A confirmation email is on the way, please check your inbox", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
@sudo_required
def cancel_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
EmailChange.delete(email_change.id)
Session.commit()
flash("Your email change is cancelled", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
@login_required
@sudo_required
def unlink_proton_account():
csrf_form = CSRFValidationForm()
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
perform_proton_account_unlink(current_user)
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))

View File

@ -9,10 +9,13 @@ from sqlalchemy import and_, func, case
from wtforms import StringField, validators, ValidationError
# Need to import directly from config to allow modification from the tests
from app import config, parallel_limiter, contact_utils
from app.contact_utils import ContactCreateError
from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
generate_reply_email,
parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
@ -21,8 +24,8 @@ from app.errors import (
ErrContactAlreadyExists,
)
from app.log import LOG
from app.models import Alias, Contact, EmailLog
from app.utils import CSRFValidationForm
from app.models import Alias, Contact, EmailLog, User
from app.utils import sanitize_email, CSRFValidationForm
def email_validator():
@ -48,7 +51,7 @@ def email_validator():
return _check
def create_contact(alias: Alias, contact_address: str) -> Contact:
def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
"""
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
Can throw exceptions:
@ -58,23 +61,37 @@ def create_contact(alias: Alias, contact_address: str) -> Contact:
"""
if not contact_address:
raise ErrAddressInvalid("Empty address")
output = contact_utils.create_contact(email=contact_address, alias=alias)
if output.error == ContactCreateError.InvalidEmail:
try:
contact_name, contact_email = parse_full_address(contact_address)
except ValueError:
raise ErrAddressInvalid(contact_address)
elif output.error == ContactCreateError.NotAllowed:
raise ErrContactErrorUpgradeNeeded()
elif output.error is not None:
raise ErrAddressInvalid("Invalid address")
elif not output.created:
raise ErrContactAlreadyExists(output.contact)
contact = output.contact
contact_email = sanitize_email(contact_email)
if not is_valid_email(contact_email):
raise ErrAddressInvalid(contact_email)
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
if contact:
raise ErrContactAlreadyExists(contact)
if not user.can_create_contacts():
raise ErrContactErrorUpgradeNeeded()
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
reply_email=generate_reply_email(contact_email, alias),
)
LOG.d(
"create reverse-alias for %s %s, reverse alias:%s",
contact_address,
alias,
contact.reply_email,
)
Session.commit()
return contact
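For context, a short, illustrative sketch of how a caller handles this helper and the errors it raises; the dashboard view in the next hunk does essentially this (the contact address below is made up):

from app.errors import (
    ErrAddressInvalid,
    ErrContactAlreadyExists,
    ErrContactErrorUpgradeNeeded,
)

try:
    # old signature shown above; the new code drops the explicit user argument
    contact = create_contact(current_user, alias, "Someone <someone@example.com>")
except ErrContactErrorUpgradeNeeded:
    ...  # free users can be prevented from creating contacts
except ErrContactAlreadyExists:
    ...  # a contact with this address already exists for the alias
except ErrAddressInvalid:
    ...  # empty or malformed address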
@ -244,7 +261,7 @@ def alias_contact_manager(alias_id):
if new_contact_form.validate():
contact_address = new_contact_form.email.data.strip()
try:
contact = create_contact(alias, contact_address)
contact = create_contact(current_user, alias, contact_address)
except (
ErrContactErrorUpgradeNeeded,
ErrAddressInvalid,

View File

@ -1,13 +1,9 @@
from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
from app.dashboard.views.enter_sudo import sudo_required
from app.extensions import limiter
@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
@sudo_required
@limiter.limit("2/minute")
def alias_export_route():
return alias_export_csv(current_user)

View File

@ -5,9 +5,7 @@ from flask_login import login_required, current_user
from app import s3
from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import File, BatchImport, Job
from app.utils import random_string, CSRFValidationForm
@ -15,8 +13,6 @@ from app.utils import random_string, CSRFValidationForm
@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/minute", methods=["POST"])
def batch_import_route():
# only for users who have custom domains
if not current_user.verified_custom_domains():
@ -41,7 +37,7 @@ def batch_import_route():
return redirect(request.url)
if len(batch_imports) > 10:
flash(
"You have too many imports already. Please wait until some get cleaned up",
"You have too many imports already. Wait until some get cleaned up",
"error",
)
return render_template(

View File

@ -5,9 +5,11 @@ from wtforms import StringField, validators
from app import parallel_limiter
from app.config import EMAIL_SERVERS_WITH_PRIORITY
from app.custom_domain_utils import create_custom_domain
from app.dashboard.base import dashboard_bp
from app.models import CustomDomain
from app.db import Session
from app.email_utils import get_email_domain_part
from app.log import LOG
from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
class NewCustomDomainForm(FlaskForm):
@ -23,8 +25,11 @@ def custom_domain():
custom_domains = CustomDomain.filter_by(
user_id=current_user.id, is_sl_subdomain=False
).all()
mailboxes = current_user.mailboxes()
new_custom_domain_form = NewCustomDomainForm()
errors = {}
if request.method == "POST":
if request.form.get("form-name") == "create":
if not current_user.is_premium():
@ -32,25 +37,87 @@ def custom_domain():
return redirect(url_for("dashboard.custom_domain"))
if new_custom_domain_form.validate():
res = create_custom_domain(
user=current_user, domain=new_custom_domain_form.domain.data
new_domain = new_custom_domain_form.domain.data.lower().strip()
if new_domain.startswith("http://"):
new_domain = new_domain[len("http://") :]
if new_domain.startswith("https://"):
new_domain = new_domain[len("https://") :]
if SLDomain.get_by(domain=new_domain):
flash("A custom domain cannot be a built-in domain.", "error")
elif CustomDomain.get_by(domain=new_domain):
flash(f"{new_domain} already used", "error")
elif get_email_domain_part(current_user.email) == new_domain:
flash(
"You cannot add a domain that you are currently using for your personal email. "
"Please change your personal email to your real email",
"error",
)
if res.success:
flash(f"New domain {res.instance.domain} is created", "success")
elif Mailbox.filter(
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
).first():
flash(
f"{new_domain} already used in a SimpleLogin mailbox", "error"
)
else:
new_custom_domain = CustomDomain.create(
domain=new_domain, user_id=current_user.id
)
# new domain has ownership verified if its parent has the ownership verified
for root_cd in current_user.custom_domains:
if (
new_domain.endswith("." + root_cd.domain)
and root_cd.ownership_verified
):
LOG.i(
"%s ownership verified thanks to %s",
new_custom_domain,
root_cd,
)
new_custom_domain.ownership_verified = True
Session.commit()
mailbox_ids = request.form.getlist("mailbox_ids")
if mailbox_ids:
# check if mailbox is not tampered with
mailboxes = []
for mailbox_id in mailbox_ids:
mailbox = Mailbox.get(mailbox_id)
if (
not mailbox
or mailbox.user_id != current_user.id
or not mailbox.verified
):
flash("Something went wrong, please retry", "warning")
return redirect(url_for("dashboard.custom_domain"))
mailboxes.append(mailbox)
for mailbox in mailboxes:
DomainMailbox.create(
domain_id=new_custom_domain.id, mailbox_id=mailbox.id
)
Session.commit()
flash(
f"New domain {new_custom_domain.domain} is created", "success"
)
return redirect(
url_for(
"dashboard.domain_detail_dns",
custom_domain_id=res.instance.id,
custom_domain_id=new_custom_domain.id,
)
)
else:
flash(res.message, res.message_category)
if res.redirect:
return redirect(url_for(res.redirect))
return render_template(
"dashboard/custom_domain.html",
custom_domains=custom_domains,
new_custom_domain_form=new_custom_domain_form,
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
errors=errors,
mailboxes=mailboxes,
)

View File

@ -1,16 +1,22 @@
import re
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators, IntegerField
from app.constants import DMARC_RECORD
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
from app.custom_domain_utils import delete_custom_domain
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
from app.custom_domain_validation import CustomDomainValidation
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.dns_utils import (
get_mx_domains,
get_spf_domain,
get_txt_record,
is_mx_equivalent,
)
from app.log import LOG
from app.models import (
CustomDomain,
Alias,
@ -19,6 +25,7 @@ from app.models import (
DomainMailbox,
AutoCreateRule,
AutoCreateRuleMailbox,
Job,
)
from app.regex_utils import regex_match
from app.utils import random_string, CSRFValidationForm
@ -42,6 +49,8 @@ def domain_detail_dns(custom_domain_id):
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
csrf_form = CSRFValidationForm()
dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
@ -50,14 +59,15 @@ def domain_detail_dns(custom_domain_id):
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "check-ownership":
ownership_validation_result = domain_validator.validate_domain_ownership(
custom_domain
)
if ownership_validation_result.success:
txt_records = get_txt_record(custom_domain.domain)
if custom_domain.get_ownership_dns_txt_value() in txt_records:
flash(
"Domain ownership is verified. Please proceed to the other records setup",
"success",
)
custom_domain.ownership_verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns",
@ -68,28 +78,36 @@ def domain_detail_dns(custom_domain_id):
else:
flash("We can't find the needed TXT record", "error")
ownership_ok = False
ownership_errors = ownership_validation_result.errors
ownership_errors = txt_records
elif request.form.get("form-name") == "check-mx":
mx_validation_result = domain_validator.validate_mx_records(custom_domain)
if mx_validation_result.success:
mx_domains = get_mx_domains(custom_domain.domain)
if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
flash("The MX record is not correctly set", "warning")
mx_ok = False
# build mx_errors to show to user
mx_errors = [
f"{priority} {domain}" for (priority, domain) in mx_domains
]
else:
flash(
"Your domain can start receiving emails. You can now use it to create alias",
"success",
)
custom_domain.verified = True
Session.commit()
return redirect(
url_for(
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
)
)
else:
flash("The MX record is not correctly set", "warning")
mx_ok = False
mx_errors = mx_validation_result.errors
elif request.form.get("form-name") == "check-spf":
spf_validation_result = domain_validator.validate_spf_records(custom_domain)
if spf_validation_result.success:
spf_domains = get_spf_domain(custom_domain.domain)
if EMAIL_DOMAIN in spf_domains:
custom_domain.spf_verified = True
Session.commit()
flash("SPF is setup correctly", "success")
return redirect(
url_for(
@ -97,12 +115,14 @@ def domain_detail_dns(custom_domain_id):
)
)
else:
custom_domain.spf_verified = False
Session.commit()
flash(
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
"warning",
)
spf_ok = False
spf_errors = spf_validation_result.errors
spf_errors = get_txt_record(custom_domain.domain)
elif request.form.get("form-name") == "check-dkim":
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
@ -118,10 +138,10 @@ def domain_detail_dns(custom_domain_id):
flash("DKIM: the CNAME record is not correctly set", "warning")
elif request.form.get("form-name") == "check-dmarc":
dmarc_validation_result = domain_validator.validate_dmarc_records(
custom_domain
)
if dmarc_validation_result.success:
txt_records = get_txt_record("_dmarc." + custom_domain.domain)
if dmarc_record in txt_records:
custom_domain.dmarc_verified = True
Session.commit()
flash("DMARC is setup correctly", "success")
return redirect(
url_for(
@ -129,21 +149,19 @@ def domain_detail_dns(custom_domain_id):
)
)
else:
custom_domain.dmarc_verified = False
Session.commit()
flash(
"DMARC: The TXT record is not correctly set",
"warning",
)
dmarc_ok = False
dmarc_errors = dmarc_validation_result.errors
dmarc_errors = txt_records
return render_template(
"dashboard/domain_detail/dns.html",
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
ownership_record=domain_validator.get_ownership_verification_record(
custom_domain
),
dkim_records=domain_validator.get_dkim_records(custom_domain),
dmarc_record=DMARC_RECORD,
dkim_records=domain_validator.get_dkim_records(),
**locals(),
)
@ -261,8 +279,16 @@ def domain_detail(custom_domain_id):
elif request.form.get("form-name") == "delete":
name = custom_domain.domain
LOG.d("Schedule deleting %s", custom_domain)
delete_custom_domain(custom_domain)
# Schedule delete domain job
LOG.w("schedule delete domain job for %s", custom_domain)
Job.create(
name=JOB_DELETE_DOMAIN,
payload={"custom_domain_id": custom_domain.id},
run_at=arrow.now(),
commit=True,
)
flash(
f"{name} scheduled for deletion."

View File

@ -6,15 +6,15 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import PasswordField, validators
from app.config import CONNECT_WITH_PROTON, OIDC_CLIENT_ID, CONNECT_WITH_OIDC_ICON
from app.config import CONNECT_WITH_PROTON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser, SocialAuth
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url
_SUDO_GAP = 120
_SUDO_GAP = 900
class LoginForm(FlaskForm):
@ -51,19 +51,11 @@ def enter_sudo():
if not partner_user or partner_user.partner_id != get_proton_partner().id:
proton_enabled = False
oidc_enabled = OIDC_CLIENT_ID is not None
if oidc_enabled:
oidc_enabled = (
SocialAuth.get_by(user_id=current_user.id, social="oidc") is not None
)
return render_template(
"dashboard/enter_sudo.html",
password_check_form=password_check_form,
next=request.args.get("next"),
connect_with_proton=proton_enabled,
connect_with_oidc=oidc_enabled,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
)

View File

@ -12,7 +12,6 @@ from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
AliasDeleteReason,
AliasGeneratorEnum,
User,
EmailLog,
@ -142,14 +141,12 @@ def index():
)
if request.form.get("form-name") == "delete-alias":
LOG.i(f"User {current_user} requested deletion of alias {alias}")
LOG.d("delete alias %s", alias)
email = alias.email
alias_utils.delete_alias(
alias, current_user, AliasDeleteReason.ManualAction, commit=True
)
alias_utils.delete_alias(alias, current_user)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
alias_utils.change_alias_status(alias, enabled=False)
alias.enabled = False
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")

View File

@ -2,6 +2,7 @@ import base64
import binascii
import json
import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
@ -9,12 +10,19 @@ from itsdangerous import TimestampSigner
from wtforms import validators, IntegerField
from wtforms.fields.html5 import EmailField
from app import parallel_limiter, mailbox_utils, user_settings
from app.config import MAILBOX_SECRET
from app import parallel_limiter
from app.config import MAILBOX_SECRET, URL, JOB_DELETE_MAILBOX
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
mailbox_already_used,
render,
send_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox
from app.models import Mailbox, Job
from app.utils import CSRFValidationForm
@ -50,59 +58,118 @@ def mailbox_route():
if not delete_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
try:
mailbox = mailbox_utils.delete_mailbox(
current_user,
delete_mailbox_form.mailbox_id.data,
delete_mailbox_form.transfer_mailbox_id.data,
)
except mailbox_utils.MailboxError as e:
flash(e.msg, "warning")
mailbox = Mailbox.get(delete_mailbox_form.mailbox_id.data)
if not mailbox or mailbox.user_id != current_user.id:
flash("Invalid mailbox. Refresh the page", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if mailbox.id == current_user.default_mailbox_id:
flash("You cannot delete default mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
transfer_mailbox_id = delete_mailbox_form.transfer_mailbox_id.data
if transfer_mailbox_id and transfer_mailbox_id > 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
if not transfer_mailbox or transfer_mailbox.user_id != current_user.id:
flash(
"You must transfer the aliases to a mailbox you own.", "error"
)
return redirect(url_for("dashboard.mailbox_route"))
if transfer_mailbox.id == mailbox.id:
flash(
"You can not transfer the aliases to the mailbox you want to delete.",
"error",
)
return redirect(url_for("dashboard.mailbox_route"))
if not transfer_mailbox.verified:
flash("Your new mailbox is not verified", "error")
return redirect(url_for("dashboard.mailbox_route"))
# Schedule delete mailbox job
LOG.w(
f"schedule delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id
if transfer_mailbox_id > 0
else None,
},
run_at=arrow.now(),
commit=True,
)
flash(
f"Mailbox {mailbox.email} scheduled for deletion."
f"You will receive a confirmation email when the deletion is finished",
"success",
)
return redirect(url_for("dashboard.mailbox_route"))
return redirect(url_for("dashboard.mailbox_route"))
if request.form.get("form-name") == "set-default":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
try:
mailbox_id = request.form.get("mailbox_id")
mailbox = user_settings.set_default_mailbox(current_user, mailbox_id)
except user_settings.CannotSetMailbox as e:
flash(e.msg, "warning")
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("Unknown error. Refresh the page", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if mailbox.id == current_user.default_mailbox_id:
flash("This mailbox is already default one", "error")
return redirect(url_for("dashboard.mailbox_route"))
if not mailbox.verified:
flash("Cannot set unverified mailbox as default", "error")
return redirect(url_for("dashboard.mailbox_route"))
current_user.default_mailbox_id = mailbox.id
Session.commit()
flash(f"Mailbox {mailbox.email} is set as Default Mailbox", "success")
return redirect(url_for("dashboard.mailbox_route"))
elif request.form.get("form-name") == "create":
if not new_mailbox_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
mailbox_email = new_mailbox_form.email.data.lower().strip().replace(" ", "")
try:
mailbox = mailbox_utils.create_mailbox(
current_user, mailbox_email
).mailbox
except mailbox_utils.MailboxError as e:
flash(e.msg, "warning")
if not current_user.is_premium():
flash("Only premium plan can add additional mailbox", "warning")
return redirect(url_for("dashboard.mailbox_route"))
if new_mailbox_form.validate():
mailbox_email = (
new_mailbox_form.email.data.lower().strip().replace(" ", "")
)
if not is_valid_email(mailbox_email):
flash(f"{mailbox_email} invalid", "error")
elif mailbox_already_used(mailbox_email, current_user):
flash(f"{mailbox_email} already used", "error")
elif not email_can_be_used_as_mailbox(mailbox_email):
flash(f"You cannot use {mailbox_email}.", "error")
else:
new_mailbox = Mailbox.create(
email=mailbox_email, user_id=current_user.id
)
Session.commit()
send_verification_email(current_user, new_mailbox)
flash(
f"You are going to receive an email to confirm {mailbox.email}.",
f"You are going to receive an email to confirm {mailbox_email}.",
"success",
)
return redirect(
url_for(
"dashboard.mailbox_detail_route",
mailbox_id=mailbox.id,
mailbox_id=new_mailbox.id,
)
)
@ -115,25 +182,34 @@ def mailbox_route():
)
def send_verification_email(user, mailbox):
s = TimestampSigner(MAILBOX_SECRET)
encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
b64_data = base64.urlsafe_b64encode(encoded_data)
mailbox_id_signed = s.sign(b64_data).decode()
verification_url = (
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
)
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
link=verification_url,
mailbox_email=mailbox.email,
),
)
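The verification URL carries a timestamp-signed, base64-encoded [mailbox.id, mailbox.email] payload. A minimal sketch of the corresponding unsign step, assuming itsdangerous' standard API; the max_age value and error handling are illustrative, and the real check lives in verify_with_signed_secret below:

import base64
import json
from itsdangerous import BadSignature, TimestampSigner

s = TimestampSigner(MAILBOX_SECRET)
try:
    # raises BadSignature (or SignatureExpired) if the value was tampered with or is too old
    b64_data = s.unsign(mailbox_id_signed, max_age=3600)  # max_age is an assumption
    mailbox_id, mailbox_email = json.loads(base64.urlsafe_b64decode(b64_data))
except BadSignature:
    mailbox_id = mailbox_email = None  # treat as an invalid verification link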
@dashboard_bp.route("/mailbox_verify")
@login_required
def mailbox_verify():
mailbox_id = request.args.get("mailbox_id")
code = request.args.get("code")
if not code:
# Old way
return verify_with_signed_secret(mailbox_id)
try:
mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
except mailbox_utils.MailboxError as e:
LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
flash(f"Cannot verify mailbox: {e.msg}", "error")
return redirect(url_for("dashboard.mailbox_route"))
LOG.d("Mailbox %s is verified", mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
def verify_with_signed_secret(request: str):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_verify_request = request.args.get("mailbox_id")
try:

View File

@ -11,11 +11,9 @@ from wtforms.fields.html5 import EmailField
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.models import Mailbox
@ -31,8 +29,6 @@ class ChangeEmailForm(FlaskForm):
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("20/minute", methods=["POST"])
def mailbox_detail_route(mailbox_id):
mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
@ -183,15 +179,8 @@ def mailbox_detail_route(mailbox_id):
elif request.form.get("form-name") == "toggle-pgp":
if request.form.get("pgp-enabled") == "on":
if mailbox.is_proton():
mailbox.disable_pgp = True
flash(
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
"info",
)
else:
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "info")
flash(f"PGP is enabled on {mailbox.email}", "success")
else:
mailbox.disable_pgp = True
flash(f"PGP is disabled on {mailbox.email}", "info")

View File

@ -13,38 +13,51 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from flask_wtf.file import FileField
from wtforms import StringField, validators
from wtforms.fields.html5 import EmailField
from app import s3, user_settings
from app import s3, email_utils
from app.config import (
URL,
FIRST_ALIAS_DOMAIN,
ALIAS_RANDOM_SUFFIX_LENGTH,
CONNECT_WITH_PROTON,
)
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
personal_email_already_used,
)
from app.errors import ProtonPartnerNotSetUp
from app.extensions import limiter
from app.image_validation import detect_image_format, ImageFormat
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import (
BlockBehaviourEnum,
PlanEnum,
File,
ResetPasswordCode,
EmailChange,
User,
Alias,
CustomDomain,
AliasGeneratorEnum,
AliasSuffixEnum,
ManualSubscription,
SenderFormatEnum,
SLDomain,
CoinbaseSubscription,
AppleSubscription,
PartnerUser,
PartnerSubscription,
UnsubscribeBehaviourEnum,
)
from app.proton.utils import get_proton_partner
from app.proton.utils import get_proton_partner, perform_proton_account_unlink
from app.utils import (
random_string,
CSRFValidationForm,
canonicalize_email,
)
@ -53,6 +66,12 @@ class SettingForm(FlaskForm):
profile_picture = FileField("Profile Picture")
class ChangeEmailForm(FlaskForm):
email = EmailField(
"email", validators=[validators.DataRequired(), validators.Email()]
)
class PromoCodeForm(FlaskForm):
code = StringField("Name", validators=[validators.DataRequired()])
@ -90,6 +109,7 @@ def get_partner_subscription_and_name(
def setting():
form = SettingForm()
promo_form = PromoCodeForm()
change_email_form = ChangeEmailForm()
csrf_form = CSRFValidationForm()
email_change = EmailChange.get_by(user_id=current_user.id)
@ -102,7 +122,63 @@ def setting():
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-email":
if change_email_form.validate():
# whether user can proceed with the email update
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:
# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email
):
flash(f"Email {new_email} already used", "error")
new_email_valid = False
elif not email_can_be_used_as_mailbox(new_email):
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
# a pending email change with the same email exists from another user
elif EmailChange.get_by(new_email=new_email):
other_email_change: EmailChange = EmailChange.get_by(
new_email=new_email
)
LOG.w(
"Another user has a pending %s with the same email address. Current user:%s",
other_email_change,
current_user,
)
if other_email_change.is_expired():
LOG.d(
"delete the expired email change %s", other_email_change
)
EmailChange.delete(other_email_change.id)
Session.commit()
else:
flash(
"You cannot use this email address as your personal inbox.",
"error",
)
new_email_valid = False
if new_email_valid:
email_change = EmailChange.create(
user_id=current_user.id,
code=random_string(
60
), # todo: make sure the code is unique
new_email=new_email,
)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash(
"A confirmation email is on the way, please check your inbox",
"success",
)
return redirect(url_for("dashboard.setting"))
if request.form.get("form-name") == "update-profile":
if form.validate():
profile_updated = False
@ -146,6 +222,15 @@ def setting():
if profile_updated:
flash("Your profile has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-password":
flash(
"You are going to receive an email containing instructions to change your password",
"success",
)
send_reset_password_email(current_user)
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "notification-preference":
choose = request.form.get("notification")
if choose == "on":
@ -155,6 +240,7 @@ def setting():
Session.commit()
flash("Your notification preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-alias-generator":
scheme = int(request.form.get("alias-generator-scheme"))
if AliasGeneratorEnum.has_value(scheme):
@ -162,17 +248,46 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-random-alias-default-domain":
default_domain = request.form.get("random-alias-default-domain")
try:
user_settings.set_default_alias_domain(current_user, default_domain)
except user_settings.CannotSetAlias as e:
flash(e.msg, "error")
if default_domain:
sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
if sl_domain:
if sl_domain.premium_only and not current_user.is_premium():
flash("You cannot use this domain", "error")
return redirect(url_for("dashboard.setting"))
current_user.default_alias_public_domain_id = sl_domain.id
current_user.default_alias_custom_domain_id = None
else:
custom_domain = CustomDomain.get_by(domain=default_domain)
if custom_domain:
# sanity check
if (
custom_domain.user_id != current_user.id
or not custom_domain.verified
):
LOG.w(
"%s cannot use domain %s", current_user, custom_domain
)
flash(f"Domain {default_domain} can't be used", "error")
return redirect(request.url)
else:
current_user.default_alias_custom_domain_id = (
custom_domain.id
)
current_user.default_alias_public_domain_id = None
else:
current_user.default_alias_custom_domain_id = None
current_user.default_alias_public_domain_id = None
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "random-alias-suffix":
scheme = int(request.form.get("random-alias-suffix-generator"))
if AliasSuffixEnum.has_value(scheme):
@ -180,6 +295,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "change-sender-format":
sender_format = int(request.form.get("sender-format"))
if SenderFormatEnum.has_value(sender_format):
@ -189,6 +305,7 @@ def setting():
flash("Your sender format preference has been updated", "success")
Session.commit()
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "replace-ra":
choose = request.form.get("replace-ra")
if choose == "on":
@ -198,21 +315,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "enable_data_breach_check":
if not current_user.is_premium():
flash("Only premium plan can enable data breach monitoring", "warning")
return redirect(url_for("dashboard.setting"))
choose = request.form.get("enable_data_breach_check")
if choose == "on":
LOG.i("User {current_user} has enabled data breach monitoring")
current_user.enable_data_breach_check = True
flash("Data breach monitoring is enabled", "success")
else:
LOG.i("User {current_user} has disabled data breach monitoring")
current_user.enable_data_breach_check = False
flash("Data breach monitoring is disabled", "info")
Session.commit()
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "sender-in-ra":
choose = request.form.get("enable")
if choose == "on":
@ -222,6 +325,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "expand-alias-info":
choose = request.form.get("enable")
if choose == "on":
@ -283,6 +387,14 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "send-full-user-report":
if ExportUserDataJob(current_user).store_job_in_db():
flash(
"You will receive your SimpleLogin data via email shortly",
"success",
)
else:
flash("An export of your data is currently in progress", "error")
manual_sub = ManualSubscription.get_by(user_id=current_user.id)
apple_sub = AppleSubscription.get_by(user_id=current_user.id)
@ -305,6 +417,7 @@ def setting():
SenderFormatEnum=SenderFormatEnum,
BlockBehaviourEnum=BlockBehaviourEnum,
promo_form=promo_form,
change_email_form=change_email_form,
pending_email=pending_email,
AliasGeneratorEnum=AliasGeneratorEnum,
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
@ -319,3 +432,85 @@ def setting():
connect_with_proton=CONNECT_WITH_PROTON,
proton_linked_account=proton_linked_account,
)
def send_reset_password_email(user):
"""
generate a new ResetPasswordCode and send it over email to user
"""
# the reset password code is valid for 1h
reset_password_code = ResetPasswordCode.create(
user_id=user.id, code=random_string(60)
)
Session.commit()
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
email_utils.send_reset_password_email(user.email, reset_password_link)
def send_change_email_confirmation(user: User, email_change: EmailChange):
"""
send confirmation email to the new email address
"""
link = f"{URL}/auth/change_email?code={email_change.code}"
email_utils.send_change_email(email_change.new_email, user.email, link)
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
@login_required
def resend_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
# extend email change expiration
email_change.expired = arrow.now().shift(hours=12)
Session.commit()
send_change_email_confirmation(current_user, email_change)
flash("A confirmation email is on the way, please check your inbox", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
def cancel_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
EmailChange.delete(email_change.id)
Session.commit()
flash("Your email change is cancelled", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
@login_required
def unlink_proton_account():
csrf_form = CSRFValidationForm()
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(url_for("dashboard.setting"))
perform_proton_account_unlink(current_user)
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))

View File

@ -8,7 +8,6 @@ from app.db import Session
from flask import redirect, url_for, flash, request, render_template
from flask_login import login_required, current_user
from app import alias_utils
from app.dashboard.base import dashboard_bp
from app.handler.unsubscribe_encoder import UnsubscribeAction
from app.handler.unsubscribe_handler import UnsubscribeHandler
@ -32,7 +31,7 @@ def unsubscribe(alias_id):
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
if request.method == "POST":
alias_utils.change_alias_status(alias, False)
alias.enabled = False
flash(f"Alias {alias.email} has been blocked", "success")
Session.commit()

View File

@ -1,5 +1,4 @@
from io import BytesIO
from urllib.parse import urlparse
from flask import request, render_template, redirect, url_for, flash
from flask_login import current_user, login_required
@ -12,7 +11,6 @@ from app.config import ADMIN_EMAIL
from app.db import Session
from app.developer.base import developer_bp
from app.email_utils import send_email
from app.image_validation import detect_image_format, ImageFormat
from app.log import LOG
from app.models import Client, RedirectUri, File, Referral
from app.utils import random_string
@ -48,25 +46,16 @@ def client_detail(client_id):
approval_form.description.data = client.description
if action == "edit" and form.validate_on_submit():
parsed_url = urlparse(form.url.data)
if parsed_url.scheme != "https":
flash("Only https urls are allowed", "error")
return redirect(url_for("developer.index"))
client.name = form.name.data
client.home_url = form.url.data
if form.icon.data:
icon_data = form.icon.data.read(10240)
if detect_image_format(icon_data) == ImageFormat.Unknown:
flash("Unknown file format", "warning")
return redirect(url_for("developer.index"))
if client.icon:
s3.delete(client.icon_id)
File.delete(client.icon)
# todo: remove current icon if any
# todo: handle remove icon
file_path = random_string(30)
file = File.create(path=file_path, user_id=client.user_id)
s3.upload_from_bytesio(file_path, BytesIO(icon_data))
s3.upload_from_bytesio(file_path, BytesIO(form.icon.data.read()))
Session.flush()
LOG.d("upload file %s to s3", file)

View File

@ -1,5 +1,3 @@
from urllib.parse import urlparse
from flask import render_template, redirect, url_for, flash
from flask_login import current_user, login_required
from flask_wtf import FlaskForm
@ -22,10 +20,6 @@ def new_client():
if form.validate_on_submit():
client = Client.create_new(form.name.data, current_user.id)
parsed_url = urlparse(form.url.data)
if parsed_url.scheme != "https":
flash("Only https urls are allowed", "error")
return redirect(url_for("developer.new_client"))
client.home_url = form.url.data
Session.commit()

View File

@ -1,13 +1,100 @@
from abc import ABC, abstractmethod
from typing import List, Tuple, Optional
from app import config
from typing import Optional, List, Tuple
import dns.resolver
from app.config import NAMESERVERS
def _get_dns_resolver():
my_resolver = dns.resolver.Resolver()
my_resolver.nameservers = config.NAMESERVERS
return my_resolver
def get_ns(hostname) -> [str]:
try:
answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
except Exception:
return []
return [a.to_text() for a in answers]
def get_cname_record(hostname) -> Optional[str]:
"""Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
try:
answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
except Exception:
return None
for a in answers:
ret = a.to_text()
return ret[:-1]
return None
def get_mx_domains(hostname) -> [(int, str)]:
"""return list of (priority, domain name) sorted by priority (lowest priority first)
domain name ends with a "." at the end.
"""
try:
answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
except Exception:
return []
ret = []
for a in answers:
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
parts = record.split(" ")
ret.append((int(parts[0]), parts[1]))
return sorted(ret, key=lambda prio_domain: prio_domain[0])
_include_spf = "include:"
def get_spf_domain(hostname) -> [str]:
"""return all domains listed in *include:*"""
try:
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
except Exception:
return []
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
record = record.decode() # record is bytes
if record.startswith("v=spf1"):
parts = record.split(" ")
for part in parts:
if part.startswith(_include_spf):
ret.append(part[part.find(_include_spf) + len(_include_spf) :])
return ret
def get_txt_record(hostname) -> [str]:
try:
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
except Exception:
return []
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
record = record.decode() # record is bytes
ret.append(record)
return ret
def is_mx_equivalent(
mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
) -> bool:
@ -18,127 +105,16 @@ def is_mx_equivalent(
The priority order is taken into account but not the priority number.
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
"""
mx_domains = sorted(mx_domains, key=lambda x: x[0])
ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x[0])
mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
ref_mx_domains = sorted(
ref_mx_domains, key=lambda priority_domain: priority_domain[0]
)
if len(mx_domains) < len(ref_mx_domains):
return False
for i in range(len(ref_mx_domains)):
for i in range(0, len(ref_mx_domains)):
if mx_domains[i][1] != ref_mx_domains[i][1]:
return False
return True
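A tiny worked example of the equivalence rule described in the docstring: the priority order matters, while the absolute priority numbers and extra trailing records do not (the server names below are illustrative):

ref = [(10, "mx1.simplelogin.co."), (20, "mx2.simplelogin.co.")]

# same order, different numbers -> equivalent
assert is_mx_equivalent([(1, "mx1.simplelogin.co."), (2, "mx2.simplelogin.co.")], ref)

# an extra, lower-priority record after the expected ones is tolerated
assert is_mx_equivalent(
    [(5, "mx1.simplelogin.co."), (10, "mx2.simplelogin.co."), (50, "backup.example.com.")],
    ref,
)

# wrong order (or missing records) fails
assert not is_mx_equivalent([(5, "mx2.simplelogin.co."), (10, "mx1.simplelogin.co.")], ref)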
class DNSClient(ABC):
@abstractmethod
def get_cname_record(self, hostname: str) -> Optional[str]:
pass
@abstractmethod
def get_mx_domains(self, hostname: str) -> List[Tuple[int, str]]:
pass
def get_spf_domain(self, hostname: str) -> List[str]:
"""
return all domains listed in *include:*
"""
try:
records = self.get_txt_record(hostname)
ret = []
for record in records:
if record.startswith("v=spf1"):
parts = record.split(" ")
for part in parts:
if part.startswith(_include_spf):
ret.append(
part[part.find(_include_spf) + len(_include_spf) :]
)
return ret
except Exception:
return []
@abstractmethod
def get_txt_record(self, hostname: str) -> List[str]:
pass
class NetworkDNSClient(DNSClient):
def __init__(self, nameservers: List[str]):
self._resolver = dns.resolver.Resolver()
self._resolver.nameservers = nameservers
def get_cname_record(self, hostname: str) -> Optional[str]:
"""
Return the CNAME record for a domain if it exists, WITHOUT the trailing period
"""
try:
answers = self._resolver.resolve(hostname, "CNAME", search=True)
for a in answers:
ret = a.to_text()
return ret[:-1]
except Exception:
return None
def get_mx_domains(self, hostname: str) -> List[Tuple[int, str]]:
"""
return list of (priority, domain name) sorted by priority (lowest priority first)
domain name ends with a "." at the end.
"""
try:
answers = self._resolver.resolve(hostname, "MX", search=True)
ret = []
for a in answers:
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
parts = record.split(" ")
ret.append((int(parts[0]), parts[1]))
return sorted(ret, key=lambda x: x[0])
except Exception:
return []
def get_txt_record(self, hostname: str) -> List[str]:
try:
answers = self._resolver.resolve(hostname, "TXT", search=True)
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
ret.append(record.decode())
return ret
except Exception:
return []
class InMemoryDNSClient(DNSClient):
def __init__(self):
self.cname_records: dict[str, Optional[str]] = {}
self.mx_records: dict[str, List[Tuple[int, str]]] = {}
self.spf_records: dict[str, List[str]] = {}
self.txt_records: dict[str, List[str]] = {}
def set_cname_record(self, hostname: str, cname: str):
self.cname_records[hostname] = cname
def set_mx_records(self, hostname: str, mx_list: List[Tuple[int, str]]):
self.mx_records[hostname] = mx_list
def set_txt_record(self, hostname: str, txt_list: List[str]):
self.txt_records[hostname] = txt_list
def get_cname_record(self, hostname: str) -> Optional[str]:
return self.cname_records.get(hostname)
def get_mx_domains(self, hostname: str) -> List[Tuple[int, str]]:
mx_list = self.mx_records.get(hostname, [])
return sorted(mx_list, key=lambda x: x[0])
def get_txt_record(self, hostname: str) -> List[str]:
return self.txt_records.get(hostname, [])
def get_network_dns_client() -> NetworkDNSClient:
return NetworkDNSClient(NAMESERVERS)
def get_mx_domains(hostname: str) -> [(int, str)]:
return get_network_dns_client().get_mx_domains(hostname)
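A small sketch of how InMemoryDNSClient can stand in for real DNS in tests, here exercising the SPF include parsing implemented in DNSClient.get_spf_domain (the TXT value is a made-up example record):

from app.dns_utils import InMemoryDNSClient

client = InMemoryDNSClient()
client.set_txt_record(
    "example.com",
    ["v=spf1 include:simplelogin.co include:_spf.example.net ~all"],
)

assert client.get_spf_domain("example.com") == ["simplelogin.co", "_spf.example.net"]
assert client.get_txt_record("unknown.example") == []  # unset hostnames return an empty list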

View File

@ -21,7 +21,6 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
RETURN_PATH = "Return-Path"
AUTHENTICATION_RESULTS = "Authentication-Results"
SL_QUEUE_ID = "X-SL-Queue-Id"
# headers used to DKIM sign in order of preference
DKIM_HEADERS = [

View File

@ -33,7 +33,6 @@ from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from jinja2 import Environment, FileSystemLoader
from sqlalchemy import func
from flask_login import current_user
from app import config
from app.db import Session
@ -69,27 +68,17 @@ VERP_TIME_START = 1640995200
VERP_HMAC_ALGO = "sha3-224"
def render(template_name: str, user: Optional[User], **kwargs) -> str:
def render(template_name, **kwargs) -> str:
templates_dir = os.path.join(config.ROOT_DIR, "templates", "emails")
env = Environment(loader=FileSystemLoader(templates_dir))
template = env.get_template(template_name)
if user is None:
if current_user and current_user.is_authenticated:
user = current_user
use_partner_template = False
if user:
use_partner_template = user.has_used_alias_from_partner()
kwargs["user"] = user
return template.render(
MAX_NB_EMAIL_FREE_PLAN=config.MAX_NB_EMAIL_FREE_PLAN,
URL=config.URL,
LANDING_PAGE_URL=config.LANDING_PAGE_URL,
YEAR=arrow.now().year,
USE_PARTNER_TEMPLATE=use_partner_template,
**kwargs,
)
@ -122,59 +111,53 @@ def send_trial_end_soon_email(user):
)
def send_activation_email(user: User, activation_link):
def send_activation_email(email, activation_link):
send_email(
user.email,
email,
"Just one more step to join SimpleLogin",
render(
"transactional/activation.txt",
user=user,
activation_link=activation_link,
email=user.email,
email=email,
),
render(
"transactional/activation.html",
user=user,
activation_link=activation_link,
email=user.email,
email=email,
),
)
def send_reset_password_email(user: User, reset_password_link):
def send_reset_password_email(email, reset_password_link):
send_email(
user.email,
email,
"Reset your password on SimpleLogin",
render(
"transactional/reset-password.txt",
user=user,
reset_password_link=reset_password_link,
),
render(
"transactional/reset-password.html",
user=user,
reset_password_link=reset_password_link,
),
)
def send_change_email(user: User, new_email, link):
def send_change_email(new_email, current_email, link):
send_email(
new_email,
"Confirm email update on SimpleLogin",
render(
"transactional/change-email.txt",
user=user,
link=link,
new_email=new_email,
current_email=user.email,
current_email=current_email,
),
render(
"transactional/change-email.html",
user=user,
link=link,
new_email=new_email,
current_email=user.email,
current_email=current_email,
),
)
@ -187,32 +170,28 @@ def send_invalid_totp_login_email(user, totp_type):
"Unsuccessful attempt to login to your SimpleLogin account",
render(
"transactional/invalid-totp-login.txt",
user=user,
type=totp_type,
),
render(
"transactional/invalid-totp-login.html",
user=user,
type=totp_type,
),
1,
)
def send_test_email_alias(user: User, email: str):
def send_test_email_alias(email, name):
send_email(
email,
f"This email is sent to {email}",
render(
"transactional/test-email.txt",
user=user,
name=user.name,
name=name,
alias=email,
),
render(
"transactional/test-email.html",
user=user,
name=user.name,
name=name,
alias=email,
),
)
@ -227,13 +206,11 @@ def send_cannot_create_directory_alias(user, alias_address, directory_name):
f"Alias {alias_address} cannot be created",
render(
"transactional/cannot-create-alias-directory.txt",
user=user,
alias=alias_address,
directory=directory_name,
),
render(
"transactional/cannot-create-alias-directory.html",
user=user,
alias=alias_address,
directory=directory_name,
),
@ -251,13 +228,11 @@ def send_cannot_create_directory_alias_disabled(user, alias_address, directory_n
f"Alias {alias_address} cannot be created",
render(
"transactional/cannot-create-alias-directory-disabled.txt",
user=user,
alias=alias_address,
directory=directory_name,
),
render(
"transactional/cannot-create-alias-directory-disabled.html",
user=user,
alias=alias_address,
directory=directory_name,
),
@ -273,13 +248,11 @@ def send_cannot_create_domain_alias(user, alias, domain):
f"Alias {alias} cannot be created",
render(
"transactional/cannot-create-alias-domain.txt",
user=user,
alias=alias,
domain=domain,
),
render(
"transactional/cannot-create-alias-domain.html",
user=user,
alias=alias,
domain=domain,
),
@ -521,10 +494,9 @@ def delete_header(msg: Message, header: str):
def sanitize_header(msg: Message, header: str):
"""remove trailing space and remove linebreak from a header"""
header_lowercase = header.lower()
for i in reversed(range(len(msg._headers))):
header_name = msg._headers[i][0].lower()
if header_name == header_lowercase:
if header_name == header.lower():
# msg._headers[i] is a tuple like ('From', 'hey@google.com')
if msg._headers[i][1]:
msg._headers[i] = (
@ -548,9 +520,7 @@ def can_create_directory_for_address(email_address: str) -> bool:
for domain in config.ALIAS_DOMAINS:
if email_address.endswith("@" + domain):
return True
LOG.i(
f"Cannot create address in directory for {email_address} since it does not belong to a valid directory domain"
)
return False
@ -948,20 +918,10 @@ def decode_text(text: str, encoding: EmailEncoding = EmailEncoding.NO) -> str:
return text
def add_header(
msg: Message, text_header, html_header=None, subject_prefix=None
) -> Message:
def add_header(msg: Message, text_header, html_header=None) -> Message:
if not html_header:
html_header = text_header.replace("\n", "<br>")
if subject_prefix is not None:
subject = msg[headers.SUBJECT]
if not subject:
msg.add_header(headers.SUBJECT, subject_prefix)
else:
subject = f"{subject_prefix} {subject}"
msg.replace_header(headers.SUBJECT, subject)
content_type = msg.get_content_type().lower()
if content_type == "text/plain":
encoding = get_encoding(msg)
@ -1292,7 +1252,6 @@ def spf_pass(
f"SimpleLogin Alert: attempt to send emails from your alias {alias.email} from unknown IP Address",
render(
"transactional/spf-fail.txt",
user=user,
alias=alias.email,
ip=ip,
mailbox_url=config.URL + f"/dashboard/mailbox/{mailbox.id}#spf",
@ -1302,7 +1261,6 @@ def spf_pass(
),
render(
"transactional/spf-fail.html",
user=user,
ip=ip,
mailbox_url=config.URL + f"/dashboard/mailbox/{mailbox.id}#spf",
to_email=contact_email,
@ -1445,7 +1403,7 @@ def generate_verp_email(
# Time is in minute granularity and starts counting on 2022-01-01 to reduce the bytes needed to represent it
data = [
verp_type.value,
object_id or 0,
object_id,
int((time.time() - VERP_TIME_START) / 60),
]
json_payload = json.dumps(data).encode("utf-8")

View File

View File

@ -1,95 +0,0 @@
from abc import ABC, abstractmethod
import newrelic.agent
from app import config
from app.db import Session
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.log import LOG
from app.models import User, PartnerUser, SyncEvent
from app.proton.utils import get_proton_partner
from typing import Optional
NOTIFICATION_CHANNEL = "simplelogin_sync_events"
class Dispatcher(ABC):
@abstractmethod
def send(self, event: bytes):
pass
class PostgresDispatcher(Dispatcher):
def send(self, event: bytes):
instance = SyncEvent.create(content=event, flush=True)
Session.execute(f"NOTIFY {NOTIFICATION_CHANNEL}, '{instance.id}';")
@staticmethod
def get():
return PostgresDispatcher()
class GlobalDispatcher:
__dispatcher: Optional[Dispatcher] = None
@staticmethod
def get_dispatcher() -> Dispatcher:
if not GlobalDispatcher.__dispatcher:
GlobalDispatcher.__dispatcher = PostgresDispatcher.get()
return GlobalDispatcher.__dispatcher
@staticmethod
def set_dispatcher(dispatcher: Optional[Dispatcher]):
GlobalDispatcher.__dispatcher = dispatcher
class EventDispatcher:
@staticmethod
def send_event(
user: User,
content: event_pb2.EventContent,
dispatcher: Optional[Dispatcher] = None,
skip_if_webhook_missing: bool = True,
):
if dispatcher is None:
dispatcher = GlobalDispatcher.get_dispatcher()
if config.EVENT_WEBHOOK_DISABLE:
LOG.i("Not sending events because webhook is disabled")
return
if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
LOG.i(
"Not sending events because webhook is not configured and allowed to be empty"
)
return
partner_user = EventDispatcher.__partner_user(user.id)
if not partner_user:
LOG.i(f"Not sending events because there's no partner user for user {user}")
return
event = event_pb2.Event(
user_id=user.id,
external_user_id=partner_user.external_user_id,
partner_id=partner_user.partner_id,
content=content,
)
serialized = event.SerializeToString()
dispatcher.send(serialized)
event_type = content.WhichOneof("content")
newrelic.agent.record_custom_event("EventStoredToDb", {"type": event_type})
LOG.i("Sent event to the dispatcher")
@staticmethod
def __partner_user(user_id: int) -> Optional[PartnerUser]:
# Check if the current user has a partner_id
try:
proton_partner_id = get_proton_partner().id
except ProtonPartnerNotSetUp:
return None
# It has. Retrieve the information for the PartnerUser
return PartnerUser.get_by(user_id=user_id, partner_id=proton_partner_id)
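The `Dispatcher` interface above makes the event transport pluggable through `GlobalDispatcher.set_dispatcher()`. A minimal sketch, assuming only the master-branch module shown above, of an in-memory dispatcher that a test could swap in for `PostgresDispatcher` (the class name is illustrative, not from the repository):

```python
# Sketch only: collect serialized events in memory instead of writing SyncEvent
# rows and issuing a Postgres NOTIFY.
from app.events.event_dispatcher import Dispatcher, GlobalDispatcher


class InMemoryDispatcher(Dispatcher):
    def __init__(self):
        self.events: list[bytes] = []

    def send(self, event: bytes):
        # EventDispatcher.send_event() hands over the protobuf already serialized
        self.events.append(event)


# e.g. in a test fixture
GlobalDispatcher.set_dispatcher(InMemoryDispatcher())
```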

View File

@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: event.proto
# Protobuf Python Version: 5.27.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
_runtime_version.Domain.PUBLIC,
5,
27,
0,
'',
'event.proto'
)
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"\\\n\x0c\x41liasCreated\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0c\n\x04note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\x12\x12\n\ncreated_at\x18\x05 \x01(\r\"T\n\x12\x41liasStatusChanged\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\x12\x12\n\ncreated_at\x18\x04 \x01(\r\")\n\x0c\x41liasDeleted\x12\n\n\x02id\x18\x01 \x01(\r\x12\r\n\x05\x65mail\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
DESCRIPTOR._loaded_options = None
_globals['_USERPLANCHANGED']._serialized_start=35
_globals['_USERPLANCHANGED']._serialized_end=75
_globals['_USERDELETED']._serialized_start=77
_globals['_USERDELETED']._serialized_end=90
_globals['_ALIASCREATED']._serialized_start=92
_globals['_ALIASCREATED']._serialized_end=184
_globals['_ALIASSTATUSCHANGED']._serialized_start=186
_globals['_ALIASSTATUSCHANGED']._serialized_end=270
_globals['_ALIASDELETED']._serialized_start=272
_globals['_ALIASDELETED']._serialized_end=313
_globals['_ALIASCREATEDLIST']._serialized_start=315
_globals['_ALIASCREATEDLIST']._serialized_end=383
_globals['_EVENTCONTENT']._serialized_start=386
_globals['_EVENTCONTENT']._serialized_end=789
_globals['_EVENT']._serialized_start=791
_globals['_EVENT']._serialized_end=912
# @@protoc_insertion_point(module_scope)

View File

@ -1,84 +0,0 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
DESCRIPTOR: _descriptor.FileDescriptor
class UserPlanChanged(_message.Message):
__slots__ = ("plan_end_time",)
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
plan_end_time: int
def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
class UserDeleted(_message.Message):
__slots__ = ()
def __init__(self) -> None: ...
class AliasCreated(_message.Message):
__slots__ = ("id", "email", "note", "enabled", "created_at")
ID_FIELD_NUMBER: _ClassVar[int]
EMAIL_FIELD_NUMBER: _ClassVar[int]
NOTE_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
CREATED_AT_FIELD_NUMBER: _ClassVar[int]
id: int
email: str
note: str
enabled: bool
created_at: int
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., note: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
class AliasStatusChanged(_message.Message):
__slots__ = ("id", "email", "enabled", "created_at")
ID_FIELD_NUMBER: _ClassVar[int]
EMAIL_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
CREATED_AT_FIELD_NUMBER: _ClassVar[int]
id: int
email: str
enabled: bool
created_at: int
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ..., enabled: bool = ..., created_at: _Optional[int] = ...) -> None: ...
class AliasDeleted(_message.Message):
__slots__ = ("id", "email")
ID_FIELD_NUMBER: _ClassVar[int]
EMAIL_FIELD_NUMBER: _ClassVar[int]
id: int
email: str
def __init__(self, id: _Optional[int] = ..., email: _Optional[str] = ...) -> None: ...
class AliasCreatedList(_message.Message):
__slots__ = ("events",)
EVENTS_FIELD_NUMBER: _ClassVar[int]
events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
class EventContent(_message.Message):
__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
USER_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
user_plan_change: UserPlanChanged
user_deleted: UserDeleted
alias_created: AliasCreated
alias_status_change: AliasStatusChanged
alias_deleted: AliasDeleted
alias_create_list: AliasCreatedList
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...
class Event(_message.Message):
__slots__ = ("user_id", "external_user_id", "partner_id", "content")
USER_ID_FIELD_NUMBER: _ClassVar[int]
EXTERNAL_USER_ID_FIELD_NUMBER: _ClassVar[int]
PARTNER_ID_FIELD_NUMBER: _ClassVar[int]
CONTENT_FIELD_NUMBER: _ClassVar[int]
user_id: int
external_user_id: str
partner_id: int
content: EventContent
def __init__(self, user_id: _Optional[int] = ..., external_user_id: _Optional[str] = ..., partner_id: _Optional[int] = ..., content: _Optional[_Union[EventContent, _Mapping]] = ...) -> None: ...

View File

@ -30,9 +30,7 @@ def apply_dmarc_policy_for_forward_phase(
) -> Tuple[Message, Optional[str]]:
spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
if not DMARC_CHECK_ENABLED or not spam_result:
LOG.i("DMARC check disabled")
return msg, None
LOG.i(f"Spam check result in {spam_result}")
from_header = get_header_unicode(msg[headers.FROM])
@ -64,7 +62,6 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
msg,
warning_plain_text,
warning_html,
subject_prefix="[Possible phishing attempt]",
)
return changed_msg, None
@ -77,7 +74,6 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
msg,
warning_plain_text,
warning_html,
subject_prefix="[Possible phishing attempt]",
)
return changed_msg, None
@ -106,14 +102,12 @@ More info on https://simplelogin.io/docs/getting-started/anti-phishing/
f"An email sent to {alias.email} has been quarantined",
render(
"transactional/message-quarantine-dmarc.txt.jinja2",
user=user,
from_header=from_header,
alias=alias,
refused_email_url=email_log.get_dashboard_url(),
),
render(
"transactional/message-quarantine-dmarc.html",
user=user,
from_header=from_header,
alias=alias,
refused_email_url=email_log.get_dashboard_url(),
@ -137,7 +131,7 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
refused_email = RefusedEmail.create(
full_report_path=s3_report_path, user_id=alias.user_id, flush=True
)
email_log = EmailLog.create(
return EmailLog.create(
user_id=alias.user_id,
mailbox_id=alias.mailbox_id,
contact_id=contact.id,
@ -148,7 +142,6 @@ def quarantine_dmarc_failed_forward_email(alias, contact, envelope, msg) -> Emai
blocked=True,
commit=True,
)
return email_log
def apply_dmarc_policy_for_reply_phase(
@ -156,10 +149,8 @@ def apply_dmarc_policy_for_reply_phase(
) -> Optional[str]:
spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
if not DMARC_CHECK_ENABLED or not spam_result:
LOG.i("DMARC check disabled")
return None
LOG.i(f"Spam check result is {spam_result}")
if spam_result.dmarc not in (
DmarcCheckResult.quarantine,
DmarcCheckResult.reject,
@ -178,14 +169,12 @@ def apply_dmarc_policy_for_reply_phase(
f"Attempt to send an email to your contact {contact_recipient.email} from {envelope.mail_from}",
render(
"transactional/spoof-reply.txt.jinja2",
user=alias_from.user,
contact=contact_recipient,
alias=alias_from,
sender=envelope.mail_from,
),
render(
"transactional/spoof-reply.html",
user=alias_from.user,
contact=contact_recipient,
alias=alias_from,
sender=envelope.mail_from,

View File

@ -319,13 +319,11 @@ def report_complaint_to_user_in_forward_phase(
f"Abuse report from {capitalized_name}",
render(
"transactional/provider-complaint-forward-phase.txt.jinja2",
user=user,
email=mailbox_email,
provider=capitalized_name,
),
render(
"transactional/provider-complaint-forward-phase.html",
user=user,
email=mailbox_email,
provider=capitalized_name,
),

View File

@ -3,7 +3,6 @@ from email.header import Header
from email.message import Message
from app.email import headers
from app import config
from app.email_utils import add_or_replace_header, delete_header
from app.handler.unsubscribe_encoder import (
UnsubscribeEncoder,
@ -48,11 +47,6 @@ class UnsubscribeGenerator:
method = raw_method[start + 1 : end]
url_data = urllib.parse.urlparse(method)
if url_data.scheme == "mailto":
if url_data.path == config.UNSUBSCRIBER:
LOG.debug(
f"Skipping replacing unsubscribe since the original email already points to {config.UNSUBSCRIBER}"
)
return message
query_data = urllib.parse.parse_qs(url_data.query)
mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
LOG.debug(f"Unsub is mailto to {mailto_unsubs}")

View File

@ -5,7 +5,6 @@ from typing import Optional
from aiosmtpd.smtp import Envelope
from app import config
from app import alias_utils
from app.db import Session
from app.email import headers, status
from app.email_utils import (
@ -102,8 +101,7 @@ class UnsubscribeHandler:
mailbox.email, alias
):
return status.E509
LOG.i(f"User disabled alias {alias} via unsubscribe header")
alias_utils.change_alias_status(alias, enabled=False)
alias.enabled = False
Session.commit()
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
for mailbox in alias.mailboxes:

View File

@ -30,10 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
LOG.d("Download file %s from %s", batch_import.file, file_url)
r = requests.get(file_url)
# Replace invisible character
lines = [
line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
]
lines = [line.decode("utf-8") for line in r.iter_lines()]
import_from_csv(batch_import, user, lines)

View File

@ -1,52 +0,0 @@
import newrelic.agent
from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
from app.log import LOG
from app.models import User, Alias
def send_alias_creation_events_for_user(
user: User, dispatcher: Dispatcher, chunk_size=50
):
if user.disabled:
LOG.i("User {user} is disabled. Skipping sending events for that user")
return
chunk_size = min(chunk_size, 50)
event_list = []
LOG.i("Sending alias create events for user {user}")
for alias in (
Alias.yield_per_query(chunk_size)
.filter_by(user_id=user.id)
.order_by(Alias.id.asc())
):
event_list.append(
AliasCreated(
id=alias.id,
email=alias.email,
note=alias.note,
enabled=alias.enabled,
created_at=int(alias.created_at.timestamp()),
)
)
if len(event_list) >= chunk_size:
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
newrelic.agent.record_custom_metric(
"Custom/event_alias_created_event", len(event_list)
)
event_list = []
if len(event_list) > 0:
LOG.i(f"Sending {len(event_list)} alias create event for {user}")
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
newrelic.agent.record_custom_metric(
"Custom/event_alias_created_event", len(event_list)
)

View File

@ -137,9 +137,7 @@ class ExportUserDataJob:
msg[headers.SUBJECT] = "Your SimpleLogin data"
msg[headers.FROM] = f'"SimpleLogin (noreply)" <{config.NOREPLY}>'
msg[headers.TO] = to_email
msg.attach(
MIMEText(render("transactional/user-report.html", user=self._user), "html")
)
msg.attach(MIMEText(render("transactional/user-report.html"), "html"))
attachment = MIMEApplication(zipped_contents.read())
attachment.add_header(
"Content-Disposition", "attachment", filename="user_report.zip"

View File

@ -76,6 +76,7 @@ class SendRequest:
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
self.save_request_to_file(file_path)
@staticmethod
def save_request_to_failed_dir(self, prefix: str = "DeliveryRetryFail"):
file_name = (
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"

View File

@ -1,260 +0,0 @@
import dataclasses
import secrets
import random
from typing import Optional
import arrow
from app import config
from app.config import JOB_DELETE_MAILBOX
from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
send_email,
render,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import User, Mailbox, Job, MailboxActivation
@dataclasses.dataclass
class CreateMailboxOutput:
mailbox: Mailbox
activation: Optional[MailboxActivation]
class MailboxError(Exception):
def __init__(self, msg: str):
self.msg = msg
class OnlyPaidError(MailboxError):
def __init__(self):
self.msg = "Only available for paid plans"
class CannotVerifyError(MailboxError):
def __init__(self, msg: str):
self.msg = msg
MAX_ACTIVATION_TRIES = 3
def create_mailbox(
user: User,
email: str,
verified: bool = False,
send_email: bool = True,
use_digit_codes: bool = False,
send_link: bool = True,
) -> CreateMailboxOutput:
if not user.is_premium():
LOG.i(
f"User {user} has tried to create mailbox with {email} but is not premium"
)
raise OnlyPaidError()
if not is_valid_email(email):
LOG.i(
f"User {user} has tried to create mailbox with {email} but is not valid email"
)
raise MailboxError("Invalid email")
elif mailbox_already_used(email, user):
LOG.i(
f"User {user} has tried to create mailbox with {email} but email is already used"
)
raise MailboxError("Email already used")
elif not email_can_be_used_as_mailbox(email):
LOG.i(
f"User {user} has tried to create mailbox with {email} but email is invalid"
)
raise MailboxError("Invalid email")
new_mailbox = Mailbox.create(
email=email, user_id=user.id, verified=verified, commit=True
)
if verified:
LOG.i(f"User {user} as created a pre-verified mailbox with {email}")
return CreateMailboxOutput(mailbox=new_mailbox, activation=None)
LOG.i(f"User {user} has created mailbox with {email}")
activation = generate_activation_code(new_mailbox, use_digit_code=use_digit_codes)
output = CreateMailboxOutput(mailbox=new_mailbox, activation=activation)
if not send_email:
LOG.i(f"Skipping sending validation email for mailbox {new_mailbox}")
return output
send_verification_email(
user,
new_mailbox,
activation=activation,
send_link=send_link,
)
return output
def delete_mailbox(
user: User, mailbox_id: int, transfer_mailbox_id: Optional[int]
) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
LOG.i(
f"User {user} has tried to delete another user's mailbox with {mailbox_id}"
)
raise MailboxError("Invalid mailbox")
if mailbox.id == user.default_mailbox_id:
LOG.i(f"User {user} has tried to delete the default mailbox")
raise MailboxError("Cannot delete your default mailbox")
if transfer_mailbox_id and transfer_mailbox_id > 0:
transfer_mailbox = Mailbox.get(transfer_mailbox_id)
if not transfer_mailbox or transfer_mailbox.user_id != user.id:
LOG.i(
f"User {user} has tried to transfer to a mailbox owned by another user"
)
raise MailboxError("You must transfer the aliases to a mailbox you own")
if transfer_mailbox.id == mailbox.id:
LOG.i(
f"User {user} has tried to transfer to the same mailbox he is deleting"
)
raise MailboxError(
"You can not transfer the aliases to the mailbox you want to delete"
)
if not transfer_mailbox.verified:
LOG.i(f"User {user} has tried to transfer to a non verified mailbox")
MailboxError("Your new mailbox is not verified")
# Schedule the delete mailbox job
LOG.i(
f"User {user} has scheduled delete mailbox job for {mailbox.id} with transfer to mailbox {transfer_mailbox_id}"
)
Job.create(
name=JOB_DELETE_MAILBOX,
payload={
"mailbox_id": mailbox.id,
"transfer_mailbox_id": transfer_mailbox_id
if transfer_mailbox_id and transfer_mailbox_id > 0
else None,
},
run_at=arrow.now(),
commit=True,
)
return mailbox
def clear_activation_codes_for_mailbox(mailbox: Mailbox):
Session.query(MailboxActivation).filter(
MailboxActivation.mailbox_id == mailbox.id
).delete()
Session.commit()
def verify_mailbox_code(user: User, mailbox_id: int, code: str) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it does not exist"
)
raise MailboxError("Invalid mailbox")
if mailbox.verified:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it's already verified"
)
clear_activation_codes_for_mailbox(mailbox)
return mailbox
if mailbox.user_id != user.id:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because it's owned by another user"
)
raise MailboxError("Invalid mailbox")
activation = (
MailboxActivation.filter(MailboxActivation.mailbox_id == mailbox_id)
.order_by(MailboxActivation.created_at.desc())
.first()
)
if not activation:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because there is no activation"
)
raise MailboxError("Invalid code")
if activation.tries >= MAX_ACTIVATION_TRIES:
LOG.i(f"User {user} failed to verify mailbox {mailbox_id} more than 3 times")
clear_activation_codes_for_mailbox(mailbox)
raise CannotVerifyError("Invalid activation code. Please request another code.")
if activation.created_at < arrow.now().shift(minutes=-15):
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because code is too old"
)
clear_activation_codes_for_mailbox(mailbox)
raise CannotVerifyError("Invalid activation code. Please request another code.")
if code != activation.code:
LOG.i(
f"User {user} failed to verify mailbox {mailbox_id} because code does not match"
)
activation.tries = activation.tries + 1
Session.commit()
raise CannotVerifyError("Invalid activation code")
LOG.i(f"User {user} has verified mailbox {mailbox_id}")
mailbox.verified = True
clear_activation_codes_for_mailbox(mailbox)
return mailbox
def generate_activation_code(
mailbox: Mailbox, use_digit_code: bool = False
) -> MailboxActivation:
clear_activation_codes_for_mailbox(mailbox)
if use_digit_code:
code = "{:06d}".format(random.randint(1, 999999))
else:
code = secrets.token_urlsafe(16)
return MailboxActivation.create(
mailbox_id=mailbox.id,
code=code,
tries=0,
commit=True,
)
def send_verification_email(
user: User, mailbox: Mailbox, activation: MailboxActivation, send_link: bool = True
):
LOG.i(
f"Sending mailbox verification email to {mailbox.email} with send link={send_link}"
)
if send_link:
verification_url = (
config.URL
+ "/dashboard/mailbox_verify"
+ f"?mailbox_id={mailbox.id}&code={activation.code}"
)
else:
verification_url = None
send_email(
mailbox.email,
f"Please confirm your mailbox {mailbox.email}",
render(
"transactional/verify-mailbox.txt.jinja2",
user=user,
code=activation.code,
link=verification_url,
mailbox_email=mailbox.email,
),
render(
"transactional/verify-mailbox.html",
user=user,
code=activation.code,
link=verification_url,
mailbox_email=mailbox.email,
),
)
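Taken together, the helpers above form a create-then-verify flow: `create_mailbox()` stores the mailbox plus a `MailboxActivation`, and `verify_mailbox_code()` later consumes the code. A hedged usage sketch built only on the signatures in this file (the wrapper functions and the `app.mailbox_utils` module path are assumptions; the file name is not shown in this compare view):

```python
from app.models import User, Mailbox
from app.mailbox_utils import create_mailbox, verify_mailbox_code  # path assumed


def start_mailbox_setup(user: User, address: str) -> int:
    # Creates an unverified mailbox and emails a 6-digit code (no link);
    # raises OnlyPaidError / MailboxError on invalid input, as defined above.
    output = create_mailbox(user, address, use_digit_codes=True, send_link=False)
    return output.mailbox.id


def finish_mailbox_setup(user: User, mailbox_id: int, code: str) -> Mailbox:
    # Confirms the mailbox with the code the user received, or raises
    # CannotVerifyError after too many or expired attempts.
    return verify_mailbox_code(user, mailbox_id, code)
```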

View File

@ -27,7 +27,7 @@ from sqlalchemy.orm import deferred
from sqlalchemy.sql import and_
from sqlalchemy_utils import ArrowType
from app import config, rate_limiter
from app import config
from app import s3
from app.db import Session
from app.dns_utils import get_mx_domains
@ -235,7 +235,6 @@ class AuditLogActionEnum(EnumE):
download_provider_complaint = 8
disable_user = 9
enable_user = 10
stop_trial = 11
class Phase(EnumE):
@ -263,15 +262,6 @@ class UnsubscribeBehaviourEnum(EnumE):
PreserveOriginal = 2
class AliasDeleteReason(EnumE):
Unspecified = 0
UserHasBeenDeleted = 1
ManualAction = 2
DirectoryDeleted = 3
MailboxDeleted = 4
CustomDomainDeleted = 5
class IntEnumType(sa.types.TypeDecorator):
impl = sa.Integer
@ -336,10 +326,9 @@ class Fido(Base, ModelMixin):
class User(Base, ModelMixin, UserMixin, PasswordOracle):
__tablename__ = "users"
FLAG_DISABLE_CREATE_CONTACTS = 1 << 0
FLAG_FREE_DISABLE_CREATE_ALIAS = 1 << 0
FLAG_CREATED_FROM_PARTNER = 1 << 1
FLAG_FREE_OLD_ALIAS_LIMIT = 1 << 2
FLAG_CREATED_ALIAS_FROM_PARTNER = 1 << 3
email = sa.Column(sa.String(256), unique=True, nullable=False)
@ -535,15 +524,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
sa.Boolean, default=True, nullable=False, server_default="1"
)
# user opted in for data breach check
enable_data_breach_check = sa.Column(
sa.Boolean, default=False, nullable=False, server_default="0"
)
# bitwise flags. Allow for future expansion
flags = sa.Column(
sa.BigInteger,
default=FLAG_DISABLE_CREATE_CONTACTS,
default=FLAG_FREE_DISABLE_CREATE_ALIAS,
server_default="0",
nullable=False,
)
@ -667,27 +651,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return user
@classmethod
def delete(cls, obj_id, commit=False):
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserDeleted, EventContent
user: User = cls.get(obj_id)
EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()))
# Manually delete all aliases for the user that is about to be deleted
from app.alias_utils import delete_alias
for alias in Alias.filter_by(user_id=user.id):
delete_alias(alias, user, AliasDeleteReason.UserHasBeenDeleted)
res = super(User, cls).delete(obj_id)
if commit:
Session.commit()
return res
def get_active_subscription(
self, include_partner_subscription: bool = True
) -> Optional[
@ -763,11 +726,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return True
def is_active(self) -> bool:
if self.delete_on is None:
return True
return self.delete_on < arrow.now()
def in_trial(self):
"""return True if user does not have lifetime licence or an active subscription AND is in trial period"""
if self.lifetime_or_active_subscription():
@ -869,9 +827,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
Whether user can create a new alias. User can't create a new alias if
- has more than 15 aliases in the free plan, *even in the free trial*
"""
if not self.is_active():
return False
if self.disabled:
return False
@ -952,11 +907,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return sub
def verified_custom_domains(self) -> List["CustomDomain"]:
return (
CustomDomain.filter_by(user_id=self.id, ownership_verified=True)
.order_by(CustomDomain.domain.asc())
.all()
)
return CustomDomain.filter_by(user_id=self.id, ownership_verified=True).all()
def mailboxes(self) -> List["Mailbox"]:
"""list of mailbox that user own"""
@ -973,7 +924,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
def has_custom_domain(self):
return CustomDomain.filter_by(user_id=self.id, verified=True).count() > 0
def custom_domains(self) -> List["CustomDomain"]:
def custom_domains(self):
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
def available_domains_for_random_alias(
@ -985,8 +936,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
- the domain
"""
res = []
for domain in self.get_sl_domains(alias_options=alias_options):
res.append((True, domain.domain))
for domain in self.available_sl_domains(alias_options=alias_options):
res.append((True, domain))
for custom_domain in self.verified_custom_domains():
res.append((False, custom_domain.domain))
@ -1128,10 +1079,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
- Verified custom domains
"""
domains = [
sl_domain.domain
for sl_domain in self.get_sl_domains(alias_options=alias_options)
]
domains = self.available_sl_domains(alias_options=alias_options)
for custom_domain in self.verified_custom_domains():
domains.append(custom_domain.domain)
@ -1168,17 +1116,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
def can_create_contacts(self) -> bool:
if self.is_premium():
return True
if self.flags & User.FLAG_DISABLE_CREATE_CONTACTS == 0:
if self.flags & User.FLAG_FREE_DISABLE_CREATE_ALIAS == 0:
return True
return not config.DISABLE_CREATE_CONTACTS_FOR_FREE_USERS
def has_used_alias_from_partner(self) -> bool:
return (
self.flags
& (User.FLAG_CREATED_ALIAS_FROM_PARTNER | User.FLAG_CREATED_FROM_PARTNER)
> 0
)
def __repr__(self):
return f"<User {self.id} {self.name} {self.email}>"
@ -1468,9 +1409,6 @@ def generate_random_alias_email(
class Alias(Base, ModelMixin):
__tablename__ = "alias"
FLAG_PARTNER_CREATED = 1 << 0
user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
)
@ -1480,9 +1418,6 @@ class Alias(Base, ModelMixin):
name = sa.Column(sa.String(128), nullable=True, default=None)
enabled = sa.Column(sa.Boolean(), default=True, nullable=False)
flags = sa.Column(
sa.BigInteger(), default=0, server_default="0", nullable=False, index=True
)
custom_domain_id = sa.Column(
sa.ForeignKey("custom_domain.id", ondelete="cascade"), nullable=True, index=True
@ -1560,8 +1495,6 @@ class Alias(Base, ModelMixin):
TSVector(), sa.Computed("to_tsvector('english', note)", persisted=True)
)
last_email_log_id = sa.Column(sa.Integer, default=None, nullable=True)
__table_args__ = (
Index("ix_video___ts_vector__", ts_vector, postgresql_using="gin"),
# index on note column using pg_trgm
@ -1630,15 +1563,6 @@ class Alias(Base, ModelMixin):
flush = kw.pop("flush", False)
new_alias = cls(**kw)
user = User.get(new_alias.user_id)
if user.is_premium():
limits = config.ALIAS_CREATE_RATE_LIMIT_PAID
else:
limits = config.ALIAS_CREATE_RATE_LIMIT_FREE
# limits is array of (hits,days)
for limit in limits:
key = f"alias_create_{limit[1]}d:{user.id}"
rate_limiter.check_bucket_limit(key, limit[0], limit[1])
email = kw["email"]
# make sure email is lowercase and doesn't have any whitespace
@ -1660,31 +1584,12 @@ class Alias(Base, ModelMixin):
Session.add(new_alias)
DailyMetric.get_or_create_today_metric().nb_alias += 1
if (
new_alias.flags & cls.FLAG_PARTNER_CREATED > 0
and new_alias.user.flags & User.FLAG_CREATED_ALIAS_FROM_PARTNER == 0
):
user.flags = user.flags | User.FLAG_CREATED_ALIAS_FROM_PARTNER
if commit:
Session.commit()
if flush:
Session.flush()
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import AliasCreated, EventContent
event = AliasCreated(
id=new_alias.id,
email=new_alias.email,
note=new_alias.note,
enabled=True,
created_at=int(new_alias.created_at.timestamp()),
)
EventDispatcher.send_event(user, EventContent(alias_created=event))
return new_alias
@classmethod
@ -1863,8 +1768,6 @@ class Contact(Base, ModelMixin):
MAX_NAME_LENGTH = 512
FLAG_PARTNER_CREATED = 1 << 0
__tablename__ = "contact"
__table_args__ = (
@ -1923,9 +1826,6 @@ class Contact(Base, ModelMixin):
# whether contact is created automatically during the forward phase
automatic_created = sa.Column(sa.Boolean, nullable=True, default=False)
# contact flags
flags = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
@property
def email(self):
return self.website_email
@ -2145,20 +2045,6 @@ class EmailLog(Base, ModelMixin):
def get_dashboard_url(self):
return f"{config.URL}/dashboard/refused_email?highlight_id={self.id}"
@classmethod
def create(cls, *args, **kwargs):
commit = kwargs.pop("commit", False)
email_log = super().create(*args, **kwargs)
Session.flush()
if "alias_id" in kwargs:
sql = "UPDATE alias SET last_email_log_id = :el_id WHERE id = :alias_id"
Session.execute(
sql, {"el_id": email_log.id, "alias_id": kwargs["alias_id"]}
)
if commit:
Session.commit()
return email_log
def __repr__(self):
return f"<EmailLog {self.id}>"
@ -2285,12 +2171,6 @@ class DeletedAlias(Base, ModelMixin):
__tablename__ = "deleted_alias"
email = sa.Column(sa.String(256), unique=True, nullable=False)
reason = sa.Column(
IntEnumType(AliasDeleteReason),
nullable=False,
default=AliasDeleteReason.Unspecified,
server_default=str(AliasDeleteReason.Unspecified.value),
)
@classmethod
def create(cls, **kw):
@ -2424,18 +2304,6 @@ class CustomDomain(Base, ModelMixin):
sa.Boolean, nullable=False, default=False, server_default="0"
)
partner_id = sa.Column(
sa.Integer,
sa.ForeignKey("partner.id"),
nullable=True,
default=None,
server_default=None,
)
pending_deletion = sa.Column(
sa.Boolean, nullable=False, default=False, server_default="0"
)
__table_args__ = (
Index(
"ix_unique_domain", # Index name
@ -2460,6 +2328,9 @@ class CustomDomain(Base, ModelMixin):
def get_trash_url(self):
return config.URL + f"/dashboard/domains/{self.id}/trash"
def get_ownership_dns_txt_value(self):
return f"sl-verification={self.ownership_txt_token}"
@classmethod
def create(cls, **kwargs):
domain = kwargs.get("domain")
@ -2487,13 +2358,6 @@ class CustomDomain(Base, ModelMixin):
if obj.is_sl_subdomain:
DeletedSubdomain.create(domain=obj.domain)
from app import alias_utils
for alias in Alias.filter_by(custom_domain_id=obj_id):
alias_utils.delete_alias(
alias, obj.user, AliasDeleteReason.CustomDomainDeleted
)
return super(CustomDomain, cls).delete(obj_id)
@property
@ -2501,7 +2365,7 @@ class CustomDomain(Base, ModelMixin):
return sorted(self._auto_create_rules, key=lambda rule: rule.order)
def __repr__(self):
return f"<Custom Domain {self.id} {self.domain}>"
return f"<Custom Domain {self.domain}>"
class AutoCreateRule(Base, ModelMixin):
@ -2566,12 +2430,6 @@ class DomainDeletedAlias(Base, ModelMixin):
domain = orm.relationship(CustomDomain)
user = orm.relationship(User, foreign_keys=[user_id])
reason = sa.Column(
IntEnumType(AliasDeleteReason),
nullable=False,
default=AliasDeleteReason.Unspecified,
server_default=str(AliasDeleteReason.Unspecified.value),
)
@classmethod
def create(cls, **kw):
@ -2663,7 +2521,7 @@ class Directory(Base, ModelMixin):
for alias in Alias.filter_by(directory_id=obj_id):
from app import alias_utils
alias_utils.delete_alias(alias, user, AliasDeleteReason.DirectoryDeleted)
alias_utils.delete_alias(alias, user)
DeletedDirectory.create(name=obj.name)
cls.filter(cls.id == obj_id).delete()
@ -2690,13 +2548,10 @@ class Job(Base, ModelMixin):
nullable=False,
server_default=str(JobState.ready.value),
default=JobState.ready.value,
index=True,
)
attempts = sa.Column(sa.Integer, nullable=False, server_default="0", default=0)
taken_at = sa.Column(ArrowType, nullable=True)
__table_args__ = (Index("ix_state_run_at_taken_at", state, run_at, taken_at),)
def __repr__(self):
return f"<Job {self.id} {self.name} {self.payload}>"
@ -2742,15 +2597,10 @@ class Mailbox(Base, ModelMixin):
return False
def nb_alias(self):
alias_ids = set(
am.alias_id
for am in AliasMailbox.filter_by(mailbox_id=self.id).values(
AliasMailbox.alias_id
return (
AliasMailbox.filter_by(mailbox_id=self.id).count()
+ Alias.filter_by(mailbox_id=self.id).count()
)
)
for alias in Alias.filter_by(mailbox_id=self.id).values(Alias.id):
alias_ids.add(alias.id)
return len(alias_ids)
def is_proton(self) -> bool:
if (
@ -2791,7 +2641,7 @@ class Mailbox(Base, ModelMixin):
from app import alias_utils
# only put aliases that have mailbox as a single mailbox into trash
alias_utils.delete_alias(alias, user, AliasDeleteReason.MailboxDeleted)
alias_utils.delete_alias(alias, user)
Session.commit()
cls.filter(cls.id == obj_id).delete()
@ -2799,15 +2649,12 @@ class Mailbox(Base, ModelMixin):
@property
def aliases(self) -> [Alias]:
ret = dict(
(alias.id, alias) for alias in Alias.filter_by(mailbox_id=self.id).all()
)
ret = Alias.filter_by(mailbox_id=self.id).all()
for am in AliasMailbox.filter_by(mailbox_id=self.id):
if am.alias_id not in ret:
ret[am.alias_id] = am.alias
ret.append(am.alias)
return list(ret.values())
return ret
@classmethod
def create(cls, **kw):
@ -2819,16 +2666,6 @@ class Mailbox(Base, ModelMixin):
return f"<Mailbox {self.id} {self.email}>"
class MailboxActivation(Base, ModelMixin):
__tablename__ = "mailbox_activation"
mailbox_id = sa.Column(
sa.ForeignKey(Mailbox.id, ondelete="cascade"), nullable=False, index=True
)
code = sa.Column(sa.String(32), nullable=False, index=True)
tries = sa.Column(sa.Integer, default=0, nullable=False)
class AccountActivation(Base, ModelMixin):
"""contains code to activate the user account when they sign up on mobile"""
@ -3047,7 +2884,11 @@ class RecoveryCode(Base, ModelMixin):
@classmethod
def find_by_user_code(cls, user: User, code: str):
hashed_code = cls._hash_code(code)
return cls.get_by(user_id=user.id, code=hashed_code)
# TODO: Only return hashed codes once there aren't unhashed codes in the db.
found_code = cls.get_by(user_id=user.id, code=hashed_code)
if found_code:
return found_code
return cls.get_by(user_id=user.id, code=code)
@classmethod
def empty(cls, user):
@ -3058,9 +2899,7 @@ class RecoveryCode(Base, ModelMixin):
class Notification(Base, ModelMixin):
__tablename__ = "notification"
user_id = sa.Column(
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
)
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
message = sa.Column(sa.Text, nullable=False)
title = sa.Column(sa.String(512))
@ -3142,7 +2981,7 @@ class SLDomain(Base, ModelMixin):
)
def __repr__(self):
return f"<SLDomain {self.id} {self.domain} {'Premium' if self.premium_only else 'Free'}>"
return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"
class Monitoring(Base, ModelMixin):
@ -3301,20 +3140,6 @@ class TransactionalEmail(Base, ModelMixin):
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
@classmethod
def create(cls, **kw):
# whether to call Session.commit
commit = kw.pop("commit", False)
r = cls(**kw)
if not config.STORE_TRANSACTIONAL_EMAILS:
return r
Session.add(r)
if commit:
Session.commit()
return r
class Payout(Base, ModelMixin):
"""Referral payouts"""
@ -3505,16 +3330,6 @@ class AdminAuditLog(Base):
},
)
@classmethod
def stop_trial(cls, admin_user_id: int, user_id: int):
cls.create(
admin_user_id=admin_user_id,
action=AuditLogActionEnum.stop_trial.value,
model="User",
model_id=user_id,
data={},
)
@classmethod
def disable_otp_fido(
cls, admin_user_id: int, user_id: int, had_otp: bool, had_fido: bool
@ -3748,54 +3563,3 @@ class ApiToCookieToken(Base, ModelMixin):
code = secrets.token_urlsafe(32)
return super().create(code=code, **kwargs)
class SyncEvent(Base, ModelMixin):
"""This model holds the events that need to be sent to the webhook"""
__tablename__ = "sync_event"
content = sa.Column(sa.LargeBinary, unique=False, nullable=False)
taken_time = sa.Column(
ArrowType, default=None, nullable=True, server_default=None, index=True
)
retry_count = sa.Column(sa.Integer, default=0, nullable=False, server_default="0")
__table_args__ = (
sa.Index("ix_sync_event_created_at", "created_at"),
sa.Index("ix_sync_event_taken_time", "taken_time"),
)
def mark_as_taken(self) -> bool:
sql = """
UPDATE sync_event
SET taken_time = :taken_time
WHERE id = :sync_event_id
AND taken_time IS NULL
"""
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
res = Session.execute(sql, args)
Session.commit()
return res.rowcount > 0
@classmethod
def get_dead_letter(cls, older_than: Arrow, max_retries: int) -> [SyncEvent]:
return (
SyncEvent.filter(
(
(
SyncEvent.taken_time.isnot(None)
& (SyncEvent.taken_time < older_than)
)
| (
SyncEvent.taken_time.is_(None)
& (SyncEvent.created_at < older_than)
)
)
& (SyncEvent.retry_count < max_retries)
)
.order_by(SyncEvent.id)
.limit(100)
.all()
)
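The `taken_time IS NULL` guard inside `mark_as_taken()` is the claim step that lets several consumers poll `sync_event` without double-processing: only the session whose UPDATE actually matched a row should handle the event. A minimal consumer sketch under that assumption, using only the model API shown above (the `handler` callback is hypothetical):

```python
from app.models import SyncEvent


def drain_pending_events(handler, batch: int = 100) -> int:
    # Claim-and-process loop: if another worker marked the event first,
    # mark_as_taken() returns False and the event is simply skipped.
    processed = 0
    events = (
        SyncEvent.filter(SyncEvent.taken_time.is_(None))
        .order_by(SyncEvent.id)
        .limit(batch)
        .all()
    )
    for event in events:
        if event.mark_as_taken():
            handler(event.content)  # hypothetical callback receiving the serialized protobuf
            processed += 1
    return processed
```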

View File

@ -140,7 +140,7 @@ def authorize():
Scope=Scope,
)
else: # POST - user allows or denies
if not current_user.is_authenticated or not current_user.is_active():
if not current_user.is_authenticated or not current_user.is_active:
LOG.i(
"Attempt to validate a OAUth allow request by an unauthenticated user"
)

View File

@ -20,7 +20,7 @@ def final():
if form.validate_on_submit():
alias = Alias.get_by(email=form.email.data)
if alias and alias.user_id == current_user.id:
send_test_email_alias(current_user, alias.email)
send_test_email_alias(alias.email, current_user.name)
flash("An email is sent to your alias", "success")
return render_template(

View File

@ -1,13 +1,7 @@
from app.onboarding.base import onboarding_bp
from flask import render_template, url_for, redirect
from flask import render_template
@onboarding_bp.route("/", methods=["GET"])
def index():
# Do the redirect to ensure cookies are set because they are SameSite=lax/strict
return redirect(url_for("onboarding.setup"))
@onboarding_bp.route("/setup", methods=["GET"])
def setup():
return render_template("onboarding/setup.html")
return render_template("onboarding/index.html")

View File

@ -27,7 +27,6 @@ def failed_payment(sub: Subscription, subscription_id: str):
"SimpleLogin - your subscription has failed to be renewed",
render(
"transactional/subscription-cancel.txt",
user=user,
end_date=arrow.arrow.datetime.utcnow(),
),
)

View File

@ -2,11 +2,9 @@ from dataclasses import dataclass
from enum import Enum
from flask import url_for
from typing import Optional
import arrow
from app import config
from app.errors import LinkException
from app.models import User, Partner, Job
from app.models import User, Partner
from app.proton.proton_client import ProtonClient, ProtonUser
from app.account_linking import (
process_login_case,
@ -43,21 +41,12 @@ class ProtonCallbackHandler:
def __init__(self, proton_client: ProtonClient):
self.proton_client = proton_client
def _initial_alias_sync(self, user: User):
Job.create(
name=config.JOB_SEND_ALIAS_CREATION_EVENTS,
payload={"user_id": user.id},
run_at=arrow.now(),
commit=True,
)
def handle_login(self, partner: Partner) -> ProtonCallbackResult:
try:
user = self.__get_partner_user()
if user is None:
return generate_account_not_allowed_to_log_in()
res = process_login_case(user, partner)
self._initial_alias_sync(res.user)
return ProtonCallbackResult(
redirect_to_login=False,
flash_message=None,
@ -86,7 +75,6 @@ class ProtonCallbackHandler:
if user is None:
return generate_account_not_allowed_to_log_in()
res = process_link_case(user, current_user, partner)
self._initial_alias_sync(res.user)
return ProtonCallbackResult(
redirect_to_login=False,
flash_message="Account successfully linked",

View File

@ -2,7 +2,6 @@ from newrelic import agent
from typing import Optional
from app.db import Session
from app.log import LOG
from app.errors import ProtonPartnerNotSetUp
from app.models import Partner, PartnerUser, User
@ -31,7 +30,6 @@ def perform_proton_account_unlink(current_user: User):
user_id=current_user.id, partner_id=proton_partner.id
)
if partner_user is not None:
LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
PartnerUser.delete(partner_user.id)
Session.commit()
agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})

View File

@ -1,42 +0,0 @@
from datetime import datetime
from typing import Optional
import newrelic.agent
import redis.exceptions
import werkzeug.exceptions
from limits.storage import RedisStorage
from app.log import LOG
lock_redis: Optional[RedisStorage] = None
def set_redis_concurrent_lock(redis: RedisStorage):
global lock_redis
lock_redis = redis
def check_bucket_limit(
lock_name: Optional[str] = None,
max_hits: int = 5,
bucket_seconds: int = 3600,
):
# Calculate current bucket time
int_time = int(datetime.utcnow().timestamp())
bucket_id = int_time - (int_time % bucket_seconds)
bucket_lock_name = f"bl:{lock_name}:{bucket_id}"
if not lock_redis:
return
try:
value = lock_redis.incr(bucket_lock_name, bucket_seconds)
if value > max_hits:
LOG.i(
f"Rate limit hit for {lock_name} (bucket id {bucket_id}) -> {value}/{max_hits}"
)
newrelic.agent.record_custom_event(
"BucketRateLimit",
{"lock_name": lock_name, "bucket_seconds": bucket_seconds},
)
raise werkzeug.exceptions.TooManyRequests()
except (redis.exceptions.RedisError, AttributeError):
LOG.e("Cannot connect to redis")

View File

@ -2,7 +2,6 @@ import flask
import limits.storage
from app.parallel_limiter import set_redis_concurrent_lock
from app.rate_limiter import set_redis_concurrent_lock as rate_limit_set_redis
from app.session import RedisSessionStore
@ -11,14 +10,12 @@ def initialize_redis_services(app: flask.Flask, redis_url: str):
storage = limits.storage.RedisStorage(redis_url)
app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
set_redis_concurrent_lock(storage)
rate_limit_set_redis(storage)
elif redis_url.startswith("redis+sentinel://"):
storage = limits.storage.RedisSentinelStorage(redis_url)
app.session_interface = RedisSessionStore(
storage.storage, storage.storage_slave, app
)
set_redis_concurrent_lock(storage)
rate_limit_set_redis(storage)
else:
raise RuntimeError(
f"Tried to set_redis_session with an invalid redis url: ${redis_url}"

View File

@ -5,9 +5,19 @@ from typing import Optional
import boto3
import requests
from app import config
from app.config import (
AWS_REGION,
BUCKET,
AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY,
LOCAL_FILE_UPLOAD,
UPLOAD_DIR,
URL,
AWS_ENDPOINT_URL,
)
from app.log import LOG
_s3_client = None
@ -15,12 +25,12 @@ def _get_s3client():
global _s3_client
if _s3_client is None:
args = {
"aws_access_key_id": config.AWS_ACCESS_KEY_ID,
"aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
"region_name": config.AWS_REGION,
"aws_access_key_id": AWS_ACCESS_KEY_ID,
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
"region_name": AWS_REGION,
}
if config.AWS_ENDPOINT_URL:
args["endpoint_url"] = config.AWS_ENDPOINT_URL
if AWS_ENDPOINT_URL:
args["endpoint_url"] = AWS_ENDPOINT_URL
_s3_client = boto3.client("s3", **args)
return _s3_client
@ -28,8 +38,8 @@ def _get_s3client():
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
bs.seek(0)
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, key)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, key)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
@ -37,7 +47,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
else:
_get_s3client().put_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=key,
Body=bs,
ContentType=content_type,
@ -47,8 +57,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-s
def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
bs.seek(0)
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, path)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
@ -56,7 +66,7 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
else:
_get_s3client().put_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=path,
Body=bs,
# Support saving a remote file using Http header
@ -67,12 +77,12 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
def download_email(path: str) -> Optional[str]:
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, path)
with open(file_path, "rb") as f:
return f.read()
resp = _get_s3client().get_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=path,
)
if not resp or "Body" not in resp:
@ -86,30 +96,29 @@ def upload_from_url(url: str, upload_path):
def get_url(key: str, expires_in=3600) -> str:
if config.LOCAL_FILE_UPLOAD:
return config.URL + "/static/upload/" + key
if LOCAL_FILE_UPLOAD:
return URL + "/static/upload/" + key
else:
return _get_s3client().generate_presigned_url(
ExpiresIn=expires_in,
ClientMethod="get_object",
Params={"Bucket": config.BUCKET, "Key": key},
Params={"Bucket": BUCKET, "Key": key},
)
def delete(path: str):
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
os.remove(file_path)
if LOCAL_FILE_UPLOAD:
os.remove(os.path.join(UPLOAD_DIR, path))
else:
_get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
_get_s3client().delete_object(Bucket=BUCKET, Key=path)
def create_bucket_if_not_exists():
s3client = _get_s3client()
buckets = s3client.list_buckets()
for bucket in buckets["Buckets"]:
if bucket["Name"] == config.BUCKET:
if bucket["Name"] == BUCKET:
LOG.i("Bucket already exists")
return
s3client.create_bucket(Bucket=config.BUCKET)
LOG.i(f"Bucket {config.BUCKET} created")
s3client.create_bucket(Bucket=BUCKET)
LOG.i(f"Bucket {BUCKET} created")

View File

@ -87,7 +87,6 @@ class RedisSessionStore(SessionInterface):
httponly = self.get_cookie_httponly(app)
secure = self.get_cookie_secure(app)
expires = self.get_expiration_time(app, session)
samesite = self.get_cookie_samesite(app)
val = pickle.dumps(dict(session))
ttl = int(app.permanent_session_lifetime.total_seconds())
# Only 5 minutes for non-authenticated sessions.
@ -110,7 +109,6 @@ class RedisSessionStore(SessionInterface):
domain=domain,
path=path,
secure=secure,
samesite=samesite,
)

View File

@ -2,9 +2,6 @@ import requests
from requests import RequestException
from app import config
from app.db import Session
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User
@ -30,11 +27,7 @@ def execute_subscription_webhook(user: User):
LOG.i("Sent request to subscription update webhook successfully")
else:
LOG.i(
f"Request to webhook failed with status {response.status_code}: {response.text}"
f"Request to webhook failed with statue {response.status_code}: {response.text}"
)
except RequestException as e:
LOG.error(f"Subscription request exception: {e}")
event = UserPlanChanged(plan_end_time=sl_subscription_end)
EventDispatcher.send_event(user, EventContent(user_plan_change=event))
Session.commit()

View File

@ -1,71 +0,0 @@
from typing import Optional
from app.db import Session
from app.log import LOG
from app.models import User, SLDomain, CustomDomain, Mailbox
class CannotSetAlias(Exception):
def __init__(self, msg: str):
self.msg = msg
class CannotSetMailbox(Exception):
def __init__(self, msg: str):
self.msg = msg
def set_default_alias_domain(user: User, domain_name: Optional[str]):
if not domain_name:
LOG.i(f"User {user} has set no domain as default domain")
user.default_alias_public_domain_id = None
user.default_alias_custom_domain_id = None
Session.flush()
return
sl_domain: SLDomain = SLDomain.get_by(domain=domain_name)
if sl_domain:
if sl_domain.hidden:
LOG.i(f"User {user} has tried to set up a hidden domain as default domain")
raise CannotSetAlias("Domain does not exist")
if sl_domain.premium_only and not user.is_premium():
LOG.i(f"User {user} has tried to set up a premium domain as default domain")
raise CannotSetAlias("You cannot use this domain")
LOG.i(f"User {user} has set public {sl_domain} as default domain")
user.default_alias_public_domain_id = sl_domain.id
user.default_alias_custom_domain_id = None
Session.flush()
return
custom_domain = CustomDomain.get_by(domain=domain_name)
if not custom_domain:
LOG.i(
f"User {user} has tried to set up an non existing domain as default domain"
)
raise CannotSetAlias("Domain does not exist or it hasn't been verified")
if custom_domain.user_id != user.id or not custom_domain.verified:
LOG.i(
f"User {user} has tried to set domain {custom_domain} as default domain that does not belong to the user or that is not verified"
)
raise CannotSetAlias("Domain does not exist or it hasn't been verified")
LOG.i(f"User {user} has set custom {custom_domain} as default domain")
user.default_alias_public_domain_id = None
user.default_alias_custom_domain_id = custom_domain.id
Session.flush()
def set_default_mailbox(user: User, mailbox_id: int) -> Mailbox:
mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != user.id:
raise CannotSetMailbox("Invalid mailbox")
if not mailbox.verified:
raise CannotSetMailbox("This is mailbox is not verified")
if mailbox.id == user.default_mailbox_id:
return mailbox
LOG.i(f"User {user} has set mailbox {mailbox} as his default one")
user.default_mailbox_id = mailbox.id
Session.commit()
return mailbox

148
cron.py
View File

@ -61,11 +61,6 @@ from app.pgp_utils import load_public_key_and_check, PGPException
from app.proton.utils import get_proton_partner
from app.utils import sanitize_email
from server import create_light_app
from tasks.cleanup_old_imports import cleanup_old_imports
from tasks.cleanup_old_jobs import cleanup_old_jobs
from tasks.cleanup_old_notifications import cleanup_old_notifications
DELETE_GRACE_DAYS = 30
def notify_trial_end():
@ -266,13 +261,11 @@ def notify_manual_sub_end():
"Your SimpleLogin subscription will end soon",
render(
"transactional/coinbase/reminder-subscription.txt",
user=user,
coinbase_subscription=coinbase_subscription,
extend_subscription_url=extend_subscription_url,
),
render(
"transactional/coinbase/reminder-subscription.html",
user=user,
coinbase_subscription=coinbase_subscription,
extend_subscription_url=extend_subscription_url,
),
@ -828,12 +821,10 @@ def check_mailbox_valid_domain():
f"Mailbox {mailbox.email} is disabled",
render(
"transactional/disable-mailbox-warning.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
),
render(
"transactional/disable-mailbox-warning.html",
user=mailbox.user,
mailbox=mailbox,
),
retries=3,
@ -888,7 +879,6 @@ def check_mailbox_valid_pgp_keys():
f"Mailbox {mailbox.email}'s PGP Key is invalid",
render(
"transactional/invalid-mailbox-pgp-key.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
),
retries=3,
@ -929,7 +919,6 @@ def check_single_custom_domain(custom_domain):
f"Please update {custom_domain.domain} DNS on SimpleLogin",
render(
"transactional/custom-domain-dns-issue.txt.jinja2",
user=user,
custom_domain=custom_domain,
domain_dns_url=domain_dns_url,
),
@ -971,9 +960,6 @@ async def _hibp_check(api_key, queue):
This function is meant to be run simultaneously (multiple _hibp_check functions with different keys on the same queue) to make maximum use of multiple API keys.
"""
default_rate_sleep = (60.0 / config.HIBP_RPM) + 0.1
rate_sleep = default_rate_sleep
rate_hit_counter = 0
while True:
try:
alias_id = queue.get_nowait()
@ -981,14 +967,9 @@ async def _hibp_check(api_key, queue):
return
alias = Alias.get(alias_id)
# an alias can be deleted in the meantime
if not alias:
continue
user = alias.user
if user.disabled or not user.is_paid():
# Mark it as hibp done to skip it as if it had been checked
alias.hibp_last_check = arrow.utcnow()
Session.commit()
continue
return
LOG.d("Checking HIBP for %s", alias)
@ -1000,6 +981,7 @@ async def _hibp_check(api_key, queue):
f"https://haveibeenpwned.com/api/v3/breachedaccount/{urllib.parse.quote(alias.email)}",
headers=request_headers,
)
if r.status_code == 200:
# Breaches found
alias.hibp_breaches = [
@ -1007,27 +989,20 @@ async def _hibp_check(api_key, queue):
]
if len(alias.hibp_breaches) > 0:
LOG.w("%s appears in HIBP breaches %s", alias, alias.hibp_breaches)
if rate_hit_counter > 0:
rate_hit_counter -= 1
elif r.status_code == 404:
# No breaches found
alias.hibp_breaches = []
elif r.status_code == 429:
# rate limited
LOG.w("HIBP rate limited, check alias %s in the next run", alias)
rate_hit_counter += 1
rate_sleep = default_rate_sleep + (0.2 * rate_hit_counter)
if rate_hit_counter > 10:
LOG.w(f"HIBP rate limited too many times stopping with alias {alias}")
await asyncio.sleep(1.6)
return
# Just sleep for a while
await asyncio.sleep(5)
elif r.status_code > 500:
LOG.w("HIBP server 5** error %s", r.status_code)
return
else:
LOG.error(
"An error occurred while checking alias %s: %s - %s",
"An error occured while checking alias %s: %s - %s",
alias,
r.status_code,
r.text,
@ -1038,63 +1013,9 @@ async def _hibp_check(api_key, queue):
Session.add(alias)
Session.commit()
LOG.d("Updated breach info for %s", alias)
await asyncio.sleep(rate_sleep)
LOG.d("Updated breaches info for %s", alias)
def get_alias_to_check_hibp(
oldest_hibp_allowed: arrow.Arrow,
user_ids_to_skip: list[int],
min_alias_id: int,
max_alias_id: int,
):
now = arrow.now()
alias_query = (
Session.query(Alias)
.join(User, User.id == Alias.user_id)
.join(Subscription, User.id == Subscription.user_id, isouter=True)
.join(ManualSubscription, User.id == ManualSubscription.user_id, isouter=True)
.join(AppleSubscription, User.id == AppleSubscription.user_id, isouter=True)
.join(
CoinbaseSubscription,
User.id == CoinbaseSubscription.user_id,
isouter=True,
)
.join(PartnerUser, User.id == PartnerUser.user_id, isouter=True)
.join(
PartnerSubscription,
PartnerSubscription.partner_user_id == PartnerUser.id,
isouter=True,
)
.filter(
or_(
Alias.hibp_last_check.is_(None),
Alias.hibp_last_check < oldest_hibp_allowed,
),
Alias.user_id.notin_(user_ids_to_skip),
Alias.enabled,
Alias.id >= min_alias_id,
Alias.id < max_alias_id,
User.disabled == False, # noqa: E712
User.enable_data_breach_check,
or_(
User.lifetime,
ManualSubscription.end_at > now,
Subscription.next_bill_date > now.date(),
AppleSubscription.expires_date > now,
CoinbaseSubscription.end_at > now,
PartnerSubscription.end_at > now,
),
)
)
if config.HIBP_SKIP_PARTNER_ALIAS:
alias_query = alias_query.filter(
Alias.flags.op("&")(Alias.FLAG_PARTNER_CREATED) == 0
)
for alias in (
alias_query.order_by(Alias.id.asc()).enable_eagerloads(False).yield_per(500)
):
yield alias
await asyncio.sleep(1.6)
async def check_hibp():
@ -1117,30 +1038,21 @@ async def check_hibp():
Session.commit()
LOG.d("Updated list of known breaches")
LOG.d("Getting the list of users to skip")
query = "select u.id, count(a.id) from users u, alias a where a.user_id=u.id group by u.id having count(a.id) > :max_alias"
rows = Session.execute(query, {"max_alias": config.HIBP_MAX_ALIAS_CHECK})
user_ids = [row[0] for row in rows]
LOG.d("Got %d users to skip" % len(user_ids))
LOG.d("Checking aliases")
LOG.d("Preparing list of aliases to check")
queue = asyncio.Queue()
min_alias_id = 0
max_alias_id = Session.query(func.max(Alias.id)).scalar()
step = 10000
now = arrow.now()
oldest_hibp_allowed = now.shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
alias_checked = 0
for alias_batch_id in range(min_alias_id, max_alias_id, step):
for alias in get_alias_to_check_hibp(
oldest_hibp_allowed, user_ids, alias_batch_id, alias_batch_id + step
max_date = arrow.now().shift(days=-config.HIBP_SCAN_INTERVAL_DAYS)
for alias in (
Alias.filter(
or_(Alias.hibp_last_check.is_(None), Alias.hibp_last_check < max_date)
)
.filter(Alias.enabled)
.order_by(Alias.hibp_last_check.asc())
.yield_per(500)
.enable_eagerloads(False)
):
await queue.put(alias.id)
alias_checked += queue.qsize()
LOG.d(
f"Need to check about {queue.qsize()} aliases in this loop {alias_batch_id}/{max_alias_id}"
)
LOG.d("Need to check about %s aliases", queue.qsize())
# Start one checking process per API key
# Each checking process will take one alias from the queue, get the info
@ -1159,7 +1071,7 @@ async def check_hibp():
for checker in checkers:
await checker
LOG.d(f"Done checking {alias_checked} HIBP API for aliases in breaches")
LOG.d("Done checking HIBP API for aliases in breaches")
def notify_hibp():
@ -1214,30 +1126,18 @@ def notify_hibp():
Session.commit()
def clear_users_scheduled_to_be_deleted(dry_run=False):
def clear_users_scheduled_to_be_deleted():
users = User.filter(
and_(
User.delete_on.isnot(None),
User.delete_on <= arrow.now().shift(days=-DELETE_GRACE_DAYS),
)
and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
).all()
for user in users:
LOG.i(
f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
)
if dry_run:
continue
User.delete(user.id)
Session.commit()
def delete_old_data():
oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
cleanup_old_imports(oldest_valid)
cleanup_old_jobs(oldest_valid)
cleanup_old_notifications(oldest_valid)
if __name__ == "__main__":
LOG.d("Start running cronjob")
parser = argparse.ArgumentParser()
@ -1252,7 +1152,6 @@ if __name__ == "__main__":
"notify_manual_subscription_end",
"notify_premium_end",
"delete_logs",
"delete_old_data",
"poll_apple_subscription",
"sanity_check",
"delete_old_monitoring",
@ -1281,9 +1180,6 @@ if __name__ == "__main__":
elif args.job == "delete_logs":
LOG.d("Deleted Logs")
delete_logs()
elif args.job == "delete_old_data":
LOG.d("Delete old data")
delete_old_data()
elif args.job == "poll_apple_subscription":
LOG.d("Poll Apple Subscriptions")
poll_apple_subscription()
@ -1310,4 +1206,4 @@ if __name__ == "__main__":
load_unsent_mails_from_fs_and_resend()
elif args.job == "delete_scheduled_users":
LOG.d("Deleting users scheduled to be deleted")
clear_users_scheduled_to_be_deleted(dry_run=True)
clear_users_scheduled_to_be_deleted()
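
The `_hibp_check`/`check_hibp` code in this file fans work out over one coroutine per HIBP API key, all draining a single `asyncio.Queue` of alias ids. A minimal, self-contained sketch of that fan-out pattern (generic names; the worker body and the key list are placeholders, not the SimpleLogin implementation):

```python
import asyncio

async def worker(api_key: str, queue: asyncio.Queue) -> None:
    # One worker per API key: pull alias ids until the shared queue is empty.
    while True:
        try:
            alias_id = queue.get_nowait()
        except asyncio.QueueEmpty:
            return
        # ... call the rate-limited breach API with `api_key` for `alias_id` here ...
        await asyncio.sleep(1.6)  # simple per-key pacing between requests

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    for alias_id in range(25):          # stand-in for the alias ids to check
        await queue.put(alias_id)
    api_keys = ["key-1", "key-2"]       # one entry per available HIBP API key
    await asyncio.gather(*(worker(key, queue) for key in api_keys))

if __name__ == "__main__":
    asyncio.run(main())
```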

View File

@ -37,12 +37,6 @@ jobs:
schedule: "15 5 * * *"
captureStderr: true
- name: SimpleLogin Delete Old data
command: python /code/cron.py -j delete_old_data
shell: /bin/bash
schedule: "30 5 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash
@ -68,7 +62,7 @@ jobs:
captureStderr: true
- name: SimpleLogin delete users scheduled to be deleted
command: python /code/cron.py -j delete_scheduled_users
command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
shell: /bin/bash
schedule: "15 11 * * *"
captureStderr: true

View File

@ -52,8 +52,8 @@ from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from sqlalchemy.exc import IntegrityError
from app import pgp_utils, s3, config, contact_utils
from app.alias_utils import try_auto_create, change_alias_status
from app import pgp_utils, s3, config
from app.alias_utils import try_auto_create
from app.config import (
EMAIL_DOMAIN,
URL,
@ -195,16 +195,76 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
mail_from,
)
contact_email = mail_from
contact_result = contact_utils.create_contact(
email=contact_email,
alias=alias,
if not is_valid_email(contact_email):
LOG.w(
"invalid contact email %s. Parse from %s %s",
contact_email,
from_header,
mail_from,
)
# either reuse a contact with empty email or create a new contact with empty email
contact_email = ""
contact_email = sanitize_email(contact_email, not_lower=True)
if contact_name and "\x00" in contact_name:
LOG.w("issue with contact name %s", contact_name)
contact_name = ""
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
if contact:
if contact.name != contact_name:
LOG.d(
"Update contact %s name %s to %s",
contact,
contact.name,
contact_name,
)
contact.name = contact_name
Session.commit()
# contact created in the past does not have mail_from and from_header field
if not contact.mail_from and mail_from:
LOG.d(
"Set contact mail_from %s: %s to %s",
contact,
contact.mail_from,
mail_from,
)
contact.mail_from = mail_from
Session.commit()
else:
try:
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
mail_from=mail_from,
allow_empty_email=True,
reply_email=generate_reply_email(contact_email, alias)
if is_valid_email(contact_email)
else NOREPLY,
automatic_created=True,
from_partner=False,
)
return contact_result.contact
if not contact_email:
LOG.d("Create a contact with invalid email for %s", alias)
contact.invalid_email = True
LOG.d(
"create contact %s for %s, reverse alias:%s",
contact_email,
alias,
contact.reply_email,
)
Session.commit()
except IntegrityError:
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
return contact
def get_or_create_reply_to_contact(
@ -229,7 +289,33 @@ def get_or_create_reply_to_contact(
)
return None
return contact_utils.create_contact(contact_address, alias, contact_name).contact
contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
if contact:
return contact
else:
LOG.d(
"create contact %s for alias %s via reply-to header %s",
contact_address,
alias,
reply_to_header,
)
try:
contact = Contact.create(
user_id=alias.user_id,
alias_id=alias.id,
website_email=contact_address,
name=contact_name,
reply_email=generate_reply_email(contact_address, alias),
automatic_created=True,
)
Session.commit()
except IntegrityError:
LOG.w("Contact %s %s already exist", alias, contact_address)
Session.rollback()
contact = Contact.get_by(alias_id=alias.id, website_email=contact_address)
return contact
def replace_header_when_forward(msg: Message, alias: Alias, header: str):
@ -514,14 +600,12 @@ def handle_email_sent_to_ourself(alias, from_addr: str, msg: Message, user):
f"Email sent to {alias.email} from its own mailbox {from_addr}",
render(
"transactional/cycle-email.txt.jinja2",
user=user,
alias=alias,
from_addr=from_addr,
refused_email_url=refused_email_url,
),
render(
"transactional/cycle-email.html",
user=user,
alias=alias,
from_addr=from_addr,
refused_email_url=refused_email_url,
@ -552,10 +636,6 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
user = alias.user
if not user.is_active():
LOG.w(f"User {user} has been soft deleted")
return False, status.E502
if not user.can_send_or_receive():
LOG.i(f"User {user} cannot receive emails")
if should_ignore_bounce(envelope.mail_from):
@ -575,9 +655,6 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
from_header = get_header_unicode(msg[headers.FROM])
LOG.d("Create or get contact for from_header:%s", from_header)
contact = get_or_create_contact(from_header, envelope.mail_from, alias)
alias = (
contact.alias
) # In case the Session was closed in the get_or_create we re-fetch the alias
reply_to_contact = None
if msg[headers.REPLY_TO]:
@ -646,14 +723,12 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
f"Your mailbox {mailbox.email} is an alias",
render(
"transactional/mailbox-invalid.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
),
render(
"transactional/mailbox-invalid.html",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
@ -706,14 +781,12 @@ def forward_email_to_mailbox(
f"Your mailbox {mailbox.email} and alias {alias.email} use the same domain",
render(
"transactional/mailbox-invalid.txt.jinja2",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
),
render(
"transactional/mailbox-invalid.html",
user=mailbox.user,
mailbox=mailbox,
mailbox_url=mailbox_url,
alias=alias,
@ -727,7 +800,7 @@ def forward_email_to_mailbox(
email_log = EmailLog.create(
contact_id=contact.id,
user_id=contact.user_id,
user_id=user.id,
mailbox_id=mailbox.id,
alias_id=contact.alias_id,
message_id=str(msg[headers.MESSAGE_ID]),
@ -797,7 +870,6 @@ def forward_email_to_mailbox(
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
headers.LIST_UNSUBSCRIBE,
headers.LIST_UNSUBSCRIBE_POST,
] + headers.MIME_HEADERS
@ -983,9 +1055,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
if not contact:
LOG.w(f"No contact with {reply_email} as reverse alias")
return False, status.E502
if not contact.user.is_active():
LOG.w(f"User {contact.user} has been soft deleted")
return False, status.E502
alias = contact.alias
alias_address: str = contact.alias.email
@ -1102,7 +1171,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
]
+ headers.MIME_HEADERS,
)
@ -1198,7 +1266,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
f"Email sent to {contact.email} contains non reverse-alias addresses",
render(
"transactional/non-reverse-alias-reply-phase.txt.jinja2",
user=alias.user,
destination=contact.email,
alias=alias.email,
subject=msg[headers.SUBJECT],
@ -1420,7 +1487,6 @@ def handle_unknown_mailbox(
f"Attempt to use your alias {alias.email} from {envelope.mail_from}",
render(
"transactional/reply-must-use-personal-email.txt",
user=user,
alias=alias,
sender=envelope.mail_from,
authorize_address_link=authorize_address_link,
@ -1428,7 +1494,6 @@ def handle_unknown_mailbox(
),
render(
"transactional/reply-must-use-personal-email.html",
user=user,
alias=alias,
sender=envelope.mail_from,
authorize_address_link=authorize_address_link,
@ -1510,7 +1575,7 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
LOG.w(
f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
)
change_alias_status(alias, enabled=False)
alias.enabled = False
Notification.create(
user_id=user.id,
@ -1529,14 +1594,12 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
f"Alias {alias.email} has been disabled due to multiple bounces",
render(
"transactional/bounce/automatic-disable-alias.txt",
user=alias.user,
alias=alias,
refused_email_url=refused_email_url,
mailbox_email=mailbox.email,
),
render(
"transactional/bounce/automatic-disable-alias.html",
user=alias.user,
alias=alias,
refused_email_url=refused_email_url,
mailbox_email=mailbox.email,
@ -1575,7 +1638,6 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
f"An email sent to {alias.email} cannot be delivered to your mailbox",
render(
"transactional/bounce/bounced-email.txt.jinja2",
user=alias.user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1585,7 +1647,6 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
),
render(
"transactional/bounce/bounced-email.html",
user=alias.user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1678,14 +1739,12 @@ def handle_bounce_reply_phase(envelope, msg: Message, email_log: EmailLog):
f"Email cannot be sent to { contact.email } from your alias { alias.email }",
render(
"transactional/bounce/bounce-email-reply-phase.txt",
user=user,
alias=alias,
contact=contact,
refused_email_url=refused_email_url,
),
render(
"transactional/bounce/bounce-email-reply-phase.html",
user=user,
alias=alias,
contact=contact,
refused_email_url=refused_email_url,
@ -1748,7 +1807,6 @@ def handle_spam(
f"Email from {alias.email} to {contact.website_email} is detected as spam",
render(
"transactional/spam-email-reply-phase.txt",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1756,7 +1814,6 @@ def handle_spam(
),
render(
"transactional/spam-email-reply-phase.html",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1779,7 +1836,6 @@ def handle_spam(
f"Email from {contact.website_email} to {alias.email} is detected as spam",
render(
"transactional/spam-email.txt",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1787,7 +1843,6 @@ def handle_spam(
),
render(
"transactional/spam-email.html",
user=user,
alias=alias,
website_email=contact.website_email,
disable_alias_link=disable_alias_link,
@ -1828,19 +1883,13 @@ def handle_transactional_bounce(
envelope: Envelope, msg, rcpt_to, transactional_id=None
):
LOG.d("handle transactional bounce sent to %s", rcpt_to)
if transactional_id is None:
LOG.i(
f"No transactional record for {envelope.mail_from} -> {envelope.rcpt_tos}"
)
return
# parse the TransactionalEmail
transactional_id = transactional_id or parse_id_from_bounce(rcpt_to)
transactional = TransactionalEmail.get(transactional_id)
# a transaction might have been deleted in delete_logs()
if not transactional:
LOG.i(
f"No transactional record for {envelope.mail_from} -> {envelope.rcpt_tos}"
)
return
if transactional:
LOG.i("Create bounce for %s", transactional.email)
bounce_info = get_mailbox_bounce_info(msg)
if bounce_info:
@ -1872,9 +1921,6 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
contact,
alias,
)
if not email_log.user.is_active():
LOG.d(f"User {email_log.user} is not active")
return status.E510
if email_log.is_reply:
content_type = msg.get_content_type().lower()
@ -1936,15 +1982,12 @@ def send_no_reply_response(mail_from: str, msg: Message):
if not mailbox:
LOG.d("Unknown sender. Skipping reply from {}".format(NOREPLY))
return
if not mailbox.user.is_active():
LOG.d(f"User {mailbox.user} is soft-deleted. Skipping sending reply response")
return
send_email_at_most_times(
mailbox.user,
ALERT_TO_NOREPLY,
mailbox.user.email,
"Auto: {}".format(msg[headers.SUBJECT] or "No subject"),
render("transactional/noreply.text.jinja2", user=mailbox.user),
render("transactional/noreply.text.jinja2"),
)
@ -1977,11 +2020,10 @@ def handle(envelope: Envelope, msg: Message) -> str:
return status.E204
# sanitize email headers
sanitize_header(msg, headers.FROM)
sanitize_header(msg, headers.TO)
sanitize_header(msg, headers.CC)
sanitize_header(msg, headers.REPLY_TO)
sanitize_header(msg, headers.MESSAGE_ID)
sanitize_header(msg, "from")
sanitize_header(msg, "to")
sanitize_header(msg, "cc")
sanitize_header(msg, "reply-to")
LOG.d(
"==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "
@ -2026,7 +2068,6 @@ def handle(envelope: Envelope, msg: Message) -> str:
"SimpleLogin shouldn't be used with another email forwarding system",
render(
"transactional/email-sent-from-reverse-alias.txt.jinja2",
user=user,
),
)
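
The contact-creation paths in this file use an optimistic insert-then-recover pattern: look the contact up, try to create it, and on `IntegrityError` (two inbound messages racing to create the same contact) roll back and fetch the row that won. A generic SQLAlchemy sketch of that pattern, with hypothetical session/model arguments rather than SimpleLogin's own helpers:

```python
from sqlalchemy.exc import IntegrityError

def get_or_create_row(session, Contact, alias_id: int, website_email: str):
    # Fast path: the contact already exists.
    existing = (
        session.query(Contact)
        .filter_by(alias_id=alias_id, website_email=website_email)
        .first()
    )
    if existing:
        return existing

    # Optimistic insert; a concurrent handler may beat us to it.
    contact = Contact(alias_id=alias_id, website_email=website_email)
    session.add(contact)
    try:
        session.commit()
        return contact
    except IntegrityError:
        # Another process inserted the same (alias_id, website_email) pair first.
        session.rollback()
        return (
            session.query(Contact)
            .filter_by(alias_id=alias_id, website_email=website_email)
            .one()
        )
```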

View File

@ -1,113 +0,0 @@
import argparse
from enum import Enum
from sys import argv, exit
from app.config import EVENT_LISTENER_DB_URI
from app.log import LOG
from events import event_debugger
from events.runner import Runner
from events.event_source import DeadLetterEventSource, PostgresEventSource
from events.event_sink import ConsoleEventSink, HttpEventSink
_DEFAULT_MAX_RETRIES = 10
class Mode(Enum):
DEAD_LETTER = "dead_letter"
LISTENER = "listener"
@staticmethod
def from_str(value: str):
if value == Mode.DEAD_LETTER.value:
return Mode.DEAD_LETTER
elif value == Mode.LISTENER.value:
return Mode.LISTENER
else:
raise ValueError(f"Invalid mode: {value}")
def main(mode: Mode, dry_run: bool, max_retries: int):
if mode == Mode.DEAD_LETTER:
LOG.i("Using DeadLetterEventSource")
source = DeadLetterEventSource(max_retries)
elif mode == Mode.LISTENER:
LOG.i("Using PostgresEventSource")
source = PostgresEventSource(EVENT_LISTENER_DB_URI)
else:
raise ValueError(f"Invalid mode: {mode}")
if dry_run:
LOG.i("Starting with ConsoleEventSink")
sink = ConsoleEventSink()
else:
LOG.i("Starting with HttpEventSink")
sink = HttpEventSink()
runner = Runner(source=source, sink=sink)
runner.run()
def debug_event(event_id: str):
LOG.i(f"Debugging event {event_id}")
try:
event_id_int = int(event_id)
except ValueError:
raise ValueError(f"Invalid event id: {event_id}")
event_debugger.debug_event(event_id_int)
def run_event(event_id: str, delete_on_success: bool):
LOG.i(f"Running event {event_id}")
try:
event_id_int = int(event_id)
except ValueError:
raise ValueError(f"Invalid event id: {event_id}")
event_debugger.run_event(event_id_int, delete_on_success)
def args():
parser = argparse.ArgumentParser(description="Run event listener")
subparsers = parser.add_subparsers(dest="command")
listener_parser = subparsers.add_parser(Mode.LISTENER.value)
listener_parser.add_argument(
"--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
)
listener_parser.add_argument("--dry-run", action="store_true")
dead_letter_parser = subparsers.add_parser(Mode.DEAD_LETTER.value)
dead_letter_parser.add_argument(
"--max-retries", type=int, default=_DEFAULT_MAX_RETRIES
)
dead_letter_parser.add_argument("--dry-run", action="store_true")
debug_parser = subparsers.add_parser("debug")
debug_parser.add_argument("event_id", help="ID of the event to debug")
run_parser = subparsers.add_parser("run")
run_parser.add_argument("event_id", help="ID of the event to run")
run_parser.add_argument("--delete-on-success", action="store_true")
return parser.parse_args()
if __name__ == "__main__":
if len(argv) < 2:
print("Invalid usage. Pass a valid subcommand as argument")
exit(1)
args = args()
if args.command in [Mode.LISTENER.value, Mode.DEAD_LETTER.value]:
main(
mode=Mode.from_str(args.command),
dry_run=args.dry_run,
max_retries=args.max_retries,
)
elif args.command == "debug":
debug_event(args.event_id)
elif args.command == "run":
run_event(args.event_id, args.delete_on_success)
else:
print("Invalid command")
exit(1)

View File

View File

@ -1,43 +0,0 @@
from app.events.generated import event_pb2
from app.models import SyncEvent
from events.event_sink import HttpEventSink
def debug_event(event_id: int):
event = SyncEvent.get_by(id=event_id)
if not event:
print("Event not found")
return
print(f"Info for event {event_id}")
print(f"- Created at: {event.created_at}")
print(f"- Updated at: {event.updated_at}")
print(f"- Taken time: {event.taken_time}")
print(f"- Retry count: {event.retry_count}")
print()
print("Event contents")
event_contents = event.content
parsed = event_pb2.Event.FromString(event_contents)
print(f"- UserID: {parsed.user_id}")
print(f"- ExternalUserID: {parsed.external_user_id}")
print(f"- PartnerID: {parsed.partner_id}")
content = parsed.content
print(f"Content: {content}")
def run_event(event_id: int, delete_on_success: bool = True):
event = SyncEvent.get_by(id=event_id)
if not event:
print("Event not found")
return
print(f"Processing event {event_id}")
sink = HttpEventSink()
res = sink.process(event)
if res:
print(f"Processed event {event_id}")
if delete_on_success:
SyncEvent.delete(event_id, commit=True)

View File

@ -1,46 +0,0 @@
import requests
import newrelic.agent
from abc import ABC, abstractmethod
from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
from app.log import LOG
from app.models import SyncEvent
class EventSink(ABC):
@abstractmethod
def process(self, event: SyncEvent) -> bool:
pass
class HttpEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
if not EVENT_WEBHOOK:
LOG.warning("Skipping sending event because there is no webhook configured")
return False
LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")
res = requests.post(
url=EVENT_WEBHOOK,
data=event.content,
headers={"Content-Type": "application/x-protobuf"},
verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
)
newrelic.agent.record_custom_event(
"EventSentToPartner", {"http_code": res.status_code}
)
if res.status_code != 200:
LOG.warning(
f"Failed to send event to webhook: {res.status_code} {res.text}"
)
return False
else:
LOG.info(f"Event {event.id} sent successfully to webhook")
return True
class ConsoleEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
LOG.info(f"Handling event {event.id}")
return True

View File

@ -1,112 +0,0 @@
import arrow
import newrelic.agent
import psycopg2
import select
from abc import ABC, abstractmethod
from app.db import Session
from app.log import LOG
from app.models import SyncEvent
from app.events.event_dispatcher import NOTIFICATION_CHANNEL
from time import sleep
from typing import Callable, NoReturn
_DEAD_LETTER_THRESHOLD_MINUTES = 10
_DEAD_LETTER_INTERVAL_SECONDS = 30
_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5
class EventSource(ABC):
@abstractmethod
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
pass
class PostgresEventSource(EventSource):
def __init__(self, connection_string: str):
self.__connection_string = connection_string
self.__connect()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
self.__listen(on_event)
except Exception as e:
LOG.warn(f"Error listening to events: {e}")
sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
self.__connect()
def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
self.__connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
)
cursor = self.__connection.cursor()
cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")
LOG.info("Starting to listen to events")
while True:
if select.select([self.__connection], [], [], 5) != ([], [], []):
self.__connection.poll()
while self.__connection.notifies:
notify = self.__connection.notifies.pop(0)
LOG.debug(
f"Got NOTIFY: pid={notify.pid} channel={notify.channel} payload={notify.payload}"
)
try:
webhook_id = int(notify.payload)
event = SyncEvent.get_by(id=webhook_id)
if event is not None:
if event.mark_as_taken():
on_event(event)
else:
LOG.info(
f"Event {event.id} was handled by another runner"
)
else:
LOG.info(f"Could not find event with id={notify.payload}")
except Exception as e:
LOG.warn(f"Error getting event: {e}")
Session.close() # Ensure we get a new connection and we don't leave a dangling tx
def __connect(self):
self.__connection = psycopg2.connect(
self.__connection_string, application_name="sl-event-listen"
)
from app.db import Session
Session.close()
class DeadLetterEventSource(EventSource):
def __init__(self, max_retries: int):
self.__max_retries = max_retries
@newrelic.agent.background_task()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
threshold = arrow.utcnow().shift(
minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
)
events = SyncEvent.get_dead_letter(
older_than=threshold, max_retries=self.__max_retries
)
if events:
LOG.info(f"Got {len(events)} dead letter events")
if events:
newrelic.agent.record_custom_metric(
"Custom/dead_letter_events_to_process", len(events)
)
for event in events:
on_event(event)
Session.close() # Ensure that we have a new connection and we don't have a dangling tx with a lock
if not events:
LOG.debug("No dead letter events")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
except Exception as e:
LOG.warn(f"Error getting dead letter event: {e}")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
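
`PostgresEventSource` above is built on Postgres `LISTEN`/`NOTIFY` through `psycopg2`: set autocommit, `LISTEN` on a channel, then `select()` on the connection socket and drain `connection.notifies`. A stripped-down sketch of that loop; the connection string and channel name are placeholders (the real channel comes from `app.events.event_dispatcher.NOTIFICATION_CHANNEL`):

```python
import select

import psycopg2
import psycopg2.extensions

conn = psycopg2.connect("dbname=simplelogin", application_name="listen-demo")
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

cur = conn.cursor()
cur.execute("LISTEN sync_events;")  # placeholder channel name

while True:
    # Wait up to 5 seconds for the connection socket to become readable.
    if select.select([conn], [], [], 5) != ([], [], []):
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            print(f"pid={notify.pid} channel={notify.channel} payload={notify.payload}")
```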

View File

@ -1,46 +0,0 @@
import arrow
import newrelic.agent
from app.log import LOG
from app.db import Session
from app.models import SyncEvent
from events.event_sink import EventSink
from events.event_source import EventSource
class Runner:
def __init__(self, source: EventSource, sink: EventSink):
self.__source = source
self.__sink = sink
def run(self):
self.__source.run(self.__on_event)
@newrelic.agent.background_task()
def __on_event(self, event: SyncEvent):
try:
event_created_at = event.created_at
start_time = arrow.now()
success = self.__sink.process(event)
if success:
event_id = event.id
SyncEvent.delete(event.id, commit=True)
LOG.info(f"Marked {event_id} as done")
end_time = arrow.now() - start_time
time_between_taken_and_created = start_time - event_created_at
newrelic.agent.record_custom_metric("Custom/sync_event_processed", 1)
newrelic.agent.record_custom_metric(
"Custom/sync_event_process_time", end_time.total_seconds()
)
newrelic.agent.record_custom_metric(
"Custom/sync_event_elapsed_time",
time_between_taken_and_created.total_seconds(),
)
else:
event.retry_count = event.retry_count + 1
Session.commit()
except Exception as e:
LOG.warn(f"Exception processing event [id={event.id}]: {e}")
newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)

View File

@ -116,14 +116,6 @@ WORDS_FILE_PATH=local_data/test_words.txt
# CONNECT_WITH_PROTON=true
# CONNECT_WITH_PROTON_COOKIE_NAME=to_fill
# Login with OIDC
# CONNECT_WITH_OIDC_ICON=fa-github
# OIDC_WELL_KNOWN_URL=to_fill
# OIDC_SCOPES=openid email profile
# OIDC_NAME_FIELD=name
# OIDC_CLIENT_ID=to_fill
# OIDC_CLIENT_SECRET=to_fill
# Flask profiler
# FLASK_PROFILER_PATH=/tmp/flask-profiler.sql
# FLASK_PROFILER_PASSWORD=password

View File

@ -3,7 +3,7 @@ Run scheduled jobs.
Not meant for running jobs at a precise time (+- 1h)
"""
import time
from typing import List, Optional
from typing import List
import arrow
from sqlalchemy.sql.expression import or_, and_
@ -14,9 +14,7 @@ from app.email_utils import (
send_email,
render,
)
from app.events.event_dispatcher import PostgresDispatcher
from app.import_utils import handle_batch_import
from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
@ -199,16 +197,11 @@ def process_job(job: Job):
onboarding_mailbox(user)
elif job.name == config.JOB_ONBOARDING_4:
user_id = job.payload.get("user_id")
user: User = User.get(user_id)
user = User.get(user_id)
# user might delete their account in the meantime
# or disable the notification
if user and user.notification and user.activated:
# if user only has 1 mailbox which is Proton then do not send PGP onboarding email
mailboxes = user.mailboxes()
if len(mailboxes) == 1 and mailboxes[0].is_proton():
LOG.d("Do not send onboarding PGP email to Proton mailbox")
else:
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
@ -226,21 +219,22 @@ def process_job(job: Job):
user_email = user.email
LOG.w("Delete user %s", user)
User.delete(user.id)
Session.commit()
send_email(
user_email,
"Your SimpleLogin account has been deleted",
render("transactional/account-delete.txt", user=user),
render("transactional/account-delete.html", user=user),
render("transactional/account-delete.txt"),
render("transactional/account-delete.html"),
retries=3,
)
User.delete(user.id)
Session.commit()
elif job.name == config.JOB_DELETE_MAILBOX:
delete_mailbox_job(job)
elif job.name == config.JOB_DELETE_DOMAIN:
custom_domain_id = job.payload.get("custom_domain_id")
custom_domain: Optional[CustomDomain] = CustomDomain.get(custom_domain_id)
custom_domain = CustomDomain.get(custom_domain_id)
if not custom_domain:
return
@ -252,7 +246,6 @@ def process_job(job: Job):
LOG.d("Domain %s deleted", domain_name)
if custom_domain.partner_id is None:
send_email(
user.email,
f"Your domain {domain_name} has been deleted",
@ -271,16 +264,8 @@ def process_job(job: Job):
user_id = job.payload.get("user_id")
user = User.get(user_id)
if user and user.activated:
LOG.d("Send proton welcome email to user %s", user)
LOG.d("send proton welcome email to user %s", user)
welcome_proton(user)
elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
user_id = job.payload.get("user_id")
user = User.get(user_id)
if user and user.activated:
LOG.d(f"Sending alias creation events for {user}")
send_alias_creation_events_for_user(
user, dispatcher=PostgresDispatcher.get()
)
else:
LOG.e("Unknown job name %s", job.name)

View File

@ -745,6 +745,8 @@ bullish
bullpen
bullring
bullseye
bullwhip
bully
bunch
bundle
bungee
@ -1147,6 +1149,7 @@ coherence
coherent
cohesive
coil
coke
cola
cold
coleslaw
@ -1671,6 +1674,8 @@ delta
deluge
delusion
deluxe
demanding
demeaning
demeanor
demise
democracy
@ -1892,6 +1897,9 @@ divisible
divisibly
division
divisive
divorcee
dizziness
dizzy
doable
docile
dock
@ -1905,6 +1913,7 @@ dole
dollar
dollhouse
dollop
dolly
dolphin
domain
domelike
@ -2018,6 +2027,7 @@ duh
duke
dumping
dumpling
dumpster
duo
dupe
duplex
@ -2026,12 +2036,14 @@ duplicity
durable
durably
duration
duress
during
dusk
dust
dutiful
duty
duvet
dwarf
dweeb
dwelled
dweller
@ -3770,6 +3782,10 @@ makeshift
making
malformed
malt
mama
mammal
mammary
mammogram
manager
managing
manatee
@ -3782,6 +3798,7 @@ mangle
mango
mangy
manhandle
manhole
manhood
manhunt
manicotti
@ -3796,6 +3813,7 @@ manmade
manned
mannish
manor
manpower
mantis
mantra
manual
@ -3832,6 +3850,7 @@ mashed
mashing
massager
masses
massive
mastiff
matador
matchbook
@ -3844,11 +3863,15 @@ maternal
maternity
math
mating
matriarch
matrimony
matrix
matron
matted
matter
maturely
maturing
maturity
mauve
maverick
maximize
@ -3868,6 +3891,9 @@ modify
modular
modulator
module
moisten
moistness
moisture
molar
molasses
mold
@ -3920,7 +3946,11 @@ morality
morally
morbidity
morbidly
morphine
morphing
morse
mortality
mortally
mortician
mortified
mortify
@ -3946,6 +3976,7 @@ mournful
mouse
mousiness
moustache
mousy
mouth
movable
move
@ -3954,6 +3985,7 @@ moving
mower
mowing
much
muck
mud
mug
mulberry
@ -3970,6 +4002,7 @@ mumbling
mumbo
mummified
mummify
mummy
mumps
munchkin
mundane
@ -4765,6 +4798,7 @@ princess
print
prior
prism
prison
prissy
pristine
privacy
@ -4788,6 +4822,8 @@ prodigal
prodigy
produce
product
profane
profanity
professed
professor
profile
@ -5956,6 +5992,10 @@ slit
sliver
slobbery
slogan
sloped
sloping
sloppily
sloppy
slot
slouching
slouchy
@ -5971,6 +6011,7 @@ smartness
smasher
smashing
smashup
smell
smelting
smile
smilingly
@ -5980,6 +6021,11 @@ smith
smitten
smock
smog
smoked
smokeless
smokiness
smoking
smoky
smolder
smooth
smother
@ -6001,6 +6047,7 @@ sneer
sneeze
sneezing
snide
sniff
snippet
snipping
snitch
@ -6156,6 +6203,7 @@ squiggle
squiggly
squint
squire
squirt
squishier
squishy
stability
@ -6275,6 +6323,7 @@ stoning
stony
stood
stooge
stool
stoop
stoplight
stoppable
@ -6409,9 +6458,12 @@ subwoofer
subzero
succulent
such
suction
sudden
sudoku
suds
sufferer
suffering
suffice
suffix
suffocate
@ -6463,6 +6515,7 @@ surplus
surprise
surreal
surrender
surrogate
surround
survey
survival
@ -6475,6 +6528,7 @@ suspend
suspense
sustained
sustainer
swab
swaddling
swagger
swampland
@ -6482,6 +6536,7 @@ swan
swapping
swarm
sway
swear
sweat
sweep
swell
@ -6550,6 +6605,9 @@ talcum
talisman
tall
talon
tamale
tameness
tamer
tamper
tank
tanned
@ -6589,6 +6647,7 @@ thaw
theater
theatrics
thee
theft
theme
theology
theorize
@ -6693,6 +6752,7 @@ trade
trading
tradition
traffic
tragedy
trailing
trailside
train
@ -6712,6 +6772,7 @@ trapped
trapper
trapping
traps
trash
travel
traverse
travesty
@ -7399,7 +7460,9 @@ villain
vindicate
vineyard
vintage
violate
violation
violator
violet
violin
viper

View File

@ -1,29 +0,0 @@
"""empty message
Revision ID: 818b0a956205
Revises: 4bc54632d9aa
Create Date: 2024-02-01 10:43:46.253184
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818b0a956205'
down_revision = '4bc54632d9aa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('alias', sa.Column('last_email_log_id', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('alias', 'last_email_log_id')
# ### end Alembic commands ###

View File

@ -1,48 +0,0 @@
"""empty message
Revision ID: 52510a633d6f
Revises: 818b0a956205
Create Date: 2024-03-12 12:46:24.161644
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "52510a633d6f"
down_revision = "818b0a956205"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"alias", sa.Column("flags", sa.BigInteger(), server_default="0", nullable=False)
)
with op.get_context().autocommit_block():
op.create_index(op.f("ix_alias_flags"), "alias", ["flags"], unique=False)
op.create_index(op.f("ix_job_state"), "job", ["state"], unique=False)
op.create_index(
"ix_state_run_at_taken_at",
"job",
["state", "run_at", "taken_at"],
unique=False,
)
op.create_index(
op.f("ix_notification_user_id"), "notification", ["user_id"], unique=False
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.get_context().autocommit_block():
op.drop_index(op.f("ix_notification_user_id"), table_name="notification")
op.drop_index("ix_state_run_at_taken_at", table_name="job")
op.drop_index(op.f("ix_job_state"), table_name="job")
op.drop_index(op.f("ix_alias_flags"), table_name="alias")
op.drop_column("alias", "flags")
# ### end Alembic commands ###

View File

@ -1,29 +0,0 @@
"""empty message
Revision ID: fa2f19bb4e5a
Revises: 52510a633d6f
Create Date: 2024-04-09 13:12:26.305340
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fa2f19bb4e5a'
down_revision = '52510a633d6f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('enable_data_breach_check', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'enable_data_breach_check')
# ### end Alembic commands ###

View File

@ -1,38 +0,0 @@
"""Create sync_event table
Revision ID: 06a9a7133445
Revises: fa2f19bb4e5a
Create Date: 2024-05-17 13:11:20.402259
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '06a9a7133445'
down_revision = 'fa2f19bb4e5a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('sync_event',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.Column('content', sa.LargeBinary(), nullable=False),
sa.Column('taken_time', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_sync_event_created_at'), 'sync_event', ['created_at'], unique=False)
op.create_index(op.f('ix_sync_event_taken_time'), 'sync_event', ['taken_time'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sync_event')
# ### end Alembic commands ###

View File

@ -1,31 +0,0 @@
"""empty message
Revision ID: d608b8e48082
Revises: 06a9a7133445
Create Date: 2024-07-05 16:56:04.220173
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd608b8e48082'
down_revision = '06a9a7133445'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('deleted_alias', sa.Column('reason', sa.Integer(), default=0, server_default='0', nullable=False))
op.add_column('domain_deleted_alias', sa.Column('reason', sa.Integer(), default=0, server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('domain_deleted_alias', 'reason')
op.drop_column('deleted_alias', 'reason')
# ### end Alembic commands ###

View File

@ -1,28 +0,0 @@
"""add retry count to sync event
Revision ID: 56d08955fcab
Revises: d608b8e48082
Create Date: 2024-07-19 08:21:19.979973
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '56d08955fcab'
down_revision = 'd608b8e48082'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('sync_event', sa.Column('retry_count', sa.Integer(), server_default='0', nullable=False, default=0))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('sync_event', 'retry_count')
# ### end Alembic commands ###

View File

@ -1,42 +0,0 @@
"""empty message
Revision ID: 1c14339aae90
Revises: 56d08955fcab
Create Date: 2024-07-30 11:46:32.460221
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1c14339aae90'
down_revision = '56d08955fcab'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('mailbox_activation',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.Column('mailbox_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=32), nullable=False),
sa.Column('tries', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['mailbox_id'], ['mailbox.id'], ondelete='cascade'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_mailbox_activation_code'), 'mailbox_activation', ['code'], unique=False)
op.create_index(op.f('ix_mailbox_activation_mailbox_id'), 'mailbox_activation', ['mailbox_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_mailbox_activation_mailbox_id'), table_name='mailbox_activation')
op.drop_index(op.f('ix_mailbox_activation_code'), table_name='mailbox_activation')
op.drop_table('mailbox_activation')
# ### end Alembic commands ###

View File

@ -1,30 +0,0 @@
"""Custom Domain partner id
Revision ID: 2441b7ff5da9
Revises: 1c14339aae90
Create Date: 2024-09-13 15:43:02.425964
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2441b7ff5da9'
down_revision = '1c14339aae90'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('custom_domain', sa.Column('partner_id', sa.Integer(), nullable=True, default=None, server_default=None))
op.create_foreign_key(None, 'custom_domain', 'partner', ['partner_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'custom_domain', type_='foreignkey')
op.drop_column('custom_domain', 'partner_id')
# ### end Alembic commands ###

View File

@ -1,31 +0,0 @@
"""contact.flags and custom_domain.pending_deletion
Revision ID: 88dd7a0abf54
Revises: 2441b7ff5da9
Create Date: 2024-09-19 15:41:20.910374
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '88dd7a0abf54'
down_revision = '2441b7ff5da9'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('contact', sa.Column('flags', sa.Integer(), server_default='0', nullable=False))
op.add_column('custom_domain', sa.Column('pending_deletion', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('custom_domain', 'pending_deletion')
op.drop_column('contact', 'flags')
# ### end Alembic commands ###

View File

@ -4,7 +4,6 @@ import subprocess
from time import sleep
from typing import List, Dict
import arrow
import newrelic.agent
from app.db import Session
@ -94,75 +93,11 @@ def log_nb_db_connection():
newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)
@newrelic.agent.background_task()
def log_nb_db_connection_by_app_name():
# get the number of connections to the DB
rows = Session.execute(
"SELECT application_name, count(datid) FROM pg_stat_activity group by application_name"
)
for row in rows:
if row[0].find("sl-") == 0:
LOG.d("number of db connections for app %s = %s", row[0], row[1])
newrelic.agent.record_custom_metric(
f"Custom/nb_db_app_connection/{row[0]}", row[1]
)
@newrelic.agent.background_task()
def log_pending_to_process_events():
r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
events_pending = list(r)[0][0]
LOG.d("number of events pending to process %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_to_process", events_pending
)
@newrelic.agent.background_task()
def log_events_pending_dead_letter():
since = arrow.now().shift(minutes=-10).datetime
r = Session.execute(
"""
SELECT COUNT(*)
FROM sync_event
WHERE (taken_time IS NOT NULL AND taken_time < :since)
OR (taken_time IS NULL AND created_at < :since)
""",
{"since": since},
)
events_pending = list(r)[0][0]
LOG.d("number of events pending dead letter %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_dead_letter", events_pending
)
@newrelic.agent.background_task()
def log_failed_events():
r = Session.execute(
"""
SELECT COUNT(*)
FROM sync_event
WHERE retry_count >= 10;
""",
)
failed_events = list(r)[0][0]
LOG.d("number of failed events %s", failed_events)
newrelic.agent.record_custom_metric("Custom/sync_events_failed", failed_events)
if __name__ == "__main__":
exporter = MetricExporter(get_newrelic_license())
while True:
log_postfix_metrics()
log_nb_db_connection()
log_pending_to_process_events()
log_events_pending_dead_letter()
log_failed_events()
log_nb_db_connection_by_app_name()
Session.close()
exporter.run()
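
The monitoring tasks in this file all follow the same shape: run a small SQL aggregate through the shared `Session`, log the number, and push it to New Relic with `record_custom_metric`. A hedged sketch of that shape with a made-up metric name (the query and metric are illustrative, not part of the codebase):

```python
import newrelic.agent

from app.db import Session
from app.log import LOG

@newrelic.agent.background_task()
def log_example_gauge():
    # Illustrative gauge: total number of sync events currently in the table.
    r = Session.execute("SELECT count(*) FROM sync_event")
    value = list(r)[0][0]
    LOG.d("example gauge value %s", value)
    newrelic.agent.record_custom_metric("Custom/example_gauge", value)
```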

View File

@ -1,49 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Update alias notes and backfill flag"
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 1000
noteSql = "(note = 'Created through Proton' or note = 'Created through partner Proton')"
alias_query = f"UPDATE alias set note = NULL, flags = flags | :flag where id>=:start AND id<:end and {noteSql}"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
rows_done = Session.execute(
alias_query,
{
"start": batch_start,
"end": batch_start + step,
"flag": Alias.FLAG_PARTNER_CREATED,
},
)
updated += rows_done.rowcount
Session.commit()
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")

View File

@ -1,44 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Backfill alias las use"
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Checking alias {alias_id_start} to {max_alias_id}")
step = 1000
el_query = "SELECT alias_id, MAX(id) from email_log where alias_id>=:start AND alias_id < :end GROUP BY alias_id"
alias_query = "UPDATE alias set last_email_log_id = :el_id where id = :alias_id"
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
rows = Session.execute(el_query, {"start": batch_start, "end": batch_start + step})
for row in rows:
Session.execute(alias_query, {"alias_id": row[0], "el_id": row[1]})
Session.commit()
updated += 1
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rAlias {batch_start}/{max_alias_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")

View File

@ -1,37 +0,0 @@
#!/usr/bin/env python3
import argparse
import random
import time
from sqlalchemy import func
from app import config
from app.models import Alias, Contact
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
max_alias_id: int = Session.query(func.max(Alias.id)).scalar()
start = time.time()
tests = 1000
for i in range(tests):
alias = (
Alias.filter(Alias.id > int(random.random() * max_alias_id))
.order_by(Alias.id.asc())
.limit(1)
.first()
)
contact = Contact.filter_by(alias_id=alias.id).order_by(Contact.id.asc()).first()
mailboxes = alias.mailboxes
user = alias.user
if i % 10:
print("{i} -> {alias.id}")
end = time.time()
time_taken = end - start
print(f"Took {time_taken} -> {time_taken/tests} per test")

View File

@ -1,56 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias, SLDomain
from app.db import Session
parser = argparse.ArgumentParser(
prog="Mark partner created aliases with the PARTNER_CREATED flag",
)
parser.add_argument(
"-s", "--start_alias_id", default=0, type=int, help="Initial alias_id"
)
parser.add_argument("-e", "--end_alias_id", default=0, type=int, help="Last alias_id")
args = parser.parse_args()
alias_id_start = args.start_alias_id
max_alias_id = args.end_alias_id
if max_alias_id == 0:
max_alias_id = Session.query(func.max(Alias.id)).scalar()
print(f"Updating aliases from {alias_id_start} to {max_alias_id}")
domains = SLDomain.filter(SLDomain.partner_id.isnot(None)).all()
cond = [f"email like '%{domain.domain}'" for domain in domains]
sql_or_cond = " OR ".join(cond)
sql = f"UPDATE alias set flags = (flags | :flag) WHERE id >= :start and id<:end and flags & :flag = 0 and ({sql_or_cond})"
print(sql)
step = 1000
updated = 0
start_time = time.time()
for batch_start in range(alias_id_start, max_alias_id, step):
updated += Session.execute(
sql,
{
"start": batch_start,
"end": batch_start + step,
"flag": Alias.FLAG_PARTNER_CREATED,
},
).rowcount
elapsed = time.time() - start_time
time_per_alias = elapsed / (batch_start - alias_id_start + step)
last_batch_id = batch_start + step
remaining = max_alias_id - last_batch_id
time_remaining = (max_alias_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
percent = int(
((batch_start - alias_id_start) * 100) / (max_alias_id - alias_id_start)
)
print(
f"\rAlias {batch_start}/{max_alias_id} {percent}% {updated} updated {hours_remaining:.2f}hrs remaining"
)
print(f"Updated aliases up to {max_alias_id}")

View File

@ -1,55 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from sqlalchemy import func
from app.models import Alias, User
from app.db import Session
parser = argparse.ArgumentParser(
prog="Backfill alias", description="Backfill user flags for partner alias created"
)
parser.add_argument(
"-s", "--start_user_id", default=0, type=int, help="Initial user_id"
)
parser.add_argument("-e", "--end_user_id", default=0, type=int, help="Last user_id")
args = parser.parse_args()
user_id_start = args.start_user_id
max_user_id = args.end_user_id
if max_user_id == 0:
max_user_id = Session.query(func.max(User.id)).scalar()
print(f"Checking user {user_id_start} to {max_user_id}")
step = 1000
el_query = "SELECT user_id, count(id) from alias where user_id>=:start AND user_id < :end AND flags & :alias_flag > 0 GROUP BY user_id"
user_update_query = "UPDATE users set flags = flags | :user_flag where id = :user_id"
updated = 0
start_time = time.time()
for batch_start in range(user_id_start, max_user_id, step):
rows = Session.execute(
el_query,
{
"start": batch_start,
"end": batch_start + step,
"alias_flag": Alias.FLAG_PARTNER_CREATED,
},
)
for row in rows:
if row[1] > 0:
Session.execute(
user_update_query,
{"user_id": row[0], "user_flag": User.FLAG_CREATED_ALIAS_FROM_PARTNER},
)
Session.commit()
updated += 1
elapsed = time.time() - start_time
time_per_alias = elapsed / (updated + 1)
last_batch_id = batch_start + step
remaining = max_user_id - last_batch_id
time_remaining = (max_user_id - last_batch_id) * time_per_alias
hours_remaining = time_remaining / 3600.0
print(
f"\rUser {batch_start}/{max_user_id} {updated} {hours_remaining:.2f}hrs remaining"
)
print("")

View File

@ -1,53 +0,0 @@
#!/usr/bin/env python3
import argparse
import time
from app import config
from app.email_utils import generate_reply_email
from app.email_validation import is_valid_email
from app.models import Alias
from app.db import Session
parser = argparse.ArgumentParser(
prog=f"Replace {config.NOREPLY}",
description=f"Replace {config.NOREPLY} from contacts reply email",
)
args = parser.parse_args()
el_query = "SELECT id, alias_id, website_email from contact where id>=:last_id AND reply_email=:reply_email ORDER BY id ASC LIMIT :step"
update_query = "UPDATE contact SET reply_email=:reply_email WHERE id=:contact_id "
updated = 0
start_time = time.time()
step = 100
last_id = 0
print(f"Replacing contacts with reply_email={config.NOREPLY}")
while True:
rows = Session.execute(
el_query, {"last_id": last_id, "reply_email": config.NOREPLY, "step": step}
)
loop_updated = 0
for row in rows:
contact_id = row[0]
alias_id = row[1]
last_id = contact_id
website_email = row[2]
contact_email_for_reply = website_email if is_valid_email(website_email) else ""
alias = Alias.get(alias_id)
if alias is None:
print(f"CANNOT find alias {alias_id} in database for contact {contact_id}")
reply_email = generate_reply_email(contact_email_for_reply, alias)
print(
f"Replacing contact {contact_id} with {website_email} reply_email for {reply_email}"
)
Session.execute(
update_query, {"contact_id": row[0], "reply_email": reply_email}
)
Session.commit()
updated += 1
loop_updated += 1
elapsed = time.time() - start_time
print(f"\rContact {last_id} done")
if loop_updated == 0:
break
print("")

Some files were not shown because too many files have changed in this diff