Compare commits

master...v4.41.2

No commits in common. "master" and "v4.41.2" have entirely different histories.

63 changed files with 202 additions and 1685 deletions

View File

@@ -68,12 +68,6 @@ For most tests, you will need to have ``redis`` installed and started on your machine
sh scripts/run-test.sh
```
You can also run tests using a local Postgres DB to speed things up. This can be done by
- creating an empty test DB and running the database migration with `dropdb test && createdb test && DB_URI=postgresql://localhost:5432/test alembic upgrade head`
- replacing the `DB_URI` in the `test.env` file with `DB_URI=postgresql://localhost:5432/test`
## Run the code locally
Install npm packages

View File

@@ -46,8 +46,7 @@ class SLModelView(sqla.ModelView):
def inaccessible_callback(self, name, **kwargs):
# redirect to login page if user doesn't have access
flash("You don't have access to the admin page", "error")
return redirect(url_for("dashboard.index", next=request.url))
return redirect(url_for("auth.login", next=request.url))
def on_model_change(self, form, model, is_created):
changes = {}

View File

@@ -25,12 +25,6 @@ from app.email_utils import (
render,
)
from app.errors import AliasInTrashError
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import (
AliasDeleted,
AliasStatusChanged,
EventContent,
)
from app.log import LOG
from app.models import (
Alias,
@@ -314,36 +308,31 @@ def delete_alias(alias: Alias, user: User):
Delete an alias and add it to either global or domain trash
Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
"""
LOG.i(f"User {user} has deleted alias {alias}")
# save deleted alias to either global or domain tra
# save deleted alias to either global or domain trash
if alias.custom_domain_id:
if not DomainDeletedAlias.get_by(
email=alias.email, domain_id=alias.custom_domain_id
):
domain_deleted_alias = DomainDeletedAlias(
user_id=user.id,
email=alias.email,
domain_id=alias.custom_domain_id,
LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
Session.add(
DomainDeletedAlias(
user_id=user.id,
email=alias.email,
domain_id=alias.custom_domain_id,
)
)
Session.add(domain_deleted_alias)
Session.commit()
LOG.i(
f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
)
else:
if not DeletedAlias.get_by(email=alias.email):
deleted_alias = DeletedAlias(email=alias.email)
Session.add(deleted_alias)
LOG.d("add %s to global trash", alias)
Session.add(DeletedAlias(email=alias.email))
Session.commit()
LOG.i(f"Moving {alias} to global trash {deleted_alias}")
LOG.i("delete alias %s", alias)
Alias.filter(Alias.id == alias.id).delete()
Session.commit()
EventDispatcher.send_event(
user, EventContent(alias_deleted=AliasDeleted(alias_id=alias.id))
)
def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
"""
@@ -469,16 +458,3 @@ def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
alias.pinned = False
Session.commit()
def change_alias_status(alias: Alias, enabled: bool, commit: bool = False):
LOG.i(f"Changing alias {alias} enabled to {enabled}")
alias.enabled = enabled
event = AliasStatusChanged(
alias_id=alias.id, alias_email=alias.email, enabled=enabled
)
EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
if commit:
Session.commit()
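
Note: on the master side, alias mutations are routed through these helpers so the matching protobuf event is emitted with the DB change. A minimal usage sketch (the IDs and wiring are illustrative, not from the diff):

```python
# Illustrative call sites for the helpers above (master-branch semantics).
from app import alias_utils
from app.models import Alias, User

def disable_then_delete(alias_id: int, user_id: int):
    alias = Alias.get(alias_id)   # assumed IDs
    user = User.get(user_id)
    # emits AliasStatusChanged(enabled=False) and commits
    alias_utils.change_alias_status(alias, enabled=False, commit=True)
    # moves the address to global/domain trash and emits AliasDeleted
    alias_utils.delete_alias(alias, user)
```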

View File

@@ -25,7 +25,6 @@ from app.errors import (
ErrAddressInvalid,
)
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasMailbox
@@ -185,8 +184,7 @@ def toggle_alias(alias_id):
if not alias or alias.user_id != user.id:
return jsonify(error="Forbidden"), 403
alias_utils.change_alias_status(alias, enabled=not alias.enabled)
LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
alias.enabled = not alias.enabled
Session.commit()
return jsonify(enabled=alias.enabled), 200

View File

@@ -3,13 +3,11 @@ from flask_login import login_user
from app.auth.base import auth_bp
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import EmailChange, ResetPasswordCode
@auth_bp.route("/change_email", methods=["GET", "POST"])
@limiter.limit("3/hour")
def change_email():
code = request.args.get("code")

View File

@@ -1,13 +1,14 @@
from flask import request, session, redirect, flash, url_for
from requests_oauthlib import OAuth2Session
import requests
from app import config
from app.auth.base import auth_bp
from app.auth.views.login_utils import after_login
from app.config import (
URL,
OIDC_AUTHORIZATION_URL,
OIDC_USER_INFO_URL,
OIDC_TOKEN_URL,
OIDC_SCOPES,
OIDC_NAME_FIELD,
)
@@ -15,15 +16,14 @@ from app.db import Session
from app.email_utils import send_welcome_email
from app.log import LOG
from app.models import User, SocialAuth
from app.utils import sanitize_email, sanitize_next_url
from app.utils import encode_url, sanitize_email, sanitize_next_url
# need to set redirect_uri explicitly instead of letting the lib pre-fill it:
# when served behind nginx, the pre-filled redirect_uri is localhost... and not the real url
redirect_uri = URL + "/auth/oidc/callback"
_redirect_uri = URL + "/auth/oidc/callback"
SESSION_STATE_KEY = "oauth_state"
SESSION_NEXT_KEY = "oauth_redirect_next"
@auth_bp.route("/oidc/login")
@@ -32,17 +32,18 @@ def oidc_login():
return redirect(url_for("auth.login"))
next_url = sanitize_next_url(request.args.get("next"))
auth_url = requests.get(config.OIDC_WELL_KNOWN_URL).json()["authorization_endpoint"]
if next_url:
redirect_uri = _redirect_uri + "?next=" + encode_url(next_url)
else:
redirect_uri = _redirect_uri
oidc = OAuth2Session(
config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri
)
authorization_url, state = oidc.authorization_url(auth_url)
authorization_url, state = oidc.authorization_url(OIDC_AUTHORIZATION_URL)
# State is used to prevent CSRF, keep this for later.
session[SESSION_STATE_KEY] = state
session[SESSION_NEXT_KEY] = next_url
return redirect(authorization_url)
@@ -59,23 +60,19 @@ def oidc_callback():
flash("Please use another sign in method then", "warning")
return redirect("/")
oidc_configuration = requests.get(config.OIDC_WELL_KNOWN_URL).json()
user_info_url = oidc_configuration["userinfo_endpoint"]
token_url = oidc_configuration["token_endpoint"]
oidc = OAuth2Session(
config.OIDC_CLIENT_ID,
state=session[SESSION_STATE_KEY],
scope=[OIDC_SCOPES],
redirect_uri=redirect_uri,
redirect_uri=_redirect_uri,
)
oidc.fetch_token(
token_url,
OIDC_TOKEN_URL,
client_secret=config.OIDC_CLIENT_SECRET,
authorization_response=request.url,
)
oidc_user_data = oidc.get(user_info_url)
oidc_user_data = oidc.get(OIDC_USER_INFO_URL)
if oidc_user_data.status_code != 200:
LOG.e(
f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}"
@@ -114,8 +111,7 @@ def oidc_callback():
Session.commit()
# The activation link contains the original page, e.g. the authorize page
next_url = session[SESSION_NEXT_KEY]
session[SESSION_NEXT_KEY] = None
next_url = sanitize_next_url(request.args.get("next")) if request.args else None
return after_login(user, next_url)
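
Note: the two sides differ in where the OIDC endpoints come from. master resolves them from the provider's well-known document at request time, while v4.41.2 reads OIDC_AUTHORIZATION_URL, OIDC_TOKEN_URL and OIDC_USER_INFO_URL from the environment. A sketch of the well-known lookup (the issuer URL is a placeholder):

```python
# Minimal OIDC discovery sketch; the issuer URL is a placeholder.
import requests

conf = requests.get(
    "https://issuer.example.com/.well-known/openid-configuration"
).json()
authorization_url = conf["authorization_endpoint"]  # used by oidc_login
token_url = conf["token_endpoint"]                  # used by oidc_callback
userinfo_url = conf["userinfo_endpoint"]            # used by oidc_callback
```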

View File

@@ -245,7 +245,9 @@ FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")
CONNECT_WITH_OIDC_ICON = os.environ.get("CONNECT_WITH_OIDC_ICON")
OIDC_WELL_KNOWN_URL = os.environ.get("OIDC_WELL_KNOWN_URL")
OIDC_AUTHORIZATION_URL = os.environ.get("OIDC_AUTHORIZATION_URL")
OIDC_USER_INFO_URL = os.environ.get("OIDC_USER_INFO_URL")
OIDC_TOKEN_URL = os.environ.get("OIDC_TOKEN_URL")
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID")
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET")
OIDC_SCOPES = os.environ.get("OIDC_SCOPES")
@@ -281,7 +283,6 @@ JOB_DELETE_MAILBOX = "delete-mailbox"
JOB_DELETE_DOMAIN = "delete-domain"
JOB_SEND_USER_REPORT = "send-user-report"
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
# for pagination
PAGE_LIMIT = 20
@@ -430,11 +431,9 @@ except Exception:
HIBP_SCAN_INTERVAL_DAYS = 7
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
HIBP_MAX_ALIAS_CHECK = 10_000
HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
HIBP_RPM = 100
HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
KEEP_OLD_DATA_DAYS = 30
POSTMASTER = os.environ.get("POSTMASTER")
# store temporary files, especially for debugging
@@ -582,9 +581,3 @@ UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ
EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
# We want it disabled by default, so only skip if defined
EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ
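
Note: the EVENT_WEBHOOK_* flags removed here are presence-based: defining the variable at all, with any value, turns the behavior on. A small sketch of the pattern:

```python
# Presence-based flag: truthy if the variable exists, regardless of value.
import os

EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ

# e.g. `export EVENT_WEBHOOK_DISABLE=` (even empty) flips this to True,
# and EventDispatcher.send_event() then returns early.
```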

View File

@@ -14,7 +14,7 @@ from app.models import PartnerUser, SocialAuth
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url
_SUDO_GAP = 120
_SUDO_GAP = 900
class LoginForm(FlaskForm):

View File

@@ -141,12 +141,12 @@ def index():
)
if request.form.get("form-name") == "delete-alias":
LOG.i(f"User {current_user} requested deletion of alias {alias}")
LOG.d("delete alias %s", alias)
email = alias.email
alias_utils.delete_alias(alias, current_user)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
alias_utils.change_alias_status(alias, enabled=False)
alias.enabled = False
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")

View File

@@ -11,11 +11,9 @@ from wtforms.fields.html5 import EmailField
from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.extensions import limiter
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.models import Mailbox
@@ -31,8 +29,6 @@ class ChangeEmailForm(FlaskForm):
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("20/minute", methods=["POST"])
def mailbox_detail_route(mailbox_id):
mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
@@ -183,15 +179,8 @@ def mailbox_detail_route(mailbox_id):
elif request.form.get("form-name") == "toggle-pgp":
if request.form.get("pgp-enabled") == "on":
if mailbox.is_proton():
mailbox.disable_pgp = True
flash(
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
"info",
)
else:
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "info")
mailbox.disable_pgp = False
flash(f"PGP is enabled on {mailbox.email}", "success")
else:
mailbox.disable_pgp = True
flash(f"PGP is disabled on {mailbox.email}", "info")

View File

@@ -227,21 +227,6 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "enable_data_breach_check":
if not current_user.is_premium():
flash("Only premium plan can enable data breach monitoring", "warning")
return redirect(url_for("dashboard.setting"))
choose = request.form.get("enable_data_breach_check")
if choose == "on":
LOG.i("User {current_user} has enabled data breach monitoring")
current_user.enable_data_breach_check = True
flash("Data breach monitoring is enabled", "success")
else:
LOG.i("User {current_user} has disabled data breach monitoring")
current_user.enable_data_breach_check = False
flash("Data breach monitoring is disabled", "info")
Session.commit()
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "sender-in-ra":
choose = request.form.get("enable")
if choose == "on":

View File

@@ -8,7 +8,6 @@ from app.db import Session
from flask import redirect, url_for, flash, request, render_template
from flask_login import login_required, current_user
from app import alias_utils
from app.dashboard.base import dashboard_bp
from app.handler.unsubscribe_encoder import UnsubscribeAction
from app.handler.unsubscribe_handler import UnsubscribeHandler
@@ -32,7 +31,7 @@ def unsubscribe(alias_id):
# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
if request.method == "POST":
alias_utils.change_alias_status(alias, False)
alias.enabled = False
flash(f"Alias {alias.email} has been blocked", "success")
Session.commit()

View File

@@ -21,7 +21,6 @@ LIST_UNSUBSCRIBE = "List-Unsubscribe"
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
RETURN_PATH = "Return-Path"
AUTHENTICATION_RESULTS = "Authentication-Results"
SL_QUEUE_ID = "X-SL-Queue-Id"
# headers used to DKIM sign in order of preference
DKIM_HEADERS = [

View File

@@ -494,10 +494,9 @@ def delete_header(msg: Message, header: str):
def sanitize_header(msg: Message, header: str):
"""remove trailing space and remove linebreak from a header"""
header_lowercase = header.lower()
for i in reversed(range(len(msg._headers))):
header_name = msg._headers[i][0].lower()
if header_name == header_lowercase:
if header_name == header.lower():
# msg._headers[i] is a tuple like ('From', 'hey@google.com')
if msg._headers[i][1]:
msg._headers[i] = (

View File

View File

@@ -1,66 +0,0 @@
from abc import ABC, abstractmethod
from app import config
from app.db import Session
from app.errors import ProtonPartnerNotSetUp
from app.events.generated import event_pb2
from app.models import User, PartnerUser, SyncEvent
from app.proton.utils import get_proton_partner
from typing import Optional
NOTIFICATION_CHANNEL = "simplelogin_sync_events"
class Dispatcher(ABC):
@abstractmethod
def send(self, event: bytes):
pass
class PostgresDispatcher(Dispatcher):
def send(self, event: bytes):
instance = SyncEvent.create(content=event, flush=True)
Session.execute(f"NOTIFY {NOTIFICATION_CHANNEL}, '{instance.id}';")
@staticmethod
def get():
return PostgresDispatcher()
class EventDispatcher:
@staticmethod
def send_event(
user: User,
content: event_pb2.EventContent,
dispatcher: Dispatcher = PostgresDispatcher.get(),
skip_if_webhook_missing: bool = True,
):
if config.EVENT_WEBHOOK_DISABLE:
return
if not config.EVENT_WEBHOOK and skip_if_webhook_missing:
return
partner_user = EventDispatcher.__partner_user(user.id)
if not partner_user:
return
event = event_pb2.Event(
user_id=user.id,
external_user_id=partner_user.external_user_id,
partner_id=partner_user.partner_id,
content=content,
)
serialized = event.SerializeToString()
dispatcher.send(serialized)
@staticmethod
def __partner_user(user_id: int) -> Optional[PartnerUser]:
# Check if the current user has a partner_id
try:
proton_partner_id = get_proton_partner().id
except ProtonPartnerNotSetUp:
return None
# It has. Retrieve the information for the PartnerUser
return PartnerUser.get_by(user_id=user_id, partner_id=proton_partner_id)
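
Note: because send_event() takes the dispatcher as a parameter, a test can swap the Postgres NOTIFY transport for an in-memory one. A hedged sketch (MemoryDispatcher is hypothetical, not part of the diff):

```python
# Hypothetical in-memory Dispatcher for tests; implements the ABC above.
from app.events.event_dispatcher import Dispatcher, EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserDeleted

class MemoryDispatcher(Dispatcher):
    def __init__(self):
        self.events = []  # collected serialized events (bytes)

    def send(self, event: bytes):
        self.events.append(event)

# dispatcher = MemoryDispatcher()
# EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()),
#                            dispatcher=dispatcher, skip_if_webhook_missing=False)
# assert len(dispatcher.events) == 1
```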

View File

@@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: event.proto
# Protobuf Python Version: 5.27.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
_runtime_version.Domain.PUBLIC,
5,
27,
0,
'',
'event.proto'
)
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x12simplelogin_events\"(\n\x0fUserPlanChanged\x12\x15\n\rplan_end_time\x18\x01 \x01(\r\"\r\n\x0bUserDeleted\"Z\n\x0c\x41liasCreated\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x12\n\nalias_note\x18\x03 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x08\"L\n\x12\x41liasStatusChanged\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\x12\x0f\n\x07\x65nabled\x18\x03 \x01(\x08\"5\n\x0c\x41liasDeleted\x12\x10\n\x08\x61lias_id\x18\x01 \x01(\r\x12\x13\n\x0b\x61lias_email\x18\x02 \x01(\t\"D\n\x10\x41liasCreatedList\x12\x30\n\x06\x65vents\x18\x01 \x03(\x0b\x32 .simplelogin_events.AliasCreated\"\x93\x03\n\x0c\x45ventContent\x12?\n\x10user_plan_change\x18\x01 \x01(\x0b\x32#.simplelogin_events.UserPlanChangedH\x00\x12\x37\n\x0cuser_deleted\x18\x02 \x01(\x0b\x32\x1f.simplelogin_events.UserDeletedH\x00\x12\x39\n\ralias_created\x18\x03 \x01(\x0b\x32 .simplelogin_events.AliasCreatedH\x00\x12\x45\n\x13\x61lias_status_change\x18\x04 \x01(\x0b\x32&.simplelogin_events.AliasStatusChangedH\x00\x12\x39\n\ralias_deleted\x18\x05 \x01(\x0b\x32 .simplelogin_events.AliasDeletedH\x00\x12\x41\n\x11\x61lias_create_list\x18\x06 \x01(\x0b\x32$.simplelogin_events.AliasCreatedListH\x00\x42\t\n\x07\x63ontent\"y\n\x05\x45vent\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x18\n\x10\x65xternal_user_id\x18\x02 \x01(\t\x12\x12\n\npartner_id\x18\x03 \x01(\r\x12\x31\n\x07\x63ontent\x18\x04 \x01(\x0b\x32 .simplelogin_events.EventContentb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'event_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
DESCRIPTOR._loaded_options = None
_globals['_USERPLANCHANGED']._serialized_start=35
_globals['_USERPLANCHANGED']._serialized_end=75
_globals['_USERDELETED']._serialized_start=77
_globals['_USERDELETED']._serialized_end=90
_globals['_ALIASCREATED']._serialized_start=92
_globals['_ALIASCREATED']._serialized_end=182
_globals['_ALIASSTATUSCHANGED']._serialized_start=184
_globals['_ALIASSTATUSCHANGED']._serialized_end=260
_globals['_ALIASDELETED']._serialized_start=262
_globals['_ALIASDELETED']._serialized_end=315
_globals['_ALIASCREATEDLIST']._serialized_start=317
_globals['_ALIASCREATEDLIST']._serialized_end=385
_globals['_EVENTCONTENT']._serialized_start=388
_globals['_EVENTCONTENT']._serialized_end=791
_globals['_EVENT']._serialized_start=793
_globals['_EVENT']._serialized_end=914
# @@protoc_insertion_point(module_scope)

View File

@@ -1,80 +0,0 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
DESCRIPTOR: _descriptor.FileDescriptor
class UserPlanChanged(_message.Message):
__slots__ = ("plan_end_time",)
PLAN_END_TIME_FIELD_NUMBER: _ClassVar[int]
plan_end_time: int
def __init__(self, plan_end_time: _Optional[int] = ...) -> None: ...
class UserDeleted(_message.Message):
__slots__ = ()
def __init__(self) -> None: ...
class AliasCreated(_message.Message):
__slots__ = ("alias_id", "alias_email", "alias_note", "enabled")
ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
ALIAS_NOTE_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
alias_id: int
alias_email: str
alias_note: str
enabled: bool
def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., alias_note: _Optional[str] = ..., enabled: bool = ...) -> None: ...
class AliasStatusChanged(_message.Message):
__slots__ = ("alias_id", "alias_email", "enabled")
ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
ENABLED_FIELD_NUMBER: _ClassVar[int]
alias_id: int
alias_email: str
enabled: bool
def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ..., enabled: bool = ...) -> None: ...
class AliasDeleted(_message.Message):
__slots__ = ("alias_id", "alias_email")
ALIAS_ID_FIELD_NUMBER: _ClassVar[int]
ALIAS_EMAIL_FIELD_NUMBER: _ClassVar[int]
alias_id: int
alias_email: str
def __init__(self, alias_id: _Optional[int] = ..., alias_email: _Optional[str] = ...) -> None: ...
class AliasCreatedList(_message.Message):
__slots__ = ("events",)
EVENTS_FIELD_NUMBER: _ClassVar[int]
events: _containers.RepeatedCompositeFieldContainer[AliasCreated]
def __init__(self, events: _Optional[_Iterable[_Union[AliasCreated, _Mapping]]] = ...) -> None: ...
class EventContent(_message.Message):
__slots__ = ("user_plan_change", "user_deleted", "alias_created", "alias_status_change", "alias_deleted", "alias_create_list")
USER_PLAN_CHANGE_FIELD_NUMBER: _ClassVar[int]
USER_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATED_FIELD_NUMBER: _ClassVar[int]
ALIAS_STATUS_CHANGE_FIELD_NUMBER: _ClassVar[int]
ALIAS_DELETED_FIELD_NUMBER: _ClassVar[int]
ALIAS_CREATE_LIST_FIELD_NUMBER: _ClassVar[int]
user_plan_change: UserPlanChanged
user_deleted: UserDeleted
alias_created: AliasCreated
alias_status_change: AliasStatusChanged
alias_deleted: AliasDeleted
alias_create_list: AliasCreatedList
def __init__(self, user_plan_change: _Optional[_Union[UserPlanChanged, _Mapping]] = ..., user_deleted: _Optional[_Union[UserDeleted, _Mapping]] = ..., alias_created: _Optional[_Union[AliasCreated, _Mapping]] = ..., alias_status_change: _Optional[_Union[AliasStatusChanged, _Mapping]] = ..., alias_deleted: _Optional[_Union[AliasDeleted, _Mapping]] = ..., alias_create_list: _Optional[_Union[AliasCreatedList, _Mapping]] = ...) -> None: ...
class Event(_message.Message):
__slots__ = ("user_id", "external_user_id", "partner_id", "content")
USER_ID_FIELD_NUMBER: _ClassVar[int]
EXTERNAL_USER_ID_FIELD_NUMBER: _ClassVar[int]
PARTNER_ID_FIELD_NUMBER: _ClassVar[int]
CONTENT_FIELD_NUMBER: _ClassVar[int]
user_id: int
external_user_id: str
partner_id: int
content: EventContent
def __init__(self, user_id: _Optional[int] = ..., external_user_id: _Optional[str] = ..., partner_id: _Optional[int] = ..., content: _Optional[_Union[EventContent, _Mapping]] = ...) -> None: ...

View File

@@ -30,9 +30,7 @@ def apply_dmarc_policy_for_forward_phase(
) -> Tuple[Message, Optional[str]]:
spam_result = SpamdResult.extract_from_headers(msg, Phase.forward)
if not DMARC_CHECK_ENABLED or not spam_result:
LOG.i("DMARC check disabled")
return msg, None
LOG.i(f"Spam check result in {spam_result}")
from_header = get_header_unicode(msg[headers.FROM])
@@ -152,10 +150,8 @@ def apply_dmarc_policy_for_reply_phase(
) -> Optional[str]:
spam_result = SpamdResult.extract_from_headers(msg, Phase.reply)
if not DMARC_CHECK_ENABLED or not spam_result:
LOG.i("DMARC check disabled")
return None
LOG.i(f"Spam check result is {spam_result}")
if spam_result.dmarc not in (
DmarcCheckResult.quarantine,
DmarcCheckResult.reject,

View File

@@ -5,7 +5,6 @@ from typing import Optional
from aiosmtpd.smtp import Envelope
from app import config
from app import alias_utils
from app.db import Session
from app.email import headers, status
from app.email_utils import (
@@ -102,8 +101,7 @@ class UnsubscribeHandler:
mailbox.email, alias
):
return status.E509
LOG.i(f"User disabled alias {alias} via unsubscribe header")
alias_utils.change_alias_status(alias, enabled=False)
alias.enabled = False
Session.commit()
enable_alias_url = config.URL + f"/dashboard/?highlight_alias_id={alias.id}"
for mailbox in alias.mailboxes:

View File

@@ -30,10 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
LOG.d("Download file %s from %s", batch_import.file, file_url)
r = requests.get(file_url)
# Replace invisible character
lines = [
line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
]
lines = [line.decode("utf-8") for line in r.iter_lines()]
import_from_csv(batch_import, user, lines)
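
Note: the master side strips the UTF-8 BOM (\ufeff) from every line before the CSV import. For the common case of a BOM only at the start, decoding with utf-8-sig is an alternative sketch:

```python
# Alternative BOM handling sketch: "utf-8-sig" drops a leading \ufeff on
# decode (r is the requests response from above). Unlike replace(), it only
# covers a BOM at the start of each decoded chunk, which is the usual case.
lines = [line.decode("utf-8-sig").strip() for line in r.iter_lines()]
```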

View File

@@ -1,40 +0,0 @@
from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, AliasCreated, AliasCreatedList
from app.log import LOG
from app.models import User, Alias
def send_alias_creation_events_for_user(
user: User, dispatcher: Dispatcher, chunk_size=50
):
if user.disabled:
LOG.i("User {user} is disabled. Skipping sending events for that user")
return
chunk_size = min(chunk_size, 50)
event_list = []
for alias in (
Alias.yield_per_query(chunk_size)
.filter_by(user_id=user.id)
.order_by(Alias.id.asc())
):
event_list.append(
AliasCreated(
alias_id=alias.id,
alias_email=alias.email,
alias_note=alias.note,
enabled=alias.enabled,
)
)
if len(event_list) >= chunk_size:
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
event_list = []
if len(event_list) > 0:
EventDispatcher.send_event(
user,
EventContent(alias_create_list=AliasCreatedList(events=event_list)),
dispatcher=dispatcher,
)
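
Note: the function batches aliases into AliasCreatedList chunks of at most 50. A usage sketch with the Postgres-backed dispatcher defined earlier (the user id is illustrative):

```python
# Replay all alias-created events for one user through the Postgres NOTIFY
# transport shown in event_dispatcher.py above.
from app.events.event_dispatcher import PostgresDispatcher
from app.models import User

user = User.get(42)  # assumed user id
send_alias_creation_events_for_user(user, dispatcher=PostgresDispatcher.get())
```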

View File

@@ -525,11 +525,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
sa.Boolean, default=True, nullable=False, server_default="1"
)
# user opted in for data breach check
enable_data_breach_check = sa.Column(
sa.Boolean, default=False, nullable=False, server_default="0"
)
# bitwise flags. Allow for future expansion
flags = sa.Column(
sa.BigInteger,
@@ -657,21 +652,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
return user
@classmethod
def delete(cls, obj_id, commit=False):
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserDeleted, EventContent
user: User = cls.get(obj_id)
EventDispatcher.send_event(user, EventContent(user_deleted=UserDeleted()))
res = super(User, cls).delete(obj_id)
if commit:
Session.commit()
return res
def get_active_subscription(
self, include_partner_subscription: bool = True
) -> Optional[
@@ -1634,18 +1614,6 @@ class Alias(Base, ModelMixin):
Session.add(new_alias)
DailyMetric.get_or_create_today_metric().nb_alias += 1
# Internal import to avoid global import cycles
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import AliasCreated, EventContent
event = AliasCreated(
alias_id=new_alias.id,
alias_email=new_alias.email,
alias_note=new_alias.note,
enabled=True,
)
EventDispatcher.send_event(user, EventContent(alias_created=event))
if commit:
Session.commit()
@@ -2676,15 +2644,10 @@ class Mailbox(Base, ModelMixin):
return False
def nb_alias(self):
alias_ids = set(
am.alias_id
for am in AliasMailbox.filter_by(mailbox_id=self.id).values(
AliasMailbox.alias_id
)
return (
AliasMailbox.filter_by(mailbox_id=self.id).count()
+ Alias.filter_by(mailbox_id=self.id).count()
)
for alias in Alias.filter_by(mailbox_id=self.id).values(Alias.id):
alias_ids.add(alias.id)
return len(alias_ids)
def is_proton(self) -> bool:
if (
@@ -2733,15 +2696,12 @@ class Mailbox(Base, ModelMixin):
@property
def aliases(self) -> [Alias]:
ret = dict(
(alias.id, alias) for alias in Alias.filter_by(mailbox_id=self.id).all()
)
ret = Alias.filter_by(mailbox_id=self.id).all()
for am in AliasMailbox.filter_by(mailbox_id=self.id):
if am.alias_id not in ret:
ret[am.alias_id] = am.alias
ret.append(am.alias)
return list(ret.values())
return ret
@classmethod
def create(cls, **kw):
@@ -2971,7 +2931,11 @@ class RecoveryCode(Base, ModelMixin):
@classmethod
def find_by_user_code(cls, user: User, code: str):
hashed_code = cls._hash_code(code)
return cls.get_by(user_id=user.id, code=hashed_code)
# TODO: Only return hashed codes once there aren't unhashed codes in the db.
found_code = cls.get_by(user_id=user.id, code=hashed_code)
if found_code:
return found_code
return cls.get_by(user_id=user.id, code=code)
@classmethod
def empty(cls, user):
@@ -3671,52 +3635,3 @@ class ApiToCookieToken(Base, ModelMixin):
code = secrets.token_urlsafe(32)
return super().create(code=code, **kwargs)
class SyncEvent(Base, ModelMixin):
"""This model holds the events that need to be sent to the webhook"""
__tablename__ = "sync_event"
content = sa.Column(sa.LargeBinary, unique=False, nullable=False)
taken_time = sa.Column(
ArrowType, default=None, nullable=True, server_default=None, index=True
)
__table_args__ = (
sa.Index("ix_sync_event_created_at", "created_at"),
sa.Index("ix_sync_event_taken_time", "taken_time"),
)
def mark_as_taken(self) -> bool:
sql = """
UPDATE sync_event
SET taken_time = :taken_time
WHERE id = :sync_event_id
AND taken_time IS NULL
"""
args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
res = Session.execute(sql, args)
Session.commit()
return res.rowcount > 0
@classmethod
def get_dead_letter(cls, older_than: Arrow) -> [SyncEvent]:
return (
SyncEvent.filter(
(
(
SyncEvent.taken_time.isnot(None)
& (SyncEvent.taken_time < older_than)
)
| (
SyncEvent.taken_time.is_(None)
& (SyncEvent.created_at < older_than)
)
)
)
.order_by(SyncEvent.id)
.limit(100)
.all()
)
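
Note: mark_as_taken() is the claim step that lets several runners share the sync_event queue: the UPDATE above only matches while taken_time IS NULL, so exactly one competing runner wins. A consumer-side sketch (the event id and handler are illustrative):

```python
# Claim-then-process sketch; the "taken_time IS NULL" guard makes the
# claim atomic across competing runners.
from app.models import SyncEvent

event = SyncEvent.get_by(id=some_event_id)  # some_event_id is illustrative
if event and event.mark_as_taken():
    handle(event)  # hypothetical handler; only the winning runner gets here
# losers fall through: another runner already set taken_time
```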

View File

@@ -2,7 +2,6 @@ from newrelic import agent
from typing import Optional
from app.db import Session
from app.log import LOG
from app.errors import ProtonPartnerNotSetUp
from app.models import Partner, PartnerUser, User
@@ -31,7 +30,6 @@ def perform_proton_account_unlink(current_user: User):
user_id=current_user.id, partner_id=proton_partner.id
)
if partner_user is not None:
LOG.info(f"User {current_user} has unlinked the account from {partner_user}")
PartnerUser.delete(partner_user.id)
Session.commit()
agent.record_custom_event("AccountUnlinked", {"partner": proton_partner.name})

View File

@@ -30,9 +30,7 @@ def check_bucket_limit(
try:
value = lock_redis.incr(bucket_lock_name, bucket_seconds)
if value > max_hits:
LOG.i(
f"Rate limit hit for {lock_name} (bucket id {bucket_id}) -> {value}/{max_hits}"
)
LOG.i(f"Rate limit hit for {bucket_lock_name} -> {value}/{max_hits}")
newrelic.agent.record_custom_event(
"BucketRateLimit",
{"lock_name": lock_name, "bucket_seconds": bucket_seconds},

View File

@@ -5,9 +5,19 @@ from typing import Optional
import boto3
import requests
from app import config
from app.config import (
AWS_REGION,
BUCKET,
AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY,
LOCAL_FILE_UPLOAD,
UPLOAD_DIR,
URL,
AWS_ENDPOINT_URL,
)
from app.log import LOG
_s3_client = None
@@ -15,12 +25,12 @@ def _get_s3client():
global _s3_client
if _s3_client is None:
args = {
"aws_access_key_id": config.AWS_ACCESS_KEY_ID,
"aws_secret_access_key": config.AWS_SECRET_ACCESS_KEY,
"region_name": config.AWS_REGION,
"aws_access_key_id": AWS_ACCESS_KEY_ID,
"aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
"region_name": AWS_REGION,
}
if config.AWS_ENDPOINT_URL:
args["endpoint_url"] = config.AWS_ENDPOINT_URL
if AWS_ENDPOINT_URL:
args["endpoint_url"] = AWS_ENDPOINT_URL
_s3_client = boto3.client("s3", **args)
return _s3_client
@@ -28,8 +38,8 @@ def _get_s3client():
def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
bs.seek(0)
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, key)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, key)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
@@ -37,7 +47,7 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
else:
_get_s3client().put_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=key,
Body=bs,
ContentType=content_type,
@@ -47,8 +57,8 @@ def upload_from_bytesio(key: str, bs: BytesIO, content_type="application/octet-stream"):
def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
bs.seek(0)
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, path)
file_dir = os.path.dirname(file_path)
os.makedirs(file_dir, exist_ok=True)
with open(file_path, "wb") as f:
@@ -56,7 +66,7 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
else:
_get_s3client().put_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=path,
Body=bs,
# Support saving a remote file using Http header
@@ -67,12 +77,12 @@ def upload_email_from_bytesio(path: str, bs: BytesIO, filename):
def download_email(path: str) -> Optional[str]:
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
if LOCAL_FILE_UPLOAD:
file_path = os.path.join(UPLOAD_DIR, path)
with open(file_path, "rb") as f:
return f.read()
resp = _get_s3client().get_object(
Bucket=config.BUCKET,
Bucket=BUCKET,
Key=path,
)
if not resp or "Body" not in resp:
@@ -86,30 +96,29 @@ def upload_from_url(url: str, upload_path):
def get_url(key: str, expires_in=3600) -> str:
if config.LOCAL_FILE_UPLOAD:
return config.URL + "/static/upload/" + key
if LOCAL_FILE_UPLOAD:
return URL + "/static/upload/" + key
else:
return _get_s3client().generate_presigned_url(
ExpiresIn=expires_in,
ClientMethod="get_object",
Params={"Bucket": config.BUCKET, "Key": key},
Params={"Bucket": BUCKET, "Key": key},
)
def delete(path: str):
if config.LOCAL_FILE_UPLOAD:
file_path = os.path.join(config.UPLOAD_DIR, path)
os.remove(file_path)
if LOCAL_FILE_UPLOAD:
os.remove(os.path.join(UPLOAD_DIR, path))
else:
_get_s3client().delete_object(Bucket=config.BUCKET, Key=path)
_get_s3client().delete_object(Bucket=BUCKET, Key=path)
def create_bucket_if_not_exists():
s3client = _get_s3client()
buckets = s3client.list_buckets()
for bucket in buckets["Buckets"]:
if bucket["Name"] == config.BUCKET:
if bucket["Name"] == BUCKET:
LOG.i("Bucket already exists")
return
s3client.create_bucket(Bucket=config.BUCKET)
LOG.i(f"Bucket {config.BUCKET} created")
s3client.create_bucket(Bucket=BUCKET)
LOG.i(f"Bucket {BUCKET} created")

View File

@@ -2,8 +2,6 @@ import requests
from requests import RequestException
from app import config
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.log import LOG
from app.models import User
@@ -33,6 +31,3 @@ def execute_subscription_webhook(user: User):
)
except RequestException as e:
LOG.error(f"Subscription request exception: {e}")
event = UserPlanChanged(plan_end_time=sl_subscription_end)
EventDispatcher.send_event(user, EventContent(user_plan_change=event))

cron.py
View File

@@ -61,9 +61,6 @@ from app.pgp_utils import load_public_key_and_check, PGPException
from app.proton.utils import get_proton_partner
from app.utils import sanitize_email
from server import create_light_app
from tasks.cleanup_old_imports import cleanup_old_imports
from tasks.cleanup_old_jobs import cleanup_old_jobs
from tasks.cleanup_old_notifications import cleanup_old_notifications
DELETE_GRACE_DAYS = 30
@@ -1070,7 +1067,6 @@ def get_alias_to_check_hibp(
Alias.id >= min_alias_id,
Alias.id < max_alias_id,
User.disabled == False, # noqa: E712
User.enable_data_breach_check,
or_(
User.lifetime,
ManualSubscription.end_at > now,
@@ -1225,13 +1221,6 @@ def clear_users_scheduled_to_be_deleted(dry_run=False):
Session.commit()
def delete_old_data():
oldest_valid = arrow.now().shift(days=-config.KEEP_OLD_DATA_DAYS)
cleanup_old_imports(oldest_valid)
cleanup_old_jobs(oldest_valid)
cleanup_old_notifications(oldest_valid)
if __name__ == "__main__":
LOG.d("Start running cronjob")
parser = argparse.ArgumentParser()
@@ -1246,7 +1235,6 @@ if __name__ == "__main__":
"notify_manual_subscription_end",
"notify_premium_end",
"delete_logs",
"delete_old_data",
"poll_apple_subscription",
"sanity_check",
"delete_old_monitoring",
@@ -1275,9 +1263,6 @@ if __name__ == "__main__":
elif args.job == "delete_logs":
LOG.d("Deleted Logs")
delete_logs()
elif args.job == "delete_old_data":
LOG.d("Delete old data")
delete_old_data()
elif args.job == "poll_apple_subscription":
LOG.d("Poll Apple Subscriptions")
poll_apple_subscription()

View File

@@ -37,12 +37,6 @@ jobs:
schedule: "15 5 * * *"
captureStderr: true
- name: SimpleLogin Delete Old data
command: python /code/cron.py -j delete_old_data
shell: /bin/bash
schedule: "30 5 * * *"
captureStderr: true
- name: SimpleLogin Poll Apple Subscriptions
command: python /code/cron.py -j poll_apple_subscription
shell: /bin/bash

View File

@@ -53,7 +53,7 @@ from flanker.addresslib.address import EmailAddress
from sqlalchemy.exc import IntegrityError
from app import pgp_utils, s3, config
from app.alias_utils import try_auto_create, change_alias_status
from app.alias_utils import try_auto_create
from app.config import (
EMAIL_DOMAIN,
URL,
@@ -875,7 +875,6 @@ def forward_email_to_mailbox(
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
headers.LIST_UNSUBSCRIBE,
headers.LIST_UNSUBSCRIBE_POST,
] + headers.MIME_HEADERS
@@ -1180,7 +1179,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
# References and In-Reply-To are used for keeping the email thread
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
]
+ headers.MIME_HEADERS,
)
@@ -1585,7 +1583,7 @@ def handle_bounce_forward_phase(msg: Message, email_log: EmailLog):
LOG.w(
f"Disable alias {alias} because {reason}. {alias.mailboxes} {alias.user}. Last contact {contact}"
)
change_alias_status(alias, enabled=False)
alias.enabled = False
Notification.create(
user_id=user.id,
@@ -2042,11 +2040,10 @@ def handle(envelope: Envelope, msg: Message) -> str:
return status.E204
# sanitize email headers
sanitize_header(msg, headers.FROM)
sanitize_header(msg, headers.TO)
sanitize_header(msg, headers.CC)
sanitize_header(msg, headers.REPLY_TO)
sanitize_header(msg, headers.MESSAGE_ID)
sanitize_header(msg, "from")
sanitize_header(msg, "to")
sanitize_header(msg, "cc")
sanitize_header(msg, "reply-to")
LOG.d(
"==>> Handle mail_from:%s, rcpt_tos:%s, header_from:%s, header_to:%s, "

View File

@@ -1,64 +0,0 @@
import argparse
from enum import Enum
from sys import argv, exit
from app.config import DB_URI
from app.log import LOG
from events.runner import Runner
from events.event_source import DeadLetterEventSource, PostgresEventSource
from events.event_sink import ConsoleEventSink, HttpEventSink
class Mode(Enum):
DEAD_LETTER = "dead_letter"
LISTENER = "listener"
@staticmethod
def from_str(value: str):
if value == Mode.DEAD_LETTER.value:
return Mode.DEAD_LETTER
elif value == Mode.LISTENER.value:
return Mode.LISTENER
else:
raise ValueError(f"Invalid mode: {value}")
def main(mode: Mode, dry_run: bool):
if mode == Mode.DEAD_LETTER:
LOG.i("Using DeadLetterEventSource")
source = DeadLetterEventSource()
elif mode == Mode.LISTENER:
LOG.i("Using PostgresEventSource")
source = PostgresEventSource(DB_URI)
else:
raise ValueError(f"Invalid mode: {mode}")
if dry_run:
LOG.i("Starting with ConsoleEventSink")
sink = ConsoleEventSink()
else:
LOG.i("Starting with HttpEventSink")
sink = HttpEventSink()
runner = Runner(source=source, sink=sink)
runner.run()
def args():
parser = argparse.ArgumentParser(description="Run event listener")
parser.add_argument(
"mode",
help="Mode to run",
choices=[Mode.DEAD_LETTER.value, Mode.LISTENER.value],
)
parser.add_argument("--dry-run", help="Dry run mode", action="store_true")
return parser.parse_args()
if __name__ == "__main__":
if len(argv) < 2:
print("Invalid usage. Pass 'listener' or 'dead_letter' as argument")
exit(1)
args = args()
main(Mode.from_str(args.mode), args.dry_run)

View File

View File

@@ -1,42 +0,0 @@
import requests
from abc import ABC, abstractmethod
from app.config import EVENT_WEBHOOK, EVENT_WEBHOOK_SKIP_VERIFY_SSL
from app.log import LOG
from app.models import SyncEvent
class EventSink(ABC):
@abstractmethod
def process(self, event: SyncEvent) -> bool:
pass
class HttpEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
if not EVENT_WEBHOOK:
LOG.warning("Skipping sending event because there is no webhook configured")
return False
LOG.info(f"Sending event {event.id} to {EVENT_WEBHOOK}")
res = requests.post(
url=EVENT_WEBHOOK,
data=event.content,
headers={"Content-Type": "application/x-protobuf"},
verify=not EVENT_WEBHOOK_SKIP_VERIFY_SSL,
)
if res.status_code != 200:
LOG.warning(
f"Failed to send event to webhook: {res.status_code} {res.text}"
)
return False
else:
LOG.info(f"Event {event.id} sent successfully to webhook")
return True
class ConsoleEventSink(EventSink):
def process(self, event: SyncEvent) -> bool:
LOG.info(f"Handling event {event.id}")
return True

View File

@@ -1,100 +0,0 @@
import arrow
import newrelic.agent
import psycopg2
import select
from abc import ABC, abstractmethod
from app.log import LOG
from app.models import SyncEvent
from app.events.event_dispatcher import NOTIFICATION_CHANNEL
from time import sleep
from typing import Callable, NoReturn
_DEAD_LETTER_THRESHOLD_MINUTES = 10
_DEAD_LETTER_INTERVAL_SECONDS = 30
_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5
class EventSource(ABC):
@abstractmethod
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
pass
class PostgresEventSource(EventSource):
def __init__(self, connection_string: str):
self.__connection_string = connection_string
self.__connect()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
self.__listen(on_event)
except Exception as e:
LOG.warn(f"Error listening to events: {e}")
sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
self.__connect()
def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
self.__connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
)
cursor = self.__connection.cursor()
cursor.execute(f"LISTEN {NOTIFICATION_CHANNEL};")
while True:
if select.select([self.__connection], [], [], 5) != ([], [], []):
self.__connection.poll()
while self.__connection.notifies:
notify = self.__connection.notifies.pop(0)
LOG.debug(
f"Got NOTIFY: pid={notify.pid} channel={notify.channel} payload={notify.payload}"
)
try:
webhook_id = int(notify.payload)
event = SyncEvent.get_by(id=webhook_id)
if event is not None:
if event.mark_as_taken():
on_event(event)
else:
LOG.info(
f"Event {event.id} was handled by another runner"
)
else:
LOG.info(f"Could not find event with id={notify.payload}")
except Exception as e:
LOG.warn(f"Error getting event: {e}")
def __connect(self):
self.__connection = psycopg2.connect(self.__connection_string)
from app.db import Session
Session.close()
class DeadLetterEventSource(EventSource):
@newrelic.agent.background_task()
def run(self, on_event: Callable[[SyncEvent], NoReturn]):
while True:
try:
threshold = arrow.utcnow().shift(
minutes=-_DEAD_LETTER_THRESHOLD_MINUTES
)
events = SyncEvent.get_dead_letter(older_than=threshold)
if events:
LOG.info(f"Got {len(events)} dead letter events")
if events:
newrelic.agent.record_custom_metric(
"Custom/dead_letter_events_to_process", len(events)
)
for event in events:
on_event(event)
else:
LOG.debug("No dead letter events")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
except Exception as e:
LOG.warn(f"Error getting dead letter event: {e}")
sleep(_DEAD_LETTER_INTERVAL_SECONDS)
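
Note: PostgresEventSource above rides Postgres LISTEN/NOTIFY. A stripped-down standalone loop for reference (the DSN is a placeholder):

```python
# Minimal LISTEN loop mirroring PostgresEventSource; DSN is a placeholder.
import select

import psycopg2

conn = psycopg2.connect("postgresql://localhost:5432/simplelogin")
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute("LISTEN simplelogin_sync_events;")
while True:
    if select.select([conn], [], [], 5) != ([], [], []):
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            print(notify.payload)  # the SyncEvent id published via NOTIFY
```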

View File

@@ -1,42 +0,0 @@
import arrow
import newrelic.agent
from app.log import LOG
from app.models import SyncEvent
from events.event_sink import EventSink
from events.event_source import EventSource
class Runner:
def __init__(self, source: EventSource, sink: EventSink):
self.__source = source
self.__sink = sink
def run(self):
self.__source.run(self.__on_event)
@newrelic.agent.background_task()
def __on_event(self, event: SyncEvent):
try:
event_created_at = event.created_at
start_time = arrow.now()
success = self.__sink.process(event)
if success:
event_id = event.id
SyncEvent.delete(event.id, commit=True)
LOG.info(f"Marked {event_id} as done")
end_time = arrow.now() - start_time
time_between_taken_and_created = start_time - event_created_at
newrelic.agent.record_custom_metric("Custom/sync_event_processed", 1)
newrelic.agent.record_custom_metric(
"Custom/sync_event_process_time", end_time.total_seconds()
)
newrelic.agent.record_custom_metric(
"Custom/sync_event_elapsed_time",
time_between_taken_and_created.total_seconds(),
)
except Exception as e:
LOG.warn(f"Exception processing event [id={event.id}]: {e}")
newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)

View File

@@ -118,7 +118,9 @@ WORDS_FILE_PATH=local_data/test_words.txt
# Login with OIDC
# CONNECT_WITH_OIDC_ICON=fa-github
# OIDC_WELL_KNOWN_URL=to_fill
# OIDC_AUTHORIZATION_URL=to_fill
# OIDC_USER_INFO_URL=to_fill
# OIDC_TOKEN_URL=to_fill
# OIDC_SCOPES=openid email profile
# OIDC_NAME_FIELD=name
# OIDC_CLIENT_ID=to_fill

View File

@@ -15,7 +15,6 @@ from app.email_utils import (
render,
)
from app.import_utils import handle_batch_import
from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.jobs.export_user_data_job import ExportUserDataJob
from app.log import LOG
from app.models import User, Job, BatchImport, Mailbox, CustomDomain, JobState
@@ -198,18 +197,13 @@ def process_job(job: Job):
onboarding_mailbox(user)
elif job.name == config.JOB_ONBOARDING_4:
user_id = job.payload.get("user_id")
user: User = User.get(user_id)
user = User.get(user_id)
# user might delete their account in the meantime
# or disable the notification
if user and user.notification and user.activated:
# if user only has 1 mailbox which is Proton then do not send PGP onboarding email
mailboxes = user.mailboxes()
if len(mailboxes) == 1 and mailboxes[0].is_proton():
LOG.d("Do not send onboarding PGP email to Proton mailbox")
else:
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
elif job.name == config.JOB_BATCH_IMPORT:
batch_import_id = job.payload.get("batch_import_id")
@@ -270,14 +264,8 @@ SimpleLogin team.
user_id = job.payload.get("user_id")
user = User.get(user_id)
if user and user.activated:
LOG.d("Send proton welcome email to user %s", user)
LOG.d("send proton welcome email to user %s", user)
welcome_proton(user)
elif job.name == config.JOB_SEND_ALIAS_CREATION_EVENTS:
user_id = job.payload.get("user_id")
user = User.get(user_id)
if user and user.activated:
LOG.d(f"Sending alias creation events for {user}")
send_alias_creation_events_for_user(user)
else:
LOG.e("Unknown job name %s", job.name)

View File

@@ -1,29 +0,0 @@
"""empty message
Revision ID: fa2f19bb4e5a
Revises: 52510a633d6f
Create Date: 2024-04-09 13:12:26.305340
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fa2f19bb4e5a'
down_revision = '52510a633d6f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('enable_data_breach_check', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'enable_data_breach_check')
# ### end Alembic commands ###

View File

@@ -1,38 +0,0 @@
"""Create sync_event table
Revision ID: 06a9a7133445
Revises: fa2f19bb4e5a
Create Date: 2024-05-17 13:11:20.402259
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '06a9a7133445'
down_revision = 'fa2f19bb4e5a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('sync_event',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.Column('content', sa.LargeBinary(), nullable=False),
sa.Column('taken_time', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_sync_event_created_at'), 'sync_event', ['created_at'], unique=False)
op.create_index(op.f('ix_sync_event_taken_time'), 'sync_event', ['taken_time'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('sync_event')
# ### end Alembic commands ###

View File

@@ -4,7 +4,6 @@ import subprocess
from time import sleep
from typing import List, Dict
import arrow
import newrelic.agent
from app.db import Session
@@ -94,44 +93,11 @@ def log_nb_db_connection():
newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)
@newrelic.agent.background_task()
def log_pending_to_process_events():
r = Session.execute("select count(*) from sync_event WHERE taken_time IS NULL;")
events_pending = list(r)[0][0]
LOG.d("number of events pending to process %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_to_process", events_pending
)
@newrelic.agent.background_task()
def log_events_pending_dead_letter():
since = arrow.now().shift(minutes=-10).datetime
r = Session.execute(
"""
SELECT COUNT(*)
FROM sync_event
WHERE (taken_time IS NOT NULL AND taken_time < :since)
OR (taken_time IS NULL AND created_at < :since)
""",
{"since": since},
)
events_pending = list(r)[0][0]
LOG.d("number of events pending dead letter %s", events_pending)
newrelic.agent.record_custom_metric(
"Custom/sync_events_pending_dead_letter", events_pending
)
if __name__ == "__main__":
exporter = MetricExporter(get_newrelic_license())
while True:
log_postfix_metrics()
log_nb_db_connection()
log_pending_to_process_events()
log_events_pending_dead_letter()
Session.close()
exporter.run()

poetry.lock generated
View File

@@ -2150,22 +2150,24 @@ wcwidth = "*"
[[package]]
name = "protobuf"
version = "5.27.1"
version = "4.24.3"
description = ""
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"},
{file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"},
{file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"},
{file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"},
{file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"},
{file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"},
{file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"},
{file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"},
{file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"},
{file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"},
{file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"},
{file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
{file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
{file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
{file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
{file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
{file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
{file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
{file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
{file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
{file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
{file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
{file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
{file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
]
[[package]]

View File

@@ -1,50 +0,0 @@
syntax = "proto3";
package simplelogin_events;
message UserPlanChanged {
uint32 plan_end_time = 1;
}
message UserDeleted {
}
message AliasCreated {
uint32 alias_id = 1;
string alias_email = 2;
string alias_note = 3;
bool enabled = 4;
}
message AliasStatusChanged {
uint32 alias_id = 1;
string alias_email = 2;
bool enabled = 3;
}
message AliasDeleted {
uint32 alias_id = 1;
string alias_email = 2;
}
message AliasCreatedList {
repeated AliasCreated events = 1;
}
message EventContent {
oneof content {
UserPlanChanged user_plan_change = 1;
UserDeleted user_deleted = 2;
AliasCreated alias_created = 3;
AliasStatusChanged alias_status_change = 4;
AliasDeleted alias_deleted = 5;
AliasCreatedList alias_create_list = 6;
}
}
message Event {
uint32 user_id = 1;
string external_user_id = 2;
uint32 partner_id = 3;
EventContent content = 4;
}
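
Note: the wire format produced from this schema is what sync_event.content stores and the webhook receives. A round-trip sketch with the generated Python classes (field values are illustrative):

```python
# Serialize/parse round trip using the classes generated from event.proto.
from app.events.generated.event_pb2 import AliasCreated, Event, EventContent

evt = Event(
    user_id=1,
    external_user_id="ext-user-1",  # illustrative values
    partner_id=1,
    content=EventContent(
        alias_created=AliasCreated(
            alias_id=10, alias_email="hi@example.com", alias_note="", enabled=True
        )
    ),
)
data = evt.SerializeToString()  # bytes as stored in sync_event.content
parsed = Event.FromString(data)
assert parsed.content.WhichOneof("content") == "alias_created"
```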

View File

@@ -14,14 +14,13 @@ exclude = '''
| build
| dist
| migrations # migrations/ is generated by alembic
| app/events/generated
)/
)
'''
[tool.ruff]
ignore-init-module-imports = true
exclude = [".venv", "migrations", "app/events/generated"]
exclude = [".venv", "migrations"]
[tool.djlint]
indent = 2

View File

@@ -1,24 +0,0 @@
#!/bin/bash
set -euxo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" || exit 1; pwd -P)"
REPO_ROOT=$(echo "${SCRIPT_DIR}" | sed 's:scripts::g')
DEST_DIR="${REPO_ROOT}/app/events/generated"
PROTOC=${PROTOC:-"protoc"}
if ! eval "${PROTOC} --version" &> /dev/null ; then
echo "Cannot find $PROTOC"
exit 1
fi
rm -rf "${DEST_DIR}"
mkdir -p "${DEST_DIR}"
pushd $REPO_ROOT || exit 1
eval "${PROTOC} --proto_path=proto --python_out=\"${DEST_DIR}\" --pyi_out=\"${DEST_DIR}\" proto/event.proto"
popd || exit 1

View File

View File

@@ -1,19 +0,0 @@
import arrow
from app import s3
from app.log import LOG
from app.models import BatchImport
def cleanup_old_imports(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting imports older than {oldest_allowed}")
for batch_import in (
BatchImport.filter(BatchImport.created_at < oldest_allowed).yield_per(500).all()
):
LOG.i(
f"Deleting batch import {batch_import} with file {batch_import.file.path}"
)
file = batch_import.file
if file is not None:
s3.delete(file.path)
BatchImport.delete(batch_import.id, commit=True)

View File

@@ -1,24 +0,0 @@
import arrow
from sqlalchemy import or_, and_
from app import config
from app.db import Session
from app.log import LOG
from app.models import Job, JobState
def cleanup_old_jobs(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting jobs older than {oldest_allowed}")
count = Job.filter(
or_(
Job.state == JobState.done.value,
Job.state == JobState.error.value,
and_(
Job.state == JobState.taken.value,
Job.attempts >= config.JOB_MAX_ATTEMPTS,
),
),
Job.updated_at < oldest_allowed,
).delete()
Session.commit()
LOG.i(f"Deleted {count} jobs")

View File

@@ -1,12 +0,0 @@
import arrow
from app.db import Session
from app.log import LOG
from app.models import Notification
def cleanup_old_notifications(oldest_allowed: arrow.Arrow):
LOG.i(f"Deleting notifications older than {oldest_allowed}")
count = Notification.filter(Notification.created_at < oldest_allowed).delete()
Session.commit()
LOG.i(f"Deleted {count} notifications")

View File

@@ -249,42 +249,6 @@
</div>
</div>
<!-- END Random alias -->
<!-- Data breach check -->
<div class="card" id="data-breach">
<div class="card-body">
<div class="card-title">Data breach monitoring</div>
<div class="mt-1 mb-3">
{% if not current_user.is_premium() %}
<div class="alert alert-info" role="alert">
This feature is only available on Premium plan.
<a href="{{ url_for('dashboard.pricing') }}"
target="_blank"
rel="noopener noreferrer">
Upgrade<i class="fe fe-external-link"></i>
</a>
</div>
{% endif %}
If enabled, we will inform you via email if one of your aliases appears in a data breach.
<br>
SimpleLogin uses <a href="https://haveibeenpwned.com/">HaveIBeenPwned</a> API for checking for data breaches.
</div>
<form method="post" action="#data-breach">
{{ csrf_form.csrf_token }}
<input type="hidden" name="form-name" value="enable_data_breach_check">
<div class="form-check">
<input type="checkbox"
id="enable_data_breach_check"
name="enable_data_breach_check"
{% if current_user.enable_data_breach_check %} checked{% endif %}
class="form-check-input">
<label for="enable_data_breach_check">Enable data breach monitoring</label>
</div>
<button type="submit" class="btn btn-outline-primary">Update</button>
</form>
</div>
</div>
<!-- END Data breach check -->
<!-- Sender Format -->
<div class="card" id="sender-format">
<div class="card-body">
@@ -321,9 +285,7 @@
No Name (i.e. only reverse-alias)
</option>
</select>
<button class="btn btn-outline-primary mt-3">
Update
</button>
<button class="btn btn-outline-primary mt-3">Update</button>
</form>
</div>
</div>
@@ -333,9 +295,7 @@
<div class="card-body">
<div class="card-title">
Reverse Alias Replacement
<div class="badge badge-warning">
Experimental
</div>
<div class="badge badge-warning">Experimental</div>
</div>
<div class="mb-3">
When replying to a forwarded email, the <b>reverse-alias</b> can be automatically included
@@ -352,13 +312,9 @@
name="replace-ra"
{% if current_user.replace_reverse_alias %} checked{% endif %}
class="form-check-input">
<label for="replace-ra">
Enable replacing reverse alias
</label>
<label for="replace-ra">Enable replacing reverse alias</label>
</div>
<button type="submit" class="btn btn-outline-primary">
Update
</button>
<button type="submit" class="btn btn-outline-primary">Update</button>
</form>
</div>
</div>
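
The data breach card removed above says the check is backed by HaveIBeenPwned. A minimal sketch of such a lookup against HIBP's public v3 API (endpoint and header as documented at haveibeenpwned.com/API/v3; the function name and error handling are illustrative, not SimpleLogin's actual cron code):

```
import requests

def alias_in_breach(alias_email: str, api_key: str) -> bool:
    # HIBP answers 200 with a breach list, or 404 if the address is unknown.
    r = requests.get(
        f"https://haveibeenpwned.com/api/v3/breachedaccount/{alias_email}",
        headers={"hibp-api-key": api_key},
        timeout=10,
    )
    return r.status_code == 200
```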

View File

@@ -48,16 +48,15 @@
{# SIWSL#}
{# </a>#}
{# </li>#}
{% if current_user.should_show_app_page() %}
<li class="nav-item">
<a href="{{ url_for('dashboard.app_route') }}"
class="nav-link {{ 'active' if active_page == 'app' }}">
<i class="fe fe-grid"></i>
Apps
</a>
</li>
{% endif %}
{# {% if current_user.should_show_app_page() %}#}
{# <li class="nav-item">#}
{# <a href="{{ url_for('dashboard.app_route') }}"#}
{# class="nav-link {{ 'active' if active_page == 'app' }}">#}
{# <i class="fe fe-grid"></i>#}
{# Apps#}
{# </a>#}
{# </li>#}
{# {% endif %}#}
<li class="nav-item">
<a href="{{ url_for('dashboard.setting') }}"
class="nav-link {{ 'active' if active_page == 'setting' }}">

View File

@@ -1,5 +1,5 @@
from app import config
from flask import url_for, session
from flask import url_for
from urllib.parse import parse_qs
from urllib3.util import parse_url
from app.auth.views.oidc import create_user
@@ -10,21 +10,7 @@ from app.models import User
from app.config import URL, OIDC_CLIENT_ID
mock_well_known_response = {
"authorization_endpoint": "http://localhost:7777/authorization-endpoint",
"userinfo_endpoint": "http://localhost:7777/userinfo-endpoint",
"token_endpoint": "http://localhost:7777/token-endpoint",
}
@patch("requests.get")
def test_oidc_login(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
def test_oidc_login(flask_client):
r = flask_client.get(
url_for("auth.oidc_login"),
follow_redirects=False,
@@ -42,41 +28,8 @@ def test_oidc_login(mock_get, flask_client):
assert expected_redirect_url == query["redirect_uri"][0]
@patch("requests.get")
def test_oidc_login_next_url(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
with flask_client:
r = flask_client.get(
url_for("auth.oidc_login", next="/dashboard/settings/"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
query = parse_qs(parsed.query)
expected_redirect_url = f"{URL}/auth/oidc/callback"
assert "code" == query["response_type"][0]
assert OIDC_CLIENT_ID == query["client_id"][0]
assert expected_redirect_url == query["redirect_uri"][0]
assert session["oauth_redirect_next"] == "/dashboard/settings/"
@patch("requests.get")
def test_oidc_login_no_client_id(mock_get, flask_client):
def test_oidc_login_no_client_id(flask_client):
config.OIDC_CLIENT_ID = None
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get(
url_for("auth.oidc_login"),
@@ -94,14 +47,8 @@ def test_oidc_login_no_client_id(mock_get, flask_client):
config.OIDC_CLIENT_ID = "to_fill"
@patch("requests.get")
def test_oidc_login_no_client_secret(mock_get, flask_client):
def test_oidc_login_no_client_secret(flask_client):
config.OIDC_CLIENT_SECRET = None
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get(
url_for("auth.oidc_login"),
@@ -119,14 +66,9 @@ def test_oidc_login_no_client_secret(mock_get, flask_client):
config.OIDC_CLIENT_SECRET = "to_fill"
@patch("requests.get")
def test_oidc_callback_no_oauth_state(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = None
mock_get.return_value.json.return_value = mock_well_known_response
def test_oidc_callback_no_oauth_state(flask_client):
with flask_client.session_transaction() as session:
session["oauth_state"] = None
r = flask_client.get(
url_for("auth.oidc_callback"),
@@ -136,16 +78,11 @@ def test_oidc_callback_no_oauth_state(mock_get, flask_client):
assert location is None
@patch("requests.get")
def test_oidc_callback_no_client_id(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
def test_oidc_callback_no_client_id(flask_client):
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
config.OIDC_CLIENT_ID = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
@@ -160,20 +97,15 @@ def test_oidc_callback_no_client_id(mock_get, flask_client):
assert expected_redirect_url == parsed.path
config.OIDC_CLIENT_ID = "to_fill"
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests.get")
def test_oidc_callback_no_client_secret(mock_get, flask_client):
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
def test_oidc_callback_no_client_secret(flask_client):
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
config.OIDC_CLIENT_SECRET = None
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
@@ -188,23 +120,16 @@ def test_oidc_callback_no_client_secret(mock_get, flask_client):
assert expected_redirect_url == parsed.path
config.OIDC_CLIENT_SECRET = "to_fill"
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_invalid_user(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
mock_oauth_get.return_value = MockResponse(400, {})
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
def test_oidc_callback_invalid_user(mock_get, mock_fetch_token, flask_client):
mock_get.return_value = MockResponse(400, {})
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
r = flask_client.get(
url_for("auth.oidc_callback"),
@@ -218,59 +143,45 @@ def test_oidc_callback_invalid_user(
expected_redirect_url = "/auth/login"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_no_email(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
mock_oauth_get.return_value = MockResponse(200, {})
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
expected_redirect_url = "/auth/login"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
assert mock_get.called
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_disabled_registration(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
def test_oidc_callback_no_email(mock_get, mock_fetch_token, flask_client):
mock_get.return_value = MockResponse(200, {})
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
expected_redirect_url = "/auth/login"
assert expected_redirect_url == parsed.path
assert mock_get.called
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_disabled_registration(mock_get, mock_fetch_token, flask_client):
config.DISABLE_REGISTRATION = True
email = random_string()
mock_oauth_get.return_value = MockResponse(200, {"email": email})
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
mock_get.return_value = MockResponse(200, {"email": email})
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
r = flask_client.get(
url_for("auth.oidc_callback"),
@@ -284,33 +195,26 @@ def test_oidc_callback_disabled_registration(
expected_redirect_url = "/auth/register"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
assert mock_get.called
config.DISABLE_REGISTRATION = False
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_registration(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
def test_oidc_callback_registration(mock_get, mock_fetch_token, flask_client):
email = random_string()
mock_oauth_get.return_value = MockResponse(
mock_get.return_value = MockResponse(
200,
{
"email": email,
config.OIDC_NAME_FIELD: "name",
},
)
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
user = User.get_by(email=email)
assert user is None
@@ -327,33 +231,28 @@ def test_oidc_callback_registration(
expected_redirect_url = "/dashboard/"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
assert mock_get.called
user = User.get_by(email=email)
assert user is not None
assert user.email == email
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
with flask_client.session_transaction() as session:
session["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_login(mock_oauth_get, mock_fetch_token, mock_get, flask_client):
def test_oidc_callback_login(mock_get, mock_fetch_token, flask_client):
email = random_string()
mock_oauth_get.return_value = MockResponse(
mock_get.return_value = MockResponse(
200,
{
"email": email,
},
)
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = None
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
with flask_client.session_transaction() as session:
session["oauth_state"] = "state"
user = User.create(
email=email,
@@ -376,57 +275,10 @@ def test_oidc_callback_login(mock_oauth_get, mock_fetch_token, mock_get, flask_client):
expected_redirect_url = "/dashboard/"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
assert mock_get.called
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
@patch("requests.get")
@patch("requests_oauthlib.OAuth2Session.fetch_token")
@patch("requests_oauthlib.OAuth2Session.get")
def test_oidc_callback_login_with_next_url(
mock_oauth_get, mock_fetch_token, mock_get, flask_client
):
email = random_string()
mock_oauth_get.return_value = MockResponse(
200,
{
"email": email,
},
)
config.OIDC_WELL_KNOWN_URL = "http://localhost:7777/well-known-url"
with flask_client.session_transaction() as sess:
sess["oauth_redirect_next"] = "/dashboard/settings/"
sess["oauth_state"] = "state"
mock_get.return_value.json.return_value = mock_well_known_response
user = User.create(
email=email,
name="name",
password="",
activated=True,
)
user = User.get_by(email=email)
assert user is not None
r = flask_client.get(
url_for("auth.oidc_callback"),
follow_redirects=False,
)
location = r.headers.get("Location")
assert location is not None
parsed = parse_url(location)
expected_redirect_url = "/dashboard/settings/"
assert expected_redirect_url == parsed.path
assert mock_oauth_get.called
with flask_client.session_transaction() as sess:
sess["oauth_state"] = None
with flask_client.session_transaction() as session:
session["oauth_state"] = None
def test_create_user():

View File

@@ -28,10 +28,9 @@ def test_get_alias_for_free_user_has_no_alias():
assert len(aliases) == 0
def test_get_alias_for_lifetime_with_null_hibp_date():
def test_get_alias_for_lifetime():
user = create_new_user()
user.lifetime = True
user.enable_data_breach_check = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
@@ -40,20 +39,6 @@
assert alias_id == aliases[0].id
def test_get_alias_for_lifetime_with_old_hibp_date():
user = create_new_user()
user.lifetime = True
user.enable_data_breach_check = True
alias = Alias.create_new_random(user)
alias.hibp_last_check = arrow.now().shift(days=-1)
alias_id = alias.id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert alias_id == aliases[0].id
def create_partner_sub(user: User):
pu = PartnerUser.create(
partner_id=get_proton_partner().id,
@@ -99,7 +84,6 @@ sub_generator_list = [
@pytest.mark.parametrize("sub_generator", sub_generator_list)
def test_get_alias_for_sub(sub_generator):
user = create_new_user()
user.enable_data_breach_check = True
sub_generator(user)
alias_id = Alias.create_new_random(user).id
Session.commit()
@@ -130,39 +114,3 @@
cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_already_checked_is_not_checked():
user = create_new_user()
user.lifetime = True
alias = Alias.create_new_random(user)
alias.hibp_last_check = arrow.now().shift(days=1)
alias_id = alias.id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [user.id], alias_id, alias_id + 1)
)
assert len(aliases) == 0
def test_outed_in_user_is_checked():
user = create_new_user()
user.lifetime = True
user.enable_data_breach_check = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 1
def test_outed_out_user_is_not_checked():
user = create_new_user()
user.lifetime = True
alias_id = Alias.create_new_random(user).id
Session.commit()
aliases = list(
cron.get_alias_to_check_hibp(arrow.now(), [], alias_id, alias_id + 1)
)
assert len(aliases) == 0

View File

@@ -1,56 +0,0 @@
from app.events.event_dispatcher import EventDispatcher, Dispatcher
from app.events.generated.event_pb2 import EventContent, UserDeleted
from app.models import PartnerUser, User
from app.proton.utils import get_proton_partner
from tests.utils import create_new_user, random_token
from typing import Tuple
class OnMemoryDispatcher(Dispatcher):
def __init__(self):
self.memory = []
def send(self, event: bytes):
self.memory.append(event)
def _create_unlinked_user() -> User:
return create_new_user()
def _create_linked_user() -> Tuple[User, PartnerUser]:
user = _create_unlinked_user()
partner_user = PartnerUser.create(
partner_id=get_proton_partner().id,
user_id=user.id,
external_user_id=random_token(10),
flush=True,
)
return user, partner_user
def test_event_dispatcher_stores_events():
dispatcher = OnMemoryDispatcher()
(user, partner) = _create_linked_user()
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 1
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 2
def test_event_dispatcher_does_not_send_event_if_user_not_linked():
dispatcher = OnMemoryDispatcher()
user = _create_unlinked_user()
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 0
content = EventContent(user_deleted=UserDeleted())
EventDispatcher.send_event(user, content, dispatcher, skip_if_webhook_missing=False)
assert len(dispatcher.memory) == 0

View File

@@ -1,46 +0,0 @@
from app import config
from app.db import Session
from app.events.event_dispatcher import Dispatcher
from app.events.generated import event_pb2
from app.jobs.event_jobs import send_alias_creation_events_for_user
from app.models import Alias
from tests.utils import create_partner_linked_user
class MemStoreDispatcher(Dispatcher):
def __init__(self):
self.events = []
def send(self, event: bytes):
self.events.append(event)
def setup_module():
config.EVENT_WEBHOOK = True
def teardown_module():
config.EVENT_WEBHOOK = False
def test_send_alias_creation_events():
[user, partner_user] = create_partner_linked_user()
aliases = [Alias.create_new_random(user) for i in range(2)]
Session.flush()
dispatcher = MemStoreDispatcher()
send_alias_creation_events_for_user(user, dispatcher=dispatcher, chunk_size=2)
# 2 batches: the 1st holds the newsletter alias + the first alias, the 2nd holds the last alias
assert len(dispatcher.events) == 2
decoded_event = event_pb2.Event.FromString(dispatcher.events[0])
assert decoded_event.user_id == user.id
assert decoded_event.external_user_id == partner_user.external_user_id
event_list = decoded_event.content.alias_create_list.events
assert len(event_list) == 2
# 0 is newsletter alias
assert event_list[1].alias_id == aliases[0].id
decoded_event = event_pb2.Event.FromString(dispatcher.events[1])
assert decoded_event.user_id == user.id
assert decoded_event.external_user_id == partner_user.external_user_id
event_list = decoded_event.content.alias_create_list.events
assert len(event_list) == 1
assert event_list[0].alias_id == aliases[1].id

View File

@@ -1,35 +0,0 @@
import tempfile
from io import BytesIO
import arrow
from app import s3, config
from app.models import File, BatchImport
from tasks.cleanup_old_imports import cleanup_old_imports
from tests.utils import random_token, create_new_user
def test_cleanup_old_imports():
BatchImport.filter().delete()
with tempfile.TemporaryDirectory() as tmpdir:
config.UPLOAD_DIR = tmpdir
user = create_new_user()
path = random_token()
s3.upload_from_bytesio(path, BytesIO("data".encode("utf-8")))
file = File.create(path=path, commit=True) # noqa: F821
now = arrow.now()
delete_batch_import_id = BatchImport.create(
user_id=user.id,
file_id=file.id,
created_at=now.shift(minutes=-1),
flush=True,
).id
keep_batch_import_id = BatchImport.create(
user_id=user.id,
file_id=file.id,
created_at=now.shift(minutes=+1),
commit=True,
).id
cleanup_old_imports(now)
assert BatchImport.get(id=delete_batch_import_id) is None
assert BatchImport.get(id=keep_batch_import_id) is not None

View File

@@ -1,72 +0,0 @@
import arrow
from app import config
from app.models import Job, JobState
from tasks.cleanup_old_jobs import cleanup_old_jobs
def test_cleanup_old_jobs():
Job.filter().delete()
now = arrow.now()
delete_ids = [
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.done.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.error.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS,
name="",
payload="",
flush=True,
).id,
]
keep_ids = [
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.done.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.error.value,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=+1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS,
name="",
payload="",
flush=True,
).id,
Job.create(
updated_at=now.shift(minutes=-1),
state=JobState.taken.value,
attempts=config.JOB_MAX_ATTEMPTS - 1,
name="",
payload="",
flush=True,
).id,
]
cleanup_old_jobs(now)
for delete_id in delete_ids:
assert Job.get(id=delete_id) is None
for keep_id in keep_ids:
assert Job.get(id=keep_id) is not None

View File

@@ -1,26 +0,0 @@
import arrow
from app.models import Notification
from tasks.cleanup_old_notifications import cleanup_old_notifications
from tests.utils import create_new_user
def test_cleanup_old_notifications():
Notification.filter().delete()
user = create_new_user()
now = arrow.now()
delete_id = Notification.create(
user_id=user.id,
created_at=now.shift(minutes=-1),
message="",
flush=True,
).id
keep_id = Notification.create(
user_id=user.id,
created_at=now.shift(minutes=+1),
message="",
flush=True,
).id
cleanup_old_notifications(now)
assert Notification.get(id=delete_id) is None
assert Notification.get(id=keep_id) is not None

View File

@@ -51,7 +51,9 @@ FACEBOOK_CLIENT_SECRET=to_fill
# Login with OIDC
CONNECT_WITH_OIDC_ICON=fa-github
OIDC_WELL_KNOWN_URL=to_fill
OIDC_AUTHORIZATION_URL=to_fill
OIDC_USER_INFO_URL=to_fill
OIDC_TOKEN_URL=to_fill
OIDC_SCOPES=openid email profile
OIDC_NAME_FIELD=name
OIDC_CLIENT_ID=to_fill
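
The change above trades the single discovery URL for three explicit endpoints. They map onto fields of the provider's OIDC discovery document, exactly the keys the deleted tests mocked in `mock_well_known_response` (the URLs below are illustrative):

```
# Correspondence between the new env vars and discovery-document fields:
well_known = {
    "authorization_endpoint": "https://provider.example/authorize",  # OIDC_AUTHORIZATION_URL
    "token_endpoint": "https://provider.example/token",              # OIDC_TOKEN_URL
    "userinfo_endpoint": "https://provider.example/userinfo",        # OIDC_USER_INFO_URL
}
```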

View File

@@ -384,30 +384,3 @@ def test_break_loop_alias_as_mailbox(flask_client):
msg[headers.SUBJECT] = random_string()
result = email_handler.handle(envelope, msg)
assert result == status.E525
@mail_sender.store_emails_test_decorator
def test_preserve_headers(flask_client):
headers_to_keep = [
headers.SUBJECT,
headers.DATE,
headers.MESSAGE_ID,
headers.REFERENCES,
headers.IN_REPLY_TO,
headers.SL_QUEUE_ID,
] + headers.MIME_HEADERS
user = create_new_user()
alias = Alias.create_new_random(user)
envelope = Envelope()
envelope.mail_from = "somewhere@lo.cal"
envelope.rcpt_tos = [alias.email]
msg = EmailMessage()
for header in headers_to_keep:
msg[header] = header + "keep"
result = email_handler.handle(envelope, msg)
assert result == status.E200
sent_mails = mail_sender.get_stored_emails()
assert len(sent_mails) == 1
msg = sent_mails[0].msg
for header in headers_to_keep:
assert msg[header] == header + "keep"

View File

@@ -17,7 +17,6 @@ from app.models import (
Subscription,
PlanEnum,
PADDLE_SUBSCRIPTION_GRACE_DAYS,
SyncEvent,
)
from tests.utils import login, create_new_user, random_token
@@ -326,51 +325,3 @@ def test_user_can_send_receive():
user.disabled = False
user.delete_on = arrow.now()
assert not user.can_send_or_receive()
def test_sync_event_dead_letter():
# remove all SyncEvents before the test
all_events = SyncEvent.all()
for event in all_events:
SyncEvent.delete(event.id, commit=True)
# create an expired not taken event
e1 = SyncEvent.create(
content=b"content",
created_at=arrow.now().shift(minutes=-15),
taken_time=None,
commit=True,
)
# create an expired taken event (but too long ago)
e2 = SyncEvent.create(
content=b"content",
created_at=arrow.now().shift(minutes=-15),
taken_time=arrow.now().shift(minutes=-14),
commit=True,
)
# create an expired taken event (but recently)
e3 = SyncEvent.create(
content=b"content",
created_at=arrow.now().shift(minutes=-15),
taken_time=arrow.now().shift(minutes=-1),
commit=True,
)
# create a normal event
e4 = SyncEvent.create(
content=b"content",
created_at=arrow.now(),
commit=True,
)
# get dead letter events
dead_letter_events = SyncEvent.get_dead_letter(
older_than=arrow.now().shift(minutes=-10)
)
assert len(dead_letter_events) == 2
assert e1 in dead_letter_events
assert e2 in dead_letter_events
assert e3 not in dead_letter_events
assert e4 not in dead_letter_events

View File

@@ -9,8 +9,7 @@ from typing import Optional, Dict
import jinja2
from flask import url_for
from app.models import User, PartnerUser
from app.proton.utils import get_proton_partner
from app.models import User
from app.utils import random_string
@@ -31,18 +30,6 @@ def create_new_user(email: Optional[str] = None, name: Optional[str] = None) -> User:
return user
def create_partner_linked_user() -> tuple[User, PartnerUser]:
user = create_new_user()
partner_user = PartnerUser.create(
partner_id=get_proton_partner().id,
user_id=user.id,
external_user_id=random_token(10),
flush=True,
)
return user, partner_user
def login(flask_client, user: Optional[User] = None) -> User:
if not user:
user = create_new_user()