Mirror of https://github.com/simple-login/app.git
Synced 2024-11-18 01:40:38 +01:00

Compare commits: master...v20220512 (1 commit)

Commit 220997ce75: 568 changed files with 707197 additions and 48993 deletions

@@ -13,5 +13,4 @@ static/upload
venv/
.venv
.coverage
htmlcov
.git/
htmlcov

4  .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -7,12 +7,12 @@ assignees: ''

---

Please note that this is only for bug report.

For help on your account, please reach out to us at hi[at]simplelogin.io. Please make sure to check out [our FAQ](https://simplelogin.io/faq/) that contains frequently asked questions.

For feature request, you can use our [forum](https://github.com/simple-login/app/discussions/categories/feature-request).

For self-hosted question/issue, please ask in [self-hosted forum](https://github.com/simple-login/app/discussions/categories/self-hosting-question)

23  .github/changelog_configuration.json (vendored)
@@ -1,23 +0,0 @@
{
  "template": "${{CHANGELOG}}\n\n<details>\n<summary>Uncategorized</summary>\n\n${{UNCATEGORIZED}}\n</details>",
  "pr_template": "- ${{TITLE}} #${{NUMBER}}",
  "empty_template": "- no changes",
  "categories": [
    {
      "title": "## 🚀 Features",
      "labels": ["feature"]
    },
    {
      "title": "## 🐛 Fixes",
      "labels": ["fix", "bug"]
    },
    {
      "title": "## 🔧 Enhancements",
      "labels": ["enhancement"]
    }
  ],
  "ignore_labels": ["ignore"],
  "tag_resolver": {
    "method": "semver"
  }
}

385  .github/workflows/main.yml (vendored)
@ -1,244 +1,159 @@
|
|||
name: Test and lint
|
||||
name: Run tests & Publish to Docker Registry
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
types: [ 'opened' ]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
justfortest:
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install poetry
|
||||
run: pipx install poetry
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
cache: 'poetry'
|
||||
|
||||
- name: Install OS dependencies
|
||||
if: ${{ matrix.python-version }} == '3.10'
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y libre2-dev libpq-dev
|
||||
|
||||
- name: Install dependencies
|
||||
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
|
||||
run: poetry install --no-interaction
|
||||
|
||||
- name: Check formatting & linting
|
||||
run: |
|
||||
poetry run pre-commit run --all-files
|
||||
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
max-parallel: 4
|
||||
matrix:
|
||||
python-version: ["3.10"]
|
||||
|
||||
# service containers to run with `postgres-job`
|
||||
services:
|
||||
# label used to access the service container
|
||||
postgres:
|
||||
# Docker Hub image
|
||||
image: postgres:13
|
||||
# service environment variables
|
||||
# `POSTGRES_HOST` is `postgres`
|
||||
env:
|
||||
# optional (defaults to `postgres`)
|
||||
POSTGRES_DB: test
|
||||
# required
|
||||
POSTGRES_PASSWORD: test
|
||||
# optional (defaults to `5432`)
|
||||
POSTGRES_PORT: 5432
|
||||
# optional (defaults to `postgres`)
|
||||
POSTGRES_USER: test
|
||||
ports:
|
||||
- 15432:5432
|
||||
# set health checks to wait until postgres has started
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install poetry
|
||||
run: pipx install poetry
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: 'poetry'
|
||||
|
||||
- name: Install OS dependencies
|
||||
if: ${{ matrix.python-version }} == '3.10'
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y libre2-dev libpq-dev
|
||||
|
||||
- name: Install dependencies
|
||||
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
|
||||
run: poetry install --no-interaction
|
||||
|
||||
|
||||
- name: Start Redis v6
|
||||
uses: superchargejs/redis-github-action@1.1.0
|
||||
with:
|
||||
redis-version: 6
|
||||
|
||||
- name: Run db migration
|
||||
run: |
|
||||
CONFIG=tests/test.env poetry run alembic upgrade head
|
||||
|
||||
- name: Prepare version file
|
||||
run: |
|
||||
scripts/generate-build-info.sh ${{ github.sha }}
|
||||
cat app/build_info.py
|
||||
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
poetry run pytest
|
||||
env:
|
||||
GITHUB_ACTIONS_TEST: true
|
||||
|
||||
- name: Archive code coverage results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: code-coverage-report
|
||||
path: htmlcov
|
||||
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
needs: ['test', 'lint']
|
||||
if: github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v'))
|
||||
|
||||
steps:
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: simplelogin/app-ci
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
# We need to checkout the repository in order for the "Create Sentry release" to work
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Create Sentry release
|
||||
uses: getsentry/action-release@v1
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
|
||||
- name: Prepare version file
|
||||
run: |
|
||||
scripts/generate-build-info.sh ${{ github.sha }}
|
||||
cat app/build_info.py
|
||||
|
||||
- name: Build image and publish to Docker Registry
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
|
||||
|
||||
#- name: Send Telegram message
|
||||
# uses: appleboy/telegram-action@master
|
||||
# with:
|
||||
# to: ${{ secrets.TELEGRAM_TO }}
|
||||
# token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||
# args: Docker image pushed on ${{ github.ref }}
|
||||
|
||||
# If we have generated a tag, generate the changelog, send a notification to slack and create the GitHub release
|
||||
- name: Build Changelog
|
||||
- name: "Build Changelog"
|
||||
id: build_changelog
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
uses: mikepenz/release-changelog-builder-action@v3
|
||||
with:
|
||||
configuration: ".github/changelog_configuration.json"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Prepare Slack notification contents
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
changelog=$(cat << EOH
|
||||
${{ steps.build_changelog.outputs.changelog }}
|
||||
EOH
|
||||
)
|
||||
messageWithoutNewlines=$(echo "${changelog}" | awk '{printf "%s\\n", $0}')
|
||||
messageWithoutDoubleQuotes=$(echo "${messageWithoutNewlines}" | sed "s/\"/'/g")
|
||||
echo "${messageWithoutDoubleQuotes}"
|
||||
|
||||
echo "SLACK_CHANGELOG=${messageWithoutDoubleQuotes}" >> $GITHUB_ENV
|
||||
|
||||
- name: Post notification to Slack
|
||||
uses: slackapi/slack-github-action@v1.19.0
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
with:
|
||||
channel-id: ${{ secrets.SLACK_CHANNEL_ID }}
|
||||
payload: |
|
||||
{
|
||||
"blocks": [
|
||||
{
|
||||
"type": "header",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "New tag created",
|
||||
"emoji": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": "*Tag: ${{ github.ref_name }}* (${{ github.sha }})"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": "*Changelog:*\n${{ env.SLACK_CHANGELOG }}"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
env:
|
||||
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
|
||||
- name: Create GitHub Release
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
uses: actions/create-release@v1
|
||||
with:
|
||||
tag_name: ${{ github.ref }}
|
||||
release_name: ${{ github.ref }}
|
||||
body: ${{ steps.build_changelog.outputs.changelog }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- run: |
|
||||
MSG="${{ steps.github_release.outputs.changelog }}"
|
||||
echo $MSG
|
||||
# test:
|
||||
# runs-on: ubuntu-latest
|
||||
# strategy:
|
||||
# max-parallel: 4
|
||||
# matrix:
|
||||
# python-version: [3.7, "3.10"]
|
||||
#
|
||||
# # service containers to run with `postgres-job`
|
||||
# services:
|
||||
# # label used to access the service container
|
||||
# postgres:
|
||||
# # Docker Hub image
|
||||
# image: postgres:13
|
||||
# # service environment variables
|
||||
# # `POSTGRES_HOST` is `postgres`
|
||||
# env:
|
||||
# # optional (defaults to `postgres`)
|
||||
# POSTGRES_DB: test
|
||||
# # required
|
||||
# POSTGRES_PASSWORD: test
|
||||
# # optional (defaults to `5432`)
|
||||
# POSTGRES_PORT: 5432
|
||||
# # optional (defaults to `postgres`)
|
||||
# POSTGRES_USER: test
|
||||
# ports:
|
||||
# - 15432:5432
|
||||
# # set health checks to wait until postgres has started
|
||||
# options: >-
|
||||
# --health-cmd pg_isready
|
||||
# --health-interval 10s
|
||||
# --health-timeout 5s
|
||||
# --health-retries 5
|
||||
#
|
||||
# steps:
|
||||
# - name: Check out repository
|
||||
# uses: actions/checkout@v2
|
||||
#
|
||||
# - name: Set up Python ${{ matrix.python-version }}
|
||||
# uses: actions/setup-python@v2
|
||||
# with:
|
||||
# python-version: ${{ matrix.python-version }}
|
||||
#
|
||||
# - name: Install poetry
|
||||
# uses: snok/install-poetry@v1
|
||||
# with:
|
||||
# virtualenvs-create: true
|
||||
# virtualenvs-in-project: true
|
||||
# installer-parallel: true
|
||||
#
|
||||
# - name: Run caching
|
||||
# id: cached-poetry-dependencies
|
||||
# uses: actions/cache@v2
|
||||
# with:
|
||||
# path: .venv
|
||||
# key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
|
||||
#
|
||||
# - name: Install OS dependencies
|
||||
# if: ${{ matrix.python-version }} == '3.10'
|
||||
# run: |
|
||||
# sudo apt update
|
||||
# sudo apt install -y libre2-dev libpq-dev
|
||||
#
|
||||
# - name: Install dependencies
|
||||
# if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
|
||||
# run: poetry install --no-interaction --no-root
|
||||
#
|
||||
# - name: Install library
|
||||
# run: poetry install --no-interaction
|
||||
#
|
||||
# - name: Check formatting & linting
|
||||
# run: |
|
||||
# poetry run black --check .
|
||||
# poetry run flake8
|
||||
#
|
||||
# - name: Run db migration
|
||||
# run: |
|
||||
# CONFIG=tests/test.env poetry run alembic upgrade head
|
||||
#
|
||||
# - name: Test with pytest
|
||||
# run: |
|
||||
# poetry run pytest
|
||||
# env:
|
||||
# GITHUB_ACTIONS_TEST: true
|
||||
#
|
||||
# - name: Archive code coverage results
|
||||
# uses: actions/upload-artifact@v2
|
||||
# with:
|
||||
# name: code-coverage-report
|
||||
# path: htmlcov
|
||||
#
|
||||
# build:
|
||||
# runs-on: ubuntu-latest
|
||||
# needs: ['test']
|
||||
# if: github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v'))
|
||||
#
|
||||
# steps:
|
||||
# - name: Docker meta
|
||||
# id: meta
|
||||
# uses: docker/metadata-action@v4
|
||||
# with:
|
||||
# images: simplelogin/app-ci
|
||||
#
|
||||
# - name: Login to Docker Hub
|
||||
# uses: docker/login-action@v2
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKER_USERNAME }}
|
||||
# password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
#
|
||||
# - name: Build image and publish to Docker Registry
|
||||
# uses: docker/build-push-action@v3
|
||||
# with:
|
||||
# push: true
|
||||
# tags: ${{ steps.meta.outputs.tags }}
|
||||
#
|
||||
# # We need to checkout the repository in order for the "Create Sentry release" to work
|
||||
# - name: Checkout repository
|
||||
# uses: actions/checkout@v2
|
||||
#
|
||||
# - name: Create Sentry release
|
||||
# uses: getsentry/action-release@v1
|
||||
# env:
|
||||
# SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
# SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
# SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
#
|
||||
# - name: Send Telegram message
|
||||
# uses: appleboy/telegram-action@master
|
||||
# with:
|
||||
# to: ${{ secrets.TELEGRAM_TO }}
|
||||
# token: ${{ secrets.TELEGRAM_TOKEN }}
|
||||
# args: Docker image pushed on ${{ github.ref }}
|
||||
#
|
||||
# - name: Post notification to Slack
|
||||
# uses: slackapi/slack-github-action@v1.19.0
|
||||
# with:
|
||||
# channel-id: ${{ secrets.SLACK_CHANNEL_ID }}
|
||||
# slack-message: "New tag generated: ${{github.ref}}\nBuild result: ${{ job.status }}"
|
||||
# env:
|
||||
# SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
|
|
2  .gitignore (vendored)
@@ -11,7 +11,7 @@ db.sqlite-journal
static/upload
venv/
.venv
.python-version
.coverage
htmlcov
adhoc
.env.*

@@ -1,24 +1,10 @@
exclude: "(migrations|static/node_modules|static/assets|static/vendor)"
default_language_version:
  python: python3
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.2.0
  - repo: https://github.com/psf/black
    rev: 22.1.0
    hooks:
      - id: check-yaml
      - id: trailing-whitespace
  - repo: https://github.com/Riverside-Healthcare/djLint
    rev: v1.34.1
      - id: black
        language_version: python3.7
  - repo: https://github.com/pycqa/flake8
    rev: 4.0.1
    hooks:
      - id: djlint-jinja
        files: '.*\.html'
        entry: djlint --reformat
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.1.5
    hooks:
      # Run the linter.
      - id: ruff
        args: [ --fix ]
      # Run the formatter.
      - id: ruff-format
      - id: flake8
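
For orientation, a minimal sketch of how these hooks are typically enabled and exercised locally; both commands also appear elsewhere in this diff (CONTRIBUTING.md and the CI workflow), so this only restates them in one place:

```bash
# One-time setup: install the git hooks declared in .pre-commit-config.yaml
poetry run pre-commit install

# Run every configured hook against the whole repository, as the CI lint job does
poetry run pre-commit run --all-files
```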
227  .pylintrc
@ -1,227 +0,0 @@
|
|||
[MASTER]
|
||||
extension-pkg-allow-list=re2
|
||||
|
||||
fail-under=7.0
|
||||
ignore=CVS
|
||||
ignore-paths=migrations
|
||||
ignore-patterns=^\.#
|
||||
jobs=0
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
disable=missing-function-docstring,
|
||||
missing-module-docstring,
|
||||
duplicate-code,
|
||||
#import-error,
|
||||
missing-class-docstring,
|
||||
useless-object-inheritance,
|
||||
use-dict-literal,
|
||||
logging-format-interpolation,
|
||||
consider-using-f-string,
|
||||
unnecessary-comprehension,
|
||||
inconsistent-return-statements,
|
||||
wrong-import-order,
|
||||
line-too-long,
|
||||
invalid-name,
|
||||
global-statement,
|
||||
no-else-return,
|
||||
unspecified-encoding,
|
||||
logging-fstring-interpolation,
|
||||
too-few-public-methods,
|
||||
bare-except,
|
||||
fixme,
|
||||
unnecessary-pass,
|
||||
f-string-without-interpolation,
|
||||
super-init-not-called,
|
||||
unused-argument,
|
||||
ungrouped-imports,
|
||||
too-many-locals,
|
||||
consider-using-with,
|
||||
too-many-statements,
|
||||
consider-using-set-comprehension,
|
||||
unidiomatic-typecheck,
|
||||
useless-else-on-loop,
|
||||
too-many-return-statements,
|
||||
broad-except,
|
||||
protected-access,
|
||||
consider-using-enumerate,
|
||||
too-many-nested-blocks,
|
||||
too-many-branches,
|
||||
simplifiable-if-expression,
|
||||
possibly-unused-variable,
|
||||
pointless-string-statement,
|
||||
wrong-import-position,
|
||||
redefined-outer-name,
|
||||
raise-missing-from,
|
||||
logging-too-few-args,
|
||||
redefined-builtin,
|
||||
too-many-arguments,
|
||||
import-outside-toplevel,
|
||||
redefined-argument-from-local,
|
||||
logging-too-many-args,
|
||||
too-many-instance-attributes,
|
||||
unreachable,
|
||||
no-name-in-module,
|
||||
no-member,
|
||||
consider-using-ternary,
|
||||
too-many-lines,
|
||||
arguments-differ,
|
||||
too-many-public-methods,
|
||||
unused-variable,
|
||||
consider-using-dict-items,
|
||||
consider-using-in,
|
||||
reimported,
|
||||
too-many-boolean-expressions,
|
||||
cyclic-import,
|
||||
not-callable, # (paddle_utils.py) verifier.verify cannot be called (although it can)
|
||||
abstract-method, # (models.py)
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[FORMAT]
|
||||
max-line-length=88
|
||||
single-line-if-stmt=yes
|
1  .version

@@ -1 +0,0 @@
dev

@@ -117,7 +117,7 @@ Add SUPPORT_NAME param to set a support email name.

## [1.0.1] - 2020-01-28

Simplify config file.

## [1.0.0] - 2020-01-22

101  CONTRIBUTING.md
@@ -1,9 +1,9 @@
Thanks for taking the time to contribute! 🎉👍

Before working on a new feature, please get in touch with us at dev[at]simplelogin.io to avoid duplication.
We can also discuss the best way to implement it.

The project uses Flask, Python3.7+ and requires Postgres 12+ as dependency.

## General Architecture

@@ -20,21 +20,21 @@ SimpleLogin backend consists of 2 main components:
## Install dependencies

The project requires:
- Python 3.10 and poetry to manage dependencies
- Python 3.7+ and [poetry](https://python-poetry.org/) to manage dependencies
- Node v10 for front-end.
- Postgres 13+
- Postgres 12+

First, install all dependencies by running the following command.
Feel free to use `virtualenv` or similar tools to isolate development environment.

```bash
poetry sync
poetry install
```

On Mac, sometimes you might need to install some other packages via `brew`:

```bash
brew install pkg-config libffi openssl postgresql@13
brew install pkg-config libffi openssl postgresql
```

You also need to install `gpg` tool, on Mac it can be done with:

@@ -43,37 +43,19 @@
brew install gnupg
```

If you see the `pyre2` package in the error message, you might need to install its dependencies with `brew`.
More info on https://github.com/andreasvc/pyre2

```bash
brew install -s re2 pybind11
```

## Linting and static analysis

We use pre-commit to run all our linting and static analysis checks. Please run

```bash
poetry run pre-commit install
```

To install it in your development environment.

## Run tests

For most tests, you will need to have ``redis`` installed and started on your machine (listening on port 6379).

```bash
sh scripts/run-test.sh
```

You can also run tests using a local Postgres DB to speed things up. This can be done by

- creating an empty test DB and running the database migration by `dropdb test && createdb test && DB_URI=postgresql://localhost:5432/test alembic upgrade head`

- replacing the `DB_URI` in `test.env` file by `DB_URI=postgresql://localhost:5432/test`
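
Put together, a minimal sketch of that local test DB setup, assuming a Postgres server already listens on localhost:5432 (the commands are the ones quoted in the two bullet points above):

```bash
# Recreate an empty test database
dropdb test && createdb test

# Run the migrations against it
DB_URI=postgresql://localhost:5432/test alembic upgrade head

# Finally, set DB_URI=postgresql://localhost:5432/test in the test.env file
```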

## Run the code locally

Install npm packages

@@ -88,16 +70,10 @@ To run the code locally, please create a local setting file based on `example.en
cp example.env .env
```

You need to edit your .env to reflect the postgres exposed port, edit the `DB_URI` to:

```
DB_URI=postgresql://myuser:mypassword@localhost:35432/simplelogin
```

Run the postgres database:

```bash
docker run -e POSTGRES_PASSWORD=mypassword -e POSTGRES_USER=myuser -e POSTGRES_DB=simplelogin -p 15432:5432 postgres:13
docker run -e POSTGRES_PASSWORD=mypassword -e POSTGRES_USER=myuser -e POSTGRES_DB=simplelogin -p 35432:5432 postgres:13
```
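
As an optional sanity check (not part of the original instructions), you can confirm the container answers on the mapped port before starting the server; the connection URI below assumes the `DB_URI` value shown above:

```bash
# Confirm Postgres is reachable on the host port mapped by docker run
psql "postgresql://myuser:mypassword@localhost:35432/simplelogin" -c "SELECT 1;"
```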

To run the server:

@@ -129,15 +105,6 @@ We cannot use the local database to generate migration script as the local datab
It is created via `db.create_all()` (cf `fake_data()` method). This is convenient for development and
unit tests as we don't have to wait for the migration.

## Reset database

There are two scripts to reset your local db to an empty state:

- `scripts/reset_local_db.sh` will reset your development db to the latest migration version and add the development data needed to run the
server.py locally.
- `scripts/reset_test_db.sh` will reset your test db to the latest migration without adding the dev server data to prevent interferring with
the tests.

## Code structure

The repo consists of the three following entry points:

@@ -157,10 +124,10 @@ Here are the small sum-ups of the directory structures and their roles:

## Pull request

The code is formatted using [ruff](https://github.com/astral-sh/ruff), to format the code, simply run
The code is formatted using https://github.com/psf/black, to format the code, simply run

```
poetry run ruff format .
poetry run black .
```

The code is also checked with `flake8`, make sure to run `flake8` before creating the pull request by

@@ -169,17 +136,7 @@ The code is also checked with `flake8`, make sure to run `flake8` before creatin
poetry run flake8
```

For HTML templates, we use `djlint`. Before creating a pull request, please run

```bash
poetry run djlint --check templates
```

If some files aren't properly formatted, you can format all files with

```bash
poetry run djlint --reformat .
```
Nice to have: as we haven't found a good enough HTML code formatter, please reformat any HTML code with PyCharm.

## Test sending email

@@ -218,36 +175,4 @@ python email_handler.py
swaks --to e1@sl.local --from hey@google.com --server 127.0.0.1:20381
```

Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you should see the forwarded email.

## Job runner

Some features require a job handler (such as GDPR data export). To test such feature you need to run the job_runner

```bash
python job_runner.py
```

# Setup for Mac

There are several ways to setup Python and manage the project dependencies on Mac. For info we have successfully used this setup on a Mac silicon:

```bash
# we haven't managed to make python 3.12 work
brew install python3.10

# make sure to update the PATH so python, pip point to Python3
# for us it can be done by adding "export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH" to .zprofile

# Although pipx is the recommended way to install poetry,
# install pipx via brew will automatically install python 3.12
# and poetry will then use python 3.12
# so we recommend using poetry this way instead
curl -sSL https://install.python-poetry.org | python3 -

poetry install

# activate the virtualenv and you should be good to go!
source .venv/bin/activate

```
Now open http://localhost:1080/ (or http://localhost:1080/ for MailHog), you should see the forwarded email.

14  Dockerfile
@@ -2,10 +2,10 @@
FROM node:10.17.0-alpine AS npm
WORKDIR /code
COPY ./static/package*.json /code/static/
RUN cd /code/static && npm ci
RUN cd /code/static && npm install

# Main image
FROM python:3.10
FROM python:3.7

# Keeps Python from generating .pyc files in the container
ENV PYTHONDONTWRITEBYTECODE 1

@@ -13,7 +13,7 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

# Add poetry to PATH
ENV PATH="${PATH}:/root/.local/bin"
ENV PATH="${PATH}:/root/.poetry/bin"

WORKDIR /code

@@ -23,15 +23,15 @@ COPY poetry.lock pyproject.toml ./
# Install and setup poetry
RUN pip install -U pip \
    && apt-get update \
    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
    && curl -sSL https://install.python-poetry.org | python3 - \
    && apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
    && curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - \
    # Remove curl and netcat from the image
    && apt-get purge -y curl netcat-traditional \
    && apt-get purge -y curl netcat \
    # Run poetry
    && poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root \
    # Clear apt cache \
    && apt-get purge -y libre2-dev cmake ninja-build\
    && apt-get purge -y libre2-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
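
For reference, a hedged sketch of building this image locally from the repository root; the tag name is an arbitrary example, not one used by the project:

```bash
# Build the Dockerfile above; adjust the tag to taste
docker build -t simplelogin-app:local .
```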
31  README.md
@@ -15,8 +15,8 @@
<img src="https://img.shields.io/github/license/simple-login/app">
</a>

<a href="https://twitter.com/simplelogin">
<img src="https://img.shields.io/twitter/follow/simplelogin?style=social">
<a href="https://twitter.com/simple_login">
<img src="https://img.shields.io/twitter/follow/simple_login?style=social">
</a>

</p>

@@ -29,12 +29,12 @@

---

Your email address is your **online identity**. When you use the same email address everywhere, you can be easily tracked.
More information on https://simplelogin.io

This README contains instructions on how to self host SimpleLogin.

Once you have your own SimpleLogin instance running, you can change the `API URL` in SimpleLogin's Chrome/Firefox extension, Android/iOS app to your server.

SimpleLogin roadmap is at https://github.com/simple-login/app/projects/1 and our forum at https://github.com/simple-login/app/discussions, feel free to submit new ideas or vote on features.

@@ -74,7 +74,7 @@ Setting up DKIM is highly recommended to reduce the chance your emails ending up
First you need to generate a private and public key for DKIM:

```bash
openssl genrsa -out dkim.key -traditional 1024
openssl genrsa -out dkim.key 1024
openssl rsa -in dkim.key -pubout -out dkim.pub.key
```
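
A small optional helper, not taken from the README: flatten dkim.pub.key into the single-line value that a DKIM TXT record expects.

```bash
# Drop the PEM header/footer lines and join the base64 body into one line
sed '1d;$d' dkim.pub.key | tr -d '\n'
```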
@@ -84,7 +84,7 @@ For email gurus, we have chosen 1024 key length instead of 2048 for DNS simplici

### DNS

Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to force using absolute domain.
Please note that DNS changes could take up to 24 hours to propagate. In practice, it's a lot faster though (~1 minute or so in our test). In DNS setup, we usually use domain with a trailing dot (`.`) at the end to to force using absolute domain.

#### MX record

@@ -334,12 +334,6 @@ smtpd_recipient_restrictions =
permit
```

Check that the ssl certificates `/etc/ssl/certs/ssl-cert-snakeoil.pem` and `/etc/ssl/private/ssl-cert-snakeoil.key` exist. Depending on the linux distribution you are using they may or may not be present. If they are not, you will need to generate them with this command:

```bash
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/private/ssl-cert-snakeoil.key -out /etc/ssl/certs/ssl-cert-snakeoil.pem
```
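
Optionally (an addition, not in the README), you can inspect the generated certificate to confirm it looks sane:

```bash
# Show the self-signed certificate's subject and validity window
openssl x509 -in /etc/ssl/certs/ssl-cert-snakeoil.pem -noout -subject -dates
```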

Create the `/etc/postfix/pgsql-relay-domains.cf` file with the following content.
Make sure that the database config is correctly set, replace `mydomain.com` with your domain, update 'myuser' and 'mypassword' with your postgres credentials.

@@ -380,10 +374,10 @@ sudo systemctl restart postfix
To run SimpleLogin, you need a config file at `$(pwd)/simplelogin.env`. Below is an example that you can use right away, make sure to

- replace `mydomain.com` by your domain,
- set `FLASK_SECRET` to a secret string,
- update 'myuser' and 'mypassword' with your database credentials used in previous step.

All possible parameters can be found in [config example](example.env). Some are optional and are commented out by default.
Some have "dummy" values, fill them up if you want to enable these features (Paddle, AWS, etc).

```.env

@@ -510,14 +504,11 @@ server {
    server_name app.mydomain.com;

    location / {
        proxy_pass http://localhost:7777;
        proxy_set_header Host $host;
        proxy_pass http://localhost:7777;
    }
}
```

Note: If `/etc/nginx/sites-enabled/default` exists, delete it or certbot will fail due to the conflict. The `simplelogin` file should be the only file in `sites-enabled`.

Reload Nginx with the command below

```bash

@@ -541,7 +532,7 @@ exit

Once you've created all your desired login accounts, add these lines to `/simplelogin.env` to disable further registrations:

```.env
```
DISABLE_REGISTRATION=1
DISABLE_ONBOARDING=true
```

@@ -2,9 +2,13 @@

## Supported Versions

We only add security updates to the latest MAJOR.MINOR version of the project. No security updates are backported to previous versions.
If you want be up to date on security patches, make sure your SimpleLogin image is up to date.

## Reporting a Vulnerability

If you want to report a vulnerability, please take a look at our bug bounty program at https://proton.me/security/bug-bounty.
If you've found a security vulnerability, you can disclose it responsibly by sending a summary to security@simplelogin.io.
We will review the potential threat and fix it as fast as we can.

We are incredibly thankful for people who disclose vulnerabilities, unfortunately we do not have a bounty program in place yet.

|
@ -1,392 +0,0 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
import arrow
|
||||
from arrow import Arrow
|
||||
from newrelic import agent
|
||||
from psycopg2.errors import UniqueViolation
|
||||
from sqlalchemy import or_
|
||||
|
||||
from app.db import Session
|
||||
from app.email_utils import send_welcome_email
|
||||
from app.events.event_dispatcher import EventDispatcher
|
||||
from app.events.generated.event_pb2 import UserPlanChanged, EventContent
|
||||
from app.partner_user_utils import create_partner_user, create_partner_subscription
|
||||
from app.utils import sanitize_email, canonicalize_email
|
||||
from app.errors import (
|
||||
AccountAlreadyLinkedToAnotherPartnerException,
|
||||
AccountIsUsingAliasAsEmail,
|
||||
AccountAlreadyLinkedToAnotherUserException,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
PartnerSubscription,
|
||||
Partner,
|
||||
PartnerUser,
|
||||
User,
|
||||
Alias,
|
||||
)
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import random_string
|
||||
|
||||
|
||||
class SLPlanType(Enum):
|
||||
Free = 1
|
||||
Premium = 2
|
||||
|
||||
|
||||
@dataclass
|
||||
class SLPlan:
|
||||
type: SLPlanType
|
||||
expiration: Optional[Arrow]
|
||||
|
||||
|
||||
@dataclass
|
||||
class PartnerLinkRequest:
|
||||
name: str
|
||||
email: str
|
||||
external_user_id: str
|
||||
plan: SLPlan
|
||||
from_partner: bool
|
||||
|
||||
|
||||
@dataclass
|
||||
class LinkResult:
|
||||
user: User
|
||||
strategy: str
|
||||
|
||||
|
||||
def send_user_plan_changed_event(partner_user: PartnerUser) -> Optional[int]:
|
||||
subscription_end = partner_user.user.get_active_subscription_end(
|
||||
include_partner_subscription=False
|
||||
)
|
||||
end_timestamp = None
|
||||
if partner_user.user.lifetime:
|
||||
end_timestamp = arrow.get("2038-01-01").timestamp
|
||||
elif subscription_end:
|
||||
end_timestamp = subscription_end.timestamp
|
||||
event = UserPlanChanged(plan_end_time=end_timestamp)
|
||||
EventDispatcher.send_event(partner_user.user, EventContent(user_plan_change=event))
|
||||
Session.flush()
|
||||
return end_timestamp
|
||||
|
||||
|
||||
def set_plan_for_partner_user(partner_user: PartnerUser, plan: SLPlan):
|
||||
sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
|
||||
if plan.type == SLPlanType.Free:
|
||||
if sub is not None:
|
||||
LOG.i(
|
||||
f"Deleting partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
|
||||
)
|
||||
PartnerSubscription.delete(sub.id)
|
||||
agent.record_custom_event("PlanChange", {"plan": "free"})
|
||||
else:
|
||||
if sub is None:
|
||||
LOG.i(
|
||||
f"Creating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
|
||||
)
|
||||
create_partner_subscription(
|
||||
partner_user=partner_user,
|
||||
expiration=plan.expiration,
|
||||
msg="Upgraded via partner. User did not have a previous partner subscription",
|
||||
)
|
||||
agent.record_custom_event("PlanChange", {"plan": "premium", "type": "new"})
|
||||
else:
|
||||
if sub.end_at != plan.expiration:
|
||||
LOG.i(
|
||||
f"Updating partner_subscription [user_id={partner_user.user_id}] [partner_id={partner_user.partner_id}]"
|
||||
)
|
||||
agent.record_custom_event(
|
||||
"PlanChange", {"plan": "premium", "type": "extension"}
|
||||
)
|
||||
sub.end_at = plan.expiration
|
||||
emit_user_audit_log(
|
||||
user=partner_user.user,
|
||||
action=UserAuditLogAction.SubscriptionExtended,
|
||||
message="Extended partner subscription",
|
||||
)
|
||||
Session.flush()
|
||||
send_user_plan_changed_event(partner_user)
|
||||
Session.commit()
|
||||
|
||||
|
||||
def set_plan_for_user(user: User, plan: SLPlan, partner: Partner):
|
||||
partner_user = PartnerUser.get_by(partner_id=partner.id, user_id=user.id)
|
||||
if partner_user is None:
|
||||
return
|
||||
return set_plan_for_partner_user(partner_user, plan)
|
||||
|
||||
|
||||
def ensure_partner_user_exists_for_user(
|
||||
link_request: PartnerLinkRequest, sl_user: User, partner: Partner
|
||||
) -> PartnerUser:
|
||||
# Find partner_user by user_id
|
||||
res = PartnerUser.get_by(user_id=sl_user.id)
|
||||
if res and res.partner_id != partner.id:
|
||||
raise AccountAlreadyLinkedToAnotherPartnerException()
|
||||
if not res:
|
||||
res = create_partner_user(
|
||||
user=sl_user,
|
||||
partner_id=partner.id,
|
||||
partner_email=link_request.email,
|
||||
external_user_id=link_request.external_user_id,
|
||||
)
|
||||
|
||||
Session.commit()
|
||||
LOG.i(
|
||||
f"Created new partner_user for partner:{partner.id} user:{sl_user.id} external_user_id:{link_request.external_user_id}. PartnerUser.id is {res.id}"
|
||||
)
|
||||
return res
|
||||
|
||||
|
||||
class ClientMergeStrategy(ABC):
|
||||
def __init__(
|
||||
self,
|
||||
link_request: PartnerLinkRequest,
|
||||
user: Optional[User],
|
||||
partner: Partner,
|
||||
):
|
||||
if self.__class__ == ClientMergeStrategy:
|
||||
raise RuntimeError("Cannot directly instantiate a ClientMergeStrategy")
|
||||
self.link_request = link_request
|
||||
self.user = user
|
||||
self.partner = partner
|
||||
|
||||
@abstractmethod
|
||||
def process(self) -> LinkResult:
|
||||
pass
|
||||
|
||||
|
||||
class NewUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
canonical_email = canonicalize_email(self.link_request.email)
|
||||
try:
|
||||
# Will create a new SL User with a random password
|
||||
new_user = User.create(
|
||||
email=canonical_email,
|
||||
name=self.link_request.name,
|
||||
password=random_string(20),
|
||||
activated=True,
|
||||
from_partner=self.link_request.from_partner,
|
||||
)
|
||||
self.create_partner_user(new_user)
|
||||
Session.commit()
|
||||
|
||||
if not new_user.created_by_partner:
|
||||
send_welcome_email(new_user)
|
||||
|
||||
agent.record_custom_event(
|
||||
"PartnerUserCreation", {"partner": self.partner.name}
|
||||
)
|
||||
|
||||
return LinkResult(
|
||||
user=new_user,
|
||||
strategy=self.__class__.__name__,
|
||||
)
|
||||
except UniqueViolation:
|
||||
return self.create_missing_link(canonical_email)
|
||||
|
||||
def create_missing_link(self, canonical_email: str):
|
||||
# If there's a unique key violation due to race conditions try to create only the partner if needed
|
||||
partner_user = PartnerUser.get_by(
|
||||
external_user_id=self.link_request.external_user_id,
|
||||
partner_id=self.partner.id,
|
||||
)
|
||||
if partner_user is None:
|
||||
# Get the user by canonical email and if not by normal email
|
||||
user = User.get_by(email=canonical_email) or User.get_by(
|
||||
email=self.link_request.email
|
||||
)
|
||||
if not user:
|
||||
raise RuntimeError(
|
||||
"Tried to create only partner on UniqueViolation but cannot find the user"
|
||||
)
|
||||
partner_user = self.create_partner_user(user)
|
||||
Session.commit()
|
||||
return LinkResult(
|
||||
user=partner_user.user, strategy=ExistingUnlinkedUserStrategy.__name__
|
||||
)
|
||||
|
||||
def create_partner_user(self, new_user: User):
|
||||
partner_user = create_partner_user(
|
||||
user=new_user,
|
||||
partner_id=self.partner.id,
|
||||
external_user_id=self.link_request.external_user_id,
|
||||
partner_email=self.link_request.email,
|
||||
)
|
||||
LOG.i(
|
||||
f"Created new user for login request for partner:{self.partner.id} external_user_id:{self.link_request.external_user_id}. New user {new_user.id} partner_user:{partner_user.id}"
|
||||
)
|
||||
set_plan_for_partner_user(
|
||||
partner_user,
|
||||
self.link_request.plan,
|
||||
)
|
||||
return partner_user
|
||||
|
||||
|
||||
class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
# IF it was scheduled to be deleted. Unschedule it.
|
||||
self.user.delete_on = None
|
||||
partner_user = ensure_partner_user_exists_for_user(
|
||||
self.link_request, self.user, self.partner
|
||||
)
|
||||
set_plan_for_partner_user(partner_user, self.link_request.plan)
|
||||
|
||||
return LinkResult(
|
||||
user=self.user,
|
||||
strategy=self.__class__.__name__,
|
||||
)
|
||||
|
||||
|
||||
class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
|
||||
def process(self) -> LinkResult:
|
||||
raise AccountAlreadyLinkedToAnotherUserException()
|
||||
|
||||
|
||||
def get_login_strategy(
|
||||
link_request: PartnerLinkRequest, user: Optional[User], partner: Partner
|
||||
) -> ClientMergeStrategy:
|
||||
if user is None:
|
||||
# We couldn't find any SimpleLogin user with the requested e-mail
|
||||
return NewUserStrategy(link_request, user, partner)
|
||||
# Check if user is already linked with another partner_user
|
||||
other_partner_user = PartnerUser.get_by(partner_id=partner.id, user_id=user.id)
|
||||
if other_partner_user is not None:
|
||||
return LinkedWithAnotherPartnerUserStrategy(link_request, user, partner)
|
||||
# There is a SimpleLogin user with the partner_user's e-mail
|
||||
return ExistingUnlinkedUserStrategy(link_request, user, partner)
|
||||
|
||||
|
||||
def check_alias(email: str):
|
||||
alias = Alias.get_by(email=email)
|
||||
if alias is not None:
|
||||
raise AccountIsUsingAliasAsEmail()
|
||||
|
||||
|
||||
def process_login_case(
|
||||
link_request: PartnerLinkRequest, partner: Partner
|
||||
) -> LinkResult:
|
||||
# Sanitize email just in case
|
||||
link_request.email = sanitize_email(link_request.email)
|
||||
# Try to find a SimpleLogin user registered with that partner user id
|
||||
partner_user = PartnerUser.get_by(
|
||||
partner_id=partner.id, external_user_id=link_request.external_user_id
|
||||
)
|
||||
if partner_user is None:
|
||||
canonical_email = canonicalize_email(link_request.email)
|
||||
# We didn't find any SimpleLogin user registered with that partner user id
|
||||
# Make sure they aren't using an alias as their link email
|
||||
check_alias(link_request.email)
|
||||
check_alias(canonical_email)
|
||||
# Try to find it using the partner's e-mail address
|
||||
users = User.filter(
|
||||
or_(User.email == link_request.email, User.email == canonical_email)
|
||||
).all()
|
||||
if len(users) > 1:
|
||||
user = [user for user in users if user.email == canonical_email][0]
|
||||
elif len(users) == 1:
|
||||
user = users[0]
|
||||
else:
|
||||
user = None
|
||||
return get_login_strategy(link_request, user, partner).process()
|
||||
else:
|
||||
# We found the SL user registered with that partner user id
|
||||
# We're done
|
||||
set_plan_for_partner_user(partner_user, link_request.plan)
|
||||
# It's the same user. No need to do anything
|
||||
return LinkResult(
|
||||
user=partner_user.user,
|
||||
strategy="Link",
|
||||
)
|
||||
|
||||
|
||||
def link_user(
|
||||
link_request: PartnerLinkRequest, current_user: User, partner: Partner
|
||||
) -> LinkResult:
|
||||
# Sanitize email just in case
|
||||
link_request.email = sanitize_email(link_request.email)
|
||||
# If it was scheduled to be deleted. Unschedule it.
|
||||
current_user.delete_on = None
|
||||
partner_user = ensure_partner_user_exists_for_user(
|
||||
link_request, current_user, partner
|
||||
)
|
||||
set_plan_for_partner_user(partner_user, link_request.plan)
|
||||
|
||||
agent.record_custom_event("AccountLinked", {"partner": partner.name})
|
||||
Session.commit()
|
||||
return LinkResult(
|
||||
user=current_user,
|
||||
strategy="Link",
|
||||
)
|
||||
|
||||
|
||||
def switch_already_linked_user(
|
||||
link_request: PartnerLinkRequest, partner_user: PartnerUser, current_user: User
|
||||
):
|
||||
# Find if the user has another link and unlink it
|
||||
other_partner_user = PartnerUser.get_by(
|
||||
user_id=current_user.id,
|
||||
partner_id=partner_user.partner_id,
|
||||
)
|
||||
if other_partner_user is not None:
|
||||
LOG.i(
|
||||
f"Deleting previous partner_user:{other_partner_user.id} from user:{current_user.id}"
|
||||
)
|
||||
|
||||
emit_user_audit_log(
|
||||
user=other_partner_user.user,
|
||||
action=UserAuditLogAction.UnlinkAccount,
|
||||
message=f"Deleting partner_user {other_partner_user.id} (external_user_id={other_partner_user.external_user_id} | partner_email={other_partner_user.partner_email}) from user {current_user.id}, as we received a new link request for the same partner",
|
||||
)
|
||||
PartnerUser.delete(other_partner_user.id)
|
||||
LOG.i(f"Linking partner_user:{partner_user.id} to user:{current_user.id}")
|
||||
# Link this partner_user to the current user
|
||||
emit_user_audit_log(
|
||||
user=partner_user.user,
|
||||
action=UserAuditLogAction.UnlinkAccount,
|
||||
message=f"Unlinking from partner, as user will now be tied to another external account. old=(id={partner_user.user.id} | email={partner_user.user.email}) | new=(id={current_user.id} | email={current_user.email})",
|
||||
)
|
||||
partner_user.user_id = current_user.id
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.LinkAccount,
|
||||
message=f"Linking user {current_user.id} ({current_user.email}) to partner_user:{partner_user.id} (external_user_id={partner_user.external_user_id} | partner_email={partner_user.partner_email})",
|
||||
)
|
||||
# Set plan
|
||||
set_plan_for_partner_user(partner_user, link_request.plan)
|
||||
Session.commit()
|
||||
return LinkResult(
|
||||
user=current_user,
|
||||
strategy="Link",
|
||||
)
|
||||
|
||||
|
||||
def process_link_case(
|
||||
link_request: PartnerLinkRequest,
|
||||
current_user: User,
|
||||
partner: Partner,
|
||||
) -> LinkResult:
|
||||
# Sanitize email just in case
|
||||
link_request.email = sanitize_email(link_request.email)
|
||||
# Try to find a SimpleLogin user linked with this Partner account
|
||||
partner_user = PartnerUser.get_by(
|
||||
partner_id=partner.id, external_user_id=link_request.external_user_id
|
||||
)
|
||||
if partner_user is None:
|
||||
# There is no SL user linked with the partner. Proceed with linking
|
||||
return link_user(link_request, current_user, partner)
|
||||
|
||||
# There is a SL user registered with the partner. Check if is the current one
|
||||
if partner_user.user_id == current_user.id:
|
||||
# Update plan
|
||||
set_plan_for_partner_user(partner_user, link_request.plan)
|
||||
# It's the same user. No need to do anything
|
||||
return LinkResult(
|
||||
user=current_user,
|
||||
strategy="Link",
|
||||
)
|
||||
else:
|
||||
return switch_already_linked_user(link_request, partner_user, current_user)
|
|
@ -1,10 +1,7 @@
|
|||
from __future__ import annotations
|
||||
from typing import Optional, List
|
||||
from typing import Optional
|
||||
|
||||
import arrow
|
||||
import sqlalchemy
|
||||
from flask_admin import BaseView
|
||||
from flask_admin.form import SecureForm
|
||||
from flask_admin.model.template import EndpointLinkRowAction
|
||||
from markupsafe import Markup
|
||||
|
||||
|
@ -16,8 +13,6 @@ from flask_admin.contrib import sqla
|
|||
from flask_login import current_user

from app.db import Session
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import EventContent, UserPlanChanged
from app.models import (
    User,
    ManualSubscription,
@@ -29,38 +24,12 @@ from app.models import (
    ProviderComplaintState,
    Phase,
    ProviderComplaint,
    Alias,
    Newsletter,
    PADDLE_SUBSCRIPTION_GRACE_DAYS,
    Mailbox,
    DeletedAlias,
    DomainDeletedAlias,
    PartnerUser,
    AliasMailbox,
    AliasAuditLog,
    UserAuditLog,
)
from app.newsletter_utils import send_newsletter_to_user, send_newsletter_to_address
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


def _admin_action_formatter(view, context, model, name):
    action_name = AuditLogActionEnum.get_name(model.action)
    return "{} ({})".format(action_name, model.action)


def _admin_date_formatter(view, context, model, name):
    return model.created_at.format()


def _user_upgrade_channel_formatter(view, context, model, name):
    return Markup(model.upgrade_channel)


class SLModelView(sqla.ModelView):
    column_default_sort = ("id", True)
    column_display_pk = True
    page_size = 100

    can_edit = False
    can_create = False

@@ -72,8 +41,7 @@ class SLModelView(sqla.ModelView):

    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        flash("You don't have access to the admin page", "error")
        return redirect(url_for("dashboard.index", next=request.url))
        return redirect(url_for("auth.login", next=request.url))

    def on_model_change(self, form, model, is_created):
        changes = {}

@@ -118,11 +86,10 @@ class SLAdminIndexView(AdminIndexView):
        if not current_user.is_authenticated or not current_user.is_admin:
            return redirect(url_for("auth.login", next=request.url))

        return redirect("/admin/email_search")
        return redirect("/admin/user")


class UserAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["email", "id"]
    column_exclude_list = [
        "salt",

@@ -139,40 +106,6 @@ class UserAdmin(SLModelView):
        ret.insert(0, "upgrade_channel")
        return ret

    column_formatters = {
        "upgrade_channel": _user_upgrade_channel_formatter,
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    @action(
        "disable_user",
        "Disable user",
        "Are you sure you want to disable the selected users?",
    )
    def action_disable_user(self, ids):
        for user in User.filter(User.id.in_(ids)):
            user.disabled = True

            flash(f"Disabled user {user.id}")
            AdminAuditLog.disable_user(current_user.id, user.id)

        Session.commit()

    @action(
        "enable_user",
        "Enable user",
        "Are you sure you want to enable the selected users?",
    )
    def action_enable_user(self, ids):
        for user in User.filter(User.id.in_(ids)):
            user.disabled = False

            flash(f"Enabled user {user.id}")
            AdminAuditLog.enable_user(current_user.id, user.id)

        Session.commit()

    @action(
        "education_upgrade",
        "Education upgrade",

@@ -240,20 +173,6 @@ class UserAdmin(SLModelView):

        Session.commit()

    @action(
        "remove trial",
        "Stop trial period",
        "Remove trial for this user?",
    )
    def stop_trial(self, ids):
        for user in User.filter(User.id.in_(ids)):
            user.trial_end = None

            flash(f"Stopped trial for {user}", "success")
            AdminAuditLog.stop_trial(current_user.id, user.id)

        Session.commit()

    @action(
        "disable_otp_fido",
        "Disable OTP & FIDO",

@@ -277,36 +196,6 @@ class UserAdmin(SLModelView):

        Session.commit()

    @action(
        "stop_paddle_sub",
        "Stop user Paddle subscription",
        "This will stop the current user Paddle subscription so if user doesn't have Proton sub, they will lose all SL benefits immediately",
    )
    def stop_paddle_sub(self, ids):
        for user in User.filter(User.id.in_(ids)):
            sub: Subscription = user.get_paddle_subscription()
            if not sub:
                flash(f"No Paddle sub for {user}", "warning")
                continue

            flash(f"{user} sub will end now, instead of {sub.next_bill_date}", "info")
            sub.next_bill_date = (
                arrow.now().shift(days=-PADDLE_SUBSCRIPTION_GRACE_DAYS).date()
            )

        Session.commit()

    @action(
        "clear_delete_on",
        "Remove scheduled deletion of user",
        "This will remove the scheduled deletion for this users",
    )
    def clean_delete_on(self, ids):
        for user in User.filter(User.id.in_(ids)):
            user.delete_on = None

        Session.commit()

    # @action(
    #     "login_as",
    #     "Login as this user",

@@ -330,7 +219,7 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
            flash(f"user {user} already has a lifetime license", "warning")
            continue

        sub: Subscription = user.get_paddle_subscription()
        sub: Subscription = user.get_subscription()
        if sub and not sub.cancelled:
            flash(
                f"user {user} already has a Paddle license, they have to cancel it first",

@@ -354,100 +243,37 @@ def manual_upgrade(way: str, ids: [int], is_giveaway: bool):
                manual_sub.end_at = manual_sub.end_at.shift(years=1)
            else:
                manual_sub.end_at = arrow.now().shift(years=1, days=1)
            emit_user_audit_log(
                user=user,
                action=UserAuditLogAction.Upgrade,
                message=f"Admin {current_user.email} extended manual subscription to user {user.email}",
            )
            EventDispatcher.send_event(
                user=user,
                content=EventContent(
                    user_plan_change=UserPlanChanged(
                        plan_end_time=manual_sub.end_at.timestamp
                    )
                ),
            )
            flash(f"Subscription extended to {manual_sub.end_at.humanize()}", "success")
        else:
            emit_user_audit_log(
                user=user,
                action=UserAuditLogAction.Upgrade,
                message=f"Admin {current_user.email} created manual subscription to user {user.email}",
            )
            manual_sub = ManualSubscription.create(
                user_id=user.id,
                end_at=arrow.now().shift(years=1, days=1),
                comment=way,
                is_giveaway=is_giveaway,
            )
            EventDispatcher.send_event(
                user=user,
                content=EventContent(
                    user_plan_change=UserPlanChanged(
                        plan_end_time=manual_sub.end_at.timestamp
                    )
                ),
            )
            continue

        flash(f"New {way} manual subscription for {user} is created", "success")
        ManualSubscription.create(
            user_id=user.id,
            end_at=arrow.now().shift(years=1, days=1),
            comment=way,
            is_giveaway=is_giveaway,
        )

        flash(f"New {way} manual subscription for {user} is created", "success")
    Session.commit()


class EmailLogAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id"]
    column_filters = ["id", "user.email", "mailbox.email", "contact.website_email"]

    can_edit = False
    can_create = False

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


class AliasAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.email", "email", "mailbox.email"]
    column_filters = ["id", "user.email", "email", "mailbox.email"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    @action(
        "disable_email_spoofing_check",
        "Disable email spoofing protection",
        "Disable email spoofing protection?",
    )
    def disable_email_spoofing_check_for(self, ids):
        for alias in Alias.filter(Alias.id.in_(ids)):
            if alias.disable_email_spoofing_check:
                flash(
                    f"Email spoofing protection is already disabled on {alias.email}",
                    "warning",
                )
            else:
                alias.disable_email_spoofing_check = True
                flash(
                    f"Email spoofing protection is disabled on {alias.email}", "success"
                )

        Session.commit()


class MailboxAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.email", "email"]
    column_filters = ["id", "user.email", "email"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


# class LifetimeCouponAdmin(SLModelView):
#     can_edit = True

@@ -455,33 +281,28 @@ class MailboxAdmin(SLModelView):


class CouponAdmin(SLModelView):
    form_base_class = SecureForm
    can_edit = False
    can_create = True

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


class ManualSubscriptionAdmin(SLModelView):
    form_base_class = SecureForm
    can_edit = True
    column_searchable_list = ["id", "user.email"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    @action(
        "extend_1y",
        "Extend for 1 year",
        "Extend 1 year more?",
    )
    def extend_1y(self, ids):
        self.__extend_manual_subscription(ids, msg="1 year", years=1)
        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
            ms.end_at = ms.end_at.shift(years=1)
            flash(f"Extend subscription for 1 year for {ms.user}", "success")
            AdminAuditLog.extend_subscription(
                current_user.id, ms.user.id, ms.end_at, "1 year"
            )

        Session.commit()

    @action(
        "extend_1m",

@@ -489,26 +310,11 @@ class ManualSubscriptionAdmin(SLModelView):
        "Extend 1 month more?",
    )
    def extend_1m(self, ids):
        self.__extend_manual_subscription(ids, msg="1 month", months=1)

    def __extend_manual_subscription(self, ids: List[int], msg: str, **kwargs):
        for ms in ManualSubscription.filter(ManualSubscription.id.in_(ids)):
            sub: ManualSubscription = ms
            sub.end_at = sub.end_at.shift(**kwargs)
            flash(f"Extend subscription for {msg} for {sub.user}", "success")
            emit_user_audit_log(
                user=sub.user,
                action=UserAuditLogAction.Upgrade,
                message=f"Admin {current_user.email} extended manual subscription for {msg} for {sub.user}",
            )
            ms.end_at = ms.end_at.shift(months=1)
            flash(f"Extend subscription for 1 month for {ms.user}", "success")
            AdminAuditLog.extend_subscription(
                current_user.id, sub.user.id, sub.end_at, msg
            )
            EventDispatcher.send_event(
                user=sub.user,
                content=EventContent(
                    user_plan_change=UserPlanChanged(plan_end_time=sub.end_at.timestamp)
                ),
                current_user.id, ms.user.id, ms.end_at, "1 month"
            )

        Session.commit()

@@ -521,27 +327,15 @@ class ManualSubscriptionAdmin(SLModelView):


class CustomDomainAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["domain", "user.email", "user.id"]
    column_exclude_list = ["ownership_txt_token"]
    can_edit = False

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }


class ReferralAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.email", "code", "name"]
    column_filters = ["id", "user.email", "code", "name"]

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
    }

    def scaffold_list_columns(self):
        ret = super().scaffold_list_columns()
        ret.insert(0, "nb_user")

@@ -557,8 +351,16 @@ class ReferralAdmin(SLModelView):
# can_delete = True


def _admin_action_formatter(view, context, model, name):
    action_name = AuditLogActionEnum.get_name(model.action)
    return "{} ({})".format(action_name, model.action)


def _admin_created_at_formatter(view, context, model, name):
    return model.created_at.format()


class AdminAuditLogAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["admin.id", "admin.email", "model_id", "created_at"]
    column_filters = ["admin.id", "admin.email", "model_id", "created_at"]
    column_exclude_list = ["id"]

@@ -569,8 +371,7 @@ class AdminAuditLogAdmin(SLModelView):

    column_formatters = {
        "action": _admin_action_formatter,
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
        "created_at": _admin_created_at_formatter,
    }


@@ -590,7 +391,6 @@ def _transactionalcomplaint_refused_email_id_formatter(view, context, model, nam


class ProviderComplaintAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id", "user.id", "created_at"]
    column_filters = ["user.id", "state"]
    column_hide_backrefs = False

@@ -599,8 +399,8 @@ class ProviderComplaintAdmin(SLModelView):
    can_delete = False

    column_formatters = {
        "created_at": _admin_date_formatter,
        "updated_at": _admin_date_formatter,
        "created_at": _admin_created_at_formatter,
        "updated_at": _admin_created_at_formatter,
        "state": _transactionalcomplaint_state_formatter,
        "phase": _transactionalcomplaint_phase_formatter,
        "refused_email": _transactionalcomplaint_refused_email_id_formatter,

@@ -648,271 +448,3 @@ class ProviderComplaintAdmin(SLModelView):
                )
            },
        )


def _newsletter_plain_text_formatter(view, context, model: Newsletter, name):
    # to display newsletter plain_text with linebreaks in the list view
    return Markup(model.plain_text.replace("\n", "<br>"))


def _newsletter_html_formatter(view, context, model: Newsletter, name):
    # to display newsletter html with linebreaks in the list view
    return Markup(model.html.replace("\n", "<br>"))


class NewsletterAdmin(SLModelView):
    form_base_class = SecureForm
    list_template = "admin/model/newsletter-list.html"
    edit_template = "admin/model/newsletter-edit.html"
    edit_modal = False

    can_edit = True
    can_create = True

    column_formatters = {
        "plain_text": _newsletter_plain_text_formatter,
        "html": _newsletter_html_formatter,
    }

    @action(
        "send_newsletter_to_user",
        "Send this newsletter to myself or the specified userID",
    )
    def send_newsletter_to_user(self, newsletter_ids):
        user_id = request.form["user_id"]
        if user_id:
            user = User.get(user_id)
            if not user:
                flash(f"No such user with ID {user_id}", "error")
                return
        else:
            flash("use the current user", "info")
            user = current_user

        for newsletter_id in newsletter_ids:
            newsletter = Newsletter.get(newsletter_id)
            sent, error_msg = send_newsletter_to_user(newsletter, user)
            if sent:
                flash(f"{newsletter} sent to {user}", "success")
            else:
                flash(error_msg, "error")

    @action(
        "send_newsletter_to_address",
        "Send this newsletter to a specific address",
    )
    def send_newsletter_to_address(self, newsletter_ids):
        to_address = request.form["to_address"]
        if not to_address:
            flash("to_address missing", "error")
            return

        for newsletter_id in newsletter_ids:
            newsletter = Newsletter.get(newsletter_id)
            # use the current_user for rendering email
            sent, error_msg = send_newsletter_to_address(
                newsletter, current_user, to_address
            )
            if sent:
                flash(
                    f"{newsletter} sent to {to_address} with {current_user} context",
                    "success",
                )
            else:
                flash(error_msg, "error")

    @action(
        "clone_newsletter",
        "Clone this newsletter",
    )
    def clone_newsletter(self, newsletter_ids):
        if len(newsletter_ids) != 1:
            flash("you can only select 1 newsletter", "error")
            return

        newsletter_id = newsletter_ids[0]
        newsletter: Newsletter = Newsletter.get(newsletter_id)
        new_newsletter = Newsletter.create(
            subject=newsletter.subject,
            html=newsletter.html,
            plain_text=newsletter.plain_text,
            commit=True,
        )

        flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")


class NewsletterUserAdmin(SLModelView):
    form_base_class = SecureForm
    column_searchable_list = ["id"]
    column_filters = ["id", "user.email", "newsletter.subject"]
    column_exclude_list = ["created_at", "updated_at", "id"]

    can_edit = False
    can_create = False


class DailyMetricAdmin(SLModelView):
    form_base_class = SecureForm
    column_exclude_list = ["created_at", "updated_at", "id"]

    can_export = True


class MetricAdmin(SLModelView):
    form_base_class = SecureForm
    column_exclude_list = ["created_at", "updated_at", "id"]

    can_export = True


class InvalidMailboxDomainAdmin(SLModelView):
    form_base_class = SecureForm
    can_create = True
    can_delete = True


class EmailSearchResult:
    no_match: bool = True
    alias: Optional[Alias] = None
    alias_audit_log: Optional[List[AliasAuditLog]] = None
    mailbox: List[Mailbox] = []
    mailbox_count: int = 0
    deleted_alias: Optional[DeletedAlias] = None
    deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
    domain_deleted_alias: Optional[DomainDeletedAlias] = None
    domain_deleted_alias_audit_log: Optional[List[AliasAuditLog]] = None
    user: Optional[User] = None
    user_audit_log: Optional[List[UserAuditLog]] = None
    query: str

    @staticmethod
    def from_email(email: str) -> EmailSearchResult:
        output = EmailSearchResult()
        output.query = email
        alias = Alias.get_by(email=email)
        if alias:
            output.alias = alias
            output.alias_audit_log = (
                AliasAuditLog.filter_by(alias_id=alias.id)
                .order_by(AliasAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False
        user = User.get_by(email=email)
        if user:
            output.user = user
            output.user_audit_log = (
                UserAuditLog.filter_by(user_id=user.id)
                .order_by(UserAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False

        user_audit_log = (
            UserAuditLog.filter_by(user_email=email)
            .order_by(UserAuditLog.created_at.desc())
            .all()
        )
        if user_audit_log:
            output.user_audit_log = user_audit_log
            output.no_match = False
        mailboxes = (
            Mailbox.filter_by(email=email).order_by(Mailbox.id.desc()).limit(10).all()
        )
        if mailboxes:
            output.mailbox = mailboxes
            output.mailbox_count = Mailbox.filter_by(email=email).count()
            output.no_match = False
        deleted_alias = DeletedAlias.get_by(email=email)
        if deleted_alias:
            output.deleted_alias = deleted_alias
            output.deleted_alias_audit_log = (
                AliasAuditLog.filter_by(alias_email=deleted_alias.email)
                .order_by(AliasAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False
        domain_deleted_alias = DomainDeletedAlias.get_by(email=email)
        if domain_deleted_alias:
            output.domain_deleted_alias = domain_deleted_alias
            output.domain_deleted_alias_audit_log = (
                AliasAuditLog.filter_by(alias_email=domain_deleted_alias.email)
                .order_by(AliasAuditLog.created_at.desc())
                .all()
            )
            output.no_match = False
        return output


class EmailSearchHelpers:
    @staticmethod
    def mailbox_list(user: User) -> list[Mailbox]:
        return (
            Mailbox.filter_by(user_id=user.id)
            .order_by(Mailbox.id.asc())
            .limit(10)
            .all()
        )

    @staticmethod
    def mailbox_count(user: User) -> int:
        return Mailbox.filter_by(user_id=user.id).order_by(Mailbox.id.desc()).count()

    @staticmethod
    def alias_mailboxes(alias: Alias) -> list[Mailbox]:
        return (
            Session.query(Mailbox)
            .filter(Mailbox.id == Alias.mailbox_id, Alias.id == alias.id)
            .union(
                Session.query(Mailbox)
                .join(AliasMailbox, Mailbox.id == AliasMailbox.mailbox_id)
                .filter(AliasMailbox.alias_id == alias.id)
            )
            .order_by(Mailbox.id)
            .limit(10)
            .all()
        )

    @staticmethod
    def alias_mailbox_count(alias: Alias) -> int:
        return len(alias.mailboxes)

    @staticmethod
    def alias_list(user: User) -> list[Alias]:
        return (
            Alias.filter_by(user_id=user.id).order_by(Alias.id.desc()).limit(10).all()
        )

    @staticmethod
    def alias_count(user: User) -> int:
        return Alias.filter_by(user_id=user.id).count()

    @staticmethod
    def partner_user(user: User) -> Optional[PartnerUser]:
        return PartnerUser.get_by(user_id=user.id)


class EmailSearchAdmin(BaseView):
    def is_accessible(self):
        return current_user.is_authenticated and current_user.is_admin

    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        flash("You don't have access to the admin page", "error")
        return redirect(url_for("dashboard.index", next=request.url))

    @expose("/", methods=["GET", "POST"])
    def index(self):
        search = EmailSearchResult()
        email = request.args.get("email")
        if email is not None and len(email) > 0:
            email = email.strip()
            search = EmailSearchResult.from_email(email)

        return self.render(
            "admin/email_search.html",
            email=email,
            data=search,
            helper=EmailSearchHelpers,
        )
@@ -1,38 +0,0 @@
from enum import Enum
from typing import Optional

from app.models import Alias, AliasAuditLog


class AliasAuditLogAction(Enum):
    CreateAlias = "create"
    ChangeAliasStatus = "change_status"
    DeleteAlias = "delete"
    UpdateAlias = "update"

    InitiateTransferAlias = "initiate_transfer_alias"
    AcceptTransferAlias = "accept_transfer_alias"
    TransferredAlias = "transferred_alias"

    ChangedMailboxes = "changed_mailboxes"

    CreateContact = "create_contact"
    UpdateContact = "update_contact"
    DeleteContact = "delete_contact"


def emit_alias_audit_log(
    alias: Alias,
    action: AliasAuditLogAction,
    message: str,
    user_id: Optional[int] = None,
    commit: bool = False,
):
    AliasAuditLog.create(
        user_id=user_id or alias.user_id,
        alias_id=alias.id,
        alias_email=alias.email,
        action=action.value,
        message=message,
        commit=commit,
    )
@@ -1,61 +0,0 @@
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional

from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.db import Session
from app.models import Alias, AliasMailbox, Mailbox

_MAX_MAILBOXES_PER_ALIAS = 20


class CannotSetMailboxesForAliasCause(Enum):
    Forbidden = "Forbidden"
    EmptyMailboxes = "Must choose at least one mailbox"
    TooManyMailboxes = "Too many mailboxes"


@dataclass
class SetMailboxesForAliasResult:
    performed_change: bool
    reason: Optional[CannotSetMailboxesForAliasCause]


def set_mailboxes_for_alias(
    user_id: int, alias: Alias, mailbox_ids: List[int]
) -> Optional[CannotSetMailboxesForAliasCause]:
    if len(mailbox_ids) == 0:
        return CannotSetMailboxesForAliasCause.EmptyMailboxes
    if len(mailbox_ids) > _MAX_MAILBOXES_PER_ALIAS:
        return CannotSetMailboxesForAliasCause.TooManyMailboxes

    mailboxes = (
        Session.query(Mailbox)
        .filter(
            Mailbox.id.in_(mailbox_ids),
            Mailbox.user_id == user_id,
            Mailbox.verified == True,  # noqa: E712
        )
        .all()
    )
    if len(mailboxes) != len(mailbox_ids):
        return CannotSetMailboxesForAliasCause.Forbidden

    # first remove all existing alias-mailboxes links
    AliasMailbox.filter_by(alias_id=alias.id).delete()
    Session.flush()

    # then add all new mailboxes, being the first the one associated with the alias
    for i, mailbox in enumerate(mailboxes):
        if i == 0:
            alias.mailbox_id = mailboxes[0].id
        else:
            AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)

    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.ChangedMailboxes,
        message=",".join([f"{mailbox.id} ({mailbox.email})" for mailbox in mailboxes]),
    )

    return None
@@ -1,192 +0,0 @@
from __future__ import annotations
import json
from dataclasses import asdict, dataclass
from typing import Optional

import itsdangerous
from app import config
from app.log import LOG
from app.models import User, AliasOptions, SLDomain

signer = itsdangerous.TimestampSigner(config.CUSTOM_ALIAS_SECRET)


@dataclass
class AliasSuffix:
    # whether this is a custom domain
    is_custom: bool
    # Suffix
    suffix: str
    # Suffix signature
    signed_suffix: str
    # whether this is a premium SL domain. Not apply to custom domain
    is_premium: bool
    # can be either Custom or SL domain
    domain: str
    # if custom domain, whether the custom domain has MX verified, i.e. can receive emails
    mx_verified: bool = True

    def serialize(self):
        return json.dumps(asdict(self))

    @classmethod
    def deserialize(cls, data: str) -> AliasSuffix:
        return AliasSuffix(**json.loads(data))


def check_suffix_signature(signed_suffix: str) -> Optional[str]:
    # hypothesis: user will click on the button in the 600 secs
    try:
        return signer.unsign(signed_suffix, max_age=600).decode()
    except itsdangerous.BadSignature:
        return None


def verify_prefix_suffix(
    user: User, alias_prefix, alias_suffix, alias_options: Optional[AliasOptions] = None
) -> bool:
    """verify if user could create an alias with the given prefix and suffix"""
    if not alias_prefix or not alias_suffix:  # should be caught on frontend
        return False

    user_custom_domains = [cd.domain for cd in user.verified_custom_domains()]

    # make sure alias_suffix is either .random_word@simplelogin.co or @my-domain.com
    alias_suffix = alias_suffix.strip()
    # alias_domain_prefix is either a .random_word or ""
    alias_domain_prefix, alias_domain = alias_suffix.split("@", 1)

    # alias_domain must be either one of user custom domains or built-in domains
    if alias_domain not in user.available_alias_domains(alias_options=alias_options):
        LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
        return False

    # SimpleLogin domain case:
    # 1) alias_suffix must start with "." and
    # 2) alias_domain_prefix must come from the word list
    available_sl_domains = [
        sl_domain.domain
        for sl_domain in user.get_sl_domains(alias_options=alias_options)
    ]
    if (
        alias_domain in available_sl_domains
        and alias_domain not in user_custom_domains
        # when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
        and not config.DISABLE_ALIAS_SUFFIX
    ):
        if not alias_domain_prefix.startswith("."):
            LOG.i("User %s submits a wrong alias suffix %s", user, alias_suffix)
            return False

    else:
        if alias_domain not in user_custom_domains:
            if not config.DISABLE_ALIAS_SUFFIX:
                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                return False

            if alias_domain not in available_sl_domains:
                LOG.i("wrong alias suffix %s, user %s", alias_suffix, user)
                return False

    return True


def get_alias_suffixes(
    user: User, alias_options: Optional[AliasOptions] = None
) -> [AliasSuffix]:
    """
    Similar to as get_available_suffixes() but also return custom domain that doesn't have MX set up.
    """
    user_custom_domains = user.verified_custom_domains()

    alias_suffixes: [AliasSuffix] = []

    # put custom domain first
    # for each user domain, generate both the domain and a random suffix version
    for custom_domain in user_custom_domains:
        if custom_domain.random_prefix_generation:
            suffix = (
                f".{user.get_random_alias_suffix(custom_domain)}@{custom_domain.domain}"
            )
            alias_suffix = AliasSuffix(
                is_custom=True,
                suffix=suffix,
                signed_suffix=signer.sign(suffix).decode(),
                is_premium=False,
                domain=custom_domain.domain,
                mx_verified=custom_domain.verified,
            )
            if user.default_alias_custom_domain_id == custom_domain.id:
                alias_suffixes.insert(0, alias_suffix)
            else:
                alias_suffixes.append(alias_suffix)

        suffix = f"@{custom_domain.domain}"
        alias_suffix = AliasSuffix(
            is_custom=True,
            suffix=suffix,
            signed_suffix=signer.sign(suffix).decode(),
            is_premium=False,
            domain=custom_domain.domain,
            mx_verified=custom_domain.verified,
        )

        # put the default domain to top
        # only if random_prefix_generation isn't enabled
        if (
            user.default_alias_custom_domain_id == custom_domain.id
            and not custom_domain.random_prefix_generation
        ):
            alias_suffixes.insert(0, alias_suffix)
        else:
            alias_suffixes.append(alias_suffix)

    # then SimpleLogin domain
    sl_domains = user.get_sl_domains(alias_options=alias_options)
    default_domain_found = False
    for sl_domain in sl_domains:
        prefix = (
            "" if config.DISABLE_ALIAS_SUFFIX else f".{user.get_random_alias_suffix()}"
        )
        suffix = f"{prefix}@{sl_domain.domain}"
        alias_suffix = AliasSuffix(
            is_custom=False,
            suffix=suffix,
            signed_suffix=signer.sign(suffix).decode(),
            is_premium=sl_domain.premium_only,
            domain=sl_domain.domain,
            mx_verified=True,
        )
        # No default or this is not the default
        if (
            user.default_alias_public_domain_id is None
            or user.default_alias_public_domain_id != sl_domain.id
        ):
            alias_suffixes.append(alias_suffix)
        else:
            default_domain_found = True
            alias_suffixes.insert(0, alias_suffix)

    if not default_domain_found:
        domain_conditions = {"id": user.default_alias_public_domain_id, "hidden": False}
        if not user.is_premium():
            domain_conditions["premium_only"] = False
        sl_domain = SLDomain.get_by(**domain_conditions)
        if sl_domain:
            prefix = (
                ""
                if config.DISABLE_ALIAS_SUFFIX
                else f".{user.get_random_alias_suffix()}"
            )
            suffix = f"{prefix}@{sl_domain.domain}"
            alias_suffix = AliasSuffix(
                is_custom=False,
                suffix=suffix,
                signed_suffix=signer.sign(suffix).decode(),
                is_premium=sl_domain.premium_only,
                domain=sl_domain.domain,
                mx_verified=True,
            )
            alias_suffixes.insert(0, alias_suffix)

    return alias_suffixes
@@ -1,14 +1,9 @@
import csv
from io import StringIO
import re
from dataclasses import dataclass
from typing import Optional, Tuple

from email_validator import validate_email, EmailNotValidError
from sqlalchemy.exc import IntegrityError, DataError
from flask import make_response

from app.alias_audit_log_utils import AliasAuditLogAction, emit_alias_audit_log
from app.config import (
    BOUNCE_PREFIX_FOR_REPLY_PHASE,
    BOUNCE_PREFIX,

@@ -23,22 +18,11 @@ from app.email_utils import (
    send_cannot_create_directory_alias_disabled,
    get_email_local_part,
    send_cannot_create_domain_alias,
    send_email,
    render,
    sl_formataddr,
)
from app.errors import AliasInTrashError
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import (
    AliasDeleted,
    AliasStatusChanged,
    EventContent,
    AliasCreated,
)
from app.log import LOG
from app.models import (
    Alias,
    AliasDeleteReason,
    CustomDomain,
    Directory,
    User,

@@ -49,8 +33,6 @@ from app.models import (
    EmailLog,
    Contact,
    AutoCreateRule,
    AliasUsedOn,
    ClientUser,
)
from app.regex_utils import regex_match

@@ -67,17 +49,11 @@ def get_user_if_alias_would_auto_create(
        # Prevent addresses with unicode characters (🤯) in them for now.
        validate_email(address, check_deliverability=False, allow_smtputf8=False)
    except EmailNotValidError:
        LOG.i(f"Not creating alias for {address} because email is invalid")
        return None

    domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
        address, notify_user=notify_user
    )
    if DomainDeletedAlias.get_by(email=address):
        LOG.i(
            f"Not creating alias for {address} because it was previously deleted for this domain"
        )
        return None
    if domain_and_rule:
        return domain_and_rule[0].user
    directory = check_if_alias_can_be_auto_created_for_a_directory(

@@ -101,9 +77,6 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
    custom_domain: CustomDomain = CustomDomain.get_by(domain=alias_domain)

    if not custom_domain:
        LOG.i(
            f"Cannot auto-create custom domain alias for {address} because there's no custom domain for {alias_domain}"
        )
        return None

    user: User = custom_domain.user

@@ -112,16 +85,12 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
        return None

    if not user.can_create_new_alias():
        LOG.d(f"{user} can't create new custom-domain alias {address}")
        if notify_user:
            send_cannot_create_domain_alias(custom_domain.user, address, alias_domain)
        return None

    if not custom_domain.catch_all:
        if len(custom_domain.auto_create_rules) == 0:
            LOG.i(
                f"Cannot create alias {address} for domain {custom_domain} because it has no catch-all and no rules"
            )
            return None
        local = get_email_local_part(address)

@@ -135,7 +104,7 @@ def check_if_alias_can_be_auto_created_for_custom_domain(
            )
            return custom_domain, rule
        else:  # no rule passes
            LOG.d(f"No rule matches auto-create {address} for domain {custom_domain}")
            LOG.d("no rule passed to create %s", local)
            return None
    LOG.d("Create alias via catchall")

@@ -162,7 +131,6 @@ def check_if_alias_can_be_auto_created_for_a_directory(
        sep = "#"
    else:
        # if there's no directory separator in the alias, no way to auto-create it
        LOG.info(f"Cannot auto-create {address} since it has no directory separator")
        return None

    directory_name = address[: address.find(sep)]

@@ -170,9 +138,6 @@ def check_if_alias_can_be_auto_created_for_a_directory(

    directory = Directory.get_by(name=directory_name)
    if not directory:
        LOG.info(
            f"Cannot auto-create {address} because there is no directory for {directory_name}"
        )
        return None

    user: User = directory.user

@@ -181,17 +146,11 @@ def check_if_alias_can_be_auto_created_for_a_directory(
        return None

    if not user.can_create_new_alias():
        LOG.d(
            f"{user} can't create new directory alias {address} because user cannot create aliases"
        )
        if notify_user:
            send_cannot_create_directory_alias(user, address, directory_name)
        return None

    if directory.disabled:
        LOG.d(
            f"{user} can't create new directory alias {address} bcause directory is disabled"
        )
        if notify_user:
            send_cannot_create_directory_alias_disabled(user, address, directory_name)
        return None

@@ -333,56 +292,36 @@ def try_auto_create_via_domain(address: str) -> Optional[Alias]:
    return None


def delete_alias(
    alias: Alias,
    user: User,
    reason: AliasDeleteReason = AliasDeleteReason.Unspecified,
    commit: bool = False,
):
def delete_alias(alias: Alias, user: User):
    """
    Delete an alias and add it to either global or domain trash
    Should be used instead of Alias.delete, DomainDeletedAlias.create, DeletedAlias.create
    """
    LOG.i(f"User {user} has deleted alias {alias}")
    # save deleted alias to either global or domain tra
    # save deleted alias to either global or domain trash
    if alias.custom_domain_id:
        if not DomainDeletedAlias.get_by(
            email=alias.email, domain_id=alias.custom_domain_id
        ):
            domain_deleted_alias = DomainDeletedAlias(
                user_id=user.id,
                email=alias.email,
                domain_id=alias.custom_domain_id,
                reason=reason,
            LOG.d("add %s to domain %s trash", alias, alias.custom_domain_id)
            Session.add(
                DomainDeletedAlias(
                    user_id=user.id,
                    email=alias.email,
                    domain_id=alias.custom_domain_id,
                )
            )
            Session.add(domain_deleted_alias)
            Session.commit()
            LOG.i(
                f"Moving {alias} to domain {alias.custom_domain_id} trash {domain_deleted_alias}"
            )

    else:
        if not DeletedAlias.get_by(email=alias.email):
            deleted_alias = DeletedAlias(email=alias.email, reason=reason)
            Session.add(deleted_alias)
            LOG.d("add %s to global trash", alias)
            Session.add(DeletedAlias(email=alias.email))
            Session.commit()
            LOG.i(f"Moving {alias} to global trash {deleted_alias}")

    alias_id = alias.id
    alias_email = alias.email

    emit_alias_audit_log(
        alias, AliasAuditLogAction.DeleteAlias, "Alias deleted by user action"
    )
    LOG.i("delete alias %s", alias)
    Alias.filter(Alias.id == alias.id).delete()
    Session.commit()

    EventDispatcher.send_event(
        user,
        EventContent(alias_deleted=AliasDeleted(id=alias_id, email=alias_email)),
    )
    if commit:
        Session.commit()


def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
    """

@@ -423,176 +362,3 @@ def check_alias_prefix(alias_prefix) -> bool:
        return False

    return True


def alias_export_csv(user, csv_direct_export=False):
    """
    Get user aliases as importable CSV file
    Output:
        Importable CSV file

    """
    data = [["alias", "note", "enabled", "mailboxes"]]
    for alias in Alias.filter_by(user_id=user.id).all():  # type: Alias
        # Always put the main mailbox first
        # It is seen a primary while importing
        alias_mailboxes = alias.mailboxes
        alias_mailboxes.insert(
            0, alias_mailboxes.pop(alias_mailboxes.index(alias.mailbox))
        )

        mailboxes = " ".join([mailbox.email for mailbox in alias_mailboxes])
        data.append([alias.email, alias.note, alias.enabled, mailboxes])

    si = StringIO()
    cw = csv.writer(si)
    cw.writerows(data)
    if csv_direct_export:
        return si.getvalue()
    output = make_response(si.getvalue())
    output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
    output.headers["Content-type"] = "text/csv"
    return output


def transfer_alias(alias: Alias, new_user: User, new_mailboxes: [Mailbox]):
    # cannot transfer alias which is used for receiving newsletter
    if User.get_by(newsletter_alias_id=alias.id):
        raise Exception("Cannot transfer alias that's used to receive newsletter")

    # update user_id
    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
        {"user_id": new_user.id}
    )

    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
        {"user_id": new_user.id}
    )

    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
        {"user_id": new_user.id}
    )

    # remove existing mailboxes from the alias
    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()

    # set mailboxes
    alias.mailbox_id = new_mailboxes.pop().id
    for mb in new_mailboxes:
        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)

    # alias has never been transferred before
    if not alias.original_owner_id:
        alias.original_owner_id = alias.user_id

    # inform previous owner
    old_user = alias.user
    send_email(
        old_user.email,
        f"Alias {alias.email} has been received",
        render(
            "transactional/alias-transferred.txt",
            user=old_user,
            alias=alias,
        ),
        render(
            "transactional/alias-transferred.html",
            user=old_user,
            alias=alias,
        ),
    )

    # now the alias belongs to the new user
    alias.user_id = new_user.id

    # set some fields back to default
    alias.disable_pgp = False
    alias.pinned = False

    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.TransferredAlias,
        message=f"Lost ownership of alias due to alias transfer confirmed. New owner is {new_user.id}",
        user_id=old_user.id,
    )
    EventDispatcher.send_event(
        old_user,
        EventContent(
            alias_deleted=AliasDeleted(
                id=alias.id,
                email=alias.email,
            )
        ),
    )

    emit_alias_audit_log(
        alias=alias,
        action=AliasAuditLogAction.AcceptTransferAlias,
        message=f"Accepted alias transfer from user {old_user.id}",
        user_id=new_user.id,
    )
    EventDispatcher.send_event(
        new_user,
        EventContent(
            alias_created=AliasCreated(
                id=alias.id,
                email=alias.email,
                note=alias.note,
                enabled=alias.enabled,
                created_at=int(alias.created_at.timestamp),
            )
        ),
    )

    Session.commit()


def change_alias_status(
    alias: Alias, enabled: bool, message: Optional[str] = None, commit: bool = False
):
    LOG.i(f"Changing alias {alias} enabled to {enabled}")
    alias.enabled = enabled

    event = AliasStatusChanged(
        id=alias.id,
        email=alias.email,
        enabled=enabled,
        created_at=int(alias.created_at.timestamp),
    )
    EventDispatcher.send_event(alias.user, EventContent(alias_status_change=event))
    audit_log_message = f"Set alias status to {enabled}"
    if message is not None:
        audit_log_message += f". {message}"
    emit_alias_audit_log(
        alias, AliasAuditLogAction.ChangeAliasStatus, audit_log_message
    )

    if commit:
        Session.commit()


@dataclass
class AliasRecipientName:
    name: str
    message: Optional[str] = None


def get_alias_recipient_name(alias: Alias) -> AliasRecipientName:
    """
    Logic:
    1. If alias has name, use it
    2. If alias has custom domain, and custom domain has name, use it
    3. Otherwise, use the alias email as the recipient
    """
    if alias.name:
        return AliasRecipientName(
            name=sl_formataddr((alias.name, alias.email)),
            message=f"Put alias name {alias.name} in from header",
        )
    elif alias.custom_domain:
        if alias.custom_domain.name:
            return AliasRecipientName(
                name=sl_formataddr((alias.custom_domain.name, alias.email)),
                message=f"Put domain default alias name {alias.custom_domain.name} in from header",
            )
    return AliasRecipientName(name=alias.email)
@@ -13,25 +13,4 @@ from .views import (
    setting,
    export,
    phone,
    sudo,
    user,
)

__all__ = [
    "alias_options",
    "new_custom_alias",
    "custom_domain",
    "new_random_alias",
    "user_info",
    "auth",
    "auth_mfa",
    "alias",
    "apple",
    "mailbox",
    "notification",
    "setting",
    "export",
    "phone",
    "sudo",
    "user",
]
@@ -1,5 +1,4 @@
from functools import wraps
from typing import Tuple, Optional

import arrow
from flask import Blueprint, request, jsonify, g

@@ -10,64 +9,30 @@ from app.models import ApiKey

api_bp = Blueprint(name="api", import_name=__name__, url_prefix="/api")

SUDO_MODE_MINUTES_VALID = 5


def authorize_request() -> Optional[Tuple[str, int]]:
    api_code = request.headers.get("Authentication")
    api_key = ApiKey.get_by(code=api_code)

    if not api_key:
        if current_user.is_authenticated:
            # if current_user.is_authenticated and request.headers.get(
            #     constants.HEADER_ALLOW_API_COOKIES
            # ):
            g.user = current_user
        else:
            return jsonify(error="Wrong api key"), 401
    else:
        # Update api key stats
        api_key.last_used = arrow.now()
        api_key.times += 1
        Session.commit()

        g.user = api_key.user

    if g.user.disabled:
        return jsonify(error="Disabled account"), 403

    if not g.user.is_active():
        return jsonify(error="Account does not exist"), 401

    g.api_key = api_key
    return None


def check_sudo_mode_is_active(api_key: ApiKey) -> bool:
    return api_key.sudo_mode_at and g.api_key.sudo_mode_at >= arrow.now().shift(
        minutes=-SUDO_MODE_MINUTES_VALID
    )


def require_api_auth(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        error_return = authorize_request()
        if error_return:
            return error_return
        return f(*args, **kwargs)

    return decorated


def require_api_sudo(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        error_return = authorize_request()
        if error_return:
            return error_return
        if not check_sudo_mode_is_active(g.api_key):
            return jsonify(error="Need sudo"), 440
        api_code = request.headers.get("Authentication")
        api_key = ApiKey.get_by(code=api_code)

        if not api_key:
            # if user is authenticated, the request is authorized
            if current_user.is_authenticated:
                g.user = current_user
            else:
                return jsonify(error="Wrong api key"), 401
        else:
            # Update api key stats
            api_key.last_used = arrow.now()
            api_key.times += 1
            Session.commit()

            g.user = api_key.user

        if g.user.disabled:
            return jsonify(error="Disabled account"), 403

        return f(*args, **kwargs)

    return decorated
@@ -201,10 +201,10 @@ def get_alias_infos_with_pagination_v3(
    q = q.order_by(Alias.pinned.desc())
    q = q.order_by(latest_activity.desc())

    q = q.limit(page_limit).offset(page_id * page_size)
    q = list(q.limit(page_limit).offset(page_id * page_size))

    ret = []
    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in list(q):
    for alias, contact, email_log, nb_reply, nb_blocked, nb_forward in q:
        ret.append(
            AliasInfo(
                alias=alias,

@@ -358,6 +358,7 @@ def construct_alias_query(user: User):
                    else_=0,
                )
            ).label("nb_forward"),
            func.max(EmailLog.created_at).label("latest_email_log_created_at"),
        )
        .join(EmailLog, Alias.id == EmailLog.alias_id, isouter=True)
        .filter(Alias.user_id == user.id)

@@ -365,6 +366,14 @@ def construct_alias_query(user: User):
        .subquery()
    )

    alias_contact_subquery = (
        Session.query(Alias.id, func.max(Contact.id).label("max_contact_id"))
        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
        .filter(Alias.user_id == user.id)
        .group_by(Alias.id)
        .subquery()
    )

    return (
        Session.query(
            Alias,

@@ -376,7 +385,23 @@ def construct_alias_query(user: User):
        )
        .options(joinedload(Alias.hibp_breaches))
        .options(joinedload(Alias.custom_domain))
        .join(EmailLog, Alias.last_email_log_id == EmailLog.id, isouter=True)
        .join(Contact, EmailLog.contact_id == Contact.id, isouter=True)
        .join(Contact, Alias.id == Contact.alias_id, isouter=True)
        .join(EmailLog, Contact.id == EmailLog.contact_id, isouter=True)
        .filter(Alias.id == alias_activity_subquery.c.id)
        .filter(Alias.id == alias_contact_subquery.c.id)
        .filter(
            or_(
                EmailLog.created_at
                == alias_activity_subquery.c.latest_email_log_created_at,
                and_(
                    # no email log yet for this alias
                    alias_activity_subquery.c.latest_email_log_created_at.is_(None),
                    # to make sure only 1 contact is returned in this case
                    or_(
                        Contact.id == alias_contact_subquery.c.max_contact_id,
                        alias_contact_subquery.c.max_contact_id.is_(None),
                    ),
                ),
            )
        )
    )
@@ -1,13 +1,11 @@
from typing import Optional

from deprecated import deprecated
from flanker.addresslib import address
from flanker.addresslib.address import EmailAddress
from flask import g
from flask import jsonify
from flask import request

from app import alias_utils
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_mailbox_utils import set_mailboxes_for_alias
from app.api.base import api_bp, require_api_auth
from app.api.serializer import (
    AliasInfo,

@@ -19,24 +17,20 @@ from app.api.serializer import (
    get_alias_info_v2,
    get_alias_infos_with_pagination_v3,
)
from app.dashboard.views.alias_contact_manager import create_contact
from app.dashboard.views.alias_log import get_alias_log
from app.db import Session
from app.errors import (
    CannotCreateContactForReverseAlias,
    ErrContactErrorUpgradeNeeded,
    ErrContactAlreadyExists,
    ErrAddressInvalid,
from app.email_utils import (
    generate_reply_email,
)
from app.extensions import limiter
from app.errors import CannotCreateContactForReverseAlias
from app.log import LOG
from app.models import Alias, Contact, Mailbox, AliasDeleteReason
from app.models import Alias, Contact, Mailbox, AliasMailbox
from app.utils import sanitize_email


@deprecated
@api_bp.route("/aliases", methods=["GET", "POST"])
@require_api_auth
@limiter.limit("10/minute", key_func=lambda: g.user.id)
def get_aliases():
    """
    Get aliases

@@ -79,7 +73,6 @@ def get_aliases():

@api_bp.route("/v2/aliases", methods=["GET", "POST"])
@require_api_auth
@limiter.limit("50/minute", key_func=lambda: g.user.id)
def get_aliases_v2():
    """
    Get aliases

@@ -165,7 +158,7 @@ def delete_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    alias_utils.delete_alias(alias, user, AliasDeleteReason.ManualAction)
    alias_utils.delete_alias(alias, user)

    return jsonify(deleted=True), 200

@@ -189,12 +182,7 @@ def toggle_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    alias_utils.change_alias_status(
        alias,
        enabled=not alias.enabled,
        message=f"Set enabled={not alias.enabled} via API",
    )
    LOG.i(f"User {user} changed alias {alias} enabled status to {alias.enabled}")
    alias.enabled = not alias.enabled
    Session.commit()

    return jsonify(enabled=alias.enabled), 200

@@ -280,12 +268,10 @@ def update_alias(alias_id):
    if not alias or alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    changed_fields = []
    changed = False
    if "note" in data:
        new_note = data.get("note")
        alias.note = new_note
        changed_fields.append("note")
        changed = True

    if "mailbox_id" in data:

@@ -295,19 +281,35 @@ def update_alias(alias_id):
            return jsonify(error="Forbidden"), 400

        alias.mailbox_id = mailbox_id
        changed_fields.append(f"mailbox_id ({mailbox_id})")
        changed = True

    if "mailbox_ids" in data:
        mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
        err = set_mailboxes_for_alias(
            user_id=user.id, alias=alias, mailbox_ids=mailbox_ids
        )
        if err:
            return jsonify(error=err.value), 400
        mailboxes: [Mailbox] = []

        # check if all mailboxes belong to user
        for mailbox_id in mailbox_ids:
            mailbox = Mailbox.get(mailbox_id)
            if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
                return jsonify(error="Forbidden"), 400
            mailboxes.append(mailbox)

        if not mailboxes:
            return jsonify(error="Must choose at least one mailbox"), 400

        # <<< update alias mailboxes >>>
        # first remove all existing alias-mailboxes links
        AliasMailbox.filter_by(alias_id=alias.id).delete()
        Session.flush()

        # then add all new mailboxes
        for i, mailbox in enumerate(mailboxes):
            if i == 0:
                alias.mailbox_id = mailboxes[0].id
            else:
                AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
        # <<< END update alias mailboxes >>>

        mailbox_ids_string = ",".join(map(str, mailbox_ids))
        changed_fields.append(f"mailbox_ids ({mailbox_ids_string})")
        changed = True

    if "name" in data:

@@ -319,26 +321,17 @@ def update_alias(alias_id):
        if new_name:
            new_name = new_name.replace("\n", "")
        alias.name = new_name
        changed_fields.append("name")
        changed = True

    if "disable_pgp" in data:
        alias.disable_pgp = data.get("disable_pgp")
        changed_fields.append("disable_pgp")
        changed = True

    if "pinned" in data:
        alias.pinned = data.get("pinned")
        changed_fields.append("pinned")
        changed = True

    if changed:
        changed_fields_string = ",".join(changed_fields)
        emit_alias_audit_log(
            alias,
            AliasAuditLogAction.UpdateAlias,
            f"Alias fields updated ({changed_fields_string})",
        )
        Session.commit()

    return jsonify(ok=True), 200

@@ -414,25 +407,50 @@ def create_contact_route(alias_id):
    Output:
        201 if success
        409 if contact already added


    """
    data = request.get_json()
    if not data:
        return jsonify(error="request body cannot be empty"), 400

    alias: Optional[Alias] = Alias.get_by(id=alias_id, user_id=g.user.id)
    if not alias:
    user = g.user
    alias: Alias = Alias.get(alias_id)

    if alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    contact_address = data.get("contact")
    contact_addr = data.get("contact")

    if not contact_addr:
        return jsonify(error="Contact cannot be empty"), 400

    full_address: EmailAddress = address.parse(contact_addr)
    if not full_address:
        return jsonify(error=f"invalid contact email {contact_addr}"), 400

    contact_name, contact_email = full_address.display_name, full_address.address

    contact_email = sanitize_email(contact_email, not_lower=True)

    # already been added
    contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
    if contact:
        return jsonify(**serialize_contact(contact, existed=True)), 200

    try:
        contact = create_contact(alias, contact_address)
    except ErrContactErrorUpgradeNeeded as err:
        return jsonify(error=err.error_for_user()), 403
    except (ErrAddressInvalid, CannotCreateContactForReverseAlias) as err:
        return jsonify(error=err.error_for_user()), 400
    except ErrContactAlreadyExists as err:
        return jsonify(**serialize_contact(err.contact, existed=True)), 200
        contact = Contact.create(
            user_id=alias.user_id,
            alias_id=alias.id,
            website_email=contact_email,
            name=contact_name,
            reply_email=generate_reply_email(contact_email, user),
        )
    except CannotCreateContactForReverseAlias:
        return jsonify(error="You can't create contact for a reverse alias"), 400

    LOG.d("create reverse-alias for %s %s", contact_addr, alias)
    Session.commit()

    return jsonify(**serialize_contact(contact)), 201

@@ -448,16 +466,11 @@ def delete_contact(contact_id):
        200
    """
    user = g.user
    contact: Optional[Contact] = Contact.get(contact_id)
    contact = Contact.get(contact_id)

    if not contact or contact.alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    emit_alias_audit_log(
        alias=contact.alias,
        action=AliasAuditLogAction.DeleteContact,
        message=f"Deleted contact {contact_id} ({contact.email})",
    )
    Contact.delete(contact_id)
    Session.commit()

@@ -475,17 +488,12 @@ def toggle_contact(contact_id):
        200
    """
    user = g.user
    contact: Optional[Contact] = Contact.get(contact_id)
    contact = Contact.get(contact_id)

    if not contact or contact.alias.user_id != user.id:
        return jsonify(error="Forbidden"), 403

    contact.block_forward = not contact.block_forward
    emit_alias_audit_log(
        alias=contact.alias,
        action=AliasAuditLogAction.UpdateContact,
        message=f"Set contact state {contact.id} {contact.email} -> {contact.website_email} to blocked {contact.block_forward}",
    )
    Session.commit()

    return jsonify(block_forward=contact.block_forward), 200
@ -2,8 +2,10 @@ import tldextract
|
|||
from flask import jsonify, request, g
|
||||
from sqlalchemy import desc
|
||||
|
||||
from app.alias_suffix import get_alias_suffixes
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.dashboard.views.custom_alias import (
|
||||
get_available_suffixes,
|
||||
)
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import AliasUsedOn, Alias, User
|
||||
|
@ -66,7 +68,7 @@ def options_v4():
|
|||
prefix_suggestion = convert_to_id(prefix_suggestion)
|
||||
ret["prefix_suggestion"] = prefix_suggestion
|
||||
|
||||
suffixes = get_alias_suffixes(user)
|
||||
suffixes = get_available_suffixes(user)
|
||||
|
||||
# custom domain should be put first
|
||||
ret["suffixes"] = list([suffix.suffix, suffix.signed_suffix] for suffix in suffixes)
|
||||
|
@ -137,7 +139,7 @@ def options_v5():
|
|||
prefix_suggestion = convert_to_id(prefix_suggestion)
|
||||
ret["prefix_suggestion"] = prefix_suggestion
|
||||
|
||||
suffixes = get_alias_suffixes(user)
|
||||
suffixes = get_available_suffixes(user)
|
||||
|
||||
# custom domain should be put first
|
||||
ret["suffixes"] = [
|
||||
|
|
|
@ -9,7 +9,6 @@ from requests import RequestException
|
|||
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.config import APPLE_API_SECRET, MACAPP_APPLE_API_SECRET
|
||||
from app.subscription_webhook import execute_subscription_webhook
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import PlanEnum, AppleSubscription
|
||||
|
@ -17,14 +16,9 @@ from app.models import PlanEnum, AppleSubscription
|
|||
_MONTHLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.monthly"
|
||||
_YEARLY_PRODUCT_ID = "io.simplelogin.ios_app.subscription.premium.yearly"
|
||||
|
||||
# SL Mac app used to be in SL account
|
||||
_MACAPP_MONTHLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.monthly"
|
||||
_MACAPP_YEARLY_PRODUCT_ID = "io.simplelogin.macapp.subscription.premium.yearly"
|
||||
|
||||
# SL Mac app is moved to Proton account
|
||||
_MACAPP_MONTHLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.monthly"
|
||||
_MACAPP_YEARLY_PRODUCT_ID_NEW = "me.proton.simplelogin.macos.premium.yearly"
|
||||
|
||||
# Apple API URL
|
||||
_SANDBOX_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
|
||||
_PROD_URL = "https://buy.itunes.apple.com/verifyReceipt"
|
||||
|
@ -46,17 +40,15 @@ def apple_process_payment():
|
|||
LOG.d("request for /apple/process_payment from %s", user)
|
||||
data = request.get_json()
|
||||
receipt_data = data.get("receipt_data")
|
||||
is_macapp = "is_macapp" in data and data["is_macapp"] is True
|
||||
is_macapp = "is_macapp" in data
|
||||
|
||||
if is_macapp:
|
||||
LOG.d("Use Macapp secret")
|
||||
password = MACAPP_APPLE_API_SECRET
|
||||
else:
|
||||
password = APPLE_API_SECRET
|
||||
|
||||
apple_sub = verify_receipt(receipt_data, user, password)
|
||||
if apple_sub:
|
||||
execute_subscription_webhook(user)
|
||||
return jsonify(ok=True), 200
|
||||
|
||||
return jsonify(error="Processing failed"), 400
|
||||
|
@ -268,11 +260,7 @@ def apple_update_notification():
|
|||
plan = (
|
||||
PlanEnum.monthly
|
||||
if transaction["product_id"]
|
||||
in (
|
||||
_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
|
||||
)
|
||||
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
|
||||
else PlanEnum.yearly
|
||||
)
|
||||
|
||||
|
@ -293,7 +281,6 @@ def apple_update_notification():
|
|||
apple_sub.plan = plan
|
||||
apple_sub.product_id = transaction["product_id"]
|
||||
Session.commit()
|
||||
execute_subscription_webhook(user)
|
||||
return jsonify(ok=True), 200
|
||||
else:
|
||||
LOG.w(
|
||||
|
@ -487,16 +474,14 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
|||
# }
|
||||
|
||||
if data["status"] != 0:
|
||||
LOG.e(
|
||||
LOG.w(
|
||||
"verifyReceipt status !=0, probably invalid receipt. User %s, data %s",
|
||||
user,
|
||||
data,
|
||||
)
|
||||
return None
|
||||
|
||||
# use responseBody.Latest_receipt_info and not responseBody.Receipt.In_app
|
||||
# as recommended on https://developer.apple.com/documentation/appstorereceipts/responsebody/receipt/in_app
|
||||
# each item in data["latest_receipt_info"] has the following format
|
||||
# each item in data["receipt"]["in_app"] has the following format
|
||||
# {
|
||||
# "quantity": "1",
|
||||
# "product_id": "io.simplelogin.ios_app.subscription.premium.monthly",
|
||||
|
@ -515,9 +500,9 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
|||
# "is_trial_period": "false",
|
||||
# "is_in_intro_offer_period": "false",
|
||||
# }
|
||||
transactions = data.get("latest_receipt_info")
|
||||
transactions = data["receipt"]["in_app"]
|
||||
if not transactions:
|
||||
LOG.i("Empty transactions in data %s", data)
|
||||
LOG.w("Empty transactions in data %s", data)
|
||||
return None
|
||||
|
||||
latest_transaction = max(transactions, key=lambda t: int(t["expires_date_ms"]))
|
||||
|
@ -526,11 +511,7 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
|||
plan = (
|
||||
PlanEnum.monthly
|
||||
if latest_transaction["product_id"]
|
||||
in (
|
||||
_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID,
|
||||
_MACAPP_MONTHLY_PRODUCT_ID_NEW,
|
||||
)
|
||||
in (_MONTHLY_PRODUCT_ID, _MACAPP_MONTHLY_PRODUCT_ID)
|
||||
else PlanEnum.yearly
|
||||
)
|
||||
|
||||
|
@ -538,10 +519,9 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
|||
|
||||
if apple_sub:
|
||||
LOG.d(
|
||||
"Update AppleSubscription for user %s, expired at %s (%s), plan %s",
|
||||
"Update AppleSubscription for user %s, expired at %s, plan %s",
|
||||
user,
|
||||
expires_date,
|
||||
expires_date.humanize(),
|
||||
plan,
|
||||
)
|
||||
apple_sub.receipt_data = receipt_data
|
||||
|
@ -570,7 +550,6 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
|
|||
product_id=latest_transaction["product_id"],
|
||||
)
|
||||
|
||||
execute_subscription_webhook(user)
|
||||
Session.commit()
|
||||
|
||||
return apple_sub
|
||||
|
|
|
@ -11,7 +11,7 @@ from itsdangerous import Signer
|
|||
from app import email_utils
|
||||
from app.api.base import api_bp
|
||||
from app.config import FLASK_SECRET, DISABLE_REGISTRATION
|
||||
from app.dashboard.views.account_setting import send_reset_password_email
|
||||
from app.dashboard.views.setting import send_reset_password_email
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
|
@ -23,8 +23,7 @@ from app.events.auth_event import LoginEvent, RegisterEvent
|
|||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import User, ApiKey, SocialAuth, AccountActivation
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import sanitize_email, canonicalize_email
|
||||
from app.utils import sanitize_email
|
||||
|
||||
|
||||
@api_bp.route("/auth/login", methods=["POST"])
|
||||
|
@ -50,17 +49,11 @@ def auth_login():
|
|||
if not data:
|
||||
return jsonify(error="request body cannot be empty"), 400
|
||||
|
||||
email = sanitize_email(data.get("email"))
|
||||
password = data.get("password")
|
||||
device = data.get("device")
|
||||
|
||||
email = data.get("email")
|
||||
if not email:
|
||||
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Email or password incorrect"), 400
|
||||
email = sanitize_email(email)
|
||||
canonical_email = canonicalize_email(email)
|
||||
|
||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||
user = User.filter_by(email=email).first()
|
||||
|
||||
if not user or not user.check_password(password):
|
||||
LoginEvent(LoginEvent.ActionType.failed, LoginEvent.Source.api).send()
|
||||
|
@ -68,11 +61,6 @@ def auth_login():
|
|||
elif user.disabled:
|
||||
LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Account disabled"), 400
|
||||
elif user.delete_on is not None:
|
||||
LoginEvent(
|
||||
LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
|
||||
).send()
|
||||
return jsonify(error="Account scheduled for deletion"), 400
|
||||
elif not user.activated:
|
||||
LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
|
||||
return jsonify(error="Account not activated"), 422
|
||||
|
@ -101,8 +89,7 @@ def auth_register():
|
|||
if not data:
|
||||
return jsonify(error="request body cannot be empty"), 400
|
||||
|
||||
dirty_email = data.get("email")
|
||||
email = canonicalize_email(dirty_email)
|
||||
email = sanitize_email(data.get("email"))
|
||||
password = data.get("password")
|
||||
|
||||
if DISABLE_REGISTRATION:
|
||||
|
@ -123,7 +110,7 @@ def auth_register():
|
|||
return jsonify(error="password too long"), 400
|
||||
|
||||
LOG.d("create user %s", email)
|
||||
user = User.create(email=email, name=dirty_email, password=password)
|
||||
user = User.create(email=email, name="", password=password)
|
||||
Session.flush()
|
||||
|
||||
# create activation code
|
||||
|
@ -134,8 +121,8 @@ def auth_register():
|
|||
send_email(
|
||||
email,
|
||||
"Just one more step to join SimpleLogin",
|
||||
render("transactional/code-activation.txt.jinja2", user=user, code=code),
|
||||
render("transactional/code-activation.html", user=user, code=code),
|
||||
render("transactional/code-activation.txt.jinja2", code=code),
|
||||
render("transactional/code-activation.html", code=code),
|
||||
)
|
||||
|
||||
RegisterEvent(RegisterEvent.ActionType.success, RegisterEvent.Source.api).send()
|
||||
|
@ -161,10 +148,9 @@ def auth_activate():
|
|||
return jsonify(error="request body cannot be empty"), 400
|
||||
|
||||
email = sanitize_email(data.get("email"))
|
||||
canonical_email = canonicalize_email(data.get("email"))
|
||||
code = data.get("code")
|
||||
|
||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||
user = User.get_by(email=email)
|
||||
|
||||
# do not use a different message to avoid exposing existing email
|
||||
if not user or user.activated:
|
||||
|
@ -188,11 +174,6 @@ def auth_activate():
|
|||
|
||||
LOG.d("activate user %s", user)
|
||||
user.activated = True
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.ActivateUser,
|
||||
message=f"User has been activated: {user.email}",
|
||||
)
|
||||
AccountActivation.delete(account_activation.id)
|
||||
Session.commit()
|
||||
|
||||
|
@ -215,9 +196,7 @@ def auth_reactivate():
|
|||
return jsonify(error="request body cannot be empty"), 400
|
||||
|
||||
email = sanitize_email(data.get("email"))
|
||||
canonical_email = canonicalize_email(data.get("email"))
|
||||
|
||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||
user = User.get_by(email=email)
|
||||
|
||||
# do not use a different message to avoid exposing existing email
|
||||
if not user or user.activated:
|
||||
|
@ -236,8 +215,8 @@ def auth_reactivate():
|
|||
send_email(
|
||||
email,
|
||||
"Just one more step to join SimpleLogin",
|
||||
render("transactional/code-activation.txt.jinja2", user=user, code=code),
|
||||
render("transactional/code-activation.html", user=user, code=code),
|
||||
render("transactional/code-activation.txt.jinja2", code=code),
|
||||
render("transactional/code-activation.html", code=code),
|
||||
)
|
||||
|
||||
return jsonify(msg="User needs to confirm their account"), 200
|
||||
|
@ -372,7 +351,7 @@ def auth_payload(user, device) -> dict:
|
|||
|
||||
|
||||
@api_bp.route("/auth/forgot_password", methods=["POST"])
|
||||
@limiter.limit("2/minute")
|
||||
@limiter.limit("10/minute")
|
||||
def forgot_password():
|
||||
"""
|
||||
User forgot password
|
||||
|
@ -388,9 +367,8 @@ def forgot_password():
|
|||
return jsonify(error="request body must contain email"), 400
|
||||
|
||||
email = sanitize_email(data.get("email"))
|
||||
canonical_email = canonicalize_email(data.get("email"))
|
||||
|
||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||
user = User.get_by(email=email)
|
||||
|
||||
if user:
|
||||
send_reset_password_email(user)
|
||||
|
|
|
@ -55,7 +55,7 @@ def auth_mfa():
|
|||
)
|
||||
|
||||
totp = pyotp.TOTP(user.otp_secret)
|
||||
if not totp.verify(mfa_token, valid_window=2):
|
||||
if not totp.verify(mfa_token):
|
||||
send_invalid_totp_login_email(user, "TOTP")
|
||||
return jsonify(error="Wrong TOTP Token"), 400
|
||||
|
||||
|
|
|
@ -2,10 +2,8 @@ from flask import g, request
|
|||
from flask import jsonify
|
||||
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.custom_domain_utils import set_custom_domain_mailboxes
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import CustomDomain, DomainDeletedAlias
|
||||
from app.models import CustomDomain, DomainDeletedAlias, Mailbox, DomainMailbox
|
||||
|
||||
|
||||
def custom_domain_to_dict(custom_domain: CustomDomain):
|
||||
|
@ -102,14 +100,23 @@ def update_custom_domain(custom_domain_id):
|
|||
|
||||
if "mailbox_ids" in data:
|
||||
mailbox_ids = [int(m_id) for m_id in data.get("mailbox_ids")]
|
||||
result = set_custom_domain_mailboxes(user.id, custom_domain, mailbox_ids)
|
||||
if result.success:
|
||||
if mailbox_ids:
|
||||
# check if mailbox is not tempered with
|
||||
mailboxes = []
|
||||
for mailbox_id in mailbox_ids:
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox or mailbox.user_id != user.id or not mailbox.verified:
|
||||
return jsonify(error="Forbidden"), 400
|
||||
mailboxes.append(mailbox)
|
||||
|
||||
# first remove all existing domain-mailboxes links
|
||||
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
|
||||
Session.flush()
|
||||
|
||||
for mailbox in mailboxes:
|
||||
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
|
||||
|
||||
changed = True
|
||||
else:
|
||||
LOG.info(
|
||||
f"Prevented from updating mailboxes [custom_domain_id={custom_domain.id}]: {result.reason.value}"
|
||||
)
|
||||
return jsonify(error="Forbidden"), 400
|
||||
|
||||
if changed:
|
||||
Session.commit()
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
import csv
|
||||
from io import StringIO
|
||||
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.models import Alias, Client, CustomDomain
|
||||
from app.alias_utils import alias_export_csv
|
||||
|
||||
|
||||
@api_bp.route("/export/data", methods=["GET"])
|
||||
|
@ -46,4 +49,24 @@ def export_aliases():
|
|||
Importable CSV file
|
||||
|
||||
"""
|
||||
return alias_export_csv(g.user)
|
||||
user = g.user
|
||||
|
||||
data = [["alias", "note", "enabled", "mailboxes"]]
|
||||
for alias in Alias.filter_by(user_id=user.id).all(): # type: Alias
|
||||
# Always put the main mailbox first
|
||||
# It is seen a primary while importing
|
||||
alias_mailboxes = alias.mailboxes
|
||||
alias_mailboxes.insert(
|
||||
0, alias_mailboxes.pop(alias_mailboxes.index(alias.mailbox))
|
||||
)
|
||||
|
||||
mailboxes = " ".join([mailbox.email for mailbox in alias_mailboxes])
|
||||
data.append([alias.email, alias.note, alias.enabled, mailboxes])
|
||||
|
||||
si = StringIO()
|
||||
cw = csv.writer(si)
|
||||
cw.writerows(data)
|
||||
output = make_response(si.getvalue())
|
||||
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
|
||||
output.headers["Content-type"] = "text/csv"
|
||||
return output
|
||||
|
|
|
@ -1,18 +1,22 @@
|
|||
from smtplib import SMTPRecipientsRefused
|
||||
|
||||
import arrow
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import request
|
||||
|
||||
from app import mailbox_utils
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.config import JOB_DELETE_MAILBOX
|
||||
from app.dashboard.views.mailbox import send_verification_email
|
||||
from app.dashboard.views.mailbox_detail import verify_mailbox_change
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
mailbox_already_used,
|
||||
email_can_be_used_as_mailbox,
|
||||
is_valid_email,
|
||||
)
|
||||
from app.models import Mailbox
|
||||
from app.log import LOG
|
||||
from app.models import Mailbox, Job
|
||||
from app.utils import sanitize_email
|
||||
|
||||
|
||||
|
@ -38,54 +42,68 @@ def create_mailbox():
|
|||
the new mailbox dict
|
||||
"""
|
||||
user = g.user
|
||||
email = request.get_json().get("email")
|
||||
if not email:
|
||||
return jsonify(error="Invalid email"), 400
|
||||
mailbox_email = sanitize_email(request.get_json().get("email"))
|
||||
|
||||
mailbox_email = sanitize_email(email)
|
||||
if not user.is_premium():
|
||||
return jsonify(error=f"Only premium plan can add additional mailbox"), 400
|
||||
|
||||
try:
|
||||
new_mailbox = mailbox_utils.create_mailbox(user, mailbox_email).mailbox
|
||||
except mailbox_utils.MailboxError as e:
|
||||
return jsonify(error=e.msg), 400
|
||||
if not is_valid_email(mailbox_email):
|
||||
return jsonify(error=f"{mailbox_email} invalid"), 400
|
||||
elif mailbox_already_used(mailbox_email, user):
|
||||
return jsonify(error=f"{mailbox_email} already used"), 400
|
||||
elif not email_can_be_used_as_mailbox(mailbox_email):
|
||||
return (
|
||||
jsonify(
|
||||
error=f"{mailbox_email} cannot be used. Please note a mailbox cannot "
|
||||
f"be a disposable email address"
|
||||
),
|
||||
400,
|
||||
)
|
||||
else:
|
||||
new_mailbox = Mailbox.create(email=mailbox_email, user_id=user.id)
|
||||
Session.commit()
|
||||
|
||||
return (
|
||||
jsonify(mailbox_to_dict(new_mailbox)),
|
||||
201,
|
||||
)
|
||||
send_verification_email(user, new_mailbox)
|
||||
|
||||
return (
|
||||
jsonify(mailbox_to_dict(new_mailbox)),
|
||||
201,
|
||||
)
|
||||
|
||||
|
||||
@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["DELETE"])
|
||||
@api_bp.route("/mailboxes/<mailbox_id>", methods=["DELETE"])
|
||||
@require_api_auth
|
||||
def delete_mailbox(mailbox_id):
|
||||
"""
|
||||
Delete mailbox
|
||||
Input:
|
||||
mailbox_id: in url
|
||||
(optional) transfer_aliases_to: in body. Id of the new mailbox for the aliases.
|
||||
If omitted or the value is set to -1,
|
||||
the aliases of the mailbox will be deleted too.
|
||||
Output:
|
||||
200 if deleted successfully
|
||||
|
||||
"""
|
||||
user = g.user
|
||||
data = request.get_json() or {}
|
||||
transfer_mailbox_id = data.get("transfer_aliases_to")
|
||||
if transfer_mailbox_id and int(transfer_mailbox_id) >= 0:
|
||||
transfer_mailbox_id = int(transfer_mailbox_id)
|
||||
else:
|
||||
transfer_mailbox_id = None
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
|
||||
try:
|
||||
mailbox_utils.delete_mailbox(user, mailbox_id, transfer_mailbox_id)
|
||||
except mailbox_utils.MailboxError as e:
|
||||
return jsonify(error=e.msg), 400
|
||||
if not mailbox or mailbox.user_id != user.id:
|
||||
return jsonify(error="Forbidden"), 403
|
||||
|
||||
if mailbox.id == user.default_mailbox_id:
|
||||
return jsonify(error="You cannot delete the default mailbox"), 400
|
||||
|
||||
# Schedule delete account job
|
||||
LOG.w("schedule delete mailbox job for %s", mailbox)
|
||||
Job.create(
|
||||
name=JOB_DELETE_MAILBOX,
|
||||
payload={"mailbox_id": mailbox.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
|
||||
return jsonify(deleted=True), 200
|
||||
|
||||
|
||||
@api_bp.route("/mailboxes/<int:mailbox_id>", methods=["PUT"])
|
||||
@api_bp.route("/mailboxes/<mailbox_id>", methods=["PUT"])
|
||||
@require_api_auth
|
||||
def update_mailbox(mailbox_id):
|
||||
"""
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
from flask import g
|
||||
from flask import jsonify, request
|
||||
from itsdangerous import SignatureExpired
|
||||
|
||||
from app import parallel_limiter
|
||||
from app.alias_suffix import check_suffix_signature, verify_prefix_suffix
|
||||
from app.alias_utils import check_alias_prefix
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.api.serializer import (
|
||||
|
@ -10,6 +9,7 @@ from app.api.serializer import (
|
|||
get_alias_info_v2,
|
||||
)
|
||||
from app.config import MAX_NB_EMAIL_FREE_PLAN, ALIAS_LIMIT
|
||||
from app.dashboard.views.custom_alias import verify_prefix_suffix, signer
|
||||
from app.db import Session
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
|
@ -28,7 +28,6 @@ from app.utils import convert_to_id
|
|||
@api_bp.route("/v2/alias/custom/new", methods=["POST"])
|
||||
@limiter.limit(ALIAS_LIMIT)
|
||||
@require_api_auth
|
||||
@parallel_limiter.lock(name="alias_creation")
|
||||
def new_custom_alias_v2():
|
||||
"""
|
||||
Create a new custom alias
|
||||
|
@ -66,11 +65,12 @@ def new_custom_alias_v2():
|
|||
note = data.get("note")
|
||||
alias_prefix = convert_to_id(alias_prefix)
|
||||
|
||||
# hypothesis: user will click on the button in the 600 secs
|
||||
try:
|
||||
alias_suffix = check_suffix_signature(signed_suffix)
|
||||
if not alias_suffix:
|
||||
LOG.w("Alias creation time expired for %s", user)
|
||||
return jsonify(error="Alias creation time is expired, please retry"), 412
|
||||
alias_suffix = signer.unsign(signed_suffix, max_age=600).decode()
|
||||
except SignatureExpired:
|
||||
LOG.w("Alias creation time expired for %s", user)
|
||||
return jsonify(error="Alias creation time is expired, please retry"), 412
|
||||
except Exception:
|
||||
LOG.w("Alias suffix is tampered, user %s", user)
|
||||
return jsonify(error="Tampered suffix"), 400
|
||||
|
@ -115,7 +115,6 @@ def new_custom_alias_v2():
|
|||
@api_bp.route("/v3/alias/custom/new", methods=["POST"])
|
||||
@limiter.limit(ALIAS_LIMIT)
|
||||
@require_api_auth
|
||||
@parallel_limiter.lock(name="alias_creation")
|
||||
def new_custom_alias_v3():
|
||||
"""
|
||||
Create a new custom alias
|
||||
|
@ -150,11 +149,10 @@ def new_custom_alias_v3():
|
|||
if not data:
|
||||
return jsonify(error="request body cannot be empty"), 400
|
||||
|
||||
if not isinstance(data, dict):
|
||||
if type(data) is not dict:
|
||||
return jsonify(error="request body does not follow the required format"), 400
|
||||
|
||||
alias_prefix_data = data.get("alias_prefix", "") or ""
|
||||
alias_prefix = alias_prefix_data.strip().lower().replace(" ", "")
|
||||
alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
|
||||
signed_suffix = data.get("signed_suffix", "") or ""
|
||||
signed_suffix = signed_suffix.strip()
|
||||
|
||||
|
@ -169,7 +167,7 @@ def new_custom_alias_v3():
|
|||
return jsonify(error="alias prefix invalid format or too long"), 400
|
||||
|
||||
# check if mailbox is not tempered with
|
||||
if not isinstance(mailbox_ids, list):
|
||||
if type(mailbox_ids) is not list:
|
||||
return jsonify(error="mailbox_ids must be an array of id"), 400
|
||||
mailboxes = []
|
||||
for mailbox_id in mailbox_ids:
|
||||
|
@ -183,10 +181,10 @@ def new_custom_alias_v3():
|
|||
|
||||
# hypothesis: user will click on the button in the 600 secs
|
||||
try:
|
||||
alias_suffix = check_suffix_signature(signed_suffix)
|
||||
if not alias_suffix:
|
||||
LOG.w("Alias creation time expired for %s", user)
|
||||
return jsonify(error="Alias creation time is expired, please retry"), 412
|
||||
alias_suffix = signer.unsign(signed_suffix, max_age=600).decode()
|
||||
except SignatureExpired:
|
||||
LOG.w("Alias creation time expired for %s", user)
|
||||
return jsonify(error="Alias creation time is expired, please retry"), 412
|
||||
except Exception:
|
||||
LOG.w("Alias suffix is tampered, user %s", user)
|
||||
return jsonify(error="Tampered suffix"), 400
|
||||
|
|
|
@ -2,14 +2,13 @@ import tldextract
|
|||
from flask import g
|
||||
from flask import jsonify, request
|
||||
|
||||
from app import parallel_limiter
|
||||
from app.alias_suffix import get_alias_suffixes
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.api.serializer import (
|
||||
get_alias_info_v2,
|
||||
serialize_alias_info_v2,
|
||||
)
|
||||
from app.config import MAX_NB_EMAIL_FREE_PLAN, ALIAS_LIMIT
|
||||
from app.dashboard.views.custom_alias import get_available_suffixes
|
||||
from app.db import Session
|
||||
from app.errors import AliasInTrashError
|
||||
from app.extensions import limiter
|
||||
|
@ -21,7 +20,6 @@ from app.utils import convert_to_id
|
|||
@api_bp.route("/alias/random/new", methods=["POST"])
|
||||
@limiter.limit(ALIAS_LIMIT)
|
||||
@require_api_auth
|
||||
@parallel_limiter.lock(name="alias_creation")
|
||||
def new_random_alias():
|
||||
"""
|
||||
Create a new random alias
|
||||
|
@ -59,7 +57,7 @@ def new_random_alias():
|
|||
prefix_suggestion = ext.domain
|
||||
prefix_suggestion = convert_to_id(prefix_suggestion)
|
||||
|
||||
suffixes = get_alias_suffixes(user)
|
||||
suffixes = get_available_suffixes(user)
|
||||
# use the first suffix
|
||||
suggested_alias = prefix_suggestion + suffixes[0].suffix
|
||||
|
||||
|
@ -107,9 +105,8 @@ def new_random_alias():
|
|||
Session.commit()
|
||||
|
||||
if hostname and not AliasUsedOn.get_by(alias_id=alias.id, hostname=hostname):
|
||||
AliasUsedOn.create(
|
||||
alias_id=alias.id, hostname=hostname, user_id=alias.user_id, commit=True
|
||||
)
|
||||
AliasUsedOn.create(alias_id=alias.id, hostname=hostname, user_id=alias.user_id)
|
||||
Session.commit()
|
||||
|
||||
return (
|
||||
jsonify(alias=alias.email, **serialize_alias_info_v2(get_alias_info_v2(alias))),
|
||||
|
|
|
@ -60,7 +60,7 @@ def get_notifications():
|
|||
)
|
||||
|
||||
|
||||
@api_bp.route("/notifications/<int:notification_id>/read", methods=["POST"])
|
||||
@api_bp.route("/notifications/<notification_id>/read", methods=["POST"])
|
||||
@require_api_auth
|
||||
def mark_as_read(notification_id):
|
||||
"""
|
||||
|
|
|
@ -9,7 +9,7 @@ from app.models import (
|
|||
)
|
||||
|
||||
|
||||
@api_bp.route("/phone/reservations/<int:reservation_id>", methods=["GET", "POST"])
|
||||
@api_bp.route("/phone/reservations/<reservation_id>", methods=["GET", "POST"])
|
||||
@require_api_auth
|
||||
def phone_messages(reservation_id):
|
||||
"""
|
||||
|
|
|
@ -12,7 +12,6 @@ from app.models import (
|
|||
SenderFormatEnum,
|
||||
AliasSuffixEnum,
|
||||
)
|
||||
from app.proton.utils import perform_proton_account_unlink
|
||||
|
||||
|
||||
def setting_to_dict(user: User):
|
||||
|
@ -138,11 +137,3 @@ def get_available_domains_for_random_alias_v2():
|
|||
]
|
||||
|
||||
return jsonify(ret)
|
||||
|
||||
|
||||
@api_bp.route("/setting/unlink_proton_account", methods=["DELETE"])
|
||||
@require_api_auth
|
||||
def unlink_proton_account():
|
||||
user = g.user
|
||||
perform_proton_account_unlink(user)
|
||||
return jsonify({"ok": True})
|
||||
|
|
|
@ -1,27 +0,0 @@
|
|||
from flask import jsonify, g, request
|
||||
from sqlalchemy_utils.types.arrow import arrow
|
||||
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.db import Session
|
||||
|
||||
|
||||
@api_bp.route("/sudo", methods=["PATCH"])
|
||||
@require_api_auth
|
||||
def enter_sudo():
|
||||
"""
|
||||
Enter sudo mode
|
||||
|
||||
Input
|
||||
- password: user password to validate request to enter sudo mode
|
||||
"""
|
||||
user = g.user
|
||||
data = request.get_json() or {}
|
||||
if "password" not in data:
|
||||
return jsonify(error="Invalid password"), 403
|
||||
if not user.check_password(data["password"]):
|
||||
return jsonify(error="Invalid password"), 403
|
||||
|
||||
g.api_key.sudo_mode_at = arrow.now()
|
||||
Session.commit()
|
||||
|
||||
return jsonify(ok=True)
|
|
@ -1,52 +0,0 @@
|
|||
from flask import jsonify, g
|
||||
from sqlalchemy_utils.types.arrow import arrow
|
||||
|
||||
from app.api.base import api_bp, require_api_sudo, require_api_auth
|
||||
from app import config
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import Job, ApiToCookieToken
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
|
||||
@api_bp.route("/user", methods=["DELETE"])
|
||||
@require_api_sudo
|
||||
def delete_user():
|
||||
"""
|
||||
Delete the user. Requires sudo mode.
|
||||
|
||||
"""
|
||||
# Schedule delete account job
|
||||
emit_user_audit_log(
|
||||
user=g.user,
|
||||
action=UserAuditLogAction.UserMarkedForDeletion,
|
||||
message=f"Marked user {g.user.id} ({g.user.email}) for deletion from API",
|
||||
)
|
||||
LOG.w("schedule delete account job for %s", g.user)
|
||||
Job.create(
|
||||
name=config.JOB_DELETE_ACCOUNT,
|
||||
payload={"user_id": g.user.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
return jsonify(ok=True)
|
||||
|
||||
|
||||
@api_bp.route("/user/cookie_token", methods=["GET"])
|
||||
@require_api_auth
|
||||
@limiter.limit("5/minute")
|
||||
def get_api_session_token():
|
||||
"""
|
||||
Get a temporary token to exchange it for a cookie based session
|
||||
Output:
|
||||
200 and a temporary random token
|
||||
{
|
||||
token: "asdli3ldq39h9hd3",
|
||||
}
|
||||
"""
|
||||
token = ApiToCookieToken.create(
|
||||
user=g.user,
|
||||
api_key_id=g.api_key.id,
|
||||
commit=True,
|
||||
)
|
||||
return jsonify({"token": token.code})
|
|
@ -1,44 +1,25 @@
|
|||
import base64
|
||||
import dataclasses
|
||||
from io import BytesIO
|
||||
from typing import Optional
|
||||
|
||||
from flask import jsonify, g, request, make_response
|
||||
from flask_login import logout_user
|
||||
|
||||
from app import s3, config
|
||||
from app import s3
|
||||
from app.api.base import api_bp, require_api_auth
|
||||
from app.config import SESSION_COOKIE_NAME
|
||||
from app.dashboard.views.index import get_stats
|
||||
from app.db import Session
|
||||
from app.image_validation import detect_image_format, ImageFormat
|
||||
from app.models import ApiKey, File, PartnerUser, User
|
||||
from app.proton.utils import get_proton_partner
|
||||
from app.session import logout_session
|
||||
from app.models import ApiKey, File, User
|
||||
from app.utils import random_string
|
||||
|
||||
|
||||
def get_connected_proton_address(user: User) -> Optional[str]:
|
||||
proton_partner = get_proton_partner()
|
||||
partner_user = PartnerUser.get_by(user_id=user.id, partner_id=proton_partner.id)
|
||||
if partner_user is None:
|
||||
return None
|
||||
return partner_user.partner_email
|
||||
|
||||
|
||||
def user_to_dict(user: User) -> dict:
|
||||
ret = {
|
||||
"name": user.name or "",
|
||||
"is_premium": user.is_premium(),
|
||||
"email": user.email,
|
||||
"in_trial": user.in_trial(),
|
||||
"max_alias_free_plan": user.max_alias_for_free_account(),
|
||||
"connected_proton_address": None,
|
||||
"can_create_reverse_alias": user.can_create_contacts(),
|
||||
}
|
||||
|
||||
if config.CONNECT_WITH_PROTON:
|
||||
ret["connected_proton_address"] = get_connected_proton_address(user)
|
||||
|
||||
if user.profile_picture_id:
|
||||
ret["profile_picture_url"] = user.profile_picture.get_url()
|
||||
else:
|
||||
|
@ -52,15 +33,6 @@ def user_to_dict(user: User) -> dict:
|
|||
def user_info():
|
||||
"""
|
||||
Return user info given the api-key
|
||||
|
||||
Output as json
|
||||
- name
|
||||
- is_premium
|
||||
- email
|
||||
- in_trial
|
||||
- max_alias_free
|
||||
- is_connected_with_proton
|
||||
- can_create_reverse_alias
|
||||
"""
|
||||
user = g.user
|
||||
|
||||
|
@ -74,23 +46,23 @@ def update_user_info():
|
|||
Input
|
||||
- profile_picture (optional): base64 of the profile picture. Set to null to remove the profile picture
|
||||
- name (optional)
|
||||
|
||||
"""
|
||||
user = g.user
|
||||
data = request.get_json() or {}
|
||||
|
||||
if "profile_picture" in data:
|
||||
if user.profile_picture_id:
|
||||
file = user.profile_picture
|
||||
user.profile_picture_id = None
|
||||
Session.flush()
|
||||
if file:
|
||||
File.delete(file.id)
|
||||
s3.delete(file.path)
|
||||
if data["profile_picture"] is None:
|
||||
if user.profile_picture_id:
|
||||
file = user.profile_picture
|
||||
user.profile_picture_id = None
|
||||
Session.flush()
|
||||
if data["profile_picture"] is not None:
|
||||
if file:
|
||||
File.delete(file.id)
|
||||
s3.delete(file.path)
|
||||
Session.flush()
|
||||
else:
|
||||
raw_data = base64.decodebytes(data["profile_picture"].encode())
|
||||
if detect_image_format(raw_data) == ImageFormat.Unknown:
|
||||
return jsonify(error="Unsupported image format"), 400
|
||||
file_path = random_string(30)
|
||||
file = File.create(user_id=user.id, path=file_path)
|
||||
Session.flush()
|
||||
|
@ -137,27 +109,8 @@ def logout():
|
|||
Output:
|
||||
- 200
|
||||
"""
|
||||
logout_session()
|
||||
logout_user()
|
||||
response = make_response(jsonify(msg="User is logged out"), 200)
|
||||
response.delete_cookie(SESSION_COOKIE_NAME)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@api_bp.route("/stats")
|
||||
@require_api_auth
|
||||
def user_stats():
|
||||
"""
|
||||
Return stats
|
||||
|
||||
Output as json
|
||||
- nb_alias
|
||||
- nb_forward
|
||||
- nb_reply
|
||||
- nb_block
|
||||
|
||||
"""
|
||||
user = g.user
|
||||
stats = get_stats(user)
|
||||
|
||||
return jsonify(dataclasses.asdict(stats))
|
||||
|
|
|
@ -15,27 +15,4 @@ from .views import (
|
|||
fido,
|
||||
social,
|
||||
recovery,
|
||||
api_to_cookie,
|
||||
oidc,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"login",
|
||||
"logout",
|
||||
"register",
|
||||
"activate",
|
||||
"resend_activation",
|
||||
"reset_password",
|
||||
"forgot_password",
|
||||
"github",
|
||||
"google",
|
||||
"facebook",
|
||||
"proton",
|
||||
"change_email",
|
||||
"mfa",
|
||||
"fido",
|
||||
"social",
|
||||
"recovery",
|
||||
"api_to_cookie",
|
||||
"oidc",
|
||||
]
|
||||
|
|
|
@ -7,7 +7,6 @@ from app.db import Session
|
|||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import ActivationCode
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import sanitize_next_url
|
||||
|
||||
|
||||
|
@ -48,11 +47,6 @@ def activate():
|
|||
|
||||
user = activation_code.user
|
||||
user.activated = True
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.ActivateUser,
|
||||
message=f"User has been activated: {user.email}",
|
||||
)
|
||||
login_user(user)
|
||||
|
||||
# activation code is to be used only once
|
||||
|
@ -71,5 +65,3 @@ def activate():
|
|||
else:
|
||||
LOG.d("redirect user to dashboard")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
# todo: redirect to account_activated page when more features are added into the browser extension
|
||||
# return redirect(url_for("onboarding.account_activated"))
|
||||
|
|
|
@ -1,30 +0,0 @@
|
|||
import arrow
|
||||
from flask import redirect, url_for, request, flash
|
||||
from flask_login import login_user
|
||||
|
||||
from app.auth.base import auth_bp
|
||||
from app.models import ApiToCookieToken
|
||||
from app.utils import sanitize_next_url
|
||||
|
||||
|
||||
@auth_bp.route("/api_to_cookie", methods=["GET"])
|
||||
def api_to_cookie():
|
||||
code = request.args.get("token")
|
||||
if not code:
|
||||
flash("Missing token", "error")
|
||||
return redirect(url_for("auth.login"))
|
||||
|
||||
token = ApiToCookieToken.get_by(code=code)
|
||||
if not token or token.created_at < arrow.now().shift(minutes=-5):
|
||||
flash("Missing token", "error")
|
||||
return redirect(url_for("auth.login"))
|
||||
|
||||
user = token.user
|
||||
ApiToCookieToken.delete(token.id, commit=True)
|
||||
login_user(user)
|
||||
|
||||
next_url = sanitize_next_url(request.args.get("next"))
|
||||
if next_url:
|
||||
return redirect(next_url)
|
||||
else:
|
||||
return redirect(url_for("dashboard.index"))
|
|
@ -3,13 +3,10 @@ from flask_login import login_user
|
|||
|
||||
from app.auth.base import auth_bp
|
||||
from app.db import Session
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import EmailChange, ResetPasswordCode
|
||||
from app.models import EmailChange
|
||||
|
||||
|
||||
@auth_bp.route("/change_email", methods=["GET", "POST"])
|
||||
@limiter.limit("3/hour")
|
||||
def change_email():
|
||||
code = request.args.get("code")
|
||||
|
||||
|
@ -25,14 +22,11 @@ def change_email():
|
|||
return render_template("auth/change_email.html")
|
||||
|
||||
user = email_change.user
|
||||
old_email = user.email
|
||||
user.email = email_change.new_email
|
||||
|
||||
EmailChange.delete(email_change.id)
|
||||
ResetPasswordCode.filter_by(user_id=user.id).delete()
|
||||
Session.commit()
|
||||
|
||||
LOG.i(f"User {user} has changed their email from {old_email} to {user.email}")
|
||||
flash("Your new email has been updated", "success")
|
||||
|
||||
login_user(user)
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import json
|
||||
import secrets
|
||||
from time import time
|
||||
|
||||
import webauthn
|
||||
from flask import (
|
||||
|
@ -62,7 +61,7 @@ def fido():
|
|||
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
|
||||
if browser and not browser.is_expired() and browser.user_id == user.id:
|
||||
login_user(user)
|
||||
flash("Welcome back!", "success")
|
||||
flash(f"Welcome back!", "success")
|
||||
# Redirect user to correct page
|
||||
return redirect(next_url or url_for("dashboard.index"))
|
||||
else:
|
||||
|
@ -108,9 +107,8 @@ def fido():
|
|||
Session.commit()
|
||||
del session[MFA_USER_ID]
|
||||
|
||||
session["sudo_time"] = int(time())
|
||||
login_user(user)
|
||||
flash("Welcome back!", "success")
|
||||
flash(f"Welcome back!", "success")
|
||||
|
||||
# Redirect user to correct page
|
||||
response = make_response(redirect(next_url or url_for("dashboard.index")))
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
from flask import request, render_template, flash, g
|
||||
from flask import request, render_template, redirect, url_for, flash, g
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import StringField, validators
|
||||
|
||||
from app.auth.base import auth_bp
|
||||
from app.dashboard.views.account_setting import send_reset_password_email
|
||||
from app.dashboard.views.setting import send_reset_password_email
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import User
|
||||
from app.utils import sanitize_email, canonicalize_email
|
||||
from app.utils import sanitize_email
|
||||
|
||||
|
||||
class ForgotPasswordForm(FlaskForm):
|
||||
|
@ -16,26 +16,26 @@ class ForgotPasswordForm(FlaskForm):
|
|||
|
||||
@auth_bp.route("/forgot_password", methods=["GET", "POST"])
|
||||
@limiter.limit(
|
||||
"10/hour", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
|
||||
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
|
||||
)
|
||||
def forgot_password():
|
||||
form = ForgotPasswordForm(request.form)
|
||||
|
||||
if form.validate_on_submit():
|
||||
# Trigger rate limiter
|
||||
g.deduct_limit = True
|
||||
|
||||
email = sanitize_email(form.email.data)
|
||||
flash(
|
||||
"If your email is correct, you are going to receive an email to reset your password",
|
||||
"success",
|
||||
)
|
||||
|
||||
email = sanitize_email(form.email.data)
|
||||
canonical_email = canonicalize_email(email)
|
||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||
user = User.get_by(email=email)
|
||||
|
||||
if user:
|
||||
LOG.d("Send forgot password email to %s", user)
|
||||
send_reset_password_email(user)
|
||||
return redirect(url_for("auth.forgot_password"))
|
||||
|
||||
# Trigger rate limiter
|
||||
g.deduct_limit = True
|
||||
|
||||
return render_template("auth/forgot_password.html", form=form)
|
||||
|
|
|
@ -7,7 +7,7 @@ from app.config import URL, GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET
|
|||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from app.models import User, File, SocialAuth
|
||||
from app.utils import random_string, sanitize_email, sanitize_next_url
|
||||
from app.utils import random_string, sanitize_email
|
||||
from .login_utils import after_login
|
||||
|
||||
_authorization_base_url = "https://accounts.google.com/o/oauth2/v2/auth"
|
||||
|
@ -29,7 +29,7 @@ def google_login():
|
|||
# to avoid flask-login displaying the login error message
|
||||
session.pop("_flashes", None)
|
||||
|
||||
next_url = sanitize_next_url(request.args.get("next"))
|
||||
next_url = request.args.get("next")
|
||||
|
||||
# Google does not allow to append param to redirect_url
|
||||
# we need to pass the next url by session
|
||||
|
|
|
@ -5,13 +5,12 @@ from wtforms import StringField, validators
|
|||
|
||||
from app.auth.base import auth_bp
|
||||
from app.auth.views.login_utils import after_login
|
||||
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON, OIDC_CLIENT_ID
|
||||
from app.config import CONNECT_WITH_PROTON
|
||||
from app.events.auth_event import LoginEvent
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import User
|
||||
from app.pw_models import PasswordOracle
|
||||
from app.utils import sanitize_email, sanitize_next_url, canonicalize_email
|
||||
from app.utils import sanitize_email, sanitize_next_url
|
||||
|
||||
|
||||
class LoginForm(FlaskForm):
|
||||
|
@ -39,18 +38,9 @@ def login():
|
|||
show_resend_activation = False
|
||||
|
||||
if form.validate_on_submit():
|
||||
email = sanitize_email(form.email.data)
|
||||
canonical_email = canonicalize_email(email)
|
||||
user = User.get_by(email=email) or User.get_by(email=canonical_email)
|
||||
user = User.filter_by(email=sanitize_email(form.email.data)).first()
|
||||
|
||||
if not user or not user.check_password(form.password.data):
|
||||
if not user:
|
||||
# Do the hash to avoid timing attacks nevertheless
|
||||
dummy_pw = PasswordOracle()
|
||||
dummy_pw.password = (
|
||||
"$2b$12$ZWqpL73h4rGNfLkJohAFAu0isqSw/bX9p/tzpbWRz/To5FAftaW8u"
|
||||
)
|
||||
dummy_pw.check_password(form.password.data)
|
||||
# Trigger rate limiter
|
||||
g.deduct_limit = True
|
||||
form.password.data = None
|
||||
|
@ -62,12 +52,6 @@ def login():
|
|||
"error",
|
||||
)
|
||||
LoginEvent(LoginEvent.ActionType.disabled_login).send()
|
||||
elif user.delete_on is not None:
|
||||
flash(
|
||||
f"Your account is scheduled to be deleted on {user.delete_on}",
|
||||
"error",
|
||||
)
|
||||
LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
|
||||
elif not user.activated:
|
||||
show_resend_activation = True
|
||||
flash(
|
||||
|
@ -85,6 +69,4 @@ def login():
|
|||
next_url=next_url,
|
||||
show_resend_activation=show_resend_activation,
|
||||
connect_with_proton=CONNECT_WITH_PROTON,
|
||||
connect_with_oidc=OIDC_CLIENT_ID is not None,
|
||||
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
|
||||
)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
from time import time
|
||||
from typing import Optional
|
||||
|
||||
from flask import session, redirect, url_for, request
|
||||
|
@ -9,40 +8,37 @@ from app.log import LOG
|
|||
from app.models import Referral
|
||||
|
||||
|
||||
def after_login(user, next_url, login_from_proton: bool = False):
|
||||
def after_login(user, next_url):
|
||||
"""
|
||||
Redirect to the correct page after login.
|
||||
If the user is logged in with Proton, do not look at fido nor otp
|
||||
If user enables MFA: redirect user to MFA page
|
||||
Otherwise redirect to dashboard page if no next_url
|
||||
"""
|
||||
if not login_from_proton:
|
||||
if user.fido_enabled():
|
||||
# Use the same session for FIDO so that we can easily
|
||||
# switch between these two 2FA option
|
||||
session[MFA_USER_ID] = user.id
|
||||
if next_url:
|
||||
return redirect(url_for("auth.fido", next=next_url))
|
||||
else:
|
||||
return redirect(url_for("auth.fido"))
|
||||
elif user.enable_otp:
|
||||
session[MFA_USER_ID] = user.id
|
||||
if next_url:
|
||||
return redirect(url_for("auth.mfa", next=next_url))
|
||||
else:
|
||||
return redirect(url_for("auth.mfa"))
|
||||
|
||||
LOG.d("log user %s in", user)
|
||||
login_user(user)
|
||||
session["sudo_time"] = int(time())
|
||||
|
||||
# User comes to login page from another page
|
||||
if next_url:
|
||||
LOG.d("redirect user to %s", next_url)
|
||||
return redirect(next_url)
|
||||
if user.fido_enabled():
|
||||
# Use the same session for FIDO so that we can easily
|
||||
# switch between these two 2FA option
|
||||
session[MFA_USER_ID] = user.id
|
||||
if next_url:
|
||||
return redirect(url_for("auth.fido", next=next_url))
|
||||
else:
|
||||
return redirect(url_for("auth.fido"))
|
||||
elif user.enable_otp:
|
||||
session[MFA_USER_ID] = user.id
|
||||
if next_url:
|
||||
return redirect(url_for("auth.mfa", next=next_url))
|
||||
else:
|
||||
return redirect(url_for("auth.mfa"))
|
||||
else:
|
||||
LOG.d("redirect user to dashboard")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
LOG.d("log user %s in", user)
|
||||
login_user(user)
|
||||
|
||||
# User comes to login page from another page
|
||||
if next_url:
|
||||
LOG.d("redirect user to %s", next_url)
|
||||
return redirect(next_url)
|
||||
else:
|
||||
LOG.d("redirect user to dashboard")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
|
||||
# name of the cookie that stores the referral code
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
from flask import redirect, url_for, flash, make_response
|
||||
from flask_login import logout_user
|
||||
|
||||
from app.auth.base import auth_bp
|
||||
from app.config import SESSION_COOKIE_NAME
|
||||
from app.session import logout_session
|
||||
|
||||
|
||||
@auth_bp.route("/logout")
|
||||
def logout():
|
||||
logout_session()
|
||||
logout_user()
|
||||
flash("You are logged out", "success")
|
||||
response = make_response(redirect(url_for("auth.login")))
|
||||
response.delete_cookie(SESSION_COOKIE_NAME)
|
||||
|
|
|
@ -55,7 +55,7 @@ def mfa():
|
|||
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
|
||||
if browser and not browser.is_expired() and browser.user_id == user.id:
|
||||
login_user(user)
|
||||
flash("Welcome back!", "success")
|
||||
flash(f"Welcome back!", "success")
|
||||
# Redirect user to correct page
|
||||
return redirect(next_url or url_for("dashboard.index"))
|
||||
else:
|
||||
|
@ -67,13 +67,13 @@ def mfa():
|
|||
|
||||
token = otp_token_form.token.data.replace(" ", "")
|
||||
|
||||
if totp.verify(token, valid_window=2) and user.last_otp != token:
|
||||
if totp.verify(token) and user.last_otp != token:
|
||||
del session[MFA_USER_ID]
|
||||
user.last_otp = token
|
||||
Session.commit()
|
||||
|
||||
login_user(user)
|
||||
flash("Welcome back!", "success")
|
||||
flash(f"Welcome back!", "success")
|
||||
|
||||
# Redirect user to correct page
|
||||
response = make_response(redirect(next_url or url_for("dashboard.index")))
|
||||
|
|
|
@ -1,135 +0,0 @@
|
|||
from flask import request, session, redirect, flash, url_for
|
||||
from requests_oauthlib import OAuth2Session
|
||||
|
||||
import requests
|
||||
|
||||
from app import config
|
||||
from app.auth.base import auth_bp
|
||||
from app.auth.views.login_utils import after_login
|
||||
from app.config import (
|
||||
URL,
|
||||
OIDC_SCOPES,
|
||||
OIDC_NAME_FIELD,
|
||||
)
|
||||
from app.db import Session
|
||||
from app.email_utils import send_welcome_email
|
||||
from app.log import LOG
|
||||
from app.models import User, SocialAuth
|
||||
from app.utils import sanitize_email, sanitize_next_url
|
||||
|
||||
|
||||
# need to set explicitly redirect_uri instead of leaving the lib to pre-fill redirect_uri
|
||||
# when served behind nginx, the redirect_uri is localhost... and not the real url
|
||||
redirect_uri = URL + "/auth/oidc/callback"
|
||||
|
||||
SESSION_STATE_KEY = "oauth_state"
|
||||
SESSION_NEXT_KEY = "oauth_redirect_next"
|
||||
|
||||
|
||||
@auth_bp.route("/oidc/login")
|
||||
def oidc_login():
|
||||
if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
|
||||
return redirect(url_for("auth.login"))
|
||||
|
||||
next_url = sanitize_next_url(request.args.get("next"))
|
||||
|
||||
auth_url = requests.get(config.OIDC_WELL_KNOWN_URL).json()["authorization_endpoint"]
|
||||
|
||||
oidc = OAuth2Session(
|
||||
config.OIDC_CLIENT_ID, scope=[OIDC_SCOPES], redirect_uri=redirect_uri
|
||||
)
|
||||
authorization_url, state = oidc.authorization_url(auth_url)
|
||||
|
||||
# State is used to prevent CSRF, keep this for later.
|
||||
session[SESSION_STATE_KEY] = state
|
||||
session[SESSION_NEXT_KEY] = next_url
|
||||
return redirect(authorization_url)
|
||||
|
||||
|
||||
@auth_bp.route("/oidc/callback")
|
||||
def oidc_callback():
|
||||
if SESSION_STATE_KEY not in session:
|
||||
flash("Invalid state, please retry", "error")
|
||||
return redirect(url_for("auth.login"))
|
||||
if config.OIDC_CLIENT_ID is None or config.OIDC_CLIENT_SECRET is None:
|
||||
return redirect(url_for("auth.login"))
|
||||
|
||||
# user clicks on cancel
|
||||
if "error" in request.args:
|
||||
flash("Please use another sign in method then", "warning")
|
||||
return redirect("/")
|
||||
|
||||
oidc_configuration = requests.get(config.OIDC_WELL_KNOWN_URL).json()
|
||||
user_info_url = oidc_configuration["userinfo_endpoint"]
|
||||
token_url = oidc_configuration["token_endpoint"]
|
||||
|
||||
oidc = OAuth2Session(
|
||||
config.OIDC_CLIENT_ID,
|
||||
state=session[SESSION_STATE_KEY],
|
||||
scope=[OIDC_SCOPES],
|
||||
redirect_uri=redirect_uri,
|
||||
)
|
||||
oidc.fetch_token(
|
||||
token_url,
|
||||
client_secret=config.OIDC_CLIENT_SECRET,
|
||||
authorization_response=request.url,
|
||||
)
|
||||
|
||||
oidc_user_data = oidc.get(user_info_url)
|
||||
if oidc_user_data.status_code != 200:
|
||||
LOG.e(
|
||||
f"cannot get oidc user data {oidc_user_data.status_code} {oidc_user_data.text}"
|
||||
)
|
||||
flash(
|
||||
"Cannot get user data from OIDC, please use another way to login/sign up",
|
||||
"error",
|
||||
)
|
||||
return redirect(url_for("auth.login"))
|
||||
oidc_user_data = oidc_user_data.json()
|
||||
|
||||
email = oidc_user_data.get("email")
|
||||
|
||||
if not email:
|
||||
LOG.e(f"cannot get email for OIDC user {oidc_user_data} {email}")
|
||||
flash(
|
||||
"Cannot get a valid email from OIDC, please another way to login/sign up",
|
||||
"error",
|
||||
)
|
||||
return redirect(url_for("auth.login"))
|
||||
|
||||
email = sanitize_email(email)
|
||||
user = User.get_by(email=email)
|
||||
|
||||
if not user and config.DISABLE_REGISTRATION:
|
||||
flash(
|
||||
"Sorry you cannot sign up via the OIDC provider. Please sign-up first with your email.",
|
||||
"error",
|
||||
)
|
||||
return redirect(url_for("auth.register"))
|
||||
elif not user:
|
||||
user = create_user(email, oidc_user_data)
|
||||
|
||||
if not SocialAuth.get_by(user_id=user.id, social="oidc"):
|
||||
SocialAuth.create(user_id=user.id, social="oidc")
|
||||
Session.commit()
|
||||
|
||||
# The activation link contains the original page, for ex authorize page
|
||||
next_url = session[SESSION_NEXT_KEY]
|
||||
session[SESSION_NEXT_KEY] = None
|
||||
|
||||
return after_login(user, next_url)
|
||||
|
||||
|
||||
def create_user(email, oidc_user_data):
|
||||
new_user = User.create(
|
||||
email=email,
|
||||
name=oidc_user_data.get(OIDC_NAME_FIELD),
|
||||
password="",
|
||||
activated=True,
|
||||
)
|
||||
LOG.i(f"Created new user for login request from OIDC. New user {new_user.id}")
|
||||
Session.commit()
|
||||
|
||||
send_welcome_email(new_user)
|
||||
|
||||
return new_user
|
|
@ -3,7 +3,6 @@ from flask import request, session, redirect, flash, url_for
|
|||
from flask_limiter.util import get_remote_address
|
||||
from flask_login import current_user
|
||||
from requests_oauthlib import OAuth2Session
|
||||
from typing import Optional
|
||||
|
||||
from app.auth.base import auth_bp
|
||||
from app.auth.views.login_utils import after_login
|
||||
|
@ -11,20 +10,12 @@ from app.config import (
|
|||
PROTON_BASE_URL,
|
||||
PROTON_CLIENT_ID,
|
||||
PROTON_CLIENT_SECRET,
|
||||
PROTON_EXTRA_HEADER_NAME,
|
||||
PROTON_EXTRA_HEADER_VALUE,
|
||||
PROTON_VALIDATE_CERTS,
|
||||
URL,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import ApiKey, User
|
||||
from app.proton.proton_client import HttpProtonClient, convert_access_token
|
||||
from app.proton.proton_callback_handler import (
|
||||
ProtonCallbackHandler,
|
||||
Action,
|
||||
)
|
||||
from app.proton.utils import get_proton_partner
|
||||
from app.utils import sanitize_next_url, sanitize_scheme
|
||||
from app.proton.proton_callback_handler import ProtonCallbackHandler, Action
|
||||
from app.utils import sanitize_next_url
|
||||
|
||||
_authorization_base_url = PROTON_BASE_URL + "/oauth/authorize"
|
||||
_token_url = PROTON_BASE_URL + "/oauth/token"
|
||||
|
@@ -33,35 +24,19 @@ _token_url = PROTON_BASE_URL + "/oauth/token"
# when served behind nginx, the redirect_uri is localhost... and not the real url
_redirect_uri = URL + "/auth/proton/callback"

SESSION_ACTION_KEY = "oauth_action"
SESSION_STATE_KEY = "oauth_state"
DEFAULT_SCHEME = "auth.simplelogin"


def get_api_key_for_user(user: User) -> str:
    ak = ApiKey.create(
        user_id=user.id,
        name="Created via Login with Proton on mobile app",
        commit=True,
    )
    return ak.code


def extract_action() -> Optional[Action]:
def extract_action() -> Action:
    action = request.args.get("action")
    if action is not None:
        if action == "link":
            return Action.Link
        elif action == "login":
            return Action.Login
        else:
            LOG.w(f"Unknown action received: {action}")
            return None
            raise Exception(f"Unknown action: {action}")
    return Action.Login


def get_action_from_state() -> Action:
    oauth_action = session[SESSION_ACTION_KEY]
    oauth_action = session["oauth_action"]
    if oauth_action == Action.Login.value:
        return Action.Login
    elif oauth_action == Action.Link.value:
@@ -74,44 +49,20 @@ def proton_login():
    if PROTON_CLIENT_ID is None or PROTON_CLIENT_SECRET is None:
        return redirect(url_for("auth.login"))

    action = extract_action()
    if action is None:
        return redirect(url_for("auth.login"))
    if action == Action.Link and not current_user.is_authenticated:
        return redirect(url_for("auth.login"))

    next_url = sanitize_next_url(request.args.get("next"))
    if next_url:
        session["oauth_next"] = next_url
    elif "oauth_next" in session:
        del session["oauth_next"]

    scheme = sanitize_scheme(request.args.get("scheme"))
    if scheme:
        session["oauth_scheme"] = scheme
    elif "oauth_scheme" in session:
        del session["oauth_scheme"]

    mode = request.args.get("mode", "session")
    if mode == "apikey":
        session["oauth_mode"] = "apikey"
    else:
        session["oauth_mode"] = "session"

    proton = OAuth2Session(PROTON_CLIENT_ID, redirect_uri=_redirect_uri)
    authorization_url, state = proton.authorization_url(_authorization_base_url)

    # State is used to prevent CSRF, keep this for later.
    session[SESSION_STATE_KEY] = state
    session[SESSION_ACTION_KEY] = action.value
    session["oauth_state"] = state
    session["oauth_action"] = extract_action().value
    return redirect(authorization_url)


@auth_bp.route("/proton/callback")
def proton_callback():
    if SESSION_STATE_KEY not in session or SESSION_STATE_KEY not in session:
        flash("Invalid state, please retry", "error")
        return redirect(url_for("auth.login"))
    if PROTON_CLIENT_ID is None or PROTON_CLIENT_SECRET is None:
        return redirect(url_for("auth.login"))

@@ -122,7 +73,7 @@ def proton_callback():

    proton = OAuth2Session(
        PROTON_CLIENT_ID,
        state=session[SESSION_STATE_KEY],
        state=session["oauth_state"],
        redirect_uri=_redirect_uri,
    )

@@ -134,26 +85,14 @@ def proton_callback():
        return response

    proton.register_compliance_hook("access_token_response", check_status_code)

    headers = None
    if PROTON_EXTRA_HEADER_NAME and PROTON_EXTRA_HEADER_VALUE:
        headers = {PROTON_EXTRA_HEADER_NAME: PROTON_EXTRA_HEADER_VALUE}

    try:
        token = proton.fetch_token(
            _token_url,
            client_secret=PROTON_CLIENT_SECRET,
            authorization_response=request.url,
            verify=PROTON_VALIDATE_CERTS,
            method="GET",
            include_client_id=True,
            headers=headers,
        )
    except Exception as e:
        LOG.warning(f"Error fetching Proton token: {e}")
        flash("There was an error in the login process", "error")
        return redirect(url_for("auth.login"))

    token = proton.fetch_token(
        _token_url,
        client_secret=PROTON_CLIENT_SECRET,
        authorization_response=request.url,
        verify=PROTON_VALIDATE_CERTS,
        method="GET",
        include_client_id=True,
    )
    credentials = convert_access_token(token["access_token"])
    action = get_action_from_state()

@@ -161,30 +100,22 @@ def proton_callback():
        PROTON_BASE_URL, credentials, get_remote_address(), verify=PROTON_VALIDATE_CERTS
    )
    handler = ProtonCallbackHandler(proton_client)
    proton_partner = get_proton_partner()

    next_url = session.get("oauth_next")
    if action == Action.Login:
        res = handler.handle_login(proton_partner)
        res = handler.handle_login()
    elif action == Action.Link:
        res = handler.handle_link(current_user, proton_partner)
        res = handler.handle_link(current_user)
    else:
        raise Exception(f"Unknown Action: {action.name}")

    if res.flash_message is not None:
        flash(res.flash_message, res.flash_category)

    oauth_scheme = session.get("oauth_scheme")
    if session.get("oauth_mode", "session") == "apikey":
        apikey = get_api_key_for_user(res.user)
        scheme = oauth_scheme or DEFAULT_SCHEME
        return redirect(f"{scheme}:///login?apikey={apikey}")

    if res.redirect_to_login:
        return redirect(url_for("auth.login"))

    if next_url and next_url[0] == "/" and oauth_scheme:
        next_url = f"{oauth_scheme}://{next_url}"
    if res.redirect:
        return redirect(res.redirect)

    redirect_url = next_url or res.redirect
    return after_login(res.user, redirect_url, login_from_proton=True)
    next_url = session.get("oauth_next")
    return after_login(res.user, next_url)

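A quick note on the hunk above: when the login flow is started with mode=apikey, the callback hands the API key back to the mobile app through a custom URL scheme instead of a normal web redirect. A rough sketch of the resulting redirect URL, assuming the default scheme and a made-up key value:

    # illustrative only; "abc123" is a placeholder, not a real ApiKey.code
    scheme = session.get("oauth_scheme") or DEFAULT_SCHEME  # "auth.simplelogin"
    redirect_url = f"{scheme}:///login?apikey=abc123"
    # -> "auth.simplelogin:///login?apikey=abc123"
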
@@ -42,7 +42,7 @@ def recovery_route():

    if recovery_form.validate_on_submit():
        code = recovery_form.code.data
        recovery_code = RecoveryCode.find_by_user_code(user, code)
        recovery_code = RecoveryCode.get_by(user_id=user.id, code=code)

        if recovery_code:
            if recovery_code.used:
@@ -53,7 +53,7 @@ def recovery_route():
                del session[MFA_USER_ID]

                login_user(user)
                flash("Welcome back!", "success")
                flash(f"Welcome back!", "success")

                recovery_code.used = True
                recovery_code.used_at = arrow.now()

@@ -6,7 +6,7 @@ from wtforms import StringField, validators

from app import email_utils, config
from app.auth.base import auth_bp
from app.config import CONNECT_WITH_PROTON, CONNECT_WITH_OIDC_ICON
from app.config import CONNECT_WITH_PROTON
from app.auth.views.login_utils import get_referral
from app.config import URL, HCAPTCHA_SECRET, HCAPTCHA_SITEKEY
from app.db import Session
@@ -16,8 +16,8 @@ from app.email_utils import (
)
from app.events.auth_event import RegisterEvent
from app.log import LOG
from app.models import User, ActivationCode, DailyMetric
from app.utils import random_string, encode_url, sanitize_email, canonicalize_email
from app.models import User, ActivationCode
from app.utils import random_string, encode_url, sanitize_email


class RegisterForm(FlaskForm):
@@ -70,22 +70,19 @@ def register():
                    HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
                )

        email = canonicalize_email(form.email.data)
        email = sanitize_email(form.email.data)
        if not email_can_be_used_as_mailbox(email):
            flash("You cannot use this email address as your personal inbox.", "error")
            RegisterEvent(RegisterEvent.ActionType.email_in_use).send()
        else:
            sanitized_email = sanitize_email(form.email.data)
            if personal_email_already_used(email) or personal_email_already_used(
                sanitized_email
            ):
            if personal_email_already_used(email):
                flash(f"Email {email} already used", "error")
                RegisterEvent(RegisterEvent.ActionType.email_in_use).send()
            else:
                LOG.d("create user %s", email)
                user = User.create(
                    email=email,
                    name=form.email.data,
                    name="",
                    password=form.password.data,
                    referral=get_referral(),
                )
@@ -94,8 +91,6 @@ def register():
                try:
                    send_activation_email(user, next_url)
                    RegisterEvent(RegisterEvent.ActionType.success).send()
                    DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
                    Session.commit()
                except Exception:
                    flash("Invalid email, are you sure the email is correct?", "error")
                    RegisterEvent(RegisterEvent.ActionType.invalid_email).send()
@@ -109,14 +104,11 @@ def register():
        next_url=next_url,
        HCAPTCHA_SITEKEY=HCAPTCHA_SITEKEY,
        connect_with_proton=CONNECT_WITH_PROTON,
        connect_with_oidc=config.OIDC_CLIENT_ID is not None,
        connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
    )


def send_activation_email(user, next_url):
    # the activation code is valid for 1h and delete all previous codes
    Session.query(ActivationCode).filter(ActivationCode.user_id == user.id).delete()
    # the activation code is valid for 1h
    activation = ActivationCode.create(user_id=user.id, code=random_string(30))
    Session.commit()

@@ -126,4 +118,4 @@ def send_activation_email(user, next_url):
        LOG.d("redirect user to %s after activation", next_url)
        activation_link = activation_link + "&next=" + encode_url(next_url)

    email_utils.send_activation_email(user, activation_link)
    email_utils.send_activation_email(user.email, activation_link)

@@ -4,10 +4,9 @@ from wtforms import StringField, validators

from app.auth.base import auth_bp
from app.auth.views.register import send_activation_email
from app.extensions import limiter
from app.log import LOG
from app.models import User
from app.utils import sanitize_email, canonicalize_email
from app.utils import sanitize_email


class ResendActivationForm(FlaskForm):
@@ -15,14 +14,11 @@ class ResendActivationForm(FlaskForm):


@auth_bp.route("/resend_activation", methods=["GET", "POST"])
@limiter.limit("10/hour")
def resend_activation():
    form = ResendActivationForm(request.form)

    if form.validate_on_submit():
        email = sanitize_email(form.email.data)
        canonical_email = canonicalize_email(email)
        user = User.get_by(email=email) or User.get_by(email=canonical_email)
        user = User.filter_by(email=sanitize_email(form.email.data)).first()

        if not user:
            flash("There is no such email", "warning")

@@ -9,7 +9,6 @@ from app.auth.views.login_utils import after_login
from app.db import Session
from app.extensions import limiter
from app.models import ResetPasswordCode
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class ResetPasswordForm(FlaskForm):
@@ -60,14 +59,9 @@ def reset_password():

        # this can be served to activate user too
        user.activated = True
        emit_user_audit_log(
            user=user,
            action=UserAuditLogAction.ResetPassword,
            message="User has reset their password",
        )

        # remove all reset password codes
        ResetPasswordCode.filter_by(user_id=user.id).delete()
        # remove the reset password code
        ResetPasswordCode.delete(reset_password_code.id)

        # change the alternative_id to log user out on other browsers
        user.alternative_id = str(uuid.uuid4())

@@ -1,2 +0,0 @@
SHA1 = "dev"
BUILD_TIME = "1652365083"

215 app/config.py
@@ -2,12 +2,14 @@ import os
|
|||
import random
|
||||
import socket
|
||||
import string
|
||||
import subprocess
|
||||
from ast import literal_eval
|
||||
from typing import Callable, List, Optional
|
||||
from typing import Callable, List
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
SHA1 = subprocess.getoutput("git rev-parse HEAD")
|
||||
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
|
||||
|
@@ -35,33 +37,6 @@ def sl_getenv(env_var: str, default_factory: Callable = None):
|
|||
return literal_eval(value)
|
||||
|
||||
|
||||
def get_env_dict(env_var: str) -> dict[str, str]:
|
||||
"""
|
||||
Get an env variable and convert it into a python dictionary with keys and values as strings.
|
||||
Args:
|
||||
env_var (str): env var, example: SL_DB
|
||||
|
||||
Syntax is: key1=value1;key2=value2
|
||||
Components separated by ;
|
||||
key and value separated by =
|
||||
"""
|
||||
value = os.getenv(env_var)
|
||||
if not value:
|
||||
return {}
|
||||
|
||||
components = value.split(";")
|
||||
result = {}
|
||||
for component in components:
|
||||
if component == "":
|
||||
continue
|
||||
parts = component.split("=")
|
||||
if len(parts) != 2:
|
||||
raise Exception(f"Invalid config for env var {env_var}")
|
||||
result[parts[0].strip()] = parts[1].strip()
|
||||
|
||||
return result
|
||||
|
||||
|
||||
config_file = os.environ.get("CONFIG")
|
||||
if config_file:
|
||||
config_file = get_abs_path(config_file)
|
||||
|
@@ -123,8 +98,6 @@ except Exception:
|
|||
print("MAX_NB_EMAIL_FREE_PLAN is not set, use 5 as default value")
|
||||
MAX_NB_EMAIL_FREE_PLAN = 5
|
||||
|
||||
MAX_NB_EMAIL_OLD_FREE_PLAN = int(os.environ.get("MAX_NB_EMAIL_OLD_FREE_PLAN", 15))
|
||||
|
||||
# maximum number of directory a premium user can create
|
||||
MAX_NB_DIRECTORY = 50
|
||||
MAX_NB_SUBDOMAIN = 5
|
||||
|
@@ -138,16 +111,13 @@ POSTFIX_SERVER = os.environ.get("POSTFIX_SERVER", "240.0.0.1")
|
|||
DISABLE_REGISTRATION = "DISABLE_REGISTRATION" in os.environ
|
||||
|
||||
# allow using a different postfix port, useful when developing locally
|
||||
POSTFIX_PORT = 25
|
||||
if "POSTFIX_PORT" in os.environ:
|
||||
POSTFIX_PORT = int(os.environ["POSTFIX_PORT"])
|
||||
|
||||
# Use port 587 instead of 25 when sending emails through Postfix
|
||||
# Useful when calling Postfix from an external network
|
||||
POSTFIX_SUBMISSION_TLS = "POSTFIX_SUBMISSION_TLS" in os.environ
|
||||
if POSTFIX_SUBMISSION_TLS:
|
||||
default_postfix_port = 587
|
||||
else:
|
||||
default_postfix_port = 25
|
||||
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", default_postfix_port))
|
||||
POSTFIX_TIMEOUT = int(os.environ.get("POSTFIX_TIMEOUT", 3))
|
||||
|
||||
# ["domain1.com", "domain2.com"]
|
||||
OTHER_ALIAS_DOMAINS = sl_getenv("OTHER_ALIAS_DOMAINS", list)
|
||||
|
@@ -190,7 +160,6 @@ if "DKIM_PRIVATE_KEY_PATH" in os.environ:
|
|||
|
||||
# Database
|
||||
DB_URI = os.environ["DB_URI"]
|
||||
DB_CONN_NAME = os.environ.get("DB_CONN_NAME", "webapp")
|
||||
|
||||
# Flask secret
|
||||
FLASK_SECRET = os.environ["FLASK_SECRET"]
|
||||
|
@@ -199,14 +168,12 @@ if not FLASK_SECRET:
|
|||
SESSION_COOKIE_NAME = "slapp"
|
||||
MAILBOX_SECRET = FLASK_SECRET + "mailbox"
|
||||
CUSTOM_ALIAS_SECRET = FLASK_SECRET + "custom_alias"
|
||||
UNSUBSCRIBE_SECRET = FLASK_SECRET + "unsub"
|
||||
|
||||
# AWS
|
||||
AWS_REGION = os.environ.get("AWS_REGION") or "eu-west-3"
|
||||
BUCKET = os.environ.get("BUCKET")
|
||||
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
|
||||
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
|
||||
AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL", None)
|
||||
|
||||
# Paddle
|
||||
try:
|
||||
|
@@ -261,7 +228,7 @@ else:
|
|||
|
||||
print("WARNING: Use a temp directory for GNUPGHOME", GNUPGHOME)
|
||||
|
||||
# Github, Google, Facebook, OIDC client id and secrets
|
||||
# Github, Google, Facebook client id and secrets
|
||||
GITHUB_CLIENT_ID = os.environ.get("GITHUB_CLIENT_ID")
|
||||
GITHUB_CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")
|
||||
|
||||
|
@@ -271,13 +238,6 @@ GOOGLE_CLIENT_SECRET = os.environ.get("GOOGLE_CLIENT_SECRET")
|
|||
FACEBOOK_CLIENT_ID = os.environ.get("FACEBOOK_CLIENT_ID")
|
||||
FACEBOOK_CLIENT_SECRET = os.environ.get("FACEBOOK_CLIENT_SECRET")
|
||||
|
||||
CONNECT_WITH_OIDC_ICON = os.environ.get("CONNECT_WITH_OIDC_ICON")
|
||||
OIDC_WELL_KNOWN_URL = os.environ.get("OIDC_WELL_KNOWN_URL")
|
||||
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID")
|
||||
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET")
|
||||
OIDC_SCOPES = os.environ.get("OIDC_SCOPES")
|
||||
OIDC_NAME_FIELD = os.environ.get("OIDC_NAME_FIELD", "name")
|
||||
|
||||
PROTON_CLIENT_ID = os.environ.get("PROTON_CLIENT_ID")
|
||||
PROTON_CLIENT_SECRET = os.environ.get("PROTON_CLIENT_SECRET")
|
||||
PROTON_BASE_URL = os.environ.get(
|
||||
|
@@ -285,8 +245,6 @@ PROTON_BASE_URL = os.environ.get(
|
|||
)
|
||||
PROTON_VALIDATE_CERTS = "PROTON_VALIDATE_CERTS" in os.environ
|
||||
CONNECT_WITH_PROTON = "CONNECT_WITH_PROTON" in os.environ
|
||||
PROTON_EXTRA_HEADER_NAME = os.environ.get("PROTON_EXTRA_HEADER_NAME")
|
||||
PROTON_EXTRA_HEADER_VALUE = os.environ.get("PROTON_EXTRA_HEADER_VALUE")
|
||||
|
||||
# in seconds
|
||||
AVATAR_URL_EXPIRATION = 3600 * 24 * 7 # 1h*24h/d*7d=1week
|
||||
|
@@ -306,10 +264,6 @@ JOB_BATCH_IMPORT = "batch-import"
|
|||
JOB_DELETE_ACCOUNT = "delete-account"
|
||||
JOB_DELETE_MAILBOX = "delete-mailbox"
|
||||
JOB_DELETE_DOMAIN = "delete-domain"
|
||||
JOB_SEND_USER_REPORT = "send-user-report"
|
||||
JOB_SEND_PROTON_WELCOME_1 = "proton-welcome-1"
|
||||
JOB_SEND_ALIAS_CREATION_EVENTS = "send-alias-creation-events"
|
||||
JOB_SEND_EVENT_TO_WEBHOOK = "send-event-to-webhook"
|
||||
|
||||
# for pagination
|
||||
PAGE_LIMIT = 20
|
||||
|
@@ -393,9 +347,6 @@ ALERT_COMPLAINT_TRANSACTIONAL_PHASE = "alert_complaint_transactional_phase"
|
|||
|
||||
ALERT_QUARANTINE_DMARC = "alert_quarantine_dmarc"
|
||||
|
||||
ALERT_DUAL_SUBSCRIPTION_WITH_PARTNER = "alert_dual_sub_with_partner"
|
||||
ALERT_WARN_MULTIPLE_SUBSCRIPTIONS = "alert_multiple_subscription"
|
||||
|
||||
# <<<<< END ALERT EMAIL >>>>
|
||||
|
||||
# Disable onboarding emails
|
||||
|
@@ -457,25 +408,12 @@ try:
|
|||
except Exception:
|
||||
HIBP_SCAN_INTERVAL_DAYS = 7
|
||||
HIBP_API_KEYS = sl_getenv("HIBP_API_KEYS", list) or []
|
||||
HIBP_MAX_ALIAS_CHECK = 10_000
|
||||
HIBP_RPM = int(os.environ.get("HIBP_API_RPM", 100))
|
||||
HIBP_SKIP_PARTNER_ALIAS = os.environ.get("HIBP_SKIP_PARTNER_ALIAS")
|
||||
|
||||
KEEP_OLD_DATA_DAYS = 30
|
||||
|
||||
POSTMASTER = os.environ.get("POSTMASTER")
|
||||
|
||||
# store temporary files, especially for debugging
|
||||
TEMP_DIR = os.environ.get("TEMP_DIR")
|
||||
|
||||
# Store unsent emails
|
||||
SAVE_UNSENT_DIR = os.environ.get("SAVE_UNSENT_DIR")
|
||||
if SAVE_UNSENT_DIR and not os.path.isdir(SAVE_UNSENT_DIR):
|
||||
try:
|
||||
os.makedirs(SAVE_UNSENT_DIR)
|
||||
except FileExistsError:
|
||||
pass
|
||||
|
||||
# enable the alias automation disable: an alias can be automatically disabled if it has too many bounces
|
||||
ALIAS_AUTOMATIC_DISABLE = "ALIAS_AUTOMATIC_DISABLE" in os.environ
|
||||
|
||||
|
@@ -507,9 +445,6 @@ if len(VERP_EMAIL_SECRET) < 32:
|
|||
raise RuntimeError(
|
||||
"Please, set VERP_EMAIL_SECRET to a random string at least 32 chars long"
|
||||
)
|
||||
ALIAS_TRANSFER_TOKEN_SECRET = os.environ.get("ALIAS_TRANSFER_TOKEN_SECRET") or (
|
||||
FLASK_SECRET + "aliastransfertoken"
|
||||
)
|
||||
|
||||
|
||||
def get_allowed_redirect_domains() -> List[str]:
|
||||
|
@@ -529,139 +464,3 @@ def setup_nameservers():
|
|||
|
||||
|
||||
NAMESERVERS = setup_nameservers()
|
||||
|
||||
DISABLE_CREATE_CONTACTS_FOR_FREE_USERS = os.environ.get(
|
||||
"DISABLE_CREATE_CONTACTS_FOR_FREE_USERS", False
|
||||
)
|
||||
|
||||
|
||||
# Expect format hits,seconds:hits,seconds...
|
||||
# Example 1,10:4,60 means 1 in the last 10 secs or 4 in the last 60 secs
|
||||
def getRateLimitFromConfig(
|
||||
env_var: string, default: string = ""
|
||||
) -> list[tuple[int, int]]:
|
||||
value = os.environ.get(env_var, default)
|
||||
if not value:
|
||||
return []
|
||||
entries = [entry for entry in value.split(":")]
|
||||
limits = []
|
||||
for entry in entries:
|
||||
fields = entry.split(",")
|
||||
limit = (int(fields[0]), int(fields[1]))
|
||||
limits.append(limit)
|
||||
return limits
|
||||
|
||||
|
||||
ALIAS_CREATE_RATE_LIMIT_FREE = getRateLimitFromConfig(
|
||||
"ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600"
|
||||
)
|
||||
ALIAS_CREATE_RATE_LIMIT_PAID = getRateLimitFromConfig(
|
||||
"ALIAS_CREATE_RATE_LIMIT_PAID", "50,900:200,3600"
|
||||
)
|
||||
PARTNER_API_TOKEN_SECRET = os.environ.get("PARTNER_API_TOKEN_SECRET") or (
|
||||
FLASK_SECRET + "partnerapitoken"
|
||||
)
|
||||
|
||||
JOB_MAX_ATTEMPTS = 5
|
||||
JOB_TAKEN_RETRY_WAIT_MINS = 30
|
||||
|
||||
# MEM_STORE
|
||||
MEM_STORE_URI = os.environ.get("MEM_STORE_URI", None)
|
||||
|
||||
# Recovery codes hash salt
|
||||
RECOVERY_CODE_HMAC_SECRET = os.environ.get("RECOVERY_CODE_HMAC_SECRET") or (
|
||||
FLASK_SECRET + "generatearandomtoken"
|
||||
)
|
||||
if not RECOVERY_CODE_HMAC_SECRET or len(RECOVERY_CODE_HMAC_SECRET) < 16:
|
||||
raise RuntimeError(
|
||||
"Please define RECOVERY_CODE_HMAC_SECRET in your configuration with a random string at least 16 chars long"
|
||||
)
|
||||
|
||||
|
||||
# the minimum rspamd spam score above which emails that fail DMARC should be quarantined
|
||||
if "MIN_RSPAMD_SCORE_FOR_FAILED_DMARC" in os.environ:
|
||||
MIN_RSPAMD_SCORE_FOR_FAILED_DMARC = float(
|
||||
os.environ["MIN_RSPAMD_SCORE_FOR_FAILED_DMARC"]
|
||||
)
|
||||
else:
|
||||
MIN_RSPAMD_SCORE_FOR_FAILED_DMARC = None
|
||||
|
||||
# run over all reverse alias for an alias and replace them with sender address
|
||||
ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT = (
|
||||
"ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT" in os.environ
|
||||
)
|
||||
|
||||
if ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT:
|
||||
# max number of reverse alias that can be replaced
|
||||
MAX_NB_REVERSE_ALIAS_REPLACEMENT = int(
|
||||
os.environ["MAX_NB_REVERSE_ALIAS_REPLACEMENT"]
|
||||
)
|
||||
|
||||
# Only used for tests
|
||||
SKIP_MX_LOOKUP_ON_CHECK = False
|
||||
|
||||
DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
|
||||
|
||||
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
|
||||
|
||||
UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
|
||||
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
|
||||
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
|
||||
|
||||
STORE_TRANSACTIONAL_EMAILS = "STORE_TRANSACTIONAL_EMAILS" in os.environ
|
||||
|
||||
EVENT_WEBHOOK = os.environ.get("EVENT_WEBHOOK", None)
|
||||
|
||||
# We want it disabled by default, so only skip if defined
|
||||
EVENT_WEBHOOK_SKIP_VERIFY_SSL = "EVENT_WEBHOOK_SKIP_VERIFY_SSL" in os.environ
|
||||
EVENT_WEBHOOK_DISABLE = "EVENT_WEBHOOK_DISABLE" in os.environ
|
||||
|
||||
|
||||
def read_webhook_enabled_user_ids() -> Optional[List[int]]:
|
||||
user_ids = os.environ.get("EVENT_WEBHOOK_ENABLED_USER_IDS", None)
|
||||
if user_ids is None:
|
||||
return None
|
||||
|
||||
ids = []
|
||||
for user_id in user_ids.split(","):
|
||||
try:
|
||||
ids.append(int(user_id.strip()))
|
||||
except ValueError:
|
||||
pass
|
||||
return ids
|
||||
|
||||
|
||||
EVENT_WEBHOOK_ENABLED_USER_IDS: Optional[List[int]] = read_webhook_enabled_user_ids()
|
||||
|
||||
# Allow to define a different DB_URI for the event listener, in case we want to skip the connection pool
|
||||
# It defaults to the regular DB_URI in case it's needed
|
||||
EVENT_LISTENER_DB_URI = os.environ.get("EVENT_LISTENER_DB_URI", DB_URI)
|
||||
|
||||
|
||||
def read_partner_dict(var: str) -> dict[int, str]:
|
||||
partner_value = get_env_dict(var)
|
||||
if len(partner_value) == 0:
|
||||
return {}
|
||||
|
||||
res: dict[int, str] = {}
|
||||
for partner_id in partner_value.keys():
|
||||
try:
|
||||
partner_id_int = int(partner_id.strip())
|
||||
res[partner_id_int] = partner_value[partner_id]
|
||||
except ValueError:
|
||||
pass
|
||||
return res
|
||||
|
||||
|
||||
PARTNER_DNS_CUSTOM_DOMAINS: dict[int, str] = read_partner_dict(
|
||||
"PARTNER_DNS_CUSTOM_DOMAINS"
|
||||
)
|
||||
PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES: dict[int, str] = read_partner_dict(
|
||||
"PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES"
|
||||
)
|
||||
|
||||
MAILBOX_VERIFICATION_OVERRIDE_CODE: Optional[str] = os.environ.get(
|
||||
"MAILBOX_VERIFICATION_OVERRIDE_CODE", None
|
||||
)
|
||||
|
||||
AUDIT_LOG_MAX_DAYS = int(os.environ.get("AUDIT_LOG_MAX_DAYS", 30))
|
||||
|
|
|
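A side note on the rate-limit settings shown in this file: the comment above getRateLimitFromConfig documents the value as "hits,seconds" pairs joined by ":". A minimal illustration of how the default free-plan value is parsed, using only the defaults visible in the hunk:

    limits = getRateLimitFromConfig("ALIAS_CREATE_RATE_LIMIT_FREE", "10,900:50,3600")
    # limits == [(10, 900), (50, 3600)]
    # i.e. at most 10 alias creations in the last 900 seconds and 50 in the last 3600 seconds
    # get_env_dict uses a similar convention, e.g. "1=partner.example;2=other.example"
    # -> {"1": "partner.example", "2": "other.example"}  (values here are placeholders)
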
@@ -1,2 +0,0 @@
HEADER_ALLOW_API_COOKIES = "X-Sl-Allowcookies"
DMARC_RECORD = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"

@@ -1,138 +0,0 @@
|
|||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
||||
from app.db import Session
|
||||
from app.email_utils import generate_reply_email, parse_full_address
|
||||
from app.email_validation import is_valid_email
|
||||
from app.log import LOG
|
||||
from app.models import Contact, Alias
|
||||
from app.utils import sanitize_email
|
||||
|
||||
|
||||
class ContactCreateError(Enum):
|
||||
InvalidEmail = "Invalid email"
|
||||
NotAllowed = "Your plan does not allow to create contacts"
|
||||
Unknown = "Unknown error when trying to create contact"
|
||||
|
||||
|
||||
@dataclass
|
||||
class ContactCreateResult:
|
||||
contact: Optional[Contact]
|
||||
created: bool
|
||||
error: Optional[ContactCreateError]
|
||||
|
||||
|
||||
def __update_contact_if_needed(
|
||||
contact: Contact, name: Optional[str], mail_from: Optional[str]
|
||||
) -> ContactCreateResult:
|
||||
if name and contact.name != name:
|
||||
LOG.d(f"Setting {contact} name to {name}")
|
||||
contact.name = name
|
||||
Session.commit()
|
||||
if mail_from and contact.mail_from is None:
|
||||
LOG.d(f"Setting {contact} mail_from to {mail_from}")
|
||||
contact.mail_from = mail_from
|
||||
Session.commit()
|
||||
return ContactCreateResult(contact, created=False, error=None)
|
||||
|
||||
|
||||
def create_contact(
|
||||
email: str,
|
||||
alias: Alias,
|
||||
name: Optional[str] = None,
|
||||
mail_from: Optional[str] = None,
|
||||
allow_empty_email: bool = False,
|
||||
automatic_created: bool = False,
|
||||
from_partner: bool = False,
|
||||
) -> ContactCreateResult:
|
||||
# If user cannot create contacts, they still need to be created when receiving an email for an alias
|
||||
if not automatic_created and not alias.user.can_create_contacts():
|
||||
return ContactCreateResult(
|
||||
None, created=False, error=ContactCreateError.NotAllowed
|
||||
)
|
||||
# Parse emails with form 'name <email>'
|
||||
try:
|
||||
email_name, email = parse_full_address(email)
|
||||
except ValueError:
|
||||
email = ""
|
||||
email_name = ""
|
||||
# If no name is explicitly given try to get it from the parsed email
|
||||
if name is None:
|
||||
name = email_name[: Contact.MAX_NAME_LENGTH]
|
||||
else:
|
||||
name = name[: Contact.MAX_NAME_LENGTH]
|
||||
# If still no name is there, make sure the name is None instead of empty string
|
||||
if not name:
|
||||
name = None
|
||||
if name is not None and "\x00" in name:
|
||||
LOG.w("Cannot use contact name because has \\x00")
|
||||
name = ""
|
||||
# Sanitize email and if it's not valid only allow to create a contact if it's explicitly allowed. Otherwise fail
|
||||
email = sanitize_email(email, not_lower=True)
|
||||
if not is_valid_email(email):
|
||||
LOG.w(f"invalid contact email {email}")
|
||||
if not allow_empty_email:
|
||||
return ContactCreateResult(
|
||||
None, created=False, error=ContactCreateError.InvalidEmail
|
||||
)
|
||||
LOG.d("Create a contact with invalid email for %s", alias)
|
||||
# either reuse a contact with empty email or create a new contact with empty email
|
||||
email = ""
|
||||
# If contact exists, update name and mail_from if needed
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=email)
|
||||
if contact is not None:
|
||||
return __update_contact_if_needed(contact, name, mail_from)
|
||||
# Create the contact
|
||||
reply_email = generate_reply_email(email, alias)
|
||||
alias_id = alias.id
|
||||
try:
|
||||
flags = Contact.FLAG_PARTNER_CREATED if from_partner else 0
|
||||
is_invalid_email = email == ""
|
||||
contact = Contact.create(
|
||||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=email,
|
||||
name=name,
|
||||
reply_email=reply_email,
|
||||
mail_from=mail_from,
|
||||
automatic_created=automatic_created,
|
||||
flags=flags,
|
||||
invalid_email=is_invalid_email,
|
||||
commit=True,
|
||||
)
|
||||
contact_id = contact.id
|
||||
if automatic_created:
|
||||
trail = ". Automatically created"
|
||||
else:
|
||||
trail = ". Created by user action"
|
||||
emit_alias_audit_log(
|
||||
alias=alias,
|
||||
action=AliasAuditLogAction.CreateContact,
|
||||
message=f"Created contact {contact_id} ({email}){trail}",
|
||||
commit=True,
|
||||
)
|
||||
LOG.d(
|
||||
f"Created contact {contact} for alias {alias} with email {email} invalid_email={is_invalid_email}"
|
||||
)
|
||||
return ContactCreateResult(contact, created=True, error=None)
|
||||
except IntegrityError:
|
||||
Session.rollback()
|
||||
LOG.info(
|
||||
f"Contact with email {email} for alias_id {alias_id} already existed, fetching from DB"
|
||||
)
|
||||
contact: Optional[Contact] = Contact.get_by(
|
||||
alias_id=alias_id, website_email=email
|
||||
)
|
||||
if contact:
|
||||
return __update_contact_if_needed(contact, name, mail_from)
|
||||
else:
|
||||
LOG.warning(
|
||||
f"Could not find contact with email {email} for alias_id {alias_id} and it should exist"
|
||||
)
|
||||
return ContactCreateResult(
|
||||
None, created=False, error=ContactCreateError.Unknown
|
||||
)
|
|
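For orientation, a minimal sketch of how the create_contact helper above might be invoked; the alias id and address are placeholders and only names visible in the module are used:

    alias = Alias.get(some_alias_id)  # hypothetical: an existing alias loaded elsewhere
    result = create_contact("Jane Doe <jane@example.com>", alias, automatic_created=True)
    if result.error is None:
        print(result.created, result.contact.reply_email)
    else:
        print("could not create contact:", result.error.value)
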
@@ -1,206 +0,0 @@
|
|||
import arrow
|
||||
import re
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
from app.config import JOB_DELETE_DOMAIN
|
||||
from app.db import Session
|
||||
from app.email_utils import get_email_domain_part
|
||||
from app.log import LOG
|
||||
from app.models import User, CustomDomain, SLDomain, Mailbox, Job, DomainMailbox
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
_ALLOWED_DOMAIN_REGEX = re.compile(r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")
|
||||
_MAX_MAILBOXES_PER_DOMAIN = 20
|
||||
|
||||
|
||||
@dataclass
|
||||
class CreateCustomDomainResult:
|
||||
message: str = ""
|
||||
message_category: str = ""
|
||||
success: bool = False
|
||||
instance: Optional[CustomDomain] = None
|
||||
redirect: Optional[str] = None
|
||||
|
||||
|
||||
class CannotUseDomainReason(Enum):
|
||||
InvalidDomain = 1
|
||||
BuiltinDomain = 2
|
||||
DomainAlreadyUsed = 3
|
||||
DomainPartOfUserEmail = 4
|
||||
DomainUserInMailbox = 5
|
||||
|
||||
def message(self, domain: str) -> str:
|
||||
if self == CannotUseDomainReason.InvalidDomain:
|
||||
return "This is not a valid domain"
|
||||
elif self == CannotUseDomainReason.BuiltinDomain:
|
||||
return "A custom domain cannot be a built-in domain."
|
||||
elif self == CannotUseDomainReason.DomainAlreadyUsed:
|
||||
return f"{domain} already used"
|
||||
elif self == CannotUseDomainReason.DomainPartOfUserEmail:
|
||||
return "You cannot add a domain that you are currently using for your personal email. Please change your personal email to your real email"
|
||||
elif self == CannotUseDomainReason.DomainUserInMailbox:
|
||||
return f"{domain} already used in a SimpleLogin mailbox"
|
||||
else:
|
||||
raise Exception("Invalid CannotUseDomainReason")
|
||||
|
||||
|
||||
class CannotSetCustomDomainMailboxesCause(Enum):
|
||||
InvalidMailbox = "Something went wrong, please retry"
|
||||
NoMailboxes = "You must select at least 1 mailbox"
|
||||
TooManyMailboxes = (
|
||||
f"You can only set up to {_MAX_MAILBOXES_PER_DOMAIN} mailboxes per domain"
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SetCustomDomainMailboxesResult:
|
||||
success: bool
|
||||
reason: Optional[CannotSetCustomDomainMailboxesCause] = None
|
||||
|
||||
|
||||
def is_valid_domain(domain: str) -> bool:
|
||||
"""
|
||||
Checks that a domain is valid according to RFC 1035
|
||||
"""
|
||||
if len(domain) > 255:
|
||||
return False
|
||||
if domain.endswith("."):
|
||||
domain = domain[:-1] # Strip the trailing dot
|
||||
labels = domain.split(".")
|
||||
if not labels:
|
||||
return False
|
||||
for label in labels:
|
||||
if not _ALLOWED_DOMAIN_REGEX.match(label):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def sanitize_domain(domain: str) -> str:
|
||||
new_domain = domain.lower().strip()
|
||||
if new_domain.startswith("http://"):
|
||||
new_domain = new_domain[len("http://") :]
|
||||
|
||||
if new_domain.startswith("https://"):
|
||||
new_domain = new_domain[len("https://") :]
|
||||
|
||||
return new_domain
|
||||
|
||||
|
||||
def can_domain_be_used(user: User, domain: str) -> Optional[CannotUseDomainReason]:
|
||||
if not is_valid_domain(domain):
|
||||
return CannotUseDomainReason.InvalidDomain
|
||||
elif SLDomain.get_by(domain=domain):
|
||||
return CannotUseDomainReason.BuiltinDomain
|
||||
elif CustomDomain.get_by(domain=domain):
|
||||
return CannotUseDomainReason.DomainAlreadyUsed
|
||||
elif get_email_domain_part(user.email) == domain:
|
||||
return CannotUseDomainReason.DomainPartOfUserEmail
|
||||
elif Mailbox.filter(
|
||||
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{domain}")
|
||||
).first():
|
||||
return CannotUseDomainReason.DomainUserInMailbox
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def create_custom_domain(
|
||||
user: User, domain: str, partner_id: Optional[int] = None
|
||||
) -> CreateCustomDomainResult:
|
||||
if not user.is_premium():
|
||||
return CreateCustomDomainResult(
|
||||
message="Only premium plan can add custom domain",
|
||||
message_category="warning",
|
||||
)
|
||||
|
||||
new_domain = sanitize_domain(domain)
|
||||
domain_forbidden_cause = can_domain_be_used(user, new_domain)
|
||||
if domain_forbidden_cause:
|
||||
return CreateCustomDomainResult(
|
||||
message=domain_forbidden_cause.message(new_domain), message_category="error"
|
||||
)
|
||||
|
||||
new_custom_domain = CustomDomain.create(domain=new_domain, user_id=user.id)
|
||||
|
||||
# new domain has ownership verified if its parent has the ownership verified
|
||||
for root_cd in user.custom_domains:
|
||||
if new_domain.endswith("." + root_cd.domain) and root_cd.ownership_verified:
|
||||
LOG.i(
|
||||
"%s ownership verified thanks to %s",
|
||||
new_custom_domain,
|
||||
root_cd,
|
||||
)
|
||||
new_custom_domain.ownership_verified = True
|
||||
|
||||
# Add the partner_id in case it's passed
|
||||
if partner_id is not None:
|
||||
new_custom_domain.partner_id = partner_id
|
||||
|
||||
emit_user_audit_log(
|
||||
user=user,
|
||||
action=UserAuditLogAction.CreateCustomDomain,
|
||||
message=f"Created custom domain {new_custom_domain.id} ({new_domain})",
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
return CreateCustomDomainResult(
|
||||
success=True,
|
||||
instance=new_custom_domain,
|
||||
)
|
||||
|
||||
|
||||
def delete_custom_domain(domain: CustomDomain):
|
||||
# Schedule delete domain job
|
||||
LOG.w("schedule delete domain job for %s", domain)
|
||||
domain.pending_deletion = True
|
||||
Job.create(
|
||||
name=JOB_DELETE_DOMAIN,
|
||||
payload={"custom_domain_id": domain.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
|
||||
|
||||
def set_custom_domain_mailboxes(
|
||||
user_id: int, custom_domain: CustomDomain, mailbox_ids: List[int]
|
||||
) -> SetCustomDomainMailboxesResult:
|
||||
if len(mailbox_ids) == 0:
|
||||
return SetCustomDomainMailboxesResult(
|
||||
success=False, reason=CannotSetCustomDomainMailboxesCause.NoMailboxes
|
||||
)
|
||||
elif len(mailbox_ids) > _MAX_MAILBOXES_PER_DOMAIN:
|
||||
return SetCustomDomainMailboxesResult(
|
||||
success=False, reason=CannotSetCustomDomainMailboxesCause.TooManyMailboxes
|
||||
)
|
||||
|
||||
mailboxes = (
|
||||
Session.query(Mailbox)
|
||||
.filter(
|
||||
Mailbox.id.in_(mailbox_ids),
|
||||
Mailbox.user_id == user_id,
|
||||
Mailbox.verified == True, # noqa: E712
|
||||
)
|
||||
.all()
|
||||
)
|
||||
if len(mailboxes) != len(mailbox_ids):
|
||||
return SetCustomDomainMailboxesResult(
|
||||
success=False, reason=CannotSetCustomDomainMailboxesCause.InvalidMailbox
|
||||
)
|
||||
|
||||
# first remove all existing domain-mailboxes links
|
||||
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
|
||||
Session.flush()
|
||||
|
||||
for mailbox in mailboxes:
|
||||
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
|
||||
|
||||
mailboxes_as_str = ",".join(map(str, mailbox_ids))
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.UpdateCustomDomain,
|
||||
message=f"Updated custom domain {custom_domain.id} mailboxes (domain={custom_domain.domain}) (mailboxes={mailboxes_as_str})",
|
||||
)
|
||||
Session.commit()
|
||||
return SetCustomDomainMailboxesResult(success=True)
|
|
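A quick illustration of the sanitising and validation helpers in the file above; the expected results are inferred from the regex and length checks shown, and example.com is a placeholder:

    sanitize_domain("  HTTPS://Mail.Example.COM ")  # -> "mail.example.com"
    is_valid_domain("mail.example.com")             # -> True
    is_valid_domain("-bad-label-.example.com")      # -> False (labels must not start or end with "-")
    is_valid_domain("a" * 300)                      # -> False (total length is capped at 255)
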
@@ -1,228 +0,0 @@
|
|||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
|
||||
from app import config
|
||||
from app.constants import DMARC_RECORD
|
||||
from app.db import Session
|
||||
from app.dns_utils import (
|
||||
MxRecord,
|
||||
DNSClient,
|
||||
is_mx_equivalent,
|
||||
get_network_dns_client,
|
||||
)
|
||||
from app.models import CustomDomain
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import random_string
|
||||
|
||||
|
||||
@dataclass
|
||||
class DomainValidationResult:
|
||||
success: bool
|
||||
errors: [str]
|
||||
|
||||
|
||||
class CustomDomainValidation:
|
||||
def __init__(
|
||||
self,
|
||||
dkim_domain: str,
|
||||
dns_client: DNSClient = get_network_dns_client(),
|
||||
partner_domains: Optional[dict[int, str]] = None,
|
||||
partner_domains_validation_prefixes: Optional[dict[int, str]] = None,
|
||||
):
|
||||
self.dkim_domain = dkim_domain
|
||||
self._dns_client = dns_client
|
||||
self._partner_domains = partner_domains or config.PARTNER_DNS_CUSTOM_DOMAINS
|
||||
self._partner_domain_validation_prefixes = (
|
||||
partner_domains_validation_prefixes
|
||||
or config.PARTNER_CUSTOM_DOMAIN_VALIDATION_PREFIXES
|
||||
)
|
||||
|
||||
def get_ownership_verification_record(self, domain: CustomDomain) -> str:
|
||||
prefix = "sl"
|
||||
if (
|
||||
domain.partner_id is not None
|
||||
and domain.partner_id in self._partner_domain_validation_prefixes
|
||||
):
|
||||
prefix = self._partner_domain_validation_prefixes[domain.partner_id]
|
||||
|
||||
if not domain.ownership_txt_token:
|
||||
domain.ownership_txt_token = random_string(30)
|
||||
Session.commit()
|
||||
|
||||
return f"{prefix}-verification={domain.ownership_txt_token}"
|
||||
|
||||
def get_expected_mx_records(self, domain: CustomDomain) -> list[MxRecord]:
|
||||
records = []
|
||||
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
|
||||
domain = self._partner_domains[domain.partner_id]
|
||||
records.append(MxRecord(10, f"mx1.{domain}."))
|
||||
records.append(MxRecord(20, f"mx2.{domain}."))
|
||||
else:
|
||||
# Default ones
|
||||
for priority, domain in config.EMAIL_SERVERS_WITH_PRIORITY:
|
||||
records.append(MxRecord(priority, domain))
|
||||
|
||||
return records
|
||||
|
||||
def get_expected_spf_domain(self, domain: CustomDomain) -> str:
|
||||
if domain.partner_id is not None and domain.partner_id in self._partner_domains:
|
||||
return self._partner_domains[domain.partner_id]
|
||||
else:
|
||||
return config.EMAIL_DOMAIN
|
||||
|
||||
def get_expected_spf_record(self, domain: CustomDomain) -> str:
|
||||
spf_domain = self.get_expected_spf_domain(domain)
|
||||
return f"v=spf1 include:{spf_domain} ~all"
|
||||
|
||||
def get_dkim_records(self, domain: CustomDomain) -> {str: str}:
|
||||
"""
|
||||
Get a list of dkim records to set up. Depending on the custom_domain, whether if it's from a partner or not,
|
||||
it will return the default ones or the partner ones.
|
||||
"""
|
||||
|
||||
# By default use the default domain
|
||||
dkim_domain = self.dkim_domain
|
||||
if domain.partner_id is not None:
|
||||
# Domain is from a partner. Retrieve the partner config and use that domain if exists
|
||||
dkim_domain = self._partner_domains.get(domain.partner_id, dkim_domain)
|
||||
|
||||
return {
|
||||
f"{key}._domainkey": f"{key}._domainkey.{dkim_domain}"
|
||||
for key in ("dkim", "dkim02", "dkim03")
|
||||
}
|
||||
|
||||
def validate_dkim_records(self, custom_domain: CustomDomain) -> dict[str, str]:
|
||||
"""
|
||||
Check if dkim records are properly set for this custom domain.
|
||||
Returns empty list if all records are ok. Other-wise return the records that aren't properly configured
|
||||
"""
|
||||
correct_records = {}
|
||||
invalid_records = {}
|
||||
expected_records = self.get_dkim_records(custom_domain)
|
||||
for prefix, expected_record in expected_records.items():
|
||||
custom_record = f"{prefix}.{custom_domain.domain}"
|
||||
dkim_record = self._dns_client.get_cname_record(custom_record)
|
||||
if dkim_record == expected_record:
|
||||
correct_records[prefix] = custom_record
|
||||
else:
|
||||
invalid_records[custom_record] = dkim_record or "empty"
|
||||
|
||||
# HACK
|
||||
# As initially we only had one dkim record, we want to allow users that had only the original dkim record and
|
||||
# the domain validated to continue seeing it as validated (although showing them the missing records).
|
||||
# However, if not even the original dkim record is right, even if the domain was dkim_verified in the past,
|
||||
# we will remove the dkim_verified flag.
|
||||
# This is done in order to give users with the old dkim config (only one) to update their CNAMEs
|
||||
if custom_domain.dkim_verified:
|
||||
# Check if at least the original dkim is there
|
||||
if correct_records.get("dkim._domainkey") is not None:
|
||||
# Original dkim record is there. Return the missing records (if any) and don't clear the flag
|
||||
return invalid_records
|
||||
|
||||
# Original DKIM record is not there, which means the DKIM config is not finished. Proceed with the
|
||||
# rest of the code path, returning the invalid records and clearing the flag
|
||||
custom_domain.dkim_verified = len(invalid_records) == 0
|
||||
if custom_domain.dkim_verified:
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified DKIM records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return invalid_records
|
||||
|
||||
def validate_domain_ownership(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
"""
|
||||
Check if the custom_domain has added the ownership verification records
|
||||
"""
|
||||
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
|
||||
expected_verification_record = self.get_ownership_verification_record(
|
||||
custom_domain
|
||||
)
|
||||
|
||||
if expected_verification_record in txt_records:
|
||||
custom_domain.ownership_verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified ownership for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
else:
|
||||
return DomainValidationResult(success=False, errors=txt_records)
|
||||
|
||||
def validate_mx_records(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
mx_domains = self._dns_client.get_mx_domains(custom_domain.domain)
|
||||
expected_mx_records = self.get_expected_mx_records(custom_domain)
|
||||
|
||||
if not is_mx_equivalent(mx_domains, expected_mx_records):
|
||||
return DomainValidationResult(
|
||||
success=False,
|
||||
errors=[f"{record.priority} {record.domain}" for record in mx_domains],
|
||||
)
|
||||
else:
|
||||
custom_domain.verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified MX records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
|
||||
def validate_spf_records(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
spf_domains = self._dns_client.get_spf_domain(custom_domain.domain)
|
||||
expected_spf_domain = self.get_expected_spf_domain(custom_domain)
|
||||
if expected_spf_domain in spf_domains:
|
||||
custom_domain.spf_verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified SPF records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
else:
|
||||
custom_domain.spf_verified = False
|
||||
Session.commit()
|
||||
txt_records = self._dns_client.get_txt_record(custom_domain.domain)
|
||||
cleaned_records = self.__clean_spf_records(txt_records, custom_domain)
|
||||
return DomainValidationResult(
|
||||
success=False,
|
||||
errors=cleaned_records,
|
||||
)
|
||||
|
||||
def validate_dmarc_records(
|
||||
self, custom_domain: CustomDomain
|
||||
) -> DomainValidationResult:
|
||||
txt_records = self._dns_client.get_txt_record("_dmarc." + custom_domain.domain)
|
||||
if DMARC_RECORD in txt_records:
|
||||
custom_domain.dmarc_verified = True
|
||||
emit_user_audit_log(
|
||||
user=custom_domain.user,
|
||||
action=UserAuditLogAction.VerifyCustomDomain,
|
||||
message=f"Verified DMARC records for custom domain {custom_domain.id} ({custom_domain.domain})",
|
||||
)
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=True, errors=[])
|
||||
else:
|
||||
custom_domain.dmarc_verified = False
|
||||
Session.commit()
|
||||
return DomainValidationResult(success=False, errors=txt_records)
|
||||
|
||||
def __clean_spf_records(
|
||||
self, txt_records: List[str], custom_domain: CustomDomain
|
||||
) -> List[str]:
|
||||
final_records = []
|
||||
verification_record = self.get_ownership_verification_record(custom_domain)
|
||||
for record in txt_records:
|
||||
if record != verification_record:
|
||||
final_records.append(record)
|
||||
return final_records
|
|
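Putting the validators above together, a non-partner custom domain roughly needs the following records before every check passes; the domain, token, MX hosts and DKIM target are placeholders standing in for the configured EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN and dkim_domain values:

    example.com                    TXT    "sl-verification=<ownership_txt_token>"
    example.com                    MX     <priority> <mx host from EMAIL_SERVERS_WITH_PRIORITY>
    example.com                    TXT    "v=spf1 include:<EMAIL_DOMAIN> ~all"
    dkim._domainkey.example.com    CNAME  dkim._domainkey.<dkim_domain>.
    dkim02._domainkey.example.com  CNAME  dkim02._domainkey.<dkim_domain>.
    dkim03._domainkey.example.com  CNAME  dkim03._domainkey.<dkim_domain>.
    _dmarc.example.com             TXT    "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
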
@@ -6,7 +6,6 @@ from .views import (
    subdomain,
    billing,
    alias_log,
    alias_export,
    unsubscribe,
    api_key,
    custom_domain,
@@ -24,6 +23,7 @@ from .views import (
    mailbox_detail,
    refused_email,
    referral,
    recovery_code,
    contact_detail,
    setup_done,
    batch_import,
@@ -32,42 +32,4 @@ from .views import (
    delete_account,
    notification,
    support,
    account_setting,
)

__all__ = [
    "index",
    "pricing",
    "setting",
    "custom_alias",
    "subdomain",
    "billing",
    "alias_log",
    "alias_export",
    "unsubscribe",
    "api_key",
    "custom_domain",
    "alias_contact_manager",
    "enter_sudo",
    "mfa_setup",
    "mfa_cancel",
    "fido_setup",
    "coupon",
    "fido_manage",
    "domain_detail",
    "lifetime_licence",
    "directory",
    "mailbox",
    "mailbox_detail",
    "refused_email",
    "referral",
    "contact_detail",
    "setup_done",
    "batch_import",
    "alias_transfer",
    "app",
    "delete_account",
    "notification",
    "support",
    "account_setting",
]

@@ -1,244 +0,0 @@
|
|||
import secrets
|
||||
|
||||
import arrow
|
||||
from flask import (
|
||||
render_template,
|
||||
request,
|
||||
redirect,
|
||||
url_for,
|
||||
flash,
|
||||
)
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from app import email_utils
|
||||
from app.config import (
|
||||
URL,
|
||||
FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
CONNECT_WITH_PROTON,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.dashboard.views.enter_sudo import sudo_required
|
||||
from app.dashboard.views.mailbox_detail import ChangeEmailForm
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
personal_email_already_used,
|
||||
)
|
||||
from app.extensions import limiter
|
||||
from app.jobs.export_user_data_job import ExportUserDataJob
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
BlockBehaviourEnum,
|
||||
PlanEnum,
|
||||
ResetPasswordCode,
|
||||
EmailChange,
|
||||
User,
|
||||
Alias,
|
||||
AliasGeneratorEnum,
|
||||
SenderFormatEnum,
|
||||
UnsubscribeBehaviourEnum,
|
||||
)
|
||||
from app.proton.utils import perform_proton_account_unlink
|
||||
from app.utils import (
|
||||
random_string,
|
||||
CSRFValidationForm,
|
||||
canonicalize_email,
|
||||
)
|
||||
|
||||
|
||||
@dashboard_bp.route("/account_setting", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
@limiter.limit("5/minute", methods=["POST"])
|
||||
def account_setting():
|
||||
change_email_form = ChangeEmailForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
pending_email = email_change.new_email
|
||||
else:
|
||||
pending_email = None
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
if request.form.get("form-name") == "update-email":
|
||||
if change_email_form.validate():
|
||||
# whether user can proceed with the email update
|
||||
new_email_valid = True
|
||||
new_email = canonicalize_email(change_email_form.email.data)
|
||||
if new_email != current_user.email and not pending_email:
|
||||
# check if this email is not already used
|
||||
if personal_email_already_used(new_email) or Alias.get_by(
|
||||
email=new_email
|
||||
):
|
||||
flash(f"Email {new_email} already used", "error")
|
||||
new_email_valid = False
|
||||
elif not email_can_be_used_as_mailbox(new_email):
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
# a pending email change with the same email exists from another user
|
||||
elif EmailChange.get_by(new_email=new_email):
|
||||
other_email_change: EmailChange = EmailChange.get_by(
|
||||
new_email=new_email
|
||||
)
|
||||
LOG.w(
|
||||
"Another user has a pending %s with the same email address. Current user:%s",
|
||||
other_email_change,
|
||||
current_user,
|
||||
)
|
||||
|
||||
if other_email_change.is_expired():
|
||||
LOG.d(
|
||||
"delete the expired email change %s", other_email_change
|
||||
)
|
||||
EmailChange.delete(other_email_change.id)
|
||||
Session.commit()
|
||||
else:
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
|
||||
if new_email_valid:
|
||||
email_change = EmailChange.create(
|
||||
user_id=current_user.id,
|
||||
code=random_string(
|
||||
60
|
||||
), # todo: make sure the code is unique
|
||||
new_email=new_email,
|
||||
)
|
||||
Session.commit()
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash(
|
||||
"A confirmation email is on the way, please check your inbox",
|
||||
"success",
|
||||
)
|
||||
return redirect(url_for("dashboard.account_setting"))
|
||||
elif request.form.get("form-name") == "change-password":
|
||||
flash(
|
||||
"You are going to receive an email containing instructions to change your password",
|
||||
"success",
|
||||
)
|
||||
send_reset_password_email(current_user)
|
||||
return redirect(url_for("dashboard.account_setting"))
|
||||
elif request.form.get("form-name") == "send-full-user-report":
|
||||
if ExportUserDataJob(current_user).store_job_in_db():
|
||||
flash(
|
||||
"You will receive your SimpleLogin data via email shortly",
|
||||
"success",
|
||||
)
|
||||
else:
|
||||
flash("An export of your data is currently in progress", "error")
|
||||
|
||||
partner_sub = None
|
||||
partner_name = None
|
||||
|
||||
return render_template(
|
||||
"dashboard/account_setting.html",
|
||||
csrf_form=csrf_form,
|
||||
PlanEnum=PlanEnum,
|
||||
SenderFormatEnum=SenderFormatEnum,
|
||||
BlockBehaviourEnum=BlockBehaviourEnum,
|
||||
change_email_form=change_email_form,
|
||||
pending_email=pending_email,
|
||||
AliasGeneratorEnum=AliasGeneratorEnum,
|
||||
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
|
||||
partner_sub=partner_sub,
|
||||
partner_name=partner_name,
|
||||
FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
connect_with_proton=CONNECT_WITH_PROTON,
|
||||
)
|
||||
|
||||
|
||||
def send_reset_password_email(user):
|
||||
"""
|
||||
generate a new ResetPasswordCode and send it over email to user
|
||||
"""
|
||||
# the activation code is valid for 1h
|
||||
reset_password_code = ResetPasswordCode.create(
|
||||
user_id=user.id, code=secrets.token_urlsafe(32)
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"
|
||||
|
||||
email_utils.send_reset_password_email(user, reset_password_link)
|
||||
|
||||
|
||||
def send_change_email_confirmation(user: User, email_change: EmailChange):
|
||||
"""
|
||||
send confirmation email to the new email address
|
||||
"""
|
||||
|
||||
link = f"{URL}/auth/change_email?code={email_change.code}"
|
||||
|
||||
email_utils.send_change_email(user, email_change.new_email, link)
|
||||
|
||||
|
||||
@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
|
||||
@limiter.limit("5/hour")
|
||||
@login_required
|
||||
@sudo_required
|
||||
def resend_email_change():
|
||||
form = CSRFValidationForm()
|
||||
if not form.validate():
|
||||
flash("Invalid request. Please try again", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
# extend email change expiration
|
||||
email_change.expired = arrow.now().shift(hours=12)
|
||||
Session.commit()
|
||||
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash("A confirmation email is on the way, please check your inbox", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
def cancel_email_change():
|
||||
form = CSRFValidationForm()
|
||||
if not form.validate():
|
||||
flash("Invalid request. Please try again", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
EmailChange.delete(email_change.id)
|
||||
Session.commit()
|
||||
flash("Your email change is cancelled", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
else:
|
||||
flash(
|
||||
"You have no pending email change. Redirect back to Setting page", "warning"
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/unlink_proton_account", methods=["POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
def unlink_proton_account():
|
||||
csrf_form = CSRFValidationForm()
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
perform_proton_account_unlink(current_user)
|
||||
flash("Your Proton account has been unlinked", "success")
|
||||
return redirect(url_for("dashboard.setting"))
|
|
@@ -1,6 +1,5 @@
|
|||
from dataclasses import dataclass
|
||||
from operator import or_
|
||||
from typing import Optional
|
||||
|
||||
from flask import render_template, request, redirect, flash
|
||||
from flask import url_for
|
||||
|
@@ -9,22 +8,17 @@ from flask_wtf import FlaskForm
|
|||
from sqlalchemy import and_, func, case
|
||||
from wtforms import StringField, validators, ValidationError
|
||||
|
||||
# Need to import directly from config to allow modification from the tests
|
||||
from app import config, parallel_limiter, contact_utils
|
||||
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
|
||||
from app.contact_utils import ContactCreateError
|
||||
from app.config import PAGE_LIMIT
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.email_validation import is_valid_email
|
||||
from app.errors import (
|
||||
CannotCreateContactForReverseAlias,
|
||||
ErrContactErrorUpgradeNeeded,
|
||||
ErrAddressInvalid,
|
||||
ErrContactAlreadyExists,
|
||||
from app.email_utils import (
|
||||
is_valid_email,
|
||||
generate_reply_email,
|
||||
parse_full_address,
|
||||
)
|
||||
from app.errors import CannotCreateContactForReverseAlias
|
||||
from app.log import LOG
|
||||
from app.models import Alias, Contact, EmailLog
|
||||
from app.utils import CSRFValidationForm
|
||||
|
||||
|
||||
def email_validator():
|
||||
|
@@ -50,37 +44,6 @@ def email_validator():
|
|||
return _check
|
||||
|
||||
|
||||
def create_contact(alias: Alias, contact_address: str) -> Contact:
|
||||
"""
|
||||
Create a contact for a user. Can be restricted for new free users by enabling DISABLE_CREATE_CONTACTS_FOR_FREE_USERS.
|
||||
Can throw exceptions:
|
||||
- ErrAddressInvalid
|
||||
- ErrContactAlreadyExists
|
||||
- ErrContactUpgradeNeeded - If DISABLE_CREATE_CONTACTS_FOR_FREE_USERS this exception will be raised for new free users
|
||||
"""
|
||||
if not contact_address:
|
||||
raise ErrAddressInvalid("Empty address")
|
||||
output = contact_utils.create_contact(email=contact_address, alias=alias)
|
||||
if output.error == ContactCreateError.InvalidEmail:
|
||||
raise ErrAddressInvalid(contact_address)
|
||||
elif output.error == ContactCreateError.NotAllowed:
|
||||
raise ErrContactErrorUpgradeNeeded()
|
||||
elif output.error is not None:
|
||||
raise ErrAddressInvalid("Invalid address")
|
||||
elif not output.created:
|
||||
raise ErrContactAlreadyExists(output.contact)
|
||||
|
||||
contact = output.contact
|
||||
LOG.d(
|
||||
"create reverse-alias for %s %s, reverse alias:%s",
|
||||
contact_address,
|
||||
alias,
|
||||
contact.reply_email,
|
||||
)
|
||||
|
||||
return contact
|
||||
|
||||
|
||||
class NewContactForm(FlaskForm):
|
||||
email = StringField(
|
||||
"Email", validators=[validators.DataRequired(), email_validator()]
|
||||
|
@@ -172,11 +135,7 @@ def get_contact_infos(
|
|||
],
|
||||
else_=Contact.created_at,
|
||||
)
|
||||
q = (
|
||||
q.order_by(latest_activity.desc())
|
||||
.limit(config.PAGE_LIMIT)
|
||||
.offset(page * config.PAGE_LIMIT)
|
||||
)
|
||||
q = q.order_by(latest_activity.desc()).limit(PAGE_LIMIT).offset(page * PAGE_LIMIT)
|
||||
|
||||
ret = []
|
||||
for contact, latest_email_log, nb_reply, nb_forward in q:
|
||||
|
@@ -191,46 +150,18 @@ def get_contact_infos(
|
|||
return ret
|
||||
|
||||
|
||||
def delete_contact(alias: Alias, contact_id: int):
|
||||
contact: Optional[Contact] = Contact.get(contact_id)
|
||||
|
||||
if not contact:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
elif contact.alias_id != alias.id:
|
||||
flash("You cannot delete reverse-alias", "warning")
|
||||
else:
|
||||
delete_contact_email = contact.website_email
|
||||
emit_alias_audit_log(
|
||||
alias=alias,
|
||||
action=AliasAuditLogAction.DeleteContact,
|
||||
message=f"Delete contact {contact_id} ({contact.email})",
|
||||
)
|
||||
Contact.delete(contact_id)
|
||||
Session.commit()
|
||||
|
||||
flash(f"Reverse-alias for {delete_contact_email} has been deleted", "success")
|
||||
|
||||
|
||||
@dashboard_bp.route("/alias_contact_manager/<int:alias_id>/", methods=["GET", "POST"])
|
||||
@dashboard_bp.route("/alias_contact_manager/<alias_id>/", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@parallel_limiter.lock(name="contact_creation")
|
||||
def alias_contact_manager(alias_id):
|
||||
highlight_contact_id = None
|
||||
if request.args.get("highlight_contact_id"):
|
||||
try:
|
||||
highlight_contact_id = int(request.args.get("highlight_contact_id"))
|
||||
except ValueError:
|
||||
flash("Invalid contact id", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
highlight_contact_id = int(request.args.get("highlight_contact_id"))
|
||||
|
||||
alias = Alias.get(alias_id)
|
||||
|
||||
page = 0
|
||||
if request.args.get("page"):
|
||||
try:
|
||||
page = int(request.args.get("page"))
|
||||
except ValueError:
|
||||
pass
|
||||
page = int(request.args.get("page"))
|
||||
|
||||
query = request.args.get("query") or ""
|
||||
|
||||
|
@ -244,26 +175,49 @@ def alias_contact_manager(alias_id):
|
|||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
new_contact_form = NewContactForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
if request.form.get("form-name") == "create":
|
||||
if new_contact_form.validate():
|
||||
contact_address = new_contact_form.email.data.strip()
|
||||
contact_addr = new_contact_form.email.data.strip()
|
||||
|
||||
try:
|
||||
contact = create_contact(alias, contact_address)
|
||||
except (
|
||||
ErrContactErrorUpgradeNeeded,
|
||||
ErrAddressInvalid,
|
||||
ErrContactAlreadyExists,
|
||||
CannotCreateContactForReverseAlias,
|
||||
) as excp:
|
||||
flash(excp.error_for_user(), "error")
|
||||
contact_name, contact_email = parse_full_address(contact_addr)
|
||||
except Exception:
|
||||
flash(f"{contact_addr} is invalid", "error")
|
||||
return redirect(request.url)
|
||||
flash(f"Reverse alias for {contact_address} is created", "success")
|
||||
|
||||
if not is_valid_email(contact_email):
|
||||
flash(f"{contact_email} is invalid", "error")
|
||||
return redirect(request.url)
|
||||
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
# already been added
|
||||
if contact:
|
||||
flash(f"{contact_email} is already added", "error")
|
||||
return redirect(request.url)
|
||||
|
||||
try:
|
||||
contact = Contact.create(
|
||||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=contact_email,
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_email, current_user),
|
||||
)
|
||||
except CannotCreateContactForReverseAlias:
|
||||
flash("You can't create contact for a reverse alias", "error")
|
||||
return redirect(request.url)
|
||||
|
||||
LOG.d(
|
||||
"create reverse-alias for %s %s, reverse alias:%s",
|
||||
contact_addr,
|
||||
alias,
|
||||
contact.reply_email,
|
||||
)
|
||||
Session.commit()
|
||||
flash(f"Reverse alias for {contact_addr} is created", "success")
|
||||
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.alias_contact_manager",
|
||||
|
@ -273,7 +227,27 @@ def alias_contact_manager(alias_id):
|
|||
)
|
||||
elif request.form.get("form-name") == "delete":
|
||||
contact_id = request.form.get("contact-id")
|
||||
delete_contact(alias, contact_id)
|
||||
contact = Contact.get(contact_id)
|
||||
|
||||
if not contact:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
return redirect(
|
||||
url_for("dashboard.alias_contact_manager", alias_id=alias_id)
|
||||
)
|
||||
elif contact.alias_id != alias.id:
|
||||
flash("You cannot delete reverse-alias", "warning")
|
||||
return redirect(
|
||||
url_for("dashboard.alias_contact_manager", alias_id=alias_id)
|
||||
)
|
||||
|
||||
delete_contact_email = contact.website_email
|
||||
Contact.delete(contact_id)
|
||||
Session.commit()
|
||||
|
||||
flash(
|
||||
f"Reverse-alias for {delete_contact_email} has been deleted", "success"
|
||||
)
|
||||
|
||||
return redirect(
|
||||
url_for("dashboard.alias_contact_manager", alias_id=alias_id)
|
||||
)
|
||||
|
@ -290,7 +264,7 @@ def alias_contact_manager(alias_id):
|
|||
)
|
||||
|
||||
contact_infos = get_contact_infos(alias, page, query=query)
|
||||
last_page = len(contact_infos) < config.PAGE_LIMIT
|
||||
last_page = len(contact_infos) < PAGE_LIMIT
|
||||
nb_contact = Contact.filter(Contact.alias_id == alias.id).count()
|
||||
|
||||
# if highlighted contact isn't included, fetch it
|
||||
|
@ -312,6 +286,4 @@ def alias_contact_manager(alias_id):
|
|||
last_page=last_page,
|
||||
query=query,
|
||||
nb_contact=nb_contact,
|
||||
can_create_contacts=current_user.can_create_contacts(),
|
||||
csrf_form=csrf_form,
|
||||
)
|
||||
|
|
|
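For context on the hunks above: `create_contact` funnels every failure of `contact_utils.create_contact` into a typed exception, so the route only has to catch and display them. A minimal sketch of that calling contract, assuming the exception classes imported at the top of the file (the helper name `try_add_contact` is made up for illustration):

```python
def try_add_contact(alias: Alias, address: str):
    """Illustrative only: mirrors how the route consumes create_contact()."""
    try:
        contact = create_contact(alias, address)
    except (
        ErrAddressInvalid,
        ErrContactAlreadyExists,
        ErrContactErrorUpgradeNeeded,
        CannotCreateContactForReverseAlias,
    ) as exc:
        # every error type carries its own user-facing message
        return None, exc.error_for_user()
    return contact, f"Reverse alias for {address} is created"
```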
@@ -1,13 +0,0 @@
from app.dashboard.base import dashboard_bp
from flask_login import login_required, current_user
from app.alias_utils import alias_export_csv
from app.dashboard.views.enter_sudo import sudo_required
from app.extensions import limiter


@dashboard_bp.route("/alias_export", methods=["GET"])
@login_required
@sudo_required
@limiter.limit("2/minute")
def alias_export_route():
    return alias_export_csv(current_user)
@@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
            contact=contact,
        )
        logs.append(al)
    logs = sorted(logs, key=lambda log: log.when, reverse=True)
    logs = sorted(logs, key=lambda l: l.when, reverse=True)

    return logs
@@ -1,38 +1,82 @@
import base64
import hmac
import secrets
from uuid import uuid4

import arrow
from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user

from app import config
from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.alias_utils import transfer_alias
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import send_email, render
from app.extensions import limiter
from app.log import LOG
from app.models import (
    Alias,
    Contact,
    AliasUsedOn,
    AliasMailbox,
    User,
    ClientUser,
)
from app.models import Mailbox
from app.utils import CSRFValidationForm


def hmac_alias_transfer_token(transfer_token: str) -> str:
    alias_hmac = hmac.new(
        config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
        transfer_token.encode("utf-8"),
        "sha3_224",
def transfer(alias, new_user, new_mailboxes: [Mailbox]):
    # cannot transfer alias which is used for receiving newsletter
    if User.get_by(newsletter_alias_id=alias.id):
        raise Exception("Cannot transfer alias that's used to receive newsletter")

    # update user_id
    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
        {"user_id": new_user.id}
    )
    return base64.urlsafe_b64encode(alias_hmac.digest()).decode("utf-8").rstrip("=")

    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
        {"user_id": new_user.id}
    )

    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
        {"user_id": new_user.id}
    )

    # remove existing mailboxes from the alias
    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()

    # set mailboxes
    alias.mailbox_id = new_mailboxes.pop().id
    for mb in new_mailboxes:
        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)

    # alias has never been transferred before
    if not alias.original_owner_id:
        alias.original_owner_id = alias.user_id

    # inform previous owner
    old_user = alias.user
    send_email(
        old_user.email,
        f"Alias {alias.email} has been received",
        render(
            "transactional/alias-transferred.txt",
            alias=alias,
        ),
        render(
            "transactional/alias-transferred.html",
            alias=alias,
        ),
    )

    # now the alias belongs to the new user
    alias.user_id = new_user.id

    # set some fields back to default
    alias.disable_pgp = False
    alias.pinned = False

    Session.commit()


@dashboard_bp.route("/alias_transfer/send/<int:alias_id>/", methods=["GET", "POST"])
@login_required
@sudo_required
def alias_transfer_send_route(alias_id):
    alias = Alias.get(alias_id)
    if not alias or alias.user_id != current_user.id:

@@ -46,46 +90,37 @@ def alias_transfer_send_route(alias_id):
        )
        return redirect(url_for("dashboard.index"))

    alias_transfer_url = None
    csrf_form = CSRFValidationForm()
    if alias.transfer_token:
        alias_transfer_url = (
            URL + "/dashboard/alias_transfer/receive" + f"?token={alias.transfer_token}"
        )
    else:
        alias_transfer_url = None

    # generate a new transfer_token
    if request.method == "POST":
        if not csrf_form.validate():
            flash("Invalid request", "warning")
            return redirect(request.url)
        # generate a new transfer_token
        if request.form.get("form-name") == "create":
            transfer_token = f"{alias.id}.{secrets.token_urlsafe(32)}"
            alias.transfer_token = hmac_alias_transfer_token(transfer_token)
            alias.transfer_token_expiration = arrow.utcnow().shift(hours=24)

            emit_alias_audit_log(
                alias,
                AliasAuditLogAction.InitiateTransferAlias,
                "Initiated alias transfer",
            )
        alias.transfer_token = str(uuid4())
        Session.commit()
            alias_transfer_url = (
                config.URL
                URL
                + "/dashboard/alias_transfer/receive"
                + f"?token={transfer_token}"
                + f"?token={alias.transfer_token}"
            )
            flash("Share alias URL created", "success")
        flash("Share URL created", "success")
            return redirect(request.url)
        # request.form.get("form-name") == "remove"
        else:
            alias.transfer_token = None
            alias.transfer_token_expiration = None
            Session.commit()
            alias_transfer_url = None
            flash("Share URL deleted", "success")
            return redirect(request.url)

    return render_template(
        "dashboard/alias_transfer_send.html",
        alias=alias,
        alias_transfer_url=alias_transfer_url,
        link_active=alias.transfer_token_expiration is not None
        and alias.transfer_token_expiration > arrow.utcnow(),
        csrf_form=csrf_form,
    )


@@ -97,27 +132,12 @@ def alias_transfer_receive_route():
    URL has ?alias_id=signed_alias_id
    """
    token = request.args.get("token")
    if not token:
        flash("Invalid transfer token", "error")
        return redirect(url_for("dashboard.index"))
    hashed_token = hmac_alias_transfer_token(token)
    # TODO: Don't allow unhashed tokens once all the tokens have been migrated to the new format
    alias = Alias.get_by(transfer_token=token) or Alias.get_by(
        transfer_token=hashed_token
    )
    alias = Alias.get_by(transfer_token=token)

    if not alias:
        flash("Invalid link", "error")
        return redirect(url_for("dashboard.index"))

    # TODO: Don't allow none once all the tokens have been migrated to the new format
    if (
        alias.transfer_token_expiration is not None
        and alias.transfer_token_expiration < arrow.utcnow()
    ):
        flash("Expired link, please request a new one", "error")
        return redirect(url_for("dashboard.index"))

    # alias already belongs to this user
    if alias.user_id == current_user.id:
        flash("You already own this alias", "warning")

@@ -154,20 +174,13 @@ def alias_transfer_receive_route():
        return redirect(request.url)

    LOG.d(
        "transfer alias %s from %s to %s with %s with token %s",
        "transfer alias %s from %s to %s with %s",
        alias,
        alias.user,
        current_user,
        mailboxes,
        token,
    )
    transfer_alias(alias, current_user, mailboxes)

    # reset transfer token
    alias.transfer_token = None
    alias.transfer_token_expiration = None
    Session.commit()

    transfer(alias, current_user, mailboxes)
    flash(f"You are now owner of {alias.email}", "success")
    return redirect(url_for("dashboard.index", highlight_alias_id=alias.id))
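The hunks above replace the plain `uuid4()` transfer token with an HMAC scheme: the URL carries the raw token while the database only stores `hmac_alias_transfer_token(token)`. A hedged sketch of the matching verification step (the standalone helper below is not part of the codebase, just an illustration of the check the receive route performs):

```python
import hmac


def transfer_token_matches(presented_token: str, stored_value: str) -> bool:
    # re-derive the HMAC of the presented token and compare in constant time,
    # so a database leak alone is not enough to forge a transfer link
    return hmac.compare_digest(hmac_alias_transfer_token(presented_token), stored_value)
```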
@@ -3,47 +3,19 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import config
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.models import ApiKey
from app.utils import CSRFValidationForm


class NewApiKeyForm(FlaskForm):
    name = StringField("Name", validators=[validators.DataRequired()])


def clean_up_unused_or_old_api_keys(user_id: int):
    total_keys = ApiKey.filter_by(user_id=user_id).count()
    if total_keys <= config.MAX_API_KEYS:
        return
    # Remove oldest unused
    for api_key in (
        ApiKey.filter_by(user_id=user_id, last_used=None)
        .order_by(ApiKey.created_at.asc())
        .all()
    ):
        Session.delete(api_key)
        total_keys -= 1
        if total_keys <= config.MAX_API_KEYS:
            return
    # Clean up oldest used
    for api_key in (
        ApiKey.filter_by(user_id=user_id).order_by(ApiKey.last_used.asc()).all()
    ):
        Session.delete(api_key)
        total_keys -= 1
        if total_keys <= config.MAX_API_KEYS:
            return


@dashboard_bp.route("/api_key", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/hour")
def api_key():
    api_keys = (
        ApiKey.filter(ApiKey.user_id == current_user.id)

@@ -51,13 +23,9 @@ def api_key():
        .all()
    )

    csrf_form = CSRFValidationForm()
    new_api_key_form = NewApiKeyForm()

    if request.method == "POST":
        if not csrf_form.validate():
            flash("Invalid request", "warning")
            return redirect(request.url)
        if request.form.get("form-name") == "delete":
            api_key_id = request.form.get("api-key-id")

@@ -77,15 +45,11 @@ def api_key():

        elif request.form.get("form-name") == "create":
            if new_api_key_form.validate():
                clean_up_unused_or_old_api_keys(current_user.id)
                new_api_key = ApiKey.create(
                    name=new_api_key_form.name.data, user_id=current_user.id
                )
                Session.commit()
                flash(f"New API Key {new_api_key.name} has been created", "success")
                return render_template(
                    "dashboard/new_api_key.html", api_key=new_api_key
                )

        elif request.form.get("form-name") == "delete-all":
            ApiKey.delete_all(current_user.id)

@@ -95,8 +59,5 @@ def api_key():
            return redirect(url_for("dashboard.api_key"))

    return render_template(
        "dashboard/api_key.html",
        api_keys=api_keys,
        new_api_key_form=new_api_key_form,
        csrf_form=csrf_form,
        "dashboard/api_key.html", api_keys=api_keys, new_api_key_form=new_api_key_form
    )
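`clean_up_unused_or_old_api_keys` above is a two-pass eviction: never-used keys are deleted oldest-first, then the least recently used ones, stopping as soon as the total fits under `MAX_API_KEYS`. A simplified in-memory model of the same policy (the `ApiKeyStub` dataclass and the function name are invented for illustration):

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class ApiKeyStub:  # hypothetical stand-in for the ApiKey model
    created_at: int
    last_used: Optional[int] = None


def keys_to_evict(keys: List[ApiKeyStub], max_keys: int) -> List[ApiKeyStub]:
    evicted = []
    never_used = sorted((k for k in keys if k.last_used is None), key=lambda k: k.created_at)
    used = sorted((k for k in keys if k.last_used is not None), key=lambda k: k.last_used)
    for pool in (never_used, used):  # never-used keys are sacrificed first
        for key in pool:
            if len(keys) - len(evicted) <= max_keys:
                return evicted
            evicted.append(key)
    return evicted
```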
@@ -1,9 +1,14 @@
from app.db import Session

"""
List of apps that user has used via the "Sign in with SimpleLogin"
"""

from flask import render_template, request, flash, redirect
from flask_login import login_required, current_user
from sqlalchemy.orm import joinedload

from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import (
    ClientUser,
)

@@ -12,10 +17,6 @@ from app.models import (
@dashboard_bp.route("/app", methods=["GET", "POST"])
@login_required
def app_route():
    """
    List of apps that user has used via the "Sign in with SimpleLogin"
    """

    client_users = (
        ClientUser.filter_by(user_id=current_user.id)
        .options(joinedload(ClientUser.client))
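The `.options(joinedload(ClientUser.client))` call in the hunk above eager-loads each row's client in the same query instead of issuing one extra SELECT per `.client` access. A hedged sketch of the difference, reusing the names imported in this file (the exact relationship definitions are assumed):

```python
from sqlalchemy.orm import joinedload

# lazy (default): 1 query for the ClientUser rows + 1 query per .client access
lazy_rows = ClientUser.filter_by(user_id=current_user.id).all()

# eager: a single JOINed query; row.client is already populated afterwards
eager_rows = (
    ClientUser.filter_by(user_id=current_user.id)
    .options(joinedload(ClientUser.client))
    .all()
)
```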
@@ -5,18 +5,14 @@ from flask_login import login_required, current_user
from app import s3
from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.log import LOG
from app.models import File, BatchImport, Job
from app.utils import random_string, CSRFValidationForm
from app.utils import random_string


@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/minute", methods=["POST"])
def batch_import_route():
    # only for users who have custom domains
    if not current_user.verified_custom_domains():

@@ -29,27 +25,9 @@ def batch_import_route():
        )
        return redirect(url_for("dashboard.index"))

    batch_imports = BatchImport.filter_by(
        user_id=current_user.id, processed=False
    ).all()

    csrf_form = CSRFValidationForm()
    batch_imports = BatchImport.filter_by(user_id=current_user.id).all()

    if request.method == "POST":
        if not csrf_form.validate():
            flash("Invalid request", "warning")
            return redirect(request.url)
        if len(batch_imports) > 10:
            flash(
                "You have too many imports already. Please wait until some get cleaned up",
                "error",
            )
            return render_template(
                "dashboard/batch_import.html",
                batch_imports=batch_imports,
                csrf_form=csrf_form,
            )

        alias_file = request.files["alias-file"]

        file_path = random_string(20) + ".csv"

@@ -77,6 +55,4 @@ def batch_import_route():

        return redirect(url_for("dashboard.batch_import_route"))

    return render_template(
        "dashboard/batch_import.html", batch_imports=batch_imports, csrf_form=csrf_form
    )
    return render_template("dashboard/batch_import.html", batch_imports=batch_imports)
@@ -13,7 +13,7 @@ from app.paddle_utils import cancel_subscription, change_plan
@login_required
def billing():
    # sanity check: make sure this page is only for user who has paddle subscription
    sub: Subscription = current_user.get_paddle_subscription()
    sub: Subscription = current_user.get_subscription()

    if not sub:
        flash("You don't have any active subscription", "warning")
@@ -1,80 +1,49 @@
from typing import Optional

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app.alias_audit_log_utils import emit_alias_audit_log, AliasAuditLogAction
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import Contact
from app.pgp_utils import PGPException, load_public_key_and_check


class PGPContactForm(FlaskForm):
    action = StringField(
        "action",
        validators=[validators.DataRequired(), validators.AnyOf(("save", "remove"))],
    )
    pgp = StringField("pgp", validators=[validators.Optional()])


@dashboard_bp.route("/contact/<int:contact_id>/", methods=["GET", "POST"])
@login_required
def contact_detail_route(contact_id):
    contact: Optional[Contact] = Contact.get(contact_id)
    contact = Contact.get(contact_id)
    if not contact or contact.user_id != current_user.id:
        flash("You cannot see this page", "warning")
        return redirect(url_for("dashboard.index"))

    alias = contact.alias
    pgp_form = PGPContactForm()

    if request.method == "POST":
        if request.form.get("form-name") == "pgp":
            if not pgp_form.validate():
                flash("Invalid request", "warning")
                return redirect(request.url)
            if pgp_form.action.data == "save":
        if request.form.get("action") == "save":
                if not current_user.is_premium():
                    flash("Only premium plan can add PGP Key", "warning")
                    return redirect(
                        url_for("dashboard.contact_detail_route", contact_id=contact_id)
                    )
                if not pgp_form.pgp.data:
                    flash("Invalid pgp key")

            contact.pgp_public_key = request.form.get("pgp")
            try:
                contact.pgp_finger_print = load_public_key_and_check(
                    contact.pgp_public_key
                )
            except PGPException:
                flash("Cannot add the public key, please verify it", "error")
                else:
                    contact.pgp_public_key = pgp_form.pgp.data
                    try:
                        contact.pgp_finger_print = load_public_key_and_check(
                            contact.pgp_public_key
                        )
                    except PGPException:
                        flash("Cannot add the public key, please verify it", "error")
                    else:
                        emit_alias_audit_log(
                            alias=alias,
                            action=AliasAuditLogAction.UpdateContact,
                            message=f"Added PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
                        )
                        Session.commit()
                        flash(
                            f"PGP public key for {contact.email} is saved successfully",
                            "success",
                        )
                        return redirect(
                            url_for(
                                "dashboard.contact_detail_route", contact_id=contact_id
                            )
                        )
            elif pgp_form.action.data == "remove":
                Session.commit()
                flash(
                    f"PGP public key for {contact.email} is saved successfully",
                    "success",
                )
                return redirect(
                    url_for("dashboard.contact_detail_route", contact_id=contact_id)
                )
        elif request.form.get("action") == "remove":
            # Free user can decide to remove contact PGP key
                emit_alias_audit_log(
                    alias=alias,
                    action=AliasAuditLogAction.UpdateContact,
                    message=f"Removed PGP key {contact.pgp_public_key} for contact {contact_id} ({contact.email})",
                )
                contact.pgp_public_key = None
                contact.pgp_finger_print = None
                Session.commit()

@@ -84,5 +53,5 @@ def contact_detail_route(contact_id):
        )

    return render_template(
        "dashboard/contact_detail.html", contact=contact, alias=alias, pgp_form=pgp_form
        "dashboard/contact_detail.html", contact=contact, alias=alias
    )
@@ -4,10 +4,10 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import parallel_limiter
from app.config import PADDLE_VENDOR_ID, PADDLE_COUPON_ID
from app.config import ADMIN_EMAIL, PADDLE_VENDOR_ID, PADDLE_COUPON_ID
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import send_email
from app.log import LOG
from app.models import (
    ManualSubscription,

@@ -25,7 +25,6 @@ class CouponForm(FlaskForm):

@dashboard_bp.route("/coupon", methods=["GET", "POST"])
@login_required
@parallel_limiter.lock()
def coupon_route():
    coupon_form = CouponForm()

@@ -42,7 +41,7 @@ def coupon_route():
    if current_user.lifetime:
        can_use_coupon = False

    sub: Subscription = current_user.get_paddle_subscription()
    sub: Subscription = current_user.get_subscription()
    if sub:
        can_use_coupon = False

@@ -68,14 +67,9 @@ def coupon_route():
                )
                return redirect(request.url)

            updated = (
                Session.query(Coupon)
                .filter_by(code=code, used=False)
                .update({"used_by_user_id": current_user.id, "used": True})
            )
            if updated != 1:
                flash("Coupon is not valid", "error")
                return redirect(request.url)
            coupon.used_by_user_id = current_user.id
            coupon.used = True
            Session.commit()

            manual_sub: ManualSubscription = ManualSubscription.get_by(
                user_id=current_user.id

@@ -100,10 +94,22 @@ def coupon_route():
                commit=True,
            )
            flash(
                "Your account has been upgraded to Premium, thanks for your support!",
                f"Your account has been upgraded to Premium, thanks for your support!",
                "success",
            )

            # notify admin
            if coupon.is_giveaway:
                subject = f"User {current_user} applies a (free) coupon"
            else:
                subject = f"User {current_user} applies a (paid, {coupon.comment or ''}) coupon"
            send_email(
                ADMIN_EMAIL,
                subject=subject,
                plaintext="",
                html="",
            )

            return redirect(url_for("dashboard.index"))

        else:
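The redemption change above swaps "read the coupon, then mark it used" for a single conditional UPDATE whose affected-row count is checked, so two requests racing on the same code cannot both succeed. The pattern in isolation (a sketch; the model and session names follow the code in this file):

```python
claimed = (
    Session.query(Coupon)
    .filter_by(code=code, used=False)  # only an unused coupon matches
    .update({"used": True, "used_by_user_id": current_user.id})
)
if claimed != 1:
    # either the code never existed or another request claimed it first
    flash("Coupon is not valid", "error")
else:
    Session.commit()
```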
@ -1,16 +1,16 @@
|
|||
import json
|
||||
from dataclasses import dataclass, asdict
|
||||
|
||||
from email_validator import validate_email, EmailNotValidError
|
||||
from flask import render_template, redirect, url_for, flash, request
|
||||
from flask_login import login_required, current_user
|
||||
from itsdangerous import TimestampSigner, SignatureExpired
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from app import parallel_limiter
|
||||
from app.alias_suffix import (
|
||||
get_alias_suffixes,
|
||||
check_suffix_signature,
|
||||
verify_prefix_suffix,
|
||||
)
|
||||
from app.alias_utils import check_alias_prefix
|
||||
from app.config import (
|
||||
DISABLE_ALIAS_SUFFIX,
|
||||
CUSTOM_ALIAS_SECRET,
|
||||
ALIAS_LIMIT,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
|
@ -19,18 +19,180 @@ from app.extensions import limiter
|
|||
from app.log import LOG
|
||||
from app.models import (
|
||||
Alias,
|
||||
CustomDomain,
|
||||
DeletedAlias,
|
||||
Mailbox,
|
||||
User,
|
||||
AliasMailbox,
|
||||
DomainDeletedAlias,
|
||||
)
|
||||
from app.utils import CSRFValidationForm
|
||||
|
||||
signer = TimestampSigner(CUSTOM_ALIAS_SECRET)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SuffixInfo:
|
||||
"""
|
||||
Alias suffix info
|
||||
WARNING: should use AliasSuffix instead
|
||||
"""
|
||||
|
||||
# whether this is a custom domain
|
||||
is_custom: bool
|
||||
suffix: str
|
||||
signed_suffix: str
|
||||
|
||||
# whether this is a premium SL domain. Not apply to custom domain
|
||||
is_premium: bool
|
||||
|
||||
|
||||
def get_available_suffixes(user: User) -> [SuffixInfo]:
|
||||
"""
|
||||
WARNING: should use get_alias_suffixes() instead
|
||||
"""
|
||||
user_custom_domains = user.verified_custom_domains()
|
||||
|
||||
suffixes: [SuffixInfo] = []
|
||||
|
||||
# put custom domain first
|
||||
# for each user domain, generate both the domain and a random suffix version
|
||||
for custom_domain in user_custom_domains:
|
||||
if custom_domain.random_prefix_generation:
|
||||
suffix = "." + user.get_random_alias_suffix() + "@" + custom_domain.domain
|
||||
suffix_info = SuffixInfo(True, suffix, signer.sign(suffix).decode(), False)
|
||||
if user.default_alias_custom_domain_id == custom_domain.id:
|
||||
suffixes.insert(0, suffix_info)
|
||||
else:
|
||||
suffixes.append(suffix_info)
|
||||
|
||||
suffix = "@" + custom_domain.domain
|
||||
suffix_info = SuffixInfo(True, suffix, signer.sign(suffix).decode(), False)
|
||||
|
||||
# put the default domain to top
|
||||
# only if random_prefix_generation isn't enabled
|
||||
if (
|
||||
user.default_alias_custom_domain_id == custom_domain.id
|
||||
and not custom_domain.random_prefix_generation
|
||||
):
|
||||
suffixes.insert(0, suffix_info)
|
||||
else:
|
||||
suffixes.append(suffix_info)
|
||||
|
||||
# then SimpleLogin domain
|
||||
for sl_domain in user.get_sl_domains():
|
||||
suffix = (
|
||||
("" if DISABLE_ALIAS_SUFFIX else "." + user.get_random_alias_suffix())
|
||||
+ "@"
|
||||
+ sl_domain.domain
|
||||
)
|
||||
suffix_info = SuffixInfo(
|
||||
False, suffix, signer.sign(suffix).decode(), sl_domain.premium_only
|
||||
)
|
||||
# put the default domain to top
|
||||
if user.default_alias_public_domain_id == sl_domain.id:
|
||||
suffixes.insert(0, suffix_info)
|
||||
else:
|
||||
suffixes.append(suffix_info)
|
||||
|
||||
return suffixes
|
||||
|
||||
|
||||
@dataclass
|
||||
class AliasSuffix:
|
||||
# whether this is a custom domain
|
||||
is_custom: bool
|
||||
suffix: str
|
||||
|
||||
# whether this is a premium SL domain. Not apply to custom domain
|
||||
is_premium: bool
|
||||
|
||||
# can be either Custom or SL domain
|
||||
domain: str
|
||||
|
||||
# if custom domain, whether the custom domain has MX verified, i.e. can receive emails
|
||||
mx_verified: bool = True
|
||||
|
||||
def serialize(self):
|
||||
return json.dumps(asdict(self))
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, data: str) -> "AliasSuffix":
|
||||
return AliasSuffix(**json.loads(data))
|
||||
|
||||
|
||||
def get_alias_suffixes(user: User) -> [AliasSuffix]:
|
||||
"""
|
||||
Similar to as get_available_suffixes() but also return custom domain that doesn't have MX set up.
|
||||
"""
|
||||
user_custom_domains = CustomDomain.filter_by(
|
||||
user_id=user.id, ownership_verified=True
|
||||
).all()
|
||||
|
||||
alias_suffixes: [AliasSuffix] = []
|
||||
|
||||
# put custom domain first
|
||||
# for each user domain, generate both the domain and a random suffix version
|
||||
for custom_domain in user_custom_domains:
|
||||
if custom_domain.random_prefix_generation:
|
||||
suffix = "." + user.get_random_alias_suffix() + "@" + custom_domain.domain
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=True,
|
||||
suffix=suffix,
|
||||
is_premium=False,
|
||||
domain=custom_domain.domain,
|
||||
mx_verified=custom_domain.verified,
|
||||
)
|
||||
if user.default_alias_custom_domain_id == custom_domain.id:
|
||||
alias_suffixes.insert(0, alias_suffix)
|
||||
else:
|
||||
alias_suffixes.append(alias_suffix)
|
||||
|
||||
suffix = "@" + custom_domain.domain
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=True,
|
||||
suffix=suffix,
|
||||
is_premium=False,
|
||||
domain=custom_domain.domain,
|
||||
mx_verified=custom_domain.verified,
|
||||
)
|
||||
|
||||
# put the default domain to top
|
||||
# only if random_prefix_generation isn't enabled
|
||||
if (
|
||||
user.default_alias_custom_domain_id == custom_domain.id
|
||||
and not custom_domain.random_prefix_generation
|
||||
):
|
||||
alias_suffixes.insert(0, alias_suffix)
|
||||
else:
|
||||
alias_suffixes.append(alias_suffix)
|
||||
|
||||
# then SimpleLogin domain
|
||||
for sl_domain in user.get_sl_domains():
|
||||
suffix = (
|
||||
("" if DISABLE_ALIAS_SUFFIX else "." + user.get_random_alias_suffix())
|
||||
+ "@"
|
||||
+ sl_domain.domain
|
||||
)
|
||||
alias_suffix = AliasSuffix(
|
||||
is_custom=False,
|
||||
suffix=suffix,
|
||||
is_premium=sl_domain.premium_only,
|
||||
domain=sl_domain.domain,
|
||||
mx_verified=True,
|
||||
)
|
||||
|
||||
# put the default domain to top
|
||||
if user.default_alias_public_domain_id == sl_domain.id:
|
||||
alias_suffixes.insert(0, alias_suffix)
|
||||
else:
|
||||
alias_suffixes.append(alias_suffix)
|
||||
|
||||
return alias_suffixes
|
||||
|
||||
|
||||
@dashboard_bp.route("/custom_alias", methods=["GET", "POST"])
|
||||
@limiter.limit(ALIAS_LIMIT, methods=["POST"])
|
||||
@login_required
|
||||
@parallel_limiter.lock(name="alias_creation")
|
||||
def custom_alias():
|
||||
# check if user has not exceeded the alias quota
|
||||
if not current_user.can_create_new_alias():
|
||||
|
@ -49,13 +211,14 @@ def custom_alias():
|
|||
at_least_a_premium_domain = True
|
||||
break
|
||||
|
||||
csrf_form = CSRFValidationForm()
|
||||
alias_suffixes_with_signature = [
|
||||
(alias_suffix, signer.sign(alias_suffix.serialize()).decode())
|
||||
for alias_suffix in alias_suffixes
|
||||
]
|
||||
|
||||
mailboxes = current_user.mailboxes()
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
alias_prefix = request.form.get("prefix").strip().lower().replace(" ", "")
|
||||
signed_alias_suffix = request.form.get("signed-alias-suffix")
|
||||
mailbox_ids = request.form.getlist("mailboxes")
|
||||
|
@ -86,19 +249,25 @@ def custom_alias():
|
|||
flash("At least one mailbox must be selected", "error")
|
||||
return redirect(request.url)
|
||||
|
||||
# hypothesis: user will click on the button in the 600 secs
|
||||
try:
|
||||
suffix = check_suffix_signature(signed_alias_suffix)
|
||||
if not suffix:
|
||||
LOG.w("Alias creation time expired for %s", current_user)
|
||||
flash("Alias creation time is expired, please retry", "warning")
|
||||
return redirect(request.url)
|
||||
signed_alias_suffix_decoded = signer.unsign(
|
||||
signed_alias_suffix, max_age=600
|
||||
).decode()
|
||||
alias_suffix: AliasSuffix = AliasSuffix.deserialize(
|
||||
signed_alias_suffix_decoded
|
||||
)
|
||||
except SignatureExpired:
|
||||
LOG.w("Alias creation time expired for %s", current_user)
|
||||
flash("Alias creation time is expired, please retry", "warning")
|
||||
return redirect(request.url)
|
||||
except Exception:
|
||||
LOG.w("Alias suffix is tampered, user %s", current_user)
|
||||
flash("Unknown error, refresh the page", "error")
|
||||
return redirect(request.url)
|
||||
|
||||
if verify_prefix_suffix(current_user, alias_prefix, suffix):
|
||||
full_alias = alias_prefix + suffix
|
||||
if verify_prefix_suffix(current_user, alias_prefix, alias_suffix.suffix):
|
||||
full_alias = alias_prefix + alias_suffix.suffix
|
||||
|
||||
if ".." in full_alias:
|
||||
flash("Your alias can't contain 2 consecutive dots (..)", "error")
|
||||
|
@ -125,11 +294,18 @@ def custom_alias():
|
|||
email=full_alias
|
||||
)
|
||||
custom_domain = domain_deleted_alias.domain
|
||||
flash(
|
||||
f"You have deleted this alias before. You can restore it on "
|
||||
f"{custom_domain.domain} 'Deleted Alias' page",
|
||||
"error",
|
||||
)
|
||||
if domain_deleted_alias.user_id == current_user.id:
|
||||
flash(
|
||||
f"You have deleted this alias before. You can restore it on "
|
||||
f"{custom_domain.domain} 'Deleted Alias' page",
|
||||
"error",
|
||||
)
|
||||
else:
|
||||
# should never happen as user can only choose their domains
|
||||
LOG.e(
|
||||
"Deleted Alias %s does not belong to user %s",
|
||||
domain_deleted_alias,
|
||||
)
|
||||
|
||||
elif DeletedAlias.get_by(email=full_alias):
|
||||
flash(general_error_msg, "error")
|
||||
|
@ -166,8 +342,51 @@ def custom_alias():
|
|||
return render_template(
|
||||
"dashboard/custom_alias.html",
|
||||
user_custom_domains=user_custom_domains,
|
||||
alias_suffixes=alias_suffixes,
|
||||
alias_suffixes_with_signature=alias_suffixes_with_signature,
|
||||
at_least_a_premium_domain=at_least_a_premium_domain,
|
||||
mailboxes=mailboxes,
|
||||
csrf_form=csrf_form,
|
||||
)
|
||||
|
||||
|
||||
def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
|
||||
"""verify if user could create an alias with the given prefix and suffix"""
|
||||
if not alias_prefix or not alias_suffix: # should be caught on frontend
|
||||
return False
|
||||
|
||||
user_custom_domains = [cd.domain for cd in user.verified_custom_domains()]
|
||||
|
||||
# make sure alias_suffix is either .random_word@simplelogin.co or @my-domain.com
|
||||
alias_suffix = alias_suffix.strip()
|
||||
# alias_domain_prefix is either a .random_word or ""
|
||||
alias_domain_prefix, alias_domain = alias_suffix.split("@", 1)
|
||||
|
||||
# alias_domain must be either one of user custom domains or built-in domains
|
||||
if alias_domain not in user.available_alias_domains():
|
||||
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
|
||||
return False
|
||||
|
||||
# SimpleLogin domain case:
|
||||
# 1) alias_suffix must start with "." and
|
||||
# 2) alias_domain_prefix must come from the word list
|
||||
if (
|
||||
alias_domain in user.available_sl_domains()
|
||||
and alias_domain not in user_custom_domains
|
||||
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
|
||||
and not DISABLE_ALIAS_SUFFIX
|
||||
):
|
||||
|
||||
if not alias_domain_prefix.startswith("."):
|
||||
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
|
||||
return False
|
||||
|
||||
else:
|
||||
if alias_domain not in user_custom_domains:
|
||||
if not DISABLE_ALIAS_SUFFIX:
|
||||
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
|
||||
return False
|
||||
|
||||
if alias_domain not in user.available_sl_domains():
|
||||
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
|
|
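The custom-alias flow above serializes each `AliasSuffix` to JSON and signs it with itsdangerous' `TimestampSigner`, so the POST handler can verify both that the suffix was issued by the server and that it is no older than 600 seconds. A hedged sketch of that round trip (the secret string is a placeholder, not the real `CUSTOM_ALIAS_SECRET`):

```python
from itsdangerous import SignatureExpired, TimestampSigner

signer = TimestampSigner("placeholder-secret")

signed_suffix = signer.sign("@example.com").decode()  # embedded in the form

try:
    suffix = signer.unsign(signed_suffix, max_age=600).decode()
except SignatureExpired:
    suffix = None  # the form sat open for more than 10 minutes; ask the user to retry
```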
@ -3,11 +3,12 @@ from flask_login import login_required, current_user
|
|||
from flask_wtf import FlaskForm
|
||||
from wtforms import StringField, validators
|
||||
|
||||
from app import parallel_limiter
|
||||
from app.config import EMAIL_SERVERS_WITH_PRIORITY
|
||||
from app.custom_domain_utils import create_custom_domain
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.models import CustomDomain
|
||||
from app.db import Session
|
||||
from app.email_utils import get_email_domain_part
|
||||
from app.log import LOG
|
||||
from app.models import CustomDomain, Mailbox, DomainMailbox, SLDomain
|
||||
|
||||
|
||||
class NewCustomDomainForm(FlaskForm):
|
||||
|
@ -18,15 +19,15 @@ class NewCustomDomainForm(FlaskForm):
|
|||
|
||||
@dashboard_bp.route("/custom_domain", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
|
||||
def custom_domain():
|
||||
custom_domains = CustomDomain.filter_by(
|
||||
user_id=current_user.id,
|
||||
is_sl_subdomain=False,
|
||||
pending_deletion=False,
|
||||
user_id=current_user.id, is_sl_subdomain=False
|
||||
).all()
|
||||
mailboxes = current_user.mailboxes()
|
||||
new_custom_domain_form = NewCustomDomainForm()
|
||||
|
||||
errors = {}
|
||||
|
||||
if request.method == "POST":
|
||||
if request.form.get("form-name") == "create":
|
||||
if not current_user.is_premium():
|
||||
|
@ -34,25 +35,87 @@ def custom_domain():
|
|||
return redirect(url_for("dashboard.custom_domain"))
|
||||
|
||||
if new_custom_domain_form.validate():
|
||||
res = create_custom_domain(
|
||||
user=current_user, domain=new_custom_domain_form.domain.data
|
||||
)
|
||||
if res.success:
|
||||
flash(f"New domain {res.instance.domain} is created", "success")
|
||||
new_domain = new_custom_domain_form.domain.data.lower().strip()
|
||||
|
||||
if new_domain.startswith("http://"):
|
||||
new_domain = new_domain[len("http://") :]
|
||||
|
||||
if new_domain.startswith("https://"):
|
||||
new_domain = new_domain[len("https://") :]
|
||||
|
||||
if SLDomain.get_by(domain=new_domain):
|
||||
flash("A custom domain cannot be a built-in domain.", "error")
|
||||
elif CustomDomain.get_by(domain=new_domain):
|
||||
flash(f"{new_domain} already used", "error")
|
||||
elif get_email_domain_part(current_user.email) == new_domain:
|
||||
flash(
|
||||
"You cannot add a domain that you are currently using for your personal email. "
|
||||
"Please change your personal email to your real email",
|
||||
"error",
|
||||
)
|
||||
elif Mailbox.filter(
|
||||
Mailbox.verified.is_(True), Mailbox.email.endswith(f"@{new_domain}")
|
||||
).first():
|
||||
flash(
|
||||
f"{new_domain} already used in a SimpleLogin mailbox", "error"
|
||||
)
|
||||
else:
|
||||
new_custom_domain = CustomDomain.create(
|
||||
domain=new_domain, user_id=current_user.id
|
||||
)
|
||||
# new domain has ownership verified if its parent has the ownership verified
|
||||
for root_cd in current_user.custom_domains:
|
||||
if (
|
||||
new_domain.endswith("." + root_cd.domain)
|
||||
and root_cd.ownership_verified
|
||||
):
|
||||
LOG.i(
|
||||
"%s ownership verified thanks to %s",
|
||||
new_custom_domain,
|
||||
root_cd,
|
||||
)
|
||||
new_custom_domain.ownership_verified = True
|
||||
|
||||
Session.commit()
|
||||
|
||||
mailbox_ids = request.form.getlist("mailbox_ids")
|
||||
if mailbox_ids:
|
||||
# check if mailbox is not tempered with
|
||||
mailboxes = []
|
||||
for mailbox_id in mailbox_ids:
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if (
|
||||
not mailbox
|
||||
or mailbox.user_id != current_user.id
|
||||
or not mailbox.verified
|
||||
):
|
||||
flash("Something went wrong, please retry", "warning")
|
||||
return redirect(url_for("dashboard.custom_domain"))
|
||||
mailboxes.append(mailbox)
|
||||
|
||||
for mailbox in mailboxes:
|
||||
DomainMailbox.create(
|
||||
domain_id=new_custom_domain.id, mailbox_id=mailbox.id
|
||||
)
|
||||
|
||||
Session.commit()
|
||||
|
||||
flash(
|
||||
f"New domain {new_custom_domain.domain} is created", "success"
|
||||
)
|
||||
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.domain_detail_dns",
|
||||
custom_domain_id=res.instance.id,
|
||||
custom_domain_id=new_custom_domain.id,
|
||||
)
|
||||
)
|
||||
else:
|
||||
flash(res.message, res.message_category)
|
||||
if res.redirect:
|
||||
return redirect(url_for(res.redirect))
|
||||
|
||||
return render_template(
|
||||
"dashboard/custom_domain.html",
|
||||
custom_domains=custom_domains,
|
||||
new_custom_domain_form=new_custom_domain_form,
|
||||
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
|
||||
errors=errors,
|
||||
mailboxes=mailboxes,
|
||||
)
|
||||
|
|
|
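In the custom-domain hunks above, a newly added domain inherits `ownership_verified` when it is a subdomain of a domain the user has already verified; the test is a plain dotted-suffix comparison. A standalone version of that rule (the function name is invented for illustration):

```python
def inherits_ownership(new_domain: str, verified_parents: list) -> bool:
    # "mail.example.com" is covered by a verified "example.com",
    # while "notexample.com" is not
    return any(new_domain.endswith("." + parent) for parent in verified_parents)
```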
@@ -1,32 +1,20 @@
import arrow
from flask import flash, redirect, url_for, request, render_template
from flask_login import login_required, current_user
from flask_wtf import FlaskForm

from app.config import JOB_DELETE_ACCOUNT
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.log import LOG
from app.models import Subscription, Job
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction


class DeleteDirForm(FlaskForm):
    pass


@dashboard_bp.route("/delete_account", methods=["GET", "POST"])
@login_required
@sudo_required
def delete_account():
    delete_form = DeleteDirForm()
    if request.method == "POST" and request.form.get("form-name") == "delete-account":
        if not delete_form.validate():
            flash("Invalid request", "warning")
            return render_template(
                "dashboard/delete_account.html", delete_form=delete_form
            )
        sub: Subscription = current_user.get_paddle_subscription()
        sub: Subscription = current_user.get_subscription()
        # user who has canceled can also re-subscribe
        if sub and not sub.cancelled:
            flash("Please cancel your current subscription first", "warning")

@@ -34,11 +22,6 @@ def delete_account():

        # Schedule delete account job
        LOG.w("schedule delete account job for %s", current_user)
        emit_user_audit_log(
            user=current_user,
            action=UserAuditLogAction.UserMarkedForDeletion,
            message=f"User {current_user.id} ({current_user.email}) marked for deletion via webapp",
        )
        Job.create(
            name=JOB_DELETE_ACCOUNT,
            payload={"user_id": current_user.id},

@@ -53,4 +36,6 @@ def delete_account():
        )
        return redirect(url_for("dashboard.setting"))

    return render_template("dashboard/delete_account.html", delete_form=delete_form)
    return render_template(
        "dashboard/delete_account.html",
    )
@ -1,17 +1,8 @@
|
|||
from typing import Optional
|
||||
|
||||
from flask import render_template, request, redirect, url_for, flash
|
||||
from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import (
|
||||
StringField,
|
||||
validators,
|
||||
SelectMultipleField,
|
||||
BooleanField,
|
||||
IntegerField,
|
||||
)
|
||||
from wtforms import StringField, validators
|
||||
|
||||
from app import parallel_limiter
|
||||
from app.config import (
|
||||
EMAIL_DOMAIN,
|
||||
ALIAS_DOMAINS,
|
||||
|
@ -22,7 +13,6 @@ from app.dashboard.base import dashboard_bp
|
|||
from app.db import Session
|
||||
from app.errors import DirectoryInTrashError
|
||||
from app.models import Directory, Mailbox, DirectoryMailbox
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
|
||||
|
||||
class NewDirForm(FlaskForm):
|
||||
|
@ -31,25 +21,8 @@ class NewDirForm(FlaskForm):
|
|||
)
|
||||
|
||||
|
||||
class ToggleDirForm(FlaskForm):
|
||||
directory_id = IntegerField(validators=[validators.DataRequired()])
|
||||
directory_enabled = BooleanField(validators=[])
|
||||
|
||||
|
||||
class UpdateDirForm(FlaskForm):
|
||||
directory_id = IntegerField(validators=[validators.DataRequired()])
|
||||
mailbox_ids = SelectMultipleField(
|
||||
validators=[validators.DataRequired()], validate_choice=False, choices=[]
|
||||
)
|
||||
|
||||
|
||||
class DeleteDirForm(FlaskForm):
|
||||
directory_id = IntegerField(validators=[validators.DataRequired()])
|
||||
|
||||
|
||||
@dashboard_bp.route("/directory", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
|
||||
def directory():
|
||||
dirs = (
|
||||
Directory.filter_by(user_id=current_user.id)
|
||||
|
@ -60,80 +33,54 @@ def directory():
|
|||
mailboxes = current_user.mailboxes()
|
||||
|
||||
new_dir_form = NewDirForm()
|
||||
toggle_dir_form = ToggleDirForm()
|
||||
update_dir_form = UpdateDirForm()
|
||||
update_dir_form.mailbox_ids.choices = [
|
||||
(str(mailbox.id), str(mailbox.id)) for mailbox in mailboxes
|
||||
]
|
||||
delete_dir_form = DeleteDirForm()
|
||||
|
||||
if request.method == "POST":
|
||||
if request.form.get("form-name") == "delete":
|
||||
if not delete_dir_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_obj: Optional[Directory] = Directory.get(
|
||||
delete_dir_form.directory_id.data
|
||||
)
|
||||
dir_id = request.form.get("dir-id")
|
||||
dir = Directory.get(dir_id)
|
||||
|
||||
if not dir_obj:
|
||||
if not dir:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
elif dir_obj.user_id != current_user.id:
|
||||
elif dir.user_id != current_user.id:
|
||||
flash("You cannot delete this directory", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
name = dir_obj.name
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.DeleteDirectory,
|
||||
message=f"Delete directory {dir_obj.id} ({dir_obj.name})",
|
||||
)
|
||||
Directory.delete(dir_obj.id)
|
||||
name = dir.name
|
||||
Directory.delete(dir_id)
|
||||
Session.commit()
|
||||
flash(f"Directory {name} has been deleted", "success")
|
||||
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
if request.form.get("form-name") == "toggle-directory":
|
||||
if not toggle_dir_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = toggle_dir_form.directory_id.data
|
||||
dir_obj: Optional[Directory] = Directory.get(dir_id)
|
||||
dir_id = request.form.get("dir-id")
|
||||
dir = Directory.get(dir_id)
|
||||
|
||||
if not dir_obj or dir_obj.user_id != current_user.id:
|
||||
if not dir or dir.user_id != current_user.id:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
if toggle_dir_form.directory_enabled.data:
|
||||
dir_obj.disabled = False
|
||||
flash(f"On-the-fly is enabled for {dir_obj.name}", "success")
|
||||
if request.form.get("dir-status") == "on":
|
||||
dir.disabled = False
|
||||
flash(f"On-the-fly is enabled for {dir.name}", "success")
|
||||
else:
|
||||
dir_obj.disabled = True
|
||||
flash(f"On-the-fly is disabled for {dir_obj.name}", "warning")
|
||||
dir.disabled = True
|
||||
flash(f"On-the-fly is disabled for {dir.name}", "warning")
|
||||
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateDirectory,
|
||||
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) set disabled = {dir_obj.disabled}",
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
elif request.form.get("form-name") == "update":
|
||||
if not update_dir_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
dir_id = update_dir_form.directory_id.data
|
||||
dir_obj: Optional[Directory] = Directory.get(dir_id)
|
||||
dir_id = request.form.get("dir-id")
|
||||
dir = Directory.get(dir_id)
|
||||
|
||||
if not dir_obj or dir_obj.user_id != current_user.id:
|
||||
if not dir or dir.user_id != current_user.id:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
mailbox_ids = update_dir_form.mailbox_ids.data
|
||||
mailbox_ids = request.form.getlist("mailbox_ids")
|
||||
# check if mailbox is not tempered with
|
||||
mailboxes = []
|
||||
for mailbox_id in mailbox_ids:
|
||||
|
@ -152,20 +99,14 @@ def directory():
|
|||
return redirect(url_for("dashboard.directory"))
|
||||
|
||||
# first remove all existing directory-mailboxes links
|
||||
DirectoryMailbox.filter_by(directory_id=dir_obj.id).delete()
|
||||
DirectoryMailbox.filter_by(directory_id=dir.id).delete()
|
||||
Session.flush()
|
||||
|
||||
for mailbox in mailboxes:
|
||||
DirectoryMailbox.create(directory_id=dir_obj.id, mailbox_id=mailbox.id)
|
||||
DirectoryMailbox.create(directory_id=dir.id, mailbox_id=mailbox.id)
|
||||
|
||||
mailboxes_as_str = ",".join(map(str, mailbox_ids))
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateDirectory,
|
||||
message=f"Updated directory {dir_obj.id} ({dir_obj.name}) mailboxes ({mailboxes_as_str})",
|
||||
)
|
||||
Session.commit()
|
||||
flash(f"Directory {dir_obj.name} has been updated", "success")
|
||||
flash(f"Directory {dir.name} has been updated", "success")
|
||||
|
||||
return redirect(url_for("dashboard.directory"))
|
||||
elif request.form.get("form-name") == "create":
|
||||
|
@ -202,11 +143,6 @@ def directory():
|
|||
new_dir = Directory.create(
|
||||
name=new_dir_name, user_id=current_user.id
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.CreateDirectory,
|
||||
message=f"New directory {new_dir.name} ({new_dir.name})",
|
||||
)
|
||||
except DirectoryInTrashError:
|
||||
flash(
|
||||
f"{new_dir_name} has been used before and cannot be reused",
|
||||
|
@ -245,9 +181,6 @@ def directory():
|
|||
return render_template(
|
||||
"dashboard/directory.html",
|
||||
dirs=dirs,
|
||||
toggle_dir_form=toggle_dir_form,
|
||||
update_dir_form=update_dir_form,
|
||||
delete_dir_form=delete_dir_form,
|
||||
new_dir_form=new_dir_form,
|
||||
mailboxes=mailboxes,
|
||||
EMAIL_DOMAIN=EMAIL_DOMAIN,
|
||||
|
|
|
@ -1,27 +1,34 @@
|
|||
import re
|
||||
|
||||
import arrow
|
||||
from flask import render_template, request, redirect, url_for, flash
|
||||
from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import StringField, validators, IntegerField
|
||||
|
||||
from app.constants import DMARC_RECORD
|
||||
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN
|
||||
from app.custom_domain_utils import delete_custom_domain, set_custom_domain_mailboxes
|
||||
from app.custom_domain_validation import CustomDomainValidation
|
||||
from app.config import EMAIL_SERVERS_WITH_PRIORITY, EMAIL_DOMAIN, JOB_DELETE_DOMAIN
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.dns_utils import (
|
||||
get_mx_domains,
|
||||
get_spf_domain,
|
||||
get_txt_record,
|
||||
get_cname_record,
|
||||
is_mx_equivalent,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
CustomDomain,
|
||||
Alias,
|
||||
DomainDeletedAlias,
|
||||
Mailbox,
|
||||
DomainMailbox,
|
||||
AutoCreateRule,
|
||||
AutoCreateRuleMailbox,
|
||||
Job,
|
||||
)
|
||||
from app.regex_utils import regex_match
|
||||
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
|
||||
from app.utils import random_string, CSRFValidationForm
|
||||
from app.utils import random_string
|
||||
|
||||
|
||||
@dashboard_bp.route("/domains/<int:custom_domain_id>/dns", methods=["GET", "POST"])
|
||||
|
@ -37,25 +44,27 @@ def domain_detail_dns(custom_domain_id):
|
|||
custom_domain.ownership_txt_token = random_string(30)
|
||||
Session.commit()
|
||||
|
||||
domain_validator = CustomDomainValidation(EMAIL_DOMAIN)
|
||||
csrf_form = CSRFValidationForm()
|
||||
spf_record = f"v=spf1 include:{EMAIL_DOMAIN} ~all"
|
||||
|
||||
# hardcode the DKIM selector here
|
||||
dkim_cname = f"dkim._domainkey.{EMAIL_DOMAIN}"
|
||||
|
||||
dmarc_record = "v=DMARC1; p=quarantine; pct=100; adkim=s; aspf=s"
|
||||
|
||||
mx_ok = spf_ok = dkim_ok = dmarc_ok = ownership_ok = True
|
||||
mx_errors = spf_errors = dkim_errors = dmarc_errors = ownership_errors = []
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
if request.form.get("form-name") == "check-ownership":
|
||||
ownership_validation_result = domain_validator.validate_domain_ownership(
|
||||
custom_domain
|
||||
)
|
||||
if ownership_validation_result.success:
|
||||
txt_records = get_txt_record(custom_domain.domain)
|
||||
|
||||
if custom_domain.get_ownership_dns_txt_value() in txt_records:
|
||||
flash(
|
||||
"Domain ownership is verified. Please proceed to the other records setup",
|
||||
"success",
|
||||
)
|
||||
custom_domain.ownership_verified = True
|
||||
Session.commit()
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.domain_detail_dns",
|
||||
|
@ -66,28 +75,36 @@ def domain_detail_dns(custom_domain_id):
|
|||
else:
|
||||
flash("We can't find the needed TXT record", "error")
|
||||
ownership_ok = False
|
||||
ownership_errors = ownership_validation_result.errors
|
||||
ownership_errors = txt_records
|
||||
|
||||
elif request.form.get("form-name") == "check-mx":
|
||||
mx_validation_result = domain_validator.validate_mx_records(custom_domain)
|
||||
if mx_validation_result.success:
|
||||
mx_domains = get_mx_domains(custom_domain.domain)
|
||||
|
||||
if not is_mx_equivalent(mx_domains, EMAIL_SERVERS_WITH_PRIORITY):
|
||||
flash("The MX record is not correctly set", "warning")
|
||||
|
||||
mx_ok = False
|
||||
# build mx_errors to show to user
|
||||
mx_errors = [
|
||||
f"{priority} {domain}" for (priority, domain) in mx_domains
|
||||
]
|
||||
else:
|
||||
flash(
|
||||
"Your domain can start receiving emails. You can now use it to create alias",
|
||||
"success",
|
||||
)
|
||||
custom_domain.verified = True
|
||||
Session.commit()
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
|
||||
)
|
||||
)
|
||||
else:
|
||||
flash("The MX record is not correctly set", "warning")
|
||||
mx_ok = False
|
||||
mx_errors = mx_validation_result.errors
|
||||
|
||||
elif request.form.get("form-name") == "check-spf":
|
||||
spf_validation_result = domain_validator.validate_spf_records(custom_domain)
|
||||
if spf_validation_result.success:
|
||||
spf_domains = get_spf_domain(custom_domain.domain)
|
||||
if EMAIL_DOMAIN in spf_domains:
|
||||
custom_domain.spf_verified = True
|
||||
Session.commit()
|
||||
flash("SPF is setup correctly", "success")
|
||||
return redirect(
|
||||
url_for(
|
||||
|
@ -95,31 +112,39 @@ def domain_detail_dns(custom_domain_id):
|
|||
)
|
||||
)
|
||||
else:
|
||||
custom_domain.spf_verified = False
|
||||
Session.commit()
|
||||
flash(
|
||||
f"SPF: {EMAIL_DOMAIN} is not included in your SPF record.",
|
||||
"warning",
|
||||
)
|
||||
spf_ok = False
|
||||
spf_errors = spf_validation_result.errors
|
||||
spf_errors = get_txt_record(custom_domain.domain)
|
||||
|
||||
elif request.form.get("form-name") == "check-dkim":
|
||||
dkim_errors = domain_validator.validate_dkim_records(custom_domain)
|
||||
if len(dkim_errors) == 0:
|
||||
dkim_record = get_cname_record("dkim._domainkey." + custom_domain.domain)
|
||||
if dkim_record == dkim_cname:
|
||||
flash("DKIM is setup correctly.", "success")
|
||||
custom_domain.dkim_verified = True
|
||||
Session.commit()
|
||||
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.domain_detail_dns", custom_domain_id=custom_domain.id
|
||||
)
|
||||
)
|
||||
else:
|
||||
dkim_ok = False
|
||||
custom_domain.dkim_verified = False
|
||||
Session.commit()
|
||||
flash("DKIM: the CNAME record is not correctly set", "warning")
|
||||
dkim_ok = False
|
||||
dkim_errors = [dkim_record or "[Empty]"]
|
||||
|
||||
elif request.form.get("form-name") == "check-dmarc":
|
||||
dmarc_validation_result = domain_validator.validate_dmarc_records(
|
||||
custom_domain
|
||||
)
|
||||
if dmarc_validation_result.success:
|
||||
txt_records = get_txt_record("_dmarc." + custom_domain.domain)
|
||||
if dmarc_record in txt_records:
|
||||
custom_domain.dmarc_verified = True
|
||||
Session.commit()
|
||||
flash("DMARC is setup correctly", "success")
|
||||
return redirect(
|
||||
url_for(
|
||||
|
@ -127,23 +152,18 @@ def domain_detail_dns(custom_domain_id):
|
|||
)
|
||||
)
|
||||
else:
|
||||
custom_domain.dmarc_verified = False
|
||||
Session.commit()
|
||||
flash(
|
||||
"DMARC: The TXT record is not correctly set",
|
||||
"warning",
|
||||
)
|
||||
dmarc_ok = False
|
||||
dmarc_errors = dmarc_validation_result.errors
|
||||
dmarc_errors = txt_records
|
||||
|
||||
return render_template(
|
||||
"dashboard/domain_detail/dns.html",
|
||||
EMAIL_SERVERS_WITH_PRIORITY=EMAIL_SERVERS_WITH_PRIORITY,
|
||||
ownership_record=domain_validator.get_ownership_verification_record(
|
||||
custom_domain
|
||||
),
|
||||
expected_mx_records=domain_validator.get_expected_mx_records(custom_domain),
|
||||
dkim_records=domain_validator.get_dkim_records(custom_domain),
|
||||
spf_record=domain_validator.get_expected_spf_record(custom_domain),
|
||||
dmarc_record=DMARC_RECORD,
|
||||
**locals(),
|
||||
)
|
||||
|
||||
|
@ -151,7 +171,6 @@ def domain_detail_dns(custom_domain_id):
|
|||
@dashboard_bp.route("/domains/<int:custom_domain_id>/info", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def domain_detail(custom_domain_id):
|
||||
csrf_form = CSRFValidationForm()
|
||||
custom_domain: CustomDomain = CustomDomain.get(custom_domain_id)
|
||||
mailboxes = current_user.mailboxes()
|
||||
|
||||
|
@ -160,16 +179,8 @@ def domain_detail(custom_domain_id):
|
|||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
if request.form.get("form-name") == "switch-catch-all":
|
||||
custom_domain.catch_all = not custom_domain.catch_all
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateCustomDomain,
|
||||
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) catch all to {custom_domain.catch_all}",
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
if custom_domain.catch_all:
|
||||
|
@ -188,11 +199,6 @@ def domain_detail(custom_domain_id):
|
|||
elif request.form.get("form-name") == "set-name":
|
||||
if request.form.get("action") == "save":
|
||||
custom_domain.name = request.form.get("alias-name").replace("\n", "")
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateCustomDomain,
|
||||
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) name",
|
||||
)
|
||||
Session.commit()
|
||||
flash(
|
||||
f"Default alias name for Domain {custom_domain.domain} has been set",
|
||||
|
@ -200,11 +206,6 @@ def domain_detail(custom_domain_id):
|
|||
)
|
||||
else:
|
||||
custom_domain.name = None
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateCustomDomain,
|
||||
message=f"Cleared custom domain {custom_domain.id} ({custom_domain.domain}) name",
|
||||
)
|
||||
Session.commit()
|
||||
flash(
|
||||
f"Default alias name for Domain {custom_domain.domain} has been removed",
|
||||
|
@ -218,11 +219,6 @@ def domain_detail(custom_domain_id):
|
|||
custom_domain.random_prefix_generation = (
|
||||
not custom_domain.random_prefix_generation
|
||||
)
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateCustomDomain,
|
||||
message=f"Switched custom domain {custom_domain.id} ({custom_domain.domain}) random prefix generation to {custom_domain.random_prefix_generation}",
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
if custom_domain.random_prefix_generation:
|
||||
|
@ -240,16 +236,40 @@ def domain_detail(custom_domain_id):
|
|||
)
|
||||
elif request.form.get("form-name") == "update":
|
||||
mailbox_ids = request.form.getlist("mailbox_ids")
|
||||
result = set_custom_domain_mailboxes(
|
||||
user_id=current_user.id,
|
||||
custom_domain=custom_domain,
|
||||
mailbox_ids=mailbox_ids,
|
||||
)
|
||||
# check if mailbox is not tempered with
|
||||
mailboxes = []
|
||||
for mailbox_id in mailbox_ids:
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if (
|
||||
not mailbox
|
||||
or mailbox.user_id != current_user.id
|
||||
or not mailbox.verified
|
||||
):
|
||||
flash("Something went wrong, please retry", "warning")
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.domain_detail", custom_domain_id=custom_domain.id
|
||||
)
|
||||
)
|
||||
mailboxes.append(mailbox)
|
||||
|
||||
if result.success:
|
||||
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
|
||||
else:
|
||||
flash(result.reason.value, "warning")
|
||||
if not mailboxes:
|
||||
flash("You must select at least 1 mailbox", "warning")
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.domain_detail", custom_domain_id=custom_domain.id
|
||||
)
|
||||
)
|
||||
|
||||
# first remove all existing domain-mailboxes links
|
||||
DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
|
||||
Session.flush()
|
||||
|
||||
for mailbox in mailboxes:
|
||||
DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)
|
||||
|
||||
Session.commit()
|
||||
flash(f"{custom_domain.domain} mailboxes has been updated", "success")
|
||||
|
||||
return redirect(
|
||||
url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
|
||||
|
@ -257,8 +277,16 @@ def domain_detail(custom_domain_id):
|
|||
|
||||
elif request.form.get("form-name") == "delete":
|
||||
name = custom_domain.domain
|
||||
LOG.d("Schedule deleting %s", custom_domain)
|
||||
|
||||
delete_custom_domain(custom_domain)
|
||||
# Schedule delete domain job
|
||||
LOG.w("schedule delete domain job for %s", custom_domain)
|
||||
Job.create(
|
||||
name=JOB_DELETE_DOMAIN,
|
||||
payload={"custom_domain_id": custom_domain.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
|
||||
flash(
|
||||
f"{name} scheduled for deletion."
|
||||
|
@ -279,16 +307,12 @@ def domain_detail(custom_domain_id):
|
|||
@dashboard_bp.route("/domains/<int:custom_domain_id>/trash", methods=["GET", "POST"])
|
||||
@login_required
|
||||
def domain_detail_trash(custom_domain_id):
|
||||
csrf_form = CSRFValidationForm()
|
||||
custom_domain = CustomDomain.get(custom_domain_id)
|
||||
if not custom_domain or custom_domain.user_id != current_user.id:
|
||||
flash("You cannot see this page", "warning")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
if request.form.get("form-name") == "empty-all":
|
||||
DomainDeletedAlias.filter_by(domain_id=custom_domain.id).delete()
|
||||
Session.commit()
|
||||
|
@ -332,7 +356,6 @@ def domain_detail_trash(custom_domain_id):
|
|||
"dashboard/domain_detail/trash.html",
|
||||
domain_deleted_aliases=domain_deleted_aliases,
|
||||
custom_domain=custom_domain,
|
||||
csrf_form=csrf_form,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@@ -6,15 +6,11 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import PasswordField, validators

from app.config import CONNECT_WITH_PROTON, OIDC_CLIENT_ID, CONNECT_WITH_OIDC_ICON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser, SocialAuth
from app.proton.utils import get_proton_partner
from app.utils import sanitize_next_url

_SUDO_GAP = 120
_SUDO_GAP = 900


class LoginForm(FlaskForm):

@@ -22,7 +18,6 @@ class LoginForm(FlaskForm):

@dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
@limiter.limit("3/minute")
@login_required
def enter_sudo():
password_check_form = LoginForm()

@@ -44,26 +39,8 @@ def enter_sudo():
else:
flash("Incorrect password", "warning")

proton_enabled = CONNECT_WITH_PROTON
if proton_enabled:
# Only for users that have the account linked
partner_user = PartnerUser.get_by(user_id=current_user.id)
if not partner_user or partner_user.partner_id != get_proton_partner().id:
proton_enabled = False

oidc_enabled = OIDC_CLIENT_ID is not None
if oidc_enabled:
oidc_enabled = (
SocialAuth.get_by(user_id=current_user.id, social="oidc") is not None
)

return render_template(
"dashboard/enter_sudo.html",
password_check_form=password_check_form,
next=request.args.get("next"),
connect_with_proton=proton_enabled,
connect_with_oidc=oidc_enabled,
connect_with_oidc_icon=CONNECT_WITH_OIDC_ICON,
"dashboard/enter_sudo.html", password_check_form=password_check_form
)
@@ -78,10 +78,10 @@ def fido_setup():
)

flash("Security key has been activated", "success")
recovery_codes = RecoveryCode.generate(current_user)
return render_template(
"dashboard/recovery_code.html", recovery_codes=recovery_codes
)
if not RecoveryCode.filter_by(user_id=current_user.id).all():
return redirect(url_for("dashboard.recovery_code_route"))
else:
return redirect(url_for("dashboard.fido_manage"))

# Prepare information for key registration process
fido_uuid = (
@@ -3,7 +3,7 @@ from dataclasses import dataclass
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user

from app import alias_utils, parallel_limiter
from app import alias_utils
from app.api.serializer import get_alias_infos_with_pagination_v3, get_alias_info_v3
from app.config import ALIAS_LIMIT, PAGE_LIMIT
from app.dashboard.base import dashboard_bp

@@ -12,13 +12,11 @@ from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
AliasDeleteReason,
AliasGeneratorEnum,
User,
EmailLog,
Contact,
)
from app.utils import CSRFValidationForm


@dataclass

@@ -53,17 +51,12 @@ def get_stats(user: User) -> Stats:


@dashboard_bp.route("/", methods=["GET", "POST"])
@login_required
@limiter.limit(
ALIAS_LIMIT,
methods=["POST"],
exempt_when=lambda: request.form.get("form-name") != "create-random-email",
)
@limiter.limit("10/minute", methods=["GET"], key_func=lambda: current_user.id)
@parallel_limiter.lock(
name="alias_creation",
only_when=lambda: request.form.get("form-name") == "create-random-email",
)
@login_required
def index():
query = request.args.get("query") or ""
sort = request.args.get("sort") or ""

@@ -71,10 +64,7 @@ def index():

page = 0
if request.args.get("page"):
try:
page = int(request.args.get("page"))
except ValueError:
pass
page = int(request.args.get("page"))

highlight_alias_id = None
if request.args.get("highlight_alias_id"):

@@ -85,12 +75,8 @@ def index():
"highlight_alias_id must be a number, received %s",
request.args.get("highlight_alias_id"),
)
csrf_form = CSRFValidationForm()

if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
if request.form.get("form-name") == "create-custom-email":
if current_user.can_create_new_alias():
return redirect(url_for("dashboard.custom_alias"))

@@ -145,27 +131,17 @@ def index():
)

if request.form.get("form-name") == "delete-alias":
LOG.i(f"User {current_user} requested deletion of alias {alias}")
LOG.d("delete alias %s", alias)
email = alias.email
alias_utils.delete_alias(
alias, current_user, AliasDeleteReason.ManualAction, commit=True
)
alias_utils.delete_alias(alias, current_user)
flash(f"Alias {email} has been deleted", "success")
elif request.form.get("form-name") == "disable-alias":
alias_utils.change_alias_status(
alias, enabled=False, message="Set enabled=False from dashboard"
)
alias.enabled = False
Session.commit()
flash(f"Alias {alias.email} has been disabled", "success")

return redirect(
url_for(
"dashboard.index",
query=query,
sort=sort,
filter=alias_filter,
page=page,
)
url_for("dashboard.index", query=query, sort=sort, filter=alias_filter)
)

mailboxes = current_user.mailboxes()

@@ -228,7 +204,6 @@ def index():
sort=sort,
filter=alias_filter,
stats=stats,
csrf_form=csrf_form,
)
@@ -1,4 +1,3 @@
import arrow
from flask import render_template, flash, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm

@@ -8,8 +7,6 @@ from app.config import ADMIN_EMAIL
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import send_email
from app.events.event_dispatcher import EventDispatcher
from app.events.generated.event_pb2 import UserPlanChanged, EventContent
from app.models import LifetimeCoupon


@@ -26,7 +23,7 @@ def lifetime_licence():

# user needs to cancel active subscription first
# to avoid being charged
sub = current_user.get_paddle_subscription()
sub = current_user.get_subscription()
if sub and not sub.cancelled:
flash("Please cancel your current subscription first", "warning")
return redirect(url_for("dashboard.index"))

@@ -43,14 +40,6 @@ def lifetime_licence():
current_user.lifetime_coupon_id = coupon.id
if coupon.paid:
current_user.paid_lifetime = True
EventDispatcher.send_event(
user=current_user,
content=EventContent(
user_plan_change=UserPlanChanged(
plan_end_time=arrow.get("2038-01-01").timestamp
)
),
)
Session.commit()

# notify admin
@@ -1,23 +1,23 @@
import base64
import binascii
import json
from typing import Optional

import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import TimestampSigner
from wtforms import validators, IntegerField
from itsdangerous import Signer
from wtforms import validators
from wtforms.fields.html5 import EmailField

from app import parallel_limiter, mailbox_utils, user_settings
from app.config import MAILBOX_SECRET
from app.config import MAILBOX_SECRET, URL, JOB_DELETE_MAILBOX
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
mailbox_already_used,
render,
send_email,
is_valid_email,
)
from app.log import LOG
from app.models import Mailbox
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import CSRFValidationForm
from app.models import Mailbox, Job


class NewMailboxForm(FlaskForm):
@ -26,16 +26,8 @@ class NewMailboxForm(FlaskForm):
|
|||
)
|
||||
|
||||
|
||||
class DeleteMailboxForm(FlaskForm):
|
||||
mailbox_id = IntegerField(
|
||||
validators=[validators.DataRequired()],
|
||||
)
|
||||
transfer_mailbox_id = IntegerField()
|
||||
|
||||
|
||||
@dashboard_bp.route("/mailbox", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
|
||||
def mailbox_route():
|
||||
mailboxes = (
|
||||
Mailbox.filter_by(user_id=current_user.id)
|
||||
|
@ -44,138 +36,169 @@ def mailbox_route():
|
|||
)
|
||||
|
||||
new_mailbox_form = NewMailboxForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
delete_mailbox_form = DeleteMailboxForm()
|
||||
|
||||
if request.method == "POST":
|
||||
if request.form.get("form-name") == "delete":
|
||||
if not delete_mailbox_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
try:
|
||||
mailbox = mailbox_utils.delete_mailbox(
|
||||
current_user,
|
||||
delete_mailbox_form.mailbox_id.data,
|
||||
delete_mailbox_form.transfer_mailbox_id.data,
|
||||
)
|
||||
except mailbox_utils.MailboxError as e:
|
||||
flash(e.msg, "warning")
|
||||
mailbox_id = request.form.get("mailbox-id")
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
|
||||
if not mailbox or mailbox.user_id != current_user.id:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
if mailbox.id == current_user.default_mailbox_id:
|
||||
flash("You cannot delete default mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
# Schedule delete account job
|
||||
LOG.w("schedule delete mailbox job for %s", mailbox)
|
||||
Job.create(
|
||||
name=JOB_DELETE_MAILBOX,
|
||||
payload={"mailbox_id": mailbox.id},
|
||||
run_at=arrow.now(),
|
||||
commit=True,
|
||||
)
|
||||
|
||||
flash(
|
||||
f"Mailbox {mailbox.email} scheduled for deletion."
|
||||
f"You will receive a confirmation email when the deletion is finished",
|
||||
"success",
|
||||
)
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
if request.form.get("form-name") == "set-default":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
try:
|
||||
mailbox_id = request.form.get("mailbox_id")
|
||||
mailbox = user_settings.set_default_mailbox(current_user, mailbox_id)
|
||||
except user_settings.CannotSetMailbox as e:
|
||||
flash(e.msg, "warning")
|
||||
mailbox_id = request.form.get("mailbox-id")
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
|
||||
if not mailbox or mailbox.user_id != current_user.id:
|
||||
flash("Unknown error. Refresh the page", "warning")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
if mailbox.id == current_user.default_mailbox_id:
|
||||
flash("This mailbox is already default one", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
if not mailbox.verified:
|
||||
flash("Cannot set unverified mailbox as default", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
current_user.default_mailbox_id = mailbox.id
|
||||
Session.commit()
|
||||
flash(f"Mailbox {mailbox.email} is set as Default Mailbox", "success")
|
||||
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
elif request.form.get("form-name") == "create":
|
||||
if not new_mailbox_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
mailbox_email = new_mailbox_form.email.data.lower().strip().replace(" ", "")
|
||||
try:
|
||||
mailbox = mailbox_utils.create_mailbox(
|
||||
current_user, mailbox_email
|
||||
).mailbox
|
||||
except mailbox_utils.MailboxError as e:
|
||||
flash(e.msg, "warning")
|
||||
if not current_user.is_premium():
|
||||
flash("Only premium plan can add additional mailbox", "warning")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
flash(
|
||||
f"You are going to receive an email to confirm {mailbox.email}.",
|
||||
"success",
|
||||
)
|
||||
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.mailbox_detail_route",
|
||||
mailbox_id=mailbox.id,
|
||||
if new_mailbox_form.validate():
|
||||
mailbox_email = (
|
||||
new_mailbox_form.email.data.lower().strip().replace(" ", "")
|
||||
)
|
||||
)
|
||||
|
||||
if not is_valid_email(mailbox_email):
|
||||
flash(f"{mailbox_email} invalid", "error")
|
||||
elif mailbox_already_used(mailbox_email, current_user):
|
||||
flash(f"{mailbox_email} already used", "error")
|
||||
elif not email_can_be_used_as_mailbox(mailbox_email):
|
||||
flash(f"You cannot use {mailbox_email}.", "error")
|
||||
else:
|
||||
new_mailbox = Mailbox.create(
|
||||
email=mailbox_email, user_id=current_user.id
|
||||
)
|
||||
Session.commit()
|
||||
|
||||
send_verification_email(current_user, new_mailbox)
|
||||
|
||||
flash(
|
||||
f"You are going to receive an email to confirm {mailbox_email}.",
|
||||
"success",
|
||||
)
|
||||
|
||||
return redirect(
|
||||
url_for(
|
||||
"dashboard.mailbox_detail_route", mailbox_id=new_mailbox.id
|
||||
)
|
||||
)
|
||||
|
||||
return render_template(
|
||||
"dashboard/mailbox.html",
|
||||
mailboxes=mailboxes,
|
||||
new_mailbox_form=new_mailbox_form,
|
||||
delete_mailbox_form=delete_mailbox_form,
|
||||
csrf_form=csrf_form,
|
||||
)
|
||||
|
||||
|
||||
def delete_mailbox(mailbox_id: int):
|
||||
from server import create_light_app
|
||||
|
||||
with create_light_app().app_context():
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox:
|
||||
return
|
||||
|
||||
mailbox_email = mailbox.email
|
||||
user = mailbox.user
|
||||
|
||||
Mailbox.delete(mailbox_id)
|
||||
Session.commit()
|
||||
LOG.d("Mailbox %s %s deleted", mailbox_id, mailbox_email)
|
||||
|
||||
send_email(
|
||||
user.email,
|
||||
f"Your mailbox {mailbox_email} has been deleted",
|
||||
f"""Mailbox {mailbox_email} along with its aliases are deleted successfully.
|
||||
|
||||
Regards,
|
||||
SimpleLogin team.
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
def send_verification_email(user, mailbox):
|
||||
s = Signer(MAILBOX_SECRET)
|
||||
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
|
||||
verification_url = (
|
||||
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
|
||||
)
|
||||
send_email(
|
||||
mailbox.email,
|
||||
f"Please confirm your email {mailbox.email}",
|
||||
render(
|
||||
"transactional/verify-mailbox.txt",
|
||||
user=user,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
),
|
||||
render(
|
||||
"transactional/verify-mailbox.html",
|
||||
user=user,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@dashboard_bp.route("/mailbox_verify")
|
||||
@login_required
|
||||
def mailbox_verify():
|
||||
s = Signer(MAILBOX_SECRET)
|
||||
mailbox_id = request.args.get("mailbox_id")
|
||||
if not mailbox_id:
|
||||
LOG.i("Missing mailbox_id")
|
||||
flash("You followed an invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
code = request.args.get("code")
|
||||
if not code:
|
||||
# Old way
|
||||
return verify_with_signed_secret(mailbox_id)
|
||||
|
||||
try:
|
||||
mailbox = mailbox_utils.verify_mailbox_code(current_user, mailbox_id, code)
|
||||
except mailbox_utils.MailboxError as e:
|
||||
LOG.i(f"Cannot verify mailbox {mailbox_id} because of {e}")
|
||||
flash(f"Cannot verify mailbox: {e.msg}", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
LOG.d("Mailbox %s is verified", mailbox)
|
||||
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
|
||||
|
||||
|
||||
def verify_with_signed_secret(request: str):
|
||||
s = TimestampSigner(MAILBOX_SECRET)
|
||||
mailbox_verify_request = request.args.get("mailbox_id")
|
||||
try:
|
||||
mailbox_raw_data = s.unsign(mailbox_verify_request, max_age=900)
|
||||
r_id = int(s.unsign(mailbox_id))
|
||||
except Exception:
|
||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
try:
|
||||
decoded_data = base64.urlsafe_b64decode(mailbox_raw_data)
|
||||
except binascii.Error:
|
||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
mailbox_data = json.loads(decoded_data)
|
||||
if not isinstance(mailbox_data, list) or len(mailbox_data) != 2:
|
||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
mailbox_id = mailbox_data[0]
|
||||
mailbox: Optional[Mailbox] = Mailbox.get(mailbox_id)
|
||||
if not mailbox:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
mailbox_email = mailbox_data[1]
|
||||
if mailbox_email != mailbox.email:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
else:
|
||||
mailbox = Mailbox.get(r_id)
|
||||
if not mailbox:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.mailbox_route"))
|
||||
|
||||
mailbox.verified = True
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.VerifyMailbox,
|
||||
message=f"Verified mailbox {mailbox.id} ({mailbox.email})",
|
||||
)
|
||||
Session.commit()
|
||||
mailbox.verified = True
|
||||
Session.commit()
|
||||
|
||||
LOG.d("Mailbox %s is verified", mailbox)
|
||||
LOG.d("Mailbox %s is verified", mailbox)
|
||||
|
||||
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
|
||||
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
|
||||
|
|
|
@@ -1,27 +1,23 @@
from smtplib import SMTPRecipientsRefused

from email_validator import validate_email, EmailNotValidError
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from itsdangerous import TimestampSigner
from itsdangerous import Signer
from wtforms import validators
from wtforms.fields.html5 import EmailField

from app.config import ENFORCE_SPF, MAILBOX_SECRET
from app.config import URL
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import email_can_be_used_as_mailbox
from app.email_utils import mailbox_already_used, render, send_email
from app.extensions import limiter
from app.mailbox_utils import perform_mailbox_email_change, MailboxEmailChangeError
from app.log import LOG
from app.models import Alias, AuthorizedAddress
from app.models import Mailbox
from app.pgp_utils import PGPException, load_public_key_and_check
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction
from app.utils import sanitize_email, CSRFValidationForm
from app.utils import sanitize_email


class ChangeEmailForm(FlaskForm):
@ -32,16 +28,13 @@ class ChangeEmailForm(FlaskForm):
|
|||
|
||||
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@sudo_required
|
||||
@limiter.limit("20/minute", methods=["POST"])
|
||||
def mailbox_detail_route(mailbox_id):
|
||||
mailbox: Mailbox = Mailbox.get(mailbox_id)
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
if not mailbox or mailbox.user_id != current_user.id:
|
||||
flash("You cannot see this page", "warning")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
change_email_form = ChangeEmailForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
|
||||
if mailbox.new_email:
|
||||
pending_email = mailbox.new_email
|
||||
|
@ -49,9 +42,6 @@ def mailbox_detail_route(mailbox_id):
|
|||
pending_email = None
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(request.url)
|
||||
if (
|
||||
request.form.get("form-name") == "update-email"
|
||||
and change_email_form.validate_on_submit()
|
||||
|
@ -89,12 +79,8 @@ def mailbox_detail_route(mailbox_id):
|
|||
flash("SPF enforcement globally not enabled", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
force_spf_value = request.form.get("spf-status") == "on"
|
||||
mailbox.force_spf = force_spf_value
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Set force_spf to {force_spf_value} on mailbox {mailbox_id} ({mailbox.email})",
|
||||
mailbox.force_spf = (
|
||||
True if request.form.get("spf-status") == "on" else False
|
||||
)
|
||||
Session.commit()
|
||||
flash(
|
||||
|
@ -108,28 +94,16 @@ def mailbox_detail_route(mailbox_id):
|
|||
)
|
||||
elif request.form.get("form-name") == "add-authorized-address":
|
||||
address = sanitize_email(request.form.get("email"))
|
||||
try:
|
||||
validate_email(
|
||||
address, check_deliverability=False, allow_smtputf8=False
|
||||
).domain
|
||||
except EmailNotValidError:
|
||||
flash(f"invalid {address}", "error")
|
||||
if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
|
||||
flash(f"{address} already added", "error")
|
||||
else:
|
||||
if AuthorizedAddress.get_by(mailbox_id=mailbox.id, email=address):
|
||||
flash(f"{address} already added", "error")
|
||||
else:
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Add authorized address {address} to mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
AuthorizedAddress.create(
|
||||
user_id=current_user.id,
|
||||
mailbox_id=mailbox.id,
|
||||
email=address,
|
||||
commit=True,
|
||||
)
|
||||
flash(f"{address} added as authorized address", "success")
|
||||
AuthorizedAddress.create(
|
||||
user_id=current_user.id,
|
||||
mailbox_id=mailbox.id,
|
||||
email=address,
|
||||
commit=True,
|
||||
)
|
||||
flash(f"{address} added as authorized address", "success")
|
||||
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
|
@ -143,11 +117,6 @@ def mailbox_detail_route(mailbox_id):
|
|||
flash("Unknown error. Refresh the page", "warning")
|
||||
else:
|
||||
address = authorized_address.email
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Remove authorized address {address} from mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
AuthorizedAddress.delete(authorized_address_id)
|
||||
Session.commit()
|
||||
flash(f"{address} has been deleted", "success")
|
||||
|
@ -163,15 +132,6 @@ def mailbox_detail_route(mailbox_id):
|
|||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
if mailbox.is_proton():
|
||||
flash(
|
||||
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
|
||||
"info",
|
||||
)
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
mailbox.pgp_public_key = request.form.get("pgp")
|
||||
try:
|
||||
mailbox.pgp_finger_print = load_public_key_and_check(
|
||||
|
@ -180,11 +140,6 @@ def mailbox_detail_route(mailbox_id):
|
|||
except PGPException:
|
||||
flash("Cannot add the public key, please verify it", "error")
|
||||
else:
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Add PGP Key {mailbox.pgp_finger_print} to mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
Session.commit()
|
||||
flash("Your PGP public key is saved successfully", "success")
|
||||
return redirect(
|
||||
|
@ -192,11 +147,6 @@ def mailbox_detail_route(mailbox_id):
|
|||
)
|
||||
elif request.form.get("action") == "remove":
|
||||
# Free user can decide to remove their added PGP key
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Remove PGP Key {mailbox.pgp_finger_print} from mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
mailbox.pgp_public_key = None
|
||||
mailbox.pgp_finger_print = None
|
||||
mailbox.disable_pgp = False
|
||||
|
@ -208,27 +158,10 @@ def mailbox_detail_route(mailbox_id):
|
|||
|
||||
elif request.form.get("form-name") == "toggle-pgp":
|
||||
if request.form.get("pgp-enabled") == "on":
|
||||
if mailbox.is_proton():
|
||||
mailbox.disable_pgp = True
|
||||
flash(
|
||||
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
|
||||
"info",
|
||||
)
|
||||
else:
|
||||
mailbox.disable_pgp = False
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Enabled PGP for mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
flash(f"PGP is enabled on {mailbox.email}", "info")
|
||||
mailbox.disable_pgp = False
|
||||
flash(f"PGP is enabled on {mailbox.email}", "success")
|
||||
else:
|
||||
mailbox.disable_pgp = True
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Disabled PGP for mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
flash(f"PGP is disabled on {mailbox.email}", "info")
|
||||
|
||||
Session.commit()
|
||||
|
@ -237,26 +170,25 @@ def mailbox_detail_route(mailbox_id):
|
|||
)
|
||||
elif request.form.get("form-name") == "generic-subject":
|
||||
if request.form.get("action") == "save":
|
||||
if not mailbox.pgp_enabled():
|
||||
flash(
|
||||
"Generic subject can only be used on PGP-enabled mailbox",
|
||||
"error",
|
||||
)
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
||||
mailbox.generic_subject = request.form.get("generic-subject")
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Set generic subject for mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
Session.commit()
|
||||
flash("Generic subject is enabled", "success")
|
||||
flash("Generic subject for PGP-encrypted email is enabled", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
elif request.form.get("action") == "remove":
|
||||
mailbox.generic_subject = None
|
||||
emit_user_audit_log(
|
||||
user=current_user,
|
||||
action=UserAuditLogAction.UpdateMailbox,
|
||||
message=f"Remove generic subject for mailbox {mailbox_id} ({mailbox.email})",
|
||||
)
|
||||
Session.commit()
|
||||
flash("Generic subject is disabled", "success")
|
||||
flash("Generic subject for PGP-encrypted email is disabled", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
|
@ -266,7 +198,7 @@ def mailbox_detail_route(mailbox_id):
|
|||
|
||||
|
||||
def verify_mailbox_change(user, mailbox, new_email):
|
||||
s = TimestampSigner(MAILBOX_SECRET)
|
||||
s = Signer(MAILBOX_SECRET)
|
||||
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
|
||||
verification_url = (
|
||||
f"{URL}/dashboard/mailbox/confirm_change?mailbox_id={mailbox_id_signed}"
|
||||
|
@ -276,7 +208,7 @@ def verify_mailbox_change(user, mailbox, new_email):
|
|||
new_email,
|
||||
"Confirm mailbox change on SimpleLogin",
|
||||
render(
|
||||
"transactional/verify-mailbox-change.txt.jinja2",
|
||||
"transactional/verify-mailbox-change.txt",
|
||||
user=user,
|
||||
link=verification_url,
|
||||
mailbox_email=mailbox.email,
|
||||
|
@ -317,29 +249,39 @@ def cancel_mailbox_change_route(mailbox_id):
|
|||
|
||||
|
||||
@dashboard_bp.route("/mailbox/confirm_change")
|
||||
def mailbox_confirm_email_change_route():
|
||||
s = TimestampSigner(MAILBOX_SECRET)
|
||||
def mailbox_confirm_change_route():
|
||||
s = Signer(MAILBOX_SECRET)
|
||||
signed_mailbox_id = request.args.get("mailbox_id")
|
||||
|
||||
try:
|
||||
mailbox_id = int(s.unsign(signed_mailbox_id, max_age=900))
|
||||
mailbox_id = int(s.unsign(signed_mailbox_id))
|
||||
except Exception:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
res = perform_mailbox_email_change(mailbox_id)
|
||||
|
||||
flash(res.message, res.message_category)
|
||||
if res.error:
|
||||
if res.error == MailboxEmailChangeError.EmailAlreadyUsed:
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
elif res.error == MailboxEmailChangeError.InvalidId:
|
||||
return redirect(url_for("dashboard.index"))
|
||||
else:
|
||||
raise Exception("Unhandled MailboxEmailChangeError")
|
||||
else:
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
|
||||
)
|
||||
mailbox = Mailbox.get(mailbox_id)
|
||||
|
||||
# new_email can be None if user cancels change in the meantime
|
||||
if mailbox and mailbox.new_email:
|
||||
user = mailbox.user
|
||||
if Mailbox.get_by(email=mailbox.new_email, user_id=user.id):
|
||||
flash(f"{mailbox.new_email} is already used", "error")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
|
||||
)
|
||||
|
||||
mailbox.email = mailbox.new_email
|
||||
mailbox.new_email = None
|
||||
|
||||
# mark mailbox as verified if the change request is sent from an unverified mailbox
|
||||
mailbox.verified = True
|
||||
Session.commit()
|
||||
|
||||
LOG.d("Mailbox change %s is verified", mailbox)
|
||||
flash(f"The {mailbox.email} is updated", "success")
|
||||
return redirect(
|
||||
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox.id)
|
||||
)
|
||||
else:
|
||||
flash("Invalid link", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
|
|
@@ -5,7 +5,6 @@ from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.models import RecoveryCode
from app.utils import CSRFValidationForm


@dashboard_bp.route("/mfa_cancel", methods=["GET", "POST"])

@@ -16,13 +15,8 @@ def mfa_cancel():
flash("you don't have MFA enabled", "warning")
return redirect(url_for("dashboard.index"))

csrf_form = CSRFValidationForm()

# user cancels TOTP
if request.method == "POST":
if not csrf_form.validate():
flash("Invalid request", "warning")
return redirect(request.url)
current_user.enable_otp = False
current_user.otp_secret = None
Session.commit()

@@ -34,4 +28,4 @@ def mfa_cancel():
flash("TOTP is now disabled", "warning")
return redirect(url_for("dashboard.index"))

return render_template("dashboard/mfa_cancel.html", csrf_form=csrf_form)
return render_template("dashboard/mfa_cancel.html")
@@ -8,7 +7,6 @@ from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.log import LOG
from app.models import RecoveryCode


class OtpTokenForm(FlaskForm):

@@ -40,10 +39,8 @@ def mfa_setup():
current_user.last_otp = token
Session.commit()
flash("MFA has been activated", "success")
recovery_codes = RecoveryCode.generate(current_user)
return render_template(
"dashboard/recovery_code.html", recovery_codes=recovery_codes
)

return redirect(url_for("dashboard.recovery_code_route"))
else:
flash("Incorrect token", "warning")
@@ -43,10 +43,7 @@ def notification_route(notification_id):
def notifications_route():
page = 0
if request.args.get("page"):
try:
page = int(request.args.get("page"))
except ValueError:
pass
page = int(request.args.get("page"))

notifications = (
Notification.filter_by(user_id=current_user.id)
@ -12,17 +12,13 @@ from app.config import (
|
|||
COINBASE_API_KEY,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.extensions import limiter
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
AppleSubscription,
|
||||
Subscription,
|
||||
ManualSubscription,
|
||||
CoinbaseSubscription,
|
||||
PartnerUser,
|
||||
PartnerSubscription,
|
||||
)
|
||||
from app.proton.utils import get_proton_partner
|
||||
|
||||
|
||||
@dashboard_bp.route("/pricing", methods=["GET", "POST"])
|
||||
|
@ -32,9 +28,9 @@ def pricing():
|
|||
flash("You already have a lifetime subscription", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
paddle_sub: Subscription = current_user.get_paddle_subscription()
|
||||
sub: Subscription = current_user.get_subscription()
|
||||
# user who has canceled can re-subscribe
|
||||
if paddle_sub and not paddle_sub.cancelled:
|
||||
if sub and not sub.cancelled:
|
||||
flash("You already have an active subscription", "error")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
|
@ -52,18 +48,6 @@ def pricing():
|
|||
if apple_sub and apple_sub.is_valid():
|
||||
flash("Please make sure to cancel your subscription on Apple first", "warning")
|
||||
|
||||
proton_upgrade = False
|
||||
partner_user = PartnerUser.get_by(user_id=current_user.id)
|
||||
if partner_user:
|
||||
partner_sub = PartnerSubscription.get_by(partner_user_id=partner_user.id)
|
||||
if partner_sub and partner_sub.is_active():
|
||||
flash(
|
||||
f"You already have a subscription provided by {partner_user.partner.name}",
|
||||
"error",
|
||||
)
|
||||
return redirect(url_for("dashboard.index"))
|
||||
proton_upgrade = partner_user.partner_id == get_proton_partner().id
|
||||
|
||||
return render_template(
|
||||
"dashboard/pricing.html",
|
||||
PADDLE_VENDOR_ID=PADDLE_VENDOR_ID,
|
||||
|
@ -73,21 +57,18 @@ def pricing():
|
|||
manual_sub=manual_sub,
|
||||
coinbase_sub=coinbase_sub,
|
||||
now=now,
|
||||
proton_upgrade=proton_upgrade,
|
||||
)
|
||||
|
||||
|
||||
@dashboard_bp.route("/subscription_success")
|
||||
@login_required
|
||||
def subscription_success():
|
||||
return render_template(
|
||||
"dashboard/thank-you.html",
|
||||
)
|
||||
flash("Thanks so much for supporting SimpleLogin!", "success")
|
||||
return redirect(url_for("dashboard.index"))
|
||||
|
||||
|
||||
@dashboard_bp.route("/coinbase_checkout")
|
||||
@login_required
|
||||
@limiter.limit("5/minute")
|
||||
def coinbase_checkout_route():
|
||||
client = Client(api_key=COINBASE_API_KEY)
|
||||
charge = client.charge.create(
|
||||
|
|
30 app/dashboard/views/recovery_code.py Normal file

@@ -0,0 +1,30 @@
from flask import render_template, flash, redirect, url_for, request
from flask_login import login_required, current_user

from app.dashboard.base import dashboard_bp
from app.log import LOG
from app.models import RecoveryCode


@dashboard_bp.route("/recovery_code", methods=["GET", "POST"])
@login_required
def recovery_code_route():
if not current_user.two_factor_authentication_enabled():
flash("you need to enable either TOTP or WebAuthn", "warning")
return redirect(url_for("dashboard.index"))

recovery_codes = RecoveryCode.filter_by(user_id=current_user.id).all()
if request.method == "GET" and not recovery_codes:
# user arrives at this page for the first time
LOG.d("%s has no recovery keys, generate", current_user)
RecoveryCode.generate(current_user)
recovery_codes = RecoveryCode.filter_by(user_id=current_user.id).all()

if request.method == "POST":
RecoveryCode.generate(current_user)
flash("New recovery codes generated", "success")
return redirect(url_for("dashboard.recovery_code_route"))

return render_template(
"dashboard/recovery_code.html", recovery_codes=recovery_codes
)
|
@ -1,5 +1,4 @@
|
|||
from io import BytesIO
|
||||
from typing import Optional, Tuple
|
||||
|
||||
import arrow
|
||||
from flask import (
|
||||
|
@ -12,40 +11,45 @@ from flask import (
|
|||
from flask_login import login_required, current_user
|
||||
from flask_wtf import FlaskForm
|
||||
from flask_wtf.file import FileField
|
||||
from typing import Optional
|
||||
from wtforms import StringField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
|
||||
from app import s3, user_settings
|
||||
from app import s3, email_utils
|
||||
from app.config import (
|
||||
URL,
|
||||
FIRST_ALIAS_DOMAIN,
|
||||
ALIAS_RANDOM_SUFFIX_LENGTH,
|
||||
CONNECT_WITH_PROTON,
|
||||
)
|
||||
from app.dashboard.base import dashboard_bp
|
||||
from app.db import Session
|
||||
from app.email_utils import (
|
||||
email_can_be_used_as_mailbox,
|
||||
personal_email_already_used,
|
||||
)
|
||||
from app.errors import ProtonPartnerNotSetUp
|
||||
from app.extensions import limiter
|
||||
from app.image_validation import detect_image_format, ImageFormat
|
||||
from app.log import LOG
|
||||
from app.models import (
|
||||
BlockBehaviourEnum,
|
||||
PlanEnum,
|
||||
File,
|
||||
ResetPasswordCode,
|
||||
EmailChange,
|
||||
User,
|
||||
Alias,
|
||||
CustomDomain,
|
||||
AliasGeneratorEnum,
|
||||
AliasSuffixEnum,
|
||||
ManualSubscription,
|
||||
SenderFormatEnum,
|
||||
SLDomain,
|
||||
CoinbaseSubscription,
|
||||
AppleSubscription,
|
||||
PartnerUser,
|
||||
PartnerSubscription,
|
||||
UnsubscribeBehaviourEnum,
|
||||
)
|
||||
from app.proton.utils import get_proton_partner
|
||||
from app.utils import (
|
||||
random_string,
|
||||
CSRFValidationForm,
|
||||
)
|
||||
from app.proton.proton_callback_handler import get_proton_partner_id
|
||||
from app.utils import random_string, sanitize_email
|
||||
|
||||
|
||||
class SettingForm(FlaskForm):
|
||||
|
@ -53,6 +57,12 @@ class SettingForm(FlaskForm):
|
|||
profile_picture = FileField("Profile Picture")
|
||||
|
||||
|
||||
class ChangeEmailForm(FlaskForm):
|
||||
email = EmailField(
|
||||
"email", validators=[validators.DataRequired(), validators.Email()]
|
||||
)
|
||||
|
||||
|
||||
class PromoCodeForm(FlaskForm):
|
||||
code = StringField("Name", validators=[validators.DataRequired()])
|
||||
|
||||
|
@ -60,10 +70,13 @@ class PromoCodeForm(FlaskForm):
|
|||
def get_proton_linked_account() -> Optional[str]:
|
||||
# Check if the current user has a partner_id
|
||||
try:
|
||||
proton_partner_id = get_proton_partner().id
|
||||
proton_partner_id = get_proton_partner_id()
|
||||
except ProtonPartnerNotSetUp:
|
||||
return None
|
||||
|
||||
if current_user.partner_id != proton_partner_id:
|
||||
return None
|
||||
|
||||
# It has. Retrieve the information for the PartnerUser
|
||||
proton_linked_account = PartnerUser.get_by(
|
||||
user_id=current_user.id, partner_id=proton_partner_id
|
||||
|
@ -73,24 +86,12 @@ def get_proton_linked_account() -> Optional[str]:
|
|||
return proton_linked_account.partner_email
|
||||
|
||||
|
||||
def get_partner_subscription_and_name(
|
||||
user_id: int,
|
||||
) -> Optional[Tuple[PartnerSubscription, str]]:
|
||||
partner_sub = PartnerSubscription.find_by_user_id(user_id)
|
||||
if not partner_sub or not partner_sub.is_active():
|
||||
return None
|
||||
|
||||
partner = partner_sub.partner_user.partner
|
||||
return (partner_sub, partner.name)
|
||||
|
||||
|
||||
@dashboard_bp.route("/setting", methods=["GET", "POST"])
|
||||
@login_required
|
||||
@limiter.limit("5/minute", methods=["POST"])
|
||||
def setting():
|
||||
form = SettingForm()
|
||||
promo_form = PromoCodeForm()
|
||||
csrf_form = CSRFValidationForm()
|
||||
change_email_form = ChangeEmailForm()
|
||||
|
||||
email_change = EmailChange.get_by(user_id=current_user.id)
|
||||
if email_change:
|
||||
|
@ -99,10 +100,67 @@ def setting():
|
|||
pending_email = None
|
||||
|
||||
if request.method == "POST":
|
||||
if not csrf_form.validate():
|
||||
flash("Invalid request", "warning")
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
if request.form.get("form-name") == "update-email":
|
||||
if change_email_form.validate():
|
||||
# whether user can proceed with the email update
|
||||
new_email_valid = True
|
||||
if (
|
||||
sanitize_email(change_email_form.email.data) != current_user.email
|
||||
and not pending_email
|
||||
):
|
||||
new_email = sanitize_email(change_email_form.email.data)
|
||||
|
||||
# check if this email is not already used
|
||||
if personal_email_already_used(new_email) or Alias.get_by(
|
||||
email=new_email
|
||||
):
|
||||
flash(f"Email {new_email} already used", "error")
|
||||
new_email_valid = False
|
||||
elif not email_can_be_used_as_mailbox(new_email):
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
# a pending email change with the same email exists from another user
|
||||
elif EmailChange.get_by(new_email=new_email):
|
||||
other_email_change: EmailChange = EmailChange.get_by(
|
||||
new_email=new_email
|
||||
)
|
||||
LOG.w(
|
||||
"Another user has a pending %s with the same email address. Current user:%s",
|
||||
other_email_change,
|
||||
current_user,
|
||||
)
|
||||
|
||||
if other_email_change.is_expired():
|
||||
LOG.d(
|
||||
"delete the expired email change %s", other_email_change
|
||||
)
|
||||
EmailChange.delete(other_email_change.id)
|
||||
Session.commit()
|
||||
else:
|
||||
flash(
|
||||
"You cannot use this email address as your personal inbox.",
|
||||
"error",
|
||||
)
|
||||
new_email_valid = False
|
||||
|
||||
if new_email_valid:
|
||||
email_change = EmailChange.create(
|
||||
user_id=current_user.id,
|
||||
code=random_string(
|
||||
60
|
||||
), # todo: make sure the code is unique
|
||||
new_email=new_email,
|
||||
)
|
||||
Session.commit()
|
||||
send_change_email_confirmation(current_user, email_change)
|
||||
flash(
|
||||
"A confirmation email is on the way, please check your inbox",
|
||||
"success",
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
if request.form.get("form-name") == "update-profile":
|
||||
if form.validate():
|
||||
profile_updated = False
|
||||
|
@ -113,28 +171,12 @@ def setting():
|
|||
profile_updated = True
|
||||
|
||||
if form.profile_picture.data:
|
||||
image_contents = form.profile_picture.data.read()
|
||||
if detect_image_format(image_contents) == ImageFormat.Unknown:
|
||||
flash(
|
||||
"This image format is not supported",
|
||||
"error",
|
||||
)
|
||||
return redirect(url_for("dashboard.setting"))
|
||||
|
||||
if current_user.profile_picture_id is not None:
|
||||
current_profile_file = File.get_by(
|
||||
id=current_user.profile_picture_id
|
||||
)
|
||||
if (
|
||||
current_profile_file is not None
|
||||
and current_profile_file.user_id == current_user.id
|
||||
):
|
||||
s3.delete(current_profile_file.path)
|
||||
|
||||
file_path = random_string(30)
|
||||
file = File.create(user_id=current_user.id, path=file_path)
|
||||
|
||||
s3.upload_from_bytesio(file_path, BytesIO(image_contents))
|
||||
s3.upload_from_bytesio(
|
||||
file_path, BytesIO(form.profile_picture.data.read())
|
||||
)
|
||||
|
||||
Session.flush()
|
||||
LOG.d("upload file %s to s3", file)
|
||||
|
@@ -146,6 +188,15 @@ def setting():
if profile_updated:
flash("Your profile has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-password":
flash(
"You are going to receive an email containing instructions to change your password",
"success",
)
send_reset_password_email(current_user)
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "notification-preference":
choose = request.form.get("notification")
if choose == "on":

@@ -155,6 +206,7 @@ def setting():
Session.commit()
flash("Your notification preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-alias-generator":
scheme = int(request.form.get("alias-generator-scheme"))
if AliasGeneratorEnum.has_value(scheme):

@@ -162,17 +214,46 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-random-alias-default-domain":
default_domain = request.form.get("random-alias-default-domain")
try:
user_settings.set_default_alias_domain(current_user, default_domain)
except user_settings.CannotSetAlias as e:
flash(e.msg, "error")
return redirect(url_for("dashboard.setting"))

if default_domain:
sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
if sl_domain:
if sl_domain.premium_only and not current_user.is_premium():
flash("You cannot use this domain", "error")
return redirect(url_for("dashboard.setting"))

current_user.default_alias_public_domain_id = sl_domain.id
current_user.default_alias_custom_domain_id = None
else:
custom_domain = CustomDomain.get_by(domain=default_domain)
if custom_domain:
# sanity check
if (
custom_domain.user_id != current_user.id
or not custom_domain.verified
):
LOG.w(
"%s cannot use domain %s", current_user, custom_domain
)
flash(f"Domain {default_domain} can't be used", "error")
return redirect(request.url)
else:
current_user.default_alias_custom_domain_id = (
custom_domain.id
)
current_user.default_alias_public_domain_id = None

else:
current_user.default_alias_custom_domain_id = None
current_user.default_alias_public_domain_id = None

Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "random-alias-suffix":
scheme = int(request.form.get("random-alias-suffix-generator"))
if AliasSuffixEnum.has_value(scheme):

@@ -180,6 +261,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "change-sender-format":
sender_format = int(request.form.get("sender-format"))
if SenderFormatEnum.has_value(sender_format):

@@ -189,6 +271,7 @@ def setting():
flash("Your sender format preference has been updated", "success")
Session.commit()
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "replace-ra":
choose = request.form.get("replace-ra")
if choose == "on":

@@ -198,21 +281,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "enable_data_breach_check":
if not current_user.is_premium():
flash("Only premium plan can enable data breach monitoring", "warning")
return redirect(url_for("dashboard.setting"))
choose = request.form.get("enable_data_breach_check")
if choose == "on":
LOG.i("User {current_user} has enabled data breach monitoring")
current_user.enable_data_breach_check = True
flash("Data breach monitoring is enabled", "success")
else:
LOG.i("User {current_user} has disabled data breach monitoring")
current_user.enable_data_breach_check = False
flash("Data breach monitoring is disabled", "info")
Session.commit()
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "sender-in-ra":
choose = request.form.get("enable")
if choose == "on":

@@ -222,6 +291,7 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

elif request.form.get("form-name") == "expand-alias-info":
choose = request.form.get("enable")
if choose == "on":

@@ -241,16 +311,11 @@ def setting():
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "one-click-unsubscribe":
choose = request.form.get("unsubscribe-behaviour")
if choose == UnsubscribeBehaviourEnum.PreserveOriginal.name:
current_user.unsub_behaviour = UnsubscribeBehaviourEnum.PreserveOriginal
elif choose == UnsubscribeBehaviourEnum.DisableAlias.name:
current_user.unsub_behaviour = UnsubscribeBehaviourEnum.DisableAlias
elif choose == UnsubscribeBehaviourEnum.BlockContact.name:
current_user.unsub_behaviour = UnsubscribeBehaviourEnum.BlockContact
choose = request.form.get("enable")
if choose == "on":
current_user.one_click_unsubscribe_block_sender = True
else:
flash("There was an error. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
current_user.one_click_unsubscribe_block_sender = False
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))

@@ -283,39 +348,106 @@ def setting():
Session.commit()
flash("Your preference has been updated", "success")
return redirect(url_for("dashboard.setting"))
elif request.form.get("form-name") == "export-data":
return redirect(url_for("api.export_data"))
elif request.form.get("form-name") == "export-alias":
return redirect(url_for("api.export_aliases"))

manual_sub = ManualSubscription.get_by(user_id=current_user.id)
apple_sub = AppleSubscription.get_by(user_id=current_user.id)
coinbase_sub = CoinbaseSubscription.get_by(user_id=current_user.id)
paddle_sub = current_user.get_paddle_subscription()
partner_sub = None
partner_name = None

partner_sub_name = get_partner_subscription_and_name(current_user.id)
if partner_sub_name:
partner_sub, partner_name = partner_sub_name

proton_linked_account = get_proton_linked_account()

return render_template(
"dashboard/setting.html",
csrf_form=csrf_form,
form=form,
PlanEnum=PlanEnum,
SenderFormatEnum=SenderFormatEnum,
BlockBehaviourEnum=BlockBehaviourEnum,
promo_form=promo_form,
change_email_form=change_email_form,
pending_email=pending_email,
AliasGeneratorEnum=AliasGeneratorEnum,
UnsubscribeBehaviourEnum=UnsubscribeBehaviourEnum,
manual_sub=manual_sub,
partner_sub=partner_sub,
partner_name=partner_name,
apple_sub=apple_sub,
paddle_sub=paddle_sub,
coinbase_sub=coinbase_sub,
FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
connect_with_proton=CONNECT_WITH_PROTON,
proton_linked_account=proton_linked_account,
)


def send_reset_password_email(user):
"""
generate a new ResetPasswordCode and send it over email to user
"""
# the activation code is valid for 1h
reset_password_code = ResetPasswordCode.create(
user_id=user.id, code=random_string(60)
)
Session.commit()

reset_password_link = f"{URL}/auth/reset_password?code={reset_password_code.code}"

email_utils.send_reset_password_email(user.email, reset_password_link)


def send_change_email_confirmation(user: User, email_change: EmailChange):
"""
send confirmation email to the new email address
"""

link = f"{URL}/auth/change_email?code={email_change.code}"

email_utils.send_change_email(email_change.new_email, user.email, link)


@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@login_required
def resend_email_change():
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
# extend email change expiration
email_change.expired = arrow.now().shift(hours=12)
Session.commit()

send_change_email_confirmation(current_user, email_change)
flash("A confirmation email is on the way, please check your inbox", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))


@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
def cancel_email_change():
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
EmailChange.delete(email_change.id)
Session.commit()
flash("Your email change is cancelled", "success")
return redirect(url_for("dashboard.setting"))
else:
flash(
"You have no pending email change. Redirect back to Setting page", "warning"
)
return redirect(url_for("dashboard.setting"))


@dashboard_bp.route("/unlink_proton_account", methods=["GET", "POST"])
@login_required
def unlink_proton_account():
current_user.partner_id = None
current_user.partner_user_id = None
partner_user = PartnerUser.get_by(
user_id=current_user.id, partner_id=get_proton_partner_id()
)
if partner_user is not None:
PartnerUser.delete(partner_user.id)
Session.commit()
flash("Your Proton account has been unlinked", "success")
return redirect(url_for("dashboard.setting"))
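One side of the default-alias-domain hunk above performs the SLDomain/CustomDomain checks inline, while the other delegates to user_settings.set_default_alias_domain and catches user_settings.CannotSetAlias. The app/user_settings.py module itself is not part of the hunks shown here, so the following is only a sketch of such a helper, assumed to wrap the same checks as the inline branch; the real module may differ.

    # Assumed sketch; the real app/user_settings.py is not shown in this diff.
    from app.log import LOG
    from app.models import CustomDomain, SLDomain, User


    class CannotSetAlias(Exception):
        def __init__(self, msg: str):
            self.msg = msg


    def set_default_alias_domain(user: User, domain_name: str):
        if not domain_name:
            # an empty choice resets both defaults
            user.default_alias_public_domain_id = None
            user.default_alias_custom_domain_id = None
            return

        sl_domain = SLDomain.get_by(domain=domain_name)
        if sl_domain:
            if sl_domain.premium_only and not user.is_premium():
                raise CannotSetAlias("You cannot use this domain")
            user.default_alias_public_domain_id = sl_domain.id
            user.default_alias_custom_domain_id = None
            return

        custom_domain = CustomDomain.get_by(domain=domain_name)
        if (
            not custom_domain
            or custom_domain.user_id != user.id
            or not custom_domain.verified
        ):
            LOG.w("%s cannot use domain %s", user, domain_name)
            raise CannotSetAlias(f"Domain {domain_name} can't be used")

        user.default_alias_custom_domain_id = custom_domain.id
        user.default_alias_public_domain_id = None

The caller commits the session afterwards, as the hunk above does with Session.commit().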
@@ -2,33 +2,19 @@ import re

from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import parallel_limiter
from app.config import MAX_NB_SUBDOMAIN
from app.dashboard.base import dashboard_bp
from app.errors import SubdomainInTrashError
from app.log import LOG
from app.models import CustomDomain, Mailbox, SLDomain
from app.user_audit_log_utils import emit_user_audit_log, UserAuditLogAction

# Only lowercase letters, numbers, dashes (-) are currently supported
_SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"


class NewSubdomainForm(FlaskForm):
domain = StringField(
"domain", validators=[validators.DataRequired(), validators.Length(max=64)]
)
subdomain = StringField(
"subdomain", validators=[validators.DataRequired(), validators.Length(max=64)]
)


@dashboard_bp.route("/subdomain", methods=["GET", "POST"])
@login_required
@parallel_limiter.lock(only_when=lambda: request.method == "POST")
def subdomain_route():
if not current_user.subdomain_is_available():
flash("Unknown error, redirect to the home page", "error")

@@ -40,13 +26,9 @@ def subdomain_route():
).all()

errors = {}
new_subdomain_form = NewSubdomainForm()

if request.method == "POST":
if request.form.get("form-name") == "create":
if not new_subdomain_form.validate():
flash("Invalid new subdomain", "warning")
return redirect(url_for("dashboard.subdomain_route"))
if not current_user.is_premium():
flash("Only premium plan can add subdomain", "warning")
return redirect(request.url)

@@ -57,8 +39,8 @@ def subdomain_route():
)
return redirect(request.url)

subdomain = new_subdomain_form.subdomain.data.lower().strip()
domain = new_subdomain_form.domain.data.lower().strip()
subdomain = request.form.get("subdomain").lower().strip()
domain = request.form.get("domain").lower().strip()

if len(subdomain) < 3:
flash("Subdomain must have at least 3 characters", "error")

@@ -103,12 +85,6 @@ def subdomain_route():
ownership_verified=True,
commit=True,
)
emit_user_audit_log(
user=current_user,
action=UserAuditLogAction.CreateCustomDomain,
message=f"Create subdomain {new_custom_domain.id} ({full_domain})",
commit=True,
)
except SubdomainInTrashError:
flash(
f"{full_domain} has been used before and cannot be reused",

@@ -132,5 +108,4 @@ def subdomain_route():
sl_domains=sl_domains,
errors=errors,
subdomains=subdomains,
new_subdomain_form=new_subdomain_form,
)
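The subdomain hunks above replace raw request.form.get access with a NewSubdomainForm backed by WTForms validators, and define _SUBDOMAIN_PATTERN for the allowed characters. The hunk that actually applies the regex is not shown, so the following is only an assumed illustration of how such a pattern constrains a candidate subdomain, not the exact validation code from subdomain_route().

    # Illustration only; the helper name is hypothetical.
    import re

    _SUBDOMAIN_PATTERN = r"[0-9a-z-]{1,}"


    def looks_like_valid_subdomain(subdomain: str) -> bool:
        # fullmatch: the whole string must be lowercase letters, digits or dashes
        return re.fullmatch(_SUBDOMAIN_PATTERN, subdomain) is not None


    assert looks_like_valid_subdomain("my-team-1")
    assert not looks_like_valid_subdomain("My_Team")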
@@ -8,14 +8,11 @@ from app.db import Session
from flask import redirect, url_for, flash, request, render_template
from flask_login import login_required, current_user

from app import alias_utils
from app.dashboard.base import dashboard_bp
from app.handler.unsubscribe_encoder import UnsubscribeAction
from app.handler.unsubscribe_handler import UnsubscribeHandler
from app.models import Alias, Contact


@dashboard_bp.route("/unsubscribe/<int:alias_id>", methods=["GET", "POST"])
@dashboard_bp.route("/unsubscribe/<alias_id>", methods=["GET", "POST"])
@login_required
def unsubscribe(alias_id):
alias = Alias.get(alias_id)

@@ -32,9 +29,7 @@ def unsubscribe(alias_id):

# automatic unsubscribe, according to https://tools.ietf.org/html/rfc8058
if request.method == "POST":
alias_utils.change_alias_status(
alias, enabled=False, message="Set enabled=False from unsubscribe request"
)
alias.enabled = False
flash(f"Alias {alias.email} has been blocked", "success")
Session.commit()

@@ -43,7 +38,7 @@ def unsubscribe(alias_id):
return render_template("dashboard/unsubscribe.html", alias=alias.email)


@dashboard_bp.route("/block_contact/<int:contact_id>", methods=["GET", "POST"])
@dashboard_bp.route("/block_contact/<contact_id>", methods=["GET", "POST"])
@login_required
def block_contact(contact_id):
contact = Contact.get(contact_id)

@@ -73,43 +68,3 @@ def block_contact(contact_id):
)
else: # ask user confirmation
return render_template("dashboard/block_contact.html", contact=contact)


@dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
@login_required
def encoded_unsubscribe(encoded_request: str):
unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
current_user, encoded_request
)
if not unsub_data:
flash("Invalid unsubscribe request", "error")
return redirect(url_for("dashboard.index"))
if unsub_data.action == UnsubscribeAction.DisableAlias:
alias = Alias.get(unsub_data.data)
flash(f"Alias {alias.email} has been blocked", "success")
return redirect(url_for("dashboard.index", highlight_alias_id=alias.id))
if unsub_data.action == UnsubscribeAction.DisableContact:
contact = Contact.get(unsub_data.data)
flash(f"Emails sent from {contact.website_email} are now blocked", "success")
return redirect(
url_for(
"dashboard.alias_contact_manager",
alias_id=contact.alias_id,
highlight_contact_id=contact.id,
)
)
if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
flash("You've unsubscribed from the newsletter", "success")
return redirect(
url_for(
"dashboard.index",
)
)
if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
flash("The original unsubscribe request has been forwarded", "success")
return redirect(
url_for(
"dashboard.index",
)
)
return redirect(url_for("dashboard.index"))
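The comment in the unsubscribe() hunk above points at RFC 8058 one-click unsubscribe: a POST to the unsubscribe URL disables the alias without an extra confirmation page. On the sending side, one-click unsubscribe is advertised through two headers; the snippet below is a standard-library illustration with a placeholder URL, not code from this repository.

    # Illustrative only; the URL is a placeholder, not a real unsubscribe link.
    from email.message import EmailMessage

    msg = EmailMessage()
    msg["List-Unsubscribe"] = "<https://example.com/dashboard/unsubscribe/123>"
    # RFC 8058 requires exactly this value to signal one-click support
    msg["List-Unsubscribe-Post"] = "List-Unsubscribe=One-Click"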
@@ -3,12 +3,9 @@ from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker

from app import config
from app.config import DB_URI


engine = create_engine(
config.DB_URI, connect_args={"application_name": config.DB_CONN_NAME}
)
engine = create_engine(DB_URI)
connection = engine.connect()

Session = scoped_session(sessionmaker(bind=connection))
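One side of the db.py hunk passes connect_args={"application_name": config.DB_CONN_NAME} to create_engine. With a PostgreSQL driver such as psycopg2, application_name is forwarded as a libpq connection parameter and shows up in pg_stat_activity, which makes it easy to see which service owns each connection. A minimal sketch with made-up connection values; the real URI and name come from app.config.

    # Hypothetical URI and application name, for illustration only.
    from sqlalchemy import create_engine, text

    engine = create_engine(
        "postgresql+psycopg2://sl:sl@localhost:5432/simplelogin",
        connect_args={"application_name": "sl-webapp"},
    )

    with engine.connect() as conn:
        row = conn.execute(
            text("SELECT application_name FROM pg_stat_activity WHERE pid = pg_backend_pid()")
        ).fetchone()
        print(row)  # ('sl-webapp',)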
@@ -1,3 +1 @@
from .views import index, new_client, client_detail

__all__ = ["index", "new_client", "client_detail"]
@@ -1,5 +1,4 @@
from io import BytesIO
from urllib.parse import urlparse

from flask import request, render_template, redirect, url_for, flash
from flask_login import current_user, login_required

@@ -12,7 +11,6 @@ from app.config import ADMIN_EMAIL
from app.db import Session
from app.developer.base import developer_bp
from app.email_utils import send_email
from app.image_validation import detect_image_format, ImageFormat
from app.log import LOG
from app.models import Client, RedirectUri, File, Referral
from app.utils import random_string

@@ -48,25 +46,16 @@ def client_detail(client_id):
approval_form.description.data = client.description

if action == "edit" and form.validate_on_submit():
parsed_url = urlparse(form.url.data)
if parsed_url.scheme != "https":
flash("Only https urls are allowed", "error")
return redirect(url_for("developer.index"))
client.name = form.name.data
client.home_url = form.url.data

if form.icon.data:
icon_data = form.icon.data.read(10240)
if detect_image_format(icon_data) == ImageFormat.Unknown:
flash("Unknown file format", "warning")
return redirect(url_for("developer.index"))
if client.icon:
s3.delete(client.icon_id)
File.delete(client.icon)
# todo: remove current icon if any
# todo: handle remove icon
file_path = random_string(30)
file = File.create(path=file_path, user_id=client.user_id)

s3.upload_from_bytesio(file_path, BytesIO(icon_data))
s3.upload_from_bytesio(file_path, BytesIO(form.icon.data.read()))

Session.flush()
LOG.d("upload file %s to s3", file)

@@ -98,7 +87,7 @@ def client_detail(client_id):
)

flash(
"Thanks for submitting, we are informed and will come back to you asap!",
f"Thanks for submitting, we are informed and will come back to you asap!",
"success",
)
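One side of the client_detail hunk reads at most 10240 bytes of the uploaded icon, rejects it when detect_image_format returns ImageFormat.Unknown, and deletes the previous icon from S3 before uploading the new one. app/image_validation.py is not included in these hunks; the following magic-byte check is only an assumed sketch of what such a helper could look like, not the project's actual implementation.

    # Assumed sketch; app/image_validation.py is not shown in this diff.
    from enum import Enum


    class ImageFormat(Enum):
        Png = "png"
        Jpg = "jpg"
        Unknown = "unknown"


    def detect_image_format(data: bytes) -> ImageFormat:
        # PNG signature: 89 50 4E 47 0D 0A 1A 0A; JPEG starts with FF D8 FF
        if data.startswith(b"\x89PNG\r\n\x1a\n"):
            return ImageFormat.Png
        if data.startswith(b"\xff\xd8\xff"):
            return ImageFormat.Jpg
        return ImageFormat.Unknown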
@@ -1,5 +1,3 @@
from urllib.parse import urlparse

from flask import render_template, redirect, url_for, flash
from flask_login import current_user, login_required
from flask_wtf import FlaskForm

@@ -22,10 +20,6 @@ def new_client():

if form.validate_on_submit():
client = Client.create_new(form.name.data, current_user.id)
parsed_url = urlparse(form.url.data)
if parsed_url.scheme != "https":
flash("Only https urls are allowed", "error")
return redirect(url_for("developer.new_client"))
client.home_url = form.url.data
Session.commit()
@@ -1,3 +1 @@
from .views import index

__all__ = ["index"]
221 app/dns_utils.py
@@ -1,22 +1,102 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import List, Optional
from app import config
from typing import Optional, List, Tuple

import dns.resolver

from app.config import NAMESERVERS


def _get_dns_resolver():
my_resolver = dns.resolver.Resolver()
my_resolver.nameservers = config.NAMESERVERS

return my_resolver


def get_ns(hostname) -> [str]:
try:
answers = _get_dns_resolver().resolve(hostname, "NS", search=True)
except Exception:
return []
return [a.to_text() for a in answers]


def get_cname_record(hostname) -> Optional[str]:
"""Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end"""
try:
answers = _get_dns_resolver().resolve(hostname, "CNAME", search=True)
except Exception:
return None

for a in answers:
ret = a.to_text()
return ret[:-1]

return None


def get_mx_domains(hostname) -> [(int, str)]:
"""return list of (priority, domain name).
domain name ends with a "." at the end.
"""
try:
answers = _get_dns_resolver().resolve(hostname, "MX", search=True)
except Exception:
return []

ret = []

for a in answers:
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
parts = record.split(" ")

ret.append((int(parts[0]), parts[1]))

return ret


_include_spf = "include:"


@dataclass
class MxRecord:
priority: int
domain: str
def get_spf_domain(hostname) -> [str]:
"""return all domains listed in *include:*"""
try:
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
except Exception:
return []

ret = []

for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
record = record.decode() # record is bytes

if record.startswith("v=spf1"):
parts = record.split(" ")
for part in parts:
if part.startswith(_include_spf):
ret.append(part[part.find(_include_spf) + len(_include_spf) :])

return ret


def get_txt_record(hostname) -> [str]:
try:
answers = _get_dns_resolver().resolve(hostname, "TXT", search=True)
except Exception:
return []

ret = []

for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
record = record.decode() # record is bytes

ret.append(record)

return ret


def is_mx_equivalent(
mx_domains: List[MxRecord], ref_mx_domains: List[MxRecord]
mx_domains: List[Tuple[int, str]], ref_mx_domains: List[Tuple[int, str]]
) -> bool:
"""
Compare mx_domains with ref_mx_domains to see if they are equivalent.

@@ -25,127 +105,16 @@ def is_mx_equivalent(
The priority order is taken into account but not the priority number.
For example, [(1, domain1), (2, domain2)] is equivalent to [(10, domain1), (20, domain2)]
"""
mx_domains = sorted(mx_domains, key=lambda x: x.priority)
ref_mx_domains = sorted(ref_mx_domains, key=lambda x: x.priority)
mx_domains = sorted(mx_domains, key=lambda priority_domain: priority_domain[0])
ref_mx_domains = sorted(
ref_mx_domains, key=lambda priority_domain: priority_domain[0]
)

if len(mx_domains) < len(ref_mx_domains):
return False

for actual, expected in zip(mx_domains, ref_mx_domains):
if actual.domain != expected.domain:
for i in range(0, len(ref_mx_domains)):
if mx_domains[i][1] != ref_mx_domains[i][1]:
return False

return True


class DNSClient(ABC):
@abstractmethod
def get_cname_record(self, hostname: str) -> Optional[str]:
pass

@abstractmethod
def get_mx_domains(self, hostname: str) -> List[MxRecord]:
pass

def get_spf_domain(self, hostname: str) -> List[str]:
"""
return all domains listed in *include:*
"""
try:
records = self.get_txt_record(hostname)
ret = []
for record in records:
if record.startswith("v=spf1"):
parts = record.split(" ")
for part in parts:
if part.startswith(_include_spf):
ret.append(
part[part.find(_include_spf) + len(_include_spf) :]
)
return ret
except Exception:
return []

@abstractmethod
def get_txt_record(self, hostname: str) -> List[str]:
pass


class NetworkDNSClient(DNSClient):
def __init__(self, nameservers: List[str]):
self._resolver = dns.resolver.Resolver()
self._resolver.nameservers = nameservers

def get_cname_record(self, hostname: str) -> Optional[str]:
"""
Return the CNAME record if exists for a domain, WITHOUT the trailing period at the end
"""
try:
answers = self._resolver.resolve(hostname, "CNAME", search=True)
for a in answers:
ret = a.to_text()
return ret[:-1]
except Exception:
return None

def get_mx_domains(self, hostname: str) -> List[MxRecord]:
"""
return list of (priority, domain name) sorted by priority (lowest priority first)
domain name ends with a "." at the end.
"""
try:
answers = self._resolver.resolve(hostname, "MX", search=True)
ret = []
for a in answers:
record = a.to_text() # for ex '20 alt2.aspmx.l.google.com.'
parts = record.split(" ")
ret.append(MxRecord(priority=int(parts[0]), domain=parts[1]))
return sorted(ret, key=lambda x: x.priority)
except Exception:
return []

def get_txt_record(self, hostname: str) -> List[str]:
try:
answers = self._resolver.resolve(hostname, "TXT", search=False)
ret = []
for a in answers: # type: dns.rdtypes.ANY.TXT.TXT
for record in a.strings:
ret.append(record.decode())
return ret
except Exception:
return []


class InMemoryDNSClient(DNSClient):
def __init__(self):
self.cname_records: dict[str, Optional[str]] = {}
self.mx_records: dict[str, List[MxRecord]] = {}
self.spf_records: dict[str, List[str]] = {}
self.txt_records: dict[str, List[str]] = {}

def set_cname_record(self, hostname: str, cname: str):
self.cname_records[hostname] = cname

def set_mx_records(self, hostname: str, mx_list: List[MxRecord]):
self.mx_records[hostname] = mx_list

def set_txt_record(self, hostname: str, txt_list: List[str]):
self.txt_records[hostname] = txt_list

def get_cname_record(self, hostname: str) -> Optional[str]:
return self.cname_records.get(hostname)

def get_mx_domains(self, hostname: str) -> List[MxRecord]:
mx_list = self.mx_records.get(hostname, [])
return sorted(mx_list, key=lambda x: x.priority)

def get_txt_record(self, hostname: str) -> List[str]:
return self.txt_records.get(hostname, [])


def get_network_dns_client() -> NetworkDNSClient:
return NetworkDNSClient(NAMESERVERS)


def get_mx_domains(hostname: str) -> List[MxRecord]:
return get_network_dns_client().get_mx_domains(hostname)
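The dns_utils.py hunks introduce a DNSClient abstraction: NetworkDNSClient resolves records through dns.resolver, InMemoryDNSClient serves canned records (handy in tests), and the MxRecord dataclass replaces the old (priority, domain) tuples. A short usage sketch of the in-memory client, using only the classes and functions shown above with example hostnames:

    # Usage sketch based on the classes shown in the hunk above.
    from app.dns_utils import InMemoryDNSClient, MxRecord, is_mx_equivalent

    dns_client = InMemoryDNSClient()
    dns_client.set_mx_records(
        "example.org",
        [
            MxRecord(priority=20, domain="mx2.example.org."),
            MxRecord(priority=10, domain="mx1.example.org."),
        ],
    )

    # get_mx_domains returns records sorted by priority, lowest first
    records = dns_client.get_mx_domains("example.org")
    assert [r.domain for r in records] == ["mx1.example.org.", "mx2.example.org."]

    # is_mx_equivalent compares the order of domains, not the absolute priorities
    reference = [
        MxRecord(priority=1, domain="mx1.example.org."),
        MxRecord(priority=2, domain="mx2.example.org."),
    ]
    assert is_mx_equivalent(records, reference)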
@@ -19,9 +19,6 @@ DKIM_SIGNATURE = "DKIM-Signature"
X_SPAM_STATUS = "X-Spam-Status"
LIST_UNSUBSCRIBE = "List-Unsubscribe"
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
RETURN_PATH = "Return-Path"
AUTHENTICATION_RESULTS = "Authentication-Results"
SL_QUEUE_ID = "X-SL-Queue-Id"

# headers used to DKIM sign in order of preference
DKIM_HEADERS = [

@@ -34,7 +31,6 @@ DKIM_HEADERS = [
SL_DIRECTION = "X-SimpleLogin-Type"
SL_EMAIL_LOG_ID = "X-SimpleLogin-EmailLog-ID"
SL_ENVELOPE_FROM = "X-SimpleLogin-Envelope-From"
SL_ORIGINAL_FROM = "X-SimpleLogin-Original-From"
SL_ENVELOPE_TO = "X-SimpleLogin-Envelope-To"
SL_CLIENT_IP = "X-SimpleLogin-Client-IP"

@@ -54,6 +50,3 @@ MIME_HEADERS = [h.lower() for h in MIME_HEADERS]
# according to https://datatracker.ietf.org/doc/html/rfc3834#section-3.1.7, this header should be set to "auto-replied"
# however on hotmail, this is set to "auto-generated"
AUTO_SUBMITTED = "Auto-Submitted"

# Yahoo complaint specific header
YAHOO_ORIGINAL_RECIPIENT = "original-rcpt-to"
@@ -31,7 +31,11 @@ E402 = "421 SL E402 Encryption failed - Retry later"
# E403 = "421 SL E403 Retry later"
E404 = "421 SL E404 Unexpected error - Retry later"
E405 = "421 SL E405 Mailbox domain problem - Retry later"
E406 = "421 SL E406 Retry later"
E407 = "421 SL E407 Retry later"
E408 = "421 SL E408 Retry later"
E409 = "421 SL E409 Retry later"
E410 = "421 SL E410 Retry later"
# endregion

# region 5** errors

@@ -60,5 +64,4 @@ E522 = (
)
E523 = "550 SL E523 Unknown error"
E524 = "550 SL E524 Wrong use of reverse-alias"
E525 = "550 SL E525 Alias loop"
# endregion