import csv

import requests

from app import s3
from app.db import Session
from app.email_utils import get_email_domain_part
from app.models import (
    Alias,
    AliasMailbox,
    BatchImport,
    CustomDomain,
    DeletedAlias,
    DomainDeletedAlias,
    Mailbox,
    User,
)
from app.utils import sanitize_email, canonicalize_email

from .log import LOG


def handle_batch_import(batch_import: BatchImport):
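    """Mark the import as processed, download its CSV from S3 and create the aliases it describes."""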
    user = batch_import.user

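    # Mark the import as processed before starting the actual work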
    batch_import.processed = True
    Session.commit()

    LOG.d("Start batch import for %s %s", batch_import, user)

    file_url = s3.get_url(batch_import.file.path)

    LOG.d("Download file %s from %s", batch_import.file, file_url)
    r = requests.get(file_url)

    # Strip the invisible BOM character (\ufeff) that some CSV exports prepend
    lines = [
        line.decode("utf-8").replace("\ufeff", "").strip() for line in r.iter_lines()
    ]

    import_from_csv(batch_import, user, lines)


def import_from_csv(batch_import: BatchImport, user: User, lines):
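    """Create aliases for `user` from the CSV rows in `lines`.

    Each row is expected to have an "alias" and a "note" column, plus an
    optional whitespace-separated "mailboxes" column. Illustrative example
    only (the addresses below are made up):

        alias,note,mailboxes
        hello@my-domain.com,some note,box1@example.com box2@example.com
    """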
    reader = csv.DictReader(lines)

    for row in reader:
        try:
            full_alias = sanitize_email(row["alias"])
            note = row["note"]
        except KeyError:
            LOG.w("Cannot parse row %s", row)
            continue

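        # The alias domain must be a custom domain that this user owns and has verified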
        alias_domain = get_email_domain_part(full_alias)
        custom_domain = CustomDomain.get_by(domain=alias_domain)

        if (
            not custom_domain
            or not custom_domain.ownership_verified
            or custom_domain.user_id != user.id
        ):
            LOG.d("domain %s can't be used %s", alias_domain, user)
            continue

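        # Skip aliases that already exist or were deleted in the past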
        if (
            Alias.get_by(email=full_alias)
            or DeletedAlias.get_by(email=full_alias)
            or DomainDeletedAlias.get_by(email=full_alias)
        ):
            LOG.d("alias already used %s", full_alias)
            continue

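        # Collect the row's mailboxes that exist, are verified and belong to this user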
        mailboxes = []

        if "mailboxes" in row:
            for mailbox_email in row["mailboxes"].split():
                mailbox_email = canonicalize_email(mailbox_email)
                mailbox = Mailbox.get_by(email=mailbox_email)

                if not mailbox or not mailbox.verified or mailbox.user_id != user.id:
                    LOG.d("mailbox %s can't be used %s", mailbox, user)
                    continue

                mailboxes.append(mailbox.id)

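        # Fall back to the user's default mailbox when no usable mailbox is listed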
        if len(mailboxes) == 0:
            mailboxes = [user.default_mailbox_id]

        if user.can_create_new_alias():
            alias = Alias.create(
                user_id=user.id,
                email=full_alias,
                note=note,
                mailbox_id=mailboxes[0],
                custom_domain_id=custom_domain.id,
                batch_import_id=batch_import.id,
                commit=True,
            )
            LOG.d("Create %s", alias)

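            # Attach any additional mailboxes beyond the first one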
            for i in range(1, len(mailboxes)):
                AliasMailbox.create(
                    alias_id=alias.id, mailbox_id=mailboxes[i], commit=True
                )
                Session.commit()
                LOG.d("Add %s to mailbox %s", alias, mailboxes[i])