Compare commits: sudo_setti ... master (126 commits)
Commits in this comparison (SHA1):

f81f8ca032, 31896ff262, 45575261dc, 627ad302d2, 08862a35c3, 75dd3cf925,
a097e33abe, e5cc8b9628, d149686296, babf4b058a, eb8f8caeb8, 70fc9c383a,
b68f074783, 73a0addf27, e6bcf81726, 7600038813, c19b62b878, 4fe79bdd42,
fd1744470b, 989a577db6, 373c30e53b, ff3dbdaad2, 7ec7e06c2b, ef90423a35,
c04f5102d6, 5714403976, 40ff4604c8, 66d26a1193, 9b1e4f73ca, 0435c745fd,
366631ee93, 4bf925fe6f, 0e82801512, 9ab3695d36, 06b7e05e61, 6c7e9e69dc,
6e4f6fe540, f2dad4c28c, e9e863807c, c4003b07ac, d8943cf126, 2eec918543,
4d9b8f9a4b, 81d5ef0783, 04d92b7f23, cb900ed057, 516072fd99, 2351330732,
e2dbf8d48d, d62bff8e46, fc205157a8, ac9d550069, daec781ffc, 501c625ddf,
d3aae31d45, 8512093bfc, 76b05e0d64, 40663358d8, f046b2270c, 03c67ead44,
37ffe4d5fe, 689ef3a579, 495d544505, a539428607, 8c7e9f7fb3, 9d9e5fcab6,
ff33392398, 85964f283e, d30183bbda, ed66c7306b, 07bb658310, e43a2dd34d,
3de83f2f05, e4d4317988, da2cedd254, e343b27fa6, 6dfb6bb3e4, a5e7da10dd,
5ddbca05b2, 6c33e0d986, 7cb7b48845, 6276ad4419, 66c3a07c92, 23a4e46885,
52e6f5e2d2, 59c189957f, bec8cb2292, 7f23533c64, 62fecf1190, 9d8116e535,
796c0c5aa1, 5a56b46650, e3ae9bc6d5, ec666aee87, 2230e0b925, 71fd5e2241,
97cbff5dc9, b6f79ea3a6, 43b91cd197, 03e5083d97, 1f9d784382, c09b5bc526,
eba4ee8c2c, 1c65094da8, 2a014f0e4b, b081b6a16a, 66039c526b, f722cae8d6,
b6286e3c1b, 26d5fd400c, b470ab3396, 66388e72e0, 432fb3fcf7, 44e0dd8635,
2ec1208eb7, 87efe6b059, 6a60a4951e, b3ce5c8901, 3fcb37f246, 62ba2844f3,
9143a0f6bc, 48ae859e1b, 0a197313ea, b487b01442, 170082e2c1, 51916a8c8a
174 changed files with 15588 additions and 328117 deletions
.github/workflows/main.yml (vendored): 8 changed lines
@@ -15,9 +15,15 @@ jobs:

- uses: actions/setup-python@v4
with:
python-version: '3.9'
python-version: '3.10'
cache: 'poetry'

- name: Install OS dependencies
if: ${{ matrix.python-version }} == '3.10'
run: |
sudo apt update
sudo apt install -y libre2-dev libpq-dev

- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: poetry install --no-interaction

@@ -7,17 +7,19 @@ repos:
hooks:
- id: check-yaml
- id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.3.0
hooks:
- id: djlint-jinja
files: '.*\.html'
entry: djlint --reformat
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.5
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format

@@ -34,7 +34,7 @@ poetry install
On Mac, sometimes you might need to install some other packages via `brew`:

```bash
brew install pkg-config libffi openssl postgresql
brew install pkg-config libffi openssl postgresql@13
```

You also need to install `gpg` tool, on Mac it can be done with:

@@ -169,6 +169,12 @@ For HTML templates, we use `djlint`. Before creating a pull request, please run
poetry run djlint --check templates
```

If some files aren't properly formatted, you can format all files with

```bash
poetry run djlint --reformat .
```

## Test sending email

[swaks](http://www.jetmore.org/john/code/swaks/) is used for sending test emails to the `email_handler`.

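The CONTRIBUTING excerpt above mentions swaks for sending a test message to the local `email_handler`. A minimal sketch of such a send follows; the recipient alias, sender address, and port are placeholders for illustration only, not values taken from this comparison:

```bash
# hypothetical values throughout; adjust to your local setup
swaks --to my-alias@mydomain.example \
      --from tester@example.com \
      --server 127.0.0.1:25 \
      --header "Subject: swaks test" \
      --body "test message for the local email_handler"
```
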
@@ -23,15 +23,15 @@ COPY poetry.lock pyproject.toml ./
# Install and setup poetry
RUN pip install -U pip \
&& apt-get update \
&& apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
&& apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
&& curl -sSL https://install.python-poetry.org | python3 - \
# Remove curl and netcat from the image
&& apt-get purge -y curl netcat \
&& apt-get purge -y curl netcat-traditional \
# Run poetry
&& poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi --no-root \
# Clear apt cache \
&& apt-get purge -y libre2-dev \
&& apt-get purge -y libre2-dev cmake ninja-build\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

@@ -334,6 +334,12 @@ smtpd_recipient_restrictions =
permit
```

Check that the ssl certificates `/etc/ssl/certs/ssl-cert-snakeoil.pem` and `/etc/ssl/private/ssl-cert-snakeoil.key` exist. Depending on the linux distribution you are using they may or may not be present. If they are not, you will need to generate them with this command:

```bash
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/private/ssl-cert-snakeoil.key -out /etc/ssl/certs/ssl-cert-snakeoil.pem
```

Create the `/etc/postfix/pgsql-relay-domains.cf` file with the following content.
Make sure that the database config is correctly set, replace `mydomain.com` with your domain, update 'myuser' and 'mypassword' with your postgres credentials.

@@ -5,17 +5,23 @@ from typing import Optional

from arrow import Arrow
from newrelic import agent
from sqlalchemy import or_

from app.db import Session
from app.email_utils import send_welcome_email
from app.utils import sanitize_email
from app.errors import AccountAlreadyLinkedToAnotherPartnerException
from app.utils import sanitize_email, canonicalize_email
from app.errors import (
AccountAlreadyLinkedToAnotherPartnerException,
AccountIsUsingAliasAsEmail,
AccountAlreadyLinkedToAnotherUserException,
)
from app.log import LOG
from app.models import (
PartnerSubscription,
Partner,
PartnerUser,
User,
Alias,
)
from app.utils import random_string

@@ -126,8 +132,9 @@ class ClientMergeStrategy(ABC):
class NewUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
# Will create a new SL User with a random password
canonical_email = canonicalize_email(self.link_request.email)
new_user = User.create(
email=self.link_request.email,
email=canonical_email,
name=self.link_request.name,
password=random_string(20),
activated=True,
@@ -161,7 +168,6 @@ class NewUserStrategy(ClientMergeStrategy):

class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:

partner_user = ensure_partner_user_exists_for_user(
self.link_request, self.user, self.partner
)
@@ -175,7 +181,7 @@ class ExistingUnlinkedUserStrategy(ClientMergeStrategy):

class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
def process(self) -> LinkResult:
raise AccountAlreadyLinkedToAnotherPartnerException()
raise AccountAlreadyLinkedToAnotherUserException()


def get_login_strategy(
@@ -192,6 +198,12 @@ def get_login_strategy(
return ExistingUnlinkedUserStrategy(link_request, user, partner)


def check_alias(email: str) -> bool:
alias = Alias.get_by(email=email)
if alias is not None:
raise AccountIsUsingAliasAsEmail()


def process_login_case(
link_request: PartnerLinkRequest, partner: Partner
) -> LinkResult:
@@ -202,9 +214,21 @@ def process_login_case(
partner_id=partner.id, external_user_id=link_request.external_user_id
)
if partner_user is None:
canonical_email = canonicalize_email(link_request.email)
# We didn't find any SimpleLogin user registered with that partner user id
# Make sure they aren't using an alias as their link email
check_alias(link_request.email)
check_alias(canonical_email)
# Try to find it using the partner's e-mail address
user = User.get_by(email=link_request.email)
users = User.filter(
or_(User.email == link_request.email, User.email == canonical_email)
).all()
if len(users) > 1:
user = [user for user in users if user.email == canonical_email][0]
elif len(users) == 1:
user = users[0]
else:
user = None
return get_login_strategy(link_request, user, partner).process()
else:
# We found the SL user registered with that partner user id

@@ -256,6 +256,17 @@ class UserAdmin(SLModelView):

Session.commit()

@action(
"clear_delete_on",
"Remove scheduled deletion of user",
"This will remove the scheduled deletion for this users",
)
def clean_delete_on(self, ids):
for user in User.filter(User.id.in_(ids)):
user.delete_on = None

Session.commit()

# @action(
# "login_as",
# "Login as this user",
@@ -600,6 +611,26 @@ class NewsletterAdmin(SLModelView):
else:
flash(error_msg, "error")

@action(
"clone_newsletter",
"Clone this newsletter",
)
def clone_newsletter(self, newsletter_ids):
if len(newsletter_ids) != 1:
flash("you can only select 1 newsletter", "error")
return

newsletter_id = newsletter_ids[0]
newsletter: Newsletter = Newsletter.get(newsletter_id)
new_newsletter = Newsletter.create(
subject=newsletter.subject,
html=newsletter.html,
plain_text=newsletter.plain_text,
commit=True,
)

flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")


class NewsletterUserAdmin(SLModelView):
column_searchable_list = ["id"]
@@ -620,3 +651,8 @@ class MetricAdmin(SLModelView):
column_exclude_list = ["created_at", "updated_at", "id"]

can_export = True


class InvalidMailboxDomainAdmin(SLModelView):
can_create = True
can_delete = True

@@ -6,8 +6,7 @@ from typing import Optional
import itsdangerous
from app import config
from app.log import LOG
from app.models import User

from app.models import User, AliasOptions, SLDomain

signer = itsdangerous.TimestampSigner(config.CUSTOM_ALIAS_SECRET)

@@ -43,7 +42,9 @@ def check_suffix_signature(signed_suffix: str) -> Optional[str]:
return None


def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
def verify_prefix_suffix(
user: User, alias_prefix, alias_suffix, alias_options: Optional[AliasOptions] = None
) -> bool:
"""verify if user could create an alias with the given prefix and suffix"""
if not alias_prefix or not alias_suffix: # should be caught on frontend
return False
@@ -56,7 +57,7 @@ def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
alias_domain_prefix, alias_domain = alias_suffix.split("@", 1)

# alias_domain must be either one of user custom domains or built-in domains
if alias_domain not in user.available_alias_domains():
if alias_domain not in user.available_alias_domains(alias_options=alias_options):
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False

@@ -64,12 +65,11 @@ def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
# 1) alias_suffix must start with "." and
# 2) alias_domain_prefix must come from the word list
if (
alias_domain in user.available_sl_domains()
alias_domain in user.available_sl_domains(alias_options=alias_options)
and alias_domain not in user_custom_domains
# when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
and not config.DISABLE_ALIAS_SUFFIX
):

if not alias_domain_prefix.startswith("."):
LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
return False
@@ -80,14 +80,18 @@ def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False

if alias_domain not in user.available_sl_domains():
if alias_domain not in user.available_sl_domains(
alias_options=alias_options
):
LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
return False

return True


def get_alias_suffixes(user: User) -> [AliasSuffix]:
def get_alias_suffixes(
user: User, alias_options: Optional[AliasOptions] = None
) -> [AliasSuffix]:
"""
Similar to as get_available_suffixes() but also return custom domain that doesn't have MX set up.
"""
@@ -99,7 +103,9 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
# for each user domain, generate both the domain and a random suffix version
for custom_domain in user_custom_domains:
if custom_domain.random_prefix_generation:
suffix = "." + user.get_random_alias_suffix() + "@" + custom_domain.domain
suffix = (
f".{user.get_random_alias_suffix(custom_domain)}@{custom_domain.domain}"
)
alias_suffix = AliasSuffix(
is_custom=True,
suffix=suffix,
@@ -113,7 +119,7 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
else:
alias_suffixes.append(alias_suffix)

suffix = "@" + custom_domain.domain
suffix = f"@{custom_domain.domain}"
alias_suffix = AliasSuffix(
is_custom=True,
suffix=suffix,
@@ -134,16 +140,13 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
alias_suffixes.append(alias_suffix)

# then SimpleLogin domain
for sl_domain in user.get_sl_domains():
suffix = (
(
""
if config.DISABLE_ALIAS_SUFFIX
else "." + user.get_random_alias_suffix()
)
+ "@"
+ sl_domain.domain
sl_domains = user.get_sl_domains(alias_options=alias_options)
default_domain_found = False
for sl_domain in sl_domains:
prefix = (
"" if config.DISABLE_ALIAS_SUFFIX else f".{user.get_random_alias_suffix()}"
)
suffix = f"{prefix}@{sl_domain.domain}"
alias_suffix = AliasSuffix(
is_custom=False,
suffix=suffix,
@@ -152,11 +155,36 @@ def get_alias_suffixes(user: User) -> [AliasSuffix]:
domain=sl_domain.domain,
mx_verified=True,
)

# put the default domain to top
if user.default_alias_public_domain_id == sl_domain.id:
alias_suffixes.insert(0, alias_suffix)
else:
# No default or this is not the default
if (
user.default_alias_public_domain_id is None
or user.default_alias_public_domain_id != sl_domain.id
):
alias_suffixes.append(alias_suffix)
else:
default_domain_found = True
alias_suffixes.insert(0, alias_suffix)

if not default_domain_found:
domain_conditions = {"id": user.default_alias_public_domain_id, "hidden": False}
if not user.is_premium():
domain_conditions["premium_only"] = False
sl_domain = SLDomain.get_by(**domain_conditions)
if sl_domain:
prefix = (
""
if config.DISABLE_ALIAS_SUFFIX
else f".{user.get_random_alias_suffix()}"
)
suffix = f"{prefix}@{sl_domain.domain}"
alias_suffix = AliasSuffix(
is_custom=False,
suffix=suffix,
signed_suffix=signer.sign(suffix).decode(),
is_premium=sl_domain.premium_only,
domain=sl_domain.domain,
mx_verified=True,
)
alias_suffixes.insert(0, alias_suffix)

return alias_suffixes

@@ -21,6 +21,8 @@ from app.email_utils import (
send_cannot_create_directory_alias_disabled,
get_email_local_part,
send_cannot_create_domain_alias,
send_email,
render,
)
from app.errors import AliasInTrashError
from app.log import LOG
@@ -36,6 +38,8 @@ from app.models import (
EmailLog,
Contact,
AutoCreateRule,
AliasUsedOn,
ClientUser,
)
from app.regex_utils import regex_match

@@ -57,6 +61,8 @@ def get_user_if_alias_would_auto_create(
domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
address, notify_user=notify_user
)
if DomainDeletedAlias.get_by(email=address):
return None
if domain_and_rule:
return domain_and_rule[0].user
directory = check_if_alias_can_be_auto_created_for_a_directory(
@@ -397,3 +403,58 @@ def alias_export_csv(user, csv_direct_export=False):
output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
output.headers["Content-type"] = "text/csv"
return output


def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
# cannot transfer alias which is used for receiving newsletter
if User.get_by(newsletter_alias_id=alias.id):
raise Exception("Cannot transfer alias that's used to receive newsletter")

# update user_id
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
{"user_id": new_user.id}
)

Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
{"user_id": new_user.id}
)

Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
{"user_id": new_user.id}
)

# remove existing mailboxes from the alias
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()

# set mailboxes
alias.mailbox_id = new_mailboxes.pop().id
for mb in new_mailboxes:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)

# alias has never been transferred before
if not alias.original_owner_id:
alias.original_owner_id = alias.user_id

# inform previous owner
old_user = alias.user
send_email(
old_user.email,
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
alias=alias,
),
render(
"transactional/alias-transferred.html",
alias=alias,
),
)

# now the alias belongs to the new user
alias.user_id = new_user.id

# set some fields back to default
alias.disable_pgp = False
alias.pinned = False

Session.commit()

@@ -16,3 +16,22 @@ from .views import (
sudo,
user,
)

__all__ = [
"alias_options",
"new_custom_alias",
"custom_domain",
"new_random_alias",
"user_info",
"auth",
"auth_mfa",
"alias",
"apple",
"mailbox",
"notification",
"setting",
"export",
"phone",
"sudo",
"user",
]

@@ -24,6 +24,7 @@ from app.errors import (
ErrContactAlreadyExists,
ErrAddressInvalid,
)
from app.extensions import limiter
from app.models import Alias, Contact, Mailbox, AliasMailbox


@@ -71,6 +72,9 @@ def get_aliases():


@api_bp.route("/v2/aliases", methods=["GET", "POST"])
@limiter.limit(
"5/minute",
)
@require_api_auth
def get_aliases_v2():
"""

@@ -9,6 +9,7 @@ from requests import RequestException

from app.api.base import api_bp, require_api_auth
from app.config import APPLE_API_SECRET, MACAPP_APPLE_API_SECRET
from app.subscription_webhook import execute_subscription_webhook
from app.db import Session
from app.log import LOG
from app.models import PlanEnum, AppleSubscription
@@ -50,6 +51,7 @@ def apple_process_payment():

apple_sub = verify_receipt(receipt_data, user, password)
if apple_sub:
execute_subscription_webhook(user)
return jsonify(ok=True), 200

return jsonify(error="Processing failed"), 400
@@ -282,6 +284,7 @@ def apple_update_notification():
apple_sub.plan = plan
apple_sub.product_id = transaction["product_id"]
Session.commit()
execute_subscription_webhook(user)
return jsonify(ok=True), 200
else:
LOG.w(
@@ -554,6 +557,7 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
product_id=latest_transaction["product_id"],
)

execute_subscription_webhook(user)
Session.commit()

return apple_sub

@@ -63,6 +63,11 @@ def auth_login():
elif user.disabled:
LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
return jsonify(error="Account disabled"), 400
elif user.delete_on is not None:
LoginEvent(
LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
).send()
return jsonify(error="Account scheduled for deletion"), 400
elif not user.activated:
LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
return jsonify(error="Account not activated"), 422
@@ -357,7 +362,7 @@ def auth_payload(user, device) -> dict:


@api_bp.route("/auth/forgot_password", methods=["POST"])
@limiter.limit("10/minute")
@limiter.limit("2/minute")
def forgot_password():
"""
User forgot password

@@ -13,8 +13,8 @@ from app.db import Session
from app.email_utils import (
mailbox_already_used,
email_can_be_used_as_mailbox,
is_valid_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import sanitize_email
@@ -45,7 +45,7 @@ def create_mailbox():
mailbox_email = sanitize_email(request.get_json().get("email"))

if not user.is_premium():
return jsonify(error=f"Only premium plan can add additional mailbox"), 400
return jsonify(error="Only premium plan can add additional mailbox"), 400

if not is_valid_email(mailbox_email):
return jsonify(error=f"{mailbox_email} invalid"), 400
@@ -86,7 +86,7 @@ def delete_mailbox(mailbox_id):

"""
user = g.user
mailbox = Mailbox.get(id=mailbox_id)
mailbox = Mailbox.get(mailbox_id)

if not mailbox or mailbox.user_id != user.id:
return jsonify(error="Forbidden"), 403

@@ -150,7 +150,7 @@ def new_custom_alias_v3():
if not data:
return jsonify(error="request body cannot be empty"), 400

if type(data) is not dict:
if not isinstance(data, dict):
return jsonify(error="request body does not follow the required format"), 400

alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
@@ -168,7 +168,7 @@ def new_custom_alias_v3():
return jsonify(error="alias prefix invalid format or too long"), 400

# check if mailbox is not tempered with
if type(mailbox_ids) is not list:
if not isinstance(mailbox_ids, list):
return jsonify(error="mailbox_ids must be an array of id"), 400
mailboxes = []
for mailbox_id in mailbox_ids:

@@ -1,4 +1,5 @@
import base64
import dataclasses
from io import BytesIO
from typing import Optional

@@ -7,6 +8,7 @@ from flask import jsonify, g, request, make_response
from app import s3, config
from app.api.base import api_bp, require_api_auth
from app.config import SESSION_COOKIE_NAME
from app.dashboard.views.index import get_stats
from app.db import Session
from app.models import ApiKey, File, PartnerUser, User
from app.proton.utils import get_proton_partner
@@ -136,3 +138,22 @@ def logout():
response.delete_cookie(SESSION_COOKIE_NAME)

return response


@api_bp.route("/stats")
@require_api_auth
def user_stats():
"""
Return stats

Output as json
- nb_alias
- nb_forward
- nb_reply
- nb_block

"""
user = g.user
stats = get_stats(user)

return jsonify(dataclasses.asdict(stats))

@@ -17,3 +17,23 @@ from .views import (
recovery,
api_to_cookie,
)

__all__ = [
"login",
"logout",
"register",
"activate",
"resend_activation",
"reset_password",
"forgot_password",
"github",
"google",
"facebook",
"proton",
"change_email",
"mfa",
"fido",
"social",
"recovery",
"api_to_cookie",
]

@@ -62,7 +62,7 @@ def fido():
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
if browser and not browser.is_expired() and browser.user_id == user.id:
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
# Redirect user to correct page
return redirect(next_url or url_for("dashboard.index"))
else:
@@ -110,7 +110,7 @@ def fido():

session["sudo_time"] = int(time())
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")

# Redirect user to correct page
response = make_response(redirect(next_url or url_for("dashboard.index")))

@@ -1,4 +1,4 @@
from flask import request, render_template, redirect, url_for, flash, g
from flask import request, render_template, flash, g
from flask_wtf import FlaskForm
from wtforms import StringField, validators

@@ -16,7 +16,7 @@ class ForgotPasswordForm(FlaskForm):

@auth_bp.route("/forgot_password", methods=["GET", "POST"])
@limiter.limit(
"10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
"10/hour", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
)
def forgot_password():
form = ForgotPasswordForm(request.form)
@@ -37,6 +37,5 @@ def forgot_password():
if user:
LOG.d("Send forgot password email to %s", user)
send_reset_password_email(user)
return redirect(url_for("auth.forgot_password"))

return render_template("auth/forgot_password.html", form=form)

@@ -54,6 +54,12 @@ def login():
"error",
)
LoginEvent(LoginEvent.ActionType.disabled_login).send()
elif user.delete_on is not None:
flash(
f"Your account is scheduled to be deleted on {user.delete_on}",
"error",
)
LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
elif not user.activated:
show_resend_activation = True
flash(

@@ -55,7 +55,7 @@ def mfa():
browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
if browser and not browser.is_expired() and browser.user_id == user.id:
login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")
# Redirect user to correct page
return redirect(next_url or url_for("dashboard.index"))
else:
@@ -73,7 +73,7 @@ def mfa():
Session.commit()

login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")

# Redirect user to correct page
response = make_response(redirect(next_url or url_for("dashboard.index")))

@@ -53,7 +53,7 @@ def recovery_route():
del session[MFA_USER_ID]

login_user(user)
flash(f"Welcome back!", "success")
flash("Welcome back!", "success")

recovery_code.used = True
recovery_code.used_at = arrow.now()

@@ -94,9 +94,7 @@ def register():
try:
send_activation_email(user, next_url)
RegisterEvent(RegisterEvent.ActionType.success).send()
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
1
)
DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
Session.commit()
except Exception:
flash("Invalid email, are you sure the email is correct?", "error")

@@ -60,8 +60,8 @@ def reset_password():
# this can be served to activate user too
user.activated = True

# remove the reset password code
ResetPasswordCode.delete(reset_password_code.id)
# remove all reset password codes
ResetPasswordCode.filter_by(user_id=user.id).delete()

# change the alternative_id to log user out on other browsers
user.alternative_id = str(uuid.uuid4())

@@ -111,11 +111,15 @@ POSTFIX_SERVER = os.environ.get("POSTFIX_SERVER", "240.0.0.1")
DISABLE_REGISTRATION = "DISABLE_REGISTRATION" in os.environ

# allow using a different postfix port, useful when developing locally
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", 25))

# Use port 587 instead of 25 when sending emails through Postfix
# Useful when calling Postfix from an external network
POSTFIX_SUBMISSION_TLS = "POSTFIX_SUBMISSION_TLS" in os.environ
if POSTFIX_SUBMISSION_TLS:
default_postfix_port = 587
else:
default_postfix_port = 25
POSTFIX_PORT = int(os.environ.get("POSTFIX_PORT", default_postfix_port))
POSTFIX_TIMEOUT = os.environ.get("POSTFIX_TIMEOUT", 3)

# ["domain1.com", "domain2.com"]
@@ -353,6 +357,7 @@ ALERT_COMPLAINT_TRANSACTIONAL_PHASE = "alert_complaint_transactional_phase"
ALERT_QUARANTINE_DMARC = "alert_quarantine_dmarc"

ALERT_DUAL_SUBSCRIPTION_WITH_PARTNER = "alert_dual_sub_with_partner"
ALERT_WARN_MULTIPLE_SUBSCRIPTIONS = "alert_multiple_subscription"

# <<<<< END ALERT EMAIL >>>>

@@ -527,3 +532,10 @@ if ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT:
SKIP_MX_LOOKUP_ON_CHECK = False

DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ

SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))

UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)

@@ -33,3 +33,39 @@ from .views import (
notification,
support,
)

__all__ = [
"index",
"pricing",
"setting",
"custom_alias",
"subdomain",
"billing",
"alias_log",
"alias_export",
"unsubscribe",
"api_key",
"custom_domain",
"alias_contact_manager",
"enter_sudo",
"mfa_setup",
"mfa_cancel",
"fido_setup",
"coupon",
"fido_manage",
"domain_detail",
"lifetime_licence",
"directory",
"mailbox",
"mailbox_detail",
"refused_email",
"referral",
"contact_detail",
"setup_done",
"batch_import",
"alias_transfer",
"app",
"delete_account",
"notification",
"support",
]

@@ -13,10 +13,10 @@ from app import config, parallel_limiter
from app.dashboard.base import dashboard_bp
from app.db import Session
from app.email_utils import (
is_valid_email,
generate_reply_email,
parse_full_address,
)
from app.email_validation import is_valid_email
from app.errors import (
CannotCreateContactForReverseAlias,
ErrContactErrorUpgradeNeeded,
@@ -90,7 +90,7 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
alias_id=alias.id,
website_email=contact_email,
name=contact_name,
reply_email=generate_reply_email(contact_email, user),
reply_email=generate_reply_email(contact_email, alias),
)

LOG.d(

@@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
contact=contact,
)
logs.append(al)
logs = sorted(logs, key=lambda l: l.when, reverse=True)
logs = sorted(logs, key=lambda log: log.when, reverse=True)

return logs

@@ -7,79 +7,19 @@ from flask import render_template, redirect, url_for, flash, request
from flask_login import login_required, current_user

from app import config
from app.alias_utils import transfer_alias
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import send_email, render
from app.extensions import limiter
from app.log import LOG
from app.models import (
Alias,
Contact,
AliasUsedOn,
AliasMailbox,
User,
ClientUser,
)
from app.models import Mailbox
from app.utils import CSRFValidationForm


def transfer(alias, new_user, new_mailboxes: [Mailbox]):
# cannot transfer alias which is used for receiving newsletter
if User.get_by(newsletter_alias_id=alias.id):
raise Exception("Cannot transfer alias that's used to receive newsletter")

# update user_id
Session.query(Contact).filter(Contact.alias_id == alias.id).update(
{"user_id": new_user.id}
)

Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
{"user_id": new_user.id}
)

Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
{"user_id": new_user.id}
)

# remove existing mailboxes from the alias
Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()

# set mailboxes
alias.mailbox_id = new_mailboxes.pop().id
for mb in new_mailboxes:
AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)

# alias has never been transferred before
if not alias.original_owner_id:
alias.original_owner_id = alias.user_id

# inform previous owner
old_user = alias.user
send_email(
old_user.email,
f"Alias {alias.email} has been received",
render(
"transactional/alias-transferred.txt",
alias=alias,
),
render(
"transactional/alias-transferred.html",
alias=alias,
),
)

# now the alias belongs to the new user
alias.user_id = new_user.id

# set some fields back to default
alias.disable_pgp = False
alias.pinned = False

Session.commit()


def hmac_alias_transfer_token(transfer_token: str) -> str:
alias_hmac = hmac.new(
config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
@@ -214,7 +154,13 @@ def alias_transfer_receive_route():
mailboxes,
token,
)
transfer(alias, current_user, mailboxes)
transfer_alias(alias, current_user, mailboxes)

# reset transfer token
alias.transfer_token = None
alias.transfer_token_expiration = None
Session.commit()

flash(f"You are now owner of {alias.email}", "success")
return redirect(url_for("dashboard.index", highlight_alias_id=alias.id))

@@ -3,9 +3,11 @@ from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators

from app import config
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.extensions import limiter
from app.models import ApiKey
from app.utils import CSRFValidationForm

@@ -14,9 +16,34 @@ class NewApiKeyForm(FlaskForm):
name = StringField("Name", validators=[validators.DataRequired()])


def clean_up_unused_or_old_api_keys(user_id: int):
total_keys = ApiKey.filter_by(user_id=user_id).count()
if total_keys <= config.MAX_API_KEYS:
return
# Remove oldest unused
for api_key in (
ApiKey.filter_by(user_id=user_id, last_used=None)
.order_by(ApiKey.created_at.asc())
.all()
):
Session.delete(api_key)
total_keys -= 1
if total_keys <= config.MAX_API_KEYS:
return
# Clean up oldest used
for api_key in (
ApiKey.filter_by(user_id=user_id).order_by(ApiKey.last_used.asc()).all()
):
Session.delete(api_key)
total_keys -= 1
if total_keys <= config.MAX_API_KEYS:
return


@dashboard_bp.route("/api_key", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("10/hour")
def api_key():
api_keys = (
ApiKey.filter(ApiKey.user_id == current_user.id)
@@ -50,6 +77,7 @@ def api_key():

elif request.form.get("form-name") == "create":
if new_api_key_form.validate():
clean_up_unused_or_old_api_keys(current_user.id)
new_api_key = ApiKey.create(
name=new_api_key_form.name.data, user_id=current_user.id
)

@@ -1,14 +1,9 @@
from app.db import Session

"""
List of apps that user has used via the "Sign in with SimpleLogin"
"""

from flask import render_template, request, flash, redirect
from flask_login import login_required, current_user
from sqlalchemy.orm import joinedload

from app.dashboard.base import dashboard_bp
from app.db import Session
from app.models import (
ClientUser,
)
@@ -17,6 +12,10 @@ from app.models import (
@dashboard_bp.route("/app", methods=["GET", "POST"])
@login_required
def app_route():
"""
List of apps that user has used via the "Sign in with SimpleLogin"
"""

client_users = (
ClientUser.filter_by(user_id=current_user.id)
.options(joinedload(ClientUser.client))

@@ -68,9 +68,14 @@ def coupon_route():
)
return redirect(request.url)

coupon.used_by_user_id = current_user.id
coupon.used = True
Session.commit()
updated = (
Session.query(Coupon)
.filter_by(code=code, used=False)
.update({"used_by_user_id": current_user.id, "used": True})
)
if updated != 1:
flash("Coupon is not valid", "error")
return redirect(request.url)

manual_sub: ManualSubscription = ManualSubscription.get_by(
user_id=current_user.id
@@ -95,7 +100,7 @@ def coupon_route():
commit=True,
)
flash(
f"Your account has been upgraded to Premium, thanks for your support!",
"Your account has been upgraded to Premium, thanks for your support!",
"success",
)

@@ -120,18 +120,11 @@ def custom_alias():
email=full_alias
)
custom_domain = domain_deleted_alias.domain
if domain_deleted_alias.user_id == current_user.id:
flash(
f"You have deleted this alias before. You can restore it on "
f"{custom_domain.domain} 'Deleted Alias' page",
"error",
)
else:
# should never happen as user can only choose their domains
LOG.e(
"Deleted Alias %s does not belong to user %s",
domain_deleted_alias,
)
flash(
f"You have deleted this alias before. You can restore it on "
f"{custom_domain.domain} 'Deleted Alias' page",
"error",
)

elif DeletedAlias.get_by(email=full_alias):
flash(general_error_msg, "error")

@@ -67,7 +67,7 @@ def directory():
if request.method == "POST":
if request.form.get("form-name") == "delete":
if not delete_dir_form.validate():
flash(f"Invalid request", "warning")
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_obj = Directory.get(delete_dir_form.directory_id.data)

@@ -87,7 +87,7 @@ def directory():

if request.form.get("form-name") == "toggle-directory":
if not toggle_dir_form.validate():
flash(f"Invalid request", "warning")
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = toggle_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)
@@ -109,7 +109,7 @@ def directory():

elif request.form.get("form-name") == "update":
if not update_dir_form.validate():
flash(f"Invalid request", "warning")
flash("Invalid request", "warning")
return redirect(url_for("dashboard.directory"))
dir_id = update_dir_form.directory_id.data
dir_obj = Directory.get(dir_id)

@@ -8,6 +8,7 @@ from wtforms import PasswordField, validators

from app.config import CONNECT_WITH_PROTON
from app.dashboard.base import dashboard_bp
from app.extensions import limiter
from app.log import LOG
from app.models import PartnerUser
from app.proton.utils import get_proton_partner
@@ -21,6 +22,7 @@ class LoginForm(FlaskForm):


@dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
@limiter.limit("3/minute")
@login_required
def enter_sudo():
password_check_form = LoginForm()
@@ -57,15 +59,14 @@ def enter_sudo():
)


def ensure_sudo_active():
if "sudo_time" not in session or (time() - int(session["sudo_time"])) > _SUDO_GAP:
return redirect(url_for("dashboard.enter_sudo", next=request.path))


def sudo_required(f):
@wraps(f)
def wrap(*args, **kwargs):
ensure_sudo_active()
if (
"sudo_time" not in session
or (time() - int(session["sudo_time"])) > _SUDO_GAP
):
return redirect(url_for("dashboard.enter_sudo", next=request.path))
return f(*args, **kwargs)

return wrap

@@ -57,6 +57,10 @@ def get_stats(user: User) -> Stats:
methods=["POST"],
exempt_when=lambda: request.form.get("form-name") != "create-random-email",
)
@limiter.limit(
"5/minute",
methods=["GET"],
)
@login_required
@parallel_limiter.lock(
name="alias_creation",
@@ -150,7 +154,13 @@ def index():
flash(f"Alias {alias.email} has been disabled", "success")

return redirect(
url_for("dashboard.index", query=query, sort=sort, filter=alias_filter)
url_for(
"dashboard.index",
query=query,
sort=sort,
filter=alias_filter,
page=page,
)
)

mailboxes = current_user.mailboxes()

@@ -1,3 +1,7 @@
import base64
import binascii
import json

import arrow
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_required, current_user
@@ -15,8 +19,8 @@ from app.email_utils import (
mailbox_already_used,
render,
send_email,
is_valid_email,
)
from app.email_validation import is_valid_email
from app.log import LOG
from app.models import Mailbox, Job
from app.utils import CSRFValidationForm
@@ -69,17 +73,20 @@ def mailbox_route():
transfer_mailbox = Mailbox.get(transfer_mailbox_id)

if not transfer_mailbox or transfer_mailbox.user_id != current_user.id:
flash("You must transfer the aliases to a mailbox you own.")
flash(
"You must transfer the aliases to a mailbox you own.", "error"
)
return redirect(url_for("dashboard.mailbox_route"))

if transfer_mailbox.id == mailbox.id:
flash(
"You can not transfer the aliases to the mailbox you want to delete."
"You can not transfer the aliases to the mailbox you want to delete.",
"error",
)
return redirect(url_for("dashboard.mailbox_route"))

if not transfer_mailbox.verified:
flash("Your new mailbox is not verified")
flash("Your new mailbox is not verified", "error")
return redirect(url_for("dashboard.mailbox_route"))

# Schedule delete account job
@@ -147,12 +154,12 @@ def mailbox_route():
elif not email_can_be_used_as_mailbox(mailbox_email):
flash(f"You cannot use {mailbox_email}.", "error")
else:
transfer_mailbox = Mailbox.create(
new_mailbox = Mailbox.create(
email=mailbox_email, user_id=current_user.id
)
Session.commit()

send_verification_email(current_user, transfer_mailbox)
send_verification_email(current_user, new_mailbox)

flash(
f"You are going to receive an email to confirm {mailbox_email}.",
@@ -162,7 +169,7 @@ def mailbox_route():
return redirect(
url_for(
"dashboard.mailbox_detail_route",
mailbox_id=transfer_mailbox.id,
mailbox_id=new_mailbox.id,
)
)

@@ -177,7 +184,9 @@ def mailbox_route():

def send_verification_email(user, mailbox):
s = TimestampSigner(MAILBOX_SECRET)
mailbox_id_signed = s.sign(str(mailbox.id)).decode()
encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
b64_data = base64.urlsafe_b64encode(encoded_data)
mailbox_id_signed = s.sign(b64_data).decode()
verification_url = (
URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
)
@@ -202,22 +211,34 @@ def send_verification_email(user, mailbox):
@dashboard_bp.route("/mailbox_verify")
def mailbox_verify():
s = TimestampSigner(MAILBOX_SECRET)
mailbox_id = request.args.get("mailbox_id")

mailbox_verify_request = request.args.get("mailbox_id")
try:
r_id = int(s.unsign(mailbox_id, max_age=900))
mailbox_raw_data = s.unsign(mailbox_verify_request, max_age=900)
except Exception:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
else:
mailbox = Mailbox.get(r_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
try:
decoded_data = base64.urlsafe_b64decode(mailbox_raw_data)
except binascii.Error:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_data = json.loads(decoded_data)
if not isinstance(mailbox_data, list) or len(mailbox_data) != 2:
flash("Invalid link. Please delete and re-add your mailbox", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_id = mailbox_data[0]
mailbox = Mailbox.get(mailbox_id)
if not mailbox:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))
mailbox_email = mailbox_data[1]
if mailbox_email != mailbox.email:
flash("Invalid link", "error")
return redirect(url_for("dashboard.mailbox_route"))

mailbox.verified = True
Session.commit()
mailbox.verified = True
Session.commit()

LOG.d("Mailbox %s is verified", mailbox)
LOG.d("Mailbox %s is verified", mailbox)

return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)
return render_template("dashboard/mailbox_validation.html", mailbox=mailbox)

@@ -30,7 +30,7 @@ class ChangeEmailForm(FlaskForm):
@dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
@login_required
def mailbox_detail_route(mailbox_id):
mailbox = Mailbox.get(mailbox_id)
mailbox: Mailbox = Mailbox.get(mailbox_id)
if not mailbox or mailbox.user_id != current_user.id:
flash("You cannot see this page", "warning")
return redirect(url_for("dashboard.index"))
@@ -144,6 +144,15 @@ def mailbox_detail_route(mailbox_id):
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)

if mailbox.is_proton():
flash(
"Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
"info",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)

mailbox.pgp_public_key = request.form.get("pgp")
try:
mailbox.pgp_finger_print = load_public_key_and_check(
@@ -182,25 +191,16 @@ def mailbox_detail_route(mailbox_id):
)
elif request.form.get("form-name") == "generic-subject":
if request.form.get("action") == "save":
if not mailbox.pgp_enabled():
flash(
"Generic subject can only be used on PGP-enabled mailbox",
"error",
)
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)

mailbox.generic_subject = request.form.get("generic-subject")
Session.commit()
flash("Generic subject for PGP-encrypted email is enabled", "success")
flash("Generic subject is enabled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)
elif request.form.get("action") == "remove":
mailbox.generic_subject = None
Session.commit()
flash("Generic subject for PGP-encrypted email is disabled", "success")
flash("Generic subject is disabled", "success")
return redirect(
url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
)

@@ -80,8 +80,9 @@ def pricing():
@dashboard_bp.route("/subscription_success")
@login_required
def subscription_success():
flash("Thanks so much for supporting SimpleLogin!", "success")
return redirect(url_for("dashboard.index"))
return render_template(
"dashboard/thank-you.html",
)


@dashboard_bp.route("/coinbase_checkout")

@@ -23,7 +23,6 @@ from app.config import (
CONNECT_WITH_PROTON,
)
from app.dashboard.base import dashboard_bp
from app.dashboard.views.enter_sudo import sudo_required
from app.db import Session
from app.email_utils import (
email_can_be_used_as_mailbox,
@@ -106,7 +105,6 @@ def get_partner_subscription_and_name(

@dashboard_bp.route("/setting", methods=["GET", "POST"])
@login_required
@sudo_required
@limiter.limit("5/minute", methods=["POST"])
def setting():
form = SettingForm()
@@ -130,7 +128,6 @@ def setting():
new_email_valid = True
new_email = canonicalize_email(change_email_form.email.data)
if new_email != current_user.email and not pending_email:

# check if this email is not already used
if personal_email_already_used(new_email) or Alias.get_by(
email=new_email
@@ -200,6 +197,16 @@ def setting():
)
return redirect(url_for("dashboard.setting"))

if current_user.profile_picture_id is not None:
current_profile_file = File.get_by(
id=current_user.profile_picture_id
)
if (
current_profile_file is not None
and current_profile_file.user_id == current_user.id
):
s3.delete(current_profile_file.path)

file_path = random_string(30)
file = File.create(user_id=current_user.id, path=file_path)

@@ -453,8 +460,13 @@ def send_change_email_confirmation(user: User, email_change: EmailChange):


@dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
@login_required
def resend_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
# extend email change expiration
@@ -474,6 +486,10 @@ def resend_email_change():
@dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
@login_required
def cancel_email_change():
form = CSRFValidationForm()
if not form.validate():
flash("Invalid request. Please try again", "warning")
return redirect(url_for("dashboard.setting"))
email_change = EmailChange.get_by(user_id=current_user.id)
if email_change:
EmailChange.delete(email_change.id)

@@ -75,12 +75,11 @@ def block_contact(contact_id):
@dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
@login_required
def encoded_unsubscribe(encoded_request: str):

unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
current_user, encoded_request
)
if not unsub_data:
flash(f"Invalid unsubscribe request", "error")
flash("Invalid unsubscribe request", "error")
return redirect(url_for("dashboard.index"))
if unsub_data.action == UnsubscribeAction.DisableAlias:
alias = Alias.get(unsub_data.data)
@@ -97,14 +96,14 @@ def encoded_unsubscribe(encoded_request: str):
)
)
if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
flash(f"You've unsubscribed from the newsletter", "success")
flash("You've unsubscribed from the newsletter", "success")
return redirect(
url_for(
"dashboard.index",
)
)
if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
flash(f"The original unsubscribe request has been forwarded", "success")
flash("The original unsubscribe request has been forwarded", "success")
return redirect(
url_for(
"dashboard.index",

@ -1 +1,3 @@
|
|||
from .views import index, new_client, client_detail
|
||||
|
||||
__all__ = ["index", "new_client", "client_detail"]
|
||||
|
|
|
@ -87,7 +87,7 @@ def client_detail(client_id):
|
|||
)
|
||||
|
||||
flash(
|
||||
f"Thanks for submitting, we are informed and will come back to you asap!",
|
||||
"Thanks for submitting, we are informed and will come back to you asap!",
|
||||
"success",
|
||||
)
|
||||
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
from .views import index
|
||||
|
||||
__all__ = ["index"]
|
||||
|
|
|
@ -34,7 +34,7 @@ def get_cname_record(hostname) -> Optional[str]:
|
|||
|
||||
|
||||
def get_mx_domains(hostname) -> [(int, str)]:
|
||||
"""return list of (priority, domain name).
|
||||
"""return list of (priority, domain name) sorted by priority (lowest priority first)
|
||||
domain name ends with a "." at the end.
|
||||
"""
|
||||
try:
|
||||
|
@ -50,7 +50,7 @@ def get_mx_domains(hostname) -> [(int, str)]:
|
|||
|
||||
ret.append((int(parts[0]), parts[1]))
|
||||
|
||||
return ret
|
||||
return sorted(ret, key=lambda prio_domain: prio_domain[0])
|
||||
|
||||
|
||||
_include_spf = "include:"
|
||||
|
|
|
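A minimal standalone sketch of the sorting contract introduced in get_mx_domains() above: records come back as (priority, domain) pairs, lowest priority (most preferred) first, with a trailing dot on the domain. The sample records below are made up for illustration.

from typing import List, Tuple

def pick_preferred_mx(mx_domains: List[Tuple[int, str]]) -> str:
    """Return the most preferred MX host, stripping the trailing dot."""
    if not mx_domains:
        raise ValueError("no MX records")
    _priority, domain = mx_domains[0]  # list is already sorted by priority
    return domain.rstrip(".")

# made-up records for the example
records = sorted(
    [(20, "mx2.example.com."), (10, "mx1.example.com.")],
    key=lambda prio_domain: prio_domain[0],
)
print(pick_preferred_mx(records))  # -> mx1.example.com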
@ -20,6 +20,7 @@ X_SPAM_STATUS = "X-Spam-Status"
|
|||
LIST_UNSUBSCRIBE = "List-Unsubscribe"
|
||||
LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
|
||||
RETURN_PATH = "Return-Path"
|
||||
AUTHENTICATION_RESULTS = "Authentication-Results"
|
||||
|
||||
# headers used to DKIM sign in order of preference
|
||||
DKIM_HEADERS = [
|
||||
|
@ -32,6 +33,7 @@ DKIM_HEADERS = [
|
|||
SL_DIRECTION = "X-SimpleLogin-Type"
|
||||
SL_EMAIL_LOG_ID = "X-SimpleLogin-EmailLog-ID"
|
||||
SL_ENVELOPE_FROM = "X-SimpleLogin-Envelope-From"
|
||||
SL_ORIGINAL_FROM = "X-SimpleLogin-Original-From"
|
||||
SL_ENVELOPE_TO = "X-SimpleLogin-Envelope-To"
|
||||
SL_CLIENT_IP = "X-SimpleLogin-Client-IP"
|
||||
|
||||
|
|
|
@ -60,4 +60,5 @@ E522 = (
|
|||
)
|
||||
E523 = "550 SL E523 Unknown error"
|
||||
E524 = "550 SL E524 Wrong use of reverse-alias"
|
||||
E525 = "550 SL E525 Alias loop"
|
||||
# endregion
|
||||
|
|
|
@ -54,6 +54,7 @@ from app.models import (
|
|||
IgnoreBounceSender,
|
||||
InvalidMailboxDomain,
|
||||
VerpType,
|
||||
available_sl_email,
|
||||
)
|
||||
from app.utils import (
|
||||
random_string,
|
||||
|
@ -92,7 +93,7 @@ def send_welcome_email(user):
|
|||
|
||||
send_email(
|
||||
comm_email,
|
||||
f"Welcome to SimpleLogin",
|
||||
"Welcome to SimpleLogin",
|
||||
render("com/welcome.txt", user=user, alias=alias),
|
||||
render("com/welcome.html", user=user, alias=alias),
|
||||
unsubscribe_link,
|
||||
|
@ -103,7 +104,7 @@ def send_welcome_email(user):
|
|||
def send_trial_end_soon_email(user):
|
||||
send_email(
|
||||
user.email,
|
||||
f"Your trial will end soon",
|
||||
"Your trial will end soon",
|
||||
render("transactional/trial-end.txt.jinja2", user=user),
|
||||
render("transactional/trial-end.html", user=user),
|
||||
ignore_smtp_error=True,
|
||||
|
@ -113,7 +114,7 @@ def send_trial_end_soon_email(user):
|
|||
def send_activation_email(email, activation_link):
|
||||
send_email(
|
||||
email,
|
||||
f"Just one more step to join SimpleLogin",
|
||||
"Just one more step to join SimpleLogin",
|
||||
render(
|
||||
"transactional/activation.txt",
|
||||
activation_link=activation_link,
|
||||
|
@ -767,7 +768,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
|
|||
ret = ""
|
||||
for to_decoded_str, charset in decode_header(header):
|
||||
if charset is None:
|
||||
if type(to_decoded_str) is bytes:
|
||||
if isinstance(to_decoded_str, bytes):
|
||||
decoded_str = to_decoded_str.decode()
|
||||
else:
|
||||
decoded_str = to_decoded_str
|
||||
|
@ -804,13 +805,13 @@ def to_bytes(msg: Message):
|
|||
for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
|
||||
try:
|
||||
return msg.as_bytes(policy=generator_policy)
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)
|
||||
|
||||
msg_string = msg.as_string()
|
||||
try:
|
||||
return msg_string.encode()
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_string().encode() fails", exc_info=True)
|
||||
|
||||
return msg_string.encode(errors="replace")
|
||||
|
@ -827,19 +828,6 @@ def should_add_dkim_signature(domain: str) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def is_valid_email(email_address: str) -> bool:
|
||||
"""
|
||||
Used to check whether an email address is valid
|
||||
NOT run MX check.
|
||||
NOT allow unicode.
|
||||
"""
|
||||
try:
|
||||
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
|
||||
return True
|
||||
except EmailNotValidError:
|
||||
return False
|
||||
|
||||
|
||||
class EmailEncoding(enum.Enum):
|
||||
BASE64 = "base64"
|
||||
QUOTED = "quoted-printable"
|
||||
|
@ -918,7 +906,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
|||
if content_type == "text/plain":
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
clone_msg = copy(msg)
|
||||
new_payload = f"""{text_header}
|
||||
------------------------------
|
||||
|
@ -928,7 +916,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
|||
elif content_type == "text/html":
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
|
||||
-premailer-cellspacing: 0; margin: 0; padding: 0;">
|
||||
<tr>
|
||||
|
@ -950,6 +938,8 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
|||
for part in msg.get_payload():
|
||||
if isinstance(part, Message):
|
||||
new_parts.append(add_header(part, text_header, html_header))
|
||||
elif isinstance(part, str):
|
||||
new_parts.append(MIMEText(part))
|
||||
else:
|
||||
new_parts.append(part)
|
||||
clone_msg = copy(msg)
|
||||
|
@ -958,7 +948,14 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
|||
|
||||
elif content_type in ("multipart/mixed", "multipart/signed"):
|
||||
new_parts = []
|
||||
parts = list(msg.get_payload())
|
||||
payload = msg.get_payload()
|
||||
if isinstance(payload, str):
|
||||
# The message is badly formatted inject as new
|
||||
new_parts = [MIMEText(text_header, "plain"), MIMEText(payload, "plain")]
|
||||
clone_msg = copy(msg)
|
||||
clone_msg.set_payload(new_parts)
|
||||
return clone_msg
|
||||
parts = list(payload)
|
||||
LOG.d("only add header for the first part for %s", content_type)
|
||||
for ix, part in enumerate(parts):
|
||||
if ix == 0:
|
||||
|
@ -975,7 +972,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
|
|||
|
||||
|
||||
def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
||||
if type(msg) is str:
|
||||
if isinstance(msg, str):
|
||||
msg = msg.replace(old, new)
|
||||
return msg
|
||||
|
||||
|
@ -998,7 +995,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
|||
if content_type in ("text/plain", "text/html"):
|
||||
encoding = get_encoding(msg)
|
||||
payload = msg.get_payload()
|
||||
if type(payload) is str:
|
||||
if isinstance(payload, str):
|
||||
if encoding == EmailEncoding.QUOTED:
|
||||
LOG.d("handle quoted-printable replace %s -> %s", old, new)
|
||||
# first decode the payload
|
||||
|
@ -1043,7 +1040,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
|
|||
return msg
|
||||
|
||||
|
||||
def generate_reply_email(contact_email: str, user: User) -> str:
|
||||
def generate_reply_email(contact_email: str, alias: Alias) -> str:
|
||||
"""
|
||||
generate a reply_email (aka reverse-alias), make sure it isn't used by any contact
|
||||
"""
|
||||
|
@ -1054,6 +1051,7 @@ def generate_reply_email(contact_email: str, user: User) -> str:
|
|||
|
||||
include_sender_in_reverse_alias = False
|
||||
|
||||
user = alias.user
|
||||
# user has set this option explicitly
|
||||
if user.include_sender_in_reverse_alias is not None:
|
||||
include_sender_in_reverse_alias = user.include_sender_in_reverse_alias
|
||||
|
@ -1068,6 +1066,12 @@ def generate_reply_email(contact_email: str, user: User) -> str:
|
|||
contact_email = contact_email.replace(".", "_")
|
||||
contact_email = convert_to_alphanumeric(contact_email)
|
||||
|
||||
reply_domain = config.EMAIL_DOMAIN
|
||||
alias_domain = get_email_domain_part(alias.email)
|
||||
sl_domain = SLDomain.get_by(domain=alias_domain)
|
||||
if sl_domain and sl_domain.use_as_reverse_alias:
|
||||
reply_domain = alias_domain
|
||||
|
||||
# not use while to avoid infinite loop
|
||||
for _ in range(1000):
|
||||
if include_sender_in_reverse_alias and contact_email:
|
||||
|
@ -1075,15 +1079,15 @@ def generate_reply_email(contact_email: str, user: User) -> str:
|
|||
reply_email = (
|
||||
# do not use the ra+ anymore
|
||||
# f"ra+{contact_email}+{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
f"{contact_email}_{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
f"{contact_email}_{random_string(random_length)}@{reply_domain}"
|
||||
)
|
||||
else:
|
||||
random_length = random.randint(20, 50)
|
||||
# do not use the ra+ anymore
|
||||
# reply_email = f"ra+{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
reply_email = f"{random_string(random_length)}@{config.EMAIL_DOMAIN}"
|
||||
reply_email = f"{random_string(random_length)}@{reply_domain}"
|
||||
|
||||
if not Contact.get_by(reply_email=reply_email):
|
||||
if available_sl_email(reply_email):
|
||||
return reply_email
|
||||
|
||||
raise Exception("Cannot generate reply email")
|
||||
|
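The new reply_domain logic above keeps the same bounded-retry shape as before. A generic sketch of that pattern, with a plain set standing in for the database uniqueness check (available_sl_email); names here are simplified and not the project's:

import secrets
import string

def make_reverse_alias(contact_local: str, reply_domain: str, taken) -> str:
    # bounded loop rather than `while True`, mirroring the code above
    for _ in range(1000):
        suffix = "".join(
            secrets.choice(string.ascii_lowercase + string.digits) for _ in range(10)
        )
        candidate = f"{contact_local}_{suffix}@{reply_domain}"
        if not taken(candidate):
            return candidate
    raise RuntimeError("could not generate a free reverse-alias")

used = set()  # stands in for the DB uniqueness check
print(make_reverse_alias("newsletter_at_example_com", "example.org", used.__contains__))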
@ -1099,26 +1103,6 @@ def is_reverse_alias(address: str) -> bool:
|
|||
)
|
||||
|
||||
|
||||
# allow also + and @ that are present in a reply address
|
||||
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
|
||||
|
||||
|
||||
def normalize_reply_email(reply_email: str) -> str:
|
||||
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
|
||||
if not reply_email.isascii():
|
||||
reply_email = convert_to_id(reply_email)
|
||||
|
||||
ret = []
|
||||
# drop all control characters like shift, separator, etc
|
||||
for c in reply_email:
|
||||
if c not in _ALLOWED_CHARS:
|
||||
ret.append("_")
|
||||
else:
|
||||
ret.append(c)
|
||||
|
||||
return "".join(ret)
|
||||
|
||||
|
||||
def should_disable(alias: Alias) -> (bool, str):
|
||||
"""
|
||||
Return whether an alias should be disabled and if yes, the reason why
|
||||
|
|
38 app/email_validation.py (Normal file)
|
@ -0,0 +1,38 @@
|
|||
from email_validator import (
|
||||
validate_email,
|
||||
EmailNotValidError,
|
||||
)
|
||||
|
||||
from app.utils import convert_to_id
|
||||
|
||||
# allow also + and @ that are present in a reply address
|
||||
_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
|
||||
|
||||
|
||||
def is_valid_email(email_address: str) -> bool:
|
||||
"""
|
||||
Used to check whether an email address is valid
|
||||
NOT run MX check.
|
||||
NOT allow unicode.
|
||||
"""
|
||||
try:
|
||||
validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
|
||||
return True
|
||||
except EmailNotValidError:
|
||||
return False
|
||||
|
||||
|
||||
def normalize_reply_email(reply_email: str) -> str:
|
||||
"""Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
|
||||
if not reply_email.isascii():
|
||||
reply_email = convert_to_id(reply_email)
|
||||
|
||||
ret = []
|
||||
# drop all control characters like shift, separator, etc
|
||||
for c in reply_email:
|
||||
if c not in _ALLOWED_CHARS:
|
||||
ret.append("_")
|
||||
else:
|
||||
ret.append(c)
|
||||
|
||||
return "".join(ret)
|
|
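A short usage sketch for the helpers moved into app/email_validation.py; it assumes the repo's app package (and its email_validator dependency) is importable, e.g. from the project's test environment:

from app.email_validation import is_valid_email, normalize_reply_email

assert is_valid_email("alice@example.com")
assert not is_valid_email("not-an-email")
# '#' is outside _ALLOWED_CHARS, so it is replaced with '_'
assert normalize_reply_email("weird#char@x.y") == "weird_char@x.y"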
@ -71,7 +71,7 @@ class ErrContactErrorUpgradeNeeded(SLException):
|
|||
"""raised when user cannot create a contact because the plan doesn't allow it"""
|
||||
|
||||
def error_for_user(self) -> str:
|
||||
return f"Please upgrade to premium to create reverse-alias"
|
||||
return "Please upgrade to premium to create reverse-alias"
|
||||
|
||||
|
||||
class ErrAddressInvalid(SLException):
|
||||
|
@ -84,6 +84,14 @@ class ErrAddressInvalid(SLException):
|
|||
return f"{self.address} is not a valid email address"
|
||||
|
||||
|
||||
class InvalidContactEmailError(SLException):
|
||||
def __init__(self, website_email: str): # noqa: F821
|
||||
self.website_email = website_email
|
||||
|
||||
def error_for_user(self) -> str:
|
||||
return f"Cannot create contact with invalid email {self.website_email}"
|
||||
|
||||
|
||||
class ErrContactAlreadyExists(SLException):
|
||||
"""raised when a contact already exists"""
|
||||
|
||||
|
@ -108,3 +116,15 @@ class AccountAlreadyLinkedToAnotherPartnerException(LinkException):
|
|||
class AccountAlreadyLinkedToAnotherUserException(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__("This account is linked to another user")
|
||||
|
||||
|
||||
class AccountIsUsingAliasAsEmail(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__("Your account has an alias as it's email address")
|
||||
|
||||
|
||||
class ProtonAccountNotVerified(LinkException):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
"The Proton account you are trying to use has not been verified"
|
||||
)
|
||||
|
|
|
@ -9,6 +9,7 @@ class LoginEvent:
|
|||
failed = 1
|
||||
disabled_login = 2
|
||||
not_activated = 3
|
||||
scheduled_to_be_deleted = 4
|
||||
|
||||
class Source(EnumE):
|
||||
web = 0
|
||||
|
|
|
@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(
|
|||
|
||||
from_header = get_header_unicode(msg[headers.FROM])
|
||||
|
||||
warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
More info on https://simplelogin.io/docs/getting-started/anti-phishing/
|
||||
"""
|
||||
warning_html = f"""
|
||||
warning_html = """
|
||||
<p style="color:red">
|
||||
This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
|
||||
More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
|
||||
|
|
|
@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
|
|||
return True
|
||||
|
||||
if is_deleted_alias(msg_info.sender_address):
|
||||
LOG.i(f"Complaint is for deleted alias. Do nothing")
|
||||
LOG.i("Complaint is for deleted alias. Do nothing")
|
||||
return True
|
||||
|
||||
contact = Contact.get_by(reply_email=msg_info.sender_address)
|
||||
|
@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
|
|||
alias = find_alias_with_address(msg_info.rcpt_address)
|
||||
|
||||
if is_deleted_alias(msg_info.rcpt_address):
|
||||
LOG.i(f"Complaint is for deleted alias. Do nothing")
|
||||
LOG.i("Complaint is for deleted alias. Do nothing")
|
||||
return True
|
||||
|
||||
if not alias:
|
||||
|
|
|
@ -54,9 +54,8 @@ class UnsubscribeEncoder:
|
|||
def encode_subject(
|
||||
cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
|
||||
) -> str:
|
||||
if (
|
||||
action != UnsubscribeAction.OriginalUnsubscribeMailto
|
||||
and type(data) is not int
|
||||
if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
|
||||
data, int
|
||||
):
|
||||
raise ValueError(f"Data has to be an int for an action of type {action}")
|
||||
if action == UnsubscribeAction.OriginalUnsubscribeMailto:
|
||||
|
@ -74,8 +73,8 @@ class UnsubscribeEncoder:
|
|||
)
|
||||
signed_data = cls._get_signer().sign(serialized_data).decode("utf-8")
|
||||
encoded_request = f"{UNSUB_PREFIX}.{signed_data}"
|
||||
if len(encoded_request) > 256:
|
||||
LOG.e("Encoded request is longer than 256 chars")
|
||||
if len(encoded_request) > 512:
|
||||
LOG.w("Encoded request is longer than 512 chars")
|
||||
return encoded_request
|
||||
|
||||
@staticmethod
|
||||
|
|
|
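A hedged sketch of the encode step above: serialize, sign, add the prefix, and only warn (rather than fail) once the result passes 512 characters. itsdangerous and the literal prefix value are assumptions made for illustration; the app's _get_signer() and UNSUB_PREFIX may differ.

import json
from itsdangerous import Signer

UNSUB_PREFIX = "unsub"  # placeholder value, not necessarily the app's constant

def encode(action: int, data: int, secret: str = "not-a-real-secret") -> str:
    serialized = json.dumps([action, data]).encode("utf-8")
    signed = Signer(secret).sign(serialized).decode("utf-8")
    encoded = f"{UNSUB_PREFIX}.{signed}"
    if len(encoded) > 512:
        print("warning: encoded request longer than 512 chars")
    return encoded

print(encode(1, 42))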
@ -1,4 +1,5 @@
|
|||
import urllib
|
||||
from email.header import Header
|
||||
from email.message import Message
|
||||
|
||||
from app.email import headers
|
||||
|
@ -9,6 +10,7 @@ from app.handler.unsubscribe_encoder import (
|
|||
UnsubscribeData,
|
||||
UnsubscribeOriginalData,
|
||||
)
|
||||
from app.log import LOG
|
||||
from app.models import Alias, Contact, UnsubscribeBehaviourEnum
|
||||
|
||||
|
||||
|
@ -30,7 +32,10 @@ class UnsubscribeGenerator:
|
|||
"""
|
||||
unsubscribe_data = message[headers.LIST_UNSUBSCRIBE]
|
||||
if not unsubscribe_data:
|
||||
LOG.info("Email has no unsubscribe header")
|
||||
return message
|
||||
if isinstance(unsubscribe_data, Header):
|
||||
unsubscribe_data = str(unsubscribe_data.encode())
|
||||
raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
|
||||
mailto_unsubs = None
|
||||
other_unsubs = []
|
||||
|
@ -44,7 +49,9 @@ class UnsubscribeGenerator:
|
|||
if url_data.scheme == "mailto":
|
||||
query_data = urllib.parse.parse_qs(url_data.query)
|
||||
mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
|
||||
LOG.debug(f"Unsub is mailto to {mailto_unsubs}")
|
||||
else:
|
||||
LOG.debug(f"Unsub has {url_data.scheme} scheme")
|
||||
other_unsubs.append(method)
|
||||
# If there are non mailto unsubscribe methods, use those in the header
|
||||
if other_unsubs:
|
||||
|
@ -56,18 +63,19 @@ class UnsubscribeGenerator:
|
|||
add_or_replace_header(
|
||||
message, headers.LIST_UNSUBSCRIBE_POST, "List-Unsubscribe=One-Click"
|
||||
)
|
||||
LOG.debug(f"Adding click unsub methods to header {other_unsubs}")
|
||||
return message
|
||||
if not mailto_unsubs:
|
||||
message = delete_header(message, headers.LIST_UNSUBSCRIBE)
|
||||
message = delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
|
||||
elif not mailto_unsubs:
|
||||
LOG.debug("No unsubs. Deleting all unsub headers")
|
||||
delete_header(message, headers.LIST_UNSUBSCRIBE)
|
||||
delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
|
||||
return message
|
||||
return self._add_unsubscribe_header(
|
||||
message,
|
||||
UnsubscribeData(
|
||||
UnsubscribeAction.OriginalUnsubscribeMailto,
|
||||
UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
|
||||
),
|
||||
unsub_data = UnsubscribeData(
|
||||
UnsubscribeAction.OriginalUnsubscribeMailto,
|
||||
UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
|
||||
)
|
||||
LOG.debug(f"Adding unsub data {unsub_data}")
|
||||
return self._add_unsubscribe_header(message, unsub_data)
|
||||
|
||||
def _add_unsubscribe_header(
|
||||
self, message: Message, unsub: UnsubscribeData
|
||||
|
|
|
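An illustrative, self-contained version of the header parsing the generator performs above: split a List-Unsubscribe value into a mailto target and any other (e.g. https) methods. The header value below is made up.

import urllib.parse

header_value = "<mailto:unsub@news.example.com?subject=stop>, <https://news.example.com/unsub/123>"

mailto_unsub = None
other_unsubs = []
for method in (m.strip() for m in header_value.split(",")):
    url = urllib.parse.urlparse(method.strip("<>"))
    if url.scheme == "mailto":
        query = urllib.parse.parse_qs(url.query)
        mailto_unsub = (url.path, query.get("subject", [""])[0])
    else:
        other_unsubs.append(method.strip("<>"))

print(mailto_unsub)   # ('unsub@news.example.com', 'stop')
print(other_unsubs)   # ['https://news.example.com/unsub/123']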
@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):
|
|||
|
||||
LOG.d("Download file %s from %s", batch_import.file, file_url)
|
||||
r = requests.get(file_url)
|
||||
lines = [line.decode() for line in r.iter_lines()]
|
||||
lines = [line.decode("utf-8") for line in r.iter_lines()]
|
||||
|
||||
import_from_csv(batch_import, user, lines)
|
||||
|
||||
|
|
|
@ -1,2 +1,4 @@
|
|||
from .integrations import set_enable_proton_cookie
|
||||
from .exit_sudo import exit_sudo_mode
|
||||
|
||||
__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]
|
||||
|
|
|
@ -39,9 +39,8 @@ from app.models import (
|
|||
|
||||
|
||||
class ExportUserDataJob:
|
||||
|
||||
REMOVE_FIELDS = {
|
||||
"User": ("otp_secret",),
|
||||
"User": ("otp_secret", "password"),
|
||||
"Alias": ("ts_vector", "transfer_token", "hibp_last_check"),
|
||||
"CustomDomain": ("ownership_txt_token",),
|
||||
}
|
||||
|
|
|
@ -17,12 +17,11 @@ from attr import dataclass
|
|||
from app import config
|
||||
from app.email import headers
|
||||
from app.log import LOG
|
||||
from app.message_utils import message_to_bytes
|
||||
from app.message_utils import message_to_bytes, message_format_base64_parts
|
||||
|
||||
|
||||
@dataclass
|
||||
class SendRequest:
|
||||
|
||||
SAVE_EXTENSION = "sendrequest"
|
||||
|
||||
envelope_from: str
|
||||
|
@ -32,6 +31,7 @@ class SendRequest:
|
|||
rcpt_options: Dict = {}
|
||||
is_forward: bool = False
|
||||
ignore_smtp_errors: bool = False
|
||||
retries: int = 0
|
||||
|
||||
def to_bytes(self) -> bytes:
|
||||
if not config.SAVE_UNSENT_DIR:
|
||||
|
@ -45,6 +45,7 @@ class SendRequest:
|
|||
"mail_options": self.mail_options,
|
||||
"rcpt_options": self.rcpt_options,
|
||||
"is_forward": self.is_forward,
|
||||
"retries": self.retries,
|
||||
}
|
||||
return json.dumps(data).encode("utf-8")
|
||||
|
||||
|
@ -65,8 +66,33 @@ class SendRequest:
|
|||
mail_options=decoded_data["mail_options"],
|
||||
rcpt_options=decoded_data["rcpt_options"],
|
||||
is_forward=decoded_data["is_forward"],
|
||||
retries=decoded_data.get("retries", 1),
|
||||
)
|
||||
|
||||
def save_request_to_unsent_dir(self, prefix: str = "DeliveryFail"):
|
||||
file_name = (
|
||||
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
)
|
||||
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
|
||||
self.save_request_to_file(file_path)
|
||||
|
||||
@staticmethod
|
||||
def save_request_to_failed_dir(self, prefix: str = "DeliveryRetryFail"):
|
||||
file_name = (
|
||||
f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
)
|
||||
dir_name = os.path.join(config.SAVE_UNSENT_DIR, "failed")
|
||||
if not os.path.isdir(dir_name):
|
||||
os.makedirs(dir_name)
|
||||
file_path = os.path.join(dir_name, file_name)
|
||||
self.save_request_to_file(file_path)
|
||||
|
||||
def save_request_to_file(self, file_path: str):
|
||||
file_contents = self.to_bytes()
|
||||
with open(file_path, "wb") as fd:
|
||||
fd.write(file_contents)
|
||||
LOG.i(f"Saved unsent message {file_path}")
|
||||
|
||||
|
||||
class MailSender:
|
||||
def __init__(self):
|
||||
|
@ -117,14 +143,12 @@ class MailSender:
|
|||
return True
|
||||
|
||||
def _send_to_smtp(self, send_request: SendRequest, retries: int) -> bool:
|
||||
if config.POSTFIX_SUBMISSION_TLS and config.POSTFIX_PORT == 25:
|
||||
smtp_port = 587
|
||||
else:
|
||||
smtp_port = config.POSTFIX_PORT
|
||||
try:
|
||||
start = time.time()
|
||||
with SMTP(
|
||||
config.POSTFIX_SERVER, smtp_port, timeout=config.POSTFIX_TIMEOUT
|
||||
config.POSTFIX_SERVER,
|
||||
config.POSTFIX_PORT,
|
||||
timeout=config.POSTFIX_TIMEOUT,
|
||||
) as smtp:
|
||||
if config.POSTFIX_SUBMISSION_TLS:
|
||||
smtp.starttls()
|
||||
|
@ -170,19 +194,12 @@ class MailSender:
|
|||
LOG.e(f"Ignore smtp error {e}")
|
||||
return False
|
||||
LOG.e(
|
||||
f"Could not send message to smtp server {config.POSTFIX_SERVER}:{smtp_port}"
|
||||
f"Could not send message to smtp server {config.POSTFIX_SERVER}:{config.POSTFIX_PORT}"
|
||||
)
|
||||
self._save_request_to_unsent_dir(send_request)
|
||||
if config.SAVE_UNSENT_DIR:
|
||||
send_request.save_request_to_unsent_dir()
|
||||
return False
|
||||
|
||||
def _save_request_to_unsent_dir(self, send_request: SendRequest):
|
||||
file_name = f"DeliveryFail-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
|
||||
file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
|
||||
file_contents = send_request.to_bytes()
|
||||
with open(file_path, "wb") as fd:
|
||||
fd.write(file_contents)
|
||||
LOG.i(f"Saved unsent message {file_path}")
|
||||
|
||||
|
||||
mail_sender = MailSender()
|
||||
|
||||
|
@ -216,6 +233,7 @@ def load_unsent_mails_from_fs_and_resend():
|
|||
LOG.i(f"Trying to re-deliver email {filename}")
|
||||
try:
|
||||
send_request = SendRequest.load_from_file(full_file_path)
|
||||
send_request.retries += 1
|
||||
except Exception as e:
|
||||
LOG.e(f"Cannot load {filename}. Error {e}")
|
||||
continue
|
||||
|
@ -227,6 +245,11 @@ def load_unsent_mails_from_fs_and_resend():
|
|||
"DeliverUnsentEmail", {"delivered": "true"}
|
||||
)
|
||||
else:
|
||||
if send_request.retries > 2:
|
||||
os.unlink(full_file_path)
|
||||
send_request.save_request_to_failed_dir()
|
||||
else:
|
||||
send_request.save_request_to_file(full_file_path)
|
||||
newrelic.agent.record_custom_event(
|
||||
"DeliverUnsentEmail", {"delivered": "false"}
|
||||
)
|
||||
|
@ -258,7 +281,7 @@ def sl_sendmail(
|
|||
send_request = SendRequest(
|
||||
envelope_from,
|
||||
envelope_to,
|
||||
msg,
|
||||
message_format_base64_parts(msg),
|
||||
mail_options,
|
||||
rcpt_options,
|
||||
is_forward,
|
||||
|
|
|
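The retry bookkeeping added to MailSender above boils down to: re-try each persisted request a few times, then park it in a failed/ directory. A standalone sketch of that pattern with simplified names (not the project's actual module):

import json
import os

MAX_RETRIES = 2

def retry_or_park(path: str, failed_dir: str, delivered: bool) -> None:
    with open(path) as fd:
        request = json.load(fd)
    if delivered:
        os.unlink(path)  # delivered: drop the saved request
        return
    request["retries"] = request.get("retries", 0) + 1
    if request["retries"] > MAX_RETRIES:
        # give up on automatic retries, keep the file aside for inspection
        os.makedirs(failed_dir, exist_ok=True)
        os.replace(path, os.path.join(failed_dir, os.path.basename(path)))
    else:
        with open(path, "w") as fd:
            fd.write(json.dumps(request))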
@ -1,21 +1,42 @@
|
|||
import re
|
||||
from email import policy
|
||||
from email.message import Message
|
||||
|
||||
from app.email import headers
|
||||
from app.log import LOG
|
||||
|
||||
# Spam assassin might flag as spam with a different line length
|
||||
BASE64_LINELENGTH = 76
|
||||
|
||||
|
||||
def message_to_bytes(msg: Message) -> bytes:
|
||||
"""replace Message.as_bytes() method by trying different policies"""
|
||||
for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
|
||||
try:
|
||||
return msg.as_bytes(policy=generator_policy)
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)
|
||||
|
||||
msg_string = msg.as_string()
|
||||
try:
|
||||
return msg_string.encode()
|
||||
except:
|
||||
except Exception:
|
||||
LOG.w("as_string().encode() fails", exc_info=True)
|
||||
|
||||
return msg_string.encode(errors="replace")
|
||||
|
||||
|
||||
def message_format_base64_parts(msg: Message) -> Message:
|
||||
for part in msg.walk():
|
||||
if part.get(
|
||||
headers.CONTENT_TRANSFER_ENCODING
|
||||
) == "base64" and part.get_content_type() in ("text/plain", "text/html"):
|
||||
# Remove line breaks
|
||||
body = re.sub("[\r\n]", "", part.get_payload())
|
||||
# Split in 80 column lines
|
||||
chunks = [
|
||||
body[i : i + BASE64_LINELENGTH]
|
||||
for i in range(0, len(body), BASE64_LINELENGTH)
|
||||
]
|
||||
part.set_payload("\r\n".join(chunks))
|
||||
return msg
|
||||
|
|
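A quick standalone check of the 76-column base64 re-wrapping idea behind message_format_base64_parts(); the message is built here for the example rather than taken from the app:

import re
from email.message import EmailMessage

BASE64_LINELENGTH = 76

msg = EmailMessage()
msg.set_content("hello " * 50, cte="base64")

# flatten the encoded body, then re-split it into 76-column lines
body = re.sub(r"[\r\n]", "", msg.get_payload())
chunks = [body[i:i + BASE64_LINELENGTH] for i in range(0, len(body), BASE64_LINELENGTH)]
msg.set_payload("\r\n".join(chunks))

assert all(len(line) <= BASE64_LINELENGTH for line in msg.get_payload().split("\r\n"))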
348 app/models.py
|
@ -1,6 +1,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import dataclasses
|
||||
import enum
|
||||
import hashlib
|
||||
import hmac
|
||||
|
@ -18,7 +19,7 @@ from flanker.addresslib import address
|
|||
from flask import url_for
|
||||
from flask_login import UserMixin
|
||||
from jinja2 import FileSystemLoader, Environment
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy import orm, or_
|
||||
from sqlalchemy import text, desc, CheckConstraint, Index, Column
|
||||
from sqlalchemy.dialects.postgresql import TSVECTOR
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
@ -29,6 +30,8 @@ from sqlalchemy_utils import ArrowType
|
|||
from app import config
|
||||
from app import s3
|
||||
from app.db import Session
|
||||
from app.dns_utils import get_mx_domains
|
||||
|
||||
from app.errors import (
|
||||
AliasInTrashError,
|
||||
DirectoryInTrashError,
|
||||
|
@ -44,7 +47,6 @@ from app.utils import (
|
|||
random_string,
|
||||
random_words,
|
||||
sanitize_email,
|
||||
random_word,
|
||||
)
|
||||
|
||||
Base = declarative_base()
|
||||
|
@ -274,6 +276,13 @@ class IntEnumType(sa.types.TypeDecorator):
|
|||
return self._enum_type(enum_value)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class AliasOptions:
|
||||
show_sl_domains: bool = True
|
||||
show_partner_domains: Optional[Partner] = None
|
||||
show_partner_premium: Optional[bool] = None
|
||||
|
||||
|
||||
class Hibp(Base, ModelMixin):
|
||||
__tablename__ = "hibp"
|
||||
name = sa.Column(sa.String(), nullable=False, unique=True, index=True)
|
||||
|
@ -292,7 +301,9 @@ class HibpNotifiedAlias(Base, ModelMixin):
|
|||
"""
|
||||
|
||||
__tablename__ = "hibp_notified_alias"
|
||||
alias_id = sa.Column(sa.ForeignKey("alias.id", ondelete="cascade"), nullable=False)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey("alias.id", ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=False)
|
||||
|
||||
notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
|
||||
|
@ -333,7 +344,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
sa.Boolean, default=True, nullable=False, server_default="1"
|
||||
)
|
||||
|
||||
activated = sa.Column(sa.Boolean, default=False, nullable=False)
|
||||
activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
|
||||
|
||||
# an account can be disabled if having harmful behavior
|
||||
disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
|
||||
|
@ -403,7 +414,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
)
|
||||
|
||||
referral_id = sa.Column(
|
||||
sa.ForeignKey("referral.id", ondelete="SET NULL"), nullable=True, default=None
|
||||
sa.ForeignKey("referral.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
index=True,
|
||||
)
|
||||
|
||||
referral = orm.relationship("Referral", foreign_keys=[referral_id])
|
||||
|
@ -420,7 +434,10 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
|
||||
# newsletter is sent to this address
|
||||
newsletter_alias_id = sa.Column(
|
||||
sa.ForeignKey("alias.id", ondelete="SET NULL"), nullable=True, default=None
|
||||
sa.ForeignKey("alias.id", ondelete="SET NULL"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
index=True,
|
||||
)
|
||||
|
||||
# whether to include the sender address in reverse-alias
|
||||
|
@ -434,7 +451,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
random_alias_suffix = sa.Column(
|
||||
sa.Integer,
|
||||
nullable=False,
|
||||
default=AliasSuffixEnum.random_string.value,
|
||||
default=AliasSuffixEnum.word.value,
|
||||
server_default=str(AliasSuffixEnum.random_string.value),
|
||||
)
|
||||
|
||||
|
@ -503,9 +520,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
server_default=BlockBehaviourEnum.return_2xx.name,
|
||||
)
|
||||
|
||||
# to keep existing behavior, the server default is TRUE whereas for new user, the default value is FALSE
|
||||
include_header_email_header = sa.Column(
|
||||
sa.Boolean, default=False, nullable=False, server_default="1"
|
||||
sa.Boolean, default=True, nullable=False, server_default="1"
|
||||
)
|
||||
|
||||
# bitwise flags. Allow for future expansion
|
||||
|
@ -519,11 +535,21 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
# Keep original unsub behaviour
|
||||
unsub_behaviour = sa.Column(
|
||||
IntEnumType(UnsubscribeBehaviourEnum),
|
||||
default=UnsubscribeBehaviourEnum.DisableAlias,
|
||||
default=UnsubscribeBehaviourEnum.PreserveOriginal,
|
||||
server_default=str(UnsubscribeBehaviourEnum.DisableAlias.value),
|
||||
nullable=False,
|
||||
)
|
||||
|
||||
# Trigger hard deletion of the account at this time
|
||||
delete_on = sa.Column(ArrowType, default=None)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index(
|
||||
"ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
|
||||
),
|
||||
sa.Index("ix_users_delete_on", delete_on),
|
||||
)
|
||||
|
||||
@property
|
||||
def directory_quota(self):
|
||||
return min(
|
||||
|
@ -558,7 +584,8 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
|
||||
@classmethod
|
||||
def create(cls, email, name="", password=None, from_partner=False, **kwargs):
|
||||
user: User = super(User, cls).create(email=email, name=name, **kwargs)
|
||||
email = sanitize_email(email)
|
||||
user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
|
||||
|
||||
if password:
|
||||
user.set_password(password)
|
||||
|
@ -569,19 +596,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
Session.flush()
|
||||
user.default_mailbox_id = mb.id
|
||||
|
||||
# create a first alias mail to show user how to use when they login
|
||||
alias = Alias.create_new(
|
||||
user,
|
||||
prefix="simplelogin-newsletter",
|
||||
mailbox_id=mb.id,
|
||||
note="This is your first alias. It's used to receive SimpleLogin communications "
|
||||
"like new features announcements, newsletters.",
|
||||
)
|
||||
Session.flush()
|
||||
|
||||
user.newsletter_alias_id = alias.id
|
||||
Session.flush()
|
||||
|
||||
# generate an alternative_id if needed
|
||||
if "alternative_id" not in kwargs:
|
||||
user.alternative_id = str(uuid.uuid4())
|
||||
|
@ -600,6 +614,19 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
Session.flush()
|
||||
return user
|
||||
|
||||
# create a first alias mail to show user how to use when they login
|
||||
alias = Alias.create_new(
|
||||
user,
|
||||
prefix="simplelogin-newsletter",
|
||||
mailbox_id=mb.id,
|
||||
note="This is your first alias. It's used to receive SimpleLogin communications "
|
||||
"like new features announcements, newsletters.",
|
||||
)
|
||||
Session.flush()
|
||||
|
||||
user.newsletter_alias_id = alias.id
|
||||
Session.flush()
|
||||
|
||||
if config.DISABLE_ONBOARDING:
|
||||
LOG.d("Disable onboarding emails")
|
||||
return user
|
||||
|
@ -625,7 +652,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
return user
|
||||
|
||||
def get_active_subscription(
|
||||
self,
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> Optional[
|
||||
Union[
|
||||
Subscription
|
||||
|
@ -653,19 +680,40 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
if coinbase_subscription and coinbase_subscription.is_active():
|
||||
return coinbase_subscription
|
||||
|
||||
partner_sub: PartnerSubscription = PartnerSubscription.find_by_user_id(self.id)
|
||||
if partner_sub and partner_sub.is_active():
|
||||
return partner_sub
|
||||
if include_partner_subscription:
|
||||
partner_sub: PartnerSubscription = PartnerSubscription.find_by_user_id(
|
||||
self.id
|
||||
)
|
||||
if partner_sub and partner_sub.is_active():
|
||||
return partner_sub
|
||||
|
||||
return None
|
||||
|
||||
def get_active_subscription_end(
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> Optional[arrow.Arrow]:
|
||||
sub = self.get_active_subscription(
|
||||
include_partner_subscription=include_partner_subscription
|
||||
)
|
||||
if isinstance(sub, Subscription):
|
||||
return arrow.get(sub.next_bill_date)
|
||||
if isinstance(sub, AppleSubscription):
|
||||
return sub.expires_date
|
||||
if isinstance(sub, ManualSubscription):
|
||||
return sub.end_at
|
||||
if isinstance(sub, CoinbaseSubscription):
|
||||
return sub.end_at
|
||||
return None
|
||||
|
||||
# region Billing
|
||||
def lifetime_or_active_subscription(self) -> bool:
|
||||
def lifetime_or_active_subscription(
|
||||
self, include_partner_subscription: bool = True
|
||||
) -> bool:
|
||||
"""True if user has lifetime licence or active subscription"""
|
||||
if self.lifetime:
|
||||
return True
|
||||
|
||||
return self.get_active_subscription() is not None
|
||||
return self.get_active_subscription(include_partner_subscription) is not None
|
||||
|
||||
def is_paid(self) -> bool:
|
||||
"""same as _lifetime_or_active_subscription but not include free manual subscription"""
|
||||
|
@ -694,14 +742,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
|
||||
return True
|
||||
|
||||
def is_premium(self) -> bool:
|
||||
def is_premium(self, include_partner_subscription: bool = True) -> bool:
|
||||
"""
|
||||
user is premium if they:
|
||||
- have a lifetime deal or
|
||||
- in trial period or
|
||||
- active subscription
|
||||
"""
|
||||
if self.lifetime_or_active_subscription():
|
||||
if self.lifetime_or_active_subscription(include_partner_subscription):
|
||||
return True
|
||||
|
||||
if self.trial_end and arrow.now() < self.trial_end:
|
||||
|
@ -790,6 +838,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
< self.max_alias_for_free_account()
|
||||
)
|
||||
|
||||
def can_send_or_receive(self) -> bool:
|
||||
if self.disabled:
|
||||
LOG.i(f"User {self} is disabled. Cannot receive or send emails")
|
||||
return False
|
||||
if self.delete_on is not None:
|
||||
LOG.i(
|
||||
f"User {self} is scheduled to be deleted. Cannot receive or send emails"
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
def profile_picture_url(self):
|
||||
if self.profile_picture_id:
|
||||
return self.profile_picture.get_url()
|
||||
|
@ -868,14 +927,16 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
def custom_domains(self):
|
||||
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
|
||||
|
||||
def available_domains_for_random_alias(self) -> List[Tuple[bool, str]]:
|
||||
def available_domains_for_random_alias(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> List[Tuple[bool, str]]:
|
||||
"""Return available domains for user to create random aliases
|
||||
Each result record contains:
|
||||
- whether the domain belongs to SimpleLogin
|
||||
- the domain
|
||||
"""
|
||||
res = []
|
||||
for domain in self.available_sl_domains():
|
||||
for domain in self.available_sl_domains(alias_options=alias_options):
|
||||
res.append((True, domain))
|
||||
|
||||
for custom_domain in self.verified_custom_domains():
|
||||
|
@ -960,30 +1021,65 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
|
||||
return None, "", False
|
||||
|
||||
def available_sl_domains(self) -> [str]:
|
||||
def available_sl_domains(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> [str]:
|
||||
"""
|
||||
Return all SimpleLogin domains that user can use when creating a new alias, including:
|
||||
- SimpleLogin public domains, available for all users (ALIAS_DOMAIN)
|
||||
- SimpleLogin premium domains, only available for Premium accounts (PREMIUM_ALIAS_DOMAIN)
|
||||
"""
|
||||
return [sl_domain.domain for sl_domain in self.get_sl_domains()]
|
||||
return [
|
||||
sl_domain.domain
|
||||
for sl_domain in self.get_sl_domains(alias_options=alias_options)
|
||||
]
|
||||
|
||||
def get_sl_domains(self) -> List["SLDomain"]:
|
||||
query = SLDomain.filter_by(hidden=False).order_by(SLDomain.order)
|
||||
def get_sl_domains(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> list["SLDomain"]:
|
||||
if alias_options is None:
|
||||
alias_options = AliasOptions()
|
||||
top_conds = [SLDomain.hidden == False] # noqa: E712
|
||||
or_conds = [] # noqa:E711
|
||||
if self.default_alias_public_domain_id is not None:
|
||||
default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
|
||||
if not self.is_premium():
|
||||
default_domain_conds.append(
|
||||
SLDomain.premium_only == False # noqa: E712
|
||||
)
|
||||
or_conds.append(and_(*default_domain_conds).self_group())
|
||||
if alias_options.show_partner_domains is not None:
|
||||
partner_user = PartnerUser.filter_by(
|
||||
user_id=self.id, partner_id=alias_options.show_partner_domains.id
|
||||
).first()
|
||||
if partner_user is not None:
|
||||
partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
|
||||
if alias_options.show_partner_premium is None:
|
||||
alias_options.show_partner_premium = self.is_premium()
|
||||
if not alias_options.show_partner_premium:
|
||||
partner_domain_cond.append(
|
||||
SLDomain.premium_only == False # noqa: E712
|
||||
)
|
||||
or_conds.append(and_(*partner_domain_cond).self_group())
|
||||
if alias_options.show_sl_domains:
|
||||
sl_conds = [SLDomain.partner_id == None] # noqa: E711
|
||||
if not self.is_premium():
|
||||
sl_conds.append(SLDomain.premium_only == False) # noqa: E712
|
||||
or_conds.append(and_(*sl_conds).self_group())
|
||||
top_conds.append(or_(*or_conds))
|
||||
query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
|
||||
return query.all()
|
||||
|
||||
if self.is_premium():
|
||||
return query.all()
|
||||
else:
|
||||
return query.filter_by(premium_only=False).all()
|
||||
|
||||
def available_alias_domains(self) -> [str]:
|
||||
def available_alias_domains(
|
||||
self, alias_options: Optional[AliasOptions] = None
|
||||
) -> [str]:
|
||||
"""return all domains that user can use when creating a new alias, including:
|
||||
- SimpleLogin public domains, available for all users (ALIAS_DOMAIN)
|
||||
- SimpleLogin premium domains, only available for Premium accounts (PREMIUM_ALIAS_DOMAIN)
|
||||
- Verified custom domains
|
||||
|
||||
"""
|
||||
domains = self.available_sl_domains()
|
||||
domains = self.available_sl_domains(alias_options=alias_options)
|
||||
|
||||
for custom_domain in self.verified_custom_domains():
|
||||
domains.append(custom_domain.domain)
|
||||
|
@ -1001,16 +1097,21 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
|||
> 0
|
||||
)
|
||||
|
||||
def get_random_alias_suffix(self):
|
||||
def get_random_alias_suffix(self, custom_domain: Optional["CustomDomain"] = None):
|
||||
"""Get random suffix for an alias based on user's preference.
|
||||
|
||||
Use a shorter suffix in case of custom domain
|
||||
|
||||
Returns:
|
||||
str: the random suffix generated
|
||||
"""
|
||||
if self.random_alias_suffix == AliasSuffixEnum.random_string.value:
|
||||
return random_string(config.ALIAS_RANDOM_SUFFIX_LENGTH, include_digits=True)
|
||||
return random_word()
|
||||
|
||||
if custom_domain is None:
|
||||
return random_words(1, 3)
|
||||
|
||||
return random_words(1)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<User {self.id} {self.name} {self.email}>"
|
||||
|
@ -1255,34 +1356,48 @@ class OauthToken(Base, ModelMixin):
|
|||
return self.expired < arrow.now()
|
||||
|
||||
|
||||
def generate_email(
|
||||
def available_sl_email(email: str) -> bool:
|
||||
if (
|
||||
Alias.get_by(email=email)
|
||||
or Contact.get_by(reply_email=email)
|
||||
or DeletedAlias.get_by(email=email)
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def generate_random_alias_email(
|
||||
scheme: int = AliasGeneratorEnum.word.value,
|
||||
in_hex: bool = False,
|
||||
alias_domain=config.FIRST_ALIAS_DOMAIN,
|
||||
alias_domain: str = config.FIRST_ALIAS_DOMAIN,
|
||||
retries: int = 10,
|
||||
) -> str:
|
||||
"""generate an email address that does not exist before
|
||||
:param alias_domain: the domain used to generate the alias.
|
||||
:param scheme: int, value of AliasGeneratorEnum, indicate how the email is generated
|
||||
:param retries: int, How many times we can try to generate an alias in case of collision
|
||||
:type in_hex: bool, if the generate scheme is uuid, is hex favorable?
|
||||
"""
|
||||
if retries <= 0:
|
||||
raise Exception("Cannot generate alias after many retries")
|
||||
if scheme == AliasGeneratorEnum.uuid.value:
|
||||
name = uuid.uuid4().hex if in_hex else uuid.uuid4().__str__()
|
||||
random_email = name + "@" + alias_domain
|
||||
else:
|
||||
random_email = random_words() + "@" + alias_domain
|
||||
random_email = random_words(2, 3) + "@" + alias_domain
|
||||
|
||||
random_email = random_email.lower().strip()
|
||||
|
||||
# check that the client does not exist yet
|
||||
if not Alias.get_by(email=random_email) and not DeletedAlias.get_by(
|
||||
email=random_email
|
||||
):
|
||||
if available_sl_email(random_email):
|
||||
LOG.d("generate email %s", random_email)
|
||||
return random_email
|
||||
|
||||
# Rerun the function
|
||||
LOG.w("email %s already exists, generate a new email", random_email)
|
||||
return generate_email(scheme=scheme, in_hex=in_hex)
|
||||
return generate_random_alias_email(
|
||||
scheme=scheme, in_hex=in_hex, retries=retries - 1
|
||||
)
|
||||
|
||||
|
||||
class Alias(Base, ModelMixin):
|
||||
|
@ -1364,7 +1479,7 @@ class Alias(Base, ModelMixin):
|
|||
)
|
||||
|
||||
# have I been pwned
|
||||
hibp_last_check = sa.Column(ArrowType, default=None)
|
||||
hibp_last_check = sa.Column(ArrowType, default=None, index=True)
|
||||
hibp_breaches = orm.relationship("Hibp", secondary="alias_hibp")
|
||||
|
||||
# to use Postgres full text search. Only applied on "note" column for now
|
||||
|
@ -1481,7 +1596,7 @@ class Alias(Base, ModelMixin):
|
|||
suffix = user.get_random_alias_suffix()
|
||||
email = f"{prefix}.{suffix}@{config.FIRST_ALIAS_DOMAIN}"
|
||||
|
||||
if not cls.get_by(email=email) and not DeletedAlias.get_by(email=email):
|
||||
if available_sl_email(email):
|
||||
break
|
||||
|
||||
return Alias.create(
|
||||
|
@ -1510,7 +1625,7 @@ class Alias(Base, ModelMixin):
|
|||
|
||||
if user.default_alias_custom_domain_id:
|
||||
custom_domain = CustomDomain.get(user.default_alias_custom_domain_id)
|
||||
random_email = generate_email(
|
||||
random_email = generate_random_alias_email(
|
||||
scheme=scheme, in_hex=in_hex, alias_domain=custom_domain.domain
|
||||
)
|
||||
elif user.default_alias_public_domain_id:
|
||||
|
@ -1518,12 +1633,12 @@ class Alias(Base, ModelMixin):
|
|||
if sl_domain.premium_only and not user.is_premium():
|
||||
LOG.w("%s not premium, cannot use %s", user, sl_domain)
|
||||
else:
|
||||
random_email = generate_email(
|
||||
random_email = generate_random_alias_email(
|
||||
scheme=scheme, in_hex=in_hex, alias_domain=sl_domain.domain
|
||||
)
|
||||
|
||||
if not random_email:
|
||||
random_email = generate_email(scheme=scheme, in_hex=in_hex)
|
||||
random_email = generate_random_alias_email(scheme=scheme, in_hex=in_hex)
|
||||
|
||||
alias = Alias.create(
|
||||
user_id=user.id,
|
||||
|
@ -1557,7 +1672,9 @@ class ClientUser(Base, ModelMixin):
|
|||
client_id = sa.Column(sa.ForeignKey(Client.id, ondelete="cascade"), nullable=False)
|
||||
|
||||
# Null means client has access to user original email
|
||||
alias_id = sa.Column(sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=True)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=True, index=True
|
||||
)
|
||||
|
||||
# user can decide to send to client another name
|
||||
name = sa.Column(
|
||||
|
@ -1641,6 +1758,8 @@ class Contact(Base, ModelMixin):
|
|||
Store configuration of sender (website-email) and alias.
|
||||
"""
|
||||
|
||||
MAX_NAME_LENGTH = 512
|
||||
|
||||
__tablename__ = "contact"
|
||||
|
||||
__table_args__ = (
|
||||
|
@ -1674,7 +1793,7 @@ class Contact(Base, ModelMixin):
|
|||
is_cc = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
pgp_public_key = sa.Column(sa.Text, nullable=True)
|
||||
pgp_finger_print = sa.Column(sa.String(512), nullable=True)
|
||||
pgp_finger_print = sa.Column(sa.String(512), nullable=True, index=True)
|
||||
|
||||
alias = orm.relationship(Alias, backref="contacts")
|
||||
user = orm.relationship(User)
|
||||
|
@ -1828,6 +1947,7 @@ class Contact(Base, ModelMixin):
|
|||
|
||||
class EmailLog(Base, ModelMixin):
|
||||
__tablename__ = "email_log"
|
||||
__table_args__ = (Index("ix_email_log_created_at", "created_at"),)
|
||||
|
||||
user_id = sa.Column(
|
||||
sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
|
||||
|
@ -2085,7 +2205,9 @@ class AliasUsedOn(Base, ModelMixin):
|
|||
sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
|
||||
)
|
||||
|
||||
alias_id = sa.Column(sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False)
|
||||
alias_id = sa.Column(
|
||||
sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
|
||||
)
|
||||
user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)
|
||||
|
||||
alias = orm.relationship(Alias)
|
||||
|
@ -2204,6 +2326,7 @@ class CustomDomain(Base, ModelMixin):
|
|||
@classmethod
|
||||
def create(cls, **kwargs):
|
||||
domain = kwargs.get("domain")
|
||||
kwargs["domain"] = domain.replace("\n", "")
|
||||
if DeletedSubdomain.get_by(domain=domain):
|
||||
raise SubdomainInTrashError
|
||||
|
||||
|
@ -2471,6 +2594,28 @@ class Mailbox(Base, ModelMixin):
|
|||
+ Alias.filter_by(mailbox_id=self.id).count()
|
||||
)
|
||||
|
||||
def is_proton(self) -> bool:
|
||||
if (
|
||||
self.email.endswith("@proton.me")
|
||||
or self.email.endswith("@protonmail.com")
|
||||
or self.email.endswith("@protonmail.ch")
|
||||
or self.email.endswith("@proton.ch")
|
||||
or self.email.endswith("@pm.me")
|
||||
):
|
||||
return True
|
||||
|
||||
from app.email_utils import get_email_local_part
|
||||
|
||||
mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
|
||||
# Proton is the first domain
|
||||
if mx_domains and mx_domains[0][1] in (
|
||||
"mail.protonmail.ch.",
|
||||
"mailsec.protonmail.ch.",
|
||||
):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def delete(cls, obj_id):
|
||||
mailbox: Mailbox = cls.get(obj_id)
|
||||
|
@ -2503,6 +2648,12 @@ class Mailbox(Base, ModelMixin):
|
|||
|
||||
return ret
|
||||
|
||||
@classmethod
|
||||
def create(cls, **kw):
|
||||
if "email" in kw:
|
||||
kw["email"] = sanitize_email(kw["email"])
|
||||
return super().create(**kw)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Mailbox {self.id} {self.email}>"
|
||||
|
||||
|
@ -2762,6 +2913,31 @@ class Notification(Base, ModelMixin):
|
|||
)
|
||||
|
||||
|
||||
class Partner(Base, ModelMixin):
|
||||
__tablename__ = "partner"
|
||||
|
||||
name = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
contact_email = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
|
||||
@staticmethod
|
||||
def find_by_token(token: str) -> Optional[Partner]:
|
||||
hmaced = PartnerApiToken.hmac_token(token)
|
||||
res = (
|
||||
Session.query(Partner, PartnerApiToken)
|
||||
.filter(
|
||||
and_(
|
||||
PartnerApiToken.token == hmaced,
|
||||
Partner.id == PartnerApiToken.partner_id,
|
||||
)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if res:
|
||||
partner, partner_api_token = res
|
||||
return partner
|
||||
return None
|
||||
|
||||
|
||||
class SLDomain(Base, ModelMixin):
|
||||
"""SimpleLogin domains"""
|
||||
|
||||
|
@ -2779,12 +2955,23 @@ class SLDomain(Base, ModelMixin):
|
|||
sa.Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
partner_id = sa.Column(
|
||||
sa.ForeignKey(Partner.id, ondelete="cascade"),
|
||||
nullable=True,
|
||||
default=None,
|
||||
server_default="NULL",
|
||||
)
|
||||
|
||||
# if enabled, do not show this domain when user creates a custom alias
|
||||
hidden = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
# the order in which the domains are shown when user creates a custom alias
|
||||
order = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")
|
||||
|
||||
use_as_reverse_alias = sa.Column(
|
||||
sa.Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"
|
||||
|
||||
|
@ -2805,6 +2992,8 @@ class Monitoring(Base, ModelMixin):
|
|||
active_queue = sa.Column(sa.Integer, nullable=False)
|
||||
deferred_queue = sa.Column(sa.Integer, nullable=False)
|
||||
|
||||
__table_args__ = (Index("ix_monitoring_created_at", "created_at"),)
|
||||
|
||||
|
||||
class BatchImport(Base, ModelMixin):
|
||||
__tablename__ = "batch_import"
|
||||
|
@ -2930,6 +3119,8 @@ class Bounce(Base, ModelMixin):
|
|||
email = sa.Column(sa.String(256), nullable=False, index=True)
|
||||
info = sa.Column(sa.Text, nullable=True)
|
||||
|
||||
__table_args__ = (sa.Index("ix_bounce_created_at", "created_at"),)
|
||||
|
||||
|
||||
class TransactionalEmail(Base, ModelMixin):
|
||||
"""Storing all email addresses that receive transactional emails, including account email and mailboxes.
|
||||
|
@ -2939,6 +3130,8 @@ class TransactionalEmail(Base, ModelMixin):
|
|||
__tablename__ = "transactional_email"
|
||||
email = sa.Column(sa.String(256), nullable=False, unique=False)
|
||||
|
||||
__table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
|
||||
|
||||
|
||||
class Payout(Base, ModelMixin):
|
||||
"""Referral payouts"""
|
||||
|
@ -2991,7 +3184,7 @@ class MessageIDMatching(Base, ModelMixin):
|
|||
|
||||
# to track what email_log that has created this matching
|
||||
email_log_id = sa.Column(
|
||||
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True
|
||||
sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
|
||||
)
|
||||
|
||||
email_log = orm.relationship("EmailLog")
|
||||
|
@ -3225,31 +3418,6 @@ class ProviderComplaint(Base, ModelMixin):
|
|||
refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])
|
||||
|
||||
|
||||
class Partner(Base, ModelMixin):
|
||||
__tablename__ = "partner"
|
||||
|
||||
name = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
contact_email = sa.Column(sa.String(128), unique=True, nullable=False)
|
||||
|
||||
@staticmethod
|
||||
def find_by_token(token: str) -> Optional[Partner]:
|
||||
hmaced = PartnerApiToken.hmac_token(token)
|
||||
res = (
|
||||
Session.query(Partner, PartnerApiToken)
|
||||
.filter(
|
||||
and_(
|
||||
PartnerApiToken.token == hmaced,
|
||||
Partner.id == PartnerApiToken.partner_id,
|
||||
)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if res:
|
||||
partner, partner_api_token = res
|
||||
return partner
|
||||
return None
|
||||
|
||||
|
||||
class PartnerApiToken(Base, ModelMixin):
|
||||
__tablename__ = "partner_api_token"
|
||||
|
||||
|
@ -3319,7 +3487,7 @@ class PartnerSubscription(Base, ModelMixin):
|
|||
)
|
||||
|
||||
# when the partner subscription ends
|
||||
end_at = sa.Column(ArrowType, nullable=False)
|
||||
end_at = sa.Column(ArrowType, nullable=False, index=True)
|
||||
|
||||
partner_user = orm.relationship(PartnerUser)
|
||||
|
||||
|
@ -3349,7 +3517,7 @@ class PartnerSubscription(Base, ModelMixin):
|
|||
|
||||
class Newsletter(Base, ModelMixin):
|
||||
__tablename__ = "newsletter"
|
||||
subject = sa.Column(sa.String(), nullable=False, unique=True, index=True)
|
||||
subject = sa.Column(sa.String(), nullable=False, index=True)
|
||||
|
||||
html = sa.Column(sa.Text)
|
||||
plain_text = sa.Column(sa.Text)
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
from . import views
|
||||
|
||||
__all__ = ["views"]
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
from .views import authorize, token, user_info
|
||||
|
||||
__all__ = ["authorize", "token", "user_info"]
|
||||
|
|
|
@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
|
|||
- the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
|
||||
"""
|
||||
res = set()
|
||||
if type(arg_input) is str:
|
||||
if isinstance(arg_input, str):
|
||||
if " " in arg_input:
|
||||
for x in arg_input.split(" "):
|
||||
if x:
|
||||
|
|
|
@ -5,3 +5,11 @@ from .views import (
|
|||
account_activated,
|
||||
extension_redirect,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"index",
|
||||
"final",
|
||||
"setup_done",
|
||||
"account_activated",
|
||||
"extension_redirect",
|
||||
]
|
||||
|
|
|
@ -39,7 +39,6 @@ class _InnerLock:
|
|||
lock_redis.storage.delete(lock_name)
|
||||
|
||||
def __call__(self, f: Callable[..., Any]):
|
||||
|
||||
if self.lock_suffix is None:
|
||||
lock_suffix = f.__name__
|
||||
else:
|
||||
|
|
|
@ -5,3 +5,11 @@ from .views import (
|
|||
provider1_callback,
|
||||
provider2_callback,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"index",
|
||||
"phone_reservation",
|
||||
"twilio_callback",
|
||||
"provider1_callback",
|
||||
"provider2_callback",
|
||||
]
|
||||
|
|
|
@@ -7,11 +7,12 @@ from typing import Optional
 
 from app.account_linking import SLPlan, SLPlanType
 from app.config import PROTON_EXTRA_HEADER_NAME, PROTON_EXTRA_HEADER_VALUE
+from app.errors import ProtonAccountNotVerified
 from app.log import LOG
 
 _APP_VERSION = "OauthClient_1.0.0"
 
 PROTON_ERROR_CODE_NOT_EXISTS = 2501
+PROTON_ERROR_CODE_HV_NEEDED = 9001
 
 PLAN_FREE = 1
 PLAN_PREMIUM = 2

@@ -57,6 +58,15 @@ def convert_access_token(access_token_response: str) -> AccessCredentials:
     )
 
 
+def handle_response_not_ok(status: int, body: dict, text: str) -> Exception:
+    if status == HTTPStatus.UNPROCESSABLE_ENTITY:
+        res_code = body.get("Code")
+        if res_code == PROTON_ERROR_CODE_HV_NEEDED:
+            return ProtonAccountNotVerified()
+
+    return Exception(f"Unexpected status code. Wanted 200 and got {status}: " + text)
+
+
 class ProtonClient(ABC):
     @abstractmethod
     def get_user(self) -> Optional[UserInformation]:

@@ -124,11 +134,11 @@ class HttpProtonClient(ProtonClient):
     @staticmethod
     def __validate_response(res: Response) -> dict:
         status = res.status_code
-        if status != HTTPStatus.OK:
-            raise Exception(
-                f"Unexpected status code. Wanted 200 and got {status}: " + res.text
-            )
         as_json = res.json()
+        if status != HTTPStatus.OK:
+            raise HttpProtonClient.__handle_response_not_ok(
+                status=status, body=as_json, text=res.text
+            )
         res_code = as_json.get("Code")
         if not res_code or res_code != 1000:
             raise Exception(
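For illustration only, not part of this changeset: how the new helper maps Proton's "human verification needed" error to the dedicated exception, assuming the helper and constants above are in scope. The literal response values below are made up.

```python
from http import HTTPStatus

# Hypothetical 422 response carrying Proton's human-verification error code
err = handle_response_not_ok(
    status=HTTPStatus.UNPROCESSABLE_ENTITY,
    body={"Code": PROTON_ERROR_CODE_HV_NEEDED},
    text='{"Code": 9001}',
)
assert isinstance(err, ProtonAccountNotVerified)

# Any other failure falls back to a generic Exception for the caller to raise
other = handle_response_not_ok(status=500, body={}, text="Internal error")
assert not isinstance(other, ProtonAccountNotVerified)
```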
@@ -6,7 +6,6 @@ from app.session import RedisSessionStore
 
 
 def initialize_redis_services(app: flask.Flask, redis_url: str):
-
     if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
         storage = limits.storage.RedisStorage(redis_url)
         app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
@@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
         try:
             data = pickle.loads(val)
             return ServerSession(data, session_id=session_id)
-        except:
+        except Exception:
             pass
         return ServerSession(session_id=str(uuid.uuid4()))
 
app/subscription_webhook.py (new file, 33 lines)
@@ -0,0 +1,33 @@
import requests
from requests import RequestException

from app import config
from app.log import LOG
from app.models import User


def execute_subscription_webhook(user: User):
    webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
    if webhook_url is None:
        return
    subscription_end = user.get_active_subscription_end(
        include_partner_subscription=False
    )
    sl_subscription_end = None
    if subscription_end:
        sl_subscription_end = subscription_end.timestamp
    payload = {
        "user_id": user.id,
        "is_premium": user.is_premium(),
        "active_subscription_end": sl_subscription_end,
    }
    try:
        response = requests.post(webhook_url, json=payload, timeout=2)
        if response.status_code == 200:
            LOG.i("Sent request to subscription update webhook successfully")
        else:
            LOG.i(
                f"Request to webhook failed with status {response.status_code}: {response.text}"
            )
    except RequestException as e:
        LOG.error(f"Subscription request exception: {e}")
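For illustration only, not part of this changeset: a minimal sketch of an endpoint that could sit behind `SUBSCRIPTION_CHANGE_WEBHOOK` and consume the payload built above. The framework choice, route name and logging on the receiving side are assumptions.

```python
# Hypothetical receiver for the subscription-change webhook payload.
from flask import Flask, jsonify, request

app = Flask(__name__)


@app.route("/hooks/subscription-change", methods=["POST"])
def subscription_change():
    payload = request.get_json(silent=True) or {}
    user_id = payload.get("user_id")
    is_premium = payload.get("is_premium", False)
    # Unix timestamp of the active subscription end, or None
    active_subscription_end = payload.get("active_subscription_end")

    # Replace with whatever the receiving side needs to do with the update
    app.logger.info(
        "subscription change: user=%s premium=%s ends=%s",
        user_id,
        is_premium,
        active_subscription_end,
    )
    # The sender treats any non-200 status as a failure
    return jsonify({"ok": True}), 200
```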
app/utils.py (14 changed lines)
@@ -1,3 +1,4 @@
+import random
 import re
 import secrets
 import string

@@ -25,11 +26,16 @@ def word_exist(word):
     return word in _words
 
 
-def random_words():
+def random_words(words: int = 2, numbers: int = 0):
     """Generate random words. Used to generate user-facing strings, e.g. email addresses"""
-    # nb_words = random.randint(2, 3)
-    nb_words = 2
-    return "_".join([secrets.choice(_words) for i in range(nb_words)])
+    fields = [secrets.choice(_words) for i in range(words)]
+
+    if numbers > 0:
+        digits = "".join([str(random.randint(0, 9)) for i in range(numbers)])
+        return "_".join(fields) + digits
+    else:
+        return "_".join(fields)
 
 
 def random_string(length=10, include_digits=False):

@@ -93,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
     email_address = email_address.strip().replace(" ", "").replace("\n", " ")
     if not not_lower:
         email_address = email_address.lower()
-    return email_address
+    return email_address.replace("\u200f", "")
 
 
 class NextUrlSanitizer:
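A quick illustration, not part of the diff, of the new `random_words` signature; the concrete words below are made up, since real output depends on the bundled word list.

```python
from app.utils import random_words

# Two words, no digits (the previous default), e.g. "purple_cloud"
print(random_words())

# Three words followed by two random digits, e.g. "purple_cloud_river42"
print(random_words(words=3, numbers=2))
```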
cron.py (93 changed lines)
@@ -5,11 +5,11 @@ from typing import List, Tuple
 
 import arrow
 import requests
-from sqlalchemy import func, desc, or_
+from sqlalchemy import func, desc, or_, and_
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
-from sqlalchemy.sql import Insert
+from sqlalchemy.sql import Insert, text
 
 from app import s3, config
 from app.alias_utils import nb_email_log_for_mailbox

@@ -22,10 +22,9 @@ from app.email_utils import (
     render,
     email_can_be_used_as_mailbox,
     send_email_with_rate_control,
-    normalize_reply_email,
-    is_valid_email,
     get_email_domain_part,
 )
+from app.email_validation import is_valid_email, normalize_reply_email
 from app.errors import ProtonPartnerNotSetUp
 from app.log import LOG
 from app.mail_sender import load_unsent_mails_from_fs_and_resend

@@ -66,12 +65,14 @@ from server import create_light_app
 
 def notify_trial_end():
     for user in User.filter(
-        User.activated.is_(True), User.trial_end.isnot(None), User.lifetime.is_(False)
+        User.activated.is_(True),
+        User.trial_end.isnot(None),
+        User.trial_end >= arrow.now().shift(days=2),
+        User.trial_end < arrow.now().shift(days=3),
+        User.lifetime.is_(False),
     ).all():
         try:
-            if user.in_trial() and arrow.now().shift(
-                days=3
-            ) > user.trial_end >= arrow.now().shift(days=2):
+            if user.in_trial():
                 LOG.d("Send trial end email to user %s", user)
                 send_trial_end_soon_email(user)
         # happens if user has been deleted in the meantime

@@ -84,27 +85,49 @@ def delete_logs():
     delete_refused_emails()
     delete_old_monitoring()
 
-    for t in TransactionalEmail.filter(
+    for t_email in TransactionalEmail.filter(
         TransactionalEmail.created_at < arrow.now().shift(days=-7)
     ):
-        TransactionalEmail.delete(t.id)
+        TransactionalEmail.delete(t_email.id)
 
     for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
         Bounce.delete(b.id)
 
     Session.commit()
 
-    LOG.d("Delete EmailLog older than 2 weeks")
+    LOG.d("Deleting EmailLog older than 2 weeks")
 
-    max_dt = arrow.now().shift(weeks=-2)
-    nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete()
-    Session.commit()
+    total_deleted = 0
+    batch_size = 500
+    Session.execute("set session statement_timeout=30000").rowcount
+    queries_done = 0
+    cutoff_time = arrow.now().shift(days=-14)
+    rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
+    expected_queries = int(rows_to_delete / batch_size)
+    sql = text(
+        "DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
+    )
+    str_cutoff_time = cutoff_time.isoformat()
+    while total_deleted < rows_to_delete:
+        deleted_count = Session.execute(
+            sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
+        ).rowcount
+        Session.commit()
+        total_deleted += deleted_count
+        queries_done += 1
+        LOG.i(
+            f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
+        )
+        if deleted_count < batch_size:
+            break
 
-    LOG.i("Delete %s email logs", nb_deleted)
+    LOG.i("Deleted %s email logs", total_deleted)
 
 
 def delete_refused_emails():
-    for refused_email in RefusedEmail.filter_by(deleted=False).all():
+    for refused_email in (
+        RefusedEmail.filter_by(deleted=False).order_by(RefusedEmail.id).all()
+    ):
         if arrow.now().shift(days=1) > refused_email.delete_at >= arrow.now():
             LOG.d("Delete refused email %s", refused_email)
             if refused_email.path:

@@ -138,7 +161,7 @@ def notify_premium_end():
 
         send_email(
             user.email,
-            f"Your subscription will end soon",
+            "Your subscription will end soon",
             render(
                 "transactional/subscription-end.txt",
                 user=user,

@@ -195,7 +218,7 @@ def notify_manual_sub_end():
         LOG.d("Remind user %s that their manual sub is ending soon", user)
         send_email(
             user.email,
-            f"Your subscription will end soon",
+            "Your subscription will end soon",
             render(
                 "transactional/manual-subscription-end.txt",
                 user=user,

@@ -272,7 +295,11 @@ def compute_metric2() -> Metric2:
     _24h_ago = now.shift(days=-1)
 
     nb_referred_user_paid = 0
-    for user in User.filter(User.referral_id.isnot(None)):
+    for user in (
+        User.filter(User.referral_id.isnot(None))
+        .yield_per(500)
+        .enable_eagerloads(False)
+    ):
         if user.is_paid():
             nb_referred_user_paid += 1
 

@@ -563,21 +590,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
 """
 
     monitoring_report += "\n====================================\n"
-    monitoring_report += f"""
+    monitoring_report += """
 # Account bounce report:
 """
 
     for email, bounces in bounce_report():
         monitoring_report += f"{email}: {bounces}\n"
 
-    monitoring_report += f"""\n
+    monitoring_report += """\n
 # Alias creation report:
 """
 
     for email, nb_alias, date in alias_creation_report():
         monitoring_report += f"{email}, {date}: {nb_alias}\n"
 
-    monitoring_report += f"""\n
+    monitoring_report += """\n
 # Full bounce detail report:
 """
     monitoring_report += all_bounce_report()

@@ -1020,7 +1047,8 @@ async def check_hibp():
             )
             .filter(Alias.enabled)
             .order_by(Alias.hibp_last_check.asc())
-            .all()
+            .yield_per(500)
+            .enable_eagerloads(False)
     ):
         await queue.put(alias.id)
 

@@ -1071,14 +1099,14 @@ def notify_hibp():
         )
 
         LOG.d(
-            f"Send new breaches found email to %s for %s breaches aliases",
+            "Send new breaches found email to %s for %s breaches aliases",
            user,
            len(breached_aliases),
         )
 
         send_email(
             user.email,
-            f"You were in a data breach",
+            "You were in a data breach",
             render(
                 "transactional/hibp-new-breaches.txt.jinja2",
                 user=user,

@@ -1098,6 +1126,18 @@ def notify_hibp():
     Session.commit()
 
 
+def clear_users_scheduled_to_be_deleted():
+    users = User.filter(
+        and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
+    ).all()
+    for user in users:
+        LOG.i(
+            f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
+        )
+        User.delete(user.id)
+    Session.commit()
+
+
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()

@@ -1164,3 +1204,6 @@ if __name__ == "__main__":
     elif args.job == "send_undelivered_mails":
         LOG.d("Sending undelivered emails")
         load_unsent_mails_from_fs_and_resend()
+    elif args.job == "delete_scheduled_users":
+        LOG.d("Deleting users scheduled to be deleted")
+        clear_users_scheduled_to_be_deleted()
crontab.yml (81 changed lines)
@@ -5,65 +5,66 @@ jobs:
     schedule: "0 0 * * *"
     captureStderr: true
 
-  - name: SimpleLogin Notify Trial Ends
-    command: python /code/cron.py -j notify_trial_end
-    shell: /bin/bash
-    schedule: "0 8 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Manual Subscription Ends
-    command: python /code/cron.py -j notify_manual_subscription_end
-    shell: /bin/bash
-    schedule: "0 9 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Premium Ends
-    command: python /code/cron.py -j notify_premium_end
-    shell: /bin/bash
-    schedule: "0 10 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Delete Logs
-    command: python /code/cron.py -j delete_logs
-    shell: /bin/bash
-    schedule: "0 11 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Poll Apple Subscriptions
-    command: python /code/cron.py -j poll_apple_subscription
-    shell: /bin/bash
-    schedule: "0 12 * * *"
-    captureStderr: true
-
   - name: SimpleLogin Sanity Check
     command: python /code/cron.py -j sanity_check
     shell: /bin/bash
     schedule: "0 2 * * *"
     captureStderr: true
 
   - name: SimpleLogin Delete Old Monitoring records
     command: python /code/cron.py -j delete_old_monitoring
     shell: /bin/bash
-    schedule: "0 14 * * *"
+    schedule: "15 1 * * *"
     captureStderr: true
 
   - name: SimpleLogin Custom Domain check
     command: python /code/cron.py -j check_custom_domain
     shell: /bin/bash
-    schedule: "0 15 * * *"
+    schedule: "15 2 * * *"
     captureStderr: true
 
   - name: SimpleLogin HIBP check
     command: python /code/cron.py -j check_hibp
     shell: /bin/bash
-    schedule: "0 18 * * *"
+    schedule: "15 3 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
 
   - name: SimpleLogin Notify HIBP breaches
     command: python /code/cron.py -j notify_hibp
     shell: /bin/bash
-    schedule: "0 19 * * *"
+    schedule: "15 4 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid
 
+  - name: SimpleLogin Delete Logs
+    command: python /code/cron.py -j delete_logs
+    shell: /bin/bash
+    schedule: "15 5 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Poll Apple Subscriptions
+    command: python /code/cron.py -j poll_apple_subscription
+    shell: /bin/bash
+    schedule: "15 6 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Trial Ends
+    command: python /code/cron.py -j notify_trial_end
+    shell: /bin/bash
+    schedule: "15 8 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Manual Subscription Ends
+    command: python /code/cron.py -j notify_manual_subscription_end
+    shell: /bin/bash
+    schedule: "15 9 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Premium Ends
+    command: python /code/cron.py -j notify_premium_end
+    shell: /bin/bash
+    schedule: "15 10 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin delete users scheduled to be deleted
+    command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
+    shell: /bin/bash
+    schedule: "15 11 * * *"
+    captureStderr: true
+    concurrencyPolicy: Forbid
+
docs/api.md (19 changed lines)
@@ -15,6 +15,7 @@
 - [GET /api/user/cookie_token](#get-apiusercookie_token): Get a one time use token to exchange it for a valid cookie
 - [PATCH /api/user_info](#patch-apiuser_info): Update user's information.
 - [POST /api/api_key](#post-apiapi_key): Create a new API key.
+- [GET /api/stats](#get-apistats): Get user's stats.
 - [GET /api/logout](#get-apilogout): Log out.
 
 [Alias endpoints](#alias-endpoints)

@@ -226,6 +227,22 @@ Input:
 
 Output: same as GET /api/user_info
 
+#### GET /api/stats
+
+Given the API Key, return stats about the number of aliases, number of emails forwarded/replied/blocked
+
+Input:
+
+- `Authentication` header that contains the api key
+
+Output: if api key is correct, return a json with the following fields:
+
+```json
+{"nb_alias": 1, "nb_block": 0, "nb_forward": 0, "nb_reply": 0}
+```
+
+If api key is incorrect, return 401.
+
 #### PATCH /api/sudo
 
 Enable sudo mode

@@ -694,7 +711,7 @@ Return 200 and `existed=true` if contact is already added.
 
 It can return 403 with an error if the user cannot create reverse alias.
 
-``json
+```json
 {
     "error": "Please upgrade to create a reverse-alias"
 }
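For illustration, not part of the documented diff: calling the newly documented stats endpoint with the `requests` library. The base URL and API key are placeholders.

```python
import requests

BASE_URL = "https://app.mydomain.com"  # placeholder: your SimpleLogin instance
API_KEY = "your-api-key"               # placeholder

res = requests.get(f"{BASE_URL}/api/stats", headers={"Authentication": API_KEY})
print(res.status_code)  # 401 if the api key is incorrect
print(res.json())       # e.g. {"nb_alias": 1, "nb_block": 0, "nb_forward": 0, "nb_reply": 0}
```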
docs/ssl.md (123 changed lines)
@@ -1,4 +1,4 @@
-# SSL, HTTPS, and HSTS
+# SSL, HTTPS, HSTS and additional security measures
 
 It's highly recommended to enable SSL/TLS on your server, both for the web app and email server.
 

@@ -58,3 +58,124 @@ Now, reload Nginx:
 ```bash
 sudo systemctl reload nginx
 ```
+
+## Additional security measures
+
+For additional security, we recommend you take some extra steps.
+
+### Enable Certificate Authority Authorization (CAA)
+
+[Certificate Authority Authorization](https://letsencrypt.org/docs/caa/) is a step you can take to restrict the list of certificate authorities that are allowed to issue certificates for your domains.
+
+Use [SSLMate’s CAA Record Generator](https://sslmate.com/caa/) to create a **CAA record** with the following configuration:
+
+- `flags`: `0`
+- `tag`: `issue`
+- `value`: `"letsencrypt.org"`
+
+To verify that the DNS record works, the following command
+
+```bash
+dig @1.1.1.1 mydomain.com caa
+```
+
+should return:
+
+```
+mydomain.com. 3600 IN CAA 0 issue "letsencrypt.org"
+```
+
+### SMTP MTA Strict Transport Security (MTA-STS)
+
+[MTA-STS](https://datatracker.ietf.org/doc/html/rfc8461) is an extra step you can take to broadcast the ability of your instance to receive and, optionally enforce, TLS-secure SMTP connections to protect email traffic.
+
+Enabling MTA-STS requires you to serve a specific file from the subdomain `mta-sts.domain.com` on a well-known route.
+
+Create a text file `/var/www/.well-known/mta-sts.txt` with the content:
+
+```txt
+version: STSv1
+mode: testing
+mx: app.mydomain.com
+max_age: 86400
+```
+
+It is recommended to start with `mode: testing` to give yourself time to review failure reports. Add as many `mx:` domain entries as you have matching **MX records** in your DNS configuration.
+
+Create a **TXT record** for `_mta-sts.mydomain.com.` with the following value:
+
+```txt
+v=STSv1; id=UNIX_TIMESTAMP
+```
+
+With `UNIX_TIMESTAMP` being the current date/time.
+
+Use the following command to generate the record:
+
+```bash
+echo "v=STSv1; id=$(date +%s)"
+```
+
+To verify that the DNS record works, the following command
+
+```bash
+dig @1.1.1.1 _mta-sts.mydomain.com txt
+```
+
+should return a result similar to this one:
+
+```
+_mta-sts.mydomain.com. 3600 IN TXT "v=STSv1; id=1689416399"
+```
+
+Create an additional Nginx configuration in `/etc/nginx/sites-enabled/mta-sts` with the following content:
+
+```
+server {
+    server_name mta-sts.mydomain.com;
+    root /var/www;
+    listen 80;
+
+    location ^~ /.well-known {}
+}
+```
+
+Restart Nginx with the following command:
+
+```sh
+sudo service nginx restart
+```
+
+A correct configuration of MTA-STS, however, requires that the certificate used to host the `mta-sts` subdomain matches that of the subdomain referred to by the **MX record** from the DNS. In other words, both `mta-sts.mydomain.com` and `app.mydomain.com` must share the same certificate.
+
+The easiest way to do this is to _expand_ the certificate associated with `app.mydomain.com` to also support the `mta-sts` subdomain using the following command:
+
+```sh
+certbot --expand --nginx -d app.mydomain.com,mta-sts.mydomain.com
+```
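As a quick sanity check, not part of the documentation being added here, you can confirm that the policy file is reachable where MTA-STS clients will look for it. This assumes the certificate expansion step above has been completed, since the policy must be served over HTTPS; the domain below is a placeholder.

```python
import requests

# Placeholder domain: the policy must be reachable on this exact well-known path
res = requests.get("https://mta-sts.mydomain.com/.well-known/mta-sts.txt", timeout=10)
print(res.status_code)  # expect 200
print(res.text)         # expect the STSv1 policy created above
```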
+
+## SMTP TLS Reporting
+
+[TLSRPT](https://datatracker.ietf.org/doc/html/rfc8460) is used by SMTP systems to report failures in establishing TLS-secure sessions as broadcast by the MTA-STS configuration.
+
+Configuring MTA-STS in `mode: testing` as shown in the previous section gives you time to review failures from some SMTP senders.
+
+Create a **TXT record** for `_smtp._tls.mydomain.com.` with the following value:
+
+```txt
+v=TLSRPTv1; rua=mailto:YOUR_EMAIL
+```
+
+The TLSRPT configuration at the DNS level allows SMTP senders that fail to initiate TLS-secure sessions to send reports to a particular email address. We suggest creating a `tls-reports` alias in SimpleLogin for this purpose.
+
+To verify that the DNS record works, the following command
+
+```bash
+dig @1.1.1.1 _smtp._tls.mydomain.com txt
+```
+
+should return a result similar to this one:
+
+```
+_smtp._tls.mydomain.com. 3600 IN TXT "v=TLSRPTv1; rua=mailto:tls-reports@mydomain.com"
+```
203
email_handler.py
203
email_handler.py
|
@ -106,8 +106,6 @@ from app.email_utils import (
|
|||
get_header_unicode,
|
||||
generate_reply_email,
|
||||
is_reverse_alias,
|
||||
normalize_reply_email,
|
||||
is_valid_email,
|
||||
replace,
|
||||
should_disable,
|
||||
parse_id_from_bounce,
|
||||
|
@ -123,6 +121,7 @@ from app.email_utils import (
|
|||
generate_verp_email,
|
||||
sl_formataddr,
|
||||
)
|
||||
from app.email_validation import is_valid_email, normalize_reply_email
|
||||
from app.errors import (
|
||||
NonReverseAliasInReplyPhase,
|
||||
VERPTransactional,
|
||||
|
@ -161,6 +160,7 @@ from app.models import (
|
|||
MessageIDMatching,
|
||||
Notification,
|
||||
VerpType,
|
||||
SLDomain,
|
||||
)
|
||||
from app.pgp_utils import (
|
||||
PGPException,
|
||||
|
@ -168,7 +168,7 @@ from app.pgp_utils import (
|
|||
sign_data,
|
||||
load_public_key_and_check,
|
||||
)
|
||||
from app.utils import sanitize_email
|
||||
from app.utils import sanitize_email, canonicalize_email
|
||||
from init_app import load_pgp_public_keys
|
||||
from server import create_light_app
|
||||
|
||||
|
@ -182,6 +182,10 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
|||
except ValueError:
|
||||
contact_name, contact_email = "", ""
|
||||
|
||||
# Ensure contact_name is within limits
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if not is_valid_email(contact_email):
|
||||
# From header is wrongly formatted, try with mail_from
|
||||
if mail_from and mail_from != "<>":
|
||||
|
@ -231,7 +235,6 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
|||
contact.mail_from = mail_from
|
||||
Session.commit()
|
||||
else:
|
||||
|
||||
try:
|
||||
contact = Contact.create(
|
||||
user_id=alias.user_id,
|
||||
|
@ -239,7 +242,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
|||
website_email=contact_email,
|
||||
name=contact_name,
|
||||
mail_from=mail_from,
|
||||
reply_email=generate_reply_email(contact_email, alias.user)
|
||||
reply_email=generate_reply_email(contact_email, alias)
|
||||
if is_valid_email(contact_email)
|
||||
else NOREPLY,
|
||||
automatic_created=True,
|
||||
|
@ -257,7 +260,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
|
|||
|
||||
Session.commit()
|
||||
except IntegrityError:
|
||||
LOG.w("Contact %s %s already exist", alias, contact_email)
|
||||
LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
|
||||
Session.rollback()
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
|
||||
|
@ -275,6 +278,9 @@ def get_or_create_reply_to_contact(
|
|||
except ValueError:
|
||||
return
|
||||
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if not is_valid_email(contact_address):
|
||||
LOG.w(
|
||||
"invalid reply-to address %s. Parse from %s",
|
||||
|
@ -300,7 +306,7 @@ def get_or_create_reply_to_contact(
|
|||
alias_id=alias.id,
|
||||
website_email=contact_address,
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_address, alias.user),
|
||||
reply_email=generate_reply_email(contact_address, alias),
|
||||
automatic_created=True,
|
||||
)
|
||||
Session.commit()
|
||||
|
@ -343,6 +349,10 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
|||
continue
|
||||
|
||||
contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
|
||||
contact_name = full_address.display_name
|
||||
if len(contact_name) >= Contact.MAX_NAME_LENGTH:
|
||||
contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
|
||||
|
||||
if contact:
|
||||
# update the contact name if needed
|
||||
if contact.name != full_address.display_name:
|
||||
|
@ -350,9 +360,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
|||
"Update contact %s name %s to %s",
|
||||
contact,
|
||||
contact.name,
|
||||
full_address.display_name,
|
||||
contact_name,
|
||||
)
|
||||
contact.name = full_address.display_name
|
||||
contact.name = contact_name
|
||||
Session.commit()
|
||||
else:
|
||||
LOG.d(
|
||||
|
@ -367,8 +377,8 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
|
|||
user_id=alias.user_id,
|
||||
alias_id=alias.id,
|
||||
website_email=contact_email,
|
||||
name=full_address.display_name,
|
||||
reply_email=generate_reply_email(contact_email, alias.user),
|
||||
name=contact_name,
|
||||
reply_email=generate_reply_email(contact_email, alias),
|
||||
is_cc=header.lower() == "cc",
|
||||
automatic_created=True,
|
||||
)
|
||||
|
@ -536,12 +546,20 @@ def sign_msg(msg: Message) -> Message:
|
|||
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
|
||||
|
||||
try:
|
||||
signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
|
||||
payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
|
||||
if not payload:
|
||||
raise PGPException("Empty signature by gnupg")
|
||||
|
||||
signature.set_payload(payload)
|
||||
except Exception:
|
||||
LOG.e("Cannot sign, try using pgpy")
|
||||
signature.set_payload(
|
||||
sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
)
|
||||
payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||
|
||||
if not payload:
|
||||
raise PGPException("Empty signature by pgpy")
|
||||
|
||||
signature.set_payload(payload)
|
||||
|
||||
container.attach(signature)
|
||||
|
||||
|
@ -618,8 +636,8 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
|
|||
|
||||
user = alias.user
|
||||
|
||||
if user.disabled:
|
||||
LOG.w("User %s disabled, disable forwarding emails for %s", user, alias)
|
||||
if not user.can_send_or_receive():
|
||||
LOG.i(f"User {user} cannot receive emails")
|
||||
if should_ignore_bounce(envelope.mail_from):
|
||||
return [(True, status.E207)]
|
||||
else:
|
||||
|
@ -689,6 +707,36 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str
|
|||
LOG.d("%s unverified, do not forward", mailbox)
|
||||
ret.append((False, status.E517))
|
||||
else:
|
||||
# Check if the mailbox is also an alias and stop the loop
|
||||
mailbox_as_alias = Alias.get_by(email=mailbox.email)
|
||||
if mailbox_as_alias is not None:
|
||||
LOG.info(
|
||||
f"Mailbox {mailbox.id} has email {mailbox.email} that is also alias {alias.id}. Stopping loop"
|
||||
)
|
||||
mailbox.verified = False
|
||||
Session.commit()
|
||||
mailbox_url = f"{URL}/dashboard/mailbox/{mailbox.id}/"
|
||||
send_email_with_rate_control(
|
||||
user,
|
||||
ALERT_MAILBOX_IS_ALIAS,
|
||||
user.email,
|
||||
f"Your mailbox {mailbox.email} is an alias",
|
||||
render(
|
||||
"transactional/mailbox-invalid.txt.jinja2",
|
||||
mailbox=mailbox,
|
||||
mailbox_url=mailbox_url,
|
||||
alias=alias,
|
||||
),
|
||||
render(
|
||||
"transactional/mailbox-invalid.html",
|
||||
mailbox=mailbox,
|
||||
mailbox_url=mailbox_url,
|
||||
alias=alias,
|
||||
),
|
||||
max_nb_alert=1,
|
||||
)
|
||||
ret.append((False, status.E525))
|
||||
continue
|
||||
# create a copy of message for each forward
|
||||
ret.append(
|
||||
forward_email_to_mailbox(
|
||||
|
@ -811,36 +859,40 @@ def forward_email_to_mailbox(
|
|||
f"""Email sent to {alias.email} from an invalid address and cannot be replied""",
|
||||
)
|
||||
|
||||
delete_all_headers_except(
|
||||
msg,
|
||||
[
|
||||
headers.FROM,
|
||||
headers.TO,
|
||||
headers.CC,
|
||||
headers.SUBJECT,
|
||||
headers.DATE,
|
||||
# do not delete original message id
|
||||
headers.MESSAGE_ID,
|
||||
# References and In-Reply-To are used for keeping the email thread
|
||||
headers.REFERENCES,
|
||||
headers.IN_REPLY_TO,
|
||||
]
|
||||
+ headers.MIME_HEADERS,
|
||||
)
|
||||
headers_to_keep = [
|
||||
headers.FROM,
|
||||
headers.TO,
|
||||
headers.CC,
|
||||
headers.SUBJECT,
|
||||
headers.DATE,
|
||||
# do not delete original message id
|
||||
headers.MESSAGE_ID,
|
||||
# References and In-Reply-To are used for keeping the email thread
|
||||
headers.REFERENCES,
|
||||
headers.IN_REPLY_TO,
|
||||
headers.LIST_UNSUBSCRIBE,
|
||||
headers.LIST_UNSUBSCRIBE_POST,
|
||||
] + headers.MIME_HEADERS
|
||||
if user.include_header_email_header:
|
||||
headers_to_keep.append(headers.AUTHENTICATION_RESULTS)
|
||||
delete_all_headers_except(msg, headers_to_keep)
|
||||
|
||||
if mailbox.generic_subject:
|
||||
LOG.d("Use a generic subject for %s", mailbox)
|
||||
orig_subject = msg[headers.SUBJECT]
|
||||
orig_subject = get_header_unicode(orig_subject)
|
||||
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
|
||||
sender = msg[headers.FROM]
|
||||
sender = get_header_unicode(sender)
|
||||
msg = add_header(
|
||||
msg,
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with "{orig_subject}" as subject""",
|
||||
f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
|
||||
)
|
||||
|
||||
# create PGP email if needed
|
||||
if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
|
||||
LOG.d("Encrypt message using mailbox %s", mailbox)
|
||||
if mailbox.generic_subject:
|
||||
LOG.d("Use a generic subject for %s", mailbox)
|
||||
orig_subject = msg[headers.SUBJECT]
|
||||
orig_subject = get_header_unicode(orig_subject)
|
||||
add_or_replace_header(msg, "Subject", mailbox.generic_subject)
|
||||
msg = add_header(
|
||||
msg,
|
||||
f"""Forwarded by SimpleLogin to {alias.email} with "{orig_subject}" as subject""",
|
||||
f"""Forwarded by SimpleLogin to {alias.email} with <b>{orig_subject}</b> as subject""",
|
||||
)
|
||||
|
||||
try:
|
||||
msg = prepare_pgp_message(
|
||||
|
@ -861,6 +913,11 @@ def forward_email_to_mailbox(
|
|||
msg[headers.SL_EMAIL_LOG_ID] = str(email_log.id)
|
||||
if user.include_header_email_header:
|
||||
msg[headers.SL_ENVELOPE_FROM] = envelope.mail_from
|
||||
if contact.name:
|
||||
original_from = f"{contact.name} <{contact.website_email}>"
|
||||
else:
|
||||
original_from = contact.website_email
|
||||
msg[headers.SL_ORIGINAL_FROM] = original_from
|
||||
# when an alias isn't in the To: header, there's no way for users to know what alias has received the email
|
||||
msg[headers.SL_ENVELOPE_TO] = alias.email
|
||||
|
||||
|
@ -909,10 +966,11 @@ def forward_email_to_mailbox(
|
|||
envelope.rcpt_options,
|
||||
)
|
||||
|
||||
contact_domain = get_email_domain_part(contact.reply_email)
|
||||
try:
|
||||
sl_sendmail(
|
||||
# use a different envelope sender for each forward (aka VERP)
|
||||
generate_verp_email(VerpType.bounce_forward, email_log.id),
|
||||
generate_verp_email(VerpType.bounce_forward, email_log.id, contact_domain),
|
||||
mailbox.email,
|
||||
msg,
|
||||
envelope.mail_options,
|
||||
|
@ -981,10 +1039,14 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
|||
|
||||
reply_email = rcpt_to
|
||||
|
||||
# reply_email must end with EMAIL_DOMAIN
|
||||
reply_domain = get_email_domain_part(reply_email)
|
||||
|
||||
# reply_email must end with EMAIL_DOMAIN or a domain that can be used as reverse alias domain
|
||||
if not reply_email.endswith(EMAIL_DOMAIN):
|
||||
LOG.w(f"Reply email {reply_email} has wrong domain")
|
||||
return False, status.E501
|
||||
sl_domain: SLDomain = SLDomain.get_by(domain=reply_domain)
|
||||
if sl_domain is None:
|
||||
LOG.w(f"Reply email {reply_email} has wrong domain")
|
||||
return False, status.E501
|
||||
|
||||
# handle case where reply email is generated with non-allowed char
|
||||
reply_email = normalize_reply_email(reply_email)
|
||||
|
@ -996,7 +1058,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
|||
|
||||
alias = contact.alias
|
||||
alias_address: str = contact.alias.email
|
||||
alias_domain = alias_address[alias_address.find("@") + 1 :]
|
||||
alias_domain = get_email_domain_part(alias_address)
|
||||
|
||||
# Sanity check: verify alias domain is managed by SimpleLogin
|
||||
# scenario: a user have removed a domain but due to a bug, the aliases are still there
|
||||
|
@ -1007,13 +1069,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
|||
user = alias.user
|
||||
mail_from = envelope.mail_from
|
||||
|
||||
if user.disabled:
|
||||
LOG.e(
|
||||
"User %s disabled, disable sending emails from %s to %s",
|
||||
user,
|
||||
alias,
|
||||
contact,
|
||||
)
|
||||
if not user.can_send_or_receive():
|
||||
LOG.i(f"User {user} cannot send emails")
|
||||
return False, status.E504
|
||||
|
||||
# Check if we need to reject or quarantine based on dmarc
|
||||
|
@ -1139,7 +1196,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
|||
)
|
||||
|
||||
# replace reverse alias by real address for all contacts
|
||||
for (reply_email, website_email) in contact_query.values(
|
||||
for reply_email, website_email in contact_query.values(
|
||||
Contact.reply_email, Contact.website_email
|
||||
):
|
||||
msg = replace(msg, reply_email, website_email)
|
||||
|
@ -1194,7 +1251,6 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
|
|||
if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
|
||||
# no need to replace TO header
|
||||
LOG.d("email is sent in BCC mode")
|
||||
del msg[headers.TO]
|
||||
else:
|
||||
replace_header_when_reply(msg, alias, headers.TO)
|
||||
|
||||
|
@ -1384,21 +1440,26 @@ def get_mailbox_from_mail_from(mail_from: str, alias) -> Optional[Mailbox]:
|
|||
"""return the corresponding mailbox given the mail_from and alias
|
||||
Usually the mail_from=mailbox.email but it can also be one of the authorized address
|
||||
"""
|
||||
for mailbox in alias.mailboxes:
|
||||
if mailbox.email == mail_from:
|
||||
return mailbox
|
||||
|
||||
for authorized_address in mailbox.authorized_addresses:
|
||||
if authorized_address.email == mail_from:
|
||||
LOG.d(
|
||||
"Found an authorized address for %s %s %s",
|
||||
alias,
|
||||
mailbox,
|
||||
authorized_address,
|
||||
)
|
||||
def __check(email_address: str, alias: Alias) -> Optional[Mailbox]:
|
||||
for mailbox in alias.mailboxes:
|
||||
if mailbox.email == email_address:
|
||||
return mailbox
|
||||
|
||||
return None
|
||||
for authorized_address in mailbox.authorized_addresses:
|
||||
if authorized_address.email == email_address:
|
||||
LOG.d(
|
||||
"Found an authorized address for %s %s %s",
|
||||
alias,
|
||||
mailbox,
|
||||
authorized_address,
|
||||
)
|
||||
return mailbox
|
||||
return None
|
||||
|
||||
# We need to first check for the uncanonicalized version because we still have users in the db with the
|
||||
# email non canonicalized. So if it matches the already existing one use that, otherwise check the canonical one
|
||||
return __check(mail_from, alias) or __check(canonicalize_email(mail_from), alias)
|
||||
|
||||
|
||||
def handle_unknown_mailbox(
|
||||
|
@ -1890,7 +1951,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
|
|||
for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
|
||||
res.append((is_delivered, smtp_status))
|
||||
|
||||
for (is_success, smtp_status) in res:
|
||||
for is_success, smtp_status in res:
|
||||
# Consider all deliveries successful if 1 delivery is successful
|
||||
if is_success:
|
||||
return smtp_status
|
||||
|
@ -2210,7 +2271,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
|
|||
if nb_success > 0 and nb_non_success > 0:
|
||||
LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")
|
||||
|
||||
for (is_success, smtp_status) in res:
|
||||
for is_success, smtp_status in res:
|
||||
# Consider all deliveries successful if 1 delivery is successful
|
||||
if is_success:
|
||||
return smtp_status
|
||||
|
|
|
@@ -42,14 +42,16 @@ def add_sl_domains():
             LOG.d("%s is already a SL domain", alias_domain)
         else:
             LOG.i("Add %s to SL domain", alias_domain)
-            SLDomain.create(domain=alias_domain)
+            SLDomain.create(domain=alias_domain, use_as_reverse_alias=True)
 
     for premium_domain in PREMIUM_ALIAS_DOMAINS:
         if SLDomain.get_by(domain=premium_domain):
             LOG.d("%s is already a SL domain", premium_domain)
         else:
             LOG.i("Add %s to SL domain", premium_domain)
-            SLDomain.create(domain=premium_domain, premium_only=True)
+            SLDomain.create(
+                domain=premium_domain, premium_only=True, use_as_reverse_alias=True
+            )
 
     Session.commit()
 
@ -159,9 +159,9 @@ def delete_mailbox_job(job: Job):
|
|||
user.email,
|
||||
f"Your mailbox {mailbox_email} has been deleted",
|
||||
f"""Mailbox {mailbox_email} and its alias have been transferred to {alias_transferred_to}.
|
||||
Regards,
|
||||
SimpleLogin team.
|
||||
""",
|
||||
Regards,
|
||||
SimpleLogin team.
|
||||
""",
|
||||
retries=3,
|
||||
)
|
||||
else:
|
||||
|
@ -169,9 +169,9 @@ def delete_mailbox_job(job: Job):
|
|||
user.email,
|
||||
f"Your mailbox {mailbox_email} has been deleted",
|
||||
f"""Mailbox {mailbox_email} along with its aliases have been deleted successfully.
|
||||
Regards,
|
||||
SimpleLogin team.
|
||||
""",
|
||||
Regards,
|
||||
SimpleLogin team.
|
||||
""",
|
||||
retries=3,
|
||||
)
|
||||
|
||||
|
|
local_data/words.txt (332,521 changed lines; the diff is largely suppressed because it is too large, one hunk shown below)
@@ -158677,16 +158677,6 @@ isis
 isize
 isl
 islay
-islam
-islamic
-islamism
-islamist
-islamistic
-islamite
-islamitic
-islamitish
-islamization
-islamize
 island
 islanded
 islander
31
migrations/versions/2023_040318_5f4a5625da66_.py
Normal file
31
migrations/versions/2023_040318_5f4a5625da66_.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 5f4a5625da66
|
||||
Revises: 2c2093c82bc0
|
||||
Create Date: 2023-04-03 18:30:46.488231
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5f4a5625da66'
|
||||
down_revision = '2c2093c82bc0'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('public_domain', sa.Column('partner_id', sa.Integer(), nullable=True))
|
||||
op.create_foreign_key(None, 'public_domain', 'partner', ['partner_id'], ['id'], ondelete='cascade')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(None, 'public_domain', type_='foreignkey')
|
||||
op.drop_column('public_domain', 'partner_id')
|
||||
# ### end Alembic commands ###
|
29
migrations/versions/2023_041418_893c0d18475f_.py
Normal file
29
migrations/versions/2023_041418_893c0d18475f_.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 893c0d18475f
|
||||
Revises: 5f4a5625da66
|
||||
Create Date: 2023-04-14 18:20:03.807367
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '893c0d18475f'
|
||||
down_revision = '5f4a5625da66'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_contact_pgp_finger_print'), 'contact', ['pgp_finger_print'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_contact_pgp_finger_print'), table_name='contact')
|
||||
# ### end Alembic commands ###
|
35
migrations/versions/2023_041419_bc496c0a0279_.py
Normal file
35
migrations/versions/2023_041419_bc496c0a0279_.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: bc496c0a0279
|
||||
Revises: 893c0d18475f
|
||||
Create Date: 2023-04-14 19:09:38.540514
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'bc496c0a0279'
|
||||
down_revision = '893c0d18475f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_alias_used_on_alias_id'), 'alias_used_on', ['alias_id'], unique=False)
|
||||
op.create_index(op.f('ix_client_user_alias_id'), 'client_user', ['alias_id'], unique=False)
|
||||
op.create_index(op.f('ix_hibp_notified_alias_alias_id'), 'hibp_notified_alias', ['alias_id'], unique=False)
|
||||
op.create_index(op.f('ix_users_newsletter_alias_id'), 'users', ['newsletter_alias_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_users_newsletter_alias_id'), table_name='users')
|
||||
op.drop_index(op.f('ix_hibp_notified_alias_alias_id'), table_name='hibp_notified_alias')
|
||||
op.drop_index(op.f('ix_client_user_alias_id'), table_name='client_user')
|
||||
op.drop_index(op.f('ix_alias_used_on_alias_id'), table_name='alias_used_on')
|
||||
# ### end Alembic commands ###
|
29
migrations/versions/2023_041520_2d89315ac650_.py
Normal file
29
migrations/versions/2023_041520_2d89315ac650_.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 2d89315ac650
|
||||
Revises: bc496c0a0279
|
||||
Create Date: 2023-04-15 20:43:44.218020
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2d89315ac650'
|
||||
down_revision = 'bc496c0a0279'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_partner_subscription_end_at'), 'partner_subscription', ['end_at'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_partner_subscription_end_at'), table_name='partner_subscription')
|
||||
# ### end Alembic commands ###
|
29
migrations/versions/2023_041916_01e2997e90d3_.py
Normal file
29
migrations/versions/2023_041916_01e2997e90d3_.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 01e2997e90d3
|
||||
Revises: 893c0d18475f
|
||||
Create Date: 2023-04-19 16:09:11.851588
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '01e2997e90d3'
|
||||
down_revision = '893c0d18475f'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('public_domain', sa.Column('use_as_reverse_alias', sa.Boolean(), server_default='0', nullable=False))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('public_domain', 'use_as_reverse_alias')
|
||||
# ### end Alembic commands ###
|
25
migrations/versions/2023_042011_2634b41f54db_.py
Normal file
25
migrations/versions/2023_042011_2634b41f54db_.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 2634b41f54db
|
||||
Revises: 01e2997e90d3, 2d89315ac650
|
||||
Create Date: 2023-04-20 11:47:43.048536
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2634b41f54db'
|
||||
down_revision = ('01e2997e90d3', '2d89315ac650')
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
pass
|
||||
|
||||
|
||||
def downgrade():
|
||||
pass
|
42
migrations/versions/2023_072819_01827104004b_.py
Normal file
42
migrations/versions/2023_072819_01827104004b_.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 01827104004b
|
||||
Revises: 2634b41f54db
|
||||
Create Date: 2023-07-28 19:39:28.675490
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '01827104004b'
|
||||
down_revision = '2634b41f54db'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
with op.get_context().autocommit_block():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index(op.f('ix_alias_hibp_last_check'), 'alias', ['hibp_last_check'], unique=False, postgresql_concurrently=True)
|
||||
op.create_index('ix_bounce_created_at', 'bounce', ['created_at'], unique=False, postgresql_concurrently=True)
|
||||
op.create_index('ix_monitoring_created_at', 'monitoring', ['created_at'], unique=False, postgresql_concurrently=True)
|
||||
op.create_index('ix_transactional_email_created_at', 'transactional_email', ['created_at'], unique=False, postgresql_concurrently=True)
|
||||
op.create_index(op.f('ix_users_activated'), 'users', ['activated'], unique=False, postgresql_concurrently=True)
|
||||
op.create_index('ix_users_activated_trial_end_lifetime', 'users', ['activated', 'trial_end', 'lifetime'], unique=False, postgresql_concurrently=True)
|
||||
op.create_index(op.f('ix_users_referral_id'), 'users', ['referral_id'], unique=False, postgresql_concurrently=True)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_users_referral_id'), table_name='users')
|
||||
op.drop_index('ix_users_activated_trial_end_lifetime', table_name='users')
|
||||
op.drop_index(op.f('ix_users_activated'), table_name='users')
|
||||
op.drop_index('ix_transactional_email_created_at', table_name='transactional_email')
|
||||
op.drop_index('ix_monitoring_created_at', table_name='monitoring')
|
||||
op.drop_index('ix_bounce_created_at', table_name='bounce')
|
||||
op.drop_index(op.f('ix_alias_hibp_last_check'), table_name='alias')
|
||||
# ### end Alembic commands ###
|
33
migrations/versions/2023_090715_0a5701a4f5e4_.py
Normal file
33
migrations/versions/2023_090715_0a5701a4f5e4_.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 0a5701a4f5e4
|
||||
Revises: 01827104004b
|
||||
Create Date: 2023-09-07 15:28:10.122756
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '0a5701a4f5e4'
|
||||
down_revision = '01827104004b'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('users', sa.Column('delete_on', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True))
|
||||
with op.get_context().autocommit_block():
|
||||
op.create_index('ix_users_delete_on', 'users', ['delete_on'], unique=False, postgresql_concurrently=True)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.drop_index('ix_users_delete_on', table_name='users', postgresql_concurrently=True)
|
||||
op.drop_column('users', 'delete_on')
|
||||
# ### end Alembic commands ###
|
34
migrations/versions/2023_092818_ec7fdde8da9f_.py
Normal file
34
migrations/versions/2023_092818_ec7fdde8da9f_.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: ec7fdde8da9f
|
||||
Revises: 0a5701a4f5e4
|
||||
Create Date: 2023-09-28 18:09:48.016620
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "ec7fdde8da9f"
|
||||
down_revision = "0a5701a4f5e4"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.create_index(
|
||||
"ix_email_log_created_at", "email_log", ["created_at"], unique=False
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.drop_index("ix_email_log_created_at", table_name="email_log")
|
||||
# ### end Alembic commands ###
|
39
migrations/versions/2023_100510_46ecb648a47e_.py
Normal file
39
migrations/versions/2023_100510_46ecb648a47e_.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 46ecb648a47e
|
||||
Revises: ec7fdde8da9f
|
||||
Create Date: 2023-10-05 10:43:35.668902
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "46ecb648a47e"
|
||||
down_revision = "ec7fdde8da9f"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.create_index(
|
||||
op.f("ix_message_id_matching_email_log_id"),
|
||||
"message_id_matching",
|
||||
["email_log_id"],
|
||||
unique=False,
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.get_context().autocommit_block():
|
||||
op.drop_index(
|
||||
op.f("ix_message_id_matching_email_log_id"),
|
||||
table_name="message_id_matching",
|
||||
)
|
||||
# ### end Alembic commands ###
|
31
migrations/versions/2023_110714_4bc54632d9aa_.py
Normal file
31
migrations/versions/2023_110714_4bc54632d9aa_.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 4bc54632d9aa
|
||||
Revises: 46ecb648a47e
|
||||
Create Date: 2023-11-07 14:02:17.610226
|
||||
|
||||
"""
|
||||
import sqlalchemy_utils
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '4bc54632d9aa'
|
||||
down_revision = '46ecb648a47e'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('ix_newsletter_subject', table_name='newsletter')
|
||||
op.create_index(op.f('ix_newsletter_subject'), 'newsletter', ['subject'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(op.f('ix_newsletter_subject'), table_name='newsletter')
|
||||
op.create_index('ix_newsletter_subject', 'newsletter', ['subject'], unique=True)
|
||||
# ### end Alembic commands ###
|
0
monitor/__init__.py
Normal file
0
monitor/__init__.py
Normal file
21
monitor/metric.py
Normal file
21
monitor/metric.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
from dataclasses import dataclass
|
||||
from typing import List
|
||||
|
||||
|
||||
@dataclass
|
||||
class UpcloudRecord:
|
||||
db_role: str
|
||||
label: str
|
||||
time: str
|
||||
value: float
|
||||
|
||||
|
||||
@dataclass
|
||||
class UpcloudMetric:
|
||||
metric_name: str
|
||||
records: List[UpcloudRecord]
|
||||
|
||||
|
||||
@dataclass
|
||||
class UpcloudMetrics:
|
||||
metrics: List[UpcloudMetric]
|
20
monitor/metric_exporter.py
Normal file
20
monitor/metric_exporter.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
from app.config import UPCLOUD_DB_ID, UPCLOUD_PASSWORD, UPCLOUD_USERNAME
|
||||
from app.log import LOG
|
||||
from monitor.newrelic import NewRelicClient
|
||||
from monitor.upcloud import UpcloudClient
|
||||
|
||||
|
||||
class MetricExporter:
|
||||
def __init__(self, newrelic_license: str):
|
||||
self.__upcloud = UpcloudClient(
|
||||
username=UPCLOUD_USERNAME, password=UPCLOUD_PASSWORD
|
||||
)
|
||||
self.__newrelic = NewRelicClient(newrelic_license)
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
metrics = self.__upcloud.get_metrics(UPCLOUD_DB_ID)
|
||||
self.__newrelic.send(metrics)
|
||||
LOG.info("Upcloud metrics sent to NewRelic")
|
||||
except Exception as e:
|
||||
LOG.warn(f"Could not export metrics: {e}")
|
26
monitor/newrelic.py
Normal file
26
monitor/newrelic.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
from monitor.metric import UpcloudMetrics
|
||||
|
||||
from newrelic_telemetry_sdk import GaugeMetric, MetricClient
|
||||
|
||||
_NEWRELIC_BASE_HOST = "metric-api.eu.newrelic.com"
|
||||
|
||||
|
||||
class NewRelicClient:
|
||||
def __init__(self, license_key: str):
|
||||
self.__client = MetricClient(license_key=license_key, host=_NEWRELIC_BASE_HOST)
|
||||
|
||||
def send(self, metrics: UpcloudMetrics):
|
||||
batch = []
|
||||
|
||||
for metric in metrics.metrics:
|
||||
for record in metric.records:
|
||||
batch.append(
|
||||
GaugeMetric(
|
||||
name=f"upcloud.db.{metric.metric_name}",
|
||||
value=record.value,
|
||||
tags={"host": record.label, "db_role": record.db_role},
|
||||
)
|
||||
)
|
||||
|
||||
response = self.__client.send_batch(batch)
|
||||
response.raise_for_status()
|
82
monitor/upcloud.py
Normal file
82
monitor/upcloud.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
from app.log import LOG
|
||||
from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord
|
||||
|
||||
import base64
|
||||
import requests
|
||||
from typing import Any
|
||||
|
||||
|
||||
BASE_URL = "https://api.upcloud.com"
|
||||
|
||||
|
||||
def get_metric(json: Any, metric: str) -> UpcloudMetric:
|
||||
records = []
|
||||
|
||||
if metric in json:
|
||||
metric_data = json[metric]
|
||||
data = metric_data["data"]
|
||||
cols = list(map(lambda x: x["label"], data["cols"][1:]))
|
||||
latest = data["rows"][-1]
|
||||
time = latest[0]
|
||||
for column_idx in range(len(cols)):
|
||||
value = latest[1 + column_idx]
|
||||
|
||||
# If the latest value is None, try to fetch the second to last
|
||||
if value is None:
|
||||
value = data["rows"][-2][1 + column_idx]
|
||||
|
||||
if value is not None:
|
||||
label = cols[column_idx]
|
||||
if "(master)" in label:
|
||||
db_role = "master"
|
||||
else:
|
||||
db_role = "standby"
|
||||
records.append(
|
||||
UpcloudRecord(time=time, db_role=db_role, label=label, value=value)
|
||||
)
|
||||
else:
|
||||
LOG.warn(f"Could not get value for metric {metric}")
|
||||
|
||||
return UpcloudMetric(metric_name=metric, records=records)
|
||||
|
||||
|
||||
def get_metrics(json: Any) -> UpcloudMetrics:
|
||||
return UpcloudMetrics(
|
||||
metrics=[
|
||||
get_metric(json, "cpu_usage"),
|
||||
get_metric(json, "disk_usage"),
|
||||
get_metric(json, "diskio_reads"),
|
||||
get_metric(json, "diskio_writes"),
|
||||
get_metric(json, "load_average"),
|
||||
get_metric(json, "mem_usage"),
|
||||
get_metric(json, "net_receive"),
|
||||
get_metric(json, "net_send"),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class UpcloudClient:
|
||||
def __init__(self, username: str, password: str):
|
||||
if not username:
|
||||
raise Exception("UpcloudClient username must be set")
|
||||
if not password:
|
||||
raise Exception("UpcloudClient password must be set")
|
||||
|
||||
client = requests.Session()
|
||||
encoded_auth = base64.b64encode(
|
||||
f"{username}:{password}".encode("utf-8")
|
||||
).decode("utf-8")
|
||||
client.headers = {"Authorization": f"Basic {encoded_auth}"}
|
||||
self.__client = client
|
||||
|
||||
def get_metrics(self, db_uuid: str) -> UpcloudMetrics:
|
||||
url = f"{BASE_URL}/1.3/database/{db_uuid}/metrics?period=hour"
|
||||
LOG.d(f"Performing request to {url}")
|
||||
response = self.__client.get(url)
|
||||
LOG.d(f"Status code: {response.status_code}")
|
||||
if response.status_code != 200:
|
||||
return UpcloudMetrics(metrics=[])
|
||||
|
||||
as_json = response.json()
|
||||
|
||||
return get_metrics(as_json)
|
|
@ -1,3 +1,4 @@
|
|||
import configparser
|
||||
import os
|
||||
import subprocess
|
||||
from time import sleep
|
||||
|
@ -7,6 +8,7 @@ import newrelic.agent
|
|||
|
||||
from app.db import Session
|
||||
from app.log import LOG
|
||||
from monitor.metric_exporter import MetricExporter
|
||||
|
||||
# the number of consecutive fails
|
||||
# if more than _max_nb_fails, alert
|
||||
|
@ -19,6 +21,18 @@ _max_nb_fails = 10
|
|||
# the maximum number of emails in incoming & active queue
|
||||
_max_incoming = 50
|
||||
|
||||
_NR_CONFIG_FILE_LOCATION_VAR = "NEW_RELIC_CONFIG_FILE"
|
||||
|
||||
|
||||
def get_newrelic_license() -> str:
|
||||
nr_file = os.environ.get(_NR_CONFIG_FILE_LOCATION_VAR, None)
|
||||
if nr_file is None:
|
||||
raise Exception(f"{_NR_CONFIG_FILE_LOCATION_VAR} not defined")
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
config.read(nr_file)
|
||||
return config["newrelic"]["license_key"]
|
||||
|
||||
|
||||
@newrelic.agent.background_task()
|
||||
def log_postfix_metrics():
|
||||
|
@ -80,10 +94,13 @@ def log_nb_db_connection():
|
|||
|
||||
|
||||
if __name__ == "__main__":
|
||||
exporter = MetricExporter(get_newrelic_license())
|
||||
while True:
|
||||
log_postfix_metrics()
|
||||
log_nb_db_connection()
|
||||
Session.close()
|
||||
|
||||
exporter.run()
|
||||
|
||||
# 1 min
|
||||
sleep(60)
|
||||
|
|
|
@@ -1,7 +1,7 @@
 """
 This is an example on how to integrate SimpleLogin
 with Requests-OAuthlib, a popular library to work with OAuth in Python.
-The step-to-step guide can be found on https://docs.simplelogin.io
+The step-to-step guide can be found on https://simplelogin.io/docs/siwsl/app/
 This example is based on
 https://requests-oauthlib.readthedocs.io/en/latest/examples/real_world_example.html
 """
poetry.lock (generated file, 5,347 changed lines; diff not shown because it is too large)
Some files were not shown because too many files have changed in this diff.