Compare commits
master...dependabot
1 commit: 7d2950974f
148 changed files with 1803 additions and 4880 deletions
.github/workflows/main.yml (vendored): 8 changes
@@ -15,15 +15,9 @@ jobs:
       - uses: actions/setup-python@v4
         with:
-          python-version: '3.10'
+          python-version: '3.9'
           cache: 'poetry'

-      - name: Install OS dependencies
-        if: ${{ matrix.python-version }} == '3.10'
-        run: |
-          sudo apt update
-          sudo apt install -y libre2-dev libpq-dev
-
       - name: Install dependencies
         if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
         run: poetry install --no-interaction
@@ -7,19 +7,18 @@ repos:
     hooks:
       - id: check-yaml
       - id: trailing-whitespace
+  - repo: https://github.com/psf/black
+    rev: 22.3.0
+    hooks:
+      - id: black
+  - repo: https://github.com/pycqa/flake8
+    rev: 3.9.2
+    hooks:
+      - id: flake8
   - repo: https://github.com/Riverside-Healthcare/djLint
     rev: v1.3.0
     hooks:
       - id: djlint-jinja
         files: '.*\.html'
         entry: djlint --reformat
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    # Ruff version.
-    rev: v0.1.5
-    hooks:
-      # Run the linter.
-      - id: ruff
-        args: [ --fix ]
-      # Run the formatter.
-      - id: ruff-format
@@ -34,7 +34,7 @@ poetry install
 On Mac, sometimes you might need to install some other packages via `brew`:

 ```bash
-brew install pkg-config libffi openssl postgresql@13
+brew install pkg-config libffi openssl postgresql
 ```

 You also need to install `gpg` tool, on Mac it can be done with:
@@ -169,12 +169,6 @@ For HTML templates, we use `djlint`. Before creating a pull request, please run
 poetry run djlint --check templates
 ```

-If some files aren't properly formatted, you can format all files with
-
-```bash
-poetry run djlint --reformat .
-```
-
 ## Test sending email

 [swaks](http://www.jetmore.org/john/code/swaks/) is used for sending test emails to the `email_handler`.
@@ -23,15 +23,15 @@ COPY poetry.lock pyproject.toml ./
 # Install and setup poetry
 RUN pip install -U pip \
     && apt-get update \
-    && apt install -y curl netcat-traditional gcc python3-dev gnupg git libre2-dev cmake ninja-build\
+    && apt install -y curl netcat gcc python3-dev gnupg git libre2-dev \
     && curl -sSL https://install.python-poetry.org | python3 - \
     # Remove curl and netcat from the image
-    && apt-get purge -y curl netcat-traditional \
+    && apt-get purge -y curl netcat \
    # Run poetry
     && poetry config virtualenvs.create false \
     && poetry install --no-interaction --no-ansi --no-root \
     # Clear apt cache \
-    && apt-get purge -y libre2-dev cmake ninja-build\
+    && apt-get purge -y libre2-dev \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
@@ -5,15 +5,13 @@ from typing import Optional

 from arrow import Arrow
 from newrelic import agent
-from sqlalchemy import or_

 from app.db import Session
 from app.email_utils import send_welcome_email
-from app.utils import sanitize_email, canonicalize_email
+from app.utils import sanitize_email
 from app.errors import (
     AccountAlreadyLinkedToAnotherPartnerException,
     AccountIsUsingAliasAsEmail,
-    AccountAlreadyLinkedToAnotherUserException,
 )
 from app.log import LOG
 from app.models import (
@@ -132,9 +130,8 @@ class ClientMergeStrategy(ABC):
 class NewUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
         # Will create a new SL User with a random password
-        canonical_email = canonicalize_email(self.link_request.email)
         new_user = User.create(
-            email=canonical_email,
+            email=self.link_request.email,
             name=self.link_request.name,
             password=random_string(20),
             activated=True,
@@ -168,6 +165,7 @@ class NewUserStrategy(ClientMergeStrategy):

 class ExistingUnlinkedUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
+
         partner_user = ensure_partner_user_exists_for_user(
             self.link_request, self.user, self.partner
         )
@@ -181,7 +179,7 @@ class ExistingUnlinkedUserStrategy(ClientMergeStrategy):

 class LinkedWithAnotherPartnerUserStrategy(ClientMergeStrategy):
     def process(self) -> LinkResult:
-        raise AccountAlreadyLinkedToAnotherUserException()
+        raise AccountAlreadyLinkedToAnotherPartnerException()


 def get_login_strategy(
@@ -209,26 +207,15 @@ def process_login_case(
 ) -> LinkResult:
     # Sanitize email just in case
     link_request.email = sanitize_email(link_request.email)
+    check_alias(link_request.email)
     # Try to find a SimpleLogin user registered with that partner user id
     partner_user = PartnerUser.get_by(
         partner_id=partner.id, external_user_id=link_request.external_user_id
     )
     if partner_user is None:
-        canonical_email = canonicalize_email(link_request.email)
         # We didn't find any SimpleLogin user registered with that partner user id
-        # Make sure they aren't using an alias as their link email
-        check_alias(link_request.email)
-        check_alias(canonical_email)
         # Try to find it using the partner's e-mail address
-        users = User.filter(
-            or_(User.email == link_request.email, User.email == canonical_email)
-        ).all()
-        if len(users) > 1:
-            user = [user for user in users if user.email == canonical_email][0]
-        elif len(users) == 1:
-            user = users[0]
-        else:
-            user = None
+        user = User.get_by(email=link_request.email)
         return get_login_strategy(link_request, user, partner).process()
     else:
         # We found the SL user registered with that partner user id
@@ -256,17 +256,6 @@ class UserAdmin(SLModelView):

         Session.commit()

-    @action(
-        "clear_delete_on",
-        "Remove scheduled deletion of user",
-        "This will remove the scheduled deletion for this users",
-    )
-    def clean_delete_on(self, ids):
-        for user in User.filter(User.id.in_(ids)):
-            user.delete_on = None
-
-        Session.commit()
-
     # @action(
     #     "login_as",
     #     "Login as this user",
@@ -611,26 +600,6 @@ class NewsletterAdmin(SLModelView):
         else:
             flash(error_msg, "error")

-    @action(
-        "clone_newsletter",
-        "Clone this newsletter",
-    )
-    def clone_newsletter(self, newsletter_ids):
-        if len(newsletter_ids) != 1:
-            flash("you can only select 1 newsletter", "error")
-            return
-
-        newsletter_id = newsletter_ids[0]
-        newsletter: Newsletter = Newsletter.get(newsletter_id)
-        new_newsletter = Newsletter.create(
-            subject=newsletter.subject,
-            html=newsletter.html,
-            plain_text=newsletter.plain_text,
-            commit=True,
-        )
-
-        flash(f"Newsletter {new_newsletter.subject} has been cloned", "success")
-

 class NewsletterUserAdmin(SLModelView):
     column_searchable_list = ["id"]
@@ -6,7 +6,8 @@ from typing import Optional
 import itsdangerous
 from app import config
 from app.log import LOG
-from app.models import User, AliasOptions, SLDomain
+from app.models import User


 signer = itsdangerous.TimestampSigner(config.CUSTOM_ALIAS_SECRET)

@@ -42,9 +43,7 @@ def check_suffix_signature(signed_suffix: str) -> Optional[str]:
         return None


-def verify_prefix_suffix(
-    user: User, alias_prefix, alias_suffix, alias_options: Optional[AliasOptions] = None
-) -> bool:
+def verify_prefix_suffix(user: User, alias_prefix, alias_suffix) -> bool:
     """verify if user could create an alias with the given prefix and suffix"""
     if not alias_prefix or not alias_suffix:  # should be caught on frontend
         return False
@@ -57,7 +56,7 @@ def verify_prefix_suffix(
     alias_domain_prefix, alias_domain = alias_suffix.split("@", 1)

     # alias_domain must be either one of user custom domains or built-in domains
-    if alias_domain not in user.available_alias_domains(alias_options=alias_options):
+    if alias_domain not in user.available_alias_domains():
         LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
         return False

@@ -65,11 +64,12 @@ def verify_prefix_suffix(
     # 1) alias_suffix must start with "." and
     # 2) alias_domain_prefix must come from the word list
     if (
-        alias_domain in user.available_sl_domains(alias_options=alias_options)
+        alias_domain in user.available_sl_domains()
         and alias_domain not in user_custom_domains
         # when DISABLE_ALIAS_SUFFIX is true, alias_domain_prefix is empty
         and not config.DISABLE_ALIAS_SUFFIX
     ):

         if not alias_domain_prefix.startswith("."):
             LOG.e("User %s submits a wrong alias suffix %s", user, alias_suffix)
             return False
@@ -80,18 +80,14 @@ def verify_prefix_suffix(
             LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
             return False

-    if alias_domain not in user.available_sl_domains(
-        alias_options=alias_options
-    ):
+    if alias_domain not in user.available_sl_domains():
         LOG.e("wrong alias suffix %s, user %s", alias_suffix, user)
         return False

     return True


-def get_alias_suffixes(
-    user: User, alias_options: Optional[AliasOptions] = None
-) -> [AliasSuffix]:
+def get_alias_suffixes(user: User) -> [AliasSuffix]:
     """
     Similar to as get_available_suffixes() but also return custom domain that doesn't have MX set up.
     """
@@ -103,9 +99,7 @@ get_alias_suffixes(
     # for each user domain, generate both the domain and a random suffix version
     for custom_domain in user_custom_domains:
         if custom_domain.random_prefix_generation:
-            suffix = (
-                f".{user.get_random_alias_suffix(custom_domain)}@{custom_domain.domain}"
-            )
+            suffix = "." + user.get_random_alias_suffix() + "@" + custom_domain.domain
             alias_suffix = AliasSuffix(
                 is_custom=True,
                 suffix=suffix,
@@ -119,7 +113,7 @@ get_alias_suffixes(
         else:
             alias_suffixes.append(alias_suffix)

-        suffix = f"@{custom_domain.domain}"
+        suffix = "@" + custom_domain.domain
         alias_suffix = AliasSuffix(
             is_custom=True,
             suffix=suffix,
@@ -140,43 +134,16 @@ get_alias_suffixes(
             alias_suffixes.append(alias_suffix)

     # then SimpleLogin domain
-    sl_domains = user.get_sl_domains(alias_options=alias_options)
-    default_domain_found = False
-    for sl_domain in sl_domains:
-        prefix = (
-            "" if config.DISABLE_ALIAS_SUFFIX else f".{user.get_random_alias_suffix()}"
-        )
-        suffix = f"{prefix}@{sl_domain.domain}"
-        alias_suffix = AliasSuffix(
-            is_custom=False,
-            suffix=suffix,
-            signed_suffix=signer.sign(suffix).decode(),
-            is_premium=sl_domain.premium_only,
-            domain=sl_domain.domain,
-            mx_verified=True,
-        )
-        # No default or this is not the default
-        if (
-            user.default_alias_public_domain_id is None
-            or user.default_alias_public_domain_id != sl_domain.id
-        ):
-            alias_suffixes.append(alias_suffix)
-        else:
-            default_domain_found = True
-            alias_suffixes.insert(0, alias_suffix)
-
-    if not default_domain_found:
-        domain_conditions = {"id": user.default_alias_public_domain_id, "hidden": False}
-        if not user.is_premium():
-            domain_conditions["premium_only"] = False
-        sl_domain = SLDomain.get_by(**domain_conditions)
-        if sl_domain:
-            prefix = (
+    for sl_domain in user.get_sl_domains():
+        suffix = (
+            (
                 ""
                 if config.DISABLE_ALIAS_SUFFIX
-                else f".{user.get_random_alias_suffix()}"
+                else "." + user.get_random_alias_suffix()
+            )
+            + "@"
+            + sl_domain.domain
             )
-            suffix = f"{prefix}@{sl_domain.domain}"
             alias_suffix = AliasSuffix(
                 is_custom=False,
                 suffix=suffix,
@@ -185,6 +152,11 @@ get_alias_suffixes(
             domain=sl_domain.domain,
             mx_verified=True,
         )

+        # put the default domain to top
+        if user.default_alias_public_domain_id == sl_domain.id:
             alias_suffixes.insert(0, alias_suffix)
+        else:
+            alias_suffixes.append(alias_suffix)

     return alias_suffixes
@@ -21,8 +21,6 @@ from app.email_utils import (
     send_cannot_create_directory_alias_disabled,
     get_email_local_part,
     send_cannot_create_domain_alias,
-    send_email,
-    render,
 )
 from app.errors import AliasInTrashError
 from app.log import LOG
@@ -38,8 +36,6 @@ from app.models import (
     EmailLog,
     Contact,
     AutoCreateRule,
-    AliasUsedOn,
-    ClientUser,
 )
 from app.regex_utils import regex_match

@@ -61,8 +57,6 @@ def get_user_if_alias_would_auto_create(
         domain_and_rule = check_if_alias_can_be_auto_created_for_custom_domain(
             address, notify_user=notify_user
         )
-        if DomainDeletedAlias.get_by(email=address):
-            return None
         if domain_and_rule:
             return domain_and_rule[0].user
     directory = check_if_alias_can_be_auto_created_for_a_directory(
@@ -403,58 +397,3 @@ def alias_export_csv(user, csv_direct_export=False):
     output.headers["Content-Disposition"] = "attachment; filename=aliases.csv"
     output.headers["Content-type"] = "text/csv"
     return output
-
-
-def transfer_alias(alias, new_user, new_mailboxes: [Mailbox]):
-    # cannot transfer alias which is used for receiving newsletter
-    if User.get_by(newsletter_alias_id=alias.id):
-        raise Exception("Cannot transfer alias that's used to receive newsletter")
-
-    # update user_id
-    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
-        {"user_id": new_user.id}
-    )
-
-    # remove existing mailboxes from the alias
-    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
-
-    # set mailboxes
-    alias.mailbox_id = new_mailboxes.pop().id
-    for mb in new_mailboxes:
-        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
-
-    # alias has never been transferred before
-    if not alias.original_owner_id:
-        alias.original_owner_id = alias.user_id
-
-    # inform previous owner
-    old_user = alias.user
-    send_email(
-        old_user.email,
-        f"Alias {alias.email} has been received",
-        render(
-            "transactional/alias-transferred.txt",
-            alias=alias,
-        ),
-        render(
-            "transactional/alias-transferred.html",
-            alias=alias,
-        ),
-    )
-
-    # now the alias belongs to the new user
-    alias.user_id = new_user.id
-
-    # set some fields back to default
-    alias.disable_pgp = False
-    alias.pinned = False
-
-    Session.commit()
@@ -16,22 +16,3 @@ from .views import (
     sudo,
     user,
 )
-
-__all__ = [
-    "alias_options",
-    "new_custom_alias",
-    "custom_domain",
-    "new_random_alias",
-    "user_info",
-    "auth",
-    "auth_mfa",
-    "alias",
-    "apple",
-    "mailbox",
-    "notification",
-    "setting",
-    "export",
-    "phone",
-    "sudo",
-    "user",
-]
@@ -24,7 +24,6 @@ from app.errors import (
     ErrContactAlreadyExists,
     ErrAddressInvalid,
 )
-from app.extensions import limiter
 from app.models import Alias, Contact, Mailbox, AliasMailbox


@@ -72,9 +71,6 @@ def get_aliases():


 @api_bp.route("/v2/aliases", methods=["GET", "POST"])
-@limiter.limit(
-    "5/minute",
-)
 @require_api_auth
 def get_aliases_v2():
     """
@@ -9,7 +9,6 @@ from requests import RequestException

 from app.api.base import api_bp, require_api_auth
 from app.config import APPLE_API_SECRET, MACAPP_APPLE_API_SECRET
-from app.subscription_webhook import execute_subscription_webhook
 from app.db import Session
 from app.log import LOG
 from app.models import PlanEnum, AppleSubscription
@@ -51,7 +50,6 @@ def apple_process_payment():

     apple_sub = verify_receipt(receipt_data, user, password)
     if apple_sub:
-        execute_subscription_webhook(user)
         return jsonify(ok=True), 200

     return jsonify(error="Processing failed"), 400
@@ -284,7 +282,6 @@ def apple_update_notification():
             apple_sub.plan = plan
             apple_sub.product_id = transaction["product_id"]
             Session.commit()
-            execute_subscription_webhook(user)
             return jsonify(ok=True), 200
         else:
             LOG.w(
@@ -557,7 +554,6 @@ def verify_receipt(receipt_data, user, password) -> Optional[AppleSubscription]:
         product_id=latest_transaction["product_id"],
     )

-    execute_subscription_webhook(user)
     Session.commit()

     return apple_sub
@@ -63,11 +63,6 @@ def auth_login():
     elif user.disabled:
         LoginEvent(LoginEvent.ActionType.disabled_login, LoginEvent.Source.api).send()
         return jsonify(error="Account disabled"), 400
-    elif user.delete_on is not None:
-        LoginEvent(
-            LoginEvent.ActionType.scheduled_to_be_deleted, LoginEvent.Source.api
-        ).send()
-        return jsonify(error="Account scheduled for deletion"), 400
     elif not user.activated:
         LoginEvent(LoginEvent.ActionType.not_activated, LoginEvent.Source.api).send()
         return jsonify(error="Account not activated"), 422
@@ -362,7 +357,7 @@ def auth_payload(user, device) -> dict:


 @api_bp.route("/auth/forgot_password", methods=["POST"])
-@limiter.limit("2/minute")
+@limiter.limit("10/minute")
 def forgot_password():
     """
     User forgot password
@@ -13,8 +13,8 @@ from app.db import Session
 from app.email_utils import (
     mailbox_already_used,
     email_can_be_used_as_mailbox,
+    is_valid_email,
 )
-from app.email_validation import is_valid_email
 from app.log import LOG
 from app.models import Mailbox, Job
 from app.utils import sanitize_email
@@ -45,7 +45,7 @@ def create_mailbox():
     mailbox_email = sanitize_email(request.get_json().get("email"))

     if not user.is_premium():
-        return jsonify(error="Only premium plan can add additional mailbox"), 400
+        return jsonify(error=f"Only premium plan can add additional mailbox"), 400

     if not is_valid_email(mailbox_email):
         return jsonify(error=f"{mailbox_email} invalid"), 400
@@ -150,7 +150,7 @@ def new_custom_alias_v3():
     if not data:
         return jsonify(error="request body cannot be empty"), 400

-    if not isinstance(data, dict):
+    if type(data) is not dict:
         return jsonify(error="request body does not follow the required format"), 400

     alias_prefix = data.get("alias_prefix", "").strip().lower().replace(" ", "")
@@ -168,7 +168,7 @@ def new_custom_alias_v3():
         return jsonify(error="alias prefix invalid format or too long"), 400

     # check if mailbox is not tempered with
-    if not isinstance(mailbox_ids, list):
+    if type(mailbox_ids) is not list:
         return jsonify(error="mailbox_ids must be an array of id"), 400
     mailboxes = []
     for mailbox_id in mailbox_ids:
@@ -1,5 +1,4 @@
 import base64
-import dataclasses
 from io import BytesIO
 from typing import Optional

@@ -8,7 +7,6 @@ from flask import jsonify, g, request, make_response
 from app import s3, config
 from app.api.base import api_bp, require_api_auth
 from app.config import SESSION_COOKIE_NAME
-from app.dashboard.views.index import get_stats
 from app.db import Session
 from app.models import ApiKey, File, PartnerUser, User
 from app.proton.utils import get_proton_partner
@@ -138,22 +136,3 @@ def logout():
     response.delete_cookie(SESSION_COOKIE_NAME)

     return response
-
-
-@api_bp.route("/stats")
-@require_api_auth
-def user_stats():
-    """
-    Return stats
-
-    Output as json
-    - nb_alias
-    - nb_forward
-    - nb_reply
-    - nb_block
-
-    """
-    user = g.user
-    stats = get_stats(user)
-
-    return jsonify(dataclasses.asdict(stats))
@@ -17,23 +17,3 @@ from .views import (
     recovery,
     api_to_cookie,
 )
-
-__all__ = [
-    "login",
-    "logout",
-    "register",
-    "activate",
-    "resend_activation",
-    "reset_password",
-    "forgot_password",
-    "github",
-    "google",
-    "facebook",
-    "proton",
-    "change_email",
-    "mfa",
-    "fido",
-    "social",
-    "recovery",
-    "api_to_cookie",
-]
@@ -62,7 +62,7 @@ def fido():
     browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
     if browser and not browser.is_expired() and browser.user_id == user.id:
         login_user(user)
-        flash("Welcome back!", "success")
+        flash(f"Welcome back!", "success")
         # Redirect user to correct page
         return redirect(next_url or url_for("dashboard.index"))
     else:
@@ -110,7 +110,7 @@ def fido():

     session["sudo_time"] = int(time())
     login_user(user)
-    flash("Welcome back!", "success")
+    flash(f"Welcome back!", "success")

     # Redirect user to correct page
     response = make_response(redirect(next_url or url_for("dashboard.index")))
@@ -1,4 +1,4 @@
-from flask import request, render_template, flash, g
+from flask import request, render_template, redirect, url_for, flash, g
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators

@@ -16,7 +16,7 @@ class ForgotPasswordForm(FlaskForm):

 @auth_bp.route("/forgot_password", methods=["GET", "POST"])
 @limiter.limit(
-    "10/hour", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
+    "10/minute", deduct_when=lambda r: hasattr(g, "deduct_limit") and g.deduct_limit
 )
 def forgot_password():
     form = ForgotPasswordForm(request.form)
@@ -37,5 +37,6 @@ def forgot_password():
     if user:
         LOG.d("Send forgot password email to %s", user)
         send_reset_password_email(user)
+        return redirect(url_for("auth.forgot_password"))

     return render_template("auth/forgot_password.html", form=form)
@@ -54,12 +54,6 @@ def login():
                 "error",
             )
             LoginEvent(LoginEvent.ActionType.disabled_login).send()
-        elif user.delete_on is not None:
-            flash(
-                f"Your account is scheduled to be deleted on {user.delete_on}",
-                "error",
-            )
-            LoginEvent(LoginEvent.ActionType.scheduled_to_be_deleted).send()
         elif not user.activated:
             show_resend_activation = True
             flash(
@@ -55,7 +55,7 @@ def mfa():
     browser = MfaBrowser.get_by(token=request.cookies.get("mfa"))
     if browser and not browser.is_expired() and browser.user_id == user.id:
         login_user(user)
-        flash("Welcome back!", "success")
+        flash(f"Welcome back!", "success")
         # Redirect user to correct page
         return redirect(next_url or url_for("dashboard.index"))
     else:
@@ -73,7 +73,7 @@ def mfa():
         Session.commit()

     login_user(user)
-    flash("Welcome back!", "success")
+    flash(f"Welcome back!", "success")

     # Redirect user to correct page
     response = make_response(redirect(next_url or url_for("dashboard.index")))
@@ -53,7 +53,7 @@ def recovery_route():
         del session[MFA_USER_ID]

         login_user(user)
-        flash("Welcome back!", "success")
+        flash(f"Welcome back!", "success")

         recovery_code.used = True
         recovery_code.used_at = arrow.now()
@@ -94,7 +94,9 @@ def register():
             try:
                 send_activation_email(user, next_url)
                 RegisterEvent(RegisterEvent.ActionType.success).send()
-                DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += 1
+                DailyMetric.get_or_create_today_metric().nb_new_web_non_proton_user += (
+                    1
+                )
                 Session.commit()
             except Exception:
                 flash("Invalid email, are you sure the email is correct?", "error")
@@ -60,8 +60,8 @@ def reset_password():
     # this can be served to activate user too
     user.activated = True

-    # remove all reset password codes
-    ResetPasswordCode.filter_by(user_id=user.id).delete()
+    # remove the reset password code
+    ResetPasswordCode.delete(reset_password_code.id)

     # change the alternative_id to log user out on other browsers
     user.alternative_id = str(uuid.uuid4())
@@ -532,10 +532,3 @@ if ENABLE_ALL_REVERSE_ALIAS_REPLACEMENT:
 SKIP_MX_LOOKUP_ON_CHECK = False

 DISABLE_RATE_LIMIT = "DISABLE_RATE_LIMIT" in os.environ
-
-SUBSCRIPTION_CHANGE_WEBHOOK = os.environ.get("SUBSCRIPTION_CHANGE_WEBHOOK", None)
-MAX_API_KEYS = int(os.environ.get("MAX_API_KEYS", 30))
-
-UPCLOUD_USERNAME = os.environ.get("UPCLOUD_USERNAME", None)
-UPCLOUD_PASSWORD = os.environ.get("UPCLOUD_PASSWORD", None)
-UPCLOUD_DB_ID = os.environ.get("UPCLOUD_DB_ID", None)
@@ -33,39 +33,3 @@ from .views import (
     notification,
     support,
 )
-
-__all__ = [
-    "index",
-    "pricing",
-    "setting",
-    "custom_alias",
-    "subdomain",
-    "billing",
-    "alias_log",
-    "alias_export",
-    "unsubscribe",
-    "api_key",
-    "custom_domain",
-    "alias_contact_manager",
-    "enter_sudo",
-    "mfa_setup",
-    "mfa_cancel",
-    "fido_setup",
-    "coupon",
-    "fido_manage",
-    "domain_detail",
-    "lifetime_licence",
-    "directory",
-    "mailbox",
-    "mailbox_detail",
-    "refused_email",
-    "referral",
-    "contact_detail",
-    "setup_done",
-    "batch_import",
-    "alias_transfer",
-    "app",
-    "delete_account",
-    "notification",
-    "support",
-]
@@ -13,10 +13,10 @@ from app import config, parallel_limiter
 from app.dashboard.base import dashboard_bp
 from app.db import Session
 from app.email_utils import (
+    is_valid_email,
     generate_reply_email,
     parse_full_address,
 )
-from app.email_validation import is_valid_email
 from app.errors import (
     CannotCreateContactForReverseAlias,
     ErrContactErrorUpgradeNeeded,
@@ -90,7 +90,7 @@ def create_contact(user: User, alias: Alias, contact_address: str) -> Contact:
         alias_id=alias.id,
         website_email=contact_email,
         name=contact_name,
-        reply_email=generate_reply_email(contact_email, alias),
+        reply_email=generate_reply_email(contact_email, user),
     )

     LOG.d(
@@ -87,6 +87,6 @@ def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
             contact=contact,
         )
         logs.append(al)
-    logs = sorted(logs, key=lambda log: log.when, reverse=True)
+    logs = sorted(logs, key=lambda l: l.when, reverse=True)

     return logs
@@ -7,19 +7,79 @@ from flask import render_template, redirect, url_for, flash, request
 from flask_login import login_required, current_user

 from app import config
-from app.alias_utils import transfer_alias
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
+from app.email_utils import send_email, render
 from app.extensions import limiter
 from app.log import LOG
 from app.models import (
     Alias,
+    Contact,
+    AliasUsedOn,
+    AliasMailbox,
+    User,
+    ClientUser,
 )
 from app.models import Mailbox
 from app.utils import CSRFValidationForm


+def transfer(alias, new_user, new_mailboxes: [Mailbox]):
+    # cannot transfer alias which is used for receiving newsletter
+    if User.get_by(newsletter_alias_id=alias.id):
+        raise Exception("Cannot transfer alias that's used to receive newsletter")
+
+    # update user_id
+    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
+        {"user_id": new_user.id}
+    )
+
+    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
+        {"user_id": new_user.id}
+    )
+
+    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
+        {"user_id": new_user.id}
+    )
+
+    # remove existing mailboxes from the alias
+    Session.query(AliasMailbox).filter(AliasMailbox.alias_id == alias.id).delete()
+
+    # set mailboxes
+    alias.mailbox_id = new_mailboxes.pop().id
+    for mb in new_mailboxes:
+        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)
+
+    # alias has never been transferred before
+    if not alias.original_owner_id:
+        alias.original_owner_id = alias.user_id
+
+    # inform previous owner
+    old_user = alias.user
+    send_email(
+        old_user.email,
+        f"Alias {alias.email} has been received",
+        render(
+            "transactional/alias-transferred.txt",
+            alias=alias,
+        ),
+        render(
+            "transactional/alias-transferred.html",
+            alias=alias,
+        ),
+    )
+
+    # now the alias belongs to the new user
+    alias.user_id = new_user.id
+
+    # set some fields back to default
+    alias.disable_pgp = False
+    alias.pinned = False
+
+    Session.commit()
+
+
 def hmac_alias_transfer_token(transfer_token: str) -> str:
     alias_hmac = hmac.new(
         config.ALIAS_TRANSFER_TOKEN_SECRET.encode("utf-8"),
@@ -154,7 +214,7 @@ def alias_transfer_receive_route():
         mailboxes,
         token,
     )
-    transfer_alias(alias, current_user, mailboxes)
+    transfer(alias, current_user, mailboxes)

     # reset transfer token
     alias.transfer_token = None
@@ -3,11 +3,9 @@ from flask_login import login_required, current_user
 from flask_wtf import FlaskForm
 from wtforms import StringField, validators

-from app import config
 from app.dashboard.base import dashboard_bp
 from app.dashboard.views.enter_sudo import sudo_required
 from app.db import Session
-from app.extensions import limiter
 from app.models import ApiKey
 from app.utils import CSRFValidationForm

@@ -16,34 +14,9 @@ class NewApiKeyForm(FlaskForm):
     name = StringField("Name", validators=[validators.DataRequired()])


-def clean_up_unused_or_old_api_keys(user_id: int):
-    total_keys = ApiKey.filter_by(user_id=user_id).count()
-    if total_keys <= config.MAX_API_KEYS:
-        return
-    # Remove oldest unused
-    for api_key in (
-        ApiKey.filter_by(user_id=user_id, last_used=None)
-        .order_by(ApiKey.created_at.asc())
-        .all()
-    ):
-        Session.delete(api_key)
-        total_keys -= 1
-        if total_keys <= config.MAX_API_KEYS:
-            return
-    # Clean up oldest used
-    for api_key in (
-        ApiKey.filter_by(user_id=user_id).order_by(ApiKey.last_used.asc()).all()
-    ):
-        Session.delete(api_key)
-        total_keys -= 1
-        if total_keys <= config.MAX_API_KEYS:
-            return
-
-
 @dashboard_bp.route("/api_key", methods=["GET", "POST"])
 @login_required
 @sudo_required
-@limiter.limit("10/hour")
 def api_key():
     api_keys = (
         ApiKey.filter(ApiKey.user_id == current_user.id)
@@ -77,7 +50,6 @@ def api_key():

     elif request.form.get("form-name") == "create":
         if new_api_key_form.validate():
-            clean_up_unused_or_old_api_keys(current_user.id)
             new_api_key = ApiKey.create(
                 name=new_api_key_form.name.data, user_id=current_user.id
             )
@@ -1,9 +1,14 @@
+from app.db import Session
+
+"""
+List of apps that user has used via the "Sign in with SimpleLogin"
+"""
+
 from flask import render_template, request, flash, redirect
 from flask_login import login_required, current_user
 from sqlalchemy.orm import joinedload

 from app.dashboard.base import dashboard_bp
-from app.db import Session
 from app.models import (
     ClientUser,
 )
@@ -12,10 +17,6 @@ from app.models import (
 @dashboard_bp.route("/app", methods=["GET", "POST"])
 @login_required
 def app_route():
-    """
-    List of apps that user has used via the "Sign in with SimpleLogin"
-    """
-
     client_users = (
         ClientUser.filter_by(user_id=current_user.id)
         .options(joinedload(ClientUser.client))
@@ -68,14 +68,9 @@ def coupon_route():
             )
             return redirect(request.url)

-        updated = (
-            Session.query(Coupon)
-            .filter_by(code=code, used=False)
-            .update({"used_by_user_id": current_user.id, "used": True})
-        )
-        if updated != 1:
-            flash("Coupon is not valid", "error")
-            return redirect(request.url)
+        coupon.used_by_user_id = current_user.id
+        coupon.used = True
+        Session.commit()

         manual_sub: ManualSubscription = ManualSubscription.get_by(
             user_id=current_user.id
@@ -100,7 +95,7 @@ def coupon_route():
             commit=True,
         )
         flash(
-            "Your account has been upgraded to Premium, thanks for your support!",
+            f"Your account has been upgraded to Premium, thanks for your support!",
             "success",
         )

@@ -67,7 +67,7 @@ def directory():
     if request.method == "POST":
         if request.form.get("form-name") == "delete":
             if not delete_dir_form.validate():
-                flash("Invalid request", "warning")
+                flash(f"Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_obj = Directory.get(delete_dir_form.directory_id.data)

@@ -87,7 +87,7 @@ def directory():

         if request.form.get("form-name") == "toggle-directory":
             if not toggle_dir_form.validate():
-                flash("Invalid request", "warning")
+                flash(f"Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_id = toggle_dir_form.directory_id.data
             dir_obj = Directory.get(dir_id)
@@ -109,7 +109,7 @@ def directory():

         elif request.form.get("form-name") == "update":
             if not update_dir_form.validate():
-                flash("Invalid request", "warning")
+                flash(f"Invalid request", "warning")
                 return redirect(url_for("dashboard.directory"))
             dir_id = update_dir_form.directory_id.data
             dir_obj = Directory.get(dir_id)
@@ -8,7 +8,6 @@ from wtforms import PasswordField, validators

 from app.config import CONNECT_WITH_PROTON
 from app.dashboard.base import dashboard_bp
-from app.extensions import limiter
 from app.log import LOG
 from app.models import PartnerUser
 from app.proton.utils import get_proton_partner
@@ -22,7 +21,6 @@ class LoginForm(FlaskForm):


 @dashboard_bp.route("/enter_sudo", methods=["GET", "POST"])
-@limiter.limit("3/minute")
 @login_required
 def enter_sudo():
     password_check_form = LoginForm()
@@ -57,10 +57,6 @@ def get_stats(user: User) -> Stats:
     methods=["POST"],
     exempt_when=lambda: request.form.get("form-name") != "create-random-email",
 )
-@limiter.limit(
-    "5/minute",
-    methods=["GET"],
-)
 @login_required
 @parallel_limiter.lock(
     name="alias_creation",
@@ -1,7 +1,3 @@
-import base64
-import binascii
-import json
-
 import arrow
 from flask import render_template, request, redirect, url_for, flash
 from flask_login import login_required, current_user
@@ -19,8 +15,8 @@ from app.email_utils import (
     mailbox_already_used,
     render,
     send_email,
+    is_valid_email,
 )
-from app.email_validation import is_valid_email
 from app.log import LOG
 from app.models import Mailbox, Job
 from app.utils import CSRFValidationForm
@@ -184,9 +180,7 @@ def mailbox_route():

 def send_verification_email(user, mailbox):
     s = TimestampSigner(MAILBOX_SECRET)
-    encoded_data = json.dumps([mailbox.id, mailbox.email]).encode("utf-8")
-    b64_data = base64.urlsafe_b64encode(encoded_data)
-    mailbox_id_signed = s.sign(b64_data).decode()
+    mailbox_id_signed = s.sign(str(mailbox.id)).decode()
     verification_url = (
         URL + "/dashboard/mailbox_verify" + f"?mailbox_id={mailbox_id_signed}"
     )
@ -211,30 +205,18 @@ def send_verification_email(user, mailbox):
|
||||||
@dashboard_bp.route("/mailbox_verify")
|
@dashboard_bp.route("/mailbox_verify")
|
||||||
def mailbox_verify():
|
def mailbox_verify():
|
||||||
s = TimestampSigner(MAILBOX_SECRET)
|
s = TimestampSigner(MAILBOX_SECRET)
|
||||||
mailbox_verify_request = request.args.get("mailbox_id")
|
mailbox_id = request.args.get("mailbox_id")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
mailbox_raw_data = s.unsign(mailbox_verify_request, max_age=900)
|
r_id = int(s.unsign(mailbox_id, max_age=900))
|
||||||
except Exception:
|
except Exception:
|
||||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
||||||
return redirect(url_for("dashboard.mailbox_route"))
|
return redirect(url_for("dashboard.mailbox_route"))
|
||||||
try:
|
else:
|
||||||
decoded_data = base64.urlsafe_b64decode(mailbox_raw_data)
|
mailbox = Mailbox.get(r_id)
|
||||||
except binascii.Error:
|
|
||||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
|
||||||
return redirect(url_for("dashboard.mailbox_route"))
|
|
||||||
mailbox_data = json.loads(decoded_data)
|
|
||||||
if not isinstance(mailbox_data, list) or len(mailbox_data) != 2:
|
|
||||||
flash("Invalid link. Please delete and re-add your mailbox", "error")
|
|
||||||
return redirect(url_for("dashboard.mailbox_route"))
|
|
||||||
mailbox_id = mailbox_data[0]
|
|
||||||
mailbox = Mailbox.get(mailbox_id)
|
|
||||||
if not mailbox:
|
if not mailbox:
|
||||||
flash("Invalid link", "error")
|
flash("Invalid link", "error")
|
||||||
return redirect(url_for("dashboard.mailbox_route"))
|
return redirect(url_for("dashboard.mailbox_route"))
|
||||||
mailbox_email = mailbox_data[1]
|
|
||||||
if mailbox_email != mailbox.email:
|
|
||||||
flash("Invalid link", "error")
|
|
||||||
return redirect(url_for("dashboard.mailbox_route"))
|
|
||||||
|
|
||||||
mailbox.verified = True
|
mailbox.verified = True
|
||||||
Session.commit()
|
Session.commit()
|
||||||
|
|
|
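For context on the `send_verification_email`/`mailbox_verify` change above: master signs a base64-encoded JSON payload of `[mailbox.id, mailbox.email]` so that the link stops working if the mailbox email changes, while this branch only signs the id. A minimal sketch of the payload-signing scheme, assuming itsdangerous' `TimestampSigner` (the class the diff already uses) and a made-up secret:

```python
import base64
import json

from itsdangerous import BadSignature, SignatureExpired, TimestampSigner

MAILBOX_SECRET = "change-me"  # hypothetical secret, stands in for the app config value


def make_verification_token(mailbox_id: int, mailbox_email: str) -> str:
    """Sign a base64-encoded [id, email] payload, as the master side does."""
    signer = TimestampSigner(MAILBOX_SECRET)
    payload = base64.urlsafe_b64encode(
        json.dumps([mailbox_id, mailbox_email]).encode("utf-8")
    )
    return signer.sign(payload).decode()


def check_verification_token(token: str, max_age: int = 900):
    """Return (mailbox_id, mailbox_email) or None if the token is invalid/expired."""
    signer = TimestampSigner(MAILBOX_SECRET)
    try:
        raw = signer.unsign(token, max_age=max_age)
        mailbox_id, mailbox_email = json.loads(base64.urlsafe_b64decode(raw))
    except (BadSignature, SignatureExpired, ValueError):
        return None
    return mailbox_id, mailbox_email


if __name__ == "__main__":
    token = make_verification_token(42, "me@example.com")
    print(check_verification_token(token))  # (42, 'me@example.com')
```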
@@ -30,7 +30,7 @@ class ChangeEmailForm(FlaskForm):
 @dashboard_bp.route("/mailbox/<int:mailbox_id>/", methods=["GET", "POST"])
 @login_required
 def mailbox_detail_route(mailbox_id):
-    mailbox: Mailbox = Mailbox.get(mailbox_id)
+    mailbox = Mailbox.get(mailbox_id)
     if not mailbox or mailbox.user_id != current_user.id:
         flash("You cannot see this page", "warning")
         return redirect(url_for("dashboard.index"))
@@ -144,15 +144,6 @@ def mailbox_detail_route(mailbox_id):
                 url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
             )

-        if mailbox.is_proton():
-            flash(
-                "Enabling PGP for a Proton Mail mailbox is redundant and does not add any security benefit",
-                "info",
-            )
-            return redirect(
-                url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
-            )
-
         mailbox.pgp_public_key = request.form.get("pgp")
         try:
             mailbox.pgp_finger_print = load_public_key_and_check(
@@ -191,16 +182,25 @@ def mailbox_detail_route(mailbox_id):
             )
     elif request.form.get("form-name") == "generic-subject":
         if request.form.get("action") == "save":
+            if not mailbox.pgp_enabled():
+                flash(
+                    "Generic subject can only be used on PGP-enabled mailbox",
+                    "error",
+                )
+                return redirect(
+                    url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
+                )
+
             mailbox.generic_subject = request.form.get("generic-subject")
             Session.commit()
-            flash("Generic subject is enabled", "success")
+            flash("Generic subject for PGP-encrypted email is enabled", "success")
             return redirect(
                 url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
            )
         elif request.form.get("action") == "remove":
             mailbox.generic_subject = None
             Session.commit()
-            flash("Generic subject is disabled", "success")
+            flash("Generic subject for PGP-encrypted email is disabled", "success")
             return redirect(
                 url_for("dashboard.mailbox_detail_route", mailbox_id=mailbox_id)
             )
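The PGP hunk above stores the pasted key and derives `pgp_finger_print` via the project's `load_public_key_and_check` helper, whose body is not shown here. A rough, hedged sketch of what such a check can look like, using the third-party `pgpy` library purely as a stand-in (the real project may use GnuPG instead):

```python
# Sketch only: validate a pasted armored public key and return its fingerprint.
# `pgpy` is an assumption; the helper name mirrors the one in the diff.
import pgpy


def load_public_key_and_check(armored_key: str) -> str:
    """Return the key fingerprint, or raise ValueError for unusable keys."""
    try:
        key, _ = pgpy.PGPKey.from_blob(armored_key)
    except Exception as e:
        raise ValueError(f"Cannot parse PGP key: {e}")
    if not key.is_public:
        raise ValueError("A public key is expected, not a private key")
    if key.is_expired:
        raise ValueError("The key is expired")
    return str(key.fingerprint)
```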
@@ -128,6 +128,7 @@ def setting():
             new_email_valid = True
             new_email = canonicalize_email(change_email_form.email.data)
             if new_email != current_user.email and not pending_email:
+
                 # check if this email is not already used
                 if personal_email_already_used(new_email) or Alias.get_by(
                     email=new_email
@@ -197,16 +198,6 @@ def setting():
                 )
                 return redirect(url_for("dashboard.setting"))

-            if current_user.profile_picture_id is not None:
-                current_profile_file = File.get_by(
-                    id=current_user.profile_picture_id
-                )
-                if (
-                    current_profile_file is not None
-                    and current_profile_file.user_id == current_user.id
-                ):
-                    s3.delete(current_profile_file.path)
-
             file_path = random_string(30)
             file = File.create(user_id=current_user.id, path=file_path)

@@ -460,13 +451,8 @@ def send_change_email_confirmation(user: User, email_change: EmailChange):


 @dashboard_bp.route("/resend_email_change", methods=["GET", "POST"])
-@limiter.limit("5/hour")
 @login_required
 def resend_email_change():
-    form = CSRFValidationForm()
-    if not form.validate():
-        flash("Invalid request. Please try again", "warning")
-        return redirect(url_for("dashboard.setting"))
     email_change = EmailChange.get_by(user_id=current_user.id)
     if email_change:
         # extend email change expiration
@@ -486,10 +472,6 @@ def resend_email_change():
 @dashboard_bp.route("/cancel_email_change", methods=["GET", "POST"])
 @login_required
 def cancel_email_change():
-    form = CSRFValidationForm()
-    if not form.validate():
-        flash("Invalid request. Please try again", "warning")
-        return redirect(url_for("dashboard.setting"))
     email_change = EmailChange.get_by(user_id=current_user.id)
     if email_change:
         EmailChange.delete(email_change.id)
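On master, `resend_email_change` and `cancel_email_change` are protected by a Flask-Limiter rate limit and a CSRF check that this branch lacks. A minimal sketch of that guard pattern, assuming Flask-Limiter and Flask-WTF are the extensions in play (an assumption about the stack; the route and form names are reused from the diff):

```python
# Sketch: rate limiting plus CSRF validation on a state-changing route.
from flask import Flask, flash, redirect, url_for
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from flask_wtf import FlaskForm

app = Flask(__name__)
app.config["SECRET_KEY"] = "change-me"  # required by Flask-WTF for CSRF tokens
limiter = Limiter(key_func=get_remote_address)
limiter.init_app(app)


class CSRFValidationForm(FlaskForm):
    """Empty form: only the hidden CSRF token gets validated."""


@app.route("/resend_email_change", methods=["GET", "POST"])
@limiter.limit("5/hour")
def resend_email_change():
    form = CSRFValidationForm()
    if not form.validate():
        flash("Invalid request. Please try again", "warning")
        return redirect(url_for("resend_email_change"))
    # ... re-send the confirmation email here ...
    return "ok"
```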
@@ -75,11 +75,12 @@ def block_contact(contact_id):
 @dashboard_bp.route("/unsubscribe/encoded/<encoded_request>", methods=["GET"])
 @login_required
 def encoded_unsubscribe(encoded_request: str):
+
     unsub_data = UnsubscribeHandler().handle_unsubscribe_from_request(
         current_user, encoded_request
     )
     if not unsub_data:
-        flash("Invalid unsubscribe request", "error")
+        flash(f"Invalid unsubscribe request", "error")
         return redirect(url_for("dashboard.index"))
     if unsub_data.action == UnsubscribeAction.DisableAlias:
         alias = Alias.get(unsub_data.data)
@@ -96,14 +97,14 @@ def encoded_unsubscribe(encoded_request: str):
             )
         )
     if unsub_data.action == UnsubscribeAction.UnsubscribeNewsletter:
-        flash("You've unsubscribed from the newsletter", "success")
+        flash(f"You've unsubscribed from the newsletter", "success")
         return redirect(
             url_for(
                 "dashboard.index",
             )
         )
     if unsub_data.action == UnsubscribeAction.OriginalUnsubscribeMailto:
-        flash("The original unsubscribe request has been forwarded", "success")
+        flash(f"The original unsubscribe request has been forwarded", "success")
         return redirect(
             url_for(
                 "dashboard.index",
@@ -1,3 +1 @@
 from .views import index, new_client, client_detail
-
-__all__ = ["index", "new_client", "client_detail"]
@@ -87,7 +87,7 @@ def client_detail(client_id):
         )

         flash(
-            "Thanks for submitting, we are informed and will come back to you asap!",
+            f"Thanks for submitting, we are informed and will come back to you asap!",
             "success",
         )

@@ -1,3 +1 @@
 from .views import index
-
-__all__ = ["index"]
@@ -34,7 +34,7 @@ def get_cname_record(hostname) -> Optional[str]:


 def get_mx_domains(hostname) -> [(int, str)]:
-    """return list of (priority, domain name) sorted by priority (lowest priority first)
+    """return list of (priority, domain name).
     domain name ends with a "." at the end.
     """
     try:
@@ -50,7 +50,7 @@ def get_mx_domains(hostname) -> [(int, str)]:

             ret.append((int(parts[0]), parts[1]))

-    return sorted(ret, key=lambda prio_domain: prio_domain[0])
+    return ret


 _include_spf = "include:"
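Master sorts the MX answers by priority before returning them; this branch returns them in DNS-answer order. A small self-contained illustration of the sort, assuming MX answers formatted as `"<priority> <host>."` (the shape the surrounding code parses):

```python
# Sort MX records by priority (lowest first), as the master side of the diff does.
raw_answers = ["20 mx2.example.com.", "10 mx1.example.com.", "30 backup.example.com."]

ret = []
for answer in raw_answers:
    parts = answer.split(" ")
    ret.append((int(parts[0]), parts[1]))

# Without the sort, the order depends on whatever order the DNS server answered in.
ret = sorted(ret, key=lambda prio_domain: prio_domain[0])
print(ret)  # [(10, 'mx1.example.com.'), (20, 'mx2.example.com.'), (30, 'backup.example.com.')]
```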
@@ -20,7 +20,6 @@ X_SPAM_STATUS = "X-Spam-Status"
 LIST_UNSUBSCRIBE = "List-Unsubscribe"
 LIST_UNSUBSCRIBE_POST = "List-Unsubscribe-Post"
 RETURN_PATH = "Return-Path"
-AUTHENTICATION_RESULTS = "Authentication-Results"

 # headers used to DKIM sign in order of preference
 DKIM_HEADERS = [
@@ -33,7 +32,6 @@ DKIM_HEADERS = [
 SL_DIRECTION = "X-SimpleLogin-Type"
 SL_EMAIL_LOG_ID = "X-SimpleLogin-EmailLog-ID"
 SL_ENVELOPE_FROM = "X-SimpleLogin-Envelope-From"
-SL_ORIGINAL_FROM = "X-SimpleLogin-Original-From"
 SL_ENVELOPE_TO = "X-SimpleLogin-Envelope-To"
 SL_CLIENT_IP = "X-SimpleLogin-Client-IP"

@@ -54,7 +54,6 @@ from app.models import (
     IgnoreBounceSender,
     InvalidMailboxDomain,
     VerpType,
-    available_sl_email,
 )
 from app.utils import (
     random_string,
@@ -93,7 +92,7 @@ def send_welcome_email(user):

     send_email(
         comm_email,
-        "Welcome to SimpleLogin",
+        f"Welcome to SimpleLogin",
         render("com/welcome.txt", user=user, alias=alias),
         render("com/welcome.html", user=user, alias=alias),
         unsubscribe_link,
@@ -104,7 +103,7 @@ def send_welcome_email(user):
 def send_trial_end_soon_email(user):
     send_email(
         user.email,
-        "Your trial will end soon",
+        f"Your trial will end soon",
         render("transactional/trial-end.txt.jinja2", user=user),
         render("transactional/trial-end.html", user=user),
         ignore_smtp_error=True,
@@ -114,7 +113,7 @@ def send_trial_end_soon_email(user):
 def send_activation_email(email, activation_link):
     send_email(
         email,
-        "Just one more step to join SimpleLogin",
+        f"Just one more step to join SimpleLogin",
         render(
             "transactional/activation.txt",
             activation_link=activation_link,
@@ -768,7 +767,7 @@ def get_header_unicode(header: Union[str, Header]) -> str:
     ret = ""
     for to_decoded_str, charset in decode_header(header):
         if charset is None:
-            if isinstance(to_decoded_str, bytes):
+            if type(to_decoded_str) is bytes:
                 decoded_str = to_decoded_str.decode()
             else:
                 decoded_str = to_decoded_str
@@ -805,13 +804,13 @@ def to_bytes(msg: Message):
     for generator_policy in [None, policy.SMTP, policy.SMTPUTF8]:
         try:
             return msg.as_bytes(policy=generator_policy)
-        except Exception:
+        except:
             LOG.w("as_bytes() fails with %s policy", policy, exc_info=True)

     msg_string = msg.as_string()
     try:
         return msg_string.encode()
-    except Exception:
+    except:
         LOG.w("as_string().encode() fails", exc_info=True)

     return msg_string.encode(errors="replace")
@@ -828,6 +827,19 @@ def should_add_dkim_signature(domain: str) -> bool:
     return False


+def is_valid_email(email_address: str) -> bool:
+    """
+    Used to check whether an email address is valid
+    NOT run MX check.
+    NOT allow unicode.
+    """
+    try:
+        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
+        return True
+    except EmailNotValidError:
+        return False
+
+
 class EmailEncoding(enum.Enum):
     BASE64 = "base64"
     QUOTED = "quoted-printable"
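This branch re-inlines `is_valid_email` into `app/email_utils.py`, while master keeps it in `app/email_validation.py` (deleted further down in this diff). A quick standalone usage sketch of the same check, assuming the `email_validator` package that both versions rely on:

```python
from email_validator import EmailNotValidError, validate_email


def is_valid_email(email_address: str) -> bool:
    """Syntax-only check: no MX lookup, no internationalized addresses."""
    try:
        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
        return True
    except EmailNotValidError:
        return False


print(is_valid_email("user@example.com"))   # True
print(is_valid_email("not-an-email"))       # False
print(is_valid_email("usér@example.com"))   # False: unicode local part rejected
```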
@@ -906,7 +918,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
     if content_type == "text/plain":
         encoding = get_encoding(msg)
         payload = msg.get_payload()
-        if isinstance(payload, str):
+        if type(payload) is str:
             clone_msg = copy(msg)
             new_payload = f"""{text_header}
 ------------------------------
@@ -916,7 +928,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
     elif content_type == "text/html":
         encoding = get_encoding(msg)
         payload = msg.get_payload()
-        if isinstance(payload, str):
+        if type(payload) is str:
             new_payload = f"""<table width="100%" style="width: 100%; -premailer-width: 100%; -premailer-cellpadding: 0;
   -premailer-cellspacing: 0; margin: 0; padding: 0;">
     <tr>
@@ -938,8 +950,6 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:
         for part in msg.get_payload():
             if isinstance(part, Message):
                 new_parts.append(add_header(part, text_header, html_header))
-            elif isinstance(part, str):
-                new_parts.append(MIMEText(part))
             else:
                 new_parts.append(part)
         clone_msg = copy(msg)
@@ -948,14 +958,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:

     elif content_type in ("multipart/mixed", "multipart/signed"):
         new_parts = []
-        payload = msg.get_payload()
-        if isinstance(payload, str):
-            # The message is badly formatted inject as new
-            new_parts = [MIMEText(text_header, "plain"), MIMEText(payload, "plain")]
-            clone_msg = copy(msg)
-            clone_msg.set_payload(new_parts)
-            return clone_msg
-        parts = list(payload)
+        parts = list(msg.get_payload())
         LOG.d("only add header for the first part for %s", content_type)
         for ix, part in enumerate(parts):
             if ix == 0:
@@ -972,7 +975,7 @@ def add_header(msg: Message, text_header, html_header=None) -> Message:


 def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
-    if isinstance(msg, str):
+    if type(msg) is str:
         msg = msg.replace(old, new)
         return msg

@@ -995,7 +998,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
     if content_type in ("text/plain", "text/html"):
         encoding = get_encoding(msg)
         payload = msg.get_payload()
-        if isinstance(payload, str):
+        if type(payload) is str:
             if encoding == EmailEncoding.QUOTED:
                 LOG.d("handle quoted-printable replace %s -> %s", old, new)
                 # first decode the payload
@@ -1040,7 +1043,7 @@ def replace(msg: Union[Message, str], old, new) -> Union[Message, str]:
     return msg


-def generate_reply_email(contact_email: str, alias: Alias) -> str:
+def generate_reply_email(contact_email: str, user: User) -> str:
     """
     generate a reply_email (aka reverse-alias), make sure it isn't used by any contact
     """
@@ -1051,7 +1054,6 @@ def generate_reply_email(contact_email: str, alias: Alias) -> str:

     include_sender_in_reverse_alias = False

-    user = alias.user
     # user has set this option explicitly
     if user.include_sender_in_reverse_alias is not None:
         include_sender_in_reverse_alias = user.include_sender_in_reverse_alias
@@ -1066,12 +1068,6 @@ def generate_reply_email(contact_email: str, alias: Alias) -> str:
         contact_email = contact_email.replace(".", "_")
         contact_email = convert_to_alphanumeric(contact_email)

-    reply_domain = config.EMAIL_DOMAIN
-    alias_domain = get_email_domain_part(alias.email)
-    sl_domain = SLDomain.get_by(domain=alias_domain)
-    if sl_domain and sl_domain.use_as_reverse_alias:
-        reply_domain = alias_domain
-
     # not use while to avoid infinite loop
     for _ in range(1000):
         if include_sender_in_reverse_alias and contact_email:
@@ -1079,15 +1075,15 @@ def generate_reply_email(contact_email: str, alias: Alias) -> str:
             reply_email = (
                 # do not use the ra+ anymore
                 # f"ra+{contact_email}+{random_string(random_length)}@{config.EMAIL_DOMAIN}"
-                f"{contact_email}_{random_string(random_length)}@{reply_domain}"
+                f"{contact_email}_{random_string(random_length)}@{config.EMAIL_DOMAIN}"
             )
         else:
             random_length = random.randint(20, 50)
             # do not use the ra+ anymore
             # reply_email = f"ra+{random_string(random_length)}@{config.EMAIL_DOMAIN}"
-            reply_email = f"{random_string(random_length)}@{reply_domain}"
+            reply_email = f"{random_string(random_length)}@{config.EMAIL_DOMAIN}"

-        if available_sl_email(reply_email):
+        if not Contact.get_by(reply_email=reply_email):
             return reply_email

     raise Exception("Cannot generate reply email")
@@ -1103,6 +1099,26 @@ def is_reverse_alias(address: str) -> bool:
     )


+# allow also + and @ that are present in a reply address
+_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
+
+
+def normalize_reply_email(reply_email: str) -> str:
+    """Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
+    if not reply_email.isascii():
+        reply_email = convert_to_id(reply_email)
+
+    ret = []
+    # drop all control characters like shift, separator, etc
+    for c in reply_email:
+        if c not in _ALLOWED_CHARS:
+            ret.append("_")
+        else:
+            ret.append(c)
+
+    return "".join(ret)
+
+
 def should_disable(alias: Alias) -> (bool, str):
     """
     Return whether an alias should be disabled and if yes, the reason why
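The hunks above change how the reverse-alias address is built (master can reuse the alias' own SimpleLogin domain via `use_as_reverse_alias`, this branch always uses `config.EMAIL_DOMAIN`) and where `normalize_reply_email` lives. A self-contained sketch of the core construction, with made-up helpers standing in for the app's `convert_to_alphanumeric`/`random_string`, and an example domain:

```python
import random
import string

_ALLOWED_CHARS = string.ascii_letters + string.digits + "_-.+@"


def random_string(length: int) -> str:
    return "".join(random.choice(string.ascii_lowercase) for _ in range(length))


def build_reverse_alias(contact_email: str, reply_domain: str, include_sender: bool) -> str:
    """Build a reverse-alias address, optionally embedding the sender address."""
    if include_sender:
        # keep only characters that are safe in a local part
        sender_part = "".join(
            c if c in _ALLOWED_CHARS and c not in "@+" else "_" for c in contact_email
        )
        return f"{sender_part}_{random_string(random.randint(5, 10))}@{reply_domain}"
    return f"{random_string(random.randint(20, 50))}@{reply_domain}"


print(build_reverse_alias("alice@shop.example", "example-relay.com", include_sender=True))
print(build_reverse_alias("alice@shop.example", "example-relay.com", include_sender=False))
```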
@@ -1,38 +0,0 @@
-from email_validator import (
-    validate_email,
-    EmailNotValidError,
-)
-
-from app.utils import convert_to_id
-
-# allow also + and @ that are present in a reply address
-_ALLOWED_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.+@"
-
-
-def is_valid_email(email_address: str) -> bool:
-    """
-    Used to check whether an email address is valid
-    NOT run MX check.
-    NOT allow unicode.
-    """
-    try:
-        validate_email(email_address, check_deliverability=False, allow_smtputf8=False)
-        return True
-    except EmailNotValidError:
-        return False
-
-
-def normalize_reply_email(reply_email: str) -> str:
-    """Handle the case where reply email contains *strange* char that was wrongly generated in the past"""
-    if not reply_email.isascii():
-        reply_email = convert_to_id(reply_email)
-
-    ret = []
-    # drop all control characters like shift, separator, etc
-    for c in reply_email:
-        if c not in _ALLOWED_CHARS:
-            ret.append("_")
-        else:
-            ret.append(c)
-
-    return "".join(ret)
@@ -84,14 +84,6 @@ class ErrAddressInvalid(SLException):
         return f"{self.address} is not a valid email address"


-class InvalidContactEmailError(SLException):
-    def __init__(self, website_email: str):  # noqa: F821
-        self.website_email = website_email
-
-    def error_for_user(self) -> str:
-        return f"Cannot create contact with invalid email {self.website_email}"
-
-
 class ErrContactAlreadyExists(SLException):
     """raised when a contact already exists"""

@@ -121,10 +113,3 @@ class AccountAlreadyLinkedToAnotherUserException(LinkException):
 class AccountIsUsingAliasAsEmail(LinkException):
     def __init__(self):
         super().__init__("Your account has an alias as it's email address")
-
-
-class ProtonAccountNotVerified(LinkException):
-    def __init__(self):
-        super().__init__(
-            "The Proton account you are trying to use has not been verified"
-        )
@@ -9,7 +9,6 @@ class LoginEvent:
     failed = 1
     disabled_login = 2
     not_activated = 3
-    scheduled_to_be_deleted = 4

     class Source(EnumE):
         web = 0
@@ -34,10 +34,10 @@ def apply_dmarc_policy_for_forward_phase(

     from_header = get_header_unicode(msg[headers.FROM])

-    warning_plain_text = """This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
+    warning_plain_text = f"""This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
 More info on https://simplelogin.io/docs/getting-started/anti-phishing/
 """
-    warning_html = """
+    warning_html = f"""
     <p style="color:red">
         This email failed anti-phishing checks when it was received by SimpleLogin, be careful with its content.
         More info on <a href="https://simplelogin.io/docs/getting-started/anti-phishing/">anti-phishing measure</a>
@@ -221,7 +221,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
         return True

     if is_deleted_alias(msg_info.sender_address):
-        LOG.i("Complaint is for deleted alias. Do nothing")
+        LOG.i(f"Complaint is for deleted alias. Do nothing")
         return True

     contact = Contact.get_by(reply_email=msg_info.sender_address)
@@ -231,7 +231,7 @@ def handle_complaint(message: Message, origin: ProviderComplaintOrigin) -> bool:
     alias = find_alias_with_address(msg_info.rcpt_address)

     if is_deleted_alias(msg_info.rcpt_address):
-        LOG.i("Complaint is for deleted alias. Do nothing")
+        LOG.i(f"Complaint is for deleted alias. Do nothing")
         return True

     if not alias:
@@ -54,8 +54,9 @@ class UnsubscribeEncoder:
     def encode_subject(
         cls, action: UnsubscribeAction, data: Union[int, UnsubscribeOriginalData]
     ) -> str:
-        if action != UnsubscribeAction.OriginalUnsubscribeMailto and not isinstance(
-            data, int
+        if (
+            action != UnsubscribeAction.OriginalUnsubscribeMailto
+            and type(data) is not int
         ):
             raise ValueError(f"Data has to be an int for an action of type {action}")
         if action == UnsubscribeAction.OriginalUnsubscribeMailto:
@@ -73,8 +74,8 @@ class UnsubscribeEncoder:
         )
         signed_data = cls._get_signer().sign(serialized_data).decode("utf-8")
         encoded_request = f"{UNSUB_PREFIX}.{signed_data}"
-        if len(encoded_request) > 512:
-            LOG.w("Encoded request is longer than 512 chars")
+        if len(encoded_request) > 256:
+            LOG.e("Encoded request is longer than 256 chars")
         return encoded_request

     @staticmethod
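The hunk above signs a serialized payload, prefixes it, and warns once the encoded request grows past a length budget (512 chars on master, 256 on this branch). A minimal sketch of that encode/decode round trip, assuming itsdangerous' `Signer` with made-up prefix and secret values:

```python
import json

from itsdangerous import BadSignature, Signer

UNSUB_PREFIX = "un"      # hypothetical prefix, mirroring the diff
SECRET = "change-me"     # stands in for the app's unsubscribe secret
MAX_LEN = 512            # length budget checked on the master side


def encode_unsubscribe(action: str, data) -> str:
    serialized = json.dumps({"action": action, "data": data})
    signed = Signer(SECRET).sign(serialized).decode("utf-8")
    encoded_request = f"{UNSUB_PREFIX}.{signed}"
    if len(encoded_request) > MAX_LEN:
        print(f"warning: encoded request is longer than {MAX_LEN} chars")
    return encoded_request


def decode_unsubscribe(encoded_request: str):
    prefix, _, signed = encoded_request.partition(".")
    if prefix != UNSUB_PREFIX:
        return None
    try:
        payload = Signer(SECRET).unsign(signed)
    except BadSignature:
        return None
    return json.loads(payload)


token = encode_unsubscribe("UnsubscribeNewsletter", 123)
print(decode_unsubscribe(token))  # {'action': 'UnsubscribeNewsletter', 'data': 123}
```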
@@ -1,5 +1,4 @@
 import urllib
-from email.header import Header
 from email.message import Message

 from app.email import headers
@@ -10,7 +9,6 @@ from app.handler.unsubscribe_encoder import (
     UnsubscribeData,
     UnsubscribeOriginalData,
 )
-from app.log import LOG
 from app.models import Alias, Contact, UnsubscribeBehaviourEnum


@@ -32,10 +30,7 @@ class UnsubscribeGenerator:
         """
         unsubscribe_data = message[headers.LIST_UNSUBSCRIBE]
         if not unsubscribe_data:
-            LOG.info("Email has no unsubscribe header")
             return message
-        if isinstance(unsubscribe_data, Header):
-            unsubscribe_data = str(unsubscribe_data.encode())
         raw_methods = [method.strip() for method in unsubscribe_data.split(",")]
         mailto_unsubs = None
         other_unsubs = []
@@ -49,9 +44,7 @@ class UnsubscribeGenerator:
             if url_data.scheme == "mailto":
                 query_data = urllib.parse.parse_qs(url_data.query)
                 mailto_unsubs = (url_data.path, query_data.get("subject", [""])[0])
-                LOG.debug(f"Unsub is mailto to {mailto_unsubs}")
             else:
-                LOG.debug(f"Unsub has {url_data.scheme} scheme")
                 other_unsubs.append(method)
         # If there are non mailto unsubscribe methods, use those in the header
         if other_unsubs:
@@ -63,19 +56,18 @@ class UnsubscribeGenerator:
             add_or_replace_header(
                 message, headers.LIST_UNSUBSCRIBE_POST, "List-Unsubscribe=One-Click"
             )
-            LOG.debug(f"Adding click unsub methods to header {other_unsubs}")
             return message
-        elif not mailto_unsubs:
-            LOG.debug("No unsubs. Deleting all unsub headers")
-            delete_header(message, headers.LIST_UNSUBSCRIBE)
-            delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
+        if not mailto_unsubs:
+            message = delete_header(message, headers.LIST_UNSUBSCRIBE)
+            message = delete_header(message, headers.LIST_UNSUBSCRIBE_POST)
             return message
-        unsub_data = UnsubscribeData(
-            UnsubscribeAction.OriginalUnsubscribeMailto,
-            UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
-        )
-        LOG.debug(f"Adding unsub data {unsub_data}")
-        return self._add_unsubscribe_header(message, unsub_data)
+        return self._add_unsubscribe_header(
+            message,
+            UnsubscribeData(
+                UnsubscribeAction.OriginalUnsubscribeMailto,
+                UnsubscribeOriginalData(alias.id, mailto_unsubs[0], mailto_unsubs[1]),
+            ),
+        )

     def _add_unsubscribe_header(
         self, message: Message, unsub: UnsubscribeData
@@ -30,7 +30,7 @@ def handle_batch_import(batch_import: BatchImport):

     LOG.d("Download file %s from %s", batch_import.file, file_url)
     r = requests.get(file_url)
-    lines = [line.decode("utf-8") for line in r.iter_lines()]
+    lines = [line.decode() for line in r.iter_lines()]

     import_from_csv(batch_import, user, lines)

@@ -1,4 +1,2 @@
 from .integrations import set_enable_proton_cookie
 from .exit_sudo import exit_sudo_mode
-
-__all__ = ["set_enable_proton_cookie", "exit_sudo_mode"]
@@ -39,8 +39,9 @@ from app.models import (


 class ExportUserDataJob:
+
     REMOVE_FIELDS = {
-        "User": ("otp_secret", "password"),
+        "User": ("otp_secret",),
         "Alias": ("ts_vector", "transfer_token", "hibp_last_check"),
         "CustomDomain": ("ownership_txt_token",),
     }
@@ -22,6 +22,7 @@ from app.message_utils import message_to_bytes, message_format_base64_parts

 @dataclass
 class SendRequest:
+
     SAVE_EXTENSION = "sendrequest"

     envelope_from: str
@@ -31,7 +32,6 @@ class SendRequest:
     rcpt_options: Dict = {}
     is_forward: bool = False
     ignore_smtp_errors: bool = False
-    retries: int = 0

     def to_bytes(self) -> bytes:
         if not config.SAVE_UNSENT_DIR:
@@ -45,7 +45,6 @@ class SendRequest:
             "mail_options": self.mail_options,
             "rcpt_options": self.rcpt_options,
             "is_forward": self.is_forward,
-            "retries": self.retries,
         }
         return json.dumps(data).encode("utf-8")

@@ -66,33 +65,8 @@ class SendRequest:
             mail_options=decoded_data["mail_options"],
             rcpt_options=decoded_data["rcpt_options"],
             is_forward=decoded_data["is_forward"],
-            retries=decoded_data.get("retries", 1),
         )

-    def save_request_to_unsent_dir(self, prefix: str = "DeliveryFail"):
-        file_name = (
-            f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
-        )
-        file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
-        self.save_request_to_file(file_path)
-
-    @staticmethod
-    def save_request_to_failed_dir(self, prefix: str = "DeliveryRetryFail"):
-        file_name = (
-            f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
-        )
-        dir_name = os.path.join(config.SAVE_UNSENT_DIR, "failed")
-        if not os.path.isdir(dir_name):
-            os.makedirs(dir_name)
-        file_path = os.path.join(dir_name, file_name)
-        self.save_request_to_file(file_path)
-
-    def save_request_to_file(self, file_path: str):
-        file_contents = self.to_bytes()
-        with open(file_path, "wb") as fd:
-            fd.write(file_contents)
-        LOG.i(f"Saved unsent message {file_path}")
-

 class MailSender:
     def __init__(self):
@@ -197,9 +171,21 @@ class MailSender:
                 f"Could not send message to smtp server {config.POSTFIX_SERVER}:{config.POSTFIX_PORT}"
             )
             if config.SAVE_UNSENT_DIR:
-                send_request.save_request_to_unsent_dir()
+                self._save_request_to_unsent_dir(send_request)
             return False

+    def _save_request_to_unsent_dir(
+        self, send_request: SendRequest, prefix: str = "DeliveryFail"
+    ):
+        file_name = (
+            f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SendRequest.SAVE_EXTENSION}"
+        )
+        file_path = os.path.join(config.SAVE_UNSENT_DIR, file_name)
+        file_contents = send_request.to_bytes()
+        with open(file_path, "wb") as fd:
+            fd.write(file_contents)
+        LOG.i(f"Saved unsent message {file_path}")
+

 mail_sender = MailSender()

@@ -233,7 +219,6 @@ def load_unsent_mails_from_fs_and_resend():
         LOG.i(f"Trying to re-deliver email {filename}")
         try:
             send_request = SendRequest.load_from_file(full_file_path)
-            send_request.retries += 1
         except Exception as e:
             LOG.e(f"Cannot load {filename}. Error {e}")
             continue
@@ -245,11 +230,6 @@ def load_unsent_mails_from_fs_and_resend():
                 "DeliverUnsentEmail", {"delivered": "true"}
             )
         else:
-            if send_request.retries > 2:
-                os.unlink(full_file_path)
-                send_request.save_request_to_failed_dir()
-            else:
-                send_request.save_request_to_file(full_file_path)
             newrelic.agent.record_custom_event(
                 "DeliverUnsentEmail", {"delivered": "false"}
             )
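Master persists failed `SendRequest`s with a retry counter and moves requests that failed more than twice into a `failed/` sub-directory, whereas this branch only dumps them once into the unsent directory. A standalone sketch of that retry bookkeeping using plain files and JSON (directory names and the retry limit here are made up for the example):

```python
import json
import os
import time
import uuid

SAVE_UNSENT_DIR = "/tmp/sl-unsent"  # example directory
SAVE_EXTENSION = "sendrequest"
MAX_RETRIES = 2


def save_request(payload: dict, directory: str, prefix: str) -> str:
    os.makedirs(directory, exist_ok=True)
    file_name = f"{prefix}-{int(time.time())}-{uuid.uuid4()}.{SAVE_EXTENSION}"
    file_path = os.path.join(directory, file_name)
    with open(file_path, "w") as fd:
        json.dump(payload, fd)
    return file_path


def handle_failed_delivery(file_path: str) -> None:
    """Re-read a stored request, bump its retry counter, and park it accordingly."""
    with open(file_path) as fd:
        payload = json.load(fd)
    payload["retries"] = payload.get("retries", 0) + 1

    if payload["retries"] > MAX_RETRIES:
        # give up: move the request to the failed/ sub-directory for manual inspection
        os.unlink(file_path)
        save_request(payload, os.path.join(SAVE_UNSENT_DIR, "failed"), "DeliveryRetryFail")
    else:
        # keep it in place with the updated counter so the next run retries it
        with open(file_path, "w") as fd:
            json.dump(payload, fd)


if __name__ == "__main__":
    path = save_request({"envelope_from": "a@example.com"}, SAVE_UNSENT_DIR, "DeliveryFail")
    handle_failed_delivery(path)
```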
333 app/models.py
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import base64
-import dataclasses
 import enum
 import hashlib
 import hmac
@@ -19,7 +18,7 @@ from flanker.addresslib import address
 from flask import url_for
 from flask_login import UserMixin
 from jinja2 import FileSystemLoader, Environment
-from sqlalchemy import orm, or_
+from sqlalchemy import orm
 from sqlalchemy import text, desc, CheckConstraint, Index, Column
 from sqlalchemy.dialects.postgresql import TSVECTOR
 from sqlalchemy.ext.declarative import declarative_base
@@ -30,8 +29,6 @@ from sqlalchemy_utils import ArrowType
 from app import config
 from app import s3
 from app.db import Session
-from app.dns_utils import get_mx_domains
-
 from app.errors import (
     AliasInTrashError,
     DirectoryInTrashError,
@@ -276,13 +273,6 @@ class IntEnumType(sa.types.TypeDecorator):
         return self._enum_type(enum_value)


-@dataclasses.dataclass
-class AliasOptions:
-    show_sl_domains: bool = True
-    show_partner_domains: Optional[Partner] = None
-    show_partner_premium: Optional[bool] = None
-
-
 class Hibp(Base, ModelMixin):
     __tablename__ = "hibp"
     name = sa.Column(sa.String(), nullable=False, unique=True, index=True)
@@ -301,9 +291,7 @@ class HibpNotifiedAlias(Base, ModelMixin):
     """

     __tablename__ = "hibp_notified_alias"
-    alias_id = sa.Column(
-        sa.ForeignKey("alias.id", ondelete="cascade"), nullable=False, index=True
-    )
+    alias_id = sa.Column(sa.ForeignKey("alias.id", ondelete="cascade"), nullable=False)
     user_id = sa.Column(sa.ForeignKey("users.id", ondelete="cascade"), nullable=False)

     notified_at = sa.Column(ArrowType, default=arrow.utcnow, nullable=False)
@@ -344,7 +332,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         sa.Boolean, default=True, nullable=False, server_default="1"
     )

-    activated = sa.Column(sa.Boolean, default=False, nullable=False, index=True)
+    activated = sa.Column(sa.Boolean, default=False, nullable=False)

     # an account can be disabled if having harmful behavior
     disabled = sa.Column(sa.Boolean, default=False, nullable=False, server_default="0")
@@ -414,10 +402,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     )

     referral_id = sa.Column(
-        sa.ForeignKey("referral.id", ondelete="SET NULL"),
-        nullable=True,
-        default=None,
-        index=True,
+        sa.ForeignKey("referral.id", ondelete="SET NULL"), nullable=True, default=None
     )

     referral = orm.relationship("Referral", foreign_keys=[referral_id])
@@ -434,10 +419,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

     # newsletter is sent to this address
     newsletter_alias_id = sa.Column(
-        sa.ForeignKey("alias.id", ondelete="SET NULL"),
-        nullable=True,
-        default=None,
-        index=True,
+        sa.ForeignKey("alias.id", ondelete="SET NULL"), nullable=True, default=None
     )

     # whether to include the sender address in reverse-alias
@@ -451,7 +433,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
     random_alias_suffix = sa.Column(
         sa.Integer,
         nullable=False,
-        default=AliasSuffixEnum.word.value,
+        default=AliasSuffixEnum.random_string.value,
         server_default=str(AliasSuffixEnum.random_string.value),
     )

@@ -520,8 +502,9 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         server_default=BlockBehaviourEnum.return_2xx.name,
     )

+    # to keep existing behavior, the server default is TRUE whereas for new user, the default value is FALSE
     include_header_email_header = sa.Column(
-        sa.Boolean, default=True, nullable=False, server_default="1"
+        sa.Boolean, default=False, nullable=False, server_default="1"
     )

     # bitwise flags. Allow for future expansion
@@ -540,16 +523,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         nullable=False,
     )

-    # Trigger hard deletion of the account at this time
-    delete_on = sa.Column(ArrowType, default=None)
-
-    __table_args__ = (
-        sa.Index(
-            "ix_users_activated_trial_end_lifetime", activated, trial_end, lifetime
-        ),
-        sa.Index("ix_users_delete_on", delete_on),
-    )
-
     @property
     def directory_quota(self):
         return min(
@@ -584,8 +557,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

     @classmethod
     def create(cls, email, name="", password=None, from_partner=False, **kwargs):
-        email = sanitize_email(email)
-        user: User = super(User, cls).create(email=email, name=name[:100], **kwargs)
+        user: User = super(User, cls).create(email=email, name=name, **kwargs)

         if password:
             user.set_password(password)
@@ -596,6 +568,19 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
         Session.flush()
         user.default_mailbox_id = mb.id

+        # create a first alias mail to show user how to use when they login
+        alias = Alias.create_new(
+            user,
+            prefix="simplelogin-newsletter",
+            mailbox_id=mb.id,
+            note="This is your first alias. It's used to receive SimpleLogin communications "
+            "like new features announcements, newsletters.",
+        )
+        Session.flush()
+
+        user.newsletter_alias_id = alias.id
+        Session.flush()
+
         # generate an alternative_id if needed
         if "alternative_id" not in kwargs:
             user.alternative_id = str(uuid.uuid4())
@@ -614,19 +599,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
             Session.flush()
             return user

-        # create a first alias mail to show user how to use when they login
-        alias = Alias.create_new(
-            user,
-            prefix="simplelogin-newsletter",
-            mailbox_id=mb.id,
-            note="This is your first alias. It's used to receive SimpleLogin communications "
-            "like new features announcements, newsletters.",
-        )
-        Session.flush()
-
-        user.newsletter_alias_id = alias.id
-        Session.flush()
-
         if config.DISABLE_ONBOARDING:
             LOG.d("Disable onboarding emails")
             return user
@ -652,7 +624,7 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||||
return user
|
return user
|
||||||
|
|
||||||
def get_active_subscription(
|
def get_active_subscription(
|
||||||
self, include_partner_subscription: bool = True
|
self,
|
||||||
) -> Optional[
|
) -> Optional[
|
||||||
Union[
|
Union[
|
||||||
Subscription
|
Subscription
|
||||||
|
@ -680,40 +652,19 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||||
if coinbase_subscription and coinbase_subscription.is_active():
|
if coinbase_subscription and coinbase_subscription.is_active():
|
||||||
return coinbase_subscription
|
return coinbase_subscription
|
||||||
|
|
||||||
if include_partner_subscription:
|
partner_sub: PartnerSubscription = PartnerSubscription.find_by_user_id(self.id)
|
||||||
partner_sub: PartnerSubscription = PartnerSubscription.find_by_user_id(
|
|
||||||
self.id
|
|
||||||
)
|
|
||||||
if partner_sub and partner_sub.is_active():
|
if partner_sub and partner_sub.is_active():
|
||||||
return partner_sub
|
return partner_sub
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_active_subscription_end(
|
|
||||||
self, include_partner_subscription: bool = True
|
|
||||||
) -> Optional[arrow.Arrow]:
|
|
||||||
sub = self.get_active_subscription(
|
|
||||||
include_partner_subscription=include_partner_subscription
|
|
||||||
)
|
|
||||||
if isinstance(sub, Subscription):
|
|
||||||
return arrow.get(sub.next_bill_date)
|
|
||||||
if isinstance(sub, AppleSubscription):
|
|
||||||
return sub.expires_date
|
|
||||||
if isinstance(sub, ManualSubscription):
|
|
||||||
return sub.end_at
|
|
||||||
if isinstance(sub, CoinbaseSubscription):
|
|
||||||
return sub.end_at
|
|
||||||
return None
|
|
||||||
|
|
||||||
# region Billing
|
# region Billing
|
||||||
def lifetime_or_active_subscription(
|
def lifetime_or_active_subscription(self) -> bool:
|
||||||
self, include_partner_subscription: bool = True
|
|
||||||
) -> bool:
|
|
||||||
"""True if user has lifetime licence or active subscription"""
|
"""True if user has lifetime licence or active subscription"""
|
||||||
if self.lifetime:
|
if self.lifetime:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
return self.get_active_subscription(include_partner_subscription) is not None
|
return self.get_active_subscription() is not None
|
||||||
|
|
||||||
def is_paid(self) -> bool:
|
def is_paid(self) -> bool:
|
||||||
"""same as _lifetime_or_active_subscription but not include free manual subscription"""
|
"""same as _lifetime_or_active_subscription but not include free manual subscription"""
|
||||||
|
@ -742,14 +693,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def is_premium(self, include_partner_subscription: bool = True) -> bool:
|
def is_premium(self) -> bool:
|
||||||
"""
|
"""
|
||||||
user is premium if they:
|
user is premium if they:
|
||||||
- have a lifetime deal or
|
- have a lifetime deal or
|
||||||
- in trial period or
|
- in trial period or
|
||||||
- active subscription
|
- active subscription
|
||||||
"""
|
"""
|
||||||
if self.lifetime_or_active_subscription(include_partner_subscription):
|
if self.lifetime_or_active_subscription():
|
||||||
return True
|
return True
|
||||||
|
|
||||||
if self.trial_end and arrow.now() < self.trial_end:
|
if self.trial_end and arrow.now() < self.trial_end:
|
||||||
|
@ -838,17 +789,6 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||||
< self.max_alias_for_free_account()
|
< self.max_alias_for_free_account()
|
||||||
)
|
)
|
||||||
|
|
||||||
def can_send_or_receive(self) -> bool:
|
|
||||||
if self.disabled:
|
|
||||||
LOG.i(f"User {self} is disabled. Cannot receive or send emails")
|
|
||||||
return False
|
|
||||||
if self.delete_on is not None:
|
|
||||||
LOG.i(
|
|
||||||
f"User {self} is scheduled to be deleted. Cannot receive or send emails"
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def profile_picture_url(self):
|
def profile_picture_url(self):
|
||||||
if self.profile_picture_id:
|
if self.profile_picture_id:
|
||||||
return self.profile_picture.get_url()
|
return self.profile_picture.get_url()
|
||||||
|
@ -927,16 +867,14 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
|
||||||
def custom_domains(self):
|
def custom_domains(self):
|
||||||
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
|
return CustomDomain.filter_by(user_id=self.id, verified=True).all()
|
||||||
|
|
||||||
def available_domains_for_random_alias(
|
def available_domains_for_random_alias(self) -> List[Tuple[bool, str]]:
|
||||||
self, alias_options: Optional[AliasOptions] = None
|
|
||||||
) -> List[Tuple[bool, str]]:
|
|
||||||
"""Return available domains for user to create random aliases
|
"""Return available domains for user to create random aliases
|
||||||
Each result record contains:
|
Each result record contains:
|
||||||
- whether the domain belongs to SimpleLogin
|
- whether the domain belongs to SimpleLogin
|
||||||
- the domain
|
- the domain
|
||||||
"""
|
"""
|
||||||
res = []
|
res = []
|
||||||
for domain in self.available_sl_domains(alias_options=alias_options):
|
for domain in self.available_sl_domains():
|
||||||
res.append((True, domain))
|
res.append((True, domain))
|
||||||
|
|
||||||
for custom_domain in self.verified_custom_domains():
|
for custom_domain in self.verified_custom_domains():
|
||||||
@@ -1021,65 +959,30 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):

         return None, "", False

-    def available_sl_domains(
-        self, alias_options: Optional[AliasOptions] = None
-    ) -> [str]:
+    def available_sl_domains(self) -> [str]:
         """
         Return all SimpleLogin domains that user can use when creating a new alias, including:
         - SimpleLogin public domains, available for all users (ALIAS_DOMAIN)
         - SimpleLogin premium domains, only available for Premium accounts (PREMIUM_ALIAS_DOMAIN)
         """
-        return [
-            sl_domain.domain
-            for sl_domain in self.get_sl_domains(alias_options=alias_options)
-        ]
+        return [sl_domain.domain for sl_domain in self.get_sl_domains()]

-    def get_sl_domains(
-        self, alias_options: Optional[AliasOptions] = None
-    ) -> list["SLDomain"]:
-        if alias_options is None:
-            alias_options = AliasOptions()
-        top_conds = [SLDomain.hidden == False]  # noqa: E712
-        or_conds = []  # noqa:E711
-        if self.default_alias_public_domain_id is not None:
-            default_domain_conds = [SLDomain.id == self.default_alias_public_domain_id]
-            if not self.is_premium():
-                default_domain_conds.append(
-                    SLDomain.premium_only == False  # noqa: E712
-                )
-            or_conds.append(and_(*default_domain_conds).self_group())
-        if alias_options.show_partner_domains is not None:
-            partner_user = PartnerUser.filter_by(
-                user_id=self.id, partner_id=alias_options.show_partner_domains.id
-            ).first()
-            if partner_user is not None:
-                partner_domain_cond = [SLDomain.partner_id == partner_user.partner_id]
-                if alias_options.show_partner_premium is None:
-                    alias_options.show_partner_premium = self.is_premium()
-                if not alias_options.show_partner_premium:
-                    partner_domain_cond.append(
-                        SLDomain.premium_only == False  # noqa: E712
-                    )
-                or_conds.append(and_(*partner_domain_cond).self_group())
-        if alias_options.show_sl_domains:
-            sl_conds = [SLDomain.partner_id == None]  # noqa: E711
-            if not self.is_premium():
-                sl_conds.append(SLDomain.premium_only == False)  # noqa: E712
-            or_conds.append(and_(*sl_conds).self_group())
-        top_conds.append(or_(*or_conds))
-        query = Session.query(SLDomain).filter(*top_conds).order_by(SLDomain.order)
-        return query.all()
+    def get_sl_domains(self) -> List["SLDomain"]:
+        query = SLDomain.filter_by(hidden=False).order_by(SLDomain.order)
+
+        if self.is_premium():
+            return query.all()
+        else:
+            return query.filter_by(premium_only=False).all()

-    def available_alias_domains(
-        self, alias_options: Optional[AliasOptions] = None
-    ) -> [str]:
+    def available_alias_domains(self) -> [str]:
         """return all domains that user can use when creating a new alias, including:
         - SimpleLogin public domains, available for all users (ALIAS_DOMAIN)
         - SimpleLogin premium domains, only available for Premium accounts (PREMIUM_ALIAS_DOMAIN)
         - Verified custom domains

         """
-        domains = self.available_sl_domains(alias_options=alias_options)
+        domains = self.available_sl_domains()

         for custom_domain in self.verified_custom_domains():
             domains.append(custom_domain.domain)
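The left-hand side of this hunk builds the `SLDomain` query from grouped OR conditions driven by `AliasOptions`, instead of two fixed `filter_by` calls. Below is a minimal, self-contained sketch of that condition-building pattern using SQLAlchemy Core; the table and column names are stand-ins, not the app's real models.

```python
from sqlalchemy import Boolean, Column, Integer, MetaData, String, Table, and_, or_, select

metadata = MetaData()
domains = Table(
    "sl_domain",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("domain", String),
    Column("hidden", Boolean),
    Column("premium_only", Boolean),
    Column("partner_id", Integer, nullable=True),
)


def build_domain_query(is_premium: bool, show_sl_domains: bool = True):
    # Top-level AND conditions; hidden domains are never shown.
    top_conds = [domains.c.hidden == False]  # noqa: E712
    or_conds = []
    if show_sl_domains:
        # Plain SimpleLogin domains (no partner); free users only see non-premium ones.
        sl_conds = [domains.c.partner_id == None]  # noqa: E711
        if not is_premium:
            sl_conds.append(domains.c.premium_only == False)  # noqa: E712
        or_conds.append(and_(*sl_conds).self_group())
    top_conds.append(or_(*or_conds))
    return select(domains).where(*top_conds)


print(build_domain_query(is_premium=False))
```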
@@ -1097,22 +1000,17 @@ class User(Base, ModelMixin, UserMixin, PasswordOracle):
             > 0
         )

-    def get_random_alias_suffix(self, custom_domain: Optional["CustomDomain"] = None):
+    def get_random_alias_suffix(self):
         """Get random suffix for an alias based on user's preference.

-        Use a shorter suffix in case of custom domain
-
         Returns:
             str: the random suffix generated
         """
         if self.random_alias_suffix == AliasSuffixEnum.random_string.value:
             return random_string(config.ALIAS_RANDOM_SUFFIX_LENGTH, include_digits=True)

-        if custom_domain is None:
-            return random_words(1, 3)
-
-        return random_words(1)
+        return random_words(1, 3)

     def __repr__(self):
         return f"<User {self.id} {self.name} {self.email}>"

@@ -1356,30 +1254,16 @@ class OauthToken(Base, ModelMixin):
         return self.expired < arrow.now()


-def available_sl_email(email: str) -> bool:
-    if (
-        Alias.get_by(email=email)
-        or Contact.get_by(reply_email=email)
-        or DeletedAlias.get_by(email=email)
-    ):
-        return False
-    return True
-
-
-def generate_random_alias_email(
+def generate_email(
     scheme: int = AliasGeneratorEnum.word.value,
     in_hex: bool = False,
-    alias_domain: str = config.FIRST_ALIAS_DOMAIN,
-    retries: int = 10,
+    alias_domain=config.FIRST_ALIAS_DOMAIN,
 ) -> str:
     """generate an email address that does not exist before
     :param alias_domain: the domain used to generate the alias.
     :param scheme: int, value of AliasGeneratorEnum, indicate how the email is generated
-    :param retries: int, How many times we can try to generate an alias in case of collision
     :type in_hex: bool, if the generate scheme is uuid, is hex favorable?
     """
-    if retries <= 0:
-        raise Exception("Cannot generate alias after many retries")
     if scheme == AliasGeneratorEnum.uuid.value:
         name = uuid.uuid4().hex if in_hex else uuid.uuid4().__str__()
         random_email = name + "@" + alias_domain
@@ -1389,15 +1273,15 @@ def generate_random_alias_email(
     random_email = random_email.lower().strip()

     # check that the client does not exist yet
-    if available_sl_email(random_email):
+    if not Alias.get_by(email=random_email) and not DeletedAlias.get_by(
+        email=random_email
+    ):
         LOG.d("generate email %s", random_email)
         return random_email

     # Rerun the function
     LOG.w("email %s already exists, generate a new email", random_email)
-    return generate_random_alias_email(
-        scheme=scheme, in_hex=in_hex, retries=retries - 1
-    )
+    return generate_email(scheme=scheme, in_hex=in_hex)


 class Alias(Base, ModelMixin):
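The left-hand `generate_random_alias_email` adds a `retries` counter so that a run of collisions cannot recurse forever. A rough, standalone sketch of the same bounded-retry idea (hypothetical helper, not the app's function):

```python
import uuid


def generate_unique_email(existing: set, domain: str = "example.org", retries: int = 10) -> str:
    """Return an address not present in `existing`, giving up after `retries` attempts."""
    if retries <= 0:
        raise Exception("Cannot generate alias after many retries")
    candidate = f"{uuid.uuid4().hex}@{domain}".lower().strip()
    if candidate not in existing:
        return candidate
    # Collision: try again with one fewer retry left.
    return generate_unique_email(existing, domain, retries - 1)


print(generate_unique_email(set()))
```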
@@ -1479,7 +1363,7 @@ class Alias(Base, ModelMixin):
     )

     # have I been pwned
-    hibp_last_check = sa.Column(ArrowType, default=None, index=True)
+    hibp_last_check = sa.Column(ArrowType, default=None)
     hibp_breaches = orm.relationship("Hibp", secondary="alias_hibp")

     # to use Postgres full text search. Only applied on "note" column for now
@@ -1596,7 +1480,7 @@ class Alias(Base, ModelMixin):
             suffix = user.get_random_alias_suffix()
             email = f"{prefix}.{suffix}@{config.FIRST_ALIAS_DOMAIN}"

-            if available_sl_email(email):
+            if not cls.get_by(email=email) and not DeletedAlias.get_by(email=email):
                 break

         return Alias.create(
@@ -1625,7 +1509,7 @@ class Alias(Base, ModelMixin):

         if user.default_alias_custom_domain_id:
             custom_domain = CustomDomain.get(user.default_alias_custom_domain_id)
-            random_email = generate_random_alias_email(
+            random_email = generate_email(
                 scheme=scheme, in_hex=in_hex, alias_domain=custom_domain.domain
             )
         elif user.default_alias_public_domain_id:
@@ -1633,12 +1517,12 @@ class Alias(Base, ModelMixin):
             if sl_domain.premium_only and not user.is_premium():
                 LOG.w("%s not premium, cannot use %s", user, sl_domain)
             else:
-                random_email = generate_random_alias_email(
+                random_email = generate_email(
                     scheme=scheme, in_hex=in_hex, alias_domain=sl_domain.domain
                 )

         if not random_email:
-            random_email = generate_random_alias_email(scheme=scheme, in_hex=in_hex)
+            random_email = generate_email(scheme=scheme, in_hex=in_hex)

         alias = Alias.create(
             user_id=user.id,
@@ -1672,9 +1556,7 @@ class ClientUser(Base, ModelMixin):
     client_id = sa.Column(sa.ForeignKey(Client.id, ondelete="cascade"), nullable=False)

     # Null means client has access to user original email
-    alias_id = sa.Column(
-        sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=True, index=True
-    )
+    alias_id = sa.Column(sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=True)

     # user can decide to send to client another name
     name = sa.Column(
@@ -1793,7 +1675,7 @@ class Contact(Base, ModelMixin):
     is_cc = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")

     pgp_public_key = sa.Column(sa.Text, nullable=True)
-    pgp_finger_print = sa.Column(sa.String(512), nullable=True, index=True)
+    pgp_finger_print = sa.Column(sa.String(512), nullable=True)

     alias = orm.relationship(Alias, backref="contacts")
     user = orm.relationship(User)
@@ -1947,7 +1829,6 @@ class Contact(Base, ModelMixin):

 class EmailLog(Base, ModelMixin):
     __tablename__ = "email_log"
-    __table_args__ = (Index("ix_email_log_created_at", "created_at"),)

     user_id = sa.Column(
         sa.ForeignKey(User.id, ondelete="cascade"), nullable=False, index=True
@@ -2205,9 +2086,7 @@ class AliasUsedOn(Base, ModelMixin):
         sa.UniqueConstraint("alias_id", "hostname", name="uq_alias_used"),
     )

-    alias_id = sa.Column(
-        sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False, index=True
-    )
+    alias_id = sa.Column(sa.ForeignKey(Alias.id, ondelete="cascade"), nullable=False)
     user_id = sa.Column(sa.ForeignKey(User.id, ondelete="cascade"), nullable=False)

     alias = orm.relationship(Alias)
@@ -2326,7 +2205,6 @@ class CustomDomain(Base, ModelMixin):
     @classmethod
     def create(cls, **kwargs):
         domain = kwargs.get("domain")
-        kwargs["domain"] = domain.replace("\n", "")
         if DeletedSubdomain.get_by(domain=domain):
             raise SubdomainInTrashError

@@ -2594,28 +2472,6 @@ class Mailbox(Base, ModelMixin):
             + Alias.filter_by(mailbox_id=self.id).count()
         )

-    def is_proton(self) -> bool:
-        if (
-            self.email.endswith("@proton.me")
-            or self.email.endswith("@protonmail.com")
-            or self.email.endswith("@protonmail.ch")
-            or self.email.endswith("@proton.ch")
-            or self.email.endswith("@pm.me")
-        ):
-            return True
-
-        from app.email_utils import get_email_local_part
-
-        mx_domains: [(int, str)] = get_mx_domains(get_email_local_part(self.email))
-        # Proton is the first domain
-        if mx_domains and mx_domains[0][1] in (
-            "mail.protonmail.ch.",
-            "mailsec.protonmail.ch.",
-        ):
-            return True
-
-        return False
-
     @classmethod
     def delete(cls, obj_id):
         mailbox: Mailbox = cls.get(obj_id)
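The removed `is_proton()` combines a cheap address-suffix check with a fallback on the mailbox's highest-priority MX host. A simplified sketch of just the decision logic; the MX value is passed in rather than resolved, so there is no DNS dependency here.

```python
from typing import Optional

PROTON_MAIL_SUFFIXES = ("@proton.me", "@protonmail.com", "@protonmail.ch", "@proton.ch", "@pm.me")
PROTON_MX_HOSTS = {"mail.protonmail.ch.", "mailsec.protonmail.ch."}


def looks_like_proton(email: str, first_mx_host: Optional[str] = None) -> bool:
    # Cheap check first: a known Proton address suffix.
    if email.lower().endswith(PROTON_MAIL_SUFFIXES):
        return True
    # Otherwise fall back to the highest-priority MX host, resolved elsewhere.
    return first_mx_host in PROTON_MX_HOSTS


print(looks_like_proton("user@pm.me"))                              # True
print(looks_like_proton("user@custom.tld", "mail.protonmail.ch."))  # True
print(looks_like_proton("user@custom.tld", "mx.example.net."))      # False
```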
@@ -2648,12 +2504,6 @@ class Mailbox(Base, ModelMixin):

         return ret

-    @classmethod
-    def create(cls, **kw):
-        if "email" in kw:
-            kw["email"] = sanitize_email(kw["email"])
-        return super().create(**kw)
-
     def __repr__(self):
         return f"<Mailbox {self.id} {self.email}>"

@@ -2913,31 +2763,6 @@ class Notification(Base, ModelMixin):
     )


-class Partner(Base, ModelMixin):
-    __tablename__ = "partner"
-
-    name = sa.Column(sa.String(128), unique=True, nullable=False)
-    contact_email = sa.Column(sa.String(128), unique=True, nullable=False)
-
-    @staticmethod
-    def find_by_token(token: str) -> Optional[Partner]:
-        hmaced = PartnerApiToken.hmac_token(token)
-        res = (
-            Session.query(Partner, PartnerApiToken)
-            .filter(
-                and_(
-                    PartnerApiToken.token == hmaced,
-                    Partner.id == PartnerApiToken.partner_id,
-                )
-            )
-            .first()
-        )
-        if res:
-            partner, partner_api_token = res
-            return partner
-        return None
-
-
 class SLDomain(Base, ModelMixin):
     """SimpleLogin domains"""

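`Partner.find_by_token` looks a partner up by the HMAC of the presented token rather than by the raw token itself. A minimal sketch of that idea with an in-memory mapping; the secret key and sample data are made up for illustration.

```python
import hashlib
import hmac

SERVER_SECRET = b"server-side-secret"  # assumption: a key kept out of the database


def hmac_token(token: str) -> str:
    return hmac.new(SERVER_SECRET, token.encode("utf-8"), hashlib.sha256).hexdigest()


# Only digests are stored; the raw token never is.
digest_to_partner = {hmac_token("partner-api-token"): "ExamplePartner"}


def find_by_token(token: str):
    return digest_to_partner.get(hmac_token(token))


print(find_by_token("partner-api-token"))  # ExamplePartner
print(find_by_token("wrong-token"))        # None
```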
@@ -2955,23 +2780,12 @@ class SLDomain(Base, ModelMixin):
         sa.Boolean, nullable=False, default=False, server_default="0"
     )

-    partner_id = sa.Column(
-        sa.ForeignKey(Partner.id, ondelete="cascade"),
-        nullable=True,
-        default=None,
-        server_default="NULL",
-    )
-
     # if enabled, do not show this domain when user creates a custom alias
     hidden = sa.Column(sa.Boolean, nullable=False, default=False, server_default="0")

     # the order in which the domains are shown when user creates a custom alias
     order = sa.Column(sa.Integer, nullable=False, default=0, server_default="0")

-    use_as_reverse_alias = sa.Column(
-        sa.Boolean, nullable=False, default=False, server_default="0"
-    )
-
     def __repr__(self):
         return f"<SLDomain {self.domain} {'Premium' if self.premium_only else 'Free'}"

@@ -2992,8 +2806,6 @@ class Monitoring(Base, ModelMixin):
     active_queue = sa.Column(sa.Integer, nullable=False)
     deferred_queue = sa.Column(sa.Integer, nullable=False)

-    __table_args__ = (Index("ix_monitoring_created_at", "created_at"),)
-

 class BatchImport(Base, ModelMixin):
     __tablename__ = "batch_import"
@@ -3119,8 +2931,6 @@ class Bounce(Base, ModelMixin):
     email = sa.Column(sa.String(256), nullable=False, index=True)
     info = sa.Column(sa.Text, nullable=True)

-    __table_args__ = (sa.Index("ix_bounce_created_at", "created_at"),)
-

 class TransactionalEmail(Base, ModelMixin):
     """Storing all email addresses that receive transactional emails, including account email and mailboxes.
@@ -3130,8 +2940,6 @@ class TransactionalEmail(Base, ModelMixin):
     __tablename__ = "transactional_email"
     email = sa.Column(sa.String(256), nullable=False, unique=False)

-    __table_args__ = (sa.Index("ix_transactional_email_created_at", "created_at"),)
-

 class Payout(Base, ModelMixin):
     """Referral payouts"""
@@ -3184,7 +2992,7 @@ class MessageIDMatching(Base, ModelMixin):

     # to track what email_log that has created this matching
     email_log_id = sa.Column(
-        sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True, index=True
+        sa.ForeignKey("email_log.id", ondelete="cascade"), nullable=True
     )

     email_log = orm.relationship("EmailLog")
@@ -3418,6 +3226,31 @@ class ProviderComplaint(Base, ModelMixin):
     refused_email = orm.relationship(RefusedEmail, foreign_keys=[refused_email_id])


+class Partner(Base, ModelMixin):
+    __tablename__ = "partner"
+
+    name = sa.Column(sa.String(128), unique=True, nullable=False)
+    contact_email = sa.Column(sa.String(128), unique=True, nullable=False)
+
+    @staticmethod
+    def find_by_token(token: str) -> Optional[Partner]:
+        hmaced = PartnerApiToken.hmac_token(token)
+        res = (
+            Session.query(Partner, PartnerApiToken)
+            .filter(
+                and_(
+                    PartnerApiToken.token == hmaced,
+                    Partner.id == PartnerApiToken.partner_id,
+                )
+            )
+            .first()
+        )
+        if res:
+            partner, partner_api_token = res
+            return partner
+        return None
+
+
 class PartnerApiToken(Base, ModelMixin):
     __tablename__ = "partner_api_token"

@@ -3487,7 +3320,7 @@ class PartnerSubscription(Base, ModelMixin):
     )

     # when the partner subscription ends
-    end_at = sa.Column(ArrowType, nullable=False, index=True)
+    end_at = sa.Column(ArrowType, nullable=False)

     partner_user = orm.relationship(PartnerUser)

@@ -3517,7 +3350,7 @@ class PartnerSubscription(Base, ModelMixin):

 class Newsletter(Base, ModelMixin):
     __tablename__ = "newsletter"
-    subject = sa.Column(sa.String(), nullable=False, index=True)
+    subject = sa.Column(sa.String(), nullable=False, unique=True, index=True)

     html = sa.Column(sa.Text)
     plain_text = sa.Column(sa.Text)
@@ -1,3 +1 @@
 from . import views
-
-__all__ = ["views"]
@@ -1,3 +1 @@
 from .views import authorize, token, user_info
-
-__all__ = ["authorize", "token", "user_info"]
@@ -64,7 +64,7 @@ def _split_arg(arg_input: Union[str, list]) -> Set[str]:
     - the response_type/scope passed as a list ?scope=scope_1&scope=scope_2
     """
     res = set()
-    if isinstance(arg_input, str):
+    if type(arg_input) is str:
         if " " in arg_input:
             for x in arg_input.split(" "):
                 if x:
@@ -5,11 +5,3 @@ from .views import (
     account_activated,
     extension_redirect,
 )
-
-__all__ = [
-    "index",
-    "final",
-    "setup_done",
-    "account_activated",
-    "extension_redirect",
-]
@@ -39,6 +39,7 @@ class _InnerLock:
         lock_redis.storage.delete(lock_name)

     def __call__(self, f: Callable[..., Any]):

         if self.lock_suffix is None:
             lock_suffix = f.__name__
         else:
@@ -5,11 +5,3 @@ from .views import (
     provider1_callback,
     provider2_callback,
 )
-
-__all__ = [
-    "index",
-    "phone_reservation",
-    "twilio_callback",
-    "provider1_callback",
-    "provider2_callback",
-]
@@ -7,12 +7,11 @@ from typing import Optional

 from app.account_linking import SLPlan, SLPlanType
 from app.config import PROTON_EXTRA_HEADER_NAME, PROTON_EXTRA_HEADER_VALUE
-from app.errors import ProtonAccountNotVerified
 from app.log import LOG

 _APP_VERSION = "OauthClient_1.0.0"

-PROTON_ERROR_CODE_HV_NEEDED = 9001
+PROTON_ERROR_CODE_NOT_EXISTS = 2501

 PLAN_FREE = 1
 PLAN_PREMIUM = 2
@@ -58,15 +57,6 @@ def convert_access_token(access_token_response: str) -> AccessCredentials:
     )


-def handle_response_not_ok(status: int, body: dict, text: str) -> Exception:
-    if status == HTTPStatus.UNPROCESSABLE_ENTITY:
-        res_code = body.get("Code")
-        if res_code == PROTON_ERROR_CODE_HV_NEEDED:
-            return ProtonAccountNotVerified()
-
-    return Exception(f"Unexpected status code. Wanted 200 and got {status}: " + text)
-
-
 class ProtonClient(ABC):
     @abstractmethod
     def get_user(self) -> Optional[UserInformation]:
@@ -134,11 +124,11 @@ class HttpProtonClient(ProtonClient):
     @staticmethod
     def __validate_response(res: Response) -> dict:
         status = res.status_code
-        as_json = res.json()
         if status != HTTPStatus.OK:
-            raise HttpProtonClient.__handle_response_not_ok(
-                status=status, body=as_json, text=res.text
+            raise Exception(
+                f"Unexpected status code. Wanted 200 and got {status}: " + res.text
             )
+        as_json = res.json()
         res_code = as_json.get("Code")
         if not res_code or res_code != 1000:
             raise Exception(
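On the left-hand side, `__validate_response` delegates non-200 responses to `handle_response_not_ok`, which turns a 422 carrying Proton's "human verification needed" code into a dedicated exception. A self-contained sketch of that mapping:

```python
from http import HTTPStatus

PROTON_ERROR_CODE_HV_NEEDED = 9001


class ProtonAccountNotVerified(Exception):
    """Raised when Proton asks for human verification before the account can be linked."""


def handle_response_not_ok(status: int, body: dict, text: str) -> Exception:
    if status == HTTPStatus.UNPROCESSABLE_ENTITY and body.get("Code") == PROTON_ERROR_CODE_HV_NEEDED:
        return ProtonAccountNotVerified()
    return Exception(f"Unexpected status code. Wanted 200 and got {status}: " + text)


print(type(handle_response_not_ok(422, {"Code": 9001}, "")).__name__)  # ProtonAccountNotVerified
print(type(handle_response_not_ok(500, {}, "boom")).__name__)          # Exception
```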
@@ -6,6 +6,7 @@ from app.session import RedisSessionStore


 def initialize_redis_services(app: flask.Flask, redis_url: str):

     if redis_url.startswith("redis://") or redis_url.startswith("rediss://"):
         storage = limits.storage.RedisStorage(redis_url)
         app.session_interface = RedisSessionStore(storage.storage, storage.storage, app)
@@ -75,7 +75,7 @@ class RedisSessionStore(SessionInterface):
             try:
                 data = pickle.loads(val)
                 return ServerSession(data, session_id=session_id)
-            except Exception:
+            except:
                 pass
         return ServerSession(session_id=str(uuid.uuid4()))

@@ -1,33 +0,0 @@
-import requests
-from requests import RequestException
-
-from app import config
-from app.log import LOG
-from app.models import User
-
-
-def execute_subscription_webhook(user: User):
-    webhook_url = config.SUBSCRIPTION_CHANGE_WEBHOOK
-    if webhook_url is None:
-        return
-    subscription_end = user.get_active_subscription_end(
-        include_partner_subscription=False
-    )
-    sl_subscription_end = None
-    if subscription_end:
-        sl_subscription_end = subscription_end.timestamp
-    payload = {
-        "user_id": user.id,
-        "is_premium": user.is_premium(),
-        "active_subscription_end": sl_subscription_end,
-    }
-    try:
-        response = requests.post(webhook_url, json=payload, timeout=2)
-        if response.status_code == 200:
-            LOG.i("Sent request to subscription update webhook successfully")
-        else:
-            LOG.i(
-                f"Request to webhook failed with statue {response.status_code}: {response.text}"
-            )
-    except RequestException as e:
-        LOG.error(f"Subscription request exception: {e}")
@@ -32,8 +32,8 @@ def random_words(words: int = 2, numbers: int = 0):
     fields = [secrets.choice(_words) for i in range(words)]

     if numbers > 0:
-        digits = "".join([str(random.randint(0, 9)) for i in range(numbers)])
-        return "_".join(fields) + digits
+        fields.append("".join([str(random.randint(0, 9)) for i in range(numbers)]))
+        return "".join(fields)
     else:
         return "_".join(fields)

@@ -99,7 +99,7 @@ def sanitize_email(email_address: str, not_lower=False) -> str:
         email_address = email_address.strip().replace(" ", "").replace("\n", " ")
         if not not_lower:
             email_address = email_address.lower()
-    return email_address.replace("\u200f", "")
+    return email_address


 class NextUrlSanitizer:
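The `random_words` change keeps the underscore join and appends the digits after it, instead of joining everything with no separator. A standalone sketch of the new behaviour with a tiny stand-in word list:

```python
import random
import secrets

_words = ["lark", "ember", "quartz", "willow"]  # stand-in for the app's word list


def random_words(words: int = 2, numbers: int = 0) -> str:
    fields = [secrets.choice(_words) for _ in range(words)]
    if numbers > 0:
        # Digits are appended after the underscore join, e.g. "ember_willow417".
        digits = "".join(str(random.randint(0, 9)) for _ in range(numbers))
        return "_".join(fields) + digits
    return "_".join(fields)


print(random_words(2, 3))
```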
91 cron.py
@@ -5,11 +5,11 @@ from typing import List, Tuple

 import arrow
 import requests
-from sqlalchemy import func, desc, or_, and_
+from sqlalchemy import func, desc, or_
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.orm import joinedload
 from sqlalchemy.orm.exc import ObjectDeletedError
-from sqlalchemy.sql import Insert, text
+from sqlalchemy.sql import Insert

 from app import s3, config
 from app.alias_utils import nb_email_log_for_mailbox
@@ -22,9 +22,10 @@ from app.email_utils import (
     render,
     email_can_be_used_as_mailbox,
     send_email_with_rate_control,
+    normalize_reply_email,
+    is_valid_email,
     get_email_domain_part,
 )
-from app.email_validation import is_valid_email, normalize_reply_email
 from app.errors import ProtonPartnerNotSetUp
 from app.log import LOG
 from app.mail_sender import load_unsent_mails_from_fs_and_resend
@@ -65,14 +66,12 @@ from server import create_light_app

 def notify_trial_end():
     for user in User.filter(
-        User.activated.is_(True),
-        User.trial_end.isnot(None),
-        User.trial_end >= arrow.now().shift(days=2),
-        User.trial_end < arrow.now().shift(days=3),
-        User.lifetime.is_(False),
+        User.activated.is_(True), User.trial_end.isnot(None), User.lifetime.is_(False)
     ).all():
         try:
-            if user.in_trial():
+            if user.in_trial() and arrow.now().shift(
+                days=3
+            ) > user.trial_end >= arrow.now().shift(days=2):
                 LOG.d("Send trial end email to user %s", user)
                 send_trial_end_soon_email(user)
         # happens if user has been deleted in the meantime
@@ -85,49 +84,27 @@ def delete_logs():
     delete_refused_emails()
     delete_old_monitoring()

-    for t_email in TransactionalEmail.filter(
+    for t in TransactionalEmail.filter(
         TransactionalEmail.created_at < arrow.now().shift(days=-7)
     ):
-        TransactionalEmail.delete(t_email.id)
+        TransactionalEmail.delete(t.id)

     for b in Bounce.filter(Bounce.created_at < arrow.now().shift(days=-7)):
         Bounce.delete(b.id)

     Session.commit()

-    LOG.d("Deleting EmailLog older than 2 weeks")
+    LOG.d("Delete EmailLog older than 2 weeks")

-    total_deleted = 0
-    batch_size = 500
-    Session.execute("set session statement_timeout=30000").rowcount
-    queries_done = 0
-    cutoff_time = arrow.now().shift(days=-14)
-    rows_to_delete = EmailLog.filter(EmailLog.created_at < cutoff_time).count()
-    expected_queries = int(rows_to_delete / batch_size)
-    sql = text(
-        "DELETE FROM email_log WHERE id IN (SELECT id FROM email_log WHERE created_at < :cutoff_time order by created_at limit :batch_size)"
-    )
-    str_cutoff_time = cutoff_time.isoformat()
-    while total_deleted < rows_to_delete:
-        deleted_count = Session.execute(
-            sql, {"cutoff_time": str_cutoff_time, "batch_size": batch_size}
-        ).rowcount
-        Session.commit()
-        total_deleted += deleted_count
-        queries_done += 1
-        LOG.i(
-            f"[{queries_done}/{expected_queries}] Deleted {total_deleted} EmailLog entries"
-        )
-        if deleted_count < batch_size:
-            break
+    max_dt = arrow.now().shift(weeks=-2)
+    nb_deleted = EmailLog.filter(EmailLog.created_at < max_dt).delete()
+    Session.commit()

-    LOG.i("Deleted %s email logs", total_deleted)
+    LOG.i("Delete %s email logs", nb_deleted)


 def delete_refused_emails():
-    for refused_email in (
-        RefusedEmail.filter_by(deleted=False).order_by(RefusedEmail.id).all()
-    ):
+    for refused_email in RefusedEmail.filter_by(deleted=False).all():
         if arrow.now().shift(days=1) > refused_email.delete_at >= arrow.now():
             LOG.d("Delete refused email %s", refused_email)
             if refused_email.path:
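The left-hand side of `delete_logs` removes old `EmailLog` rows in fixed-size batches via a raw `DELETE ... WHERE id IN (SELECT ... LIMIT ...)` instead of one bulk delete, so no single statement holds locks for too long. A sketch of the same pattern against an in-memory SQLite table; this is purely illustrative, the app runs the idea against Postgres through SQLAlchemy.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE email_log (id INTEGER PRIMARY KEY, created_at TEXT)")
conn.executemany("INSERT INTO email_log (created_at) VALUES (?)", [("2020-01-01",)] * 1200)

batch_size = 500
total_deleted = 0
while True:
    # Delete at most `batch_size` of the oldest matching rows per statement.
    cur = conn.execute(
        "DELETE FROM email_log WHERE id IN ("
        " SELECT id FROM email_log WHERE created_at < ? ORDER BY created_at LIMIT ?)",
        ("2021-01-01", batch_size),
    )
    conn.commit()
    total_deleted += cur.rowcount
    if cur.rowcount < batch_size:
        break

print(total_deleted)  # 1200
```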
@@ -161,7 +138,7 @@ def notify_premium_end():

         send_email(
             user.email,
-            "Your subscription will end soon",
+            f"Your subscription will end soon",
             render(
                 "transactional/subscription-end.txt",
                 user=user,
@@ -218,7 +195,7 @@ def notify_manual_sub_end():
         LOG.d("Remind user %s that their manual sub is ending soon", user)
         send_email(
             user.email,
-            "Your subscription will end soon",
+            f"Your subscription will end soon",
             render(
                 "transactional/manual-subscription-end.txt",
                 user=user,
@@ -295,11 +272,7 @@ def compute_metric2() -> Metric2:
     _24h_ago = now.shift(days=-1)

     nb_referred_user_paid = 0
-    for user in (
-        User.filter(User.referral_id.isnot(None))
-        .yield_per(500)
-        .enable_eagerloads(False)
-    ):
+    for user in User.filter(User.referral_id.isnot(None)):
         if user.is_paid():
             nb_referred_user_paid += 1

@@ -590,21 +563,21 @@ nb_total_bounced_last_24h: {stats_today.nb_total_bounced_last_24h} - {increase_p
 """

     monitoring_report += "\n====================================\n"
-    monitoring_report += """
+    monitoring_report += f"""
 # Account bounce report:
 """

     for email, bounces in bounce_report():
         monitoring_report += f"{email}: {bounces}\n"

-    monitoring_report += """\n
+    monitoring_report += f"""\n
 # Alias creation report:
 """

     for email, nb_alias, date in alias_creation_report():
         monitoring_report += f"{email}, {date}: {nb_alias}\n"

-    monitoring_report += """\n
+    monitoring_report += f"""\n
 # Full bounce detail report:
 """
     monitoring_report += all_bounce_report()
@@ -1047,8 +1020,7 @@ async def check_hibp():
         )
         .filter(Alias.enabled)
         .order_by(Alias.hibp_last_check.asc())
-        .yield_per(500)
-        .enable_eagerloads(False)
+        .all()
     ):
         await queue.put(alias.id)

@@ -1099,14 +1071,14 @@ def notify_hibp():
         )

         LOG.d(
-            "Send new breaches found email to %s for %s breaches aliases",
+            f"Send new breaches found email to %s for %s breaches aliases",
             user,
             len(breached_aliases),
         )

         send_email(
             user.email,
-            "You were in a data breach",
+            f"You were in a data breach",
             render(
                 "transactional/hibp-new-breaches.txt.jinja2",
                 user=user,
@@ -1126,18 +1098,6 @@ def notify_hibp():
     Session.commit()


-def clear_users_scheduled_to_be_deleted():
-    users = User.filter(
-        and_(User.delete_on.isnot(None), User.delete_on < arrow.now())
-    ).all()
-    for user in users:
-        LOG.i(
-            f"Scheduled deletion of user {user} with scheduled delete on {user.delete_on}"
-        )
-        User.delete(user.id)
-    Session.commit()
-
-
 if __name__ == "__main__":
     LOG.d("Start running cronjob")
     parser = argparse.ArgumentParser()
@@ -1204,6 +1164,3 @@ if __name__ == "__main__":
     elif args.job == "send_undelivered_mails":
         LOG.d("Sending undelivered emails")
         load_unsent_mails_from_fs_and_resend()
-    elif args.job == "delete_scheduled_users":
-        LOG.d("Deleting users scheduled to be deleted")
-        clear_users_scheduled_to_be_deleted()
81 crontab.yml
@@ -5,66 +5,65 @@ jobs:
     schedule: "0 0 * * *"
     captureStderr: true

+  - name: SimpleLogin Notify Trial Ends
+    command: python /code/cron.py -j notify_trial_end
+    shell: /bin/bash
+    schedule: "0 8 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Manual Subscription Ends
+    command: python /code/cron.py -j notify_manual_subscription_end
+    shell: /bin/bash
+    schedule: "0 9 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Notify Premium Ends
+    command: python /code/cron.py -j notify_premium_end
+    shell: /bin/bash
+    schedule: "0 10 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Delete Logs
+    command: python /code/cron.py -j delete_logs
+    shell: /bin/bash
+    schedule: "0 11 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Poll Apple Subscriptions
+    command: python /code/cron.py -j poll_apple_subscription
+    shell: /bin/bash
+    schedule: "0 12 * * *"
+    captureStderr: true
+
+  - name: SimpleLogin Sanity Check
+    command: python /code/cron.py -j sanity_check
+    shell: /bin/bash
+    schedule: "0 2 * * *"
+    captureStderr: true
+
   - name: SimpleLogin Delete Old Monitoring records
     command: python /code/cron.py -j delete_old_monitoring
     shell: /bin/bash
-    schedule: "15 1 * * *"
+    schedule: "0 14 * * *"
     captureStderr: true

   - name: SimpleLogin Custom Domain check
     command: python /code/cron.py -j check_custom_domain
     shell: /bin/bash
-    schedule: "15 2 * * *"
+    schedule: "0 15 * * *"
     captureStderr: true

   - name: SimpleLogin HIBP check
     command: python /code/cron.py -j check_hibp
     shell: /bin/bash
-    schedule: "15 3 * * *"
+    schedule: "0 18 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid

   - name: SimpleLogin Notify HIBP breaches
     command: python /code/cron.py -j notify_hibp
     shell: /bin/bash
-    schedule: "15 4 * * *"
-    captureStderr: true
-    concurrencyPolicy: Forbid
-
-  - name: SimpleLogin Delete Logs
-    command: python /code/cron.py -j delete_logs
-    shell: /bin/bash
-    schedule: "15 5 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Poll Apple Subscriptions
-    command: python /code/cron.py -j poll_apple_subscription
-    shell: /bin/bash
-    schedule: "15 6 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Trial Ends
-    command: python /code/cron.py -j notify_trial_end
-    shell: /bin/bash
-    schedule: "15 8 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Manual Subscription Ends
-    command: python /code/cron.py -j notify_manual_subscription_end
-    shell: /bin/bash
-    schedule: "15 9 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin Notify Premium Ends
-    command: python /code/cron.py -j notify_premium_end
-    shell: /bin/bash
-    schedule: "15 10 * * *"
-    captureStderr: true
-
-  - name: SimpleLogin delete users scheduled to be deleted
-    command: echo disabled_user_deletion #python /code/cron.py -j delete_scheduled_users
-    shell: /bin/bash
-    schedule: "15 11 * * *"
+    schedule: "0 19 * * *"
     captureStderr: true
     concurrencyPolicy: Forbid

19 docs/api.md
@@ -15,7 +15,6 @@
 - [GET /api/user/cookie_token](#get-apiusercookie_token): Get a one time use token to exchange it for a valid cookie
 - [PATCH /api/user_info](#patch-apiuser_info): Update user's information.
 - [POST /api/api_key](#post-apiapi_key): Create a new API key.
-- [GET /api/stats](#get-apistats): Get user's stats.
 - [GET /api/logout](#get-apilogout): Log out.

 [Alias endpoints](#alias-endpoints)
@@ -227,22 +226,6 @@ Input:

 Output: same as GET /api/user_info

-#### GET /api/stats
-
-Given the API Key, return stats about the number of aliases, number of emails forwarded/replied/blocked
-
-Input:
-
-- `Authentication` header that contains the api key
-
-Output: if api key is correct, return a json with the following fields:
-
-```json
-{"nb_alias": 1, "nb_block": 0, "nb_forward": 0, "nb_reply": 0}
-```
-
-If api key is incorrect, return 401.
-
 #### PATCH /api/sudo

 Enable sudo mode
@@ -711,7 +694,7 @@ Return 200 and `existed=true` if contact is already added.

 It can return 403 with an error if the user cannot create reverse alias.

-```json
+``json
 {
     "error": "Please upgrade to create a reverse-alias"
 }
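For reference, the stats endpoint documented on the left-hand side of this hunk can be exercised like the sketch below; the base URL and API key are placeholders, and the route only exists on instances that carry the documentation above.

```python
import json
import urllib.request

API_BASE = "https://app.simplelogin.io"  # placeholder: use your instance's URL
API_KEY = "your-api-key"                 # placeholder

req = urllib.request.Request(f"{API_BASE}/api/stats", headers={"Authentication": API_KEY})
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # e.g. {"nb_alias": 1, "nb_block": 0, "nb_forward": 0, "nb_reply": 0}
```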
123 docs/ssl.md
@@ -1,4 +1,4 @@
-# SSL, HTTPS, HSTS and additional security measures
+# SSL, HTTPS, and HSTS

 It's highly recommended to enable SSL/TLS on your server, both for the web app and email server.

@@ -58,124 +58,3 @@ Now, reload Nginx:
 ```bash
 sudo systemctl reload nginx
 ```
-
-## Additional security measures
-
-For additional security, we recommend you take some extra steps.
-
-### Enable Certificate Authority Authorization (CAA)
-
-[Certificate Authority Authorization](https://letsencrypt.org/docs/caa/) is a step you can take to restrict the list of certificate authorities that are allowed to issue certificates for your domains.
-
-Use [SSLMate’s CAA Record Generator](https://sslmate.com/caa/) to create a **CAA record** with the following configuration:
-
-- `flags`: `0`
-- `tag`: `issue`
-- `value`: `"letsencrypt.org"`
-
-To verify if the DNS works, the following command
-
-```bash
-dig @1.1.1.1 mydomain.com caa
-```
-
-should return:
-
-```
-mydomain.com. 3600 IN CAA 0 issue "letsencrypt.org"
-```
-
-### SMTP MTA Strict Transport Security (MTA-STS)
-
-[MTA-STS](https://datatracker.ietf.org/doc/html/rfc8461) is an extra step you can take to broadcast the ability of your instance to receive and, optionally enforce, TSL-secure SMTP connections to protect email traffic.
-
-Enabling MTA-STS requires you serve a specific file from subdomain `mta-sts.domain.com` on a well-known route.
-
-Create a text file `/var/www/.well-known/mta-sts.txt` with the content:
-
-```txt
-version: STSv1
-mode: testing
-mx: app.mydomain.com
-max_age: 86400
-```
-
-It is recommended to start with `mode: testing` for starters to get time to review failure reports. Add as many `mx:` domain entries as you have matching **MX records** in your DNS configuration.
-
-Create a **TXT record** for `_mta-sts.mydomain.com.` with the following value:
-
-```txt
-v=STSv1; id=UNIX_TIMESTAMP
-```
-
-With `UNIX_TIMESTAMP` being the current date/time.
-
-Use the following command to generate the record:
-
-```bash
-echo "v=STSv1; id=$(date +%s)"
-```
-
-To verify if the DNS works, the following command
-
-```bash
-dig @1.1.1.1 _mta-sts.mydomain.com txt
-```
-
-should return a result similar to this one:
-
-```
-_mta-sts.mydomain.com. 3600 IN TXT "v=STSv1; id=1689416399"
-```
-
-Create an additional Nginx configuration in `/etc/nginx/sites-enabled/mta-sts` with the following content:
-
-```
-server {
-    server_name mta-sts.mydomain.com;
-    root /var/www;
-    listen 80;
-
-    location ^~ /.well-known {}
-}
-```
-
-Restart Nginx with the following command:
-
-```sh
-sudo service nginx restart
-```
-
-A correct configuration of MTA-STS, however, requires that the certificate used to host the `mta-sts` subdomain matches that of the subdomain referred to by the **MX record** from the DNS. In other words, both `mta-sts.mydomain.com` and `app.mydomain.com` must share the same certificate.
-
-The easiest way to do this is to _expand_ the certificate associated with `app.mydomain.com` to also support the `mta-sts` subdomain using the following command:
-
-```sh
-certbot --expand --nginx -d app.mydomain.com,mta-sts.mydomain.com
-```
-
-## SMTP TLS Reporting
-
-[TLSRPT](https://datatracker.ietf.org/doc/html/rfc8460) is used by SMTP systems to report failures in establishing TLS-secure sessions as broadcast by the MTA-STS configuration.
-
-Configuring MTA-STS in `mode: testing` as shown in the previous section gives you time to review failures from some SMTP senders.
-
-Create a **TXT record** for `_smtp._tls.mydomain.com.` with the following value:
-
-```txt
-v=TSLRPTv1; rua=mailto:YOUR_EMAIL
-```
-
-The TLSRPT configuration at the DNS level allows SMTP senders that fail to initiate TLS-secure sessions to send reports to a particular email address. We suggest creating a `tls-reports` alias in SimpleLogin for this purpose.
-
-To verify if the DNS works, the following command
-
-```bash
-dig @1.1.1.1 _smtp._tls.mydomain.com txt
-```
-
-should return a result similar to this one:
-
-```
-_smtp._tls.mydomain.com. 3600 IN TXT "v=TSLRPTv1; rua=mailto:tls-reports@mydomain.com"
-```
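The removed MTA-STS section boils down to serving a small policy file over HTTPS on the `mta-sts` subdomain. A hedged sketch that fetches and parses such a policy; the domain is a placeholder and the network call is left commented out so the snippet runs offline.

```python
import urllib.request


def fetch_mta_sts_policy(domain: str) -> dict:
    """Fetch https://mta-sts.<domain>/.well-known/mta-sts.txt and parse key: value lines."""
    url = f"https://mta-sts.{domain}/.well-known/mta-sts.txt"
    with urllib.request.urlopen(url, timeout=10) as resp:
        text = resp.read().decode()
    policy = {}
    for line in text.splitlines():
        if ":" in line:
            key, value = line.split(":", 1)
            policy.setdefault(key.strip(), []).append(value.strip())
    return policy


# policy = fetch_mta_sts_policy("mydomain.com")
# assert policy["version"] == ["STSv1"]
# assert policy["mode"][0] in ("testing", "enforce")
```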
@@ -106,6 +106,8 @@ from app.email_utils import (
     get_header_unicode,
     generate_reply_email,
     is_reverse_alias,
+    normalize_reply_email,
+    is_valid_email,
     replace,
     should_disable,
     parse_id_from_bounce,
@@ -121,7 +123,6 @@ from app.email_utils import (
     generate_verp_email,
     sl_formataddr,
 )
-from app.email_validation import is_valid_email, normalize_reply_email
 from app.errors import (
     NonReverseAliasInReplyPhase,
     VERPTransactional,
@@ -160,7 +161,6 @@ from app.models import (
     MessageIDMatching,
     Notification,
     VerpType,
-    SLDomain,
 )
 from app.pgp_utils import (
     PGPException,
@@ -235,6 +235,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
             contact.mail_from = mail_from
             Session.commit()
     else:

         try:
             contact = Contact.create(
                 user_id=alias.user_id,
@@ -242,7 +243,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con
                 website_email=contact_email,
                 name=contact_name,
                 mail_from=mail_from,
-                reply_email=generate_reply_email(contact_email, alias)
+                reply_email=generate_reply_email(contact_email, alias.user)
                 if is_valid_email(contact_email)
                 else NOREPLY,
                 automatic_created=True,
@@ -260,7 +261,7 @@ def get_or_create_contact(from_header: str, mail_from: str, alias: Alias) -> Con

             Session.commit()
         except IntegrityError:
-            LOG.w(f"Contact with email {contact_email} for alias {alias} already exist")
+            LOG.w("Contact %s %s already exist", alias, contact_email)
             Session.rollback()
             contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)

@@ -278,9 +279,6 @@ def get_or_create_reply_to_contact(
     except ValueError:
         return

-    if len(contact_name) >= Contact.MAX_NAME_LENGTH:
-        contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
-
     if not is_valid_email(contact_address):
         LOG.w(
             "invalid reply-to address %s. Parse from %s",
@@ -306,7 +304,7 @@ def get_or_create_reply_to_contact(
             alias_id=alias.id,
             website_email=contact_address,
             name=contact_name,
-            reply_email=generate_reply_email(contact_address, alias),
+            reply_email=generate_reply_email(contact_address, alias.user),
             automatic_created=True,
         )
         Session.commit()
@@ -349,10 +347,6 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
             continue

         contact = Contact.get_by(alias_id=alias.id, website_email=contact_email)
-        contact_name = full_address.display_name
-        if len(contact_name) >= Contact.MAX_NAME_LENGTH:
-            contact_name = contact_name[0 : Contact.MAX_NAME_LENGTH]
-
         if contact:
             # update the contact name if needed
             if contact.name != full_address.display_name:
@@ -360,9 +354,9 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
                     "Update contact %s name %s to %s",
                     contact,
                     contact.name,
-                    contact_name,
+                    full_address.display_name,
                 )
-                contact.name = contact_name
+                contact.name = full_address.display_name
                 Session.commit()
         else:
             LOG.d(
@@ -377,8 +371,8 @@ def replace_header_when_forward(msg: Message, alias: Alias, header: str):
                 user_id=alias.user_id,
                 alias_id=alias.id,
                 website_email=contact_email,
-                name=contact_name,
-                reply_email=generate_reply_email(contact_email, alias),
+                name=full_address.display_name,
+                reply_email=generate_reply_email(contact_email, alias.user),
                 is_cc=header.lower() == "cc",
                 automatic_created=True,
             )
|
@ -546,20 +540,12 @@ def sign_msg(msg: Message) -> Message:
|
||||||
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
|
signature.add_header("Content-Disposition", 'attachment; filename="signature.asc"')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
payload = sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
signature.set_payload(sign_data(message_to_bytes(msg).replace(b"\n", b"\r\n")))
|
||||||
|
|
||||||
if not payload:
|
|
||||||
raise PGPException("Empty signature by gnupg")
|
|
||||||
|
|
||||||
signature.set_payload(payload)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
LOG.e("Cannot sign, try using pgpy")
|
LOG.e("Cannot sign, try using pgpy")
|
||||||
payload = sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
signature.set_payload(
|
||||||
|
sign_data_with_pgpy(message_to_bytes(msg).replace(b"\n", b"\r\n"))
|
||||||
if not payload:
|
)
|
||||||
raise PGPException("Empty signature by pgpy")
|
|
||||||
|
|
||||||
signature.set_payload(payload)
|
|
||||||
|
|
||||||
container.attach(signature)
|
container.attach(signature)
|
||||||
|
|
||||||
|
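A note on the sign_msg hunk above: the removed (master-side) lines sign with gnupg first and fall back to pgpy, refusing to attach an empty signature in either case. A minimal, self-contained sketch of that fallback pattern is below; the callable parameters stand in for the project's `sign_data` and `sign_data_with_pgpy` helpers named in the hunk, and everything else is illustrative rather than the project's actual implementation.

```python
from typing import Callable


class PGPException(Exception):
    """Mirrors the exception raised in the hunk when a signer returns nothing."""


def sign_with_fallback(
    data: bytes,
    sign_primary: Callable[[bytes], str],   # e.g. the gnupg-backed sign_data
    sign_fallback: Callable[[bytes], str],  # e.g. the pgpy-backed sign_data_with_pgpy
) -> str:
    """Try the primary signer; on any error or empty result, retry with the fallback."""
    try:
        payload = sign_primary(data)
        if not payload:
            # an empty signature is treated as a failure and triggers the fallback
            raise PGPException("Empty signature by primary signer")
        return payload
    except Exception:
        payload = sign_fallback(data)
        if not payload:
            raise PGPException("Empty signature by fallback signer")
        return payload
```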
@@ -636,8 +622,8 @@ def handle_forward(envelope, msg: Message, rcpt_to: str) -> List[Tuple[bool, str

    user = alias.user

-   if not user.can_send_or_receive():
-       LOG.i(f"User {user} cannot receive emails")
+   if user.disabled:
+       LOG.w("User %s disabled, disable forwarding emails for %s", user, alias)
        if should_ignore_bounce(envelope.mail_from):
            return [(True, status.E207)]
        else:
@@ -859,7 +845,9 @@ def forward_email_to_mailbox(
            f"""Email sent to {alias.email} from an invalid address and cannot be replied""",
        )

-   headers_to_keep = [
+   delete_all_headers_except(
+       msg,
+       [
            headers.FROM,
            headers.TO,
            headers.CC,
@@ -870,13 +858,13 @@ def forward_email_to_mailbox(
            # References and In-Reply-To are used for keeping the email thread
            headers.REFERENCES,
            headers.IN_REPLY_TO,
-           headers.LIST_UNSUBSCRIBE,
-           headers.LIST_UNSUBSCRIBE_POST,
-       ] + headers.MIME_HEADERS
-   if user.include_header_email_header:
-       headers_to_keep.append(headers.AUTHENTICATION_RESULTS)
-   delete_all_headers_except(msg, headers_to_keep)
+       ]
+       + headers.MIME_HEADERS,
+   )

+   # create PGP email if needed
+   if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
+       LOG.d("Encrypt message using mailbox %s", mailbox)
        if mailbox.generic_subject:
            LOG.d("Use a generic subject for %s", mailbox)
            orig_subject = msg[headers.SUBJECT]
@@ -890,10 +878,6 @@ def forward_email_to_mailbox(
                f"""Forwarded by SimpleLogin to {alias.email} from "{sender}" with <b>{orig_subject}</b> as subject""",
            )

-   # create PGP email if needed
-   if mailbox.pgp_enabled() and user.is_premium() and not alias.disable_pgp:
-       LOG.d("Encrypt message using mailbox %s", mailbox)
-
        try:
            msg = prepare_pgp_message(
                msg, mailbox.pgp_finger_print, mailbox.pgp_public_key, can_sign=True
@@ -913,11 +897,6 @@ def forward_email_to_mailbox(
    msg[headers.SL_EMAIL_LOG_ID] = str(email_log.id)
    if user.include_header_email_header:
        msg[headers.SL_ENVELOPE_FROM] = envelope.mail_from
-       if contact.name:
-           original_from = f"{contact.name} <{contact.website_email}>"
-       else:
-           original_from = contact.website_email
-       msg[headers.SL_ORIGINAL_FROM] = original_from
    # when an alias isn't in the To: header, there's no way for users to know what alias has received the email
    msg[headers.SL_ENVELOPE_TO] = alias.email

@@ -966,11 +945,10 @@ def forward_email_to_mailbox(
        envelope.rcpt_options,
    )

-   contact_domain = get_email_domain_part(contact.reply_email)
    try:
        sl_sendmail(
            # use a different envelope sender for each forward (aka VERP)
-           generate_verp_email(VerpType.bounce_forward, email_log.id, contact_domain),
+           generate_verp_email(VerpType.bounce_forward, email_log.id),
            mailbox.email,
            msg,
            envelope.mail_options,
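The "aka VERP" comment in the hunk above refers to Variable Envelope Return Path: each forwarded message gets its own envelope sender that encodes the email-log id, so a later bounce can be matched back to the exact delivery. The project's `generate_verp_email` implementation is not shown in this compare; the sketch below is only a generic illustration of the idea, and the secret, prefix, and domain values are hypothetical.

```python
import hashlib
import hmac
from typing import Optional

VERP_SECRET = b"change-me"        # hypothetical signing key
VERP_PREFIX = "bounce_forward"    # hypothetical local-part prefix
BOUNCE_DOMAIN = "example.com"     # hypothetical bounce domain


def generate_bounce_sender(email_log_id: int) -> str:
    """Build a per-delivery envelope sender that encodes and signs the email-log id."""
    payload = f"{VERP_PREFIX}+{email_log_id}"
    digest = hmac.new(VERP_SECRET, payload.encode(), hashlib.sha256).hexdigest()[:8]
    return f"{payload}+{digest}@{BOUNCE_DOMAIN}"


def parse_bounce_recipient(address: str) -> Optional[int]:
    """Recover the email-log id from a bounce recipient, or None if the address is not ours."""
    local, _, domain = address.partition("@")
    if domain != BOUNCE_DOMAIN:
        return None
    parts = local.split("+")
    if len(parts) != 3 or parts[0] != VERP_PREFIX:
        return None
    prefix, log_id, digest = parts
    expected = hmac.new(VERP_SECRET, f"{prefix}+{log_id}".encode(), hashlib.sha256).hexdigest()[:8]
    if not hmac.compare_digest(digest, expected):
        return None
    return int(log_id) if log_id.isdigit() else None
```

For example, `generate_bounce_sender(123)` would yield something like `bounce_forward+123+<digest>@example.com`, which `parse_bounce_recipient` can later verify and map back to log id 123.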
@@ -1039,12 +1017,8 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):

    reply_email = rcpt_to

-   reply_domain = get_email_domain_part(reply_email)
-
-   # reply_email must end with EMAIL_DOMAIN or a domain that can be used as reverse alias domain
+   # reply_email must end with EMAIL_DOMAIN
    if not reply_email.endswith(EMAIL_DOMAIN):
-       sl_domain: SLDomain = SLDomain.get_by(domain=reply_domain)
-       if sl_domain is None:
        LOG.w(f"Reply email {reply_email} has wrong domain")
        return False, status.E501

@@ -1058,7 +1032,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):

    alias = contact.alias
    alias_address: str = contact.alias.email
-   alias_domain = get_email_domain_part(alias_address)
+   alias_domain = alias_address[alias_address.find("@") + 1 :]

    # Sanity check: verify alias domain is managed by SimpleLogin
    # scenario: a user have removed a domain but due to a bug, the aliases are still there
@@ -1069,8 +1043,13 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
    user = alias.user
    mail_from = envelope.mail_from

-   if not user.can_send_or_receive():
-       LOG.i(f"User {user} cannot send emails")
+   if user.disabled:
+       LOG.e(
+           "User %s disabled, disable sending emails from %s to %s",
+           user,
+           alias,
+           contact,
+       )
        return False, status.E504

    # Check if we need to reject or quarantine based on dmarc
@@ -1196,7 +1175,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
    )

    # replace reverse alias by real address for all contacts
-   for reply_email, website_email in contact_query.values(
+   for (reply_email, website_email) in contact_query.values(
        Contact.reply_email, Contact.website_email
    ):
        msg = replace(msg, reply_email, website_email)
@@ -1251,6 +1230,7 @@ def handle_reply(envelope, msg: Message, rcpt_to: str) -> (bool, str):
    if str(msg[headers.TO]).lower() == "undisclosed-recipients:;":
        # no need to replace TO header
        LOG.d("email is sent in BCC mode")
+       del msg[headers.TO]
    else:
        replace_header_when_reply(msg, alias, headers.TO)

@@ -1951,7 +1931,7 @@ def handle_bounce(envelope, email_log: EmailLog, msg: Message) -> str:
    for is_delivered, smtp_status in handle_forward(envelope, msg, alias.email):
        res.append((is_delivered, smtp_status))

-   for is_success, smtp_status in res:
+   for (is_success, smtp_status) in res:
        # Consider all deliveries successful if 1 delivery is successful
        if is_success:
            return smtp_status
@@ -2271,7 +2251,7 @@ def handle(envelope: Envelope, msg: Message) -> str:
    if nb_success > 0 and nb_non_success > 0:
        LOG.e(f"some deliveries fail and some success, {mail_from}, {rcpt_tos}, {res}")

-   for is_success, smtp_status in res:
+   for (is_success, smtp_status) in res:
        # Consider all deliveries successful if 1 delivery is successful
        if is_success:
            return smtp_status

@@ -42,16 +42,14 @@ def add_sl_domains():
            LOG.d("%s is already a SL domain", alias_domain)
        else:
            LOG.i("Add %s to SL domain", alias_domain)
-           SLDomain.create(domain=alias_domain, use_as_reverse_alias=True)
+           SLDomain.create(domain=alias_domain)

    for premium_domain in PREMIUM_ALIAS_DOMAINS:
        if SLDomain.get_by(domain=premium_domain):
            LOG.d("%s is already a SL domain", premium_domain)
        else:
            LOG.i("Add %s to SL domain", premium_domain)
-           SLDomain.create(
-               domain=premium_domain, premium_only=True, use_as_reverse_alias=True
-           )
+           SLDomain.create(domain=premium_domain, premium_only=True)

    Session.commit()
@@ -89,6 +89,7 @@ aghast
agile
agility
aging
+agnostic
agonize
agonizing
agony
@@ -374,6 +375,8 @@ augmented
august
authentic
author
+autism
+autistic
autograph
automaker
automated
@@ -443,6 +446,7 @@ backyard
bacon
bacteria
bacterium
+badass
badge
badland
badly
@@ -1102,6 +1106,7 @@ clinic
clinking
clip
clique
+cloak
clobber
clock
clone
@@ -1771,6 +1776,7 @@ diagnosis
diagram
dial
diameter
+diaper
diaphragm
diary
dice
@@ -2026,6 +2032,9 @@ duffel
dugout
duh
duke
+duller
+dullness
+duly
dumping
dumpling
dumpster
@@ -2518,6 +2527,8 @@ feisty
feline
felt-tip
feminine
+feminism
+feminist
feminize
femur
fence
@@ -2656,6 +2667,7 @@ fondness
fondue
font
food
+fool
footage
football
footbath
@@ -2765,6 +2777,7 @@ gag
gainfully
gaining
gains
+gala
gallantly
galleria
gallery
@@ -3151,6 +3164,8 @@ hardware
hardwired
hardwood
hardy
+harmful
+harmless
harmonica
harmonics
harmonize
@@ -3325,6 +3340,7 @@ identical
identify
identity
ideology
+idiocy
idiom
idly
igloo
@@ -3341,6 +3357,7 @@ imaging
imbecile
imitate
imitation
+immature
immerse
immersion
imminent
@@ -3370,10 +3387,14 @@ implode
implosion
implosive
imply
+impolite
important
importer
impose
imposing
+impotence
+impotency
+impotent
impound
imprecise
imprint
@@ -3403,6 +3424,8 @@ irritable
irritably
irritant
irritate
+islamic
+islamist
isolated
isolating
isolation
@@ -3501,6 +3524,7 @@ june
junior
juniper
junkie
+junkman
junkyard
jurist
juror
@@ -3546,6 +3570,9 @@ king
kinship
kinsman
kinswoman
+kissable
+kisser
+kissing
kitchen
kite
kitten
@@ -3622,6 +3649,7 @@ laundry
laurel
lavender
lavish
+laxative
lazily
laziness
lazy
@@ -3662,6 +3690,7 @@ liable
liberty
librarian
library
+licking
licorice
lid
life
@@ -3712,6 +3741,8 @@ livestock
lividly
living
lizard
+lubricant
+lubricate
lucid
luckily
luckiness
@@ -3847,6 +3878,7 @@ marshland
marshy
marsupial
marvelous
+marxism
mascot
masculine
mashed
@@ -3882,6 +3914,8 @@ maximum
maybe
mayday
mayflower
+moaner
+moaning
mobile
mobility
mobilize
@@ -4090,6 +4124,7 @@ nemeses
nemesis
neon
nephew
+nerd
nervous
nervy
nest
@@ -4104,6 +4139,7 @@ never
next
nibble
nickname
+nicotine
niece
nifty
nimble
@@ -4131,10 +4167,14 @@ nuptials
nursery
nursing
nurture
+nutcase
nutlike
nutmeg
nutrient
nutshell
+nuttiness
+nutty
+nuzzle
nylon
oaf
oak
@@ -4165,6 +4205,7 @@ obstinate
obstruct
obtain
obtrusive
+obtuse
obvious
occultist
occupancy
@@ -4405,6 +4446,7 @@ palpitate
paltry
pampered
pamperer
+pampers
pamphlet
panama
pancake
@@ -4609,6 +4651,7 @@ plated
platform
plating
platinum
+platonic
platter
platypus
plausible
@@ -4734,6 +4777,8 @@ prancing
pranker
prankish
prankster
+prayer
+praying
preacher
preaching
preachy
@@ -4751,6 +4796,8 @@ prefix
preflight
preformed
pregame
+pregnancy
+pregnant
preheated
prelaunch
prelaw
@@ -4890,6 +4937,7 @@ prudishly
prune
pruning
pry
+psychic
public
publisher
pucker
@@ -4909,7 +4957,8 @@ punctual
punctuate
punctured
pungent
-punishe
+punisher
+punk
pupil
puppet
puppy
@@ -4991,6 +5040,7 @@ quote
rabid
race
racing
+racism
rack
racoon
radar
@@ -5105,6 +5155,7 @@ recount
recoup
recovery
recreate
+rectal
rectangle
rectified
rectify
@@ -5571,6 +5622,7 @@ sarcastic
sardine
sash
sasquatch
+sassy
satchel
satiable
satin
@@ -5599,6 +5651,7 @@ scaling
scallion
scallop
scalping
+scam
scandal
scanner
scanning
@@ -5875,6 +5928,8 @@ silent
silica
silicon
silk
+silliness
+silly
silo
silt
silver
@@ -5936,6 +5991,7 @@ skimmer
skimming
skimpily
skincare
+skinhead
skinless
skinning
skinny
@@ -6141,6 +6197,7 @@ splinter
splotchy
splurge
spoilage
+spoiled
spoiler
spoiling
spoils
@@ -7022,6 +7079,7 @@ undocked
undoing
undone
undrafted
+undress
undrilled
undusted
undying

@@ -158677,6 +158677,16 @@ isis
isize
isl
islay
+islam
+islamic
+islamism
+islamist
+islamistic
+islamite
+islamitic
+islamitish
+islamization
+islamize
island
islanded
islander
@@ -1,31 +0,0 @@
-"""empty message
-
-Revision ID: 5f4a5625da66
-Revises: 2c2093c82bc0
-Create Date: 2023-04-03 18:30:46.488231
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '5f4a5625da66'
-down_revision = '2c2093c82bc0'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('public_domain', sa.Column('partner_id', sa.Integer(), nullable=True))
-    op.create_foreign_key(None, 'public_domain', 'partner', ['partner_id'], ['id'], ondelete='cascade')
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint(None, 'public_domain', type_='foreignkey')
-    op.drop_column('public_domain', 'partner_id')
-    # ### end Alembic commands ###

@@ -1,29 +0,0 @@
-"""empty message
-
-Revision ID: 893c0d18475f
-Revises: 5f4a5625da66
-Create Date: 2023-04-14 18:20:03.807367
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '893c0d18475f'
-down_revision = '5f4a5625da66'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_index(op.f('ix_contact_pgp_finger_print'), 'contact', ['pgp_finger_print'], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index(op.f('ix_contact_pgp_finger_print'), table_name='contact')
-    # ### end Alembic commands ###

@@ -1,35 +0,0 @@
-"""empty message
-
-Revision ID: bc496c0a0279
-Revises: 893c0d18475f
-Create Date: 2023-04-14 19:09:38.540514
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = 'bc496c0a0279'
-down_revision = '893c0d18475f'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_index(op.f('ix_alias_used_on_alias_id'), 'alias_used_on', ['alias_id'], unique=False)
-    op.create_index(op.f('ix_client_user_alias_id'), 'client_user', ['alias_id'], unique=False)
-    op.create_index(op.f('ix_hibp_notified_alias_alias_id'), 'hibp_notified_alias', ['alias_id'], unique=False)
-    op.create_index(op.f('ix_users_newsletter_alias_id'), 'users', ['newsletter_alias_id'], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index(op.f('ix_users_newsletter_alias_id'), table_name='users')
-    op.drop_index(op.f('ix_hibp_notified_alias_alias_id'), table_name='hibp_notified_alias')
-    op.drop_index(op.f('ix_client_user_alias_id'), table_name='client_user')
-    op.drop_index(op.f('ix_alias_used_on_alias_id'), table_name='alias_used_on')
-    # ### end Alembic commands ###

@@ -1,29 +0,0 @@
-"""empty message
-
-Revision ID: 2d89315ac650
-Revises: bc496c0a0279
-Create Date: 2023-04-15 20:43:44.218020
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '2d89315ac650'
-down_revision = 'bc496c0a0279'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_index(op.f('ix_partner_subscription_end_at'), 'partner_subscription', ['end_at'], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index(op.f('ix_partner_subscription_end_at'), table_name='partner_subscription')
-    # ### end Alembic commands ###

@@ -1,29 +0,0 @@
-"""empty message
-
-Revision ID: 01e2997e90d3
-Revises: 893c0d18475f
-Create Date: 2023-04-19 16:09:11.851588
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '01e2997e90d3'
-down_revision = '893c0d18475f'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('public_domain', sa.Column('use_as_reverse_alias', sa.Boolean(), server_default='0', nullable=False))
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('public_domain', 'use_as_reverse_alias')
-    # ### end Alembic commands ###

@@ -1,25 +0,0 @@
-"""empty message
-
-Revision ID: 2634b41f54db
-Revises: 01e2997e90d3, 2d89315ac650
-Create Date: 2023-04-20 11:47:43.048536
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '2634b41f54db'
-down_revision = ('01e2997e90d3', '2d89315ac650')
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    pass
-
-
-def downgrade():
-    pass

@@ -1,42 +0,0 @@
-"""empty message
-
-Revision ID: 01827104004b
-Revises: 2634b41f54db
-Create Date: 2023-07-28 19:39:28.675490
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '01827104004b'
-down_revision = '2634b41f54db'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    with op.get_context().autocommit_block():
-        # ### commands auto generated by Alembic - please adjust! ###
-        op.create_index(op.f('ix_alias_hibp_last_check'), 'alias', ['hibp_last_check'], unique=False, postgresql_concurrently=True)
-        op.create_index('ix_bounce_created_at', 'bounce', ['created_at'], unique=False, postgresql_concurrently=True)
-        op.create_index('ix_monitoring_created_at', 'monitoring', ['created_at'], unique=False, postgresql_concurrently=True)
-        op.create_index('ix_transactional_email_created_at', 'transactional_email', ['created_at'], unique=False, postgresql_concurrently=True)
-        op.create_index(op.f('ix_users_activated'), 'users', ['activated'], unique=False, postgresql_concurrently=True)
-        op.create_index('ix_users_activated_trial_end_lifetime', 'users', ['activated', 'trial_end', 'lifetime'], unique=False, postgresql_concurrently=True)
-        op.create_index(op.f('ix_users_referral_id'), 'users', ['referral_id'], unique=False, postgresql_concurrently=True)
-        # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index(op.f('ix_users_referral_id'), table_name='users')
-    op.drop_index('ix_users_activated_trial_end_lifetime', table_name='users')
-    op.drop_index(op.f('ix_users_activated'), table_name='users')
-    op.drop_index('ix_transactional_email_created_at', table_name='transactional_email')
-    op.drop_index('ix_monitoring_created_at', table_name='monitoring')
-    op.drop_index('ix_bounce_created_at', table_name='bounce')
-    op.drop_index(op.f('ix_alias_hibp_last_check'), table_name='alias')
-    # ### end Alembic commands ###

@@ -1,33 +0,0 @@
-"""empty message
-
-Revision ID: 0a5701a4f5e4
-Revises: 01827104004b
-Create Date: 2023-09-07 15:28:10.122756
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '0a5701a4f5e4'
-down_revision = '01827104004b'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('users', sa.Column('delete_on', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True))
-    with op.get_context().autocommit_block():
-        op.create_index('ix_users_delete_on', 'users', ['delete_on'], unique=False, postgresql_concurrently=True)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    with op.get_context().autocommit_block():
-        op.drop_index('ix_users_delete_on', table_name='users', postgresql_concurrently=True)
-        op.drop_column('users', 'delete_on')
-    # ### end Alembic commands ###

@@ -1,34 +0,0 @@
-"""empty message
-
-Revision ID: ec7fdde8da9f
-Revises: 0a5701a4f5e4
-Create Date: 2023-09-28 18:09:48.016620
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = "ec7fdde8da9f"
-down_revision = "0a5701a4f5e4"
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    with op.get_context().autocommit_block():
-        op.create_index(
-            "ix_email_log_created_at", "email_log", ["created_at"], unique=False
-        )
-
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    with op.get_context().autocommit_block():
-        op.drop_index("ix_email_log_created_at", table_name="email_log")
-    # ### end Alembic commands ###

@@ -1,39 +0,0 @@
-"""empty message
-
-Revision ID: 46ecb648a47e
-Revises: ec7fdde8da9f
-Create Date: 2023-10-05 10:43:35.668902
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = "46ecb648a47e"
-down_revision = "ec7fdde8da9f"
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    with op.get_context().autocommit_block():
-        op.create_index(
-            op.f("ix_message_id_matching_email_log_id"),
-            "message_id_matching",
-            ["email_log_id"],
-            unique=False,
-        )
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    with op.get_context().autocommit_block():
-        op.drop_index(
-            op.f("ix_message_id_matching_email_log_id"),
-            table_name="message_id_matching",
-        )
-    # ### end Alembic commands ###

@@ -1,31 +0,0 @@
-"""empty message
-
-Revision ID: 4bc54632d9aa
-Revises: 46ecb648a47e
-Create Date: 2023-11-07 14:02:17.610226
-
-"""
-import sqlalchemy_utils
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = '4bc54632d9aa'
-down_revision = '46ecb648a47e'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('ix_newsletter_subject', table_name='newsletter')
-    op.create_index(op.f('ix_newsletter_subject'), 'newsletter', ['subject'], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index(op.f('ix_newsletter_subject'), table_name='newsletter')
-    op.create_index('ix_newsletter_subject', 'newsletter', ['subject'], unique=True)
-    # ### end Alembic commands ###

@@ -1,21 +0,0 @@
-from dataclasses import dataclass
-from typing import List
-
-
-@dataclass
-class UpcloudRecord:
-    db_role: str
-    label: str
-    time: str
-    value: float
-
-
-@dataclass
-class UpcloudMetric:
-    metric_name: str
-    records: List[UpcloudRecord]
-
-
-@dataclass
-class UpcloudMetrics:
-    metrics: List[UpcloudMetric]

@@ -1,20 +0,0 @@
-from app.config import UPCLOUD_DB_ID, UPCLOUD_PASSWORD, UPCLOUD_USERNAME
-from app.log import LOG
-from monitor.newrelic import NewRelicClient
-from monitor.upcloud import UpcloudClient
-
-
-class MetricExporter:
-    def __init__(self, newrelic_license: str):
-        self.__upcloud = UpcloudClient(
-            username=UPCLOUD_USERNAME, password=UPCLOUD_PASSWORD
-        )
-        self.__newrelic = NewRelicClient(newrelic_license)
-
-    def run(self):
-        try:
-            metrics = self.__upcloud.get_metrics(UPCLOUD_DB_ID)
-            self.__newrelic.send(metrics)
-            LOG.info("Upcloud metrics sent to NewRelic")
-        except Exception as e:
-            LOG.warn(f"Could not export metrics: {e}")

@@ -1,26 +0,0 @@
-from monitor.metric import UpcloudMetrics
-
-from newrelic_telemetry_sdk import GaugeMetric, MetricClient
-
-_NEWRELIC_BASE_HOST = "metric-api.eu.newrelic.com"
-
-
-class NewRelicClient:
-    def __init__(self, license_key: str):
-        self.__client = MetricClient(license_key=license_key, host=_NEWRELIC_BASE_HOST)
-
-    def send(self, metrics: UpcloudMetrics):
-        batch = []
-
-        for metric in metrics.metrics:
-            for record in metric.records:
-                batch.append(
-                    GaugeMetric(
-                        name=f"upcloud.db.{metric.metric_name}",
-                        value=record.value,
-                        tags={"host": record.label, "db_role": record.db_role},
-                    )
-                )
-
-        response = self.__client.send_batch(batch)
-        response.raise_for_status()

@@ -1,82 +0,0 @@
-from app.log import LOG
-from monitor.metric import UpcloudMetric, UpcloudMetrics, UpcloudRecord
-
-import base64
-import requests
-from typing import Any
-
-
-BASE_URL = "https://api.upcloud.com"
-
-
-def get_metric(json: Any, metric: str) -> UpcloudMetric:
-    records = []
-
-    if metric in json:
-        metric_data = json[metric]
-        data = metric_data["data"]
-        cols = list(map(lambda x: x["label"], data["cols"][1:]))
-        latest = data["rows"][-1]
-        time = latest[0]
-        for column_idx in range(len(cols)):
-            value = latest[1 + column_idx]
-
-            # If the latest value is None, try to fetch the second to last
-            if value is None:
-                value = data["rows"][-2][1 + column_idx]
-
-            if value is not None:
-                label = cols[column_idx]
-                if "(master)" in label:
-                    db_role = "master"
-                else:
-                    db_role = "standby"
-                records.append(
-                    UpcloudRecord(time=time, db_role=db_role, label=label, value=value)
-                )
-            else:
-                LOG.warn(f"Could not get value for metric {metric}")
-
-    return UpcloudMetric(metric_name=metric, records=records)
-
-
-def get_metrics(json: Any) -> UpcloudMetrics:
-    return UpcloudMetrics(
-        metrics=[
-            get_metric(json, "cpu_usage"),
-            get_metric(json, "disk_usage"),
-            get_metric(json, "diskio_reads"),
-            get_metric(json, "diskio_writes"),
-            get_metric(json, "load_average"),
-            get_metric(json, "mem_usage"),
-            get_metric(json, "net_receive"),
-            get_metric(json, "net_send"),
-        ]
-    )
-
-
-class UpcloudClient:
-    def __init__(self, username: str, password: str):
-        if not username:
-            raise Exception("UpcloudClient username must be set")
-        if not password:
-            raise Exception("UpcloudClient password must be set")
-
-        client = requests.Session()
-        encoded_auth = base64.b64encode(
-            f"{username}:{password}".encode("utf-8")
-        ).decode("utf-8")
-        client.headers = {"Authorization": f"Basic {encoded_auth}"}
-        self.__client = client
-
-    def get_metrics(self, db_uuid: str) -> UpcloudMetrics:
-        url = f"{BASE_URL}/1.3/database/{db_uuid}/metrics?period=hour"
-        LOG.d(f"Performing request to {url}")
-        response = self.__client.get(url)
-        LOG.d(f"Status code: {response.status_code}")
-        if response.status_code != 200:
-            return UpcloudMetrics(metrics=[])
-
-        as_json = response.json()
-
-        return get_metrics(as_json)

@@ -1,4 +1,3 @@
-import configparser
import os
import subprocess
from time import sleep
@@ -8,7 +7,6 @@ import newrelic.agent

from app.db import Session
from app.log import LOG
-from monitor.metric_exporter import MetricExporter

# the number of consecutive fails
# if more than _max_nb_fails, alert
@@ -21,18 +19,6 @@ _max_nb_fails = 10
# the maximum number of emails in incoming & active queue
_max_incoming = 50

-_NR_CONFIG_FILE_LOCATION_VAR = "NEW_RELIC_CONFIG_FILE"
-
-
-def get_newrelic_license() -> str:
-    nr_file = os.environ.get(_NR_CONFIG_FILE_LOCATION_VAR, None)
-    if nr_file is None:
-        raise Exception(f"{_NR_CONFIG_FILE_LOCATION_VAR} not defined")
-
-    config = configparser.ConfigParser()
-    config.read(nr_file)
-    return config["newrelic"]["license_key"]
-
-
@newrelic.agent.background_task()
def log_postfix_metrics():
@@ -94,13 +80,10 @@ def log_nb_db_connection():


if __name__ == "__main__":
-   exporter = MetricExporter(get_newrelic_license())
    while True:
        log_postfix_metrics()
        log_nb_db_connection()
        Session.close()

-       exporter.run()
-
        # 1 min
        sleep(60)
poetry.lock (generated, 1315 changes): file diff suppressed because it is too large.
@@ -18,9 +18,6 @@ exclude = '''
)
'''

-[tool.ruff]
-ignore-init-module-imports = true
-
[tool.djlint]
indent = 2
profile = "jinja"
@@ -56,7 +53,7 @@ packages = [
include = ["templates/*", "templates/**/*", "local_data/*.txt"]

[tool.poetry.dependencies]
-python = "^3.10"
+python = "^3.7.2"
flask = "^1.1.2"
flask_login = "^0.5.0"
wtforms = "^2.3.3"
@@ -98,12 +95,13 @@ webauthn = "^0.4.7"
pyspf = "^2.0.14"
Flask-Limiter = "^1.4"
memory_profiler = "^0.57.0"
-gevent = "22.10.2"
+gevent = "^21.12.0"
+aiospamc = "^0.6.1"
email_validator = "^1.1.1"
PGPy = "0.5.4"
coinbase-commerce = "^1.0.1"
requests = "^2.25.1"
-newrelic = "8.8.0"
+newrelic = "^7.10.0"
flanker = "^0.9.11"
pyre2 = "^0.3.6"
tldextract = "^3.1.2"
@@ -113,8 +111,6 @@ Deprecated = "^1.2.13"
cryptography = "37.0.1"
SQLAlchemy = "1.3.24"
redis = "^4.5.3"
-newrelic-telemetry-sdk = "^0.5.0"
-aiospamc = "0.10"

[tool.poetry.dev-dependencies]
pytest = "^7.0.0"
@@ -124,9 +120,6 @@ black = "^22.1.0"
djlint = "^1.3.0"
pylint = "^2.14.4"

-[tool.poetry.group.dev.dependencies]
-ruff = "^0.1.5"
-
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
server.py (10 changes)

@@ -79,7 +79,6 @@ from app.config (
    MEM_STORE_URI,
)
from app.dashboard.base import dashboard_bp
-from app.subscription_webhook import execute_subscription_webhook
from app.db import Session
from app.developer.base import developer_bp
from app.discover.base import discover_bp
@@ -492,7 +491,6 @@ def setup_paddle_callback(app: Flask):
            # in case user cancels a plan and subscribes a new plan
            sub.cancelled = False

-           execute_subscription_webhook(user)
            LOG.d("User %s upgrades!", user)

            Session.commit()
@@ -511,7 +509,6 @@ def setup_paddle_callback(app: Flask):
            ).date()

            Session.commit()
-           execute_subscription_webhook(sub.user)

    elif request.form.get("alert_name") == "subscription_cancelled":
        subscription_id = request.form.get("subscription_id")
@@ -541,7 +538,6 @@ def setup_paddle_callback(app: Flask):
                    end_date=request.form.get("cancellation_effective_date"),
                ),
            )
-           execute_subscription_webhook(sub.user)

        else:
            # user might have deleted their account
@@ -584,7 +580,6 @@ def setup_paddle_callback(app: Flask):
            sub.cancelled = False

            Session.commit()
-           execute_subscription_webhook(sub.user)
        else:
            LOG.w(
                f"update non-exist subscription {subscription_id}. {request.form}"
@@ -601,7 +596,6 @@ def setup_paddle_callback(app: Flask):
            Subscription.delete(sub.id)
            Session.commit()
            LOG.e("%s requests a refund", user)
-           execute_subscription_webhook(sub.user)

    elif request.form.get("alert_name") == "subscription_payment_refunded":
        subscription_id = request.form.get("subscription_id")
@@ -635,13 +629,12 @@ def setup_paddle_callback(app: Flask):
            LOG.e("Unknown plan_id %s", plan_id)
        else:
            LOG.w("partial subscription_payment_refunded, not handled")
-       execute_subscription_webhook(sub.user)

    return "OK"

@app.route("/paddle_coupon", methods=["GET", "POST"])
def paddle_coupon():
-   LOG.d("paddle coupon callback %s", request.form)
+   LOG.d(f"paddle coupon callback %s", request.form)

    if not paddle_utils.verify_incoming_request(dict(request.form)):
        LOG.e("request not coming from paddle. Request data:%s", dict(request.form))
@@ -749,7 +742,6 @@ def handle_coinbase_event(event) -> bool:
            coinbase_subscription=coinbase_subscription,
        ),
    )
-   execute_subscription_webhook(user)

    return True
shell.py (7 changes)

@@ -1,12 +1,13 @@
+from time import sleep
+
import flask_migrate
from IPython import embed
from sqlalchemy_utils import create_database, database_exists, drop_database

from app import models
from app.config import DB_URI
-from app.db import Session
-from app.log import LOG
-from app.models import User, RecoveryCode
+from app.models import *

if False:
    # noinspection PyUnreachableCode
Binary file not shown. (Size before: 38 KiB, after: 13 KiB)
@ -155,8 +155,10 @@ $(".pin-alias").change(async function () {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
async function handleNoteChange(aliasId, aliasEmail) {
|
$(".save-note").on("click", async function () {
|
||||||
const note = document.getElementById(`note-${aliasId}`).value;
|
let oldValue;
|
||||||
|
let aliasId = $(this).data("alias");
|
||||||
|
let note = $(`#note-${aliasId}`).val();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
let res = await fetch(`/api/aliases/${aliasId}`, {
|
let res = await fetch(`/api/aliases/${aliasId}`, {
|
||||||
|
@ -170,27 +172,26 @@ async function handleNoteChange(aliasId, aliasEmail) {
|
||||||
});
|
});
|
||||||
|
|
||||||
if (res.ok) {
|
if (res.ok) {
|
||||||
toastr.success(`Description saved for ${aliasEmail}`);
|
toastr.success(`Saved`);
|
||||||
} else {
|
} else {
|
||||||
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
||||||
|
// reset to the original value
|
||||||
|
oldValue = !$(this).prop("checked");
|
||||||
|
$(this).prop("checked", oldValue);
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
||||||
|
// reset to the original value
|
||||||
|
oldValue = !$(this).prop("checked");
|
||||||
|
$(this).prop("checked", oldValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
});
|
||||||
|
|
||||||
function handleNoteFocus(aliasId) {
|
$(".save-mailbox").on("click", async function () {
|
||||||
document.getElementById(`note-focus-message-${aliasId}`).classList.remove('d-none');
|
let oldValue;
|
||||||
}
|
let aliasId = $(this).data("alias");
|
||||||
|
let mailbox_ids = $(`#mailbox-${aliasId}`).val();
|
||||||
function handleNoteBlur(aliasId) {
|
|
||||||
document.getElementById(`note-focus-message-${aliasId}`).classList.add('d-none');
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleMailboxChange(aliasId, aliasEmail) {
|
|
||||||
const selectedOptions = document.getElementById(`mailbox-${aliasId}`).selectedOptions;
|
|
||||||
const mailbox_ids = Array.from(selectedOptions).map((selectedOption) => selectedOption.value);
|
|
||||||
|
|
||||||
if (mailbox_ids.length === 0) {
|
if (mailbox_ids.length === 0) {
|
||||||
toastr.error("You must select at least a mailbox", "Error");
|
toastr.error("You must select at least a mailbox", "Error");
|
||||||
|
@ -209,18 +210,25 @@ async function handleMailboxChange(aliasId, aliasEmail) {
|
||||||
});
|
});
|
||||||
|
|
||||||
if (res.ok) {
|
if (res.ok) {
|
||||||
toastr.success(`Mailbox updated for ${aliasEmail}`);
|
toastr.success(`Mailbox Updated`);
|
||||||
} else {
|
} else {
|
||||||
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
||||||
|
// reset to the original value
|
||||||
|
oldValue = !$(this).prop("checked");
|
||||||
|
$(this).prop("checked", oldValue);
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
||||||
|
// reset to the original value
|
||||||
|
oldValue = !$(this).prop("checked");
|
||||||
|
$(this).prop("checked", oldValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
});
|
||||||
|
|
||||||
async function handleDisplayNameChange(aliasId, aliasEmail) {
|
$(".save-alias-name").on("click", async function () {
|
||||||
const name = document.getElementById(`alias-name-${aliasId}`).value;
|
let aliasId = $(this).data("alias");
|
||||||
|
let name = $(`#alias-name-${aliasId}`).val();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
let res = await fetch(`/api/aliases/${aliasId}`, {
|
let res = await fetch(`/api/aliases/${aliasId}`, {
|
||||||
|
@ -234,7 +242,7 @@ async function handleDisplayNameChange(aliasId, aliasEmail) {
|
||||||
});
|
});
|
||||||
|
|
||||||
if (res.ok) {
|
if (res.ok) {
|
||||||
toastr.success(`Display name saved for ${aliasEmail}`);
|
toastr.success(`Alias Name Saved`);
|
||||||
} else {
|
} else {
|
||||||
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
||||||
}
|
}
|
||||||
|
@ -242,41 +250,24 @@ async function handleDisplayNameChange(aliasId, aliasEmail) {
|
||||||
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
toastr.error("Sorry for the inconvenience! Could you refresh the page & retry please?", "Unknown Error");
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
});
|
||||||
|
|
||||||
function handleDisplayNameFocus(aliasId) {
|
|
||||||
document.getElementById(`display-name-focus-message-${aliasId}`).classList.remove('d-none');
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleDisplayNameBlur(aliasId) {
|
|
||||||
document.getElementById(`display-name-focus-message-${aliasId}`).classList.add('d-none');
|
|
||||||
}
|
|
||||||
|
|
||||||
new Vue({
  el: '#filter-app',
  delimiters: ["[[", "]]"], // necessary to avoid conflict with jinja
  data: {
    showFilter: false,
    showFilter: false
    showStats: false
  },
  methods: {
    async toggleFilter() {
      let that = this;
      that.showFilter = !that.showFilter;
      store.set('showFilter', that.showFilter);
    },

    async toggleStats() {
      let that = this;
      that.showStats = !that.showStats;
      store.set('showStats', that.showStats);
    }
  },
  async mounted() {
    if (store.get("showFilter"))
      this.showFilter = true;

    if (store.get("showStats"))
      this.showStats = true;
  }
});
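The Vue instance overrides the default `{{ }}` delimiters with `[[ ]]` because Jinja already claims `{{ }}` on the server side, and it keeps the filter/stats visibility sticky across page loads through the `store` helper (store.js, typically backed by localStorage). A quick illustration of the store.js calls involved, assuming it is loaded globally as `store`:

```js
// Persist a UI flag, read it back on the next page load, and forget it again.
store.set('showFilter', true);         // write (survives reloads)
const show = store.get('showFilter');  // read; undefined if never set
store.remove('showFilter');            // delete the stored value
```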
16 static/package-lock.json generated vendored

@@ -69,12 +69,12 @@
    "font-awesome": {
      "version": "4.7.0",
      "resolved": "https://registry.npmjs.org/font-awesome/-/font-awesome-4.7.0.tgz",
      "integrity": "sha512-U6kGnykA/6bFmg1M/oT9EkFeIYv7JlX3bozwQJWiiLz6L0w3F5vBVPxHlwyX/vtNq1ckcpRKOB9f2Qal/VtFpg=="
      "integrity": "sha1-j6jPBBGhoxr9B7BtKQK7n8gVoTM="
    },
    "htmx.org": {
      "version": "1.7.0",
      "version": "1.6.1",
      "resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-1.7.0.tgz",
      "resolved": "https://registry.npmjs.org/htmx.org/-/htmx.org-1.6.1.tgz",
      "integrity": "sha512-wIQ3yNq7yiLTm+6BhV7Z8qKKTzEQv9xN/I4QsN5FvdGi69SNWTsSMlhH69HPa1rpZ8zSq1A/e7gTbTySxliP8g=="
      "integrity": "sha512-i+1k5ee2eFWaZbomjckyrDjUpa3FMDZWufatUSBmmsjXVksn89nsXvr1KLGIdAajiz+ZSL7TE4U/QaZVd2U2sA=="
    },
    "intro.js": {
      "version": "2.9.3",
@@ -82,9 +82,9 @@
      "integrity": "sha512-hC+EXWnEuJeA3CveGMat3XHePd2iaXNFJIVfvJh2E9IzBMGLTlhWvPIVHAgKlOpO4lNayCxEqzr4N02VmHFr9Q=="
    },
    "jquery": {
      "version": "3.6.4",
      "version": "3.5.1",
      "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.6.4.tgz",
      "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.5.1.tgz",
      "integrity": "sha512-v28EW9DWDFpzcD9O5iyJXg3R3+q+mET5JhnjJzQUZMHOv67bpSIHq81GEYpPNZHG+XXHsfSme3nxp/hndKEcsQ=="
      "integrity": "sha512-XwIBPqcMn57FxfT+Go5pzySnm4KWkT1Tv7gjrpT1srtf8Weynl6R273VJ5GjkRb51IzMp5nbaPjJXMWeju2MKg=="
    },
    "multiple-select": {
      "version": "1.5.2",
@@ -107,7 +107,7 @@
    "toastr": {
      "version": "2.1.4",
      "resolved": "https://registry.npmjs.org/toastr/-/toastr-2.1.4.tgz",
      "integrity": "sha512-LIy77F5n+sz4tefMmFOntcJ6HL0Fv3k1TDnNmFZ0bU/GcvIIfy6eG2v7zQmMiYgaalAiUv75ttFrPn5s0gyqlA==",
      "integrity": "sha1-i0O+ZPudDEFIcURvLbjoyk6V8YE=",
      "requires": {
        "jquery": ">=1.12.0"
      }
@@ -9,13 +9,10 @@
<h1 class="card-title">Create new account</h1>
<div class="form-group">
  <label class="form-label">Email address</label>
  {{ form.email(class="form-control", type="email", placeholder="YourName@protonmail.com") }}
  {{ form.email(class="form-control", type="email") }}
  <div class="small-text alert alert-info" style="margin-top: 1px">
    Emails sent to your alias will be forwarded to this email address.
    <br>
    It can't be a disposable or forwarding email address.
    <br>
    We recommend using a <a href="https://proton.me/mail" target="_blank">Proton Mail</a> address
  </div>
  {{ render_field_errors(form.email) }}
</div>
Some files were not shown because too many files have changed in this diff.