Browse Source

chore: remove gitzones 😢

Peter Thomassen 2 years ago
parent
commit
0719c11dc5

+ 1 - 6
api/Dockerfile

@@ -13,7 +13,7 @@ ENV PIP_NO_CACHE_DIR=1
 
 COPY requirements.txt /usr/src/app/
 # freetype-dev is needed for captcha generation
-RUN apk add --no-cache gcc freetype-dev libffi-dev musl-dev libmemcached-dev postgresql-dev jpeg-dev zlib-dev git \
+RUN apk add --no-cache gcc freetype-dev libffi-dev musl-dev libmemcached-dev postgresql-dev jpeg-dev zlib-dev \
     && pip install --upgrade pip \
     && pip install -r requirements.txt \
     && pip freeze
@@ -24,11 +24,6 @@ ADD ["cronhook/crontab", "cronhook/start-cron.sh", "/root/cronhook/"]
 RUN crontab /root/cronhook/crontab
 RUN chmod +x /root/cronhook/start-cron.sh
 
-RUN mkdir /zones /var/run/celerybeat-schedule \
-    && chown nobody /zones /var/run/celerybeat-schedule \
-    && chmod 755 /zones \
-    && chmod 700 /var/run/celerybeat-schedule
-
 COPY . /usr/src/app
 
 EXPOSE 8000

+ 0 - 25
api/api/settings.py

@@ -12,9 +12,7 @@ https://docs.djangoproject.com/en/1.7/ref/settings/
 import os
 from datetime import timedelta
 
-from celery.schedules import crontab
 from django.conf.global_settings import PASSWORD_HASHERS as DEFAULT_PASSWORD_HASHERS
-from kombu import Queue, Exchange
 
 BASE_DIR = os.path.dirname(os.path.dirname(__file__))
 
@@ -180,29 +178,6 @@ TASK_CONFIG = {  # The first entry is the default queue
     'email_fast_lane': {'rate_limit': '1/s'},
     'email_immediate_lane': {'rate_limit': None},
 }
-CELERY_TIMEZONE = 'UTC'  # timezone for task schedule below
-CELERY_BEAT_SCHEDULE = {
-    'rotate_signatures': {
-        'task': 'desecapi.replication.update_all',
-        'schedule': crontab(minute=0, hour=0, day_of_week='thursday'),
-        'options': {'priority': 5},  # priority must be higher than rotation jobs for individual domains
-    },
-    'remove_history': {
-        'task': 'desecapi.replication.remove_history',
-        'schedule': crontab(minute=42, hour='*/3'),
-        'options': {'priority': 5},
-    },
-}
-CELERY_TASK_QUEUES = [
-    Queue(
-        'replication',
-        Exchange('replication'),
-        routing_key='replication',
-        queue_arguments={'x-max-priority': 10},  # make the replication queue a priority-queue
-    ),
-    # Other queues are created automatically
-]
-CELERY_BEAT_MAX_LOOP_INTERVAL = 15  # Low value important for running e2e2 tests in reasonable time
 
 # pdns accepts request payloads of this size.
 # This will hopefully soon be configurable: https://github.com/PowerDNS/pdns/pull/7550

+ 1 - 5
api/desecapi/metrics.py

@@ -1,4 +1,4 @@
-from prometheus_client import Counter, Histogram, Summary
+from prometheus_client import Counter, Histogram
 
 metrics = {}
 
@@ -15,10 +15,6 @@ def set_histogram(name, *args, **kwargs):
     metrics[name] = Histogram(name, *args, **kwargs)
 
 
-def set_summary(name, *args, **kwargs):
-    metrics[name] = Summary(name, *args, **kwargs)
-
-
 # models.py metrics
 set_counter('desecapi_captcha_content_created', 'number of times captcha content created', ['kind'])
 set_counter('desecapi_autodelegation_created', 'number of autodelegations added')

+ 21 - 0
api/desecapi/migrations/0026_remove_domain_replicated_and_more.py

@@ -0,0 +1,21 @@
+# Generated by Django 4.1 on 2022-08-11 20:28
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('desecapi', '0025_alter_token_max_age_alter_token_max_unused_period'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='domain',
+            name='replicated',
+        ),
+        migrations.RemoveField(
+            model_name='domain',
+            name='replication_duration',
+        ),
+    ]

+ 0 - 2
api/desecapi/models.py

@@ -239,8 +239,6 @@ class Domain(ExportModelOperationsMixin('Domain'), models.Model):
                             validators=validate_domain_name)
     owner = models.ForeignKey(User, on_delete=models.PROTECT, related_name='domains')
     published = models.DateTimeField(null=True, blank=True)
-    replicated = models.DateTimeField(null=True, blank=True)
-    replication_duration = models.DurationField(null=True, blank=True)
     minimum_ttl = models.PositiveIntegerField(default=_minimum_ttl_default.__func__)
     renewal_state = models.IntegerField(choices=RenewalState.choices, default=RenewalState.IMMORTAL)
     renewal_changed = models.DateTimeField(auto_now_add=True)

+ 1 - 5
api/desecapi/pdns_change_tracker.py

@@ -5,7 +5,7 @@ from django.db.models.signals import post_save, post_delete
 from django.db.transaction import atomic
 from django.utils import timezone
 
-from desecapi import metrics, replication
+from desecapi import metrics
 from desecapi.models import RRset, RR, Domain
 from desecapi.pdns import _pdns_post, NSLORD, NSMASTER, _pdns_delete, _pdns_patch, _pdns_put, pdns_id, \
     construct_catalog_rrset
@@ -254,12 +254,10 @@ class PDNSChangeTracker:
         # TODO introduce two phase commit protocol
         changes = self._compute_changes()
         axfr_required = set()
-        replication_required = set()
         for change in changes:
             try:
                 change.pdns_do()
                 change.api_do()
-                replication_required.add(change.domain_name)
                 if change.axfr_required:
                     axfr_required.add(change.domain_name)
             except Exception as e:
@@ -269,8 +267,6 @@ class PDNSChangeTracker:
 
         self.transaction.__exit__(None, None, None)
 
-        for name in replication_required:
-            replication.update.delay(name)
         for name in axfr_required:
             _pdns_put(NSMASTER, '/zones/%s/axfr-retrieve' % pdns_id(name))
         Domain.objects.filter(name__in=axfr_required).update(published=timezone.now())

+ 0 - 202
api/desecapi/replication.py

@@ -1,202 +0,0 @@
-import os
-import subprocess
-from datetime import datetime, timedelta
-from typing import List
-
-import dns.query
-import dns.zone
-from celery import shared_task
-from django.utils import timezone
-
-from desecapi import models
-
-
-class ReplicationException(Exception):
-
-    def __init__(self, message, **kwargs):
-        super().__init__(message)
-        for k, v in kwargs.items():
-            self.__setattr__(k, v)
-
-
-class GitRepositoryException(ReplicationException):
-    pass
-
-
-class UnsupportedZoneNameException(ReplicationException):
-    pass
-
-
-class Repository:
-    # TODO replication performance could potentially(*) be further improved by allowing to run multiple AXFR in
-    #  parallel, and then use a file lock to synchronize git file system actions
-    #  (*) but only if the signing server can sign multiple requests in parallel
-
-    _config = {
-        'user.email': 'api@desec.internal',
-        'user.name': 'deSEC API',
-    }
-
-    def __init__(self, path):
-        self.path = path
-
-    def _git(self, *args):
-        cmd = ['/usr/bin/git'] + list(args)
-        print('>>> ' + str(cmd))
-
-        with subprocess.Popen(
-                cmd,
-                bufsize=0,
-                cwd=self.path,
-                stderr=subprocess.PIPE,
-                stdout=subprocess.PIPE,
-                env={'HOME': '/'},  # Celery does not adjust $HOME when dropping privileges
-        ) as p:
-            try:
-                stdout, stderr = p.communicate(input=None, timeout=60)
-                rcode = p.returncode
-                stderr, stdout = stderr.decode(), stdout.decode()
-            except subprocess.TimeoutExpired:
-                p.kill()
-                raise
-            except UnicodeDecodeError:
-                GitRepositoryException('git stdout or stderr was not valid unicode!',
-                                       cmd=cmd, rcode=rcode, stderr=stderr, stdout=stdout)
-
-        print('\n'.join('<<< ' + s for s in stdout.split('\n')))
-        return cmd, rcode, stdout, stderr
-
-    def _git_do(self, *args):
-        cmd, rcode, stdout, stderr = self._git(*args)
-
-        if rcode != 0:
-            raise GitRepositoryException(f'{cmd} returned nonzero error code',
-                                         cmd=cmd, rcode=rcode, stdout=stdout, stderr=stderr)
-
-        if stderr.strip():
-            raise GitRepositoryException(f'{cmd} returned non-empty error output',
-                                         cmd=cmd, rcode=rcode, stdout=stdout, stderr=stderr)
-
-        return stdout
-
-    def _git_check(self, *args):
-        _, rcode, _, _ = self._git(*args)
-        return rcode
-
-    def commit_all(self, msg=None):
-        self._git_do('add', '.')
-        if self._git_check('diff', '--exit-code', '--numstat', '--staged'):
-            self._git_do('commit', '-m', msg or 'update')
-
-    def init(self):
-        self._git_do('init', '-b', 'main')
-        for k, v in self._config.items():
-            self._git_do('config', k, v)
-
-    def get_head(self):
-        return self.get_commit('HEAD')
-
-    def get_commit(self, rev):
-        try:
-            commit_hash, commit_msg = self._git_do('show', rev, '--format=%H%n%s', '-s').split('\n', 1)
-            return commit_hash, commit_msg[:-1]
-        except GitRepositoryException:
-            return None, None
-
-    def remove_history(self, before: datetime):
-        rev = self._git_do('log', f'--before={before.isoformat()}Z', '-1', '--format=%H')
-        with open(os.path.join(self.path, '.git', 'shallow'), 'w') as f:
-            f.writelines([rev])
-        self._git_do('reflog', 'expire', '--expire=now', '--all')
-        self._git_do('gc', '--prune=now')  # prune only
-        self._git_do('gc')  # remaining garbage collection (e.g. compressing file revisions)
-
-
-class ZoneRepository(Repository):
-    AXFR_SOURCE = '172.16.1.11'
-
-    def __init__(self, path):
-        super().__init__(path)
-        self._config['gc.auto'] = '0'
-        if not os.path.exists(os.path.join(self.path, '.git')):
-            self.init()
-            self.commit_all(msg='Inception or Recovery')
-            update_all.delay()
-
-    def refresh(self, name):
-        if '/' in name or '\x00' in name:
-            raise UnsupportedZoneNameException
-
-        # obtain AXFR
-        timeout = 60  # if AXFR take longer, the timeout must be increased (see also settings.py)
-        try:
-            xfr = list(dns.query.xfr(self.AXFR_SOURCE, name, timeout=timeout))
-        except dns.query.TransferError as e:
-            if e.rcode == dns.rcode.Rcode.NOTAUTH:
-                self._delete_zone(name)
-            else:
-                raise
-        else:
-            self._update_zone(name, xfr)
-
-    def _update_zone(self, name: str, xfr: List[dns.message.QueryMessage]):
-        z = dns.zone.from_xfr(xfr, check_origin=False)
-        try:
-            print(f'New SOA for {name}: '
-                  f'{z.get_rrset(name="", rdtype=dns.rdatatype.SOA).to_text()}')
-            print(f'         Signature: '
-                  f'{z.get_rrset(name="", rdtype=dns.rdatatype.RRSIG, covers=dns.rdatatype.SOA).to_text()}')
-        except AttributeError:
-            print(f'WARNING {name} has no SOA record?!')
-
-        # TODO sort AXFR? (but take care with SOA)
-        #  stable output can be achieved with
-        #  output = '\n'.join(sorted('\n'.split(z.to_text())))
-        #  but we need to see first if the frontend can handle this messed up zone file
-
-        # write zone file
-        filename = os.path.join(self.path, name + '.zone')
-        with open(filename + '~', 'w') as f:
-            f.write(f'; Generated by deSEC at {datetime.utcnow()}Z\n')  # TODO if sorting, remove this to avoid overhead
-            z.to_file(f)
-        os.rename(filename + '~', filename)
-
-    def _delete_zone(self, name: str):
-        os.remove(os.path.join(self.path, name + '.zone'))
-
-
-ZONE_REPOSITORY_PATH = '/zones'
-
-
-@shared_task(queue='replication')
-def update(name: str):
-    # TODO this task runs through following steps:
-    #  (1) retrieve AXFR  (dedyn.io 01/2021: 8.5s)
-    #  (2) parse AXFR     (dedyn.io 01/2021: 1.8s)
-    #  (3) write AXFR into zone file (dedyn.io 01/2021: 2.3s)
-    #  (4) commit into git repository  (dedyn.io 01/2021: 0.5s)
-    #  To enhance performance, steps 1-3 can be executed in parallel for multiple zones with multiprocessing.
-    #  Step 4, which takes 0.5s even for very large zones, can only be executed by a single worker, as
-    #  two parallel git commits will fail
-    print(f'updating {name}')
-    t = timezone.now()
-    zones = ZoneRepository(ZONE_REPOSITORY_PATH)
-    zones.refresh(name)
-    zones.commit_all(f'Update for {name}')
-    models.Domain.objects.filter(name=name).update(replicated=timezone.now(), replication_duration=timezone.now() - t)
-
-
-@shared_task(queue='replication', priority=9)
-def update_all():
-    names = models.Domain.objects.all().values_list('name', flat=True)
-    print(f'Queuing replication for all {len(names)} zones.')
-    for name in names:
-        update.s(name).apply_async(priority=1)
-
-
-@shared_task(queue='replication')
-def remove_history():
-    before = datetime.now() - timedelta(days=2)
-    print(f'Cleaning repo data from before {before}')
-    zones = ZoneRepository(ZONE_REPOSITORY_PATH)
-    zones.remove_history(before=before)

+ 0 - 18
api/desecapi/tests/base.py

@@ -17,7 +17,6 @@ from rest_framework.reverse import reverse
 from rest_framework.test import APITestCase, APIClient
 from rest_framework.utils import json
 
-from desecapi import replication
 from desecapi.models import User, Domain, Token, RRset, RR, psl, RR_SET_TYPES_AUTOMATIC, RR_SET_TYPES_UNSUPPORTED, \
     RR_SET_TYPES_MANAGEABLE
 
@@ -489,10 +488,6 @@ class MockPDNSTestCase(APITestCase):
             'priority': 1,  # avoid collision with DELETE zones/(?P<id>[^/]+)$ (httpretty does not match the method)
         }
 
-    def __init__(self, methodName: str = ...) -> None:
-        super().__init__(methodName)
-        self._mock_replication = None
-
     def assertPdnsRequests(self, *expected_requests, expect_order=True, exit_hook=None):
         """
         Assert the given requests are made. To build requests, use the `MockPDNSTestCase.request_*` functions.
@@ -577,9 +572,6 @@ class MockPDNSTestCase(APITestCase):
                             for hashed in Token.objects.filter(user=user).values_list('key', flat=True)))
         self.assertEqual(len(Token.make_hash(plain).split('$')), 4)
 
-    def assertReplication(self, name):
-        replication.update.delay.assert_called_with(name)
-
     @classmethod
     def setUpTestData(cls):
         httpretty.enable(allow_net_connect=False)
@@ -611,7 +603,6 @@ class MockPDNSTestCase(APITestCase):
         httpretty.disable()
 
     def setUp(self):
-        # configure mocks for nslord
         def request_callback(r, _, response_headers):
             try:
                 request = json.loads(r.parsed_body)
@@ -650,15 +641,6 @@ class MockPDNSTestCase(APITestCase):
                     priority=-100,
                 )
 
-        # configure mocks for replication
-        self._mock_replication = mock.patch('desecapi.replication.update.delay', return_value=None, wraps=None)
-        self._mock_replication.start()
-
-    def tearDown(self) -> None:
-        if self._mock_replication:
-            self._mock_replication.stop()
-        super().tearDown()
-
 
 class DesecTestCase(MockPDNSTestCase):
     """

+ 0 - 4
api/desecapi/tests/test_pdns_change_tracker.py

@@ -19,15 +19,11 @@ class PdnsChangeTrackerTestCase(DesecTestCase):
         cls.full_domain = Domain.objects.create(owner=cls.user, name=cls.random_domain_name())
 
     def assertPdnsZoneUpdate(self, name, rr_sets):
-        def _assert_replication():
-            self.assertReplication(name)
-
         return self.assertPdnsRequests(
             [
                 self.request_pdns_zone_update_assert_body(name, rr_sets),
                 self.request_pdns_zone_axfr(name),
             ],
-            exit_hook=_assert_replication,
         )
 
     def test_rrset_does_not_exist_exception(self):

+ 0 - 102
api/desecapi/tests/test_replication.py

@@ -1,15 +1,7 @@
 import json
-import os
-import random
-import string
-import time
-from datetime import datetime
-from tempfile import TemporaryDirectory
 
-from django.test import testcases
 from rest_framework import status
 
-from desecapi.replication import Repository
 from desecapi.tests.base import DesecTestCase
 
 
@@ -40,97 +32,3 @@ class ReplicationTest(DesecTestCase):
 
             # Do not expect pdns request in next iteration (result will be cached)
             pdns_requests = []
-
-
-class RepositoryTest(testcases.TestCase):
-
-    def assertGit(self, path):
-        self.assertTrue(
-            os.path.exists(os.path.join(path, '.git')),
-            f'Expected a git repository at {path} but did not find .git subdirectory.'
-        )
-
-    def assertHead(self, repo, message=None, sha=None):
-        actual_sha, actual_message = repo.get_head()
-        if actual_sha is None:
-            self.fail(f'Expected HEAD to have commit message "{message}" and hash "{sha}", but repository has no '
-                      f'commits.')
-        if sha:
-            self.assertEqual(actual_sha, sha, f'Expected HEAD to have hash "{sha}" but had "{actual_sha}".')
-        if message:
-            self.assertIn(
-                message, actual_message,
-                f'Expected "{message}" to appear in the last commit message, but only found "{actual_message}".',
-            )
-
-    def assertHasCommit(self, repo: Repository, commit_id):
-        self.assertIsNotNone(
-            repo.get_commit(commit_id)[0], f'Expected repository to have commit {commit_id}, but it had not.'
-        )
-
-    def assertHasCommits(self, repo: Repository, commit_id_list):
-        for commit in commit_id_list:
-            self.assertHasCommit(repo, commit)
-
-    def assertHasNotCommit(self, repo: Repository, commit_id):
-        self.assertIsNone(
-            repo.get_commit(commit_id)[0], f'Expected repository to not have commit {commit_id}, but it had.'
-        )
-
-    def assertHasNotCommits(self, repo: Repository, commit_id_list):
-        for commit in commit_id_list:
-            self.assertHasNotCommit(repo, commit)
-
-    def assertNoCommits(self, repo: Repository):
-        head = repo.get_head()
-        self.assertEqual(head, (None, None), f'Expected that repository has no commits, but HEAD was {head}.')
-
-    @staticmethod
-    def _random_string(length):
-        return ''.join(random.choices(string.ascii_lowercase, k=length))
-
-    def _random_commit(self, repo: Repository, message=''):
-        with open(os.path.join(repo.path, self._random_string(16)), 'w') as f:
-            f.write(self._random_string(500))
-        repo.commit_all(message)
-        return repo.get_head()[0]
-
-    def _random_commits(self, num, repo: Repository, message=''):
-        return [self._random_commit(repo, message) for _ in range(num)]
-
-    def test_init(self):
-        with TemporaryDirectory() as path:
-            repo = Repository(path)
-            repo.init()
-            self.assertGit(path)
-
-    def test_commit(self):
-        with TemporaryDirectory() as path:
-            repo = Repository(path)
-            repo.init()
-            repo.commit_all('commit1')
-            self.assertNoCommits(repo)
-
-            with open(os.path.join(path, 'test_commit'), 'w') as f:
-                f.write('foo')
-
-            repo.commit_all('commit2')
-            self.assertHead(repo, message='commit2')
-
-    def test_remove_history(self):
-        with TemporaryDirectory() as path:
-            repo = Repository(path)
-            repo.init()
-
-            remove = self._random_commits(5, repo, 'to be removed')  # we're going to remove these 'old' commits
-            keep = self._random_commits(1, repo, 'anchor to be kept')  # as sync anchor, the last 'old' commit is kept
-            cutoff = datetime.now()
-            time.sleep(1)
-            keep += self._random_commits(5, repo, 'to be kept')  # we're going to keep these 'new' commits
-
-            self.assertHasCommits(repo, remove + keep)
-
-            repo.remove_history(before=cutoff)
-
-            self.assertHasCommits(repo, keep)
-            self.assertHasNotCommits(repo, remove)

+ 0 - 4
docker-compose.dev.yml

@@ -59,10 +59,6 @@ services:
     logging:
       driver: "json-file"
 
-  celery-replication:
-    logging:
-      driver: "json-file"
-
   memcached:
     logging:
       driver: "json-file"

+ 0 - 11
docker-compose.test-e2e2.yml

@@ -28,16 +28,6 @@ services:
     volumes:
     - faketime:/etc/faketime/:ro
 
-  celery-replication:
-    environment:
-    - DESECSTACK_E2E_TEST=TRUE # increase abuse limits and such
-    # faketime setup
-    - LD_PRELOAD=/lib/libfaketime.so
-    - FAKETIME_TIMESTAMP_FILE=/etc/faketime/faketime.rc
-    - FAKETIME_NO_CACHE=1
-    volumes:
-    - faketime:/etc/faketime/:ro
-
   nslord:
     networks:
       front:
@@ -77,7 +67,6 @@ services:
     - FAKETIME_NO_CACHE=1
     volumes:
     - autocert:/autocert/:ro
-    - zones:/zones:ro
     - faketime:/etc/faketime/:rw
     mac_address: 06:42:ac:10:00:7f
     depends_on:

+ 0 - 68
docker-compose.yml

@@ -132,12 +132,9 @@ services:
     - nslord
     - nsmaster
     - celery-email
-    - celery-replication
     - memcached
     tmpfs:
     - /var/local/django_metrics:size=500m
-    volumes:
-    - zones:/zones:rw
     environment:
     - DESECSTACK_DOMAIN
     - DESECSTACK_NS
@@ -250,8 +247,6 @@ services:
     - dbapi
     - nslord
     - rabbitmq
-    volumes:
-    - zones:/zones:rw
     environment:
     - DESECSTACK_DOMAIN
     - DESECSTACK_NS
@@ -281,49 +276,6 @@ services:
         tag: "desec/celery-email"
     restart: unless-stopped
 
-  celery-replication:
-    build: api
-    image: desec/dedyn-api:latest
-    init: true
-    command: celery -A api worker -E -B -s /var/run/celerybeat-schedule/db -Q replication -n replication -c 1 -l info --uid nobody --gid nogroup
-    depends_on:
-    - dbapi
-    - nslord
-    - rabbitmq
-    volumes:
-    - zones:/zones:rw
-    - celerybeat:/var/run/celerybeat-schedule
-    environment:
-    - DESECSTACK_DOMAIN
-    - DESECSTACK_NS
-    - DESECSTACK_API_ADMIN
-    - DESECSTACK_API_SEPA_CREDITOR_ID
-    - DESECSTACK_API_SEPA_CREDITOR_NAME
-    - DESECSTACK_API_EMAIL_HOST
-    - DESECSTACK_API_EMAIL_HOST_USER
-    - DESECSTACK_API_EMAIL_HOST_PASSWORD
-    - DESECSTACK_API_EMAIL_PORT
-    - DESECSTACK_API_SECRETKEY
-    - DESECSTACK_API_PSL_RESOLVER
-    - DESECSTACK_DBAPI_PASSWORD_desec
-    - DESECSTACK_IPV4_REAR_PREFIX16
-    - DESECSTACK_IPV6_SUBNET
-    - DESECSTACK_NSLORD_APIKEY
-    - DESECSTACK_NSLORD_DEFAULT_TTL
-    - DESECSTACK_NSMASTER_APIKEY
-    - DESECSTACK_MINIMUM_TTL_DEFAULT
-    - DJANGO_SETTINGS_MODULE=api.settings
-    networks:
-      rearapi_celery:
-      rearapi_dbapi:
-      rearapi_ns:
-        ipv4_address: ${DESECSTACK_IPV4_REAR_PREFIX16}.1.13
-    logging:
-      driver: "syslog"
-      options:
-        tag: "desec/celery-replication"
-    restart: unless-stopped
-
   memcached:
     image: memcached:1.6-alpine
     init: true
@@ -403,23 +355,6 @@ services:
         tag: "desec/prometheus"
     restart: unless-stopped
 
-  gitzones:
-    build: gitzones
-    image: desec/gitzones:latest
-    init: true
-    environment:
-      - DESECSTACK_DOMAIN
-    volumes:
-      - zones:/zones:ro
-      - gitzones_keys:/etc/ssh/keys/:rw
-    ports:
-      - "222:22"
-    logging:
-      driver: "syslog"
-      options:
-        tag: "desec/gitzones"
-    restart: unless-stopped
-
 volumes:
   dbapi_postgres:
   dblord_mysql:
@@ -427,9 +362,6 @@ volumes:
   openvpn-server_logs:
   prometheus:
   rabbitmq_data:
-  zones:
-  celerybeat:
-  gitzones_keys:
 
 networks:
   # Note that it is required that the front network ranks lower (in lexical order)

+ 0 - 14
gitzones/Dockerfile

@@ -1,14 +0,0 @@
-FROM alpine:latest
-
-RUN apk add --no-cache openssh git python3
-
-RUN adduser -D -s /usr/bin/git-shell git \
-  && mkdir /home/git/.ssh \
-  && ln -s /etc/ssh/keys/git_authorized_keys /home/git/.ssh/authorized_keys \
-  && passwd -u git  # sshd config prohibits passwordless login
-
-COPY git-shell-commands /home/git/
-COPY sshd_config /etc/ssh/
-COPY entrypoint.sh auth /usr/local/bin/
-
-ENTRYPOINT entrypoint.sh

+ 0 - 49
gitzones/README.md

@@ -1,49 +0,0 @@
-# git Zones Repository SSH Server
-
-Provides *read only* git access to the zones git repository which is stored in volume `zones`.
-
-
-## Server Authentication
-
-The server identity is based on an ED25519 key pair generated on first startup and stored in the `gitzones_keys` volume.
-To make sure clients are connecting to the correct zone server, use the `auth` command of the gitzones container:
-
-    $ docker-compose exec gitzones auth id
-    desec.example.dedyn.io ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKxVBPSvHDFGzorms9x76+nAo7Zs+0PhaKnMblcdVPos root@c269a29f451d
-
-The output can be appended to any client's `~/.ssh/known_hosts` file.
-
-
-## Client Authentication
-
-To allow clients to read from the zones repository, add their keys to the `gitzones_authorized_keys` file.
-This container ships a tool for key management.
-
-To add a key, use
-
-    docker-compose exec gitzones auth add ssh-rsa AAAAB<omitted>= ns23.desec.io
-
-The command line arguments after `auth add` can usually be copied from the client's SSH public key file.
-The last argument is the label under which the key is stored.
-Unlike SSH, we insist on unique labels for each key.
-
-To remove a key, use
-
-    docker-compose exec gitzones auth rm ns23.desec.io
-
-To list all labels of currently authorized keys,
-
-    docker-compose exec gitzones auth ls
-
-A `-v` flag can be added to also display the keys.
-To see the bare contents of the authorized_keys file,
-
-    docker-compose exec gitzones auth cat
-
-
-## Security Considerations
-
-Read-only access to the repository is enforced by docker volume options.
-SSH configuration is pretty restrictive, extra features like X11 forwarding are disabled.
-SSH access is only granted via a non-interactive git shell, but all clients share the same UNIX user (`git`).
-For mutual authentication, see above.

+ 0 - 167
gitzones/auth

@@ -1,167 +0,0 @@
-#!/usr/bin/python3
-# this file is formatted using black
-import argparse
-import os
-import sys
-from typing import List, Dict
-
-authorized_keys_file = "/home/git/.ssh/authorized_keys"
-host_public_key_file = "/etc/ssh/keys/ssh_host_ed25519_key.pub"
-usage = """auth <command> [<args>]
-
-Available commands are:
-  id                          Shows server key formatted for client's ~/.ssh/known_hosts
-  add <proto> <key> <label>   Adds a client key to the list of authorized keys.
-                              <proto> <key> <label> is typically the content of the public key file of the client.
-                              The label must be unique; usage of existing labels will overwrite the existing key
-                              without warning.
-  rm <label>                  Removes client key with given label from the list of authorized keys
-  ls [-v]                     Lists all authorized keys.
-  cat                         Print contents of the authorized_keys file.
-"""
-
-
-class AuthorizedKeysException(Exception):
-    pass
-
-
-class AuthorizedKeys(list):
-    """ Provides management of an SSH "authorized_keys" file, restricted to a subset of functionality. """
-
-    def __init__(self, location: str = authorized_keys_file) -> None:
-        super().__init__()
-        self.location = location
-        self.db = {}
-        try:
-            with open(location) as f:
-                for line in f:
-                    if not line.strip() or line.startswith("#"):
-                        continue
-                    proto, key, label = line.strip().split(" ", maxsplit=2)
-                    self.db[label] = proto + " " + key
-        except FileNotFoundError:
-            pass
-
-    def add(self, proto: str, key: str, label: str) -> None:
-        """ Adds an authorized key. """
-        if label in self.db:
-            raise AuthorizedKeysException(f'Key with label "{label}" already exists.')
-        self.db[label] = proto + " " + key
-        self._save()
-
-    def rm(self, label: str) -> None:
-        """ Removes an authorized key, identified by its label. Raises if key with given label cannot be found. """
-        try:
-            del self.db[label]
-        except KeyError:
-            raise AuthorizedKeysException(
-                f'Could not find authorized key with label "{label}".'
-            )
-        self._save()
-
-    def ls(self) -> Dict[str, str]:
-        """ Returns dictionary of authorized keys, identified by their labels. """
-        return self.db.copy()
-
-    def _save(self) -> None:
-        real_location = os.path.realpath(self.location)
-        with open(real_location + '~', "w") as f:
-            for label, proto_key in self.db.items():
-                f.write(proto_key + " " + label + "\n")
-        os.rename(real_location + '~', real_location)
-
-
-def main(args: List[str]) -> None:
-    """ Command line application main entry point. """
-    parser = argparse.ArgumentParser(
-        usage=usage,
-    )
-    parser.add_argument("command", help="Subcommand to run")
-    parsed = parser.parse_args(args[0:1])
-
-    cmd = {
-        "id": cmd_id,
-        "add": cmd_add,
-        "rm": cmd_rm,
-        "ls": cmd_ls,
-        "cat": cmd_cat,
-    }.get(parsed.command, None)
-    if not callable(cmd):
-        print(f'Unrecognized command "{parsed.command}"')
-        parser.print_usage()
-        exit(1)
-
-    try:
-        cmd(args[1:])
-    except AuthorizedKeysException as e:
-        sys.stderr.write(str(e) + "\n")
-        exit(1)
-
-
-def cmd_id(args: List[str]) -> None:
-    """ Entrypoint for CLI command that shows this hosts SSH public key. """
-    parser = argparse.ArgumentParser()
-    parser.parse_args(args)
-    with open(host_public_key_file, "r") as f:
-        print(f'desec.{os.environ["DESECSTACK_DOMAIN"]} {f.readline().strip()}')
-
-
-def cmd_add(args: List[str]) -> None:
-    """ Entrypoint for CLI command that adds an authorized SSH key. """
-    parser = argparse.ArgumentParser()
-    parser.add_argument("proto", help="Protocol used with given key.")
-    parser.add_argument("key", help="Public key of authorized key pair.")
-    parser.add_argument("label", help="Label under which the key is stored.")
-    parsed = parser.parse_args(args)
-    AuthorizedKeys().add(parsed.proto, parsed.key, parsed.label)
-
-
-def cmd_rm(args: List[str]) -> None:
-    """ Entrypoint for CLI command that removes an authorized SSH key. """
-    parser = argparse.ArgumentParser()
-    parser.add_argument("label", help="The key with this label will be removed.")
-    parsed = parser.parse_args(args)
-    AuthorizedKeys().rm(parsed.label)
-
-
-def cmd_ls(args: List[str]) -> None:
-    """ Entrypoint for CLI command that shows all authorized SSH keys. """
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "-v",
-        "--verbose",
-        action="store_true",
-        help="Lists all authorized keys.",
-    )
-    parsed = parser.parse_args(args)
-    keys = AuthorizedKeys().ls()
-
-    if not keys:
-        return
-
-    if parsed.verbose:
-        label_length = max(len(label) for label in keys) + 2
-        print(
-            "\n".join(
-                f"{label:{label_length}s} {proto_key}"
-                for label, proto_key in keys.items()
-            )
-        )
-    else:
-        print("\n".join(keys))
-
-
-def cmd_cat(args: List[str]) -> None:
-    """ Entrypoint for CLI command that outputs an exact copy of the SSH authorized keys file. """
-    parser = argparse.ArgumentParser()
-    parser.parse_args(args)
-    try:
-        with open(authorized_keys_file, "r") as f:
-            print(f.read())
-    except FileNotFoundError:
-        sys.stderr.write(f'Authorized keys file not found at "{authorized_keys_file}".')
-        exit(1)
-
-
-if __name__ == "__main__":
-    main(sys.argv[1:])

+ 0 - 6
gitzones/entrypoint.sh

@@ -1,6 +0,0 @@
-#!/bin/sh
-if ! test -f /etc/ssh/keys/ssh_host_ed25519_key; then
-  ssh-keygen -t ed25519 -f /etc/ssh/keys/ssh_host_ed25519_key
-fi
-touch /etc/ssh/keys/git_authorized_keys
-exec /usr/sbin/sshd -D -e  # -D to not daemonize, -e to log to stdout/stderr

+ 0 - 3
gitzones/git-shell-commands/no-interactive-login

@@ -1,3 +0,0 @@
-#!/bin/sh
-printf '%s\n' "No interactive login."
-exit 128

+ 0 - 24
gitzones/sshd_config

@@ -1,24 +0,0 @@
-# Auth & security settings
-PasswordAuthentication no
-ChallengeResponseAuthentication no
-AuthenticationMethods publickey
-PermitRootLogin no
-AllowUsers git
-
-# Features
-AllowAgentForwarding no
-AllowTcpForwarding no
-X11Forwarding no
-PermitTTY no
-PrintMotd no
-
-# Logging
-LogLevel INFO
-
-# Keys
-HostKey /etc/ssh/keys/ssh_host_ed25519_key
-
-# Mozilla's "Modern" Cipher Settings (https://infosec.mozilla.org/guidelines/openssh)
-KexAlgorithms curve25519-sha256@libssh.org,ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group-exchange-sha256
-Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr
-MACs hmac-sha2-512-etm@openssh.com,hmac-sha2-256-etm@openssh.com,umac-128-etm@openssh.com,hmac-sha2-512,hmac-sha2-256,umac-128@openssh.com

+ 1 - 1
nslord/conf/pdns.conf.var

@@ -1,4 +1,4 @@
-allow-axfr-ips=${DESECSTACK_IPV4_REAR_PREFIX16}.1.12,${DESECSTACK_IPV4_REAR_PREFIX16}.1.13
+allow-axfr-ips=${DESECSTACK_IPV4_REAR_PREFIX16}.1.12
 api=yes
 api-key=${DESECSTACK_NSLORD_APIKEY}
 default-api-rectify=no

+ 0 - 30
test/e2e2/conftest.py

@@ -448,36 +448,6 @@ class NSLordClient(NSClient):
     where = os.environ["DESECSTACK_IPV4_REAR_PREFIX16"] + '.0.129'
 
 
-def query_replication(zone: str, qname: str, qtype: str, covers: str = None):
-    if qtype == 'RRSIG':
-        assert covers, 'If querying RRSIG, covers parameter must be set to a RR type, e.g. SOA.'
-    else:
-        assert not covers
-        covers = dns.rdatatype.NONE
-
-    zonefile = os.path.join('/zones', zone + '.zone')
-    zone = dns.name.from_text(zone, origin=dns.name.root)
-    qname = dns.name.from_text(qname, origin=zone)
-
-    if not os.path.exists(zonefile):
-        tsprint(f'RPL <<< Zone file for {zone} not found '
-                f'(number of zones: {len(list(filter(lambda f: f.endswith(".zone"), os.listdir("/zones"))))})')
-        return None
-
-    try:
-        tsprint(f'RPL >>> {qname}/{qtype} in {zone}')
-        z = dns.zone.from_file(f=zonefile, origin=zone, relativize=False)
-        v = {i.to_text() for i in z.find_rrset(qname, qtype, covers=covers).items}
-        tsprint(f'RPL <<< {v}')
-        return v
-    except KeyError:
-        tsprint(f'RPL <<< RR Set {qname}/{qtype} not found')
-        return {}
-    except dns.zone.NoSOA:
-        tsprint(f'RPL <<< Zone {zone} not found')
-        return None
-
-
 def return_eventually(expression: callable, min_pause: float = .1, max_pause: float = 2, timeout: float = 5,
                       retry_on: Tuple[type] = (Exception,)):
     if not callable(expression):

+ 1 - 13
test/e2e2/spec/test_api_rr.py

@@ -2,7 +2,7 @@ from typing import List, Tuple
 
 import pytest
 
-from conftest import DeSECAPIV1Client, query_replication, NSLordClient, assert_eventually
+from conftest import DeSECAPIV1Client, NSLordClient, assert_eventually
 
 
 def generate_params(dict_value_lists_by_type: dict) -> List[Tuple[str, str]]:
@@ -381,10 +381,6 @@ def test_create_valid_canonical(api_user_domain: DeSECAPIV1Client, rr_type: str,
         expected.add(value)
     rrset = {rr.to_text() for rr in NSLordClient.query(f'{subname}.{domain_name}'.strip('.'), rr_type)}
     assert rrset == expected
-    assert_eventually(
-        lambda: query_replication(domain_name, subname, rr_type) == expected,
-        timeout=20,
-    )
 
 
 @pytest.mark.parametrize("rr_type,value", generate_params(VALID_RECORDS_NON_CANONICAL))
@@ -400,11 +396,6 @@ def test_create_valid_non_canonical(api_user_domain: DeSECAPIV1Client, rr_type:
         expected.add(value)
     rrset = NSLordClient.query(f'{subname}.{domain_name}'.strip('.'), rr_type)
     assert len(rrset) == len(expected)
-    assert_eventually(
-        lambda: len(query_replication(domain_name, subname, rr_type)) == len(expected),
-        retry_on=(TypeError, AssertionError),
-        timeout=20,
-    )
 
 
 @pytest.mark.parametrize("rr_type,value", INVALID_RECORDS_PARAMS)
@@ -416,7 +407,6 @@ def test_create_long_subname(api_user_domain: DeSECAPIV1Client):
     subname = 'a' * 63
     assert api_user_domain.rr_set_create(api_user_domain.domain, "AAAA", ["::1"], subname=subname).status_code == 201
     assert NSLordClient.query(f"{subname}.{api_user_domain.domain}", "AAAA")[0].to_text() == "::1"
-    assert_eventually(lambda: query_replication(api_user_domain.domain, subname, "AAAA") == {"::1"})
 
 
 def test_add_remove_DNSKEY(api_user_domain: DeSECAPIV1Client):
@@ -427,9 +417,7 @@ def test_add_remove_DNSKEY(api_user_domain: DeSECAPIV1Client):
     value = '257 3 13 aCoEWYBBVsP9Fek2oC8yqU8ocKmnS1iD SFZNORnQuHKtJ9Wpyz+kNryquB78Pyk/ NTEoai5bxoipVQQXzHlzyg=='
     assert api_user_domain.rr_set_create(domain_name, 'DNSKEY', [value], subname='').status_code == 201
     assert {rr.to_text() for rr in NSLordClient.query(domain_name, 'DNSKEY')} == auto_dnskeys | {value}
-    assert_eventually(lambda: query_replication(domain_name, '', 'DNSKEY') == auto_dnskeys | {value})
 
     # After deleting it, we expect that the automatically managed ones are still there
     assert api_user_domain.rr_set_delete(domain_name, "DNSKEY", subname='').status_code == 204
     assert {rr.to_text() for rr in NSLordClient.query(domain_name, 'DNSKEY')} == auto_dnskeys
-    assert_eventually(lambda: query_replication(domain_name, '', 'DNSKEY') == auto_dnskeys)

+ 1 - 2
test/e2e2/spec/test_dyndns.py

@@ -1,7 +1,7 @@
 import ipaddress
 import os
 
-from conftest import DeSECAPIV1Client, query_replication, NSLordClient, assert_eventually
+from conftest import DeSECAPIV1Client, NSLordClient, assert_eventually
 
 import base64
 import pytest
@@ -51,7 +51,6 @@ def test(api_user_lps_domain: DeSECAPIV1Client, auth_method, base_url, subname):
             assert len(rrs_dns[qtype]) == (1 if expected_net else 0)
             assert _ips_in_network(rrs_api[qtype], expected_net)
             assert _ips_in_network(rrs_dns[qtype], expected_net)
-            assert_eventually(lambda: _ips_in_network(query_replication(domain, '', qtype), expected_net))
 
     headers = {}
     params = {}

+ 0 - 112
test/e2e2/spec/test_replication.py

@@ -1,112 +0,0 @@
-from base64 import b64decode
-import os
-import socket
-
-import dns.query
-import pytest
-
-from conftest import DeSECAPIV1Client, return_eventually, query_replication, random_domainname, assert_eventually, \
-    FaketimeShift
-
-
-some_ds_records = [
-    '60604 8 1 ef66f772935b412376c8445c4442b802b0322814',
-    '60604 8 2 c2739629145faaf464ff1bc65612fd1eb5766e80c96932d808edfb55d1e1f2ce',
-    '60604 8 4 5943dac4fc4aad637445f483b0f43bd4152fab19250fd26df82bf12020a7f7101caa17e723cf433f43d2bbed11231e03',
-]
-
-
-def test_signature_rotation(api_user_domain: DeSECAPIV1Client):
-    name = random_domainname()
-    api_user_domain.domain_create(name)
-    assert_eventually(lambda: query_replication(name, "", 'RRSIG', covers='SOA') is not None, timeout=60)
-    rrsig = query_replication(name, "", 'RRSIG', covers='SOA')
-    with FaketimeShift(days=7):
-        assert_eventually(lambda: rrsig != query_replication(name, "", 'RRSIG', covers='SOA'), timeout=60)
-
-
-def test_zone_deletion(api_user_domain: DeSECAPIV1Client):
-    name = api_user_domain.domain
-    assert_eventually(lambda: query_replication(name, "", 'SOA') is not None, timeout=20)
-    api_user_domain.domain_destroy(name)
-    assert_eventually(lambda: query_replication(name, "", 'SOA') is None, timeout=20)
-
-
-@pytest.mark.performance
-def test_signature_rotation_performance(api_user_domain: DeSECAPIV1Client):
-    root_domain = api_user_domain.domain
-
-    # test configuration
-    bulk_block_size = 500
-    domain_sizes = {
-        # number of delegations: number of zones
-        2000: 1,
-        1000: 2,
-        10: 10,
-    }
-
-    # create test domains
-    domain_names = {
-        num_delegations: [random_domainname() + f'.num-ds-{num_delegations}.' + root_domain for _ in range(num_zones)]
-        for num_delegations, num_zones in domain_sizes.items()
-    }
-    for num_delegations, names in domain_names.items():
-        for name in names:
-            # create a domain with name `name` and `num_delegations` delegations
-            api_user_domain.domain_create(name)
-            for a in range(0, num_delegations, bulk_block_size):  # run block-wise to avoid exceeding max request size
-                r = api_user_domain.rr_set_create_bulk(
-                    name,
-                    [
-                        {"subname": f'x{i}', "type": "DS", "ttl": 3600, "records": some_ds_records}
-                        for i in range(a, a + bulk_block_size)
-                    ] + [
-                        {"subname": f'x{i}', "type": "NS", "ttl": 3600, "records": ['ns1.test.', 'ns2.test.']}
-                        for i in range(a, a + bulk_block_size)
-                    ]
-                )
-                assert r.status_code == 200
-
-    # retrieve all SOA RRSIGs
-    soa_rrsig = {}
-    for names in domain_names.values():
-        for name in names:
-            soa_rrsig[name] = return_eventually(lambda: query_replication(name, "", 'RRSIG', covers='SOA'), timeout=20)
-
-    # rotate signatures
-    with FaketimeShift(days=7):
-        # assert SOA RRSIG has been updated
-        for names in domain_names.values():
-            for name in names:
-                assert_eventually(
-                    lambda: soa_rrsig[name] != query_replication(name, "", 'RRSIG', covers='SOA'),
-                    timeout=600,  # depending on number of domains in the database, this value requires increase
-                )
-
-
-def test_tsig_axfr(api_user_domain: DeSECAPIV1Client):
-    ns_ip = socket.gethostbyname('nsmaster')
-
-    def count_xfr_rrsets(**kwargs):
-        xfr = dns.query.xfr(ns_ip, api_user_domain.domain, **kwargs)
-        zone = dns.zone.from_xfr(xfr)
-
-        ## from dnspython 2.2.0 on
-        #zone = dns.zone.Zone(api_user_domain.domain)
-        #query, _ = dns.xfr.make_query(zone, **kwargs)
-        #dns.query.inbound_xfr(ns_ip, zone, query)
-        return sum(1 for _ in zone.iterate_rdatasets())
-
-    with pytest.raises(dns.xfr.TransferError) as exc_info:
-        count_xfr_rrsets()
-
-    assert exc_info.value.rcode == dns.rcode.NOTAUTH
-
-    keyring = {'default.': b64decode('XXXXXXXXXXXXXXXXXXXXXX==')}
-    with pytest.raises(dns.xfr.TransferError) as exc_info:
-        count_xfr_rrsets(keyring=keyring, keyname=None)
-
-    assert exc_info.value.rcode == dns.rcode.NOTAUTH
-
-    keyring = {'default.': b64decode(os.environ['DESECSTACK_NSMASTER_TSIGKEY'])}
-    assert_eventually(lambda: count_xfr_rrsets(keyring=keyring, keyname=None) > 5, timeout=20, retry_on=(Exception,))