diff --git a/.builds/tests-minimal.yml b/.builds/tests-minimal.yml
index 186e71f4bf513113d3b13928f10cd6c85aa1dec2..afc28eaa9eaef184b80cddca7961585fb7890231 100644
--- a/.builds/tests-minimal.yml
+++ b/.builds/tests-minimal.yml
@@ -3,12 +3,13 @@
 # TODO: It might make more sense to test with an older Ubuntu or Fedora version
 # here, and consider that our "oldest suppported environment".
 
-image: alpine/3.15
+image: alpine/3.17 # python 3.10
 packages:
   - docker
   - docker-cli
   - docker-compose
   - py3-pip
+  - python3-dev
 sources:
   - https://github.com/pimutils/vdirsyncer
 environment:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e4c2b7b9785153575b9ab5c096200664d115b70c..7304056b9b9763ae33eff22523fa23831778e9f0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: trailing-whitespace
         args: [--markdown-linebreak-ext=md]
@@ -9,11 +9,11 @@ repos:
       - id: check-added-large-files
       - id: debug-statements
   - repo: https://github.com/psf/black
-    rev: "23.3.0"
+    rev: "24.2.0"
     hooks:
       - id: black
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.2.0"
+    rev: "v1.8.0"
     hooks:
       - id: mypy
         files: vdirsyncer/.*
@@ -23,9 +23,10 @@ repos:
           - types-requests
           - types-atomicwrites
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: 'v0.0.265'
+    rev: 'v0.2.2'
     hooks:
       - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
   - repo: local
     hooks:
       - id: typos-syncroniz
diff --git a/AUTHORS.rst b/AUTHORS.rst
index ad1b23296e193c65c92cbd1890c3fdae8c984403..56473030332074ce3e7b1b3e81095f508e0527ab 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -4,10 +4,13 @@ Contributors
 In alphabetical order:
 
 - Ben Boeckel
+- Bleala
 - Christian Geier
 - Clément Mondon
 - Corey Hinshaw
 - Hugo Osvaldo Barrera
+- Jason Cox
 - Julian Mehne
+- Kai Herlemann
 - Malte Kiefer
 - Marek Marczykowski-Górecki
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 4f0e4cef4f83ab40044cdf264e87ae860a252991..f0ded6a0ea5c4a0abf2f9be849f217c00e16f053 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -9,6 +9,16 @@ Package maintainers and users who have to manually update their installation
 may want to subscribe to `GitHub's tag feed
 <https://github.com/pimutils/vdirsyncer/tags.atom>`_.
 
+Version 0.19.3
+==============
+
+- Add a ``no_delete`` option to the storage configuration. :gh:`1090`
+- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
+- Add an option to request vCard v4.0. :gh:`1066`
+- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
+- Add a link to the unofficial Docker image `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
+- Implement digest auth. :gh:`1137`
+
 Version 0.19.2
 ==============
 
diff --git a/README.rst b/README.rst
index 82857499607f30bc9b22832cc4065cdf6159429d..49388cdcfad6a7e7bf706c90a3e42b02e4746c13 100644
--- a/README.rst
+++ b/README.rst
@@ -59,6 +59,15 @@ Links of interest
 
 * `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
 
+Dockerized
+==========
+If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
+Docker environment, you can check out the following GitHub repository:
+
+* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
+
+Note: this is an unofficial Docker image maintained by `Bleala <https://github.com/Bleala>`_.
+
 License
 =======
 
diff --git a/contrib/conflict_resolution/resolve_interactively.py b/contrib/conflict_resolution/resolve_interactively.py
index 6da9ab72c3091fe2004928b5cfb6ceff3a059145..d676496b0e41ffde85c04d1b9a584f5571c2db5e 100755
--- a/contrib/conflict_resolution/resolve_interactively.py
+++ b/contrib/conflict_resolution/resolve_interactively.py
@@ -16,6 +16,8 @@ SPDX-License-Identifier: BSD-3-Clause
 SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
 Author: <bernhard.reiter@intevation.de>
 """
+from __future__ import annotations
+
 import re
 import subprocess
 import sys
diff --git a/docs/conf.py b/docs/conf.py
index 533ddec338743833e3ad57b957b0bede466c032f..807997b7027baeba6e26009bc644516556f029be 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import datetime
 import os
 
diff --git a/docs/config.rst b/docs/config.rst
index f291d24f8decbf0eceb2b5f9ad800f5fdac80506..d157be3166f4777c2cc9ce1db24c1ff1b7b098e1 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -61,7 +61,8 @@ Pair Section
   sync`` is executed. See also :ref:`collections_tutorial`.
 
   The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
-  autodiscovery on a specific storage.
+  autodiscovery on a specific storage. This means all of the collections found
+  on side A or side B, respectively.
 
   If the collection you want to sync doesn't have the same name on each side,
   you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
@@ -71,8 +72,8 @@ Pair Section
 
   Examples:
 
-  - ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
-    collections from side B, and also the collections named "foo" and "bar".
+  - ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
+    the collections from side B, and also the collections named "foo" and "bar".
 
   - ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
     existing collections on either side.
@@ -238,6 +239,7 @@ CalDAV and CardDAV
      #useragent = "vdirsyncer/0.16.4"
      #verify_fingerprint = null
      #auth_cert = null
+     #use_vcard_4 = false
 
    :param url: Base URL or an URL to an addressbook.
    :param username: Username for authentication.
@@ -255,6 +257,7 @@ CalDAV and CardDAV
                      certificate and the key or a list of paths to the files
                      with them.
    :param useragent: Default ``vdirsyncer``.
+   :param use_vcard_4: Whether to request vCard 4.0 from the server.
 
 Google
 ++++++
@@ -376,6 +379,7 @@ Local
       fileext = "..."
       #encoding = "utf-8"
       #post_hook = null
+      #pre_deletion_hook = null
       #fileignoreext = ".tmp"
 
     Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
@@ -397,6 +401,8 @@ Local
     :param post_hook: A command to call for each item creation and
         modification. The command will be called with the path of the
         new/updated file.
+    :param pre_deletion_hook: A command to call for each item deletion.
+        The command will be called with the path of the file about to be deleted.
     :param fileeignoreext: The file extention to ignore. It is only useful
         if fileext is set to the empty string. The default is ``.tmp``.
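
A quick sketch of how the two options added in this file could appear in a config
(the storage names, path, URL and hook command are illustrative and not part of this
change; only ``pre_deletion_hook`` and ``use_vcard_4`` are defined above)::

    [storage my_calendar]
    type = "filesystem"
    path = "~/.calendars/personal/"
    fileext = ".ics"
    # Called with the path of the file about to be deleted.
    pre_deletion_hook = "/usr/local/bin/archive-item"

    [storage my_contacts]
    type = "carddav"
    url = "https://dav.example.com/"
    username = "myusername"
    # Ask the server for vCard 4.0 instead of the default.
    use_vcard_4 = true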
 
diff --git a/docs/contact.rst b/docs/contact.rst
index 3ce031b5646b674211bd820c5e39a74317aa123b..b2dacb341e5d52a0a41d8fa4004230bd3abf892a 100644
--- a/docs/contact.rst
+++ b/docs/contact.rst
@@ -9,7 +9,4 @@ Support and Contact
 * Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
   concrete bug reports and feature requests.
 
-* Lastly, you can also `contact the author directly
-  <https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
-  that doesn't work out (i.e. if I don't respond within one week), use
-  ``contact@pimutils.org``.
+* For security issues, contact ``contact@pimutils.org``.
diff --git a/docs/installation.rst b/docs/installation.rst
index ae57ef180d4ab04bcb549aaa6027b97e38a14fb5..b927cc2d6b7ddc71348de847bf769771c65a1154 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -10,7 +10,7 @@ OS/distro packages
 The following packages are community-contributed and were up-to-date at the
 time of writing:
 
-- `ArchLinux <https://www.archlinux.org/packages/community/any/vdirsyncer/>`_
+- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
 - `Ubuntu and Debian, x86_64-only
   <https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
   in the official repositories but may be out of date)
@@ -59,21 +59,25 @@ pipx: The clean, easy way
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
 pipx_ is a new package manager for Python-based software that automatically
-sets up a virtual environment for each program you install. Assuming you have
-it installed on your operating system, you can do::
+sets up a virtual environment for each program it installs. Please note that
+installing via pipx will not include manual pages or systemd services.
+
+pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``.
+
+Assuming that pipx is installed, vdirsyncer can be installed with::
 
     pipx install vdirsyncer
 
-and ``~/.local/pipx/venvs/vdirsyncer`` will be your new vdirsyncer installation. To
-update vdirsyncer to the latest version::
+It can later be updated to the latest version with::
 
     pipx upgrade vdirsyncer
 
-If you're done with vdirsyncer, you can do::
+It can be uninstalled with::
 
     pipx uninstall vdirsyncer
 
-and vdirsyncer will be uninstalled, including its dependencies.
+This last command will remove vdirsyncer and any dependencies installed into
+the above location.
 
 .. _pipx: https://github.com/pipxproject/pipx
 
diff --git a/docs/keyring.rst b/docs/keyring.rst
index 747553d32ae41babdcb500d4a52683fcd5a67550..a7ef946dd4ea93d505f083cd9ca4683247de60d6 100644
--- a/docs/keyring.rst
+++ b/docs/keyring.rst
@@ -78,3 +78,19 @@ You can also simply prompt for the password::
     type = "caldav"
     username = "myusername"
     password.fetch = ["prompt", "Password for CalDAV"]
+
+Environment variable
+====================
+
+To read the password from an environment variable::
+
+    [storage foo]
+    type = "caldav"
+    username = "myusername"
+    password.fetch = ["command", "printenv", "DAV_PW"]
+
+This is especially handy if you use the same password multiple times
+(say, for a CardDAV and a CalDAV storage).
+In bash, you can read and export the password without echoing it::
+
+    read -s -p "DAV Password: " DAV_PW && export DAV_PW
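
The same ``DAV_PW`` variable can then back several storages at once, as mentioned
above; a minimal sketch (the storage names are illustrative)::

    [storage my_contacts]
    type = "carddav"
    username = "myusername"
    password.fetch = ["command", "printenv", "DAV_PW"]

    [storage my_calendar]
    type = "caldav"
    username = "myusername"
    password.fetch = ["command", "printenv", "DAV_PW"]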
diff --git a/docs/ssl-tutorial.rst b/docs/ssl-tutorial.rst
index 9d9975fdf4d10fad89e97db0b217e5b6e80cc596..f04b829de0f3c49fe6310b94b23937f6e47728b0 100644
--- a/docs/ssl-tutorial.rst
+++ b/docs/ssl-tutorial.rst
@@ -14,14 +14,14 @@ To pin the certificate by fingerprint::
     [storage foo]
     type = "caldav"
     ...
-    verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
+    verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
 
-SHA256-Fingerprints can be used. CA validation is disabled when pinning a
-fingerprint.
+SHA256 fingerprints must be used; MD5 and SHA-1 are insecure and not supported.
+CA validation is disabled when pinning a fingerprint.
 
-You can use the following command for obtaining a SHA-1 fingerprint::
+You can use the following command to obtain a SHA256 fingerprint::
 
-    echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
+    echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
 
 However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
 that you can forget about all of that. It is easier to deploy a free
diff --git a/docs/tutorial.rst b/docs/tutorial.rst
index 49dfa6a23f076c379e7dd1edefa3fe2d9b6953d3..e7d2c7c769c8b1339875c4872ef67a2a3f995ecf 100644
--- a/docs/tutorial.rst
+++ b/docs/tutorial.rst
@@ -176,8 +176,11 @@ as a file called ``color`` within the calendar folder.
 More information about collections
 ----------------------------------
 
-"Collection" is a collective term for addressbooks and calendars. Each
-collection from a storage has a "collection name", a unique identifier for each
+"Collection" is a collective term for addressbooks and calendars. A Cardav or
+Caldav server can contains several "collections" which correspond to several
+addressbooks or calendar.
+
+Each collection from a storage has a "collection name", a unique identifier for each
 collection. In the case of :storage:`filesystem`-storage, this is the name of the
 directory that represents the collection, in the case of the DAV-storages this
 is the last segment of the URL. We use this identifier in the ``collections``
diff --git a/docs/tutorials/todoman.rst b/docs/tutorials/todoman.rst
index a1afd0c407deaec2cc4cfe633574fae07eeba053..ccbefbe42140722b7b9947b27821b0c8dd0a484d 100644
--- a/docs/tutorials/todoman.rst
+++ b/docs/tutorials/todoman.rst
@@ -48,10 +48,9 @@ instance to subfolders of ``~/.calendar/``.
 Setting up todoman
 ==================
 
-Write this to ``~/.config/todoman/todoman.conf``::
+Write this to ``~/.config/todoman/config.py``::
 
-    [main]
-    path = ~/.calendars/*
+    path = "~/.calendars/*"
 
 The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
 which is exactly the tasklists we want. Now you can use ``todoman`` as
diff --git a/publish-release.yaml b/publish-release.yaml
index 91c4a84200289c48e5d101e03039a22bd5a858cc..0ea9fa4eee7f2c4aa369b9c6b52794e65a756e35 100644
--- a/publish-release.yaml
+++ b/publish-release.yaml
@@ -24,4 +24,4 @@ tasks:
   - publish: |
       cd vdirsyncer
       python setup.py sdist bdist_wheel
-      twine upload dist/*
+      twine upload --non-interactive dist/*
diff --git a/pyproject.toml b/pyproject.toml
index 314b9ce587a1767a77a485355fd03be40a9e737b..6d7b0319f3e7cab7e61134a67b9f3db15e45ff77 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,6 +14,7 @@ target-version = "py37"
 
 [tool.ruff.isort]
 force-single-line = true
+required-imports = ["from __future__ import annotations"]
 
 [tool.pytest.ini_options]
 addopts = """
@@ -28,3 +29,8 @@ addopts = """
 
 [tool.mypy]
 ignore_missing_imports = true
+
+[tool.coverage.report]
+exclude_lines = [
+    "if TYPE_CHECKING:",
+]
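
The new ``required-imports`` setting is what drives the ``from __future__ import
annotations`` line added to every module in this diff. A sketch of what a compliant
module then looks like (the function is made up for illustration)::

    from __future__ import annotations


    def lookup(table: dict[str, int], key: str) -> int | None:
        # With the future import, annotations are not evaluated at runtime,
        # so PEP 585/604 syntax such as dict[str, int] and int | None is safe
        # on the Python 3.7+ versions this project targets.
        return table.get(key)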
diff --git a/scripts/release-deb.sh b/scripts/release-deb.sh
index 7dca2dd4a3347864873606d63eec5d2200d15206..f3d64d0826516e5e24e0b2c69682786e7efc9246 100644
--- a/scripts/release-deb.sh
+++ b/scripts/release-deb.sh
@@ -5,8 +5,10 @@ set -xeu
 SCRIPT_PATH=$(realpath "$0")
 SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
 
-DISTRO=$1
-DISTROVER=$2
+# E.g.: debian, ubuntu
+DISTRO=${DISTRO:-$1}
+# E.g.: bullseye, bookworm
+DISTROVER=${DISTROVER:-$2}
 CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
 CONTEXT="$(mktemp -d)"
 
@@ -21,7 +23,7 @@ trap cleanup EXIT
 cp scripts/_build_deb_in_container.bash "$CONTEXT"
 python setup.py sdist -d "$CONTEXT"
 
-podman run -it \
+docker run -it \
   --name "$CONTAINER_NAME" \
   --volume "$CONTEXT:/source" \
   "$DISTRO:$DISTROVER" \
diff --git a/setup.py b/setup.py
index 0e8ba30306bbc5db96d8ac0ca35ce10a42c7646d..d9acadcaafd1671dcba10fd8d17555f029c6b94e 100644
--- a/setup.py
+++ b/setup.py
@@ -4,6 +4,9 @@ Vdirsyncer synchronizes calendars and contacts.
 Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
 how to package vdirsyncer.
 """
+
+from __future__ import annotations
+
 from setuptools import Command
 from setuptools import find_packages
 from setuptools import setup
@@ -13,9 +16,6 @@ requirements = [
     "click>=5.0,<9.0",
     "click-log>=0.3.0, <0.5.0",
     "requests >=2.20.0",
-    # https://github.com/sigmavirus24/requests-toolbelt/pull/28
-    # And https://github.com/sigmavirus24/requests-toolbelt/issues/54
-    "requests_toolbelt >=0.4.0",
     # https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d  # noqa
     "atomicwrites>=0.1.7",
     "aiohttp>=3.8.0,<4.0.0",
@@ -64,7 +64,7 @@ setup(
     include_package_data=True,
     cmdclass={"minimal_requirements": PrintRequirements},
     use_scm_version={"write_to": "vdirsyncer/version.py"},
-    entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:main"]},
+    entry_points={"console_scripts": ["vdirsyncer = vdirsyncer.cli:app"]},
     classifiers=[
         "Development Status :: 4 - Beta",
         "Environment :: Console",
diff --git a/tests/__init__.py b/tests/__init__.py
index c8b4c2c4893dfdec44fb4f3552998945c030ce04..bb654125cf5bc504080c8317f604db2f445d8e4b 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,6 +1,9 @@
 """
 Test suite for vdirsyncer.
 """
+
+from __future__ import annotations
+
 import hypothesis.strategies as st
 import urllib3.exceptions
 
diff --git a/tests/conftest.py b/tests/conftest.py
index 2cc19be615e80a0aa42b31d58b81edd9c5fb5da5..813deb53840c9ab05f31179d4b00d84c8245cab0 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,6 +1,9 @@
 """
 General-purpose fixtures for vdirsyncer's testsuite.
 """
+
+from __future__ import annotations
+
 import logging
 import os
 
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index cbd6218125a4717adc94c4c760ff71d06212c2ef..0f119301e1b03c8b74945e99d9b519c75eac436b 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import random
 import textwrap
 import uuid
@@ -383,7 +385,7 @@ class StorageTests:
         uid = str(uuid.uuid4())
         item = Item(
             textwrap.dedent(
-                """
+                f"""
         BEGIN:VCALENDAR
         VERSION:2.0
         BEGIN:VEVENT
@@ -417,9 +419,7 @@ class StorageTests:
         TRANSP:OPAQUE
         END:VEVENT
         END:VCALENDAR
-        """.format(
-                    uid=uid
-                )
+        """
             ).strip()
         )
 
diff --git a/tests/storage/conftest.py b/tests/storage/conftest.py
index 76fe8d37155084473a01d98d2b8ac19a102f995b..5b7485f94738e3f8fb63f2945db520bd7227d6f9 100644
--- a/tests/storage/conftest.py
+++ b/tests/storage/conftest.py
@@ -1,9 +1,10 @@
+from __future__ import annotations
+
 import asyncio
 import contextlib
 import subprocess
 import time
 import uuid
-from typing import Type
 
 import aiostream
 import pytest
@@ -90,7 +91,7 @@ async def slow_create_collection(request, aio_connector):
     # storage limits.
     to_delete = []
 
-    async def inner(cls: Type, args: dict, collection_name: str) -> dict:
+    async def inner(cls: type, args: dict, collection_name: str) -> dict:
         """Create a collection
 
         Returns args necessary to create a Storage instance pointing to it.
diff --git a/tests/storage/dav/__init__.py b/tests/storage/dav/__init__.py
index b97b2cc9bba2ed9e24f96b91e7a9fd0d85dfbe43..912530e15d771c064fac674ac90206b96890f021 100644
--- a/tests/storage/dav/__init__.py
+++ b/tests/storage/dav/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import uuid
 
diff --git a/tests/storage/dav/test_caldav.py b/tests/storage/dav/test_caldav.py
index e68d09b011508f3e663620dff3bb939f4433d30d..656c7b40414c10857cc1bff38ec6dfcbbc245d95 100644
--- a/tests/storage/dav/test_caldav.py
+++ b/tests/storage/dav/test_caldav.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import datetime
 from textwrap import dedent
 
diff --git a/tests/storage/dav/test_carddav.py b/tests/storage/dav/test_carddav.py
index 5a42399693e0c07851b0f302fd2f6cb6f937eb76..e9c6c1f33b78284429d04ea207f40e428f15551c 100644
--- a/tests/storage/dav/test_carddav.py
+++ b/tests/storage/dav/test_carddav.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 from vdirsyncer.storage.dav import CardDAVStorage
diff --git a/tests/storage/dav/test_main.py b/tests/storage/dav/test_main.py
index 196094b55047c8a0279add05635a9a9f6c2b2164..4fa875db8040c0dabf840fdb734525be54dd096c 100644
--- a/tests/storage/dav/test_main.py
+++ b/tests/storage/dav/test_main.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 from vdirsyncer.storage.dav import _BAD_XML_CHARS
@@ -39,8 +41,8 @@ def test_xml_utilities():
 def test_xml_specialchars(char):
     x = _parse_xml(
         '<?xml version="1.0" encoding="UTF-8" ?>'
-        "<foo>ye{}s\r\n"
-        "hello</foo>".format(chr(char)).encode("ascii")
+        f"<foo>ye{chr(char)}s\r\n"
+        "hello</foo>".encode("ascii")
     )
 
     if char in _BAD_XML_CHARS:
diff --git a/tests/storage/servers/baikal/__init__.py b/tests/storage/servers/baikal/__init__.py
index ad27b398c6ce3754af1813b9e7ac61497bec0bd1..fa5cd14053a5433b04bfcfa2dfd225d85fb1a496 100644
--- a/tests/storage/servers/baikal/__init__.py
+++ b/tests/storage/servers/baikal/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 
diff --git a/tests/storage/servers/davical/__init__.py b/tests/storage/servers/davical/__init__.py
index abe0669d7c2c6b457bb9bd895dcabb65a4cf16c9..b3af4138916333f1e49e10b48a86d899477c48cd 100644
--- a/tests/storage/servers/davical/__init__.py
+++ b/tests/storage/servers/davical/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import uuid
 
diff --git a/tests/storage/servers/fastmail/__init__.py b/tests/storage/servers/fastmail/__init__.py
index 548aa4908335b30f2929edf5788ccacf12ac5701..6b6608fa2af1b9951c95afd7427f703acfb19c8b 100644
--- a/tests/storage/servers/fastmail/__init__.py
+++ b/tests/storage/servers/fastmail/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 
 import pytest
diff --git a/tests/storage/servers/icloud/__init__.py b/tests/storage/servers/icloud/__init__.py
index 096702c1a50f4dd67ce5b5e7a50fbb8503ccaba4..eb6af67baf2a22b890919834859a682cd9493fdc 100644
--- a/tests/storage/servers/icloud/__init__.py
+++ b/tests/storage/servers/icloud/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 
 import pytest
diff --git a/tests/storage/servers/radicale/__init__.py b/tests/storage/servers/radicale/__init__.py
index f59cc8138e60b514718f63c1799024d58df1a452..fc302b3d43733b82736e802746b4fff2e68036fb 100644
--- a/tests/storage/servers/radicale/__init__.py
+++ b/tests/storage/servers/radicale/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 
diff --git a/tests/storage/servers/skip/__init__.py b/tests/storage/servers/skip/__init__.py
index db17b0a1cde83b7fe8ac9d5a83fbaff5987b0d6b..f8cc61c97f9b7202f2961c578c1ee48f934fa3e3 100644
--- a/tests/storage/servers/skip/__init__.py
+++ b/tests/storage/servers/skip/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 
diff --git a/tests/storage/servers/xandikos/__init__.py b/tests/storage/servers/xandikos/__init__.py
index 36b4eda199230a3a4abf84b32981749edae8381e..f8936dc6dd318d20dac28e24ec11533a56070837 100644
--- a/tests/storage/servers/xandikos/__init__.py
+++ b/tests/storage/servers/xandikos/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 
diff --git a/tests/storage/test_filesystem.py b/tests/storage/test_filesystem.py
index 9ae6ec0092eeaae4deceb4552af9885e0668d148..ca7e9d3195aae1a6dbcc50b8f1667e2c5afaf5d4 100644
--- a/tests/storage/test_filesystem.py
+++ b/tests/storage/test_filesystem.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import subprocess
 
 import aiostream
diff --git a/tests/storage/test_http.py b/tests/storage/test_http.py
index 0e0247f6007f59c25bf5f7b9eb713715579adb8f..091dfd9813084353f1be030d848f9753460a3476 100644
--- a/tests/storage/test_http.py
+++ b/tests/storage/test_http.py
@@ -1,10 +1,13 @@
+from __future__ import annotations
+
 import pytest
-from aiohttp import BasicAuth
 from aioresponses import CallbackResult
 from aioresponses import aioresponses
 
 from tests import normalize_item
 from vdirsyncer.exceptions import UserError
+from vdirsyncer.http import BasicAuthMethod
+from vdirsyncer.http import DigestAuthMethod
 from vdirsyncer.storage.http import HttpStorage
 from vdirsyncer.storage.http import prepare_auth
 
@@ -89,16 +92,14 @@ def test_readonly_param(aio_connector):
 def test_prepare_auth():
     assert prepare_auth(None, "", "") is None
 
-    assert prepare_auth(None, "user", "pwd") == BasicAuth("user", "pwd")
-    assert prepare_auth("basic", "user", "pwd") == BasicAuth("user", "pwd")
+    assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
+    assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
 
     with pytest.raises(ValueError) as excinfo:
         assert prepare_auth("basic", "", "pwd")
     assert "you need to specify username and password" in str(excinfo.value).lower()
 
-    from requests.auth import HTTPDigestAuth
-
-    assert isinstance(prepare_auth("digest", "user", "pwd"), HTTPDigestAuth)
+    assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
 
     with pytest.raises(ValueError) as excinfo:
         prepare_auth("ladida", "user", "pwd")
@@ -106,20 +107,12 @@ def test_prepare_auth():
     assert "unknown authentication method" in str(excinfo.value).lower()
 
 
-def test_prepare_auth_guess(monkeypatch):
-    import requests_toolbelt.auth.guess
-
-    assert isinstance(
-        prepare_auth("guess", "user", "pwd"),
-        requests_toolbelt.auth.guess.GuessAuth,
-    )
-
-    monkeypatch.delattr(requests_toolbelt.auth.guess, "GuessAuth")
-
+def test_prepare_auth_guess():
+    # guess auth is currently not supported
     with pytest.raises(UserError) as excinfo:
-        prepare_auth("guess", "user", "pwd")
+        prepare_auth("guess", "usr", "pwd")
 
-    assert "requests_toolbelt is too old" in str(excinfo.value).lower()
+    assert "not supported" in str(excinfo.value).lower()
 
 
 def test_verify_false_disallowed(aio_connector):
diff --git a/tests/storage/test_http_with_singlefile.py b/tests/storage/test_http_with_singlefile.py
index 3a157ca515eb3e1fac66066dd88c4b6a3a3b99e2..5d29eaeaa685eb5d0abfc4a93d3e82c99fd30c20 100644
--- a/tests/storage/test_http_with_singlefile.py
+++ b/tests/storage/test_http_with_singlefile.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import aiostream
 import pytest
 from aioresponses import CallbackResult
diff --git a/tests/storage/test_memory.py b/tests/storage/test_memory.py
index d180897acc526ea6798f458ad1a347c887be9292..a3a0143b5a31b187c5bebfb9caa4581b91c1030c 100644
--- a/tests/storage/test_memory.py
+++ b/tests/storage/test_memory.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 from vdirsyncer.storage.memory import MemoryStorage
diff --git a/tests/storage/test_singlefile.py b/tests/storage/test_singlefile.py
index 9e838aa8cc4f87f5c4f9aade64545223e09f1166..96cccda7cde6ab4e186cbc6ea0a772a93e3bf79a 100644
--- a/tests/storage/test_singlefile.py
+++ b/tests/storage/test_singlefile.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 from vdirsyncer.storage.singlefile import SingleFileStorage
diff --git a/tests/system/cli/conftest.py b/tests/system/cli/conftest.py
index 85e07ca432cf6dee43a9b9e05e9672805f987436..c9e206cd77e8018ffbe682ab72d8738811954479 100644
--- a/tests/system/cli/conftest.py
+++ b/tests/system/cli/conftest.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from textwrap import dedent
 
 import pytest
diff --git a/tests/system/cli/test_config.py b/tests/system/cli/test_config.py
index 3e037c998832d675e210a8b1f394ded1edfcbaa0..f701bb88a04ba38c1b03f70af271288f1a572a56 100644
--- a/tests/system/cli/test_config.py
+++ b/tests/system/cli/test_config.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import io
 from textwrap import dedent
 
diff --git a/tests/system/cli/test_discover.py b/tests/system/cli/test_discover.py
index fb9aeac25079e5ccb4f8ed5bd24995357ef45940..a50b56ad63791bc3a9aeee42768b2d79563313a9 100644
--- a/tests/system/cli/test_discover.py
+++ b/tests/system/cli/test_discover.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import json
 from textwrap import dedent
-from typing import List
 
 import pytest
 
@@ -152,7 +153,7 @@ def test_discover_direct_path(tmpdir, runner):
 def test_null_collection_with_named_collection(tmpdir, runner):
     runner.write_with_general(
         dedent(
-            """
+            f"""
     [pair foobar]
     a = "foo"
     b = "bar"
@@ -160,15 +161,13 @@ def test_null_collection_with_named_collection(tmpdir, runner):
 
     [storage foo]
     type = "filesystem"
-    path = "{base}/foo/"
+    path = "{str(tmpdir)}/foo/"
     fileext = ".txt"
 
     [storage bar]
     type = "singlefile"
-    path = "{base}/bar.txt"
-    """.format(
-                base=str(tmpdir)
-            )
+    path = "{str(tmpdir)}/bar.txt"
+    """
         )
     )
 
@@ -212,7 +211,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
         async def get(self, href: str):
             raise NotImplementedError
 
-        async def list(self) -> List[tuple]:
+        async def list(self) -> list[tuple]:
             raise NotImplementedError
 
     from vdirsyncer.cli.utils import storage_names
@@ -221,7 +220,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
 
     runner.write_with_general(
         dedent(
-            """
+            f"""
     [pair foobar]
     a = "foo"
     b = "bar"
@@ -229,14 +228,12 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
 
     [storage foo]
     type = "test"
-    require_collection = {a}
+    require_collection = {json.dumps(a_requires)}
 
     [storage bar]
     type = "test"
-    require_collection = {b}
-    """.format(
-                a=json.dumps(a_requires), b=json.dumps(b_requires)
-            )
+    require_collection = {json.dumps(b_requires)}
+    """
         )
     )
 
diff --git a/tests/system/cli/test_fetchparams.py b/tests/system/cli/test_fetchparams.py
index f45876088d72dbee024730a823a61b586e7d77ed..af257b06e75a87d3298ea18b11e3551faafdda72 100644
--- a/tests/system/cli/test_fetchparams.py
+++ b/tests/system/cli/test_fetchparams.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
 from textwrap import dedent
 
 
 def test_get_password_from_command(tmpdir, runner):
     runner.write_with_general(
         dedent(
-            """
+            f"""
         [pair foobar]
         a = "foo"
         b = "bar"
@@ -12,16 +14,14 @@ def test_get_password_from_command(tmpdir, runner):
 
         [storage foo]
         type.fetch = ["shell", "echo filesystem"]
-        path = "{base}/foo/"
+        path = "{str(tmpdir)}/foo/"
         fileext.fetch = ["command", "echo", ".txt"]
 
         [storage bar]
         type = "filesystem"
-        path = "{base}/bar/"
+        path = "{str(tmpdir)}/bar/"
         fileext.fetch = ["prompt", "Fileext for bar"]
-    """.format(
-                base=str(tmpdir)
-            )
+    """
         )
     )
 
diff --git a/tests/system/cli/test_repair.py b/tests/system/cli/test_repair.py
index 7eed4ad21d98381c5d55994d76c0feef734c6f7e..26e2edb9aa9f2b5a1cb7c58f522b682066b81e97 100644
--- a/tests/system/cli/test_repair.py
+++ b/tests/system/cli/test_repair.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from textwrap import dedent
 
 import pytest
diff --git a/tests/system/cli/test_sync.py b/tests/system/cli/test_sync.py
index 9deee66aaae18cbaecaa2c606733d7b74efd0de5..ee2afbc7acedf76078601e732810ce70dc5d10ac 100644
--- a/tests/system/cli/test_sync.py
+++ b/tests/system/cli/test_sync.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import json
 import sys
 from textwrap import dedent
@@ -280,24 +282,22 @@ def test_multiple_pairs(tmpdir, runner):
 def test_create_collections(collections, tmpdir, runner):
     runner.write_with_general(
         dedent(
-            """
+            f"""
     [pair foobar]
     a = "foo"
     b = "bar"
-    collections = {colls}
+    collections = {json.dumps(list(collections))}
 
     [storage foo]
     type = "filesystem"
-    path = "{base}/foo/"
+    path = "{str(tmpdir)}/foo/"
     fileext = ".txt"
 
     [storage bar]
     type = "filesystem"
-    path = "{base}/bar/"
+    path = "{str(tmpdir)}/bar/"
     fileext = ".txt"
-    """.format(
-                base=str(tmpdir), colls=json.dumps(list(collections))
-            )
+    """
         )
     )
 
@@ -315,7 +315,7 @@ def test_create_collections(collections, tmpdir, runner):
 def test_ident_conflict(tmpdir, runner):
     runner.write_with_general(
         dedent(
-            """
+            f"""
     [pair foobar]
     a = "foo"
     b = "bar"
@@ -323,16 +323,14 @@ def test_ident_conflict(tmpdir, runner):
 
     [storage foo]
     type = "filesystem"
-    path = "{base}/foo/"
+    path = "{str(tmpdir)}/foo/"
     fileext = ".txt"
 
     [storage bar]
     type = "filesystem"
-    path = "{base}/bar/"
+    path = "{str(tmpdir)}/bar/"
     fileext = ".txt"
-    """.format(
-                base=str(tmpdir)
-            )
+    """
         )
     )
 
@@ -371,7 +369,7 @@ def test_ident_conflict(tmpdir, runner):
 def test_unknown_storage(tmpdir, runner, existing, missing):
     runner.write_with_general(
         dedent(
-            """
+            f"""
     [pair foobar]
     a = "foo"
     b = "bar"
@@ -379,11 +377,9 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
 
     [storage {existing}]
     type = "filesystem"
-    path = "{base}/{existing}/"
+    path = "{str(tmpdir)}/{existing}/"
     fileext = ".txt"
-    """.format(
-                base=str(tmpdir), existing=existing
-            )
+    """
         )
     )
 
@@ -393,10 +389,8 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
     assert result.exception
 
     assert (
-        "Storage '{missing}' not found. "
-        "These are the configured storages: ['{existing}']".format(
-            missing=missing, existing=existing
-        )
+        f"Storage '{missing}' not found. "
+        f"These are the configured storages: ['{existing}']"
     ) in result.output
 
 
@@ -416,25 +410,23 @@ def test_no_configured_pairs(tmpdir, runner, cmd):
 def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
     runner.write_with_general(
         dedent(
-            """
+            f"""
     [pair foobar]
     a = "foo"
     b = "bar"
     collections = null
-    conflict_resolution = {val}
+    conflict_resolution = {json.dumps(resolution)}
 
     [storage foo]
     type = "filesystem"
     fileext = ".txt"
-    path = "{base}/foo"
+    path = "{str(tmpdir)}/foo"
 
     [storage bar]
     type = "filesystem"
     fileext = ".txt"
-    path = "{base}/bar"
-    """.format(
-                base=str(tmpdir), val=json.dumps(resolution)
-            )
+    path = "{str(tmpdir)}/bar"
+    """
         )
     )
 
@@ -526,13 +518,11 @@ def test_fetch_only_necessary_params(tmpdir, runner):
     fetch_script = tmpdir.join("fetch_script")
     fetch_script.write(
         dedent(
-            """
+            f"""
     set -e
-    touch "{}"
+    touch "{str(fetched_file)}"
     echo ".txt"
-    """.format(
-                str(fetched_file)
-            )
+    """
         )
     )
 
diff --git a/tests/system/cli/test_utils.py b/tests/system/cli/test_utils.py
index fe6722e25d493b2f37296f46b794d6d0c37a4ece..56e561b883b54e63d82f516e12a0d63e5a5f788b 100644
--- a/tests/system/cli/test_utils.py
+++ b/tests/system/cli/test_utils.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 
 from vdirsyncer import exceptions
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
index 59271e5717f21ad053cb26b523fd6c015371846f..c4dc9b3ceda1422d06738e810c15d1cb2e461ff0 100644
--- a/tests/system/conftest.py
+++ b/tests/system/conftest.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import ssl
 
 import pytest
diff --git a/tests/system/utils/test_main.py b/tests/system/utils/test_main.py
index 4d179e707cc64fc72796cd75aab8a818daf0baac..0d31aeee9e0096b22b72946d15d30bf72d6bd4a6 100644
--- a/tests/system/utils/test_main.py
+++ b/tests/system/utils/test_main.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 
 import aiohttp
@@ -20,7 +22,7 @@ def test_get_storage_init_args():
     from vdirsyncer.storage.memory import MemoryStorage
 
     all, required = utils.get_storage_init_args(MemoryStorage)
-    assert all == {"fileext", "collection", "read_only", "instance_name"}
+    assert all == {"fileext", "collection", "read_only", "instance_name", "no_delete"}
     assert not required
 
 
diff --git a/tests/unit/cli/test_config.py b/tests/unit/cli/test_config.py
index 19edd5ebebc0b47d0d70cda459d086774326f053..245fcb64098237e4a5a00ab23d299d30a57d0f07 100644
--- a/tests/unit/cli/test_config.py
+++ b/tests/unit/cli/test_config.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 
 from vdirsyncer.cli.config import _resolve_conflict_via_command
diff --git a/tests/unit/cli/test_discover.py b/tests/unit/cli/test_discover.py
index 4c24a38789ac69e309a7f303cefd6f7a45fbfd62..fef646eb263f3371be8e1314d62b53edc3479a7a 100644
--- a/tests/unit/cli/test_discover.py
+++ b/tests/unit/cli/test_discover.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import aiostream
 import pytest
 
diff --git a/tests/unit/cli/test_fetchparams.py b/tests/unit/cli/test_fetchparams.py
index d3d72292fedcec8dd4cc67d3930280726fdbcd42..677e86db4558a1856f0a5a86d38ad18489a94da7 100644
--- a/tests/unit/cli/test_fetchparams.py
+++ b/tests/unit/cli/test_fetchparams.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import contextmanager
 from unittest.mock import patch
 
diff --git a/tests/unit/sync/test_status.py b/tests/unit/sync/test_status.py
index 7255e2c59decb2f5c115ec3ad463ad6fa3cdc67f..a88d572ad723724dbad3bd84e1dda9ef83e81f81 100644
--- a/tests/unit/sync/test_status.py
+++ b/tests/unit/sync/test_status.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import hypothesis.strategies as st
 from hypothesis import assume
 from hypothesis import given
diff --git a/tests/unit/sync/test_sync.py b/tests/unit/sync/test_sync.py
index 7720d30b40848cca62cf52cd1e2da09479a6a112..c3ea627332c82bc32a0105671c06c1714cdebe2d 100644
--- a/tests/unit/sync/test_sync.py
+++ b/tests/unit/sync/test_sync.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 from copy import deepcopy
 
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index aba6e65ce2bcfdb346b4f7390722891a45237023..2f1f235252e69802c506fff6404d747e99ce08a5 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from vdirsyncer import exceptions
 
 
diff --git a/tests/unit/test_metasync.py b/tests/unit/test_metasync.py
index e685580adfdc2068499298467a30de640cd2810b..e099db1424fec6951a2c788cfb967a002c8d94fc 100644
--- a/tests/unit/test_metasync.py
+++ b/tests/unit/test_metasync.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import hypothesis.strategies as st
 import pytest
 import pytest_asyncio
diff --git a/tests/unit/test_repair.py b/tests/unit/test_repair.py
index 3a153bca7e248faf6a6afb156dd055ebc09cf2f2..3a4b3cbda880755bf8c550648708a436dd74054b 100644
--- a/tests/unit/test_repair.py
+++ b/tests/unit/test_repair.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import aiostream
 import pytest
 from hypothesis import HealthCheck
diff --git a/tests/unit/utils/test_vobject.py b/tests/unit/utils/test_vobject.py
index abd1e05b1451202cf19aee33ec50a1493db2e923..150c92fc4c9824729fdfd68cf2698f003c6092ee 100644
--- a/tests/unit/utils/test_vobject.py
+++ b/tests/unit/utils/test_vobject.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from textwrap import dedent
 
 import hypothesis.strategies as st
@@ -235,6 +237,31 @@ def test_broken_item():
     assert item.parsed is None
 
 
+def test_mismatched_end():
+    with pytest.raises(ValueError) as excinfo:
+        vobject._Component.parse(
+            [
+                "BEGIN:FOO",
+                "END:BAR",
+            ]
+        )
+
+    assert "Got END:BAR, expected END:FOO at line 2" in str(excinfo.value)
+
+
+def test_missing_end():
+    with pytest.raises(ValueError) as excinfo:
+        vobject._Component.parse(
+            [
+                "BEGIN:FOO",
+                "BEGIN:BAR",
+                "END:BAR",
+            ]
+        )
+
+    assert "Missing END for component(s): FOO" in str(excinfo.value)
+
+
 def test_multiple_items():
     with pytest.raises(ValueError) as excinfo:
         vobject._Component.parse(
diff --git a/vdirsyncer/__init__.py b/vdirsyncer/__init__.py
index 76ba468ab3a0bdab1f9adbf0d4a1370351e8fdb8..e1d4e70c004de0809e485f3a1e26fc0528bd4ca7 100644
--- a/vdirsyncer/__init__.py
+++ b/vdirsyncer/__init__.py
@@ -2,6 +2,7 @@
 Vdirsyncer synchronizes calendars and contacts.
 """
 
+from __future__ import annotations
 
 PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
 BUGTRACKER_HOME = PROJECT_HOME + "/issues"
@@ -17,10 +18,10 @@ except ImportError:  # pragma: no cover
     )
 
 
-def _check_python_version():  # pragma: no cover
+def _check_python_version():
     import sys
 
-    if sys.version_info < (3, 7, 0):
+    if sys.version_info < (3, 7, 0):  # noqa: UP036
         print("vdirsyncer requires at least Python 3.7.")
         sys.exit(1)
 
diff --git a/vdirsyncer/__main__.py b/vdirsyncer/__main__.py
index e867465e58ed5f52e8cdaabcb8c73fe6128cd1ef..edf332d1b9febc696bdf4669a742c799d5849316 100644
--- a/vdirsyncer/__main__.py
+++ b/vdirsyncer/__main__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 if __name__ == "__main__":
     from vdirsyncer.cli import app
 
diff --git a/vdirsyncer/cli/__init__.py b/vdirsyncer/cli/__init__.py
index 37d05c7b528904df1f257d4ce212c4e626d4d9c9..0885429972f5d25c33fdb92f9a9f54ab5f013113 100644
--- a/vdirsyncer/cli/__init__.py
+++ b/vdirsyncer/cli/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import functools
 import json
@@ -14,6 +16,9 @@ from .. import __version__
 cli_logger = logging.getLogger(__name__)
 click_log.basic_config("vdirsyncer")
 
+# Add -h as a short alias for --help.
+click_context_settings = {"help_option_names": ["-h", "--help"]}
+
 
 class AppContext:
     def __init__(self):
@@ -39,13 +44,13 @@ def catch_errors(f):
     return inner
 
 
-@click.group()
+@click.group(context_settings=click_context_settings)
 @click_log.simple_verbosity_option("vdirsyncer")
 @click.version_option(version=__version__)
 @click.option("--config", "-c", metavar="FILE", help="Config file to use.")
 @pass_context
 @catch_errors
-def app(ctx, config):
+def app(ctx, config: str):
     """
     Synchronize calendars and contacts
     """
@@ -54,7 +59,7 @@ def app(ctx, config):
         cli_logger.warning(
             "Vdirsyncer currently does not support Windows. "
             "You will likely encounter bugs. "
-            "See {}/535 for more information.".format(BUGTRACKER_HOME)
+            f"See {BUGTRACKER_HOME}/535 for more information."
         )
 
     if not ctx.config:
@@ -63,9 +68,6 @@ def app(ctx, config):
         ctx.config = load_config(config)
 
 
-main = app
-
-
 def collections_arg_callback(ctx, param, value):
     """
     Expand the various CLI shortforms ("pair, pair/collection") to an iterable
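
With ``context_settings`` wired into the group above, the short flag is simply an
alias for the long one; a usage sketch::

    vdirsyncer -h    # prints the same help text as `vdirsyncer --help`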
diff --git a/vdirsyncer/cli/config.py b/vdirsyncer/cli/config.py
index 59e7425dd10d2e69bd739a73777e38f1b896d07f..8aed8249017ad058f0a11bdbf91c35b4c1b67edb 100644
--- a/vdirsyncer/cli/config.py
+++ b/vdirsyncer/cli/config.py
@@ -5,11 +5,15 @@ import os
 import string
 from configparser import RawConfigParser
 from itertools import chain
+from typing import IO
+from typing import Any
+from typing import Generator
 
 from .. import PROJECT_HOME
 from .. import exceptions
 from ..utils import cached_property
 from ..utils import expand_path
+from ..vobject import Item
 from .fetchparams import expand_fetch_params
 from .utils import storage_class_from_config
 
@@ -23,16 +27,16 @@ def validate_section_name(name, section_type):
     if invalid:
         chars_display = "".join(sorted(SECTION_NAME_CHARS))
         raise exceptions.UserError(
-            'The {}-section "{}" contains invalid characters. Only '
+            f'The {section_type}-section "{name}" contains invalid characters. Only '
             "the following characters are allowed for storage and "
-            "pair names:\n{}".format(section_type, name, chars_display)
+            f"pair names:\n{chars_display}"
         )
 
 
-def _validate_general_section(general_config):
+def _validate_general_section(general_config: dict[str, str]):
     invalid = set(general_config) - GENERAL_ALL
     missing = GENERAL_REQUIRED - set(general_config)
-    problems = []
+    problems: list[str] = []
 
     if invalid:
         problems.append(
@@ -47,7 +51,7 @@ def _validate_general_section(general_config):
     if problems:
         raise exceptions.UserError(
             "Invalid general section. Copy the example "
-            "config from the repository and edit it: {}".format(PROJECT_HOME),
+            f"config from the repository and edit it: {PROJECT_HOME}",
             problems=problems,
         )
 
@@ -92,17 +96,19 @@ def _validate_collections_param(collections):
 
 
 class _ConfigReader:
-    def __init__(self, f):
-        self._file = f
+    def __init__(self, f: IO[Any]):
+        self._file: IO[Any] = f
         self._parser = c = RawConfigParser()
         c.read_file(f)
-        self._seen_names = set()
+        self._seen_names: set = set()
 
-        self._general = {}
-        self._pairs = {}
-        self._storages = {}
+        self._general: dict[str, str] = {}
+        self._pairs: dict[str, dict[str, str]] = {}
+        self._storages: dict[str, dict[str, str]] = {}
 
-    def _parse_section(self, section_type, name, options):
+    def _parse_section(
+        self, section_type: str, name: str, options: dict[str, Any]
+    ) -> None:
         validate_section_name(name, section_type)
         if name in self._seen_names:
             raise ValueError(f'Name "{name}" already used.')
@@ -119,7 +125,9 @@ class _ConfigReader:
         else:
             raise ValueError("Unknown section type.")
 
-    def parse(self):
+    def parse(
+        self,
+    ) -> tuple[dict[str, str], dict[str, dict[str, str]], dict[str, dict[str, str]]]:
         for section in self._parser.sections():
             if " " in section:
                 section_type, name = section.split(" ", 1)
@@ -145,7 +153,9 @@ class _ConfigReader:
         return self._general, self._pairs, self._storages
 
 
-def _parse_options(items, section=None):
+def _parse_options(
+    items: list[tuple[str, str]], section: str | None = None
+) -> Generator[tuple[str, dict[str, str]], None, None]:
     for key, value in items:
         try:
             yield key, json.loads(value)
@@ -154,13 +164,18 @@ def _parse_options(items, section=None):
 
 
 class Config:
-    def __init__(self, general, pairs, storages):
+    def __init__(
+        self,
+        general: dict[str, str],
+        pairs: dict[str, dict[str, str]],
+        storages: dict[str, dict[str, str]],
+    ) -> None:
         self.general = general
         self.storages = storages
         for name, options in storages.items():
             options["instance_name"] = name
 
-        self.pairs = {}
+        self.pairs: dict[str, PairConfig] = {}
         for name, options in pairs.items():
             try:
                 self.pairs[name] = PairConfig(self, name, options)
@@ -168,12 +183,12 @@ class Config:
                 raise exceptions.UserError(f"Pair {name}: {e}")
 
     @classmethod
-    def from_fileobject(cls, f):
+    def from_fileobject(cls, f: IO[Any]):
         reader = _ConfigReader(f)
         return cls(*reader.parse())
 
     @classmethod
-    def from_filename_or_environment(cls, fname=None):
+    def from_filename_or_environment(cls, fname: str | None = None):
         if fname is None:
             fname = os.environ.get("VDIRSYNCER_CONFIG", None)
         if fname is None:
@@ -190,15 +205,13 @@ class Config:
         except Exception as e:
             raise exceptions.UserError(f"Error during reading config {fname}: {e}")
 
-    def get_storage_args(self, storage_name):
+    def get_storage_args(self, storage_name: str):
         try:
             args = self.storages[storage_name]
         except KeyError:
             raise exceptions.UserError(
-                "Storage {!r} not found. "
-                "These are the configured storages: {}".format(
-                    storage_name, list(self.storages)
-                )
+                f"Storage {storage_name!r} not found. "
+                f"These are the configured storages: {list(self.storages)}"
             )
         else:
             return expand_fetch_params(args)
@@ -211,13 +224,13 @@ class Config:
 
 
 class PairConfig:
-    def __init__(self, full_config, name, options):
-        self._config = full_config
-        self.name = name
-        self.name_a = options.pop("a")
-        self.name_b = options.pop("b")
+    def __init__(self, full_config: Config, name: str, options: dict[str, str]):
+        self._config: Config = full_config
+        self.name: str = name
+        self.name_a: str = options.pop("a")
+        self.name_b: str = options.pop("b")
 
-        self._partial_sync = options.pop("partial_sync", None)
+        self._partial_sync: str | None = options.pop("partial_sync", None)
         self.metadata = options.pop("metadata", None) or ()
 
         self.conflict_resolution = self._process_conflict_resolution_param(
@@ -238,7 +251,9 @@ class PairConfig:
         if options:
             raise ValueError("Unknown options: {}".format(", ".join(options)))
 
-    def _process_conflict_resolution_param(self, conflict_resolution):
+    def _process_conflict_resolution_param(
+        self, conflict_resolution: str | list[str] | None
+    ):
         if conflict_resolution in (None, "a wins", "b wins"):
             return conflict_resolution
         elif (
@@ -302,10 +317,10 @@ class PairConfig:
 
 
 class CollectionConfig:
-    def __init__(self, pair, name, config_a, config_b):
+    def __init__(self, pair, name: str, config_a, config_b):
         self.pair = pair
         self._config = pair._config
-        self.name = name
+        self.name: str = name
         self.config_a = config_a
         self.config_b = config_b
 
@@ -314,7 +329,9 @@ class CollectionConfig:
 load_config = Config.from_filename_or_environment
 
 
-def _resolve_conflict_via_command(a, b, command, a_name, b_name, _check_call=None):
+def _resolve_conflict_via_command(
+    a, b, command, a_name, b_name, _check_call=None
+) -> Item:
     import shutil
     import tempfile
 
diff --git a/vdirsyncer/cli/discover.py b/vdirsyncer/cli/discover.py
index c28af1413f072e10d4a2752f3d2a680448ae5378..247306ea519fdd4c44b9f296c4b905f91d787927 100644
--- a/vdirsyncer/cli/discover.py
+++ b/vdirsyncer/cli/discover.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import hashlib
 import json
@@ -57,7 +59,7 @@ async def collections_for_pair(
     cache_key = _get_collections_cache_key(pair)
     if from_cache:
         rv = load_status(status_path, pair.name, data_type="collections")
-        if rv and rv.get("cache_key", None) == cache_key:
+        if rv.get("cache_key", None) == cache_key:
             return list(
                 _expand_collections_cache(
                     rv["collections"], pair.config_a, pair.config_b
@@ -66,18 +68,18 @@ async def collections_for_pair(
         elif rv:
             raise exceptions.UserError(
                 "Detected change in config file, "
-                "please run `vdirsyncer discover {}`.".format(pair.name)
+                f"please run `vdirsyncer discover {pair.name}`."
             )
         else:
             raise exceptions.UserError(
-                "Please run `vdirsyncer discover {}` "
-                " before synchronization.".format(pair.name)
+                f"Please run `vdirsyncer discover {pair.name}` "
+                " before synchronization."
             )
 
     logger.info(f"Discovering collections for pair {pair.name}")
 
-    a_discovered = _DiscoverResult(pair.config_a, connector=connector)
-    b_discovered = _DiscoverResult(pair.config_b, connector=connector)
+    a_discovered = DiscoverResult(pair.config_a, connector=connector)
+    b_discovered = DiscoverResult(pair.config_b, connector=connector)
 
     if list_collections:
         # TODO: We should gather data and THEN print, so it can be async.
@@ -108,8 +110,8 @@ async def collections_for_pair(
     await _sanity_check_collections(rv, connector=connector)
 
     save_status(
-        status_path,
-        pair.name,
+        base_path=status_path,
+        pair=pair.name,
         data_type="collections",
         data={
             "collections": list(
@@ -155,7 +157,7 @@ def _expand_collections_cache(collections, config_a, config_b):
         yield name, (a, b)
 
 
-class _DiscoverResult:
+class DiscoverResult:
     def __init__(self, config, *, connector):
         self._cls, _ = storage_class_from_config(config)
 
@@ -271,8 +273,8 @@ async def _print_collections(
 
         logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
         logger.warning(
-            "Failed to discover collections for {}, use `-vdebug` "
-            "to see the full traceback.".format(instance_name)
+            f"Failed to discover collections for {instance_name}, use `-vdebug` "
+            "to see the full traceback."
         )
         return
     logger.info(f"{instance_name}:")
diff --git a/vdirsyncer/cli/fetchparams.py b/vdirsyncer/cli/fetchparams.py
index 2cdcea95407e057cba34e45b96f2da6211dcc91f..87a483a399c64535e2eb771d3059d82da12170a7 100644
--- a/vdirsyncer/cli/fetchparams.py
+++ b/vdirsyncer/cli/fetchparams.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 
 import click
@@ -65,8 +67,7 @@ def _fetch_value(opts, key):
     else:
         if not rv:
             raise exceptions.UserError(
-                "Empty value for {}, this most likely "
-                "indicates an error.".format(key)
+                f"Empty value for {key}, this most likely indicates an error."
             )
         password_cache[cache_key] = rv
         return rv
diff --git a/vdirsyncer/cli/tasks.py b/vdirsyncer/cli/tasks.py
index 83014f6022e5063b14d5b2d785f849ad783522c8..6e8f5ba3e2d637daeceeae9f6311d9081b2b53f1 100644
--- a/vdirsyncer/cli/tasks.py
+++ b/vdirsyncer/cli/tasks.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import json
 
 import aiohttp
@@ -5,8 +7,8 @@ import aiohttp
 from .. import exceptions
 from .. import sync
 from .config import CollectionConfig
+from .discover import DiscoverResult
 from .discover import collections_for_pair
-from .discover import storage_class_from_config
 from .discover import storage_instance_from_config
 from .utils import JobFailed
 from .utils import cli_logger
@@ -115,15 +117,14 @@ async def repair_collection(
 
     if collection is not None:
         cli_logger.info("Discovering collections (skipping cache).")
-        cls, config = storage_class_from_config(config)
-        async for config in cls.discover(**config):  # noqa E902
+        get_discovered = DiscoverResult(config, connector=connector)
+        discovered = await get_discovered.get_self()
+        for config in discovered.values():
             if config["collection"] == collection:
                 break
         else:
             raise exceptions.UserError(
-                "Couldn't find collection {} for storage {}.".format(
-                    collection, storage_name
-                )
+                f"Couldn't find collection {collection} for storage {storage_name}."
             )
 
     config["type"] = storage_type
@@ -143,11 +144,11 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
     try:
         cli_logger.info(f"Metasyncing {status_name}")
 
-        status = (
-            load_status(
-                general["status_path"], pair.name, collection.name, data_type="metadata"
-            )
-            or {}
+        status = load_status(
+            general["status_path"],
+            pair.name,
+            collection.name,
+            data_type="metadata",
         )
 
         a = await storage_instance_from_config(collection.config_a, connector=connector)
@@ -165,9 +166,9 @@ async def metasync_collection(collection, general, *, connector: aiohttp.TCPConn
         raise JobFailed
 
     save_status(
-        general["status_path"],
-        pair.name,
-        collection.name,
+        base_path=general["status_path"],
+        pair=pair.name,
         data_type="metadata",
         data=status,
+        collection=collection.name,
     )
diff --git a/vdirsyncer/cli/utils.py b/vdirsyncer/cli/utils.py
index 0f48a2eb65aa7c8de70223bedc8287a778190e8a..428a9c9a3e3113ae11d7b4e44513b2894e1787c4 100644
--- a/vdirsyncer/cli/utils.py
+++ b/vdirsyncer/cli/utils.py
@@ -1,9 +1,12 @@
+from __future__ import annotations
+
 import contextlib
 import errno
 import importlib
 import json
 import os
 import sys
+from typing import Any
 
 import aiohttp
 import click
@@ -12,6 +15,7 @@ from atomicwrites import atomic_write
 from .. import BUGTRACKER_HOME
 from .. import DOCS_HOME
 from .. import exceptions
+from ..storage.base import Storage
 from ..sync.exceptions import IdentConflict
 from ..sync.exceptions import PartialSync
 from ..sync.exceptions import StorageEmpty
@@ -27,7 +31,7 @@ STATUS_DIR_PERMISSIONS = 0o700
 
 class _StorageIndex:
     def __init__(self):
-        self._storages = {
+        self._storages: dict[str, str | type[Storage]] = {
             "caldav": "vdirsyncer.storage.dav.CalDAVStorage",
             "carddav": "vdirsyncer.storage.dav.CardDAVStorage",
             "filesystem": "vdirsyncer.storage.filesystem.FilesystemStorage",
@@ -37,7 +41,7 @@ class _StorageIndex:
             "google_contacts": "vdirsyncer.storage.google.GoogleContactsStorage",
         }
 
-    def __getitem__(self, name):
+    def __getitem__(self, name: str) -> type[Storage]:
         item = self._storages[name]
         if not isinstance(item, str):
             return item
@@ -84,23 +88,19 @@ def handle_cli_error(status_name=None, e=None):
         )
     except PartialSync as e:
         cli_logger.error(
-            "{status_name}: Attempted change on {storage}, which is read-only"
+            f"{status_name}: Attempted change on {e.storage}, which is read-only"
             ". Set `partial_sync` in your pair section to `ignore` to ignore "
-            "those changes, or `revert` to revert them on the other side.".format(
-                status_name=status_name, storage=e.storage
-            )
+            "those changes, or `revert` to revert them on the other side."
         )
     except SyncConflict as e:
         cli_logger.error(
-            "{status_name}: One item changed on both sides. Resolve this "
+            f"{status_name}: One item changed on both sides. Resolve this "
             "conflict manually, or by setting the `conflict_resolution` "
             "parameter in your config file.\n"
-            "See also {docs}/config.html#pair-section\n"
-            "Item ID: {e.ident}\n"
-            "Item href on side A: {e.href_a}\n"
-            "Item href on side B: {e.href_b}\n".format(
-                status_name=status_name, e=e, docs=DOCS_HOME
-            )
+            f"See also {DOCS_HOME}/config.html#pair-section\n"
+            f"Item ID: {e.ident}\n"
+            f"Item href on side A: {e.href_a}\n"
+            f"Item href on side B: {e.href_b}\n"
         )
     except IdentConflict as e:
         cli_logger.error(
@@ -121,17 +121,17 @@ def handle_cli_error(status_name=None, e=None):
         pass
     except exceptions.PairNotFound as e:
         cli_logger.error(
-            "Pair {pair_name} does not exist. Please check your "
+            f"Pair {e.pair_name} does not exist. Please check your "
             "configuration file and make sure you've typed the pair name "
-            "correctly".format(pair_name=e.pair_name)
+            "correctly"
         )
     except exceptions.InvalidResponse as e:
         cli_logger.error(
             "The server returned something vdirsyncer doesn't understand. "
-            "Error message: {!r}\n"
+            f"Error message: {e!r}\n"
             "While this is most likely a serverside problem, the vdirsyncer "
             "devs are generally interested in such bugs. Please report it in "
-            "the issue tracker at {}".format(e, BUGTRACKER_HOME)
+            f"the issue tracker at {BUGTRACKER_HOME}"
         )
     except exceptions.CollectionRequired:
         cli_logger.error(
@@ -154,13 +154,18 @@ def handle_cli_error(status_name=None, e=None):
         cli_logger.debug("".join(tb))
 
 
-def get_status_name(pair, collection):
+def get_status_name(pair: str, collection: str | None) -> str:
     if collection is None:
         return pair
     return pair + "/" + collection
 
 
-def get_status_path(base_path, pair, collection=None, data_type=None):
+def get_status_path(
+    base_path: str,
+    pair: str,
+    collection: str | None = None,
+    data_type: str | None = None,
+) -> str:
     assert data_type is not None
     status_name = get_status_name(pair, collection)
     path = expand_path(os.path.join(base_path, status_name))
@@ -174,10 +179,15 @@ def get_status_path(base_path, pair, collection=None, data_type=None):
     return path
 
 
-def load_status(base_path, pair, collection=None, data_type=None):
+def load_status(
+    base_path: str,
+    pair: str,
+    collection: str | None = None,
+    data_type: str | None = None,
+) -> dict[str, Any]:
     path = get_status_path(base_path, pair, collection, data_type)
     if not os.path.exists(path):
-        return None
+        return {}
     assert_permissions(path, STATUS_PERMISSIONS)
 
     with open(path) as f:
@@ -189,7 +199,7 @@ def load_status(base_path, pair, collection=None, data_type=None):
     return {}
 
 
-def prepare_status_path(path):
+def prepare_status_path(path: str) -> None:
     dirname = os.path.dirname(path)
 
     try:
@@ -200,7 +210,7 @@ def prepare_status_path(path):
 
 
 @contextlib.contextmanager
-def manage_sync_status(base_path, pair_name, collection_name):
+def manage_sync_status(base_path: str, pair_name: str, collection_name: str | None):
     path = get_status_path(base_path, pair_name, collection_name, "items")
     status = None
     legacy_status = None
@@ -225,9 +235,13 @@ def manage_sync_status(base_path, pair_name, collection_name):
     yield status
 
 
-def save_status(base_path, pair, collection=None, data_type=None, data=None):
-    assert data_type is not None
-    assert data is not None
+def save_status(
+    base_path: str,
+    pair: str,
+    data_type: str,
+    data: dict[str, Any],
+    collection: str | None = None,
+) -> None:
     status_name = get_status_name(pair, collection)
     path = expand_path(os.path.join(base_path, status_name)) + "." + data_type
     prepare_status_path(path)
@@ -319,13 +333,11 @@ def handle_storage_init_error(cls, config):
     )
 
 
-def assert_permissions(path, wanted):
+def assert_permissions(path: str, wanted: int) -> None:
     permissions = os.stat(path).st_mode & 0o777
     if permissions > wanted:
         cli_logger.warning(
-            "Correcting permissions of {} from {:o} to {:o}".format(
-                path, permissions, wanted
-            )
+            f"Correcting permissions of {path} from {permissions:o} to {wanted:o}"
         )
         os.chmod(path, wanted)
 
@@ -351,7 +363,7 @@ async def handle_collection_not_found(config, collection, e=None):
             cli_logger.error(e)
 
     raise exceptions.UserError(
-        'Unable to find or create collection "{collection}" for '
-        'storage "{storage}". Please create the collection '
-        "yourself.".format(collection=collection, storage=storage_name)
+        f'Unable to find or create collection "{collection}" for '
+        f'storage "{storage_name}". Please create the collection '
+        "yourself."
     )
diff --git a/vdirsyncer/exceptions.py b/vdirsyncer/exceptions.py
index 53a119c908aba694d1832b00b16b5aec6939cf8e..82ba0c2a311902bc6932c750370e053e05375ade 100644
--- a/vdirsyncer/exceptions.py
+++ b/vdirsyncer/exceptions.py
@@ -3,6 +3,8 @@ Contains exception classes used by vdirsyncer. Not all exceptions are here,
 only the most commonly used ones.
 """
 
+from __future__ import annotations
+
 
 class Error(Exception):
     """Baseclass for all errors."""
diff --git a/vdirsyncer/http.py b/vdirsyncer/http.py
index 30cfe909c3a98a0ad5499c235f379f216d2a23ef..16c60aa574ba60e2445b58cd55ce9ae3c950d2ce 100644
--- a/vdirsyncer/http.py
+++ b/vdirsyncer/http.py
@@ -1,9 +1,15 @@
+from __future__ import annotations
+
 import logging
+import re
+from abc import ABC, abstractmethod
+from base64 import b64encode
 from ssl import create_default_context
 
 import aiohttp
+import requests.auth
+from requests.utils import parse_dict_header
 
-from . import DOCS_HOME
 from . import __version__
 from . import exceptions
 from .utils import expand_path
@@ -12,53 +18,87 @@ logger = logging.getLogger(__name__)
 USERAGENT = f"vdirsyncer/{__version__}"
 
 
-def _detect_faulty_requests():  # pragma: no cover
-    text = (
-        "Error during import: {e}\n\n"
-        "If you have installed vdirsyncer from a distro package, please file "
-        "a bug against that package, not vdirsyncer.\n\n"
-        "Consult {d}/problems.html#requests-related-importerrors"
-        "-based-distributions on how to work around this."
-    )
+class AuthMethod(ABC):
+    def __init__(self, username, password):
+        self.username = username
+        self.password = password
+
+    @abstractmethod
+    def handle_401(self, response):
+        raise NotImplementedError
+
+    @abstractmethod
+    def get_auth_header(self, method, url):
+        raise NotImplementedError
+
+    def __eq__(self, other):
+        if not isinstance(other, AuthMethod):
+            return False
+        return (
+            self.__class__ == other.__class__
+            and self.username == other.username
+            and self.password == other.password
+        )
+
+
+class BasicAuthMethod(AuthMethod):
+    def handle_401(self, _response):
+        pass
+
+    def get_auth_header(self, _method, _url):
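+        # Standard HTTP Basic scheme: base64 of "username:password", built by
+        # hand so the header can be injected into each request.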
+        auth_str = f"{self.username}:{self.password}"
+        return "Basic " + b64encode(auth_str.encode('utf-8')).decode("utf-8")
+
+
+class DigestAuthMethod(AuthMethod):
+    # Cache one requests.auth.HTTPDigestAuth per (username, password) at class
+    # level so the digest challenge state is shared; otherwise every request
+    # would need an extra "initialization" round-trip to fetch a new challenge.
+    _auth_helpers: dict[tuple[str, str], requests.auth.HTTPDigestAuth] = {}
 
-    try:
-        from requests_toolbelt.auth.guess import GuessAuth  # noqa
-    except ImportError as e:
-        import sys
+    def __init__(self, username, password):
+        super().__init__(username, password)
 
-        print(text.format(e=str(e), d=DOCS_HOME), file=sys.stderr)
-        sys.exit(1)
+        self._auth_helper = self._auth_helpers.setdefault(
+            (username, password),
+            requests.auth.HTTPDigestAuth(username, password),
+        )
+
+    @property
+    def auth_helper_vars(self):
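+        # Expose requests' internal per-thread digest state (challenge, nonce
+        # count), which build_digest_header() reads and updates.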
+        return self._auth_helper._thread_local
+
+    def handle_401(self, response):
+        s_auth = response.headers.get("www-authenticate", "")
+
+        if "digest" in s_auth.lower():
+            # Original source:
+            # https://github.com/psf/requests/blob/f12ccbef6d6b95564da8d22e280d28c39d53f0e9/src/requests/auth.py#L262-L263
+            pat = re.compile(r"digest ", flags=re.IGNORECASE)
+            self.auth_helper_vars.chal = parse_dict_header(pat.sub("", s_auth, count=1))
+
+    def get_auth_header(self, method, url):
+        self._auth_helper.init_per_thread_state()
 
+        if not self.auth_helper_vars.chal:
+            # No challenge yet: the initial request is sent without usable
+            # digest credentials; the server's 401 response supplies the
+            # challenge that handle_401() parses.
+            return ""
 
-_detect_faulty_requests()
-del _detect_faulty_requests
+        return self._auth_helper.build_digest_header(method, url)
 
 
 def prepare_auth(auth, username, password):
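+    # Illustrative storage section using digest auth (names and values are
+    # examples, not taken from a real config):
+    #
+    #   [storage my_calendar]
+    #   type = "caldav"
+    #   url = "https://dav.example.com/"
+    #   username = "user"
+    #   password = "hunter2"
+    #   auth = "digest"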
     if username and password:
         if auth == "basic" or auth is None:
-            return aiohttp.BasicAuth(username, password)
+            return BasicAuthMethod(username, password)
         elif auth == "digest":
-            from requests.auth import HTTPDigestAuth
-
-            return HTTPDigestAuth(username, password)
+            return DigestAuthMethod(username, password)
         elif auth == "guess":
-            try:
-                from requests_toolbelt.auth.guess import GuessAuth
-            except ImportError:
-                raise exceptions.UserError(
-                    "Your version of requests_toolbelt is too "
-                    "old for `guess` authentication. At least "
-                    "version 0.4.0 is required."
-                )
-            else:
-                return GuessAuth(username, password)
+            raise exceptions.UserError(f"'Guess' authentication is not supported in this version of vdirsyncer. \n"
+                                       f"Please explicitly specify either 'basic' or 'digest' auth instead. \n"
+                                       f"See the following issue for more information: "
+                                       f"https://github.com/pimutils/vdirsyncer/issues/1015")
         else:
             raise exceptions.UserError(f"Unknown authentication method: {auth}")
     elif auth:
         raise exceptions.UserError(
-            "You need to specify username and password "
-            "for {} authentication.".format(auth)
+            f"You need to specify username and password for {auth} authentication."
         )
 
     return None
@@ -96,14 +136,17 @@ async def request(
     method,
     url,
     session,
+    auth=None,
     latin1_fallback=True,
     **kwargs,
 ):
-    """Wrapper method for requests, to ease logging and mocking.
+    """Wrapper method for requests, to ease logging and mocking as well as to
+    support auth methods currently unsupported by aiohttp.
 
-    Parameters should be the same as for ``aiohttp.request``, as well as:
+    Parameters should be the same as for ``aiohttp.request``, except:
 
     :param session: A requests session object to use.
+    :param auth: The HTTP ``AuthMethod`` to use for authentication.
     :param verify_fingerprint: Optional. SHA256 of the expected server certificate.
     :param latin1_fallback: RFC-2616 specifies the default Content-Type of
         text/* to be latin1, which is not always correct, but exactly what
@@ -133,7 +176,23 @@ async def request(
         ssl_context.load_cert_chain(*cert)
         kwargs["ssl"] = ssl_context
 
-    response = await session.request(method, url, **kwargs)
+    headers = kwargs.pop("headers", {})
+    num_401 = 0
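+    # At most two attempts: digest auth needs the 401 challenge from a first
+    # request before a valid Authorization header can be built, so one retry
+    # with the freshly parsed challenge is enough.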
+    while num_401 < 2:
+        if auth:
+            headers["Authorization"] = auth.get_auth_header(method, url)
+        response = await session.request(method, url, headers=headers, **kwargs)
+
+        if response.ok or not auth:
+            # we don't need to do the 401-loop if we don't do auth in the first place
+            break
+
+        if response.status == 401:
+            num_401 += 1
+            auth.handle_401(response)
+        else:
+            # some other error, will be handled later on
+            break
 
     # See https://github.com/kennethreitz/requests/issues/2042
     content_type = response.headers.get("Content-Type", "")
diff --git a/vdirsyncer/metasync.py b/vdirsyncer/metasync.py
index cf5a282a130a8418485ebc88e26a141d6d686d4b..1d34dd253055bb0da995207d9a7f11b6622aeebb 100644
--- a/vdirsyncer/metasync.py
+++ b/vdirsyncer/metasync.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 
 from . import exceptions
diff --git a/vdirsyncer/repair.py b/vdirsyncer/repair.py
index e72e480f32d45963f79ab3c49953883d2b73e303..7a1116e623631c4ad03c335d1143cbf980853668 100644
--- a/vdirsyncer/repair.py
+++ b/vdirsyncer/repair.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 from os.path import basename
 
@@ -24,9 +26,9 @@ async def repair_storage(storage, repair_unsafe_uid):
             new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)
         except IrreparableItem:
             logger.error(
-                "Item {!r} is malformed beyond repair. "
+                f"Item {href!r} is malformed beyond repair. "
                 "The PRODID property may indicate which software "
-                "created this item.".format(href)
+                "created this item."
             )
             logger.error(f"Item content: {item.raw!r}")
             continue
diff --git a/vdirsyncer/storage/base.py b/vdirsyncer/storage/base.py
index ee9acdf7512750c5e1e445aaf771ed2269861471..fcbc3795a1ce05fd13e61bba441fe28ab2cdf7ec 100644
--- a/vdirsyncer/storage/base.py
+++ b/vdirsyncer/storage/base.py
@@ -1,10 +1,10 @@
+from __future__ import annotations
+
 import contextlib
 import functools
 from abc import ABCMeta
 from abc import abstractmethod
 from typing import Iterable
-from typing import List
-from typing import Optional
 
 from vdirsyncer.vobject import Item
 
@@ -34,7 +34,6 @@ class StorageMeta(ABCMeta):
 
 
 class Storage(metaclass=StorageMeta):
-
     """Superclass of all storages, interface that all storages have to
     implement.
 
@@ -67,21 +66,37 @@ class Storage(metaclass=StorageMeta):
     # The machine-readable name of this collection.
     collection = None
 
+    # A value of True means deletions are never performed on this storage;
+    # the sync will skip them. A value of False means deletions are performed
+    # normally.
+    no_delete = False
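+    # Usually set from the storage config, e.g. no_delete = true.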
+
     # A value of True means the storage does not support write-methods such as
     # upload, update and delete.  A value of False means the storage does
     # support those methods.
     read_only = False
 
     # The attribute values to show in the representation of the storage.
-    _repr_attributes: List[str] = []
-
-    def __init__(self, instance_name=None, read_only=None, collection=None):
+    _repr_attributes: list[str] = []
+
+    def __init__(
+        self,
+        instance_name=None,
+        read_only=None,
+        no_delete=None,
+        collection=None,
+    ):
         if read_only is None:
             read_only = self.read_only
         if self.read_only and not read_only:
             raise exceptions.UserError("This storage can only be read-only.")
         self.read_only = bool(read_only)
 
+        if no_delete is None:
+            no_delete = self.no_delete
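+        # As with read_only above: a class-level no_delete cannot be switched
+        # back off via the storage config.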
+        if self.no_delete and not no_delete:
+            raise exceptions.UserError("Nothing can be deleted in this storage.")
+        self.no_delete = bool(no_delete)
+
         if collection and instance_name:
             instance_name = f"{instance_name}/{collection}"
         self.instance_name = instance_name
@@ -132,7 +147,7 @@ class Storage(metaclass=StorageMeta):
         )
 
     @abstractmethod
-    async def list(self) -> List[tuple]:
+    async def list(self) -> list[tuple]:
         """
         :returns: list of (href, etag)
         """
@@ -227,7 +242,7 @@ class Storage(metaclass=StorageMeta):
         """
         yield
 
-    async def get_meta(self, key: str) -> Optional[str]:
+    async def get_meta(self, key: str) -> str | None:
         """Get metadata value for collection/storage.
 
         See the vdir specification for the keys that *have* to be accepted.
@@ -237,7 +252,7 @@ class Storage(metaclass=StorageMeta):
         """
         raise NotImplementedError("This storage does not support metadata.")
 
-    async def set_meta(self, key: str, value: Optional[str]):
+    async def set_meta(self, key: str, value: str | None):
         """Set metadata value for collection/storage.
 
         :param key: The metadata key.
@@ -246,7 +261,7 @@ class Storage(metaclass=StorageMeta):
         raise NotImplementedError("This storage does not support metadata.")
 
 
-def normalize_meta_value(value) -> Optional[str]:
+def normalize_meta_value(value) -> str | None:
     # `None` is returned by iCloud for empty properties.
     if value is None or value == "None":
         return None
diff --git a/vdirsyncer/storage/dav.py b/vdirsyncer/storage/dav.py
index dcaf15e96c059081ab06e16c7b9a7ec451b298ec..697ebf05ed4c2b728a49649effeaafd69d0d2fcf 100644
--- a/vdirsyncer/storage/dav.py
+++ b/vdirsyncer/storage/dav.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import datetime
 import logging
 import urllib.parse as urlparse
@@ -5,8 +7,6 @@ import xml.etree.ElementTree as etree
 from abc import abstractmethod
 from inspect import getfullargspec
 from inspect import signature
-from typing import Optional
-from typing import Type
 
 import aiohttp
 import aiostream
@@ -92,8 +92,7 @@ def _parse_xml(content):
         return etree.XML(_clean_body(content))
     except etree.ParseError as e:
         raise InvalidXMLResponse(
-            "Invalid XML encountered: {}\n"
-            "Double-check the URLs in your config.".format(e)
+            f"Invalid XML encountered: {e}\nDouble-check the URLs in your config."
         )
 
 
@@ -128,7 +127,7 @@ class Discover:
 
     @property
     @abstractmethod
-    def _resourcetype(self) -> Optional[str]:
+    def _resourcetype(self) -> str | None:
         pass
 
     @property
@@ -198,9 +197,7 @@ class Discover:
             # E.g. Synology NAS
             # See https://github.com/pimutils/vdirsyncer/issues/498
             dav_logger.debug(
-                "No current-user-principal returned, re-using URL {}".format(
-                    response.url
-                )
+                f"No current-user-principal returned, re-using URL {response.url}"
             )
             return response.url.human_repr()
         return urlparse.urljoin(str(response.url), rv.text).rstrip("/") + "/"
@@ -342,7 +339,7 @@ class CalDiscover(Discover):
 
 class CardDiscover(Discover):
     _namespace = "urn:ietf:params:xml:ns:carddav"
-    _resourcetype: Optional[str] = "{%s}addressbook" % _namespace
+    _resourcetype: str | None = "{%s}addressbook" % _namespace
     _homeset_xml = b"""
     <propfind xmlns="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav">
         <prop>
@@ -451,7 +448,7 @@ class DAVStorage(Storage):
 
     @property
     @abstractmethod
-    def discovery_class(self) -> Type[Discover]:
+    def discovery_class(self) -> type[Discover]:
         """Discover subclass to use."""
 
     # The DAVSession class to use
@@ -649,9 +646,7 @@ class DAVStorage(Storage):
             contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None)
             if not self._is_item_mimetype(contenttype):
                 dav_logger.debug(
-                    "Skipping {!r}, {!r} != {!r}.".format(
-                        href, contenttype, self.item_mimetype
-                    )
+                    f"Skipping {href!r}, {contenttype!r} != {self.item_mimetype!r}."
                 )
                 continue
 
@@ -686,7 +681,7 @@ class DAVStorage(Storage):
         for href, etag, _prop in rv:
             yield href, etag
 
-    async def get_meta(self, key) -> Optional[str]:
+    async def get_meta(self, key) -> str | None:
         try:
             tagname, namespace = self._property_table[key]
         except KeyError:
@@ -831,9 +826,7 @@ class CalDAVStorage(DAVStorage):
                 start = start.strftime(CALDAV_DT_FORMAT)
                 end = end.strftime(CALDAV_DT_FORMAT)
 
-                timefilter = '<C:time-range start="{start}" end="{end}"/>'.format(
-                    start=start, end=end
-                )
+                timefilter = f'<C:time-range start="{start}" end="{end}"/>'
             else:
                 timefilter = ""
 
@@ -901,14 +894,21 @@ class CardDAVStorage(DAVStorage):
     item_mimetype = "text/vcard"
     discovery_class = CardDiscover
 
-    get_multi_template = """<?xml version="1.0" encoding="utf-8" ?>
+    def __init__(self, *args, use_vcard_4=False, **kwargs):
+        self.use_vcard_4 = use_vcard_4
+        super().__init__(*args, **kwargs)
+
+    @property
+    def get_multi_template(self):
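+        # Built per access so the REPORT body can ask the server to return
+        # vCard 4.0 (via the address-data attributes) when use_vcard_4 is set.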
+        ct = 'content-type="text/vcard" version="4.0"' if self.use_vcard_4 else ""
+        return f"""<?xml version="1.0" encoding="utf-8" ?>
             <C:addressbook-multiget xmlns="DAV:"
                     xmlns:C="urn:ietf:params:xml:ns:carddav">
                 <prop>
                     <getetag/>
-                    <C:address-data/>
+                    <C:address-data {ct}/>
                 </prop>
-                {hrefs}
+                {{hrefs}}
             </C:addressbook-multiget>"""
 
     get_multi_data_query = "{urn:ietf:params:xml:ns:carddav}address-data"
diff --git a/vdirsyncer/storage/filesystem.py b/vdirsyncer/storage/filesystem.py
index 4992bc113ae9c2352d4b3cc3b9dbb87fe47f2981..ec0ab52e333ff70cc5ef9f9a5071f16c0b9b4470 100644
--- a/vdirsyncer/storage/filesystem.py
+++ b/vdirsyncer/storage/filesystem.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import errno
 import logging
 import os
@@ -27,6 +29,7 @@ class FilesystemStorage(Storage):
         fileext,
         encoding="utf-8",
         post_hook=None,
+        pre_deletion_hook=None,
         fileignoreext=".tmp",
         **kwargs,
     ):
@@ -38,6 +41,7 @@ class FilesystemStorage(Storage):
         self.fileext = fileext
         self.fileignoreext = fileignoreext
         self.post_hook = post_hook
+        self.pre_deletion_hook = pre_deletion_hook
 
     @classmethod
     async def discover(cls, path, **kwargs):
@@ -164,6 +168,9 @@ class FilesystemStorage(Storage):
         actual_etag = get_etag_from_file(fpath)
         if etag != actual_etag:
             raise exceptions.WrongEtagError(etag, actual_etag)
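+        # Run the hook before the file is removed so it can still act on it,
+        # e.g. to make a backup copy.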
+        if self.pre_deletion_hook:
+            self._run_pre_deletion_hook(fpath)
+
         os.remove(fpath)
 
     def _run_post_hook(self, fpath):
@@ -173,6 +180,15 @@ class FilesystemStorage(Storage):
         except OSError as e:
             logger.warning(f"Error executing external hook: {str(e)}")
 
+    def _run_pre_deletion_hook(self, fpath):
+        logger.info(
+            f"Calling pre_deletion_hook={self.pre_deletion_hook} with argument={fpath}"
+        )
+        try:
+            subprocess.call([self.pre_deletion_hook, fpath])
+        except OSError as e:
+            logger.warning(f"Error executing external hook: {str(e)}")
+
     async def get_meta(self, key):
         fpath = os.path.join(self.path, key)
         try:
diff --git a/vdirsyncer/storage/google.py b/vdirsyncer/storage/google.py
index bc80cee52f2cc4ddd596c1440049fd4789ac08c8..93b784872cb6f123d548e032a99ec18f32dc90c5 100644
--- a/vdirsyncer/storage/google.py
+++ b/vdirsyncer/storage/google.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import json
 import logging
 import os
@@ -106,8 +108,8 @@ class GoogleSession(dav.DAVSession):
             pass
         except ValueError as e:
             raise exceptions.UserError(
-                "Failed to load token file {}, try deleting it. "
-                "Original error: {}".format(self._token_file, e)
+                f"Failed to load token file {self._token_file}, try deleting it. "
+                f"Original error: {e}"
             )
 
         if not self._token:
diff --git a/vdirsyncer/storage/google_helpers.py b/vdirsyncer/storage/google_helpers.py
index d85e730204537ed02357e683e015ab0b2fdbda85..ac333f48954e0e04689de92fbbfe384ca22e5400 100644
--- a/vdirsyncer/storage/google_helpers.py
+++ b/vdirsyncer/storage/google_helpers.py
@@ -2,15 +2,14 @@
 #
 # Based on:
 # https://github.com/googleapis/google-auth-library-python-oauthlib/blob/1fb16be1bad9050ee29293541be44e41e82defd7/google_auth_oauthlib/flow.py#L513
+from __future__ import annotations
 
 import logging
 import wsgiref.simple_server
 import wsgiref.util
 from typing import Any
 from typing import Callable
-from typing import Dict
 from typing import Iterable
-from typing import Optional
 
 logger = logging.getLogger(__name__)
 
@@ -29,7 +28,7 @@ class _RedirectWSGIApp:
     Stores the request URI and displays the given success message.
     """
 
-    last_request_uri: Optional[str]
+    last_request_uri: str | None
 
     def __init__(self, success_message: str):
         """
@@ -41,7 +40,7 @@ class _RedirectWSGIApp:
 
     def __call__(
         self,
-        environ: Dict[str, Any],
+        environ: dict[str, Any],
         start_response: Callable[[str, list], None],
     ) -> Iterable[bytes]:
         """WSGI Callable.
diff --git a/vdirsyncer/storage/http.py b/vdirsyncer/storage/http.py
index 177e769326b79f32f55f25d1df69223fc3c9066f..41d94e83094955352a7877b02736f4099f8404e1 100644
--- a/vdirsyncer/storage/http.py
+++ b/vdirsyncer/storage/http.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import urllib.parse as urlparse
 
 import aiohttp
@@ -34,7 +36,7 @@ class HttpStorage(Storage):
         auth_cert=None,
         *,
         connector,
-        **kwargs
+        **kwargs,
     ) -> None:
         super().__init__(**kwargs)
 
diff --git a/vdirsyncer/storage/memory.py b/vdirsyncer/storage/memory.py
index 9e60bcc291e1c0b71c03597121c27205a2ab6c72..d872312133d8b3f67bafbca59a32b73b97020a6f 100644
--- a/vdirsyncer/storage/memory.py
+++ b/vdirsyncer/storage/memory.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import random
 
 from .. import exceptions
diff --git a/vdirsyncer/storage/singlefile.py b/vdirsyncer/storage/singlefile.py
index 12f48bb0a81ef9f8e2c0ab295b61add9dd5955c9..982763295257f0236c6fb1533726a67aecd4c0cc 100644
--- a/vdirsyncer/storage/singlefile.py
+++ b/vdirsyncer/storage/singlefile.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import collections
 import contextlib
 import functools
@@ -177,11 +179,9 @@ class SingleFileStorage(Storage):
             self.path
         ):
             raise exceptions.PreconditionFailed(
-                (
-                    "Some other program modified the file {!r}. Re-run the "
-                    "synchronization and make sure absolutely no other program is "
-                    "writing into the same file."
-                ).format(self.path)
+                f"Some other program modified the file {self.path!r}. Re-run the "
+                "synchronization and make sure absolutely no other program is "
+                "writing into the same file."
             )
         text = join_collection(item.raw for item, etag in self._items.values())
         try:
diff --git a/vdirsyncer/sync/__init__.py b/vdirsyncer/sync/__init__.py
index 330a2204107caa16e6ec946e81780706e4acec28..25b46a320d0d1f966324a27cd56e59c58fed030d 100644
--- a/vdirsyncer/sync/__init__.py
+++ b/vdirsyncer/sync/__init__.py
@@ -9,6 +9,9 @@ Yang: http://blog.ezyang.com/2012/08/how-offlineimap-works/
 Some modifications to it are explained in
 https://unterwaditzer.net/2016/sync-algorithm.html
 """
+
+from __future__ import annotations
+
 import contextlib
 import itertools
 import logging
@@ -205,9 +208,7 @@ class Upload(Action):
             href = etag = None
         else:
             sync_logger.info(
-                "Copying (uploading) item {} to {}".format(
-                    self.ident, self.dest.storage
-                )
+                f"Copying (uploading) item {self.ident} to {self.dest.storage}"
             )
             href, etag = await self.dest.storage.upload(self.item)
             assert href is not None
@@ -243,7 +244,11 @@ class Delete(Action):
 
     async def _run_impl(self, a, b):
         meta = self.dest.status.get_new(self.ident)
-        if not self.dest.storage.read_only:
+        if self.dest.storage.read_only or self.dest.storage.no_delete:
+            sync_logger.debug(
+                f"Skipping deletion of item {self.ident} from {self.dest.storage}"
+            )
+        else:
             sync_logger.info(f"Deleting item {self.ident} from {self.dest.storage}")
             await self.dest.storage.delete(meta.href, meta.etag)
 
@@ -291,7 +296,7 @@ class ResolveConflict(Action):
                 )
 
 
-def _get_actions(a_info, b_info):
+def _get_actions(a_info: _StorageInfo, b_info: _StorageInfo):
     for ident in uniq(
         itertools.chain(
             a_info.status.parent.iter_new(), a_info.status.parent.iter_old()
diff --git a/vdirsyncer/sync/exceptions.py b/vdirsyncer/sync/exceptions.py
index 863d5a45882c88b708d07befffb754eae8232ddd..9fa5bf2a59332b88aecf1ce2c0226f6c0def2e2f 100644
--- a/vdirsyncer/sync/exceptions.py
+++ b/vdirsyncer/sync/exceptions.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from .. import exceptions
 
 
diff --git a/vdirsyncer/sync/status.py b/vdirsyncer/sync/status.py
index d0d2c10e6c50d991ad67032079138f93fba9e655..1f3f9100560dac0d77b616b800c111feecb1adfe 100644
--- a/vdirsyncer/sync/status.py
+++ b/vdirsyncer/sync/status.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import abc
 import contextlib
 import sqlite3
@@ -185,7 +187,7 @@ class SqliteStatus(_StatusBase):
                 self._c = new_c
                 yield
                 self._c.execute("DELETE FROM status")
-                self._c.execute("INSERT INTO status " "SELECT * FROM new_status")
+                self._c.execute("INSERT INTO status SELECT * FROM new_status")
                 self._c.execute("DELETE FROM new_status")
         finally:
             self._c = old_c
@@ -197,7 +199,7 @@ class SqliteStatus(_StatusBase):
             raise IdentAlreadyExists(old_href=old_props.href, new_href=a_props.href)
         b_props = self.get_new_b(ident) or ItemMetadata()
         self._c.execute(
-            "INSERT OR REPLACE INTO new_status " "VALUES(?, ?, ?, ?, ?, ?, ?)",
+            "INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
             (
                 ident,
                 a_props.href,
@@ -216,7 +218,7 @@ class SqliteStatus(_StatusBase):
             raise IdentAlreadyExists(old_href=old_props.href, new_href=b_props.href)
         a_props = self.get_new_a(ident) or ItemMetadata()
         self._c.execute(
-            "INSERT OR REPLACE INTO new_status " "VALUES(?, ?, ?, ?, ?, ?, ?)",
+            "INSERT OR REPLACE INTO new_status VALUES(?, ?, ?, ?, ?, ?, ?)",
             (
                 ident,
                 a_props.href,
@@ -230,14 +232,14 @@ class SqliteStatus(_StatusBase):
 
     def update_ident_a(self, ident, props):
         self._c.execute(
-            "UPDATE new_status" " SET href_a=?, hash_a=?, etag_a=?" " WHERE ident=?",
+            "UPDATE new_status SET href_a=?, hash_a=?, etag_a=? WHERE ident=?",
             (props.href, props.hash, props.etag, ident),
         )
         assert self._c.rowcount > 0
 
     def update_ident_b(self, ident, props):
         self._c.execute(
-            "UPDATE new_status" " SET href_b=?, hash_b=?, etag_b=?" " WHERE ident=?",
+            "UPDATE new_status SET href_b=?, hash_b=?, etag_b=? WHERE ident=?",
             (props.href, props.hash, props.etag, ident),
         )
         assert self._c.rowcount > 0
@@ -247,10 +249,10 @@ class SqliteStatus(_StatusBase):
 
     def _get_impl(self, ident, side, table):
         res = self._c.execute(
-            "SELECT href_{side} AS href,"
-            "       hash_{side} AS hash,"
-            "       etag_{side} AS etag "
-            "FROM {table} WHERE ident=?".format(side=side, table=table),
+            f"SELECT href_{side} AS href,"
+            f"       hash_{side} AS hash,"
+            f"       etag_{side} AS etag "
+            f"FROM {table} WHERE ident=?",
             (ident,),
         ).fetchone()
         if res is None:
@@ -298,14 +300,14 @@ class SqliteStatus(_StatusBase):
             return
 
         self._c.execute(
-            "INSERT OR REPLACE INTO new_status" " VALUES (?, ?, ?, ?, ?, ?, ?)",
+            "INSERT OR REPLACE INTO new_status VALUES (?, ?, ?, ?, ?, ?, ?)",
             (ident, a.href, b.href, a.hash, b.hash, a.etag, b.etag),
         )
 
     def _get_by_href_impl(self, href, default=(None, None), side=None):
         res = self._c.execute(
-            "SELECT ident, hash_{side} AS hash, etag_{side} AS etag "
-            "FROM status WHERE href_{side}=?".format(side=side),
+            f"SELECT ident, hash_{side} AS hash, etag_{side} AS etag "
+            f"FROM status WHERE href_{side}=?",
             (href,),
         ).fetchone()
         if not res:
@@ -326,7 +328,7 @@ class SqliteStatus(_StatusBase):
 
 
 class SubStatus:
-    def __init__(self, parent, side):
+    def __init__(self, parent: SqliteStatus, side: str):
         self.parent = parent
         assert side in "ab"
 
diff --git a/vdirsyncer/utils.py b/vdirsyncer/utils.py
index b4ae10239bae7e7156ee015fa3c70aa425405eb0..2e0fd9ce4920b2d0b5f10d71219eb23da16708e1 100644
--- a/vdirsyncer/utils.py
+++ b/vdirsyncer/utils.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import functools
 import os
 import sys
diff --git a/vdirsyncer/vobject.py b/vdirsyncer/vobject.py
index 2f18f69c7a27a04697a3828eda87cc87fedc242d..514822159ba2bd31904e900fc44dbbdea67446ff 100644
--- a/vdirsyncer/vobject.py
+++ b/vdirsyncer/vobject.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import hashlib
 from itertools import chain
 from itertools import tee
@@ -34,7 +36,6 @@ IGNORE_PROPS = (
 
 
 class Item:
-
     """Immutable wrapper class for VCALENDAR (VEVENT, VTODO) and
     VCARD"""
 
@@ -279,6 +280,12 @@ class _Component:
                     stack.append(cls(c_name, [], []))
                 elif line.startswith("END:"):
                     component = stack.pop()
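+                    # Require the END line to name the component opened by the
+                    # matching BEGIN; a mismatch indicates a malformed item.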
+                    c_name = line[len("END:") :].strip().upper()
+                    if c_name != component.name:
+                        raise ValueError(
+                            f"Got END:{c_name}, expected END:{component.name}"
+                            + f" at line {_i + 1}"
+                        )
                     if stack:
                         stack[-1].subcomponents.append(component)
                     else:
@@ -289,6 +296,11 @@ class _Component:
         except IndexError:
             raise ValueError(f"Parsing error at line {_i + 1}")
 
+        if stack:
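+            # Anything left on the stack is a BEGIN without a matching END.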
+            raise ValueError(
+                f"Missing END for component(s): {', '.join(c.name for c in stack)}"
+            )
+
         if multiple:
             return rv
         elif len(rv) != 1: