diff --git a/debian/gitlab-ci.yml b/debian/gitlab-ci.yml
index 478e1fea8e0e09265a7091c866ba731c65ca1485..bd4c8f7318dbbaec22c622d524f55d50a9767b63 100644
--- a/debian/gitlab-ci.yml
+++ b/debian/gitlab-ci.yml
@@ -3,7 +3,6 @@ include:
       file: '/steam-gitlab-ci-common.yml'
 
 variables:
-    BUILD_IMAGE: registry.gitlab.steamos.cloud/steamrt/scout/sdk:beta
     STEAM_CI_USE_BINARIES_FROM: build
     STEAM_CI_DEPENDENCIES: >-
         debhelper
@@ -51,14 +50,37 @@ variables:
     # different devices
     STEAM_CI_TMPDIR: "${CI_PROJECT_DIR}/debian/tmpdir"
 
+    BUILD_IMAGE: '${SCOUT_DOCKER_REGISTRY}/${SCOUT_DOCKER_IMAGE}'
+
+    # The following variables need to be configured in
+    # https://gitlab.steamos.cloud/groups/steamrt/-/settings/ci_cd
+
+    # Hostname of the machine that receives pressure-vessel releases
+    PRESSURE_VESSEL_CI_UPLOAD_HOST: ''
+    # Create a File variable with the public key(s) of P_V_CI_UPLOAD_HOST,
+    # in the usual ~/.ssh/known_hosts format:
+    # upload-host.example ssh-rsa AAA...
+    # upload-host.example ecdsa-sha2-nistp256 AAA...
+    # upload-host.example ssh-ed25519 AAA...
+    PRESSURE_VESSEL_CI_UPLOAD_HOST_SSH_PUBLIC_KEYS_FILE: ''
+    # Path on P_V_CI_UPLOAD_HOST, for example /srv/VHOST/www/pressure-vessel/snapshots
+    PRESSURE_VESSEL_CI_UPLOAD_PATH: ''
+    # A similar path on P_V_CI_UPLOAD_HOST used for unreleased test builds
+    PRESSURE_VESSEL_CI_UPLOAD_PLAYGROUND_PATH: ''
+    # User to log in as on P_V_CI_UPLOAD_HOST
+    PRESSURE_VESSEL_CI_UPLOAD_USER: ''
+    # Create a File variable with a private key authorized for P_V_CI_UPLOAD_USER
+    PRESSURE_VESSEL_CI_UPLOAD_SSH_PRIVATE_KEY_FILE: ''
+
 stages:
     - build
     - relocatable-install
+    - deploy
     - test
 
 package:
     extends: .build_package
     variables:
+        BUILD_IMAGE: '${SCOUT_DOCKER_REGISTRY}/${SCOUT_DOCKER_IMAGE}'
         STEAM_CI_DEB_BUILD: full    # debuild --build=full, aka -F
         STEAM_CI_INSTALL_SCRIPT: |
             apt-get -y install pkg-create-dbgsym
@@ -66,7 +88,7 @@ package:
 package:i386:
     extends: .build_package
     variables:
-        BUILD_IMAGE: registry.gitlab.steamos.cloud/steamrt/scout/sdk/i386:beta
+        BUILD_IMAGE: '${SCOUT_DOCKER_REGISTRY}/${SCOUT_I386_DOCKER_IMAGE}'
         STEAM_CI_DEB_BUILD: any     # debuild --build=any, aka -B
         STEAM_CI_INSTALL_SCRIPT: |
             apt-get -y install pkg-create-dbgsym
@@ -199,7 +221,7 @@ build:scout-i386:
             - _build/scout-i386/meson-logs/*.txt
         when: always
 
-relocatable-install:
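+# Build a relocatable install directly from the output of build:scout,
+# without installing the .deb packages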
+relocatable-install:debug:
     stage: relocatable-install
     needs:
         - build:scout
@@ -213,30 +235,85 @@ relocatable-install:
         - |
             apt-get -y --no-install-recommends install \
             bubblewrap \
-            libglib2.0-dev \
-            libxau-dev \
+            libglib2.0-dev:amd64 \
+            libglib2.0-dev:i386 \
+            libxau-dev:amd64 \
+            libxau-dev:i386 \
             meson \
+            waffle-utils-multiarch:amd64 \
+            waffle-utils-multiarch:i386 \
             ${NULL+}
 
-            rm -fr _build/scout/relocatable-install
+            rm -fr _build/debug/relocatable-install
+            mkdir -p _build/debug
             _build/scout/prefix/lib/pressure-vessel/relocatable/bin/pressure-vessel-build-relocatable-install \
-              --output _build/scout/relocatable-install \
-              --archive "$(pwd)/_build/scout" \
+              --output _build/debug/relocatable-install \
+              --archive "$(pwd)/_build/debug" \
+              ${CI_ALLOW_MISSING_SOURCES:+--allow-missing-sources} \
+              ${NULL+}
+            prove -epython3.5 -v ./tests/pressure-vessel/relocatable-install.py :: \
+              "$(pwd)/_build/debug/relocatable-install"
+    artifacts:
+        paths:
+            - _build/debug/pressure-vessel-*-bin.tar.gz
+            - _build/debug/pressure-vessel-*-bin+src.tar.gz
+        when: on_success
+
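+# Build the relocatable install from the .deb packages produced by the
+# package jobs; this is the version that the deploy job uploads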
+relocatable-install:production:
+    stage: relocatable-install
+    needs:
+        - package
+        - package:i386
+    tags:
+        - docker
+        - linux
+    image: "${SCOUT_DOCKER_REGISTRY}/${SCOUT_DOCKER_IMAGE}"
+    script:
+        - *prepare_scout
+        - |
+            apt-get -y --no-install-recommends install \
+            bubblewrap \
+            libglib2.0-dev:amd64 \
+            libglib2.0-dev:i386 \
+            libxau-dev:amd64 \
+            libxau-dev:i386 \
+            meson \
+            waffle-utils-multiarch:amd64 \
+            waffle-utils-multiarch:i386 \
+            ${NULL+}
+
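+            # Install the packages built by package and package:i386 so that
+            # the relocatable install is assembled from them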
+            dpkg -i \
+            debian/tmp/artifacts/build/*_amd64.*deb \
+            debian/tmp/artifacts/build/libsteam-runtime-tools-0-0-dbgsym_*_i386.*deb \
+            debian/tmp/artifacts/build/libsteam-runtime-tools-0-0_*_i386.deb \
+            debian/tmp/artifacts/build/libsteam-runtime-tools-0-dev_*_i386.deb \
+            debian/tmp/artifacts/build/libsteam-runtime-tools-0-helpers-dbgsym_*_i386.*deb \
+            debian/tmp/artifacts/build/libsteam-runtime-tools-0-helpers_*_i386.deb \
+            debian/tmp/artifacts/build/libsteam-runtime-tools-0-relocatable-libs_*_i386.deb \
+            debian/tmp/artifacts/build/pressure-vessel-libs-i386_*_i386.deb \
+            ${NULL+}
+
+            rm -fr _build/production/relocatable-install
+            mkdir -p _build/production
+            /usr/lib/pressure-vessel/relocatable/bin/pressure-vessel-build-relocatable-install \
+              --output _build/production/relocatable-install \
+              --archive "$(pwd)/_build/production" \
+              --no-archive-versions \
               ${CI_ALLOW_MISSING_SOURCES:+--allow-missing-sources} \
               ${NULL+}
             prove -epython3.5 -v ./tests/pressure-vessel/relocatable-install.py :: \
-              "$(pwd)/_build/scout/relocatable-install"
+              "$(pwd)/_build/production/relocatable-install"
     artifacts:
         paths:
-            - _build/scout/pressure-vessel-*-bin.tar.gz
-            - _build/scout/pressure-vessel-*-bin+src.tar.gz
+            - _build/production/pressure-vessel-bin.tar.gz
+            - _build/production/pressure-vessel-bin+src.tar.gz
         when: on_success
 
 .prepare_test: &prepare_test
     - |
         set -eux
 
-        PRESSURE_VESSEL=$(ls _build/scout/pressure-vessel-*-bin.tar.gz | head -1)
+        PRESSURE_VESSEL=_build/production/pressure-vessel-bin.tar.gz
         mkdir -p _build/depot-template/common
 
         if [ -n "${IMAGES_DOWNLOAD_URL}" ] && [ -n "${IMAGES_DOWNLOAD_CREDENTIAL}" ]; then
@@ -276,7 +353,7 @@ relocatable-install:
 
 .test_template:
     needs:
-        - relocatable-install
+        - relocatable-install:production
     stage: test
     tags:
         - docker
@@ -396,4 +473,65 @@ autopkgtest:
             ${NULL+}
             apt-get -y -f install
 
+deploy:
+    stage: deploy
+    tags:
+        - docker
+        - linux
+    needs:
+        - package
+        - package:i386
+        - relocatable-install:production
+    rules:
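+        # Do not deploy unless the upload destination is fully configured;
+        # deploy automatically for tags, and only on request otherwise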
+        - if: '$DEVEL_DOCKER_REGISTRY == ""'
+          when: never
+        - if: '$DEVEL_DOCKER_IMAGE == ""'
+          when: never
+        - if: '$PRESSURE_VESSEL_CI_UPLOAD_HOST == ""'
+          when: never
+        - if: '$PRESSURE_VESSEL_CI_UPLOAD_HOST_SSH_PUBLIC_KEYS_FILE == ""'
+          when: never
+        - if: '$CI_COMMIT_TAG && $PRESSURE_VESSEL_CI_UPLOAD_PATH == ""'
+          when: never
+        - if: >-
+            $PRESSURE_VESSEL_CI_UPLOAD_PATH == ""
+            && $PRESSURE_VESSEL_CI_UPLOAD_PLAYGROUND_PATH == ""
+          when: never
+        - if: '$PRESSURE_VESSEL_CI_UPLOAD_USER == ""'
+          when: never
+        - if: '$PRESSURE_VESSEL_CI_UPLOAD_SSH_PRIVATE_KEY_FILE == ""'
+          when: never
+        - if: '$CI_COMMIT_TAG'
+          when: always
+        - when: manual
+    image: "${DEVEL_DOCKER_REGISTRY}/${DEVEL_DOCKER_IMAGE}"
+    variables:
+        STEAM_CI_DEPENDENCIES: >-
+            openssh-client
+    script:
+        - |
+            if ! [ -f "$PRESSURE_VESSEL_CI_UPLOAD_HOST_SSH_PUBLIC_KEYS_FILE" ]; then
+                echo "PRESSURE_VESSEL_CI_UPLOAD_HOST_SSH_PUBLIC_KEYS_FILE must be of type File" >&2
+                exit 1
+            fi
+            if ! [ -f "$PRESSURE_VESSEL_CI_UPLOAD_SSH_PRIVATE_KEY_FILE" ]; then
+                echo "PRESSURE_VESSEL_CI_UPLOAD_SSH_PRIVATE_KEY_FILE must be of type File" >&2
+                exit 1
+            fi
+            chmod 0600 "$PRESSURE_VESSEL_CI_UPLOAD_SSH_PRIVATE_KEY_FILE"
+
+            if [ -n "${CI_COMMIT_TAG-}" ]; then
+                path="$PRESSURE_VESSEL_CI_UPLOAD_PATH"
+            else
+                path="$PRESSURE_VESSEL_CI_UPLOAD_PLAYGROUND_PATH"
+            fi
+
+            ./pressure-vessel/upload-artifacts.py \
+            --host="$PRESSURE_VESSEL_CI_UPLOAD_HOST" \
+            --path="$path" \
+            --login="$PRESSURE_VESSEL_CI_UPLOAD_USER" \
+            --ssh-known-hosts="$PRESSURE_VESSEL_CI_UPLOAD_HOST_SSH_PUBLIC_KEYS_FILE" \
+            --ssh-private-key="$PRESSURE_VESSEL_CI_UPLOAD_SSH_PRIVATE_KEY_FILE" \
+            ${NULL+}
+
 # vim:set sw=4 sts=4 et:
diff --git a/pressure-vessel/THIRD-PARTY.md b/pressure-vessel/THIRD-PARTY.md
index 54d79303c9c36432ee5c6eff2d09909143662b83..a45aedbe7dfe4d11d4c0fc72fa2230f580cef88d 100644
--- a/pressure-vessel/THIRD-PARTY.md
+++ b/pressure-vessel/THIRD-PARTY.md
@@ -29,7 +29,7 @@ programs and libraries:
 - bubblewrap: [LGPL-2.0-or-later][]. See bubblewrap.txt.
 
 - libblkid.so.1, libmount.so.1 from util-linux: [LGPL-2.1-or-later][].
-  See util-linux.txt.
+  See libblkid1.txt, libmount1.txt.
 
 - libffi.so.6 from libffi: [MIT][]. See libffi.txt.
 
diff --git a/pressure-vessel/build-relocatable-install.py b/pressure-vessel/build-relocatable-install.py
index aa19dcdbc56b634129fdb7f887bfbef5a9099f8c..159916879da7de912e09e29c8e2608ad4c4e27f8 100755
--- a/pressure-vessel/build-relocatable-install.py
+++ b/pressure-vessel/build-relocatable-install.py
@@ -85,6 +85,7 @@ DEPENDENCIES = {
 WRAPPED_PROGRAMS = {
     'bwrap': 'bubblewrap',
 }
+# Debian binary package => source package, in the same format as DEPENDENCIES
 PRIMARY_ARCH_DEPENDENCIES = {
     'bubblewrap': 'bubblewrap',
     'libblkid1': 'util-linux',
@@ -97,9 +98,15 @@ PRIMARY_ARCH_DEPENDENCIES = {
     'libselinux1': 'libselinux',
     'libxau6': 'libxau',
 }
+# executable => binary package in DEPENDENCIES
 HELPERS = {
     'wflinfo': 'waffle-utils-multiarch',
 }
+# Source packages whose binary packages can each have a different
+# copyright file
+DIFFERENT_COPYRIGHT_FILES = [
+    'util-linux',
+]
 SCRIPTS = [
     'pressure-vessel-locale-gen',
     'pressure-vessel-test-ui',
@@ -522,14 +529,24 @@ def main():
             if os.path.exists('/usr/share/doc/{}/copyright'.format(package)):
                 installed_binaries.add(package)
 
-                install(
-                    '/usr/share/doc/{}/copyright'.format(package),
-                    os.path.join(
-                        installation,
-                        'metadata',
-                        '{}.txt'.format(source),
-                    ),
-                )
+                if source in DIFFERENT_COPYRIGHT_FILES:
+                    copyright_name = package
+                else:
+                    copyright_name = source
+
+                install(
+                    '/usr/share/doc/{}/copyright'.format(package),
+                    os.path.join(
+                        installation,
+                        'metadata',
+                        '{}.txt'.format(copyright_name),
+                    ),
+                )
 
                 for expr in set(
                     v_check_output([
diff --git a/pressure-vessel/upload-artifacts.py b/pressure-vessel/upload-artifacts.py
new file mode 100755
index 0000000000000000000000000000000000000000..29eed3f080691da33688cf2826756584efffa039
--- /dev/null
+++ b/pressure-vessel/upload-artifacts.py
@@ -0,0 +1,394 @@
+#!/usr/bin/env python3
+
+# Copyright © 2018-2021 Collabora Ltd.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import argparse
+import contextlib
+import hashlib
+import logging
+import os
+import shlex
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+import typing
+from pathlib import Path
+
+
+logger = logging.getLogger('pressure-vessel.deploy')
+
+COMMAND = typing.Union[str, typing.List[str]]
+
+
+@contextlib.contextmanager
+def RemoteTemporaryDirectory(
+    ssh: typing.List[str],
+    parent: typing.Optional[str] = None,
+):
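+    """
+    Create a temporary directory on the remote host, and delete it
+    when leaving the context.
+    """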
+    argv = ssh + [
+        'mktemp', '-d',
+    ]
+
+    if parent is not None:
+        argv.append('-p')
+        argv.append(parent)
+
+    tmpdir = subprocess.check_output(
+        argv,
+        universal_newlines=True,
+    ).strip('\n')
+
+    try:
+        yield tmpdir
+    finally:
+        subprocess.call(ssh + [
+            'rm', '-fr', tmpdir,
+        ])
+
+
+@contextlib.contextmanager
+def SshMaster(
+    ssh: typing.List[str],
+):
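+    """
+    Open a multiplexed ssh control master ('ssh -M') and keep it
+    running for the lifetime of the context.
+    """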
+    logger.debug('Opening persistent ssh connection...')
+
+    env = dict(os.environ)
+
+    process = subprocess.Popen(
+        ssh + ['-M', 'cat'],
+        env=env,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.DEVNULL,
+    )
+
+    try:
+        assert process.stdin is not None
+        with process.stdin:
+            yield
+    finally:
+        logger.debug('Closing persistent ssh connection...')
+        process.wait()
+        logger.debug('Closed persistent ssh connection')
+
+
+class Uploader:
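+    """
+    Upload pressure-vessel release artifacts to a remote host via
+    ssh and rsync.
+    """
+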
+    def __init__(
+        self,
+        host: str,
+        path: str,
+        login: str,
+        dry_run: bool,
+        ssh_known_hosts: typing.Optional[str] = None,
+        ssh_private_key: typing.Optional[str] = None,
+    ) -> None:
+        self.host = host
+        self.basedir = path
+        self.login = login
+        self.dry_run = dry_run
+
+        self.ssh_target = '{}@{}'.format(self.login, self.host)
+        self.ssh_known_hosts = ssh_known_hosts
+        self.ssh_private_key = ssh_private_key
+
+        self.stack = contextlib.ExitStack()
+        self.local_tmpdir = None    # type: typing.Optional[str]
+        self.ssh = ['false']        # type: typing.List[str]
+        self.remote_tmpdir = None   # type: typing.Optional[str]
+
+    def __enter__(self) -> 'Uploader':
+        self.local_tmpdir = self.stack.enter_context(
+            tempfile.TemporaryDirectory()
+        )
+        assert self.local_tmpdir is not None
+        self.ssh = [
+            'ssh',
+            '-oControlPath={}/socket'.format(self.local_tmpdir),
+        ]
+
+        if self.ssh_known_hosts is not None:
+            self.ssh.append('-oUserKnownHostsFile=' + self.ssh_known_hosts)
+
+        if self.ssh_private_key is not None:
+            self.ssh.extend(['-i', self.ssh_private_key])
+
+        self.ssh.append(self.ssh_target)
+        self.stack.enter_context(SshMaster(self.ssh))
+        self.remote_tmpdir = self.stack.enter_context(
+            RemoteTemporaryDirectory(self.ssh)
+        )
+        return self
+
+    def __exit__(self, *exc) -> None:
+        self.stack.__exit__(*exc)
+
+    def remote_command(
+        self,
+        command: COMMAND,
+        chdir=True,
+        shell=False,
+    ) -> str:
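+        """
+        Prefix 'command' with 'set -eu; umask 0022;' and, when chdir
+        is true, a 'cd' into the base directory.
+        """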
+        preamble = textwrap.dedent('''\
+            set -eu;
+            umask 0022;
+        ''')
+
+        if chdir:
+            preamble = preamble + 'cd {};\n'.format(shlex.quote(self.basedir))
+
+        if shell:
+            assert isinstance(command, str)
+            return preamble + command
+        else:
+            assert isinstance(command, list)
+            return preamble + ' '.join(map(shlex.quote, command))
+
+    def popen(self, command: COMMAND, chdir=True, shell=False, **kwargs):
+        logger.debug('remote: %s', command)
+        if self.dry_run:
+            return None
+        else:
+            return subprocess.Popen(self.ssh + [
+                self.remote_command(command, chdir=chdir, shell=shell),
+            ], **kwargs)
+
+    def check_call(
+        self,
+        command: COMMAND,
+        chdir=True,
+        shell=False,
+        **kwargs
+    ) -> None:
+        logger.debug('remote: %s', command)
+        if not self.dry_run:
+            subprocess.check_call(self.ssh + [
+                self.remote_command(command, chdir=chdir, shell=shell),
+            ], **kwargs)
+
+    def check_output(
+        self,
+        command: COMMAND,
+        chdir=True,
+        shell=False,
+        **kwargs
+    ):
+        logger.debug('remote: %s', command)
+        if self.dry_run:
+            return None
+        else:
+            return subprocess.check_output(self.ssh + [
+                self.remote_command(command, chdir=chdir, shell=shell),
+            ], **kwargs)
+
+    def call(self, command: COMMAND, chdir=True, shell=False, **kwargs):
+        logger.debug('remote: %s', command)
+        return subprocess.call(self.ssh + [
+            self.remote_command(command, chdir=chdir, shell=shell),
+        ], **kwargs)
+
+    def run(self):
+        with self:
+            self.check_call([
+                'mkdir', '-p', self.basedir,
+            ], chdir=False)
+
+            self.upload()
+
+    def upload(self) -> None:
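+        """
+        Assemble the release tree in _build/upload, upload it with
+        rsync, verify the checksums on the remote side and update the
+        'latest' symbolic link.
+        """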
+        assert self.local_tmpdir is not None
+        assert self.remote_tmpdir is not None
+
+        upload = Path('_build', 'upload')
+
+        with contextlib.suppress(FileNotFoundError):
+            shutil.rmtree(str(upload))
+
+        upload.mkdir()
+        sources = Path('_build', 'upload', 'sources')
+        sources.mkdir()
+
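+        # 'dcmd ln' hard-links the .dsc itself and every file it references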
+        for a in Path('debian', 'tmp', 'artifacts', 'build').iterdir():
+            if str(a).endswith('.dsc'):
+                subprocess.check_call([
+                    'dcmd', 'ln', str(a), str(sources),
+                ])
+
+        a = Path('_build', 'production', 'pressure-vessel-bin.tar.gz')
+        os.link(str(a), upload / a.name)
+
+        a = Path('_build', 'production', 'pressure-vessel-bin+src.tar.gz')
+
+        # Unpack sources/*.{dsc,tar.*,txt,...} into sources/
+        with tarfile.open(str(a), 'r') as unarchiver:
+            for member in unarchiver:
+                parts = member.name.split('/')
+
+                if (
+                    member.isfile()
+                    and len(parts) >= 2
+                    and parts[-2] == 'sources'
+                ):
+                    extract = unarchiver.extractfile(member)
+                    assert extract is not None
+                    with extract:
+                        with open(
+                            str(sources / parts[-1]), 'wb'
+                        ) as writer:
+                            shutil.copyfileobj(extract, writer)
+
+        os.link(str(sources / 'VERSION.txt'), str(upload / 'VERSION.txt'))
+
+        to_hash: typing.List[str] = []
+
+        for dirpath, dirnames, filenames in os.walk(str(upload)):
+            relpath = Path(dirpath).relative_to(upload)
+
+            for f in filenames:
+                to_hash.append(str(Path(relpath, f)))
+
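+        # Record checksums in a format that 'sha256sum -c' can verify later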
+        with open(str(upload / 'SHA256SUMS'), 'w') as text_writer:
+            for f in sorted(to_hash):
+                hasher = hashlib.sha256()
+
+                with open(str(upload / f), 'rb') as binary_reader:
+                    while True:
+                        blob = binary_reader.read(4096)
+
+                        if not blob:
+                            break
+
+                        hasher.update(blob)
+
+                text_writer.write('{} *{}\n'.format(hasher.hexdigest(), f))
+
+        with open(str(upload / 'VERSION.txt')) as reader:
+            version = reader.read().strip()
+
+        self.check_call([
+            'mkdir', version,
+        ])
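+        # Start from hard-links to the previous release's sources, so that
+        # unchanged source tarballs do not need to be uploaded again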
+        self.check_call(
+            'if [ -d latest ]; then cp -al latest/sources {}; fi'.format(
+                shlex.quote(version)
+            ),
+            shell=True,
+        )
+
+        logger.info('Uploading artifacts using rsync...')
+
+        for argv in [
+            # First pass: upload with --size-only to preserve hard-links
+            # among source tarballs, excluding *.txt because they might
+            # legitimately change without their size changing
+            [
+                'rsync',
+                '--rsh', ' '.join(map(shlex.quote, self.ssh[:-1])),
+                '--chmod=a+rX,og-w',
+                '--delete',
+                '--exclude=*.txt',
+                '--links',
+                '--partial',
+                '--perms',
+                '--recursive',
+                '--size-only',
+                '--verbose',
+                '_build/upload/sources/',
+                '{}:{}/{}/sources/'.format(
+                    self.ssh_target, self.basedir, version,
+                ),
+            ],
+            # Second pass: upload everything except Debian source packages
+            # without --size-only
+            [
+                'rsync',
+                '--rsh', ' '.join(map(shlex.quote, self.ssh[:-1])),
+                '--chmod=a+rX,og-w',
+                '--delete',
+                '--exclude=*.debian.tar.*',
+                '--exclude=*.diff.gz',
+                '--exclude=*.dsc',
+                '--exclude=*.orig.tar.*',
+                '--links',
+                '--partial',
+                '--perms',
+                '--recursive',
+                '--verbose',
+                '_build/upload/',
+                '{}:{}/{}/'.format(self.ssh_target, self.basedir, version),
+            ]
+        ]:
+            if self.dry_run:
+                logger.info('Would run: %r', argv)
+            else:
+                logger.info('%r', argv)
+                subprocess.check_call(argv)
+
+        # Check that our rsync options didn't optimize away a change that
+        # should have happened
+        with open(str(upload / 'SHA256SUMS')) as reader:
+            self.check_call([
+                'env', '--chdir', '{}'.format(version),
+                'sha256sum', '--strict', '--quiet', '-c',
+            ], stdin=reader)
+
+        self.check_call([
+            'ln', '-fns', version, 'latest',
+        ])
+
+
+def main() -> None:
+    if sys.stderr.isatty():
+        try:
+            import colorlog
+        except ImportError:
+            logging.basicConfig()
+        else:
+            formatter = colorlog.ColoredFormatter(
+                '%(log_color)s%(levelname)s:%(name)s:%(reset)s %(message)s')
+            handler = logging.StreamHandler()
+            handler.setFormatter(formatter)
+            logging.getLogger().addHandler(handler)
+    else:
+        logging.basicConfig()
+
+    logging.getLogger().setLevel(logging.DEBUG)
+
+    parser = argparse.ArgumentParser(
+        description='Upload a pressure-vessel release'
+    )
+
+    parser.add_argument('--host', required=True)
+    parser.add_argument('--path', required=True)
+    parser.add_argument('--login', required=True)
+    parser.add_argument('--dry-run', action='store_true')
+    parser.add_argument('--ssh-known-hosts', default=None)
+    parser.add_argument('--ssh-private-key', default=None)
+    args = parser.parse_args()
+    Uploader(**vars(args)).run()
+
+
+if __name__ == '__main__':
+    main()