diff --git a/build-relocatable-install.py b/build-relocatable-install.py
index 3238f1ec0a0d90217d08a9ec79d159c1b0717de5..d0bc4ab253464c9ea8b58af87c9d05fa64b68049 100755
--- a/build-relocatable-install.py
+++ b/build-relocatable-install.py
@@ -220,7 +220,9 @@ def main():
 
     if args.destdir:
         args.prefix = args.destdir + args.prefix
-        args.srcdir = args.destdir + args.srcdir
+
+        if os.path.exists(args.destdir + args.srcdir):
+            args.srcdir = args.destdir + args.srcdir
 
     if args.archive is None and args.output is None:
         parser.error('Either --archive or --output is required')
diff --git a/meson.build b/meson.build
index c9d320c0f131f3418438772a30aac32344394b9a..e29eb930e14ac0cce9bd53674cbff1aaca85127f 100644
--- a/meson.build
+++ b/meson.build
@@ -26,6 +26,7 @@ project(
   default_options: [
     'c_std=c99',
     'cpp_std=c++11',
+    'prefix=/usr/lib/pressure-vessel/relocatable',
     'warning_level=2',
   ],
 )
diff --git a/meson_options.txt b/meson_options.txt
index 8004724c56d86ec8a5e08eff4d6d72adc2b9d09c..bb52e5149ee8fec2307ed9334d44974223c476be 100644
--- a/meson_options.txt
+++ b/meson_options.txt
@@ -1,4 +1,5 @@
 option('man', type : 'boolean', value : false, description : 'enable man pages')
 option('python', type : 'string', value : '')
 option('srcdir', type : 'string', value : '')
+option('test_containers_dir', type : 'string', value : '')
 option('version', type : 'string', value : 'auto')
diff --git a/tests/containers.py b/tests/containers.py
new file mode 100755
index 0000000000000000000000000000000000000000..7be681096170991204666dce2e9c774117e04f29
--- /dev/null
+++ b/tests/containers.py
@@ -0,0 +1,402 @@
+#!/usr/bin/env python3
+# Copyright 2020 Collabora Ltd.
+#
+# SPDX-License-Identifier: MIT
+
+"""
+Test pressure-vessel against an out-of-band set of pre-prepared containers.
+
+To run during build-time testing, build with
+-Dtest_containers_dir=/path/to/containers, where /path/to/containers
+ideally contains at least:
+
+* pressure-vessel:
+    A sufficiently recent copy of pressure-vessel, used for the parts
+    that must run inside the container (because we cannot assume that
+    an arbitrary build done on the host system is compatible with the
+    libraries inside the container)
+* steam-runtime:
+    An LD_LIBRARY_PATH runtime, which we use to find versions of
+    steam-runtime-system-info and capsule-capture-libs that can run
+    on the host system
+* scout/files:
+    The Platform merged-/usr from the SteamLinuxRuntime depot
+* scout_sysroot:
+    An SDK sysroot like the one recommended for the Docker container
+
+and run (for example) 'meson test -v -C _build' as usual.
+
+The same test can also be run against a version of pressure-vessel
+that was built against scout:
+
+    export PRESSURE_VESSEL_TEST_CONTAINERS=/path/to/containers
+    ./sysroot/run-in-sysroot.py --sysroot ../scout-sysroot -- \
+        ninja -C _build-for-sysroot
+    env DESTDIR="$(pwd)/_build-for-sysroot/DESTDIR" \
+        ./sysroot/run-in-sysroot.py --sysroot ../scout-sysroot -- \
+        ninja -C _build-for-sysroot install
+    rm -fr "$PRESSURE_VESSEL_TEST_CONTAINERS/pressure-vessel"
+    env DESTDIR="$(pwd)/_build-for-sysroot/DESTDIR" \
+        ./sysroot/run-in-sysroot.py --sysroot ../scout-sysroot -- \
+        python3.5 ./build-relocatable-install.py \
+        --output="$PRESSURE_VESSEL_TEST_CONTAINERS/pressure-vessel" \
+        --check-source-directory="$PRESSURE_VESSEL_TEST_CONTAINERS" \
+        --allow-missing-sources \
+        --srcdir="$(pwd)" --set-version "$(git describe)"
+    ./tests/containers.py
+
+or against the SteamLinuxRuntime depot that gets uploaded to the Steam CDN:
+
+    export PRESSURE_VESSEL_TEST_CONTAINERS=../SteamLinuxRuntime/depot
+    ./tests/containers.py
+
+Influential environment variables:
+
+* AUTOPKGTEST_ARTIFACTS:
+    Write test artifacts to this directory (borrowed from Debian's
+    autopkgtest framework) instead of a temporary directory. This makes
+    debugging easier.
+* BWRAP:
+    A bubblewrap executable
+* G_TEST_SRCDIR:
+    The ./tests subdirectory of the source root, typically $(pwd)/tests
+* G_TEST_BUILDDIR:
+    The ./tests subdirectory of the build root, typically $(pwd)/_build/tests
+* PRESSURE_VESSEL_LIBCAPSULE_TOOLS:
+    Override the location of capsule-capture-libs etc.
+* PRESSURE_VESSEL_TEST_CONTAINERS:
+    The directory of pre-prepared containers described above, which
+    should include a complete relocatable pressure-vessel installation
+    and its dependencies such as libsteam-runtime-tools
+* PRESSURE_VESSEL_UNINSTALLED:
+    Set when running from the source/build trees
+* STEAM_RUNTIME_SYSTEM_INFO:
+    Path to a steam-runtime-system-info executable for the host system
+
+Please keep this script compatible with python3.4 so that it can be
+run on the oldest platforms where pressure-vessel works:
+SteamOS 2 'brewmaster', Debian 8 'jessie', Ubuntu 14.04 'trusty'.
+"""
+
+import contextlib
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import unittest
+
+try:
+    import typing
+    typing      # placate pyflakes
+except ImportError:
+    pass
+
+from testutils import (
+    BaseTest,
+    run_subprocess,
+    tee_file_and_stderr,
+    test_main,
+)
+
+
+logger = logging.getLogger('test-containers')
+
+
+class TestContainers(BaseTest):
+    bwrap = None            # type: typing.Optional[str]
+    containers_dir = ''
+    pv_dir = ''
+    pv_wrap = ''
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        super().setUpClass()
+
+        if not os.environ.get('PRESSURE_VESSEL_TEST_CONTAINERS', ''):
+            raise unittest.SkipTest('Containers not available')
+
+        bwrap = os.environ.get('BWRAP', shutil.which('bwrap'))
+
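+        # Check that bubblewrap can set up even a trivial sandbox:
+        # this can fail if we are already running inside a container.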
+        if bwrap is not None and subprocess.run(
+            [bwrap, '--dev-bind', '/', '/', 'sh', '-c', 'true'],
+            stdout=2,
+            stderr=2,
+        ).returncode != 0:
+            # Right now this means we will skip all the tests, but in
+            # future we will be able to do some tests that just do the
+            # setup and do not actually go as far as running the container.
+            cls.bwrap = None
+        else:
+            cls.bwrap = bwrap
+
+        cls.containers_dir = os.path.abspath(
+            os.environ['PRESSURE_VESSEL_TEST_CONTAINERS']
+        )
+
+        cls.pv_dir = os.path.join(cls.tmpdir.name, 'pressure-vessel')
+        os.makedirs(cls.pv_dir, exist_ok=True)
+
+        if 'PRESSURE_VESSEL_UNINSTALLED' in os.environ:
+            os.makedirs(os.path.join(cls.pv_dir, 'bin'))
+
+            for exe in (
+                'pressure-vessel-wrap',
+            ):
+                shutil.copy2(
+                    os.path.join(cls.top_builddir, 'src', exe),
+                    os.path.join(cls.pv_dir, 'bin', exe),
+                )
+
+            for exe in (
+                'pressure-vessel-locale-gen',
+            ):
+                shutil.copy2(
+                    os.path.join(cls.top_srcdir, exe),
+                    os.path.join(cls.pv_dir, 'bin', exe),
+                )
+
+            for exe in (
+                'pressure-vessel-try-setlocale',
+                'pressure-vessel-with-lock',
+            ):
+                in_containers_dir = os.path.join(
+                    cls.containers_dir,
+                    'pressure-vessel',
+                    'bin',
+                    exe,
+                )
+
+                if os.path.exists(in_containers_dir):
+                    # Assume it's a close enough version that we can
+                    # use it with the newer pressure-vessel-wrap.
+                    # We don't necessarily want to use versions from
+                    # the builddir because they can have dependencies
+                    # that are newer than the container's libraries.
+                    shutil.copy2(
+                        in_containers_dir,
+                        os.path.join(cls.pv_dir, 'bin', exe),
+                    )
+                else:
+                    shutil.copy2(
+                        os.path.join(cls.top_builddir, 'src', exe),
+                        os.path.join(cls.pv_dir, 'bin', exe),
+                    )
+
+            for d in (
+                'subprojects',
+            ):
+                shutil.copytree(
+                    os.path.join(cls.top_builddir, d),
+                    os.path.join(cls.pv_dir, d),
+                    symlinks=True,
+                )
+
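+            # Use capsule-capture-libs etc. from the steam-runtime in
+            # the containers directory if present, otherwise fall back
+            # to the copy installed on the host system.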
+            fallback_libcapsule_tools = os.path.join(
+                cls.containers_dir,
+                'steam-runtime', 'usr', 'lib', 'libcapsule', 'relocatable',
+            )
+
+            if not os.path.isdir(fallback_libcapsule_tools):
+                fallback_libcapsule_tools = '/usr/lib/libcapsule/relocatable'
+
+            for multiarch in ('i386-linux-gnu', 'x86_64-linux-gnu'):
+                for tool in ('capsule-capture-libs', 'capsule-symbols'):
+                    exe = multiarch + '-' + tool
+                    tool_path = os.path.join(cls.pv_dir, 'bin', exe)
+
+                    found = shutil.which(exe)
+                    relocatable = os.path.join(
+                        os.environ.get(
+                            'PRESSURE_VESSEL_LIBCAPSULE_TOOLS',
+                            fallback_libcapsule_tools,
+                        ),
+                        exe,
+                    )
+
+                    if found is not None:
+                        shutil.copy2(found, tool_path)
+                    elif os.path.isfile(relocatable):
+                        shutil.copy2(relocatable, tool_path)
+                    else:
+                        raise unittest.SkipTest('{} not found'.format(exe))
+        else:
+            cls.pv_dir = os.path.join(cls.containers_dir, 'pressure-vessel')
+
+            if not os.path.isdir(cls.pv_dir):
+                raise unittest.SkipTest('{} not found'.format(cls.pv_dir))
+
+        cls.pv_wrap = os.path.join(cls.pv_dir, 'bin', 'pressure-vessel-wrap')
+
+        host_srsi = os.getenv('STEAM_RUNTIME_SYSTEM_INFO')
+
+        if host_srsi is None:
+            rt = os.path.join(cls.containers_dir, 'steam-runtime')
+            cpu = os.uname().machine
+
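+            # Map the output of uname -m onto the Debian-style
+            # architecture names (amd64, i386) used in the
+            # steam-runtime directory layout.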
+            if cpu == 'x86_64':
+                arch = 'amd64'
+            elif cpu.startswith('i') and len(cpu) == 4 and cpu.endswith('86'):
+                arch = 'i386'
+            else:
+                arch = cpu
+
+            host_srsi = shutil.which(
+                os.path.join(
+                    rt, arch, 'usr', 'bin', 'steam-runtime-system-info',
+                )
+            )
+
+        if host_srsi is None:
+            host_srsi = shutil.which('steam-runtime-system-info')
+
+        if host_srsi is not None:
+            with open(
+                os.path.join(cls.artifacts, 'host-srsi.json'),
+                'w',
+            ) as writer:
+                subprocess.run(
+                    [
+                        host_srsi,
+                        '--verbose',
+                    ],
+                    cwd=cls.artifacts,
+                    stdout=writer,
+                    stderr=2,
+                    universal_newlines=True,
+                )
+
+            os.environ['HOST_STEAM_RUNTIME_SYSTEM_INFO_JSON'] = os.path.join(
+                cls.artifacts, 'host-srsi.json',
+            )
+        else:
+            os.environ.pop('HOST_STEAM_RUNTIME_SYSTEM_INFO_JSON', None)
+
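+        # Record the real paths of the host's ELF interpreters, so
+        # that inside-scout.py can check that they are shared with
+        # the container.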
+        try:
+            os.environ['HOST_LD_LINUX_SO_REALPATH'] = os.path.realpath(
+                '/lib/ld-linux.so.2'
+            )
+        except OSError:
+            os.environ.pop('HOST_LD_LINUX_SO_REALPATH', None)
+
+        try:
+            os.environ['HOST_LD_LINUX_X86_64_SO_REALPATH'] = os.path.realpath(
+                '/lib64/ld-linux-x86-64.so.2'
+            )
+        except OSError:
+            os.environ.pop('HOST_LD_LINUX_X86_64_SO_REALPATH', None)
+
+    def setUp(self) -> None:
+        super().setUp()
+        cls = self.__class__
+        self.bwrap = cls.bwrap
+        self.containers_dir = cls.containers_dir
+        self.pv_dir = cls.pv_dir
+        self.pv_wrap = cls.pv_wrap
+
+        # The artifacts directory is going to be the current working
+        # directory inside the container, so we copy things we will
+        # need into that directory.
+        os.makedirs(os.path.join(cls.artifacts, 'tmp'), exist_ok=True)
+
+        for f in ('testutils.py', 'inside-scout.py'):
+            shutil.copy2(
+                os.path.join(cls.G_TEST_SRCDIR, f),
+                os.path.join(cls.artifacts, 'tmp', f),
+            )
+
+    def tearDown(self) -> None:
+        with contextlib.suppress(FileNotFoundError):
+            shutil.rmtree(os.path.join(self.artifacts, 'tmp'))
+
+        super().tearDown()
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        super().tearDownClass()
+
+    def run_subprocess(
+        self,
+        args,           # type: typing.Union[typing.List[str], str]
+        check=False,
+        input=None,     # type: typing.Optional[bytes]
+        timeout=None,   # type: typing.Optional[int]
+        **kwargs        # type: typing.Any
+    ):
+        logger.info('Running: %r', args)
+        return run_subprocess(
+            args, check=check, input=input, timeout=timeout, **kwargs
+        )
+
+    def _test_scout(
+        self,
+        test_name: str,
+        scout: str,
+        *,
+        locales: bool = False
+    ) -> None:
+        if self.bwrap is None:
+            self.skipTest('Unable to run bwrap (in a container?)')
+
+        if not os.path.isdir(scout):
+            self.skipTest('{} not found'.format(scout))
+
+        artifacts = os.path.join(
+            self.artifacts,
+            test_name,
+        )
+        os.makedirs(artifacts, exist_ok=True)
+
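+        # Run the pressure-vessel-wrap under test with the given
+        # runtime, and use it to run inside-scout.py (copied into the
+        # artifacts directory) with the runtime's python3.5.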
+        argv = [
+            self.pv_wrap,
+            '--runtime', scout,
+            '--verbose',
+        ]
+
+        if not locales:
+            argv.append('--no-generate-locales')
+
+        argv.extend([
+            '--',
+            'env',
+            'TEST_INSIDE_SCOUT_ARTIFACTS=' + artifacts,
+            'TEST_INSIDE_SCOUT_LOCALES=' + ('1' if locales else ''),
+            'python3.5',
+            os.path.join(self.artifacts, 'tmp', 'inside-scout.py'),
+        ])
+
+        with tee_file_and_stderr(
+            os.path.join(artifacts, 'inside-scout.log')
+        ) as tee:
+            completed = subprocess.run(
+                argv,
+                cwd=self.artifacts,
+                stdout=tee.stdin,
+                stderr=tee.stdin,
+                universal_newlines=True,
+            )
+
+        self.assertEqual(completed.returncode, 0)
+
+    def test_scout_sysroot(self) -> None:
+        scout = os.path.join(self.containers_dir, 'scout_sysroot')
+
+        if os.path.isdir(os.path.join(scout, 'files')):
+            scout = os.path.join(scout, 'files')
+
+        self._test_scout('scout_sysroot', scout, locales=True)
+
+    def test_scout_usr(self) -> None:
+        scout = os.path.join(self.containers_dir, 'scout', 'files')
+
+        self._test_scout('scout', scout, locales=True)
+
+
+if __name__ == '__main__':
+    assert sys.version_info >= (3, 4), \
+        'Python 3.4+ is required'
+
+    test_main()
+
+# vi: set sw=4 sts=4 et:
diff --git a/tests/inside-scout.py b/tests/inside-scout.py
new file mode 100755
index 0000000000000000000000000000000000000000..3b9f4885358c7de050a1a3d4eefaffaac4e41d63
--- /dev/null
+++ b/tests/inside-scout.py
@@ -0,0 +1,530 @@
+#!/usr/bin/python3.5
+# Copyright 2020 Collabora Ltd.
+#
+# SPDX-License-Identifier: MIT
+
+import contextlib
+import ctypes
+import json
+import logging
+import os
+import shlex
+import subprocess
+import sys
+import typing
+from pathlib import Path
+
+from testutils import (
+    BaseTest,
+    test_main,
+)
+
+"""
+Test script intended to be run inside a SteamRT 1 'scout' container,
+to assert that everything is as it should be.
+"""
+
+logger = logging.getLogger('test-inside-scout')
+
+
+class HostInfo:
+    def __init__(self) -> None:
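+        # pressure-vessel makes (parts of) the host's filesystem
+        # visible here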
+        self.path = Path('/run/host')
+
+        self.os_release = {}   # type: typing.Dict[str, str]
+
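+        # Read the host's os-release from whichever of the two
+        # standard locations exists under /run/host.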
+        for osr in ('etc/os-release', 'usr/lib/os-release'):
+            try:
+                reader = (self.path / osr).open()
+            except OSError:
+                continue
+            else:
+                with reader:
+                    for line in reader:
+                        if '=' not in line:
+                            logger.warning(
+                                'Invalid line in %r: %r',
+                                self.path / osr, line,
+                            )
+                            continue
+                        key, value = line.split('=', 1)
+
+                        try:
+                            tokens = shlex.split(value)
+                        except ValueError as e:
+                            logger.warning(
+                                'Invalid line in %r: %r: %s',
+                                self.path / osr, line, e,
+                            )
+                            continue
+
+                        if len(tokens) != 1:
+                            logger.warning(
+                                'Invalid line in %r: %r',
+                                self.path / osr, line,
+                            )
+                            continue
+
+                        self.os_release[key] = tokens[0]
+                break
+
+
+class TestInsideScout(BaseTest):
+    def setUp(self) -> None:
+        super().setUp()
+        self.host = HostInfo()
+
+        artifacts = os.getenv('TEST_INSIDE_SCOUT_ARTIFACTS')
+
+        if artifacts is not None:
+            self.artifacts = Path(artifacts)
+        else:
+            self.artifacts = Path(self.tmpdir.name)
+
+        self.artifacts.mkdir(exist_ok=True)
+
+    def tearDown(self) -> None:
+        super().tearDown()
+
+    @contextlib.contextmanager
+    def catch(
+        self,
+        msg,                # type: str
+        diagnostic=None,    # type: typing.Any
+        **kwargs            # type: typing.Any
+    ):
+        """
+        Run a sub-test, with additional logging. If it fails, we log
+        the exception and carry on with the remaining sub-tests.
+        """
+
+        if kwargs:
+            logger.info('Starting: %r (%r)', msg, kwargs)
+        else:
+            logger.info('Starting: %r', msg)
+        with self.subTest(msg, **kwargs):
+            try:
+                yield
+            except Exception:
+                logger.error(msg, exc_info=True)
+
+                if diagnostic is not None:
+                    logger.error('%r', diagnostic)
+
+                raise
+
+    def test_os_release(self) -> None:
+        """
+        Assert that both /etc/os-release and /usr/lib/os-release have
+        the contents we expect.
+        """
+
+        for osr in ('/etc/os-release', '/usr/lib/os-release'):
+            data = {}   # type: typing.Dict[str, str]
+
+            with open(osr) as reader:
+                for line in reader:
+                    assert '=' in line, line
+                    key, value = line.split('=', 1)
+
+                    logger.info('%s %r: %r', osr, key, value)
+
+                    tokens = shlex.split(value)
+
+                    assert len(tokens) == 1, tokens
+                    data[key] = tokens[0]
+
+            self.assertEqual(data.get('VERSION_ID'), '1')
+            self.assertEqual(data.get('ID'), 'steamrt')
+            self.assertEqual(data.get('ID_LIKE'), 'ubuntu')
+            self.assertIsNotNone(data.get('BUILD_ID'))
+
+    def test_environ(self) -> None:
+        logger.info('PATH: %r', os.environ.get('PATH'))
+        logger.info(
+            'LD_LIBRARY_PATH: %r', os.environ.get('LD_LIBRARY_PATH')
+        )
+        # No actual *tests* here just yet - we just log what's there.
+
+    def test_glibc(self) -> None:
+        """
+        Assert that we took the glibc version from the host OS.
+
+        We assume this will always be true for scout, because scout
+        is based on Ubuntu 12.04, the oldest operating system we support;
+        and in cases where our glibc is the same version as the glibc of
+        the host OS, we prefer the host.
+        """
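+        # Ask the glibc we are actually running against for its
+        # version via gnu_get_libc_version().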
+        glibc = ctypes.cdll.LoadLibrary('libc.so.6')
+        gnu_get_libc_version = glibc.gnu_get_libc_version
+        gnu_get_libc_version.restype = ctypes.c_char_p
+        glibc_version = gnu_get_libc_version().decode('ascii')
+        logger.info('glibc version in use: %s', glibc_version)
+        major, minor, *rest = glibc_version.split('.')
+        self.assertGreaterEqual((int(major), int(minor)), (2, 15))
+
+        # This assumes that uname -m matches the multiarch tuple
+        # closely enough. On x86_64 it does, and on i386 we have
+        # symlinks /overrides/lib/i[456]86-linux-gnu.
+        host_glibc = ctypes.cdll.LoadLibrary(
+            '/overrides/lib/{}-linux-gnu/libc.so.6'.format(os.uname().machine),
+        )
+        gnu_get_libc_version = host_glibc.gnu_get_libc_version
+        gnu_get_libc_version.restype = ctypes.c_char_p
+        host_glibc_version = gnu_get_libc_version().decode('ascii')
+        logger.info('host glibc version: %s', host_glibc_version)
+        self.assertEqual(host_glibc_version, glibc_version)
+
+        if (
+            'HOST_LD_LINUX_SO_REALPATH' in os.environ
+            and Path('/usr/lib/i386-linux-gnu').is_dir()
+        ):
+            host_path = os.environ['HOST_LD_LINUX_SO_REALPATH']
+            expected = self.host.path / host_path.lstrip('/')
+            expected_stat = expected.stat()
+            logger.info('host ld-linux.so.2: %s', host_path)
+
+            for really in (
+                '/lib/ld-linux.so.2',
+                '/lib/i386-linux-gnu/ld-linux.so.2',
+                '/lib/i386-linux-gnu/ld-2.15.so',
+            ):
+                really_stat = Path(really).stat()
+                # Either it's a symlink to the same file, or the same file
+                # was mounted over it
+                self.assertEqual(really_stat.st_dev, expected_stat.st_dev)
+                self.assertEqual(really_stat.st_ino, expected_stat.st_ino)
+
+        if (
+            'HOST_LD_LINUX_X86_64_SO_REALPATH' in os.environ
+            and Path('/usr/lib/x86_64-linux-gnu').is_dir()
+        ):
+            host_path = os.environ['HOST_LD_LINUX_X86_64_SO_REALPATH']
+            expected = self.host.path / host_path.lstrip('/')
+            expected_stat = expected.stat()
+            logger.info('host ld-linux-x86-64.so.2: %s', host_path)
+
+            for really in (
+                '/lib64/ld-linux-x86-64.so.2',
+                '/lib/x86_64-linux-gnu/ld-linux-x86-64.so.2',
+                '/lib/x86_64-linux-gnu/ld-2.15.so',
+            ):
+                really_stat = Path(really).stat()
+                self.assertEqual(really_stat.st_dev, expected_stat.st_dev)
+                self.assertEqual(really_stat.st_ino, expected_stat.st_ino)
+
+    def test_srsi(self) -> None:
+        if 'HOST_STEAM_RUNTIME_SYSTEM_INFO_JSON' in os.environ:
+            with open(
+                os.environ['HOST_STEAM_RUNTIME_SYSTEM_INFO_JSON'],
+                'r',
+            ) as reader:
+                host_parsed = json.load(reader)
+        else:
+            host_parsed = {}
+
+        with (self.artifacts / 'srsi.json').open('w') as writer:
+            logger.info('steam-runtime-system-info --verbose...')
+            completed = subprocess.run(
+                # We specifically want the container's version, not
+                # any other version that might have crept into PATH.
+                ['/usr/bin/steam-runtime-system-info', '--verbose'],
+                stdout=writer,
+                stderr=subprocess.PIPE,
+                check=True,
+            )
+            if completed.stderr:
+                logger.info(
+                    '(stderr) -> \n%s',
+                    completed.stderr.decode('utf-8', 'backslashreplace')
+                )
+            else:
+                logger.info('(no stderr)')
+
+        with (self.artifacts / 'srsi.json').open('r') as reader:
+            for line in reader:
+                print(line.rstrip('\n'), file=sys.stderr)
+
+        with (self.artifacts / 'srsi.json').open('r') as reader:
+            parsed = json.load(reader)
+
+        self.assertIsInstance(parsed, dict)
+
+        if host_parsed:
+            host_os_release = host_parsed.get('os-release', {})
+        else:
+            host_os_release = parsed.get(
+                'container', {}
+            ).get(
+                'host', {}
+            ).get(
+                'os-release', {}
+            )
+
+        if host_os_release.get('id') == 'debian':
+            logger.info('Host OS is Debian')
+            host_is_debian_derived = True
+        elif 'debian' in host_os_release.get('id_like', []):
+            logger.info('Host OS is Debian-derived')
+            host_is_debian_derived = True
+        else:
+            logger.info('Host OS is not Debian-derived')
+            host_is_debian_derived = False
+
+        with self.catch(
+            'runtime information',
+            diagnostic=parsed.get('runtime'),
+        ):
+            self.assertIn('runtime', parsed)
+            self.assertEqual('/', parsed['runtime'].get('path'))
+            self.assertIn('version', parsed['runtime'])
+            issues = parsed['runtime'].get('issues', [])
+            self.assertNotIn('disabled', issues)
+            self.assertNotIn('internal-error', issues)
+            self.assertNotIn('not-in-environment', issues)
+            self.assertNotIn('not-in-ld-path', issues)
+            self.assertNotIn('not-in-path', issues)
+            self.assertNotIn('not-runtime', issues)
+            self.assertNotIn('not-using-newer-host-libraries', issues)
+            self.assertNotIn('unexpected-location', issues)
+            self.assertNotIn('unexpected-version', issues)
+            # Don't assert whether it contains 'unofficial':
+            # we want to be able to test unofficial runtimes too
+
+            self.assertIn('overrides', parsed['runtime'])
+            self.assertNotIn('pinned_libs_32', parsed['runtime'])
+            self.assertNotIn('pinned_libs_64', parsed['runtime'])
+
+        with self.catch(
+            'os-release information',
+            diagnostic=parsed.get('os-release'),
+        ):
+            self.assertIn('os-release', parsed)
+            self.assertEqual('steamrt', parsed['os-release']['id'])
+            self.assertNotIn(
+                parsed['os-release']['id'],
+                parsed['os-release'].get('id_like', [])
+            )
+            self.assertIn('name', parsed['os-release'])
+            self.assertIn('pretty_name', parsed['os-release'])
+            self.assertIn('version_id', parsed['os-release'])
+            self.assertEqual('1', parsed['os-release']['version_id'])
+            self.assertEqual('scout', parsed['os-release']['version_codename'])
+            self.assertIn('build_id', parsed['os-release'])
+
+        with self.catch(
+            'container info',
+            diagnostic=parsed.get('container'),
+        ):
+            self.assertIn('container', parsed)
+            self.assertEqual(parsed['container']['type'], 'pressure-vessel')
+            self.assertEqual(parsed['container']['host']['path'], '/run/host')
+
+        if host_parsed:
+            self.assertEqual(
+                parsed['container']['host']['os-release'],
+                host_parsed['os-release'],
+            )
+
+            if os.environ.get('TEST_INSIDE_SCOUT_LOCALES'):
+                for locale, host_details in host_parsed.get(
+                    'locales', {}
+                ).items():
+                    self.assertIn(locale, parsed['locales'])
+                    details = parsed['locales'][locale]
+
+                    # Every locale that worked on the host works in the
+                    # container
+                    if 'resulting-name' in host_details:
+                        self.assertEqual(host_details, details)
+
+                    # en_US.UTF-8 should also work in the container, because we
+                    # make sure to generate it
+                    if locale == 'en_US.UTF-8':
+                        self.assertEqual(details.get('error'), None)
+
+        self.assertIn('architectures', parsed)
+
+        for multiarch in parsed['architectures']:
+            if not (Path('/usr/lib') / multiarch).is_dir():
+                continue
+
+            arch_info = parsed['architectures'][multiarch]
+            host_info = host_parsed.get('architectures', {}).get(multiarch, {})
+
+            with self.catch(
+                'per-architecture information',
+                diagnostic=arch_info,
+                arch=multiarch,
+            ):
+                self.assertTrue(arch_info['can-run'])
+                self.assertEqual([], arch_info['library-issues-summary'])
+                # Graphics driver support depends on the host system, so we
+                # don't assert that everything is fine, only that we have
+                # the information.
+                self.assertIn('graphics-details', arch_info)
+                self.assertIn('glx/gl', arch_info['graphics-details'])
+
+            for soname, details in arch_info['library-details'].items():
+                with self.catch(
+                    'per-library information',
+                    diagnostic=details,
+                    arch=multiarch,
+                    soname=soname,
+                ):
+                    self.assertIn('path', details)
+                    self.assertEqual(
+                        [],
+                        details.get('missing-symbols', []),
+                    )
+                    self.assertEqual(
+                        [],
+                        details.get('misversioned-symbols', []),
+                    )
+                    self.assertEqual([], details.get('issues', []))
+
+            for soname in (
+                'libBrokenLocale.so.1',
+                'libanl.so.1',
+                'libc.so.6',
+                'libcrypt.so.1',
+                'libdl.so.2',
+                'libm.so.6',
+                'libnsl.so.1',
+                'libpthread.so.0',
+                'libresolv.so.2',
+                'librt.so.1',
+                'libutil.so.1',
+            ):
+                # These are from glibc, which is depended on by Mesa, and
+                # is at least as new as scout's version in every supported
+                # version of the Steam Runtime.
+                self.assertEqual(
+                    arch_info['library-details'][soname]['path'],
+                    '/overrides/lib/{}/{}'.format(multiarch, soname),
+                )
+
+            for soname in (
+                'libSDL-1.2.so.0',
+                'libfltk.so.1.1',
+            ):
+                # These libraries are definitely not part of the graphics
+                # driver stack
+                self.assertEqual(
+                    arch_info['library-details'][soname]['path'],
+                    '/usr/lib/{}/{}'.format(multiarch, soname),
+                )
+
+            if host_info:
+                expect_symlinks = {
+                }    # type: typing.Dict[str, typing.List[str]]
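+                # For each DRI driver found on the host in one of the
+                # usual locations, we expect a corresponding symlink
+                # in /overrides.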
+                for dri in host_info.get('dri_drivers', ()):
+                    path = dri['library_path']
+
+                    if path.startswith((    # any of:
+                        '/usr/lib/dri/',
+                        '/usr/lib32/dri/',
+                        '/usr/lib64/dri/',
+                        '/usr/lib/{}/dri/'.format(multiarch),
+                    )):
+                        # We don't make any assertion about the search
+                        # order here.
+                        host_path = '/run/host' + path
+
+                        # Take the realpath() on non-Debian-derived hosts,
+                        # because on Arch Linux, we find drivers in
+                        # /usr/lib64 that are physically in /usr/lib.
+                        # Be more strict on Debian because we know more
+                        # about the canonical paths there.
+                        if not host_is_debian_derived:
+                            with contextlib.suppress(OSError):
+                                host_path = os.path.realpath(host_path)
+
+                        expect_symlinks.setdefault(
+                            os.path.basename(path), []
+                        ).append(host_path)
+
+                for k, vs in expect_symlinks.items():
+                    with self.subTest(dri_symlink=k):
+                        link = '/overrides/lib/{}/dri/{}'.format(multiarch, k)
+                        logger.info('Target of %s should be in %s', link, vs)
+                        target = os.readlink(link)
+
+                        # Again, take the realpath() on non-Debian-derived
+                        # hosts, but be more strict on Debian.
+                        if not host_is_debian_derived:
+                            with contextlib.suppress(OSError):
+                                target = os.path.realpath(link)
+
+                        self.assertIn(target, vs)
+
+                for stack, host_details in (
+                    host_info.get('graphics-details', {}).items()
+                ):
+                    if stack not in arch_info['graphics-details']:
+                        continue
+
+                    # On Debian hosts with SDK containers, this might
+                    # not work because we get confused about whether
+                    # our libedit.so.2 is older or newer than the
+                    # host's. (T21954)
+                    if (
+                        host_is_debian_derived
+                        and (
+                            Path('/usr/lib') / multiarch / 'libedit.so.2'
+                        ).exists()
+                    ):
+                        logger.info(
+                            'libedit.so.2 exists, skipping graphics check'
+                        )
+                        continue
+
+                    # Compounding the above, capsule-capture-libs 0.20190926.0
+                    # will not capture the i386 libedit.so.2 from the host
+                    # if it thinks the *x86_64* libedit.so.2 from the
+                    # container is newer, due to a bug. We work around it
+                    # for now, to get some test coverage going.
+                    if (
+                        host_is_debian_derived
+                        and Path(
+                            '/usr/lib/x86_64-linux-gnu/libedit.so.2'
+                        ).exists()
+                    ):
+                        logger.info(
+                            'x86_64 libedit.so.2 exists, skipping graphics '
+                            'check'
+                        )
+                        continue
+
+                    with self.subTest(stack=stack):
+                        details = arch_info['graphics-details'][stack]
+
+                        # If it works on the host, it should work in
+                        # the container (modulo caveats above).
+                        if not host_details.get('issues', ()):
+                            self.assertFalse(details.get('issues', ()))
+
+                        for key in (
+                            'renderer',
+                            'version',
+                            'library-vendor',
+                        ):
+                            with self.subTest(key=key):
+                                if key in host_details:
+                                    self.assertEqual(
+                                        host_details[key],
+                                        details.get(key),
+                                    )
+
+
+if __name__ == '__main__':
+    assert sys.version_info >= (3, 5), 'Python 3.5+ is required'
+
+    test_main()
+
+# vi: set sw=4 sts=4 et:
diff --git a/tests/meson.build b/tests/meson.build
index 3c9541e30ae2e53075da3605dd6672f20527a36b..8b2bc8d6f9905cd7ba3f4998550998c3ed42d82c 100644
--- a/tests/meson.build
+++ b/tests/meson.build
@@ -24,10 +24,15 @@
 test_env = environment()
 test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
 test_env.set('G_TEST_BUILDDIR', meson.current_build_dir())
+test_env.set(
+  'PRESSURE_VESSEL_TEST_CONTAINERS',
+  get_option('test_containers_dir'),
+)
 test_env.set('PRESSURE_VESSEL_UNINSTALLED', 'yes')
 
 tests = [
   'cheap-copy.py',
+  'containers.py',
   'invocation.py',
   'mypy.sh',
   'pycodestyle.sh',
@@ -76,16 +81,22 @@ endforeach
 
 foreach test_name : tests
   test_args = ['-v', files(test_name)]
+  timeout = 30
 
   if test_name.endswith('.py')
     test_args += ['-e', python.path()]
   endif
 
+  if test_name.endswith('containers.py')
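+    # containers.py runs pressure-vessel against real containers,
+    # so it needs a much longer timeout than the other tests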
+    timeout = 300
+  endif
+
   if prove.found()
     test(
       test_name, prove,
       args : test_args,
       env : test_env,
+      timeout : timeout,
     )
   endif
 endforeach
diff --git a/tests/testutils.py b/tests/testutils.py
index 3d39aaa17f30e27045bfdfb3ef25ebf985732e87..a5dc0e4752d9e0b764071f41cfb5a2dee34113ca 100644
--- a/tests/testutils.py
+++ b/tests/testutils.py
@@ -5,6 +5,7 @@
 import logging
 import os
 import subprocess
+import sys
 import tempfile
 import unittest
 
@@ -79,33 +80,70 @@ class BaseTest(unittest.TestCase):
     Base class with some useful test setup.
     """
 
-    def setUp(self) -> None:
-        self.G_TEST_SRCDIR = os.getenv(
+    G_TEST_BUILDDIR = ''
+    G_TEST_SRCDIR = ''
+    artifacts = ''
+    tmpdir = None       # type: tempfile.TemporaryDirectory
+    top_builddir = ''
+    top_srcdir = ''
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.G_TEST_SRCDIR = os.getenv(
             'G_TEST_SRCDIR',
             os.path.abspath(os.path.dirname(__file__)),
         )
-        self.top_srcdir = os.path.dirname(self.G_TEST_SRCDIR)
-        self.G_TEST_BUILDDIR = os.getenv(
+        cls.top_srcdir = os.path.dirname(cls.G_TEST_SRCDIR)
+        cls.G_TEST_BUILDDIR = os.getenv(
             'G_TEST_BUILDDIR',
             os.path.abspath(
                 os.path.join(os.path.dirname(__file__), '..', '_build'),
             ),
         )
-        self.top_builddir = os.path.dirname(self.G_TEST_BUILDDIR)
+        cls.top_builddir = os.path.dirname(cls.G_TEST_BUILDDIR)
 
-        self.tmpdir = tempfile.TemporaryDirectory()
-        self.addCleanup(self.tmpdir.cleanup)
+        cls.tmpdir = tempfile.TemporaryDirectory()
 
         artifacts = os.getenv('AUTOPKGTEST_ARTIFACTS')
 
         if artifacts is not None:
-            self.artifacts = artifacts
+            cls.artifacts = os.path.abspath(artifacts)
         else:
-            self.artifacts = self.tmpdir.name
+            cls.artifacts = cls.tmpdir.name
+
+    def setUp(self) -> None:
+        cls = self.__class__
+        self.G_TEST_BUILDDIR = cls.G_TEST_BUILDDIR
+        self.G_TEST_SRCDIR = cls.G_TEST_SRCDIR
+        self.artifacts = cls.artifacts
+        self.top_builddir = cls.top_builddir
+        self.top_srcdir = cls.top_srcdir
+
+        # Class and each test get separate temp directories
+        self.tmpdir = tempfile.TemporaryDirectory()
+        self.addCleanup(self.tmpdir.cleanup)
 
     def tearDown(self) -> None:
         pass
 
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmpdir.cleanup()
+
+
+def tee_file_and_stderr(path: str) -> subprocess.Popen:
+    """
+    Return a context manager with a stdin attribute.
+    Anything written to its stdin will be written to `path`
+    and also to stderr.
+    """
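+    # tee copies its stdin to the file and to its stdout, which we
+    # redirect to our own stderr (file descriptor 2).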
+    return subprocess.Popen(
+        ['tee', '--', path],
+        stdin=subprocess.PIPE,
+        stdout=2,
+        stderr=2,
+    )
+
 
 def test_main():
     logging.basicConfig(level=logging.DEBUG)
@@ -132,5 +170,6 @@ def test_main():
                 'not ok 1 - %r (tap module not available)'
                 % program.result
             )
+            sys.exit(1)
 
 # vi: set sw=4 sts=4 et: