                        include_sdk_debug=self.include_sdk_debug,
                        include_sdk_runtime=self.include_sdk_runtime,
                        include_sdk_sysroot=self.include_sdk_sysroot,
                    )
                if self.versioned_directories:
                    subdir = '{}_platform_{}'.format(runtime.name, version)
                else:
                    subdir = runtime.name

                dest = os.path.join(self.depot, subdir)
                runtime_files.add(subdir + '/')

                with suppress(FileNotFoundError):
                    shutil.rmtree(dest)

                os.makedirs(dest, exist_ok=True)
                argv = [
                    'tar',
                    '-C', dest,
                    '-xf',
                    os.path.join(self.cache, runtime.tarball),
                ]
                logger.info('%r', argv)
                subprocess.run(argv, check=True)
                self.prune_runtime(Path(dest))

                if self.minimize:
                    self.minimize_runtime(dest)

                if self.include_sdk_runtime:
                    if self.versioned_directories:
                        sdk_subdir = '{}_sdk_{}'.format(runtime.name, version)
                    else:
                        sdk_subdir = '{}_sdk'.format(runtime.name)

                    dest = os.path.join(self.depot, sdk_subdir)
                    runtime_files.add(sdk_subdir + '/')

                    with suppress(FileNotFoundError):
                        shutil.rmtree(os.path.join(dest, 'files'))

                    with suppress(FileNotFoundError):
                        os.remove(os.path.join(dest, 'metadata'))

                    os.makedirs(
                        os.path.join(dest, 'files', 'lib', 'debug'),
                        exist_ok=True,
                    )
                    argv = [
                        'tar',
                        '-C', dest,
                        '-xf', os.path.join(self.cache, runtime.sdk_tarball),
                    ]
                    logger.info('%r', argv)
                    subprocess.run(argv, check=True)
                    self.prune_runtime(Path(dest))

                    if self.minimize:
                        self.minimize_runtime(dest)

                    if self.include_sdk_debug:
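                        # The debug symbols tarball has its content under
                        # files/; the --transform expression strips that
                        # prefix so the symbols land directly in lib/debug.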
                        argv = [
                            'tar',
                            '-C', os.path.join(dest, 'files', 'lib', 'debug'),
                            '--transform', r's,^\(\./\)\?files\(/\|$\),,',
                            '-xf',
                            os.path.join(self.cache, runtime.debug_tarball),
                        ]
                        logger.info('%r', argv)
                        subprocess.run(argv, check=True)

                if self.include_sdk_sysroot:
                    if self.versioned_directories:
                        sysroot_subdir = '{}_sysroot_{}'.format(
                            runtime.name, version,
                        )
                    else:
                        sysroot_subdir = '{}_sysroot'.format(runtime.name)

                    sysroot = os.path.join(self.depot, sysroot_subdir)
                    runtime_files.add(sysroot_subdir + '/')

                    with suppress(FileNotFoundError):
                        shutil.rmtree(sysroot)

                    os.makedirs(os.path.join(sysroot, 'files'), exist_ok=True)
                    argv = [
                        'tar',
                        '-C', os.path.join(sysroot, 'files'),
                        '--exclude', 'dev/*',
                        '-xf',
                        os.path.join(self.cache, runtime.sysroot_tarball),
                    ]
                    logger.info('%r', argv)
                    subprocess.run(argv, check=True)

                    os.makedirs(
                        os.path.join(
                            sysroot, 'files', 'usr', 'lib', 'debug',
                        ),
                        exist_ok=True,
                    )

                    if self.include_sdk_debug:
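                        # If the SDK runtime was unpacked above, its
                        # files/lib/debug tree already exists, so hard-link
                        # it into the sysroot; otherwise unpack the debug
                        # symbols tarball directly into usr/lib/debug.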
                        if self.include_sdk_runtime:
                            argv = [
                                'cp',
                                '-al',
                                os.path.join(dest, 'files', 'lib', 'debug'),
                                os.path.join(sysroot, 'files', 'usr', 'lib'),
                            ]
                            logger.info('%r', argv)
                            subprocess.run(argv, check=True)
                        else:
                            argv = [
                                'tar',
                                '-C', os.path.join(
                                    sysroot, 'files', 'usr', 'lib', 'debug'
                                ),
                                '--transform', r's,^\(\./\)\?files\(/\|$\),,',
                                '-xf',
                                os.path.join(
                                    self.cache, runtime.debug_tarball,
                                ),
                            ]
                            logger.info('%r', argv)
                            subprocess.run(argv, check=True)
            with open(
                os.path.join(self.depot, 'run-in-' + runtime.name), 'w'
            ) as writer:
                if self.unpack_runtime:
                    writer.write(
                        RUN_IN_DIR_SOURCE.format(
                            escaped_dir=shlex.quote(subdir),
                            source_for_generated_file=(
                                'Generated file, do not edit'
                            ),
                        )
                    )
                else:
                    writer.write(
                        RUN_IN_ARCHIVE_SOURCE.format(
                            escaped_arch=shlex.quote(runtime.architecture),
                            escaped_name=shlex.quote(runtime.name),
                            escaped_runtime=shlex.quote(runtime.platform),
                            escaped_suite=shlex.quote(runtime.suite),
                            source_for_generated_file=(
                                'Generated file, do not edit'
                            ),
                        )
                    )
            os.chmod(os.path.join(self.depot, 'run-in-' + runtime.name), 0o755)

            comment = ', '.join(sorted(runtime_files))

            if runtime.path:
                comment += ' (from local build)'

            component_version = ComponentVersion(runtime.name)
            component_version.version = version
            component_version.runtime = runtime.suite
            component_version.runtime_version = version
            component_version.comment = comment
            self.versions.append(component_version)

        for runtime in (self.runtime,):     # too much to reindent right now
            with open(
                os.path.join(self.depot, 'toolmanifest.vdf'), 'w'
            ) as writer:
                writer.write('// Generated file, do not edit\n')
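                # Command line that Steam runs for this compat tool:
                # the runtime's _v2-entry-point, with %verb% substituted
                # by Steam.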
                words = [
                    '/_v2-entry-point',
                    '--verb=%verb%',
                    '--',
                ]
                content = dict(
                    manifest=dict(
                        commandline=' '.join(words),
                        version='2',
                        use_tool_subprocess_reaper='1',
                    )
                )       # type: Dict[str, Any]
                if runtime.suite != 'scout':
                    content['manifest']['unlisted'] = '1'

                content['manifest']['compatmanager_layer_name'] = (
                    'container-runtime'
                )

                vdf.dump(content, writer, pretty=True, escaped=True)

            shutil.copy2(
                os.path.join(self.depot, 'run-in-' + runtime.name),
                os.path.join(self.depot, 'run'),
            )
            os.chmod(os.path.join(self.depot, 'run'), 0o755)

        self.write_component_versions()

    def write_component_versions(self) -> None:
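        """
        Determine the version of these scripts (from git describe,
        .tarball-version or --scripts-version) and write all collected
        component versions to VERSIONS.txt in the depot.
        """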
        try:
            with subprocess.Popen(
                [
                    'git', 'describe',
                    '--always',
                    '--dirty',
                    '--long',
                ],
                cwd=os.path.dirname(__file__),
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ) as describe:
                stdout = describe.stdout
                assert stdout is not None
                version = stdout.read().strip()
                # Deliberately ignoring exit status:
                # if git is missing or old we'll use 'unknown'
        except (OSError, subprocess.SubprocessError):
            version = ''

        try:
            with open(HERE / '.tarball-version', 'r') as reader:
                version = reader.read().strip()
        except OSError:
            pass

        if self.scripts_version:
            version = self.scripts_version

        if self.depot_version:
            component_version = ComponentVersion('depot', sort_weight=-1)
            component_version.version = self.depot_version
            component_version.comment = 'Overall version number'
            self.versions.append(component_version)

        component_version = ComponentVersion('scripts')
        component_version.version = version or 'unknown'
        component_version.comment = 'from steam-runtime-tools'
        self.versions.append(component_version)

        with open(os.path.join(self.depot, 'VERSIONS.txt'), 'w') as writer:
            writer.write(
                '#Name\tVersion\t\tRuntime\tRuntime_Version\tComment\n'
            )
            for entry in sorted(self.versions, key=lambda v: v.to_sort_key()):
                logger.info('Component version: %s', entry)
                writer.write(entry.to_tsv())

    def use_local_pressure_vessel(self, path: str = '.') -> None:
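        """
        Unpack a locally-built pressure-vessel-bin.tar.gz (either the
        file itself, or a directory containing it) into the depot's
        pressure-vessel/ subdirectory.
        """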
        pv_dir = os.path.join(self.depot, 'pressure-vessel')
        os.makedirs(pv_dir, exist_ok=True)
        argv = ['tar', '-C', pv_dir, '--strip-components=1', '-xf']

        if os.path.isfile(path):
            argv.append(path)
        else:
            argv.append(os.path.join(path, 'pressure-vessel-bin.tar.gz'))

        logger.info('%r', argv)
        subprocess.run(argv, check=True)

    def download_pressure_vessel_standalone(
        self,
        version: str,
    ) -> str:
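        """
        Download a standalone pressure-vessel release of the given
        version, unpack it into the depot, and return the pinned version.
        """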
        pv = PressureVesselRelease(
            cache=self.cache,
            ssh_host=self.pressure_vessel_ssh_host,
            ssh_path=self.pressure_vessel_ssh_path,
            uri=self.pressure_vessel_uri,
            version=version,
        )
        pinned = pv.pin_version(self.opener)
        self.use_local_pressure_vessel(
            pv.fetch('pressure-vessel-bin.tar.gz', self.opener, pinned)
        )
        return pinned

    def download_pressure_vessel_from_runtime(self, runtime: Runtime) -> str:
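        """
        Download pressure-vessel-bin.tar.gz from the given runtime's
        download location, unpack it into the depot, and return the
        pinned runtime version.
        """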
        filename = 'pressure-vessel-bin.tar.gz'
        pinned = runtime.pin_version(self.opener)
        downloaded = runtime.fetch(
            filename,
            self.opener,
        )
        os.makedirs(self.depot, exist_ok=True)
        subprocess.run(
            [
                'tar', '-C', self.depot, '-xf', downloaded,
            ],
            check=True,
        )
        return pinned

    def use_local_runtime(self, runtime: Runtime) -> None:
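        """
        Hard-link the archives of a locally built runtime (runtime.path)
        into the cache, and into the depot if --include-archives was
        given, then optionally unpack requested source packages.
        """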
        for basename in runtime.get_archives(
            include_sdk_debug=self.include_sdk_debug,
            include_sdk_runtime=self.include_sdk_runtime,
            include_sdk_sysroot=self.include_sdk_sysroot,
        ):
            src = os.path.join(runtime.path, basename)
            dest = os.path.join(self.cache, basename)
            logger.info('Hard-linking local runtime %r to %r', src, dest)

            with suppress(FileNotFoundError):
                os.unlink(dest)

            os.link(src, dest)

            if self.include_archives:
                dest = os.path.join(self.depot, basename)
                logger.info('Hard-linking local runtime %r to %r', src, dest)

                with suppress(FileNotFoundError):
                    os.unlink(dest)
                os.link(src, dest)

        if self.include_archives:
            with open(
                os.path.join(self.depot, runtime.build_id_file), 'w',
            ) as writer:
                writer.write(f'{runtime.version}\n')

            if self.include_sdk_runtime or self.include_sdk_sysroot:
                with open(
                    os.path.join(self.depot, runtime.sdk_build_id_file), 'w',
                ) as writer:
                    writer.write(f'{runtime.version}\n')

        if self.unpack_sources:
            with open(
                os.path.join(runtime.path, runtime.sources), 'rb',
            ) as reader:
                for stanza in Sources.iter_paragraphs(
                    sequence=reader,
                    use_apt_pkg=True,
                ):
                    if stanza['package'] in self.unpack_sources:
                        for f in stanza['files']:
                            name = f['name']

                            if name.endswith('.dsc'):
                                dest = os.path.join(
                                    self.unpack_sources_into,
                                    runtime.name,
                                    stanza['package'],
                                )

                                with suppress(FileNotFoundError):
                                    logger.info('Removing %r', dest)
                                    shutil.rmtree(dest)

                                # Unpack the .dsc and its companion
                                # tarballs into dest (assumed to use
                                # dpkg-source -x)
                                subprocess.run(
                                    [
                                        'dpkg-source',
                                        '-x',
                                        os.path.join(
                                            runtime.path,
                                            'sources',
                                            f['name'],
                                        ),
                                        dest,
                                    ],
                                    check=True,
                                )

    def download_runtime(self, runtime: Runtime) -> None:
        """
        Download a pre-prepared Platform from a previous container
        runtime build.
        """

        pinned = runtime.pin_version(self.opener)
        for basename in runtime.get_archives(
            include_sdk_debug=self.include_sdk_debug,
            include_sdk_runtime=self.include_sdk_runtime,
            include_sdk_sysroot=self.include_sdk_sysroot,
        ):
            downloaded = runtime.fetch(basename, self.opener)

            if self.include_archives:
                dest = os.path.join(self.depot, basename)

                with suppress(FileNotFoundError):
                    os.unlink(dest)

                os.link(downloaded, dest)

        if self.include_archives:
            with open(
                os.path.join(self.depot, runtime.build_id_file), 'w',
            ) as writer:
                writer.write(f'{pinned}\n')

            if self.include_sdk_runtime or self.include_sdk_sysroot:
                with open(
                    os.path.join(self.depot, runtime.sdk_build_id_file), 'w',
                ) as writer:
                    writer.write(f'{pinned}\n')

        if self.unpack_sources:
            with tempfile.TemporaryDirectory(prefix='populate-depot.') as tmp:
                want = set(self.unpack_sources)
                downloaded = runtime.fetch(
                    runtime.sources,
                    self.opener,
                )

                with open(downloaded, 'rb') as reader:
                    for stanza in Sources.iter_paragraphs(
                        sequence=reader,
                        use_apt_pkg=True,
                    ):
                        if stanza['package'] in self.unpack_sources:
                            logger.info(
                                'Found %s in %s',
                                stanza['package'], runtime.name,
                            )
                            want.discard(stanza['package'])
                            os.makedirs(
                                os.path.join(self.cache or tmp, 'sources'),
                                exist_ok=True,
                            )
                            file_path = {}    # type: Dict[str, str]

                            for f in stanza['files']:
                                name = f['name']
                                file_path[name] = runtime.fetch(
                                    os.path.join('sources', name),
                                    self.opener,
                                )

                            for f in stanza['files']:
                                name = f['name']

                                if name.endswith('.dsc'):
                                    dest = os.path.join(
                                        self.unpack_sources_into,
                                        runtime.name,
                                        stanza['package'],
                                    )

                                    with suppress(FileNotFoundError):
                                        logger.info('Removing %r', dest)
                                        shutil.rmtree(dest)

                                    # Unpack the fetched .dsc into dest
                                    # (assumed to use dpkg-source -x)
                                    subprocess.run(
                                        [
                                            'dpkg-source',
                                            '-x',
                                            file_path[name],
                                            dest,
                                        ],
                                        check=True,
                                    )

                if want:
                    logger.warning(
                        'Did not find source package(s) %s in %s',
                        ', '.join(want), runtime.name,
                    )

    def download_scout_tarball(self, runtime: Runtime) -> None:
        """
        Download a pre-prepared LD_LIBRARY_PATH Steam Runtime from a
        previous scout build.
        """
        filename = 'steam-runtime.tar.xz'

        pinned = runtime.pin_version(self.opener)
        logger.info('Downloading steam-runtime build %s', pinned)
        os.makedirs(self.unpack_ld_library_path, exist_ok=True)

        downloaded = runtime.fetch(
            filename,
            self.opener,
        )
        subprocess.run(
            [
                'tar', '-C', self.unpack_ld_library_path, '-xf',
                downloaded,
            ],
            check=True,
        )

    def octal_escape_char(self, match: 're.Match') -> str:
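        """
        Encode one matched character as mtree-style \\ooo octal escapes,
        one per UTF-8 byte.
        """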
        ret = []    # type: List[str]

        for byte in match.group(0).encode('utf-8', 'surrogateescape'):
            ret.append('\\%03o' % byte)

        return ''.join(ret)

    _NEEDS_OCTAL_ESCAPE = re.compile(r'[^-A-Za-z0-9+,./:@_]')

    def octal_escape(self, s: str) -> str:
        return self._NEEDS_OCTAL_ESCAPE.sub(self.octal_escape_char, s)

    def filename_is_windows_friendly(self, s: str) -> bool:
        for c in s:
            # This is the set of characters that are reserved in Windows
            # filenames, excluding '/' which obviously we're fine with
            # using as a directory separator.
            if c in r'<>:"\|?*':
                return False

            if c >= '\uDC80' and c <= '\uDCFF':
                # surrogate escape, not Unicode
                return False

        return True

    def write_lookaside(self, runtime: str) -> None:
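        """
        Write a gzipped mtree(5)-style manifest (usr-mtree.txt.gz) of the
        runtime's files/ tree, recording file types, sizes, permissions,
        timestamps and SHA-256 digests, plus comments about hard links,
        names that differ only by case, and names that are not
        Windows-friendly.
        """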
        with tempfile.TemporaryDirectory(prefix='slr-mtree-') as temp:
            lc_names = {}                   # type: Dict[str, str]
            differ_only_by_case = set()     # type: Set[str]
            not_windows_friendly = set()    # type: Set[str]
            sha256 = {}                     # type: Dict[Tuple[int, int], str]
            paths = {}                      # type: Dict[Tuple[int, int], str]

            writer = gzip.open(os.path.join(temp, 'usr-mtree.txt.gz'), 'wt')

            writer.write('#mtree\n')
            writer.write('. type=dir\n')

            for member in Path(runtime).rglob("*"):
                relative_path = member.relative_to(runtime)
                try:
                    name = str(relative_path.relative_to('files'))
                except ValueError:
                    # Only paths under files/ are listed in the manifest
                    continue

                if not self.filename_is_windows_friendly(name):
                    not_windows_friendly.add(name)

                if name.lower() in lc_names:
                    differ_only_by_case.add(lc_names[name.lower()])
                    differ_only_by_case.add(name)
                else:
                    lc_names[name.lower()] = name

                fields = ['./' + self.octal_escape(name)]

                stat_info = member.lstat()

                if stat.S_ISREG(stat_info.st_mode):
                    fields.append('type=file')
                    fields.append('mode=%o' % (stat_info.st_mode & 0o777))

                    # With sub-second precision, note that some versions
                    # of mtree use the part after the dot as integer
                    # nanoseconds, so "1.234" is actually 1 sec + 234 ns,
                    # or what normal people would write as 1.000000234.
                    # To be compatible with both, we always show the time
                    # with 9 digits after the decimal point.
                    fields.append(f'time={stat_info.st_mtime:.9f}')

                    fields.append(f'size={stat_info.st_size}')
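                    # Identify files by (device, inode) so that hard links
                    # share a single SHA-256 computation and can be
                    # cross-referenced below.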
                    file_id = (stat_info.st_dev, stat_info.st_ino)
                    if stat_info.st_size > 0:
                        if file_id not in sha256:
                            # Hash the file contents in chunks
                            hasher = hashlib.sha256()

                            with open(member, 'rb') as f:
                                while True:
                                    blob = f.read(1024 * 1024)

                                    if not blob:
                                        break

                                    hasher.update(blob)

                            sha256[file_id] = hasher.hexdigest()
                        fields.append(f'sha256={sha256[file_id]}')

                    if stat_info.st_nlink > 1:
                        if file_id in paths:
                            writer.write(
                                '# hard link to {}\n'.format(
                                    self.octal_escape(paths[file_id]),
                                ),
                            )
                        else:
                            paths[file_id] = str(relative_path)

                elif stat.S_ISLNK(stat_info.st_mode):
                    fields.append('type=link')
                    fields.append(
                        f'link={self.octal_escape(os.readlink(member))}')
                elif stat.S_ISDIR(stat_info.st_mode):
                    fields.append('type=dir')
                else:
                    writer.write(
                        '# unknown file type: {}\n'.format(
                            self.octal_escape(name),
                        ),
                    )
                    continue

                writer.write(' '.join(fields) + '\n')

            if '.ref' not in lc_names:
                writer.write('./.ref type=file size=0 mode=644\n')

            if differ_only_by_case:
                writer.write('\n')
                writer.write('# Files whose names differ only by case:\n')

                for name in sorted(differ_only_by_case):
                    writer.write('# {}\n'.format(self.octal_escape(name)))

            if not_windows_friendly:
                writer.write('\n')
                writer.write('# Files whose names are not Windows-friendly:\n')

                for name in sorted(not_windows_friendly):
                    writer.write('# {}\n'.format(self.octal_escape(name)))

            # We need to close the gzip before copying it, otherwise we
            # will end up with a corrupted file
            writer.close()
            shutil.copy2(writer.name, runtime)

    def minimize_runtime(self, root: str) -> None:
        '''
        Remove files that pressure-vessel can reconstitute from the manifest.

        This is the equivalent of:

        find $root/files -type l -delete
        find $root/files -empty -delete

        Note that this needs to be done before ensure_ref(), otherwise
        it will delete files/.ref too.
        '''
        for (dirpath, dirnames, filenames) in os.walk(
            os.path.join(root, 'files'),
            topdown=False,
        ):
            for f in filenames + dirnames:
                path = os.path.join(dirpath, f)

                try:
                    statinfo = os.lstat(path)
                except FileNotFoundError:
                    continue

                if stat.S_ISLNK(statinfo.st_mode) or statinfo.st_size == 0:
                    os.remove(path)
            try:
                os.rmdir(dirpath)
            except OSError as e:
                if e.errno != errno.ENOTEMPTY:
                    raise


def main() -> None:
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    parser.add_argument(
        '--architecture', default='amd64,i386',
        help=(
            'Default dpkg architecture or comma-separated list of '
            'architectures'
        )
    )
    parser.add_argument(
        '--suite', default='',
        help=(
            'Default suite to use if none is specified'
        )
    )
    parser.add_argument(
        '--version', default='',
        help=(
            'Default version to use if none is specified'
        )
    )

    parser.add_argument(
        '--cache', default='.cache',
        help=(
            'Cache downloaded files that are not in --depot here'
        ),
    )
    parser.add_argument(
        '--credential-env',
        action='append',
        default=[],
        dest='credential_envs',
        help=(
            'Environment variable to be evaluated for login:password, '
            'or a pair of environment variables VAR1:VAR2 to be evaluated '
            'for login and password respectively'
        ),
    )
    parser.add_argument(
        '--credential-host',
        action='append',
        default=[],
        dest='credential_hosts',
        metavar='HOST',
        help=(
            'Use --credential-env when downloading from the given HOST '
            '(default: hostname of --images-uri)'
        ),
    )
    parser.add_argument(
        '--images-uri',
        default=DEFAULT_IMAGES_URI,
        metavar='URI',
        help=(
            'Download files from the given URI. '
            '"SUITE" will be replaced with the suite name.'
        ),
    )
    parser.add_argument(
        '--ssh-host', default='', metavar='HOST',
        help='Use ssh and rsync to download files from HOST',
    )
    parser.add_argument(
        '--ssh-path', default='', metavar='PATH',
        help=(
            'Use ssh and rsync to download files from PATH on HOST. '
            '"SUITE" will be replaced with the suite name.'
        ),
    )
    parser.add_argument(
        '--depot', default='depot',
        help=(
            'Download runtime into this existing directory'
        )
    )
    parser.add_argument(
        '--depot-version', default='',
        help=(
            'Set an overall version number for the depot contents'
        )
    )
    parser.add_argument(
        '--scripts-version', default='',
        help=(
            'Set a version number for the scripts from steam-runtime-tools'
        )
    )

    parser.add_argument(
        '--pressure-vessel-uri',
        default=DEFAULT_PRESSURE_VESSEL_URI,
        metavar='URI',
        help=(
            'Download pressure-vessel from a versioned subdirectory of URI'
        ),
    )
    parser.add_argument(
        '--pressure-vessel-ssh-host', default='', metavar='HOST',
        help=(
            'Use ssh and rsync to download pressure-vessel from HOST '
            '[default: same as --ssh-host]'
        ),
    )
    parser.add_argument(
        '--pressure-vessel-ssh-path', default='', metavar='PATH',
        help=(
            'Use ssh and rsync to download pressure-vessel from a versioned '
            'subdirectory of PATH on HOST'
        ),
    )
    parser.add_argument(
        '--pressure-vessel-version', default='', metavar='0.x.y|latest',
        help=(
            'Use this version of pressure-vessel from --pressure-vessel-uri '
            'or --pressure-vessel-ssh-path'
        )
    )
    parser.add_argument(
        '--pressure-vessel-archive', default='', metavar='PATH',
        help=(
            'Unpack pressure-vessel from the named archive'
        ),
    )
    parser.add_argument(
        '--pressure-vessel-from-runtime', default='', metavar='NAME',
        help=(
            'Get pressure-vessel from the named runtime (default "scout")'
        ),
    )
    parser.add_argument(
        '--pressure-vessel-from-runtime-json', default='', metavar='NAME',
        help=(
            'Get pressure-vessel from a separate runtime version given as a '
            'JSON object'
        ),
    )
    parser.add_argument(
        '--pressure-vessel', default='', metavar='NAME|PATH|DETAILS',
        dest='pressure_vessel_guess',
        help=(
            '--pressure-vessel-archive=ARCHIVE, '
            '--pressure-vessel-archive=DIRECTORY/pressure-vessel-bin.tar.gz, '
            '--pressure-vessel-from-runtime=NAME or '
            '--pressure-vessel-from-runtime-json=DETAILS, '
            'based on form of argument given '
            '(disambiguate with ./ if necessary)'
        ),
    )
    parser.add_argument(
        '--include-archives', action='store_true', default=False,
        help=(
            'Provide the runtime as an archive to be unpacked'
        )
    )
    parser.add_argument(
        '--no-include-archives', action='store_false', dest='include_archives',
        help=(
            'Do not provide the runtime as an archive to be unpacked '
            '[default]'
        )
    )
    parser.add_argument(
        '--include-sdk', default=False, action='store_true',
        help='Include a corresponding SDK (implies --include-sdk-*)',
    )
    parser.add_argument(
        '--include-sdk-debug', default=False, action='store_true',
        help='Include detached debug symbols from the corresponding SDK',
    )
    parser.add_argument(
        '--include-sdk-runtime', default=False, action='store_true',
        help='Include the corresponding SDK runtime',
    )
    parser.add_argument(
        '--include-sdk-sysroot', default=False, action='store_true',
        help='Include the corresponding SDK sysroot',
    )
    parser.add_argument(
        '--layered', default=False, action='store_true',
        help='Produce a layered runtime that runs scout on soldier',
    )
    parser.add_argument(
        '--minimize', action='store_true', default=False,
        help=(
            'Omit empty files, empty directories and symlinks from '
            'runtime content, requiring pressure-vessel to fill them in '
            'from the mtree manifest'
        )
    )
    parser.add_argument(
        '--no-minimize', action='store_false', dest='minimize',
        help=(
            'Include empty files, empty directories and symlinks in '
            'runtime content [default]'
        )
    )
    parser.add_argument(
        '--source-dir', default=str(HERE),
        help=(
            'Source directory for files to include in the depot'
        )
    )
    # Not actually used for anything at the moment, but kept for
    # CLI backwards-compat. We could potentially use it to select
    # depot configuration in steampipe/
    parser.add_argument(
        '--steam-app-id', default='',
        help='Set Steam app ID for the depot',
    )
    parser.add_argument(
        '--toolmanifest', default=False, action='store_true',
        help='Generate toolmanifest.vdf',
    )
    parser.add_argument(
        '--unpack-ld-library-path', metavar='PATH', default='',
        help=(
            'Get the steam-runtime.tar.xz from the same place as '
            'pressure-vessel and unpack it into the given PATH, '
            'for use in regression testing.'
        )
    )
    parser.add_argument(
        '--unpack-runtime', '--unpack-runtimes',
        action='store_true', default=True,
        help=(
            "Unpack the runtime into the --depot, for use with "
            "pressure-vessel's tests/containers.py. [default]"
        )
    )
    parser.add_argument(
        '--no-unpack-runtime', '--no-unpack-runtimes',
        action='store_false', dest='unpack_runtime',
        help=(
            "Don't unpack the runtime into the --depot"
        )
    )
    parser.add_argument(
        '--unpack-source', metavar='PACKAGE', action='append', default=[],
        dest='unpack_sources',
        help=(
            'Download and unpack the given source package from each runtime '
            'if it exists, for use in regression testing. May be repeated.'
        )
    )
    parser.add_argument(
        '--unpack-sources-into', metavar='PATH', default='.',
        help=(
            'Unpack any source packages specified by --unpack-source '
            'into PATH/RUNTIME/SOURCE (default: ./RUNTIME/SOURCE).'
        )
    )
    parser.add_argument(
        '--versioned-directories', action='store_true', default=True,
        help=(
            'Include version number in unpacked runtime directories '
            '[default]'
        )
    )
    parser.add_argument(
        '--no-versioned-directories', action='store_false',
        dest='versioned_directories',
        help=(
            'Do not include version number in unpacked runtime directories'
        )
    )
    parser.add_argument(
        'runtime', default='',
        metavar='NAME[="DETAILS"]',
        help=(
            'Runtime to download, in the form NAME or NAME="DETAILS". '
            'DETAILS is a JSON object containing something like '
            '{"path": "../prebuilt", "suite: "scout", "version": "latest", '
            '"architecture": "amd64,i386"}, or the '
            'path to a file containing the same JSON object. All JSON '
            'fields are optional.'
        ),
    )

    try:
        args = parser.parse_args()

        args.include_sdk_debug = args.include_sdk_debug or args.include_sdk
        args.include_sdk_runtime = args.include_sdk_runtime or args.include_sdk
        args.include_sdk_sysroot = args.include_sdk_sysroot or args.include_sdk

        Main(**vars(args)).run()
    except InvocationError as e:
        parser.error(str(e))


if __name__ == '__main__':
    main()