        if self.unpack_ld_library_path:
            logger.info(
                'Will download LD_LIBRARY_PATH Steam Runtime from same place '
                'as pressure-vessel into %r',
                self.unpack_ld_library_path)
            self.download_scout_tarball(pressure_vessel_runtime)
        if self.unpack_sources:
            logger.info(
                'Will download %s source code into %r',
                ', '.join(self.unpack_sources), self.unpack_sources_into)
            os.makedirs(self.unpack_sources_into, exist_ok=True)

            os.makedirs(
                os.path.join(self.unpack_sources_into, self.runtime.name),
                exist_ok=True,
            )
        for runtime in (self.runtime,):     # too much to reindent right now
            if runtime.path:
                logger.info(
                    'Using runtime from local directory %r', runtime.path)
                self.use_local_runtime(runtime)
            else:
                logger.info(
                    'Downloading runtime from %s', runtime.name)
                self.download_runtime(runtime)
            component_version = ComponentVersion(runtime.name)

            if runtime.path:
                with open(
                    os.path.join(runtime.path, runtime.build_id_file), 'r',
                ) as text_reader:
                    version = text_reader.read().strip()
            else:
                version = runtime.pinned_version or ''
                assert version

            if self.include_archives:
                runtime_files = set(
                    runtime.get_archives(
                        include_sdk_debug=self.include_sdk_debug,
                        include_sdk_runtime=self.include_sdk_runtime,
                        include_sdk_sysroot=self.include_sdk_sysroot,
                    )
                )
            else:
                runtime_files = set()

            # Assumed attribute names: unpack_runtime and
            # versioned_directories
            if self.unpack_runtime:
                if self.versioned_directories:
                    subdir = '{}_platform_{}'.format(runtime.name, version)
                else:
                    subdir = runtime.name

                dest = os.path.join(self.depot, subdir)
                runtime_files.add(subdir + '/')

                with suppress(FileNotFoundError):
                    shutil.rmtree(dest)

                os.makedirs(dest, exist_ok=True)
                argv = [
                    'tar',
                    '-C', dest,
                    '-xf',
                    os.path.join(self.cache, runtime.tarball),
                ]
                logger.info('%r', argv)
                subprocess.run(argv, check=True)
                self.prune_runtime(Path(dest))

                if self.minimize:
                    self.minimize_runtime(dest)

                if self.include_sdk_runtime:
                    if self.versioned_directories:
                        sdk_subdir = '{}_sdk_{}'.format(runtime.name, version)
                    else:
                        sdk_subdir = '{}_sdk'.format(runtime.name)

                    dest = os.path.join(self.depot, sdk_subdir)
                    runtime_files.add(sdk_subdir + '/')

                    with suppress(FileNotFoundError):
                        shutil.rmtree(os.path.join(dest, 'files'))

                    with suppress(FileNotFoundError):
                        os.remove(os.path.join(dest, 'metadata'))

                    os.makedirs(
                        os.path.join(dest, 'files', 'lib', 'debug'),
                        exist_ok=True,
                    )
                    argv = [
                        'tar',
                        '-C', dest,
                        '-xf', os.path.join(self.cache, runtime.sdk_tarball),
                    ]
                    logger.info('%r', argv)
                    subprocess.run(argv, check=True)
                    self.prune_runtime(Path(dest))

                    if self.minimize:
                        self.minimize_runtime(dest)

                    if self.include_sdk_debug:
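                        # GNU tar --transform below strips an optional
                        # leading './' plus the 'files' component, so the
                        # debug symbols land directly under files/lib/debug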
                        argv = [
                            'tar',
                            '-C', os.path.join(dest, 'files', 'lib', 'debug'),
                            '--transform', r's,^\(\./\)\?files\(/\|$\),,',
                            '-xf',
                            os.path.join(self.cache, runtime.debug_tarball),
                        ]
                        logger.info('%r', argv)
                        subprocess.run(argv, check=True)

                if self.include_sdk_sysroot:
                    if self.versioned_directories:
                        sysroot_subdir = '{}_sysroot_{}'.format(
                            runtime.name, version,
                        )
                    else:
                        sysroot_subdir = '{}_sysroot'.format(runtime.name)

                    sysroot = os.path.join(self.depot, sysroot_subdir)
                    runtime_files.add(sysroot_subdir + '/')

                    with suppress(FileNotFoundError):
                        shutil.rmtree(sysroot)

                    os.makedirs(os.path.join(sysroot, 'files'), exist_ok=True)
                    argv = [
                        'tar',
                        '-C', os.path.join(sysroot, 'files'),
                        '--exclude', 'dev/*',
                        '-xf',
                        os.path.join(self.cache, runtime.sysroot_tarball),
                    ]
                    logger.info('%r', argv)
                    subprocess.run(argv, check=True)

                    os.makedirs(
                        os.path.join(
                            sysroot, 'files', 'usr', 'lib', 'debug',
                        ),
                        exist_ok=True,
                    )

                    if self.include_sdk_debug:
                        if self.include_sdk_sysroot:
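                            # 'cp -al' hard-links the already-unpacked debug
                            # tree into the sysroot instead of copying it, so
                            # the two locations share disk space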
                            argv = [
                                'cp',
                                '-al',
                                os.path.join(dest, 'files', 'lib', 'debug'),
                                os.path.join(sysroot, 'files', 'usr', 'lib'),
                            ]
                            logger.info('%r', argv)
                            subprocess.run(argv, check=True)
                        else:
                            argv = [
                                'tar',
                                '-C', os.path.join(
                                    sysroot, 'files', 'usr', 'lib', 'debug'
                                ),
                                '--transform', r's,^\(\./\)\?files\(/\|$\),,',
                                '-xf',
                                os.path.join(
                                    self.cache, runtime.debug_tarball,
                                ),
                            ]
                            logger.info('%r', argv)
                            subprocess.run(argv, check=True)
            with open(
                os.path.join(self.depot, 'run-in-' + runtime.name), 'w'
            ) as writer:
                if self.unpack_runtime:
                    writer.write(
                        RUN_IN_DIR_SOURCE.format(
                            escaped_dir=shlex.quote(subdir),
                            source_for_generated_file=(
                                'Generated file, do not edit'
                            ),
                        )
                    )
                else:
                    writer.write(
                        RUN_IN_ARCHIVE_SOURCE.format(
                            escaped_arch=shlex.quote(runtime.architecture),
                            escaped_name=shlex.quote(runtime.name),
                            escaped_runtime=shlex.quote(runtime.platform),
                            escaped_suite=shlex.quote(runtime.suite),
                            source_for_generated_file=(
                                'Generated file, do not edit'
                            ),
                        )
                    )
            os.chmod(os.path.join(self.depot, 'run-in-' + runtime.name), 0o755)

            comment = ', '.join(sorted(runtime_files))

            if runtime.path and not runtime.official:
                comment += ' (from local build)'

            component_version.version = version
            component_version.runtime = runtime.suite
            component_version.runtime_version = version
            component_version.comment = comment
            self.versions.append(component_version)

        if self.toolmanifest:
            import vdf                          # noqa

            with open(
                os.path.join(self.depot, 'toolmanifest.vdf'), 'w'
            ) as writer:
                writer.write('// Generated file, do not edit\n')
                words = [
                    '/_v2-entry-point',
                    '--verb=%verb%',
                    '--',
                ]
                content: Dict[str, Any] = dict(
                    manifest=dict(
                        commandline=' '.join(words),
                        version='2',
                        use_tool_subprocess_reaper='1',
                    ),
                )

                if runtime.suite != 'scout':
                    content['manifest']['unlisted'] = '1'

                content['manifest']['compatmanager_layer_name'] = (
                    'container-runtime'
                )

                vdf.dump(content, writer, pretty=True, escaped=True)

            shutil.copy2(
                os.path.join(self.depot, 'run-in-' + runtime.name),
                os.path.join(self.depot, 'run'),
            )
            os.chmod(os.path.join(self.depot, 'run'), 0o755)

        self.write_component_versions()

    def write_component_versions(self) -> None:
        try:
            with subprocess.Popen(
                [
                    'git', 'describe',
                    '--always',
                    '--dirty',
                    '--long',
                ],
                cwd=os.path.dirname(__file__),
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ) as describe:
                stdout = describe.stdout
                assert stdout is not None
                version = stdout.read().strip()
                # Deliberately ignoring exit status:
                # if git is missing or old we'll use 'unknown'
        except (OSError, subprocess.SubprocessError):
            version = ''

        try:
            with open(HERE / '.tarball-version', 'r') as reader:
                version = reader.read().strip()
        except OSError:
            pass

        if self.scripts_version:
            version = self.scripts_version

        if self.depot_version:
            component_version = ComponentVersion('depot', sort_weight=-1)
            component_version.version = self.depot_version
            component_version.comment = 'Overall version number'
            self.versions.append(component_version)

        component_version = ComponentVersion('scripts')
        component_version.version = version or 'unknown'
        component_version.comment = 'from steam-runtime-tools'
        self.versions.append(component_version)

        with open(os.path.join(self.depot, 'VERSIONS.txt'), 'w') as writer:
            writer.write(
                '#Name\tVersion\t\tRuntime\tRuntime_Version\tComment\n'
            )
            for entry in sorted(self.versions, key=lambda v: v.to_sort_key()):
                logger.info('Component version: %s', entry)
                writer.write(entry.to_tsv())

    def use_local_pressure_vessel(self, path: str = '.') -> None:
        pv_dir = os.path.join(self.depot, 'pressure-vessel')
        os.makedirs(pv_dir, exist_ok=True)
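        # --strip-components=1 drops the tarball's top-level directory so
        # its contents land directly in depot/pressure-vessel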
        argv = ['tar', '-C', pv_dir, '--strip-components=1', '-xf']

        if os.path.isfile(path):
            argv.append(path)
        else:
            argv.append(os.path.join(path, 'pressure-vessel-bin.tar.gz'))

        logger.info('%r', argv)
        subprocess.run(argv, check=True)

    def download_pressure_vessel_standalone(
        self,
        version: str,
    ) -> str:
        pv = PressureVesselRelease(
            cache=self.cache,
            ssh_host=self.pressure_vessel_ssh_host,
            ssh_path=self.pressure_vessel_ssh_path,
            uri=self.pressure_vessel_uri,
            version=version,
        )
        pinned = pv.pin_version(self.opener)
        self.use_local_pressure_vessel(
            pv.fetch('pressure-vessel-bin.tar.gz', self.opener, pinned)
        )
        return pinned

    def download_pressure_vessel_from_runtime(self, runtime: Runtime) -> str:
        filename = 'pressure-vessel-bin.tar.gz'
        pinned = runtime.pin_version(self.opener)
        downloaded = runtime.fetch(
            filename,
            self.opener,
        )
        os.makedirs(self.depot, exist_ok=True)
        subprocess.run(
            [
                'tar', '-C', self.depot, '-xf', downloaded,
            ],
            check=True,
        )

        return pinned

    def use_local_runtime(self, runtime: Runtime) -> None:
        for basename in runtime.get_archives(
            include_sdk_debug=self.include_sdk_debug,
            include_sdk_runtime=self.include_sdk_runtime,
            include_sdk_sysroot=self.include_sdk_sysroot,
        ):
            src = os.path.join(runtime.path, basename)
            dest = os.path.join(self.cache, basename)
            logger.info('Hard-linking local runtime %r to %r', src, dest)

            with suppress(FileNotFoundError):
                os.unlink(dest)

            os.link(src, dest)

            if self.include_archives:
                dest = os.path.join(self.depot, basename)
                logger.info('Hard-linking local runtime %r to %r', src, dest)

                with suppress(FileNotFoundError):
                    os.unlink(dest)
                os.link(src, dest)

        if self.include_archives:
            with open(
                os.path.join(self.depot, runtime.build_id_file), 'w',
            ) as writer:
                writer.write(f'{runtime.version}\n')

            if self.include_sdk_runtime or self.include_sdk_sysroot:
                with open(
                    os.path.join(self.depot, runtime.sdk_build_id_file), 'w',
                ) as writer:
                    writer.write(f'{runtime.version}\n')

        if self.unpack_sources:
            with open(
                os.path.join(runtime.path, runtime.sources), 'rb',
            ) as reader:
                for stanza in Sources.iter_paragraphs(
                    sequence=reader,
                    use_apt_pkg=True,
                ):
                    if stanza['package'] in self.unpack_sources:
                        for f in stanza['files']:
                            name = f['name']

                            if name.endswith('.dsc'):
                                dest = os.path.join(
                                    self.unpack_sources_into,
                                    runtime.name,
                                    stanza['package'],
                                )

                                with suppress(FileNotFoundError):
                                    logger.info('Removing %r', dest)
                                    shutil.rmtree(dest)

                                # assumed: unpack the .dsc with dpkg-source -x
                                subprocess.run(
                                    [
                                        'dpkg-source',
                                        '-x',
                                        os.path.join(
                                            runtime.path,
                                            'sources',
                                            f['name'],
                                        ),
                                        dest,
                                    ],
                                    check=True,
                                )

    def download_runtime(self, runtime: Runtime) -> None:
        """
        Download a pre-prepared Platform from a previous container
        runtime build.
        """

        pinned = runtime.pin_version(self.opener)
        for basename in runtime.get_archives(
            include_sdk_debug=self.include_sdk_debug,
            include_sdk_runtime=self.include_sdk_runtime,
            include_sdk_sysroot=self.include_sdk_sysroot,
        ):
            downloaded = runtime.fetch(basename, self.opener)

            if self.include_archives:
                dest = os.path.join(self.depot, basename)

                with suppress(FileNotFoundError):
                    os.unlink(dest)

                os.link(downloaded, dest)

        if self.include_archives:
            with open(
                os.path.join(self.depot, runtime.build_id_file), 'w',
            ) as writer:
                writer.write(f'{pinned}\n')

            if self.include_sdk_runtime or self.include_sdk_sysroot:
                with open(
                    os.path.join(self.depot, runtime.sdk_build_id_file), 'w',
                ) as writer:
                    writer.write(f'{pinned}\n')

        if self.unpack_sources:
            with tempfile.TemporaryDirectory(prefix='populate-depot.') as tmp:
                want = set(self.unpack_sources)
                downloaded = runtime.fetch(
                    runtime.sources,
                    self.opener,
                )

                with open(downloaded, 'rb') as reader:
                    for stanza in Sources.iter_paragraphs(
                        sequence=reader,
                        use_apt_pkg=True,
                    ):
                        if stanza['package'] in self.unpack_sources:
                            logger.info(
                                'Found %s in %s',
                                stanza['package'], runtime.name,
                            )
                            want.discard(stanza['package'])
                            os.makedirs(
                                os.path.join(self.cache or tmp, 'sources'),
                                exist_ok=True,
                            )
                            file_path: Dict[str, str] = {}
                            for f in stanza['files']:
                                name = f['name']
                                file_path[name] = runtime.fetch(
                                    os.path.join('sources', name),
                                    self.opener,
                                )

                            for f in stanza['files']:
                                name = f['name']

                                if name.endswith('.dsc'):
                                    dest = os.path.join(
                                        self.unpack_sources_into,
                                        runtime.name,
                                        stanza['package'],
                                    )

                                    with suppress(FileNotFoundError):
                                        logger.info('Removing %r', dest)
                                        shutil.rmtree(dest)

                                    # assumed: dpkg-source -x, as above
                                    subprocess.run(
                                        [
                                            'dpkg-source',
                                            '-x',
                                            file_path[name],
                                            dest,
                                        ],
                                        check=True,
                                    )

                if want:
                    logger.warning(
                        'Did not find source package(s) %s in %s',
                        ', '.join(want), runtime.name,
                    )

    def download_scout_tarball(self, runtime: Runtime) -> None:
        """
        Download a pre-prepared LD_LIBRARY_PATH Steam Runtime from a
        previous scout build.
        """
        filename = 'steam-runtime.tar.xz'

        pinned = runtime.pin_version(self.opener)
        logger.info('Downloading steam-runtime build %s', pinned)
        os.makedirs(self.unpack_ld_library_path, exist_ok=True)

        downloaded = runtime.fetch(
            filename,
            self.opener,
        )
        subprocess.run(
            [
                'tar', '-C', self.unpack_ld_library_path, '-xf',
                downloaded,
            ],
            check=True,
        )

    def octal_escape_char(self, match: 're.Match') -> str:
        # Escape each byte of the UTF-8 (surrogateescape) encoding as \ooo
        ret: List[str] = []

        for byte in match.group(0).encode('utf-8', 'surrogateescape'):
            ret.append('\\%03o' % byte)

        return ''.join(ret)

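    # Characters outside this safe set are written as \ooo octal escapes,
    # which is how mtree(5) quotes unusual filenames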
    _NEEDS_OCTAL_ESCAPE = re.compile(r'[^-A-Za-z0-9+,./:@_]')

    def octal_escape(self, s: str) -> str:
        return self._NEEDS_OCTAL_ESCAPE.sub(self.octal_escape_char, s)

    def filename_is_windows_friendly(self, s: str) -> bool:
        for c in s:
            # This is the set of characters that are reserved in Windows
            # filenames, excluding '/' which obviously we're fine with
            # using as a directory separator.
            if c in r'<>:"\|?*':
                return False

            if c >= '\uDC80' and c <= '\uDCFF':
                # surrogate escape, not Unicode
                return False

        return True

    def write_mtree(
        self,
        top: Path,
        writer: TextIO,
        *,
        preserve_mode: bool = True,
        preserve_time: bool = True,
        skip_runtime_files: bool = False
    ) -> Dict[str, str]:
        lc_names: Dict[str, str] = {}
        differ_only_by_case: Set[str] = set()
        not_windows_friendly: Set[str] = set()
        sha256: Dict[Tuple[int, int], str] = {}
        paths: Dict[Tuple[int, int], str] = {}

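        # '#mtree' on the first line marks this as mtree(5) format, as
        # understood by bsdtar/libarchive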
        writer.write('#mtree\n')
        writer.write('. type=dir\n')

        for dirpath, dirnames, filenames in os.walk(top):
            for base in sorted(dirnames + filenames):
                member = Path(dirpath) / base
                name = str(member.relative_to(top))

                if name == 'steampipe' and base in dirnames:
                    dirnames.remove(base)
                    continue

                if (
                    skip_runtime_files
                    and base == 'files'
                    and base in dirnames
                    and (member.parent / 'usr-mtree.txt.gz').exists()
                ):
                    escaped = self.octal_escape(name)
                    writer.write(f'./{escaped} type=dir ignore\n')
                    dirnames.remove(base)
                    continue

                if not self.filename_is_windows_friendly(name):
                    not_windows_friendly.add(name)

                if name.lower() in lc_names:
                    differ_only_by_case.add(lc_names[name.lower()])
                    differ_only_by_case.add(name)
                else:
                    lc_names[name.lower()] = name

                fields = ['./' + self.octal_escape(name)]
                # lstat so that symlinks are described rather than followed
                stat_info = member.lstat()

                if stat.S_ISREG(stat_info.st_mode):
                    fields.append('type=file')

                    if preserve_mode:
                        fields.append('mode=%o' % (stat_info.st_mode & 0o777))
                    elif stat_info.st_mode & 0o111:
                        fields.append('mode=755')

                    if preserve_time:
                        # With sub-second precision, note that some versions
                        # of mtree use the part after the dot as integer
                        # nanoseconds, so "1.234" is actually 1 sec + 234 ns,
                        # or what normal people would write as 1.000000234.
                        # To be compatible with both, we always show the time
                        # with 9 digits after the decimal point, unless it's
                        # exactly an integer.
                        if stat_info.st_mtime == int(stat_info.st_mtime):
                            fields.append(f'time={stat_info.st_mtime:.1f}')
                        else:
                            fields.append(f'time={stat_info.st_mtime:.9f}')

                    fields.append(f'size={stat_info.st_size}')
                    file_id = (stat_info.st_dev, stat_info.st_ino)

                    if stat_info.st_size > 0:
                        if file_id not in sha256:
                            # Hash the contents in 1 MiB chunks (hashlib
                            # import assumed at module level)
                            hasher = hashlib.sha256()

                            with open(member, 'rb') as f:
                                for blob in iter(lambda: f.read(1 << 20), b''):
                                    hasher.update(blob)

                            sha256[file_id] = hasher.hexdigest()

                        fields.append(f'sha256={sha256[file_id]}')

                    if stat_info.st_nlink > 1:
                        if file_id in paths:
                            writer.write(
                                '# hard link to {}\n'.format(
                                    self.octal_escape(paths[file_id]),
                                ),
                            )
                        else:
                            paths[file_id] = str(name)

                elif stat.S_ISLNK(stat_info.st_mode):
                    fields.append('type=link')
                    fields.append(
                        f'link={self.octal_escape(os.readlink(member))}')
                elif stat.S_ISDIR(stat_info.st_mode):
                    fields.append('type=dir')
                else:
                else:
                    writer.write(
                        '# unknown file type: {}\n'.format(
                            self.octal_escape(name),
                        ),
                    )
                    continue

                writer.write(' '.join(fields) + '\n')

        if differ_only_by_case:
            writer.write('\n')
            writer.write('# Files whose names differ only by case:\n')

            for name in sorted(differ_only_by_case):
                writer.write('# {}\n'.format(self.octal_escape(name)))

        if not_windows_friendly:
            writer.write('\n')
            writer.write('# Files whose names are not Windows-friendly:\n')

            for name in sorted(not_windows_friendly):
                writer.write('# {}\n'.format(self.octal_escape(name)))

        return lc_names

    def write_lookaside(self, runtime: str) -> None:
        with tempfile.TemporaryDirectory(prefix='slr-mtree-') as temp:
            writer = gzip.open(os.path.join(temp, 'usr-mtree.txt.gz'), 'wt')

            lc_names = self.write_mtree(Path(runtime) / 'files', writer)

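            # Assumption: the runtime is expected to contain files/.ref
            # (used by pressure-vessel as a lock file), so add a zero-byte
            # placeholder entry if it is missing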
            if '.ref' not in lc_names:
                writer.write('./.ref type=file size=0 mode=644\n')

            # We need to close the gzip before copying it, otherwise we
            # will end up with a corrupted file
            writer.close()
            shutil.copy2(writer.name, runtime)

    def write_top_level_mtree(self) -> None:
        with tempfile.TemporaryDirectory(prefix='slr-mtree-') as temp:
            writer = gzip.open(os.path.join(temp, 'mtree.txt.gz'), 'wt')

            lc_names = self.write_mtree(
                Path(self.depot),
                writer,
                preserve_mode=False,
                preserve_time=False,
                skip_runtime_files=True,
            )

            if '.ref' not in lc_names:
                writer.write('./.ref type=file size=0 optional\n')
            if (
                self.steam_app_id
                and self.steam_depot_id
                and 'steampipe' not in lc_names
            ):
                writer.write('./steampipe type=dir ignore optional\n')

            if 'var' not in lc_names:
                writer.write('./var type=dir ignore optional\n')

            writer.write('./mtree.txt.gz type=file\n')
            writer.close()
            shutil.copy2(writer.name, self.depot)

    def minimize_runtime(self, root: str) -> None:
        '''
        Remove files that pressure-vessel can reconstitute from the manifest.

        This is the equivalent of:

        find $root/files -type l -delete
        find $root/files -empty -delete

        Note that this needs to be done before ensure_ref(), otherwise
        it will delete files/.ref too.
        '''
        for (dirpath, dirnames, filenames) in os.walk(
            os.path.join(root, 'files'),
            topdown=False,
        ):
            for f in filenames + dirnames:
                path = os.path.join(dirpath, f)

                try:
                    statinfo = os.lstat(path)
                except FileNotFoundError:
                    continue

                if stat.S_ISLNK(statinfo.st_mode) or statinfo.st_size == 0:
                    os.remove(path)
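            # Prune directories that have just become empty; anything
            # still non-empty is left alone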
            try:
                os.rmdir(dirpath)
            except OSError as e:
                if e.errno != errno.ENOTEMPTY:
                    raise

    def write_steampipe_config(self) -> None:
        import vdf                          # noqa
        from vdf.vdict import VDFDict       # noqa

        assert self.steam_app_id
        assert self.steam_depot_id

        depot = Path(self.depot)
        steampipe = depot / 'steampipe'
        steampipe.mkdir(exist_ok=True)
        app_vdf = f'app_build_{self.steam_app_id}.vdf'
        depot_vdf = f'depot_build_{self.steam_depot_id}.vdf'

        content = dict(
            appbuild=dict(
                appid=self.steam_app_id,
                buildoutput='output',
                depots={self.steam_depot_id: depot_vdf},
            )
        )

        with open(steampipe / app_vdf, 'w') as writer:
            vdf.dump(content, writer, pretty=True, escaped=True)

        file_mappings: List[Tuple[str, Any]] = []

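        # Map each top-level directory into the depot recursively, and each
        # top-level file individually; steampipe/ and var/ are skipped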
        for child in sorted(depot.iterdir()):
            if child.name in ('steampipe', 'var'):
                continue
            elif child.is_dir():
                file_mappings.append(
                    (
                        'FileMapping', dict(
                            LocalPath=f'../{child.name}/*',
                            DepotPath=f'{child.name}/',
                            recursive='1',
                        ),
                    )
                )
            else:
                file_mappings.append(
                    (
                        'FileMapping', dict(
                            LocalPath='../' + str(child.relative_to(depot)),
                            DepotPath='.',
                        ),
                    )
                )

        content = dict(
            DepotBuildConfig=VDFDict(
                [('DepotID', self.steam_depot_id)] + file_mappings,
            )
        )

        with open(steampipe / depot_vdf, 'w') as writer:
            vdf.dump(content, writer, pretty=True, escaped=True)

    def do_depot_archive(self, name: str) -> None:
        if name.endswith('.tar.gz'):
            compress_command = ['pigz', '--fast', '-c', '-n', '--rsyncable']
            artifact_prefix = name[:-len('.tar.gz')]
        elif name.endswith('.tar.xz'):
            if self.fast:
                compress_command = ['xz', '-0']
            else:
                compress_command = ['xz']

            artifact_prefix = name[:-len('.tar.xz')]
        else:
            raise InvocationError(f'Unknown archive format: {name}')

        stem = Path(artifact_prefix).name

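        # Stream a GNU-format tar archive through an external compressor
        # (pigz or xz) rather than compressing in-process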
        with open(
            name, 'wb'
        ) as archive_writer, subprocess.Popen(
            compress_command,
            stdin=subprocess.PIPE,
            stdout=archive_writer,
        ) as compressor, tarfile.open(
            name,
            mode='w|',
            format=tarfile.GNU_FORMAT,
            fileobj=compressor.stdin,
        ) as archiver:
            members = []
            depot = Path(self.depot)

            for dir_path, dirs, files in os.walk(
                depot,
                topdown=True,
                followlinks=False,
            ):
                rel_dir = Path(dir_path).relative_to(depot)

                if rel_dir == Path('.'):
                    for exclude in ('var',):
                        try:
                            dirs.remove(exclude)
                        except ValueError:
                            pass

                for item in dirs + files:
                    members.append(rel_dir / item)

            root = tarfile.TarInfo(stem)
            root.size = 0
            root.type = tarfile.DIRTYPE
            root = self.normalize_tar_entry(root)
            archiver.addfile(root)

            for member in sorted(members):
                archiver.add(
                    str(depot / member),
                    arcname=f'{stem}/{member}',
                    recursive=False,
                    filter=self.normalize_tar_entry,
                )

        if not self.layered:
            with open(
                HERE / 'SteamLinuxRuntime_whatever.sh.in'
            ) as reader, open(
                artifact_prefix + '.sh', 'w'
            ) as writer:
                for line in reader:
                    writer.write(line.replace('@RUNTIME@', stem))

            shutil.copy(
                depot / 'VERSIONS.txt',
                artifact_prefix + '.VERSIONS.txt',
            )
            os.chmod(artifact_prefix + '.VERSIONS.txt', 0o644)
            os.chmod(artifact_prefix + '.sh', 0o755)

    def normalize_tar_entry(self, entry: tarfile.TarInfo) -> tarfile.TarInfo:
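        # Normalize ownership, permissions and timestamps so repeated
        # builds of identical content produce identical archives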
        entry.uid = 65534
        entry.gid = 65534

        if entry.mtime > self.reference_timestamp:
            entry.mtime = self.reference_timestamp

        if (entry.mode & 0o111) != 0 or entry.isdir():
            entry.mode = 0o755
        else:
            entry.mode = 0o644

        entry.uname = 'nobody'
        entry.gname = 'nogroup'

        return entry


def main() -> None:
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    parser.add_argument(
        '--architecture', default='amd64,i386',
        help=(
            'Default dpkg architecture or comma-separated list of '
            'architectures'
        )
    )
    parser.add_argument(
        '--suite', default='',
        help=(
            'Default suite to use if none is specified'
        )
    )
    parser.add_argument(
        '--version', default='',
        help=(
            'Default version to use if none is specified'
        )
    )

    parser.add_argument(
        '--cache', default='.cache',
        help=(
            'Cache downloaded files that are not in --depot here'
        ),
    )
    parser.add_argument(
        '--credential-env',
        action='append',
        default=[],
        dest='credential_envs',
        help=(
            'Environment variable to be evaluated for login:password, '
            'or a pair of environment variables VAR1:VAR2 to be evaluated '
            'for login and password respectively'
        ),
    )
    parser.add_argument(
        '--credential-host',
        action='append',
        default=[],
        dest='credential_hosts',
        metavar='HOST',
        help=(
            'Use --credential-env when downloading from the given HOST '
            '(default: hostname of --images-uri)'
        ),
    )
    parser.add_argument(
        '--images-uri',
        default=DEFAULT_IMAGES_URI,
        metavar='URI',
        help=(