From c93cf32cb9dd51683bc5c802f438743bd7529d31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 14:10:25 +0000 Subject: [PATCH] Import Debian package fixing logic. --- README.md | 3 + ognibuild/debian/build.py | 184 ++++ ognibuild/debian/fix_build.py | 1200 ++++++++++++++++++++++ ognibuild/dist.py | 524 ++++++++++ ognibuild/tests/__init__.py | 2 + ognibuild/tests/test_debian_build.py | 108 ++ ognibuild/tests/test_debian_fix_build.py | 201 ++++ setup.cfg | 1 + setup.py | 9 +- 9 files changed, 2230 insertions(+), 2 deletions(-) create mode 100644 ognibuild/debian/build.py create mode 100644 ognibuild/debian/fix_build.py create mode 100644 ognibuild/dist.py create mode 100644 ognibuild/tests/test_debian_build.py create mode 100644 ognibuild/tests/test_debian_fix_build.py diff --git a/README.md b/README.md index 20729da..30d9861 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,9 @@ Ognibuild has a number of subcommands: * ``ogni install`` - install the package * ``ogni test`` - run the testsuite in the source directory +It also includes a subcommand that can fix up the build dependencies +for Debian packages, called deb-fix-build. + License ------- diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py new file mode 100644 index 0000000..5445278 --- /dev/null +++ b/ognibuild/debian/build.py @@ -0,0 +1,184 @@ +#!/usr/bin/python +# Copyright (C) 2018 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +__all__ = [ + 'changes_filename', + 'get_build_architecture', + 'add_dummy_changelog_entry', + 'build', + 'SbuildFailure', +] + +from datetime import datetime +import logging +import os +import re +import subprocess +import sys + +from debian.changelog import Changelog +from debmutate.changelog import get_maintainer, format_datetime + +from breezy import osutils +from breezy.plugins.debian.util import ( + changes_filename, + get_build_architecture, + ) +from breezy.mutabletree import MutableTree +from silver_platter.debian import ( + BuildFailedError, + DEFAULT_BUILDER, + ) + +from buildlog_consultant.sbuild import ( + worker_failure_from_sbuild_log, + SbuildFailure, + ) + + +class MissingChangesFile(Exception): + """Expected changes file was not written.""" + + def __init__(self, filename): + self.filename = filename + + +def add_dummy_changelog_entry( + tree: MutableTree, subpath: str, suffix: str, suite: str, + message: str, timestamp=None, maintainer=None): + """Add a dummy changelog entry to a package. 
+ + Args: + directory: Directory to run in + suffix: Suffix for the version + suite: Debian suite + message: Changelog message + """ + def add_suffix(v, suffix): + m = re.fullmatch('(.*)(' + re.escape(suffix) + ')([0-9]+)', v,) + if m: + return (m.group(1) + m.group(2) + '%d' % (int(m.group(3)) + 1)) + else: + return v + suffix + '1' + + path = os.path.join(subpath, 'debian', 'changelog') + if maintainer is None: + maintainer = get_maintainer() + if timestamp is None: + timestamp = datetime.now() + with tree.get_file(path) as f: + cl = Changelog() + cl.parse_changelog( + f, max_blocks=None, allow_empty_author=True, strict=False) + version = cl[0].version + if version.debian_revision: + version.debian_revision = add_suffix( + version.debian_revision, suffix) + else: + version.upstream_version = add_suffix( + version.upstream_version, suffix) + cl.new_block( + package=cl[0].package, + version=version, + urgency='low', + distributions=suite, + author='%s <%s>' % maintainer, + date=format_datetime(timestamp), + changes=['', ' * ' + message, '']) + cl_str = cl._format(allow_missing_author=True) + tree.put_file_bytes_non_atomic(path, cl_str.encode(cl._encoding)) + + +def get_latest_changelog_version(local_tree, subpath=''): + path = osutils.pathjoin(subpath, 'debian/changelog') + with local_tree.get_file(path) as f: + cl = Changelog(f, max_blocks=1) + return cl.package, cl.version + + +def build(local_tree, outf, build_command=DEFAULT_BUILDER, result_dir=None, + distribution=None, subpath='', source_date_epoch=None): + args = [sys.executable, '-m', 'breezy', 'builddeb', + '--guess-upstream-branch-url', '--builder=%s' % build_command] + if result_dir: + args.append('--result-dir=%s' % result_dir) + outf.write('Running %r\n' % (build_command, )) + outf.flush() + env = dict(os.environ.items()) + if distribution is not None: + env['DISTRIBUTION'] = distribution + if source_date_epoch is not None: + env['SOURCE_DATE_EPOCH'] = '%d' % source_date_epoch + logging.info('Building debian packages, running %r.', build_command) + try: + subprocess.check_call( + args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, + env=env) + except subprocess.CalledProcessError: + raise BuildFailedError() + + +def build_once( + local_tree, build_suite, output_directory, build_command, + subpath='', source_date_epoch=None): + build_log_path = os.path.join(output_directory, 'build.log') + try: + with open(build_log_path, 'w') as f: + build(local_tree, outf=f, build_command=build_command, + result_dir=output_directory, distribution=build_suite, + subpath=subpath, source_date_epoch=source_date_epoch) + except BuildFailedError: + with open(build_log_path, 'rb') as f: + raise worker_failure_from_sbuild_log(f) + + (cl_package, cl_version) = get_latest_changelog_version( + local_tree, subpath) + changes_name = changes_filename( + cl_package, cl_version, get_build_architecture()) + changes_path = os.path.join(output_directory, changes_name) + if not os.path.exists(changes_path): + raise MissingChangesFile(changes_name) + return (changes_name, cl_version) + + +def gbp_dch(path): + subprocess.check_call(['gbp', 'dch'], cwd=path) + + +def attempt_build( + local_tree, suffix, build_suite, output_directory, build_command, + build_changelog_entry='Build for debian-janitor apt repository.', + subpath='', source_date_epoch=None): + """Attempt a build, with a custom distribution set. + + Args: + local_tree: Tree to build in + suffix: Suffix to add to version string + build_suite: Name of suite (i.e. 
distribution) to build for + output_directory: Directory to write output to + build_command: Build command to build package + build_changelog_entry: Changelog entry to use + subpath: Sub path in tree where package lives + source_date_epoch: Source date epoch to set + Returns: Tuple with (changes_name, cl_version) + """ + add_dummy_changelog_entry( + local_tree, subpath, suffix, build_suite, + build_changelog_entry) + return build_once( + local_tree, build_suite, output_directory, build_command, subpath, + source_date_epoch=source_date_epoch) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py new file mode 100644 index 0000000..18c31bc --- /dev/null +++ b/ognibuild/debian/fix_build.py @@ -0,0 +1,1200 @@ +#!/usr/bin/python +# Copyright (C) 2018 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +__all__ = [ + 'build_incrementally', +] + +import logging +import os +import re +import subprocess +import sys +from typing import Iterator, List, Callable, Type, Tuple, Set + +from debian.deb822 import ( + Deb822, + PkgRelation, + Release, + ) + +from breezy.commit import PointlessCommit +from breezy.tree import Tree +from debmutate.control import ( + ensure_some_version, + ensure_minimum_version, + pg_buildext_updatecontrol, + ControlEditor, + ) +from debmutate.debhelper import ( + get_debhelper_compat_level, + ) +from debmutate.deb822 import ( + Deb822Editor, + ) +from debmutate.reformatting import ( + FormattingUnpreservable, + GeneratedFile, + ) +from lintian_brush import ( + reset_tree, + ) +from lintian_brush.changelog import ( + add_changelog_entry, + ) + +from lintian_brush.rules import ( + dh_invoke_add_with, + update_rules, + ) +from silver_platter.debian import ( + debcommit, + DEFAULT_BUILDER, + ) + +from breezy.plugins.debian.util import get_build_architecture +from .build import attempt_build +from buildlog_consultant.sbuild import ( + Problem, + MissingConfigStatusInput, + MissingPythonModule, + MissingPythonDistribution, + MissingCHeader, + MissingPkgConfig, + MissingCommand, + MissingFile, + MissingJavaScriptRuntime, + MissingSprocketsFile, + MissingGoPackage, + MissingPerlFile, + MissingPerlModule, + MissingXmlEntity, + MissingJDKFile, + MissingNodeModule, + MissingPhpClass, + MissingRubyGem, + MissingLibrary, + MissingJavaClass, + MissingCSharpCompiler, + MissingConfigure, + MissingAutomakeInput, + MissingRPackage, + MissingRubyFile, + MissingAutoconfMacro, + MissingValaPackage, + MissingXfceDependency, + MissingHaskellDependencies, + NeedPgBuildExtUpdateControl, + SbuildFailure, + DhAddonLoadFailure, + AptFetchFailure, + MissingMavenArtifacts, + GnomeCommonMissing, + MissingGnomeCommonDependency, + ) + + +DEFAULT_MAX_ITERATIONS = 10 + + +class CircularDependency(Exception): + """Adding dependency would introduce cycle.""" + + def __init__(self, package): + self.package = package + + 
+class DependencyContext(object): + + def __init__(self, tree, subpath='', committer=None, + update_changelog=True): + self.tree = tree + self.subpath = subpath + self.committer = committer + self.update_changelog = update_changelog + + def add_dependency(self, package, minimum_version=None): + raise NotImplementedError(self.add_dependency) + + +class BuildDependencyContext(DependencyContext): + + def add_dependency(self, package, minimum_version=None): + return add_build_dependency( + self.tree, package, minimum_version=minimum_version, + committer=self.committer, subpath=self.subpath, + update_changelog=self.update_changelog) + + +class AutopkgtestDependencyContext(DependencyContext): + + def __init__(self, testname, tree, subpath='', committer=None, + update_changelog=True): + self.testname = testname + super(AutopkgtestDependencyContext, self).__init__( + tree, subpath, committer, update_changelog) + + def add_dependency(self, package, minimum_version=None): + return add_test_dependency( + self.tree, self.testname, package, + minimum_version=minimum_version, + committer=self.committer, subpath=self.subpath, + update_changelog=self.update_changelog) + + +def add_build_dependency(tree, package, minimum_version=None, + committer=None, subpath='', update_changelog=True): + if not isinstance(package, str): + raise TypeError(package) + + control_path = os.path.join(tree.abspath(subpath), 'debian/control') + try: + with ControlEditor(path=control_path) as updater: + for binary in updater.binaries: + if binary["Package"] == package: + raise CircularDependency(package) + if minimum_version: + updater.source["Build-Depends"] = ensure_minimum_version( + updater.source.get("Build-Depends", ""), + package, minimum_version) + else: + updater.source["Build-Depends"] = ensure_some_version( + updater.source.get("Build-Depends", ""), package) + except FormattingUnpreservable as e: + logging.info( + 'Unable to edit %s in a way that preserves formatting.', + e.path) + return False + + if minimum_version: + desc = "%s (>= %s)" % (package, minimum_version) + else: + desc = package + + if not updater.changed: + logging.info('Giving up; dependency %s was already present.', desc) + return False + + logging.info("Adding build dependency: %s", desc) + return commit_debian_changes( + tree, subpath, "Add missing build dependency on %s." 
% desc, + committer=committer, update_changelog=update_changelog) + + +def add_test_dependency(tree, testname, package, minimum_version=None, + committer=None, subpath='', update_changelog=True): + if not isinstance(package, str): + raise TypeError(package) + + tests_control_path = os.path.join( + tree.abspath(subpath), 'debian/tests/control') + + try: + with Deb822Editor(path=tests_control_path) as updater: + command_counter = 1 + for control in updater.paragraphs: + try: + name = control["Tests"] + except KeyError: + name = "command%d" % command_counter + command_counter += 1 + if name != testname: + continue + if minimum_version: + control["Depends"] = ensure_minimum_version( + control.get("Depends", ""), + package, minimum_version) + else: + control["Depends"] = ensure_some_version( + control.get("Depends", ""), package) + except FormattingUnpreservable as e: + logging.info( + 'Unable to edit %s in a way that preserves formatting.', + e.path) + return False + if not updater.changed: + return False + + if minimum_version: + desc = "%s (>= %s)" % (package, minimum_version) + else: + desc = package + + logging.info("Adding dependency to test %s: %s", testname, desc) + return commit_debian_changes( + tree, subpath, + "Add missing dependency for test %s on %s." % (testname, desc), + update_changelog=update_changelog) + + +def commit_debian_changes(tree, subpath, summary, committer=None, + update_changelog=True): + with tree.lock_write(): + try: + if update_changelog: + add_changelog_entry( + tree, os.path.join(subpath, 'debian/changelog'), [summary]) + debcommit(tree, committer=committer, subpath=subpath) + else: + tree.commit(message=summary, committer=committer, + specific_files=[subpath]) + except PointlessCommit: + return False + else: + return True + + +class FileSearcher(object): + + def search_files(self, path, regex=False): + raise NotImplementedError(self.search_files) + + +class ContentsFileNotFound(Exception): + """The contents file was not found.""" + + +class AptContentsFileSearcher(FileSearcher): + + def __init__(self): + self._db = {} + + @classmethod + def from_env(cls): + sources = os.environ['REPOSITORIES'].split(':') + return cls.from_repositories(sources) + + def __setitem__(self, path, package): + self._db[path] = package + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._db.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + def load_file(self, f): + for line in f: + (path, rest) = line.rsplit(maxsplit=1) + package = rest.split(b'/')[-1] + decoded_path = '/' + path.decode('utf-8', 'surrogateescape') + self[decoded_path] = package.decode('utf-8') + + @classmethod + def from_urls(cls, urls): + self = cls() + for url in urls: + self.load_url(url) + return self + + @classmethod + def from_repositories(cls, sources): + # TODO(jelmer): Verify signatures, etc. 
+ urls = [] + arches = [get_build_architecture(), 'all'] + for source in sources: + parts = source.split(' ') + if parts[0] != 'deb': + logging.warning('Invalid line in sources: %r', source) + continue + base_url = parts[1] + name = parts[2] + components = parts[3:] + response = cls._get('%s/%s/Release' % (base_url, name)) + r = Release(response) + desired_files = set() + for component in components: + for arch in arches: + desired_files.add('%s/Contents-%s' % (component, arch)) + for entry in r['MD5Sum']: + if entry['name'] in desired_files: + urls.append('%s/%s/%s' % (base_url, name, entry['name'])) + return cls.from_urls(urls) + + @staticmethod + def _get(url): + from urllib.request import urlopen, Request + request = Request(url, headers={'User-Agent': 'Debian Janitor'}) + return urlopen(request) + + def load_url(self, url): + from urllib.error import HTTPError + try: + response = self._get(url) + except HTTPError as e: + if e.status == 404: + raise ContentsFileNotFound(url) + raise + if url.endswith('.gz'): + import gzip + f = gzip.GzipFile(fileobj=response) + elif response.headers.get_content_type() == 'text/plain': + f = response + else: + raise Exception( + 'Unknown content type %r' % + response.headers.get_content_type()) + self.load_file(f) + + +class GeneratedFileSearcher(FileSearcher): + + def __init__(self, db): + self._db = db + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._db.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + +# TODO(jelmer): read from a file +GENERATED_FILE_SEARCHER = GeneratedFileSearcher({ + '/etc/locale.gen': 'locales', + # Alternative + '/usr/bin/rst2html': '/usr/share/docutils/scripts/python3/rst2html'}) + + +_apt_file_searcher = None + + +def search_apt_file(path: str, regex: bool = False) -> Iterator[FileSearcher]: + global _apt_file_searcher + if _apt_file_searcher is None: + # TODO(jelmer): cache file + _apt_file_searcher = AptContentsFileSearcher.from_env() + if _apt_file_searcher: + yield from _apt_file_searcher.search_files(path, regex=regex) + yield from GENERATED_FILE_SEARCHER.search_files(path, regex=regex) + + +def get_package_for_paths(paths, regex=False): + candidates = set() + for path in paths: + candidates.update(search_apt_file(path, regex=regex)) + if candidates: + break + if len(candidates) == 0: + logging.warning('No packages found that contain %r', paths) + return None + if len(candidates) > 1: + logging.warning( + 'More than 1 packages found that contain %r: %r', + path, candidates) + # Euhr. Pick the one with the shortest name? 
+ return sorted(candidates, key=len)[0] + else: + return candidates.pop() + + +def get_package_for_python_module(module, python_version): + if python_version == 'python3': + paths = [ + os.path.join( + '/usr/lib/python3/dist-packages', + module.replace('.', '/'), + '__init__.py'), + os.path.join( + '/usr/lib/python3/dist-packages', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/python3\\.[0-9]+/lib-dynload', + module.replace('.', '/') + '\\.cpython-.*\\.so'), + os.path.join( + '/usr/lib/python3\\.[0-9]+/', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/python3\\.[0-9]+/', + module.replace('.', '/'), '__init__.py'), + ] + elif python_version == 'python2': + paths = [ + os.path.join( + '/usr/lib/python2\\.[0-9]/dist-packages', + module.replace('.', '/'), + '__init__.py'), + os.path.join( + '/usr/lib/python2\\.[0-9]/dist-packages', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/python2.\\.[0-9]/lib-dynload', + module.replace('.', '/') + '.so')] + elif python_version == 'pypy': + paths = [ + os.path.join( + '/usr/lib/pypy/dist-packages', + module.replace('.', '/'), + '__init__.py'), + os.path.join( + '/usr/lib/pypy/dist-packages', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/pypy/dist-packages', + module.replace('.', '/') + '\\.pypy-.*\\.so'), + ] + else: + raise AssertionError( + 'unknown python version %r' % python_version) + return get_package_for_paths(paths, regex=True) + + +def targeted_python_versions(tree: Tree) -> Set[str]: + with tree.get_file('debian/control') as f: + control = Deb822(f) + build_depends = PkgRelation.parse_relations( + control.get('Build-Depends', '')) + all_build_deps: Set[str] = set() + for or_deps in build_depends: + all_build_deps.update(or_dep['name'] for or_dep in or_deps) + targeted = set() + if any(x.startswith('pypy') for x in all_build_deps): + targeted.add('pypy') + if any(x.startswith('python-') for x in all_build_deps): + targeted.add('cpython2') + if any(x.startswith('python3-') for x in all_build_deps): + targeted.add('cpython3') + return targeted + + +apt_cache = None + + +def package_exists(package): + global apt_cache + if apt_cache is None: + import apt_pkg + apt_cache = apt_pkg.Cache() + for p in apt_cache.packages: + if p.name == package: + return True + return False + + +def fix_missing_javascript_runtime(error, context): + package = get_package_for_paths( + ['/usr/bin/node', '/usr/bin/duk'], + regex=False) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_python_distribution(error, context): + targeted = targeted_python_versions(context.tree) + default = not targeted + + pypy_pkg = get_package_for_paths( + ['/usr/lib/pypy/dist-packages/%s-.*.egg-info' % error.distribution], + regex=True) + if pypy_pkg is None: + pypy_pkg = 'pypy-%s' % error.distribution + if not package_exists(pypy_pkg): + pypy_pkg = None + + py2_pkg = get_package_for_paths( + ['/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info' % + error.distribution], regex=True) + if py2_pkg is None: + py2_pkg = 'python-%s' % error.distribution + if not package_exists(py2_pkg): + py2_pkg = None + + py3_pkg = get_package_for_paths( + ['/usr/lib/python3/dist-packages/%s-.*.egg-info' % + error.distribution], regex=True) + if py3_pkg is None: + py3_pkg = 'python3-%s' % error.distribution + if not package_exists(py3_pkg): + py3_pkg = None + + extra_build_deps = [] + if error.python_version == 2: + if 'pypy' in targeted: + if not pypy_pkg: + logging.warning('no pypy 
package found for %s', error.module) + else: + extra_build_deps.append(pypy_pkg) + if 'cpython2' in targeted or default: + if not py2_pkg: + logging.warning( + 'no python 2 package found for %s', error.module) + return False + extra_build_deps.append(py2_pkg) + elif error.python_version == 3: + if not py3_pkg: + logging.warning('no python 3 package found for %s', error.module) + return False + extra_build_deps.append(py3_pkg) + else: + if py3_pkg and ('cpython3' in targeted or default): + extra_build_deps.append(py3_pkg) + if py2_pkg and ('cpython2' in targeted or default): + extra_build_deps.append(py2_pkg) + if pypy_pkg and 'pypy' in targeted: + extra_build_deps.append(pypy_pkg) + + if not extra_build_deps: + return False + + for dep_pkg in extra_build_deps: + assert dep_pkg is not None + if not context.add_dependency( + dep_pkg, minimum_version=error.minimum_version): + return False + return True + + +def fix_missing_python_module(error, context): + if getattr(context, 'tree', None) is not None: + targeted = targeted_python_versions(context.tree) + else: + targeted = set() + default = (not targeted) + + pypy_pkg = get_package_for_python_module(error.module, 'pypy') + py2_pkg = get_package_for_python_module(error.module, 'python2') + py3_pkg = get_package_for_python_module(error.module, 'python3') + + extra_build_deps = [] + if error.python_version == 2: + if 'pypy' in targeted: + if not pypy_pkg: + logging.warning('no pypy package found for %s', error.module) + else: + extra_build_deps.append(pypy_pkg) + if 'cpython2' in targeted or default: + if not py2_pkg: + logging.warning( + 'no python 2 package found for %s', error.module) + return False + extra_build_deps.append(py2_pkg) + elif error.python_version == 3: + if not py3_pkg: + logging.warning( + 'no python 3 package found for %s', error.module) + return False + extra_build_deps.append(py3_pkg) + else: + if py3_pkg and ('cpython3' in targeted or default): + extra_build_deps.append(py3_pkg) + if py2_pkg and ('cpython2' in targeted or default): + extra_build_deps.append(py2_pkg) + if pypy_pkg and 'pypy' in targeted: + extra_build_deps.append(pypy_pkg) + + if not extra_build_deps: + return False + + for dep_pkg in extra_build_deps: + assert dep_pkg is not None + if not context.add_dependency(dep_pkg, error.minimum_version): + return False + return True + + +def fix_missing_go_package(error, context): + package = get_package_for_paths( + [os.path.join('/usr/share/gocode/src', error.package, '.*')], + regex=True) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_c_header(error, context): + package = get_package_for_paths( + [os.path.join('/usr/include', error.header)], regex=False) + if package is None: + package = get_package_for_paths( + [os.path.join('/usr/include', '.*', error.header)], regex=True) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_pkg_config(error, context): + package = get_package_for_paths( + [os.path.join('/usr/lib/pkgconfig', error.module + '.pc')]) + if package is None: + package = get_package_for_paths( + [os.path.join('/usr/lib', '.*', 'pkgconfig', + error.module + '.pc')], + regex=True) + if package is None: + return False + return context.add_dependency( + package, minimum_version=error.minimum_version) + + +def fix_missing_command(error, context): + if os.path.isabs(error.command): + paths = [error.command] + else: + paths = [ + os.path.join(dirname, error.command) + for dirname in ['/usr/bin', '/bin']] + 
package = get_package_for_paths(paths) + if package is None: + logging.info('No packages found that contain %r', paths) + return False + return context.add_dependency(package) + + +def fix_missing_file(error, context): + package = get_package_for_paths([error.path]) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_sprockets_file(error, context): + if error.content_type == 'application/javascript': + path = '/usr/share/.*/app/assets/javascripts/%s.js$' % error.name + else: + logging.warning('unable to handle content type %s', error.content_type) + return False + package = get_package_for_paths([path], regex=True) + if package is None: + return False + return context.add_dependency(package) + + +DEFAULT_PERL_PATHS = ['/usr/share/perl5'] + + +def fix_missing_perl_file(error, context): + + if (error.filename == 'Makefile.PL' and + not context.tree.has_filename('Makefile.PL') and + context.tree.has_filename('dist.ini')): + # TODO(jelmer): add dist-zilla add-on to debhelper + raise NotImplementedError + + if error.inc is None: + if error.filename is None: + filename = error.module.replace('::', '/') + '.pm' + paths = [os.path.join(inc, filename) + for inc in DEFAULT_PERL_PATHS] + elif not os.path.isabs(error.filename): + return False + else: + paths = [error.filename] + else: + paths = [os.path.join(inc, error.filename) for inc in error.inc] + package = get_package_for_paths(paths, regex=False) + if package is None: + if getattr(error, 'module', None): + logging.warning( + 'no perl package found for %s (%r).', + error.module, error.filename) + else: + logging.warning( + 'perl file %s not found (paths searched for: %r).', + error.filename, paths) + return False + return context.add_dependency(package) + + +def get_package_for_node_package(node_package): + paths = [ + '/usr/share/nodejs/.*/node_modules/%s/package.json' % node_package, + '/usr/lib/nodejs/%s/package.json' % node_package, + '/usr/share/nodejs/%s/package.json' % node_package] + return get_package_for_paths(paths, regex=True) + + +def fix_missing_node_module(error, context): + package = get_package_for_node_package(error.module) + if package is None: + logging.warning( + 'no node package found for %s.', + error.module) + return False + return context.add_dependency(package) + + +def fix_missing_dh_addon(error, context): + paths = [os.path.join('/usr/share/perl5', error.path)] + package = get_package_for_paths(paths) + if package is None: + logging.warning('no package for debhelper addon %s', error.name) + return False + return context.add_dependency(package) + + +def retry_apt_failure(error, context): + return True + + +def fix_missing_php_class(error, context): + path = '/usr/share/php/%s.php' % error.php_class.replace('\\', '/') + package = get_package_for_paths([path]) + if package is None: + logging.warning('no package for PHP class %s', error.php_class) + return False + return context.add_dependency(package) + + +def fix_missing_jdk_file(error, context): + path = error.jdk_path + '.*/' + error.filename + package = get_package_for_paths([path], regex=True) + if package is None: + logging.warning( + 'no package found for %s (JDK: %s) - regex %s', + error.filename, error.jdk_path, path) + return False + return context.add_dependency(package) + + +def fix_missing_vala_package(error, context): + path = '/usr/share/vala-[0-9.]+/vapi/%s.vapi' % error.package + package = get_package_for_paths([path], regex=True) + if package is None: + logging.warning( + 'no file found for package %s - 
regex %s', + error.package, path) + return False + return context.add_dependency(package) + + +def fix_missing_xml_entity(error, context): + # Ideally we should be using the XML catalog for this, but hardcoding + # a few URLs will do for now.. + URL_MAP = { + 'http://www.oasis-open.org/docbook/xml/': + '/usr/share/xml/docbook/schema/dtd/' + } + for url, path in URL_MAP.items(): + if error.url.startswith(url): + search_path = os.path.join(path, error.url[len(url):]) + break + else: + return False + + package = get_package_for_paths([search_path], regex=False) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_library(error, context): + paths = [os.path.join('/usr/lib/lib%s.so$' % error.library), + os.path.join('/usr/lib/.*/lib%s.so$' % error.library), + os.path.join('/usr/lib/lib%s.a$' % error.library), + os.path.join('/usr/lib/.*/lib%s.a$' % error.library)] + package = get_package_for_paths(paths, regex=True) + if package is None: + logging.warning('no package for library %s', error.library) + return False + return context.add_dependency(package) + + +def fix_missing_ruby_gem(error, context): + paths = [os.path.join( + '/usr/share/rubygems-integration/all/' + 'specifications/%s-.*\\.gemspec' % error.gem)] + package = get_package_for_paths(paths, regex=True) + if package is None: + logging.warning('no package for gem %s', error.gem) + return False + return context.add_dependency(package, minimum_version=error.version) + + +def fix_missing_ruby_file(error, context): + paths = [ + os.path.join('/usr/lib/ruby/vendor_ruby/%s.rb' % error.filename)] + package = get_package_for_paths(paths) + if package is not None: + return context.add_dependency(package) + paths = [ + os.path.join(r'/usr/share/rubygems-integration/all/gems/([^/]+)/' + 'lib/%s.rb' % error.filename)] + package = get_package_for_paths(paths, regex=True) + if package is not None: + return context.add_dependency(package) + + logging.warning('no package for ruby file %s', error.filename) + return False + + +def fix_missing_r_package(error, context): + paths = [os.path.join('/usr/lib/R/site-library/.*/R/%s$' % error.package)] + package = get_package_for_paths(paths, regex=True) + if package is None: + logging.warning('no package for R package %s', error.package) + return False + return context.add_dependency( + package, minimum_version=error.minimum_version) + + +def fix_missing_java_class(error, context): + # Unfortunately this only finds classes in jars installed on the host + # system :( + output = subprocess.check_output( + ["java-propose-classpath", "-c" + error.classname]) + classpath = [ + p for p in output.decode().strip(":").strip().split(':') if p] + if not classpath: + logging.warning('unable to find classpath for %s', error.classname) + return False + logging.info('Classpath for %s: %r', error.classname, classpath) + package = get_package_for_paths(classpath) + if package is None: + logging.warning('no package for files in %r', classpath) + return False + return context.add_dependency(package) + + +def enable_dh_autoreconf(context): + # Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by + # default. 
+ debhelper_compat_version = get_debhelper_compat_level( + context.tree.abspath('.')) + if debhelper_compat_version is not None and debhelper_compat_version < 10: + def add_with_autoreconf(line, target): + if target != b'%': + return line + if not line.startswith(b'dh '): + return line + return dh_invoke_add_with(line, b'autoreconf') + + if update_rules(command_line_cb=add_with_autoreconf): + return context.add_dependency('dh-autoreconf') + + return False + + +def fix_missing_configure(error, context): + if (not context.tree.has_filename('configure.ac') and + not context.tree.has_filename('configure.in')): + return False + + return enable_dh_autoreconf(context) + + +def fix_missing_automake_input(error, context): + # TODO(jelmer): If it's ./NEWS, ./AUTHORS or ./README that's missing, then + # try to set 'export AUTOMAKE = automake --foreign' in debian/rules. + # https://salsa.debian.org/jelmer/debian-janitor/issues/88 + return enable_dh_autoreconf(context) + + +def fix_missing_maven_artifacts(error, context): + artifact = error.artifacts[0] + parts = artifact.split(':') + if len(parts) == 4: + (group_id, artifact_id, kind, version) = parts + regex = False + elif len(parts) == 3: + (group_id, artifact_id, version) = parts + kind = 'jar' + regex = False + elif len(parts) == 2: + version = '.*' + (group_id, artifact_id) = parts + kind = 'jar' + regex = True + else: + raise AssertionError( + 'invalid number of parts to artifact %s' % artifact) + paths = [os.path.join( + '/usr/share/maven-repo', group_id.replace('.', '/'), + artifact_id, version, '%s-%s.%s' % (artifact_id, version, kind))] + package = get_package_for_paths(paths, regex=regex) + if package is None: + logging.warning('no package for artifact %s', artifact) + return False + return context.add_dependency(package) + + +def install_gnome_common(error, context): + return context.add_dependency('gnome-common') + + +def install_gnome_common_dep(error, context): + if error.package == 'glib-gettext': + package = get_package_for_paths(['/usr/bin/glib-gettextize']) + else: + package = None + if package is None: + logging.warning('No debian package for package %s', error.package) + return False + return context.add_dependency( + package=package, + minimum_version=error.minimum_version) + + +def install_xfce_dep(error, context): + if error.package == 'gtk-doc': + package = get_package_for_paths(['/usr/bin/gtkdocize']) + else: + package = None + if package is None: + logging.warning('No debian package for package %s', error.package) + return False + return context.add_dependency(package=package) + + +def fix_missing_config_status_input(error, context): + autogen_path = 'autogen.sh' + rules_path = 'debian/rules' + if context.subpath not in ('.', ''): + autogen_path = os.path.join(context.subpath, autogen_path) + rules_path = os.path.join(context.subpath, rules_path) + if not context.tree.has_filename(autogen_path): + return False + + def add_autogen(mf): + rule = any(mf.iter_rules(b'override_dh_autoreconf')) + if rule: + return + rule = mf.add_rule(b'override_dh_autoreconf') + rule.append_command(b'dh_autoreconf ./autogen.sh') + + if not update_rules(makefile_cb=add_autogen, path=rules_path): + return False + + if context.update_changelog: + commit_debian_changes( + context.tree, context.subpath, + 'Run autogen.sh during build.', committer=context.committer, + update_changelog=context.update_changelog) + + return True + + +def _find_aclocal_fun(macro): + # TODO(jelmer): Use the API for codesearch.debian.net instead? 
+ defun_prefix = b'AC_DEFUN([%s],' % macro.encode('ascii') + for entry in os.scandir('/usr/share/aclocal'): + if not entry.is_file(): + continue + with open(entry.path, 'rb') as f: + for line in f: + if line.startswith(defun_prefix): + return entry.path + raise KeyError + + +def run_pgbuildext_updatecontrol(error, context): + logging.info("Running 'pg_buildext updatecontrol'") + # TODO(jelmer): run in the schroot + pg_buildext_updatecontrol(context.tree.abspath(context.subpath)) + return commit_debian_changes( + context.tree, context.subpath, "Run 'pgbuildext updatecontrol'.", + committer=context.committer, update_changelog=False) + + +def fix_missing_autoconf_macro(error, context): + try: + path = _find_aclocal_fun(error.macro) + except KeyError: + logging.info('No local m4 file found defining %s', error.macro) + return False + package = get_package_for_paths([path]) + if package is None: + logging.warning('no package for macro file %s', path) + return False + return context.add_dependency(package) + + +def fix_missing_c_sharp_compiler(error, context): + return context.add_dependency('mono-mcs') + + +def fix_missing_haskell_dependencies(error, context): + path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % error.deps[0][0] + package = get_package_for_paths([path], regex=True) + if package is None: + logging.warning('no package for macro file %s', path) + return False + return context.add_dependency(package) + + +VERSIONED_PACKAGE_FIXERS: List[ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + (NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), + (MissingConfigure, fix_missing_configure), + (MissingAutomakeInput, fix_missing_automake_input), +] + + +APT_FIXERS: List[ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + (MissingPythonModule, fix_missing_python_module), + (MissingPythonDistribution, fix_missing_python_distribution), + (MissingCHeader, fix_missing_c_header), + (MissingPkgConfig, fix_missing_pkg_config), + (MissingCommand, fix_missing_command), + (MissingFile, fix_missing_file), + (MissingSprocketsFile, fix_missing_sprockets_file), + (MissingGoPackage, fix_missing_go_package), + (MissingPerlFile, fix_missing_perl_file), + (MissingPerlModule, fix_missing_perl_file), + (MissingXmlEntity, fix_missing_xml_entity), + (MissingNodeModule, fix_missing_node_module), + (MissingRubyGem, fix_missing_ruby_gem), + (MissingRPackage, fix_missing_r_package), + (MissingLibrary, fix_missing_library), + (MissingJavaClass, fix_missing_java_class), + (DhAddonLoadFailure, fix_missing_dh_addon), + (MissingPhpClass, fix_missing_php_class), + (AptFetchFailure, retry_apt_failure), + (MissingMavenArtifacts, fix_missing_maven_artifacts), + (GnomeCommonMissing, install_gnome_common), + (MissingGnomeCommonDependency, install_gnome_common_dep), + (MissingXfceDependency, install_xfce_dep), + (MissingConfigStatusInput, fix_missing_config_status_input), + (MissingJDKFile, fix_missing_jdk_file), + (MissingRubyFile, fix_missing_ruby_file), + (MissingJavaScriptRuntime, fix_missing_javascript_runtime), + (MissingAutoconfMacro, fix_missing_autoconf_macro), + (MissingValaPackage, fix_missing_vala_package), + (MissingCSharpCompiler, fix_missing_c_sharp_compiler), + (MissingHaskellDependencies, fix_missing_haskell_dependencies), +] + + +def resolve_error(error, context, fixers): + relevant_fixers = [] + for error_cls, fixer in fixers: + if isinstance(error, error_cls): + relevant_fixers.append(fixer) + if not relevant_fixers: + logging.warning('No fixer found for 
%r', error) + return False + for fixer in relevant_fixers: + logging.info( + 'Attempting to use fixer %r to address %r', + fixer, error) + try: + made_changes = fixer(error, context) + except GeneratedFile: + logging.warning('Control file is generated, unable to edit.') + return False + if made_changes: + return True + return False + + +def build_incrementally( + local_tree, suffix, build_suite, output_directory, build_command, + build_changelog_entry='Build for debian-janitor apt repository.', + committer=None, max_iterations=DEFAULT_MAX_ITERATIONS, + subpath='', source_date_epoch=None, update_changelog=True): + fixed_errors = [] + while True: + try: + return attempt_build( + local_tree, suffix, build_suite, output_directory, + build_command, build_changelog_entry, subpath=subpath, + source_date_epoch=source_date_epoch) + except SbuildFailure as e: + if e.error is None: + logging.warning( + 'Build failed with unidentified error. Giving up.') + raise + if e.context is None: + logging.info('No relevant context, not making any changes.') + raise + if (e.error, e.context) in fixed_errors: + logging.warning( + 'Error was still not fixed on second try. Giving up.') + raise + if max_iterations is not None \ + and len(fixed_errors) > max_iterations: + logging.warning( + 'Last fix did not address the issue. Giving up.') + raise + reset_tree(local_tree, local_tree.basis_tree(), subpath=subpath) + if e.context[0] == 'build': + context = BuildDependencyContext( + local_tree, subpath=subpath, committer=committer, + update_changelog=update_changelog) + elif e.context[0] == 'autopkgtest': + context = AutopkgtestDependencyContext( + e.context[1], + local_tree, subpath=subpath, committer=committer, + update_changelog=update_changelog) + else: + logging.warning('unable to install for context %r', e.context) + raise + try: + if not resolve_error( + e.error, context, + VERSIONED_PACKAGE_FIXERS + APT_FIXERS): + logging.warning( + 'Failed to resolve error %r. 
Giving up.', e.error) + raise + except CircularDependency: + logging.warning( + 'Unable to fix %r; it would introduce a circular ' + 'dependency.', e.error) + raise e + fixed_errors.append((e.error, e.context)) + if os.path.exists(os.path.join(output_directory, 'build.log')): + i = 1 + while os.path.exists( + os.path.join(output_directory, 'build.log.%d' % i)): + i += 1 + os.rename(os.path.join(output_directory, 'build.log'), + os.path.join(output_directory, 'build.log.%d' % i)) + + +def main(argv=None): + import argparse + parser = argparse.ArgumentParser('janitor.fix_build') + parser.add_argument('--suffix', type=str, + help="Suffix to use for test builds.", + default='fixbuild1') + parser.add_argument('--suite', type=str, + help="Suite to target.", + default='unstable') + parser.add_argument('--output-directory', type=str, + help="Output directory.", default=None) + parser.add_argument('--committer', type=str, + help='Committer string (name and email)', + default=None) + parser.add_argument( + '--build-command', type=str, + help='Build command', + default=(DEFAULT_BUILDER + ' -A -s -v')) + parser.add_argument( + '--no-update-changelog', action="store_false", default=None, + dest="update_changelog", help="do not update the changelog") + parser.add_argument( + '--update-changelog', action="store_true", dest="update_changelog", + help="force updating of the changelog", default=None) + + args = parser.parse_args() + from breezy.workingtree import WorkingTree + tree = WorkingTree.open('.') + build_incrementally( + tree, args.suffix, args.suite, args.output_directory, + args.build_command, committer=args.committer, + update_changelog=args.update_changelog) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/ognibuild/dist.py b/ognibuild/dist.py new file mode 100644 index 0000000..6770ca8 --- /dev/null +++ b/ognibuild/dist.py @@ -0,0 +1,524 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import errno +import logging +import os +import re +import shutil +import subprocess +import sys +import tempfile +from typing import Optional, List, Tuple, Callable, Type + +from debian.deb822 import Deb822 + +from breezy.export import export +from breezy.tree import Tree +from breezy.workingtree import WorkingTree + +from breezy.plugins.debian.repack_tarball import get_filetype + +from .fix_build import ( + DependencyContext, + resolve_error, + APT_FIXERS, + ) +from buildlog_consultant.sbuild import ( + find_apt_get_failure, + find_build_failure_description, + Problem, + MissingPerlModule, + MissingCommand, + NoSpaceOnDevice, + ) +from ognibuild import shebang_binary +from ognibuild.session import Session +from ognibuild.session.schroot import SchrootSession + + +def run_apt(session: Session, args: List[str]) -> None: + args = ['apt', '-y'] + args + retcode, lines = run_with_tee(session, args, cwd='/', user='root') + if retcode == 0: + return + offset, line, error = find_apt_get_failure(lines) + if error is not None: + raise DetailedDistCommandFailed(retcode, args, error) + if line is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(offset, line)) + raise UnidentifiedError(retcode, args, lines) + + +def apt_install(session: Session, packages: List[str]) -> None: + run_apt(session, ['install'] + packages) + + +def apt_satisfy(session: Session, deps: List[str]) -> None: + run_apt(session, ['satisfy'] + deps) + + +def satisfy_build_deps(session: Session, tree): + source = Deb822(tree.get_file('debian/control')) + deps = [] + for name in ['Build-Depends', 'Build-Depends-Indep', 'Build-Depends-Arch']: + try: + deps.append(source[name].strip().strip(',')) + except KeyError: + pass + for name in ['Build-Conflicts', 'Build-Conflicts-Indeo', + 'Build-Conflicts-Arch']: + try: + deps.append('Conflicts: ' + source[name]) + except KeyError: + pass + deps = [ + dep.strip().strip(',') + for dep in deps] + apt_satisfy(session, deps) + + +def run_with_tee(session: Session, args: List[str], **kwargs): + p = session.Popen( + args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) + contents = [] + while p.poll() is None: + line = p.stdout.readline() + sys.stdout.buffer.write(line) + sys.stdout.buffer.flush() + contents.append(line.decode('utf-8', 'surrogateescape')) + return p.returncode, contents + + +class SchrootDependencyContext(DependencyContext): + + def __init__(self, session): + self.session = session + + def add_dependency(self, package, minimum_version=None): + # TODO(jelmer): Handle minimum_version + apt_install(self.session, [package]) + return True + + +class DetailedDistCommandFailed(Exception): + + def __init__(self, retcode, argv, error): + self.retcode = retcode + self.argv = argv + self.error = error + + +class UnidentifiedError(Exception): + + def __init__(self, retcode, argv, lines, secondary=None): + self.retcode = retcode + self.argv = argv + self.lines = lines + self.secondary = secondary + + +def fix_perl_module_from_cpan(error, context): + # TODO(jelmer): Specify -T to skip tests? 
+ context.session.check_call( + ['cpan', '-i', error.module], user='root', + env={'PERL_MM_USE_DEFAULT': '1'}) + return True + + +NPM_COMMAND_PACKAGES = { + 'del-cli': 'del-cli', + } + + +def fix_npm_missing_command(error, context): + try: + package = NPM_COMMAND_PACKAGES[error.command] + except KeyError: + return False + + context.session.check_call(['npm', '-g', 'install', package]) + return True + + +GENERIC_INSTALL_FIXERS: List[ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + (MissingPerlModule, fix_perl_module_from_cpan), + (MissingCommand, fix_npm_missing_command), +] + + +def run_with_build_fixer(session: Session, args: List[str]): + logging.info('Running %r', args) + fixed_errors = [] + while True: + retcode, lines = run_with_tee(session, args) + if retcode == 0: + return + offset, line, error = find_build_failure_description(lines) + if error is None: + logging.warning('Build failed with unidentified error. Giving up.') + if line is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(offset, line)) + raise UnidentifiedError(retcode, args, lines) + + logging.info('Identified error: %r', error) + if error in fixed_errors: + logging.warning( + 'Failed to resolve error %r, it persisted. Giving up.', + error) + raise DetailedDistCommandFailed(retcode, args, error) + if not resolve_error( + error, SchrootDependencyContext(session), + fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS)): + logging.warning( + 'Failed to find resolution for error %r. Giving up.', + error) + raise DetailedDistCommandFailed(retcode, args, error) + fixed_errors.append(error) + + +class NoBuildToolsFound(Exception): + """No supported build tools were found.""" + + +def run_dist_in_chroot(session): + apt_install(session, ['git']) + + # Some things want to write to the user's home directory, + # e.g. 
pip caches in ~/.cache + session.create_home() + + if os.path.exists('package.xml'): + apt_install(session, ['php-pear', 'php-horde-core']) + logging.info('Found package.xml, assuming pear package.') + session.check_call(['pear', 'package']) + return + + if os.path.exists('pyproject.toml'): + import toml + with open('pyproject.toml', 'r') as pf: + pyproject = toml.load(pf) + if 'poetry' in pyproject.get('tool', []): + logging.info( + 'Found pyproject.toml with poetry section, ' + 'assuming poetry project.') + apt_install(session, ['python3-venv', 'python3-pip']) + session.check_call(['pip3', 'install', 'poetry'], user='root') + session.check_call(['poetry', 'build', '-f', 'sdist']) + return + + if os.path.exists('setup.py'): + logging.info('Found setup.py, assuming python project.') + apt_install(session, ['python3', 'python3-pip']) + with open('setup.py', 'r') as f: + setup_py_contents = f.read() + try: + with open('setup.cfg', 'r') as f: + setup_cfg_contents = f.read() + except FileNotFoundError: + setup_cfg_contents = '' + if 'setuptools' in setup_py_contents: + logging.info('Reference to setuptools found, installing.') + apt_install(session, ['python3-setuptools']) + if ('setuptools_scm' in setup_py_contents or + 'setuptools_scm' in setup_cfg_contents): + logging.info('Reference to setuptools-scm found, installing.') + apt_install( + session, ['python3-setuptools-scm', 'git', 'mercurial']) + + # TODO(jelmer): Install setup_requires + + interpreter = shebang_binary('setup.py') + if interpreter is not None: + if interpreter == 'python3': + apt_install(session, ['python3']) + elif interpreter == 'python2': + apt_install(session, ['python2']) + elif interpreter == 'python': + apt_install(session, ['python']) + else: + raise ValueError('Unknown interpreter %s' % interpreter) + apt_install(session, ['python2', 'python3']) + run_with_build_fixer(session, ['./setup.py', 'sdist']) + else: + # Just assume it's Python 3 + apt_install(session, ['python3']) + run_with_build_fixer(session, ['python3', './setup.py', 'sdist']) + return + + if os.path.exists('setup.cfg'): + logging.info('Found setup.cfg, assuming python project.') + apt_install(session, ['python3-pep517', 'python3-pip']) + session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) + return + + if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): + apt_install(session, ['libdist-inkt-perl']) + with open('dist.ini', 'rb') as f: + for line in f: + if not line.startswith(b';;'): + continue + try: + (key, value) = line[2:].split(b'=', 1) + except ValueError: + continue + if (key.strip() == b'class' and + value.strip().startswith(b"'Dist::Inkt")): + logging.info( + 'Found Dist::Inkt section in dist.ini, ' + 'assuming distinkt.') + # TODO(jelmer): install via apt if possible + session.check_call( + ['cpan', 'install', value.decode().strip("'")], + user='root') + run_with_build_fixer(session, ['distinkt-dist']) + return + # Default to invoking Dist::Zilla + logging.info('Found dist.ini, assuming dist-zilla.') + apt_install(session, ['libdist-zilla-perl']) + run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) + return + + if os.path.exists('package.json'): + apt_install(session, ['npm']) + run_with_build_fixer(session, ['npm', 'pack']) + return + + gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] + if gemfiles: + apt_install(session, ['gem2deb']) + if len(gemfiles) > 1: + logging.warning('More than one gemfile. 
Trying the first?') + run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) + return + + if os.path.exists('waf'): + apt_install(session, ['python3']) + run_with_build_fixer(session, ['./waf', 'dist']) + return + + if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): + apt_install(session, ['perl']) + run_with_build_fixer(session, ['perl', 'Makefile.PL']) + + if not os.path.exists('Makefile') and not os.path.exists('configure'): + if os.path.exists('autogen.sh'): + if shebang_binary('autogen.sh') is None: + run_with_build_fixer(session, ['/bin/sh', './autogen.sh']) + try: + run_with_build_fixer(session, ['./autogen.sh']) + except UnidentifiedError as e: + if ("Gnulib not yet bootstrapped; " + "run ./bootstrap instead.\n" in e.lines): + run_with_build_fixer(session, ["./bootstrap"]) + run_with_build_fixer(session, ['./autogen.sh']) + else: + raise + + elif os.path.exists('configure.ac') or os.path.exists('configure.in'): + apt_install(session, [ + 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) + run_with_build_fixer(session, ['autoreconf', '-i']) + + if not os.path.exists('Makefile') and os.path.exists('configure'): + session.check_call(['./configure']) + + if os.path.exists('Makefile'): + apt_install(session, ['make']) + try: + run_with_build_fixer(session, ['make', 'dist']) + except UnidentifiedError as e: + if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: + pass + elif ("make[1]: *** No rule to make target 'dist'. Stop.\n" + in e.lines): + pass + elif ("Reconfigure the source tree " + "(via './config' or 'perl Configure'), please.\n" + ) in e.lines: + run_with_build_fixer(session, ['./config']) + run_with_build_fixer(session, ['make', 'dist']) + elif ( + "Please try running 'make manifest' and then run " + "'make dist' again.\n" in e.lines): + run_with_build_fixer(session, ['make', 'manifest']) + run_with_build_fixer(session, ['make', 'dist']) + elif "Please run ./configure first\n" in e.lines: + run_with_build_fixer(session, ['./configure']) + run_with_build_fixer(session, ['make', 'dist']) + elif any([re.match( + r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' + r'Run \'./configure \[options\]\' and retry. 
Stop.\n', + line) for line in e.lines]): + run_with_build_fixer(session, ['./configure']) + run_with_build_fixer(session, ['make', 'dist']) + elif any([re.match( + r'Problem opening MANIFEST: No such file or directory ' + r'at .* line [0-9]+\.', line) for line in e.lines]): + run_with_build_fixer(session, ['make', 'manifest']) + run_with_build_fixer(session, ['make', 'dist']) + else: + raise + else: + return + + raise NoBuildToolsFound() + + +def export_vcs_tree(tree, directory): + try: + export(tree, directory, 'dir', None) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedDistCommandFailed( + 1, ['export'], NoSpaceOnDevice()) + raise + + +def dupe_vcs_tree(tree, directory): + with tree.lock_read(): + if isinstance(tree, WorkingTree): + tree = tree.basis_tree() + try: + result = tree._repository.controldir.sprout( + directory, create_tree_if_local=True, + revision_id=tree.get_revision_id()) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedDistCommandFailed( + 1, ['sprout'], NoSpaceOnDevice()) + raise + # Copy parent location - some scripts need this + base_branch = tree._repository.controldir.open_branch() + parent = base_branch.get_parent() + if parent: + result.open_branch().set_parent(parent) + + +def create_dist_schroot( + tree: Tree, target_dir: str, + chroot: str, packaging_tree: Optional[Tree] = None, + include_controldir: bool = True, + subdir: Optional[str] = None) -> Optional[str]: + if subdir is None: + subdir = 'package' + with SchrootSession(chroot) as session: + if packaging_tree is not None: + satisfy_build_deps(session, packaging_tree) + build_dir = os.path.join(session.location, 'build') + + try: + directory = tempfile.mkdtemp(dir=build_dir) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedDistCommandFailed( + 1, ['mkdtemp'], NoSpaceOnDevice()) + reldir = '/' + os.path.relpath(directory, session.location) + + export_directory = os.path.join(directory, subdir) + if not include_controldir: + export_vcs_tree(tree, export_directory) + else: + dupe_vcs_tree(tree, export_directory) + + existing_files = os.listdir(export_directory) + + oldcwd = os.getcwd() + os.chdir(export_directory) + try: + session.chdir(os.path.join(reldir, subdir)) + run_dist_in_chroot(session) + except NoBuildToolsFound: + logging.info( + 'No build tools found, falling back to simple export.') + return None + finally: + os.chdir(oldcwd) + + new_files = os.listdir(export_directory) + diff_files = set(new_files) - set(existing_files) + diff = set([n for n in diff_files if get_filetype(n) is not None]) + if len(diff) == 1: + fn = diff.pop() + logging.info('Found tarball %s in package directory.', fn) + shutil.copy( + os.path.join(export_directory, fn), + target_dir) + return fn + if 'dist' in diff_files: + for entry in os.scandir(os.path.join(export_directory, 'dist')): + if get_filetype(entry.name) is not None: + logging.info( + 'Found tarball %s in dist directory.', entry.name) + shutil.copy(entry.path, target_dir) + return entry.name + logging.info('No tarballs found in dist directory.') + + diff = set(os.listdir(directory)) - set([subdir]) + if len(diff) == 1: + fn = diff.pop() + logging.info('Found tarball %s in parent directory.', fn) + shutil.copy( + os.path.join(directory, fn), + target_dir) + return fn + + logging.info('No tarball created :(') + return None + + +if __name__ == '__main__': + import argparse + import breezy.bzr + import breezy.git # noqa: F401 + + parser = argparse.ArgumentParser() + parser.add_argument( + '--chroot', 
+        help='Name of chroot to use')
+    parser.add_argument(
+        'directory', default='.', type=str, nargs='?',
+        help='Directory with upstream source.')
+    parser.add_argument(
+        '--packaging-directory', type=str,
+        help='Path to packaging directory.')
+    parser.add_argument(
+        '--target-directory', type=str, default='..',
+        help='Target directory')
+    args = parser.parse_args()
+    tree = WorkingTree.open(args.directory)
+    if args.packaging_directory:
+        packaging_tree = WorkingTree.open(args.packaging_directory)
+        with packaging_tree.lock_read():
+            source = Deb822(packaging_tree.get_file('debian/control'))
+            package = source['Source']
+        subdir = package
+    else:
+        packaging_tree = None
+        subdir = None
+
+    ret = create_dist_schroot(
+        tree, subdir=subdir,
+        target_dir=os.path.abspath(args.target_directory),
+        packaging_tree=packaging_tree,
+        chroot=args.chroot)
+    if ret:
+        sys.exit(0)
+    else:
+        sys.exit(1)
diff --git a/ognibuild/tests/__init__.py b/ognibuild/tests/__init__.py
index 49e3b94..0072367 100644
--- a/ognibuild/tests/__init__.py
+++ b/ognibuild/tests/__init__.py
@@ -22,6 +22,8 @@ import unittest
 
 def test_suite():
     names = [
+        'debian_build',
+        'debian_fix_build',
     ]
     module_names = ['ognibuild.tests.test_' + name for name in names]
     loader = unittest.TestLoader()
diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py
new file mode 100644
index 0000000..da2541a
--- /dev/null
+++ b/ognibuild/tests/test_debian_build.py
@@ -0,0 +1,108 @@
+#!/usr/bin/python
+# Copyright (C) 2020 Jelmer Vernooij
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+import datetime
+from ..debian.build import add_dummy_changelog_entry
+
+from breezy.tests import TestCaseWithTransport
+
+
+class AddDummyChangelogEntryTests(TestCaseWithTransport):
+
+    def test_simple(self):
+        tree = self.make_branch_and_tree('.')
+        self.build_tree_contents([('debian/', ), ('debian/changelog', """\
+janitor (0.1-1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""")])
+        tree.add(['debian', 'debian/changelog'])
+        add_dummy_changelog_entry(
+            tree, '', 'jan+some', 'some-fixes', 'Dummy build.',
+            timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654),
+            maintainer=("Jelmer Vernooij", "jelmer@debian.org"))
+        self.assertFileEqual("""\
+janitor (0.1-1jan+some1) some-fixes; urgency=low
+
+  * Dummy build.
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 05 Sep 2020 12:35:04 -0000
+
+janitor (0.1-1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""", 'debian/changelog')
+
+    def test_native(self):
+        tree = self.make_branch_and_tree('.')
+        self.build_tree_contents([('debian/', ), ('debian/changelog', """\
+janitor (0.1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""")])
+        tree.add(['debian', 'debian/changelog'])
+        add_dummy_changelog_entry(
+            tree, '', 'jan+some', 'some-fixes', 'Dummy build.',
+            timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654),
+            maintainer=("Jelmer Vernooij", "jelmer@debian.org"))
+        self.assertFileEqual("""\
+janitor (0.1jan+some1) some-fixes; urgency=low
+
+  * Dummy build.
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 05 Sep 2020 12:35:04 -0000
+
+janitor (0.1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""", 'debian/changelog')
+
+    def test_exists(self):
+        tree = self.make_branch_and_tree('.')
+        self.build_tree_contents([('debian/', ), ('debian/changelog', """\
+janitor (0.1-1jan+some1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""")])
+        tree.add(['debian', 'debian/changelog'])
+        add_dummy_changelog_entry(
+            tree, '', 'jan+some', 'some-fixes', 'Dummy build.',
+            timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654),
+            maintainer=("Jelmer Vernooij", "jelmer@debian.org"))
+        self.assertFileEqual("""\
+janitor (0.1-1jan+some2) some-fixes; urgency=low
+
+  * Dummy build.
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 05 Sep 2020 12:35:04 -0000
+
+janitor (0.1-1jan+some1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""", 'debian/changelog')
diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py
new file mode 100644
index 0000000..7b0fa18
--- /dev/null
+++ b/ognibuild/tests/test_debian_fix_build.py
@@ -0,0 +1,201 @@
+#!/usr/bin/python
+# Copyright (C) 2020 Jelmer Vernooij
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+import re
+
+from debian.deb822 import Deb822
+
+from buildlog_consultant.sbuild import (
+    MissingCommand,
+    MissingGoPackage,
+    MissingPerlModule,
+    MissingPkgConfig,
+    MissingPythonModule,
+    MissingRubyFile,
+    MissingRubyGem,
+    MissingValaPackage,
+    )
+from ..debian import fix_build
+from ..debian.fix_build import (
+    resolve_error,
+    VERSIONED_PACKAGE_FIXERS,
+    APT_FIXERS,
+    BuildDependencyContext,
+    )
+from breezy.tests import TestCaseWithTransport
+
+
+class ResolveErrorTests(TestCaseWithTransport):
+
+    def setUp(self):
+        super(ResolveErrorTests, self).setUp()
+        self.tree = self.make_branch_and_tree('.')
+        self.build_tree_contents([('debian/', ), ('debian/control', """\
+Source: blah
+Build-Depends: libc6
+
+Package: python-blah
+Depends: ${python3:Depends}
+Description: A python package
+ Foo
+"""), ('debian/changelog', """\
+blah (0.1) UNRELEASED; urgency=medium
+
+  * Initial release. (Closes: #XXXXXX)
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sat, 04 Apr 2020 14:12:13 +0000
+""")])
+        self.tree.add(['debian', 'debian/control', 'debian/changelog'])
+        self.tree.commit('Initial commit')
+        self.overrideAttr(fix_build, 'search_apt_file', self._search_apt_file)
+        self._apt_files = {}
+
+    def _search_apt_file(self, path, regex=False):
+        for p, pkg in sorted(self._apt_files.items()):
+            if regex:
+                if re.match(path, p):
+                    yield pkg
+            else:
+                if path == p:
+                    yield pkg
+
+    def resolve(self, error, context=('build', )):
+        context = BuildDependencyContext(
+            self.tree, subpath='', committer='Janitor ',
+            update_changelog=True)
+        return resolve_error(
+            error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS)
+
+    def get_build_deps(self):
+        with open(self.tree.abspath('debian/control'), 'r') as f:
+            return next(Deb822.iter_paragraphs(f)).get('Build-Depends', '')
+
+    def test_missing_command_unknown(self):
+        self._apt_files = {}
+        self.assertFalse(self.resolve(
+            MissingCommand('acommandthatdoesnotexist')))
+
+    def test_missing_command_brz(self):
+        self._apt_files = {
+            '/usr/bin/b': 'bash',
+            '/usr/bin/brz': 'brz',
+            '/usr/bin/brzier': 'bash',
+            }
+        self.assertTrue(self.resolve(MissingCommand('brz')))
+        self.assertEqual('libc6, brz', self.get_build_deps())
+        rev = self.tree.branch.repository.get_revision(
+            self.tree.branch.last_revision())
+        self.assertEqual(
+            'Add missing build dependency on brz.\n',
+            rev.message)
+        self.assertFalse(self.resolve(MissingCommand('brz')))
+        self.assertEqual('libc6, brz', self.get_build_deps())
+
+    def test_missing_command_ps(self):
+        self._apt_files = {
+            '/bin/ps': 'procps',
+            '/usr/bin/pscal': 'xcal',
+            }
+        self.assertTrue(self.resolve(MissingCommand('ps')))
+        self.assertEqual('libc6, procps', self.get_build_deps())
+
+    def test_missing_ruby_file(self):
+        self._apt_files = {
+            '/usr/lib/ruby/vendor_ruby/rake/testtask.rb': 'rake',
+            }
+        self.assertTrue(self.resolve(MissingRubyFile('rake/testtask')))
+        self.assertEqual('libc6, rake', self.get_build_deps())
+
+    def test_missing_ruby_file_from_gem(self):
+        self._apt_files = {
+            '/usr/share/rubygems-integration/all/gems/activesupport-'
+            '5.2.3/lib/active_support/core_ext/string/strip.rb':
+            'ruby-activesupport'}
+        self.assertTrue(self.resolve(
+            MissingRubyFile('active_support/core_ext/string/strip')))
+        self.assertEqual('libc6, ruby-activesupport', self.get_build_deps())
+
+    def test_missing_ruby_gem(self):
+        self._apt_files = {
+            '/usr/share/rubygems-integration/all/specifications/'
+            'bio-1.5.2.gemspec': 'ruby-bio',
+            '/usr/share/rubygems-integration/all/specifications/'
+            'bio-2.0.2.gemspec': 'ruby-bio',
+            }
+        self.assertTrue(self.resolve(MissingRubyGem('bio', None)))
+        self.assertEqual('libc6, ruby-bio', self.get_build_deps())
+        self.assertTrue(self.resolve(MissingRubyGem('bio', '2.0.3')))
+        self.assertEqual('libc6, ruby-bio (>= 2.0.3)', self.get_build_deps())
+
+    def test_missing_perl_module(self):
+        self._apt_files = {
+            '/usr/share/perl5/App/cpanminus/fatscript.pm': 'cpanminus'}
+        self.assertTrue(self.resolve(MissingPerlModule(
+            'App/cpanminus/fatscript.pm', 'App::cpanminus::fatscript', [
+                '/<>/blib/lib',
+                '/<>/blib/arch',
+                '/etc/perl',
+                '/usr/local/lib/x86_64-linux-gnu/perl/5.30.0',
+                '/usr/local/share/perl/5.30.0',
+                '/usr/lib/x86_64-linux-gnu/perl5/5.30',
+                '/usr/share/perl5',
+                '/usr/lib/x86_64-linux-gnu/perl/5.30',
+                '/usr/share/perl/5.30',
+                '/usr/local/lib/site_perl',
+                '/usr/lib/x86_64-linux-gnu/perl-base',
+                '.'])))
+        self.assertEqual('libc6, cpanminus', self.get_build_deps())
+
+    def test_missing_pkg_config(self):
+        self._apt_files = {
+            '/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc':
+            'libxcb-xfixes0-dev'}
+        self.assertTrue(self.resolve(MissingPkgConfig('xcb-xfixes')))
+        self.assertEqual('libc6, libxcb-xfixes0-dev', self.get_build_deps())
+
+    def test_missing_pkg_config_versioned(self):
+        self._apt_files = {
+            '/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc':
+            'libxcb-xfixes0-dev'}
+        self.assertTrue(self.resolve(MissingPkgConfig('xcb-xfixes', '1.0')))
+        self.assertEqual(
+            'libc6, libxcb-xfixes0-dev (>= 1.0)', self.get_build_deps())
+
+    def test_missing_python_module(self):
+        self._apt_files = {
+            '/usr/lib/python3/dist-packages/m2r.py': 'python3-m2r'
+            }
+        self.assertTrue(self.resolve(MissingPythonModule('m2r')))
+        self.assertEqual('libc6, python3-m2r', self.get_build_deps())
+
+    def test_missing_go_package(self):
+        self._apt_files = {
+            '/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go':
+            'golang-github-chzyer-readline-dev',
+            }
+        self.assertTrue(self.resolve(
+            MissingGoPackage('github.com/chzyer/readline')))
+        self.assertEqual(
+            'libc6, golang-github-chzyer-readline-dev',
+            self.get_build_deps())
+
+    def test_missing_vala_package(self):
+        self._apt_files = {
+            '/usr/share/vala-0.48/vapi/posix.vapi': 'valac-0.48-vapi',
+            }
+        self.assertTrue(self.resolve(MissingValaPackage('posix')))
+        self.assertEqual('libc6, valac-0.48-vapi', self.get_build_deps())
diff --git a/setup.cfg b/setup.cfg
index eb9fed9..fc17caa 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,6 @@
 [flake8]
 application-package-names = ognibuild
+banned-modules = silver-platter = Should not use silver-platter
 
 [mypy]
 # A number of ognibuilds' dependencies don't have type hints yet
diff --git a/setup.py b/setup.py
index dacac1f..55075f2 100755
--- a/setup.py
+++ b/setup.py
@@ -23,5 +23,10 @@ setup(name="ognibuild",
       ],
       entry_points={
           "console_scripts": [
-              "ogni=ognibuild.__main__:main"]
-      })
+              "ogni=ognibuild.__main__:main",
+              "deb-fix-build=ognibuild.debian.fix_build:main",
+          ]
+      },
+      install_requires=['breezy', 'buildlog-consultant'],
+      test_suite='ognibuild.tests.test_suite',
+      )