From c93cf32cb9dd51683bc5c802f438743bd7529d31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 14:10:25 +0000 Subject: [PATCH 01/83] Import Debian package fixing logic. --- README.md | 3 + ognibuild/debian/build.py | 184 ++++ ognibuild/debian/fix_build.py | 1200 ++++++++++++++++++++++ ognibuild/dist.py | 524 ++++++++++ ognibuild/tests/__init__.py | 2 + ognibuild/tests/test_debian_build.py | 108 ++ ognibuild/tests/test_debian_fix_build.py | 201 ++++ setup.cfg | 1 + setup.py | 9 +- 9 files changed, 2230 insertions(+), 2 deletions(-) create mode 100644 ognibuild/debian/build.py create mode 100644 ognibuild/debian/fix_build.py create mode 100644 ognibuild/dist.py create mode 100644 ognibuild/tests/test_debian_build.py create mode 100644 ognibuild/tests/test_debian_fix_build.py diff --git a/README.md b/README.md index 20729da..30d9861 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,9 @@ Ognibuild has a number of subcommands: * ``ogni install`` - install the package * ``ogni test`` - run the testsuite in the source directory +It also includes a subcommand that can fix up the build dependencies +for Debian packages, called deb-fix-build. + License ------- diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py new file mode 100644 index 0000000..5445278 --- /dev/null +++ b/ognibuild/debian/build.py @@ -0,0 +1,184 @@ +#!/usr/bin/python +# Copyright (C) 2018 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +__all__ = [ + 'changes_filename', + 'get_build_architecture', + 'add_dummy_changelog_entry', + 'build', + 'SbuildFailure', +] + +from datetime import datetime +import logging +import os +import re +import subprocess +import sys + +from debian.changelog import Changelog +from debmutate.changelog import get_maintainer, format_datetime + +from breezy import osutils +from breezy.plugins.debian.util import ( + changes_filename, + get_build_architecture, + ) +from breezy.mutabletree import MutableTree +from silver_platter.debian import ( + BuildFailedError, + DEFAULT_BUILDER, + ) + +from buildlog_consultant.sbuild import ( + worker_failure_from_sbuild_log, + SbuildFailure, + ) + + +class MissingChangesFile(Exception): + """Expected changes file was not written.""" + + def __init__(self, filename): + self.filename = filename + + +def add_dummy_changelog_entry( + tree: MutableTree, subpath: str, suffix: str, suite: str, + message: str, timestamp=None, maintainer=None): + """Add a dummy changelog entry to a package. 
+ + Args: + directory: Directory to run in + suffix: Suffix for the version + suite: Debian suite + message: Changelog message + """ + def add_suffix(v, suffix): + m = re.fullmatch('(.*)(' + re.escape(suffix) + ')([0-9]+)', v,) + if m: + return (m.group(1) + m.group(2) + '%d' % (int(m.group(3)) + 1)) + else: + return v + suffix + '1' + + path = os.path.join(subpath, 'debian', 'changelog') + if maintainer is None: + maintainer = get_maintainer() + if timestamp is None: + timestamp = datetime.now() + with tree.get_file(path) as f: + cl = Changelog() + cl.parse_changelog( + f, max_blocks=None, allow_empty_author=True, strict=False) + version = cl[0].version + if version.debian_revision: + version.debian_revision = add_suffix( + version.debian_revision, suffix) + else: + version.upstream_version = add_suffix( + version.upstream_version, suffix) + cl.new_block( + package=cl[0].package, + version=version, + urgency='low', + distributions=suite, + author='%s <%s>' % maintainer, + date=format_datetime(timestamp), + changes=['', ' * ' + message, '']) + cl_str = cl._format(allow_missing_author=True) + tree.put_file_bytes_non_atomic(path, cl_str.encode(cl._encoding)) + + +def get_latest_changelog_version(local_tree, subpath=''): + path = osutils.pathjoin(subpath, 'debian/changelog') + with local_tree.get_file(path) as f: + cl = Changelog(f, max_blocks=1) + return cl.package, cl.version + + +def build(local_tree, outf, build_command=DEFAULT_BUILDER, result_dir=None, + distribution=None, subpath='', source_date_epoch=None): + args = [sys.executable, '-m', 'breezy', 'builddeb', + '--guess-upstream-branch-url', '--builder=%s' % build_command] + if result_dir: + args.append('--result-dir=%s' % result_dir) + outf.write('Running %r\n' % (build_command, )) + outf.flush() + env = dict(os.environ.items()) + if distribution is not None: + env['DISTRIBUTION'] = distribution + if source_date_epoch is not None: + env['SOURCE_DATE_EPOCH'] = '%d' % source_date_epoch + logging.info('Building 
debian packages, running %r.', build_command) + try: + subprocess.check_call( + args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, + env=env) + except subprocess.CalledProcessError: + raise BuildFailedError() + + +def build_once( + local_tree, build_suite, output_directory, build_command, + subpath='', source_date_epoch=None): + build_log_path = os.path.join(output_directory, 'build.log') + try: + with open(build_log_path, 'w') as f: + build(local_tree, outf=f, build_command=build_command, + result_dir=output_directory, distribution=build_suite, + subpath=subpath, source_date_epoch=source_date_epoch) + except BuildFailedError: + with open(build_log_path, 'rb') as f: + raise worker_failure_from_sbuild_log(f) + + (cl_package, cl_version) = get_latest_changelog_version( + local_tree, subpath) + changes_name = changes_filename( + cl_package, cl_version, get_build_architecture()) + changes_path = os.path.join(output_directory, changes_name) + if not os.path.exists(changes_path): + raise MissingChangesFile(changes_name) + return (changes_name, cl_version) + + +def gbp_dch(path): + subprocess.check_call(['gbp', 'dch'], cwd=path) + + +def attempt_build( + local_tree, suffix, build_suite, output_directory, build_command, + build_changelog_entry='Build for debian-janitor apt repository.', + subpath='', source_date_epoch=None): + """Attempt a build, with a custom distribution set. + + Args: + local_tree: Tree to build in + suffix: Suffix to add to version string + build_suite: Name of suite (i.e. 
distribution) to build for + output_directory: Directory to write output to + build_command: Build command to build package + build_changelog_entry: Changelog entry to use + subpath: Sub path in tree where package lives + source_date_epoch: Source date epoch to set + Returns: Tuple with (changes_name, cl_version) + """ + add_dummy_changelog_entry( + local_tree, subpath, suffix, build_suite, + build_changelog_entry) + return build_once( + local_tree, build_suite, output_directory, build_command, subpath, + source_date_epoch=source_date_epoch) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py new file mode 100644 index 0000000..18c31bc --- /dev/null +++ b/ognibuild/debian/fix_build.py @@ -0,0 +1,1200 @@ +#!/usr/bin/python +# Copyright (C) 2018 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +__all__ = [ + 'build_incrementally', +] + +import logging +import os +import re +import subprocess +import sys +from typing import Iterator, List, Callable, Type, Tuple, Set + +from debian.deb822 import ( + Deb822, + PkgRelation, + Release, + ) + +from breezy.commit import PointlessCommit +from breezy.tree import Tree +from debmutate.control import ( + ensure_some_version, + ensure_minimum_version, + pg_buildext_updatecontrol, + ControlEditor, + ) +from debmutate.debhelper import ( + get_debhelper_compat_level, + ) +from debmutate.deb822 import ( + Deb822Editor, + ) +from debmutate.reformatting import ( + FormattingUnpreservable, + GeneratedFile, + ) +from lintian_brush import ( + reset_tree, + ) +from lintian_brush.changelog import ( + add_changelog_entry, + ) + +from lintian_brush.rules import ( + dh_invoke_add_with, + update_rules, + ) +from silver_platter.debian import ( + debcommit, + DEFAULT_BUILDER, + ) + +from breezy.plugins.debian.util import get_build_architecture +from .build import attempt_build +from buildlog_consultant.sbuild import ( + Problem, + MissingConfigStatusInput, + MissingPythonModule, + MissingPythonDistribution, + MissingCHeader, + MissingPkgConfig, + MissingCommand, + MissingFile, + MissingJavaScriptRuntime, + MissingSprocketsFile, + MissingGoPackage, + MissingPerlFile, + MissingPerlModule, + MissingXmlEntity, + MissingJDKFile, + MissingNodeModule, + MissingPhpClass, + MissingRubyGem, + MissingLibrary, + MissingJavaClass, + MissingCSharpCompiler, + MissingConfigure, + MissingAutomakeInput, + MissingRPackage, + MissingRubyFile, + MissingAutoconfMacro, + MissingValaPackage, + MissingXfceDependency, + MissingHaskellDependencies, + NeedPgBuildExtUpdateControl, + SbuildFailure, + DhAddonLoadFailure, + AptFetchFailure, + 
MissingMavenArtifacts, + GnomeCommonMissing, + MissingGnomeCommonDependency, + ) + + +DEFAULT_MAX_ITERATIONS = 10 + + +class CircularDependency(Exception): + """Adding dependency would introduce cycle.""" + + def __init__(self, package): + self.package = package + + +class DependencyContext(object): + + def __init__(self, tree, subpath='', committer=None, + update_changelog=True): + self.tree = tree + self.subpath = subpath + self.committer = committer + self.update_changelog = update_changelog + + def add_dependency(self, package, minimum_version=None): + raise NotImplementedError(self.add_dependency) + + +class BuildDependencyContext(DependencyContext): + + def add_dependency(self, package, minimum_version=None): + return add_build_dependency( + self.tree, package, minimum_version=minimum_version, + committer=self.committer, subpath=self.subpath, + update_changelog=self.update_changelog) + + +class AutopkgtestDependencyContext(DependencyContext): + + def __init__(self, testname, tree, subpath='', committer=None, + update_changelog=True): + self.testname = testname + super(AutopkgtestDependencyContext, self).__init__( + tree, subpath, committer, update_changelog) + + def add_dependency(self, package, minimum_version=None): + return add_test_dependency( + self.tree, self.testname, package, + minimum_version=minimum_version, + committer=self.committer, subpath=self.subpath, + update_changelog=self.update_changelog) + + +def add_build_dependency(tree, package, minimum_version=None, + committer=None, subpath='', update_changelog=True): + if not isinstance(package, str): + raise TypeError(package) + + control_path = os.path.join(tree.abspath(subpath), 'debian/control') + try: + with ControlEditor(path=control_path) as updater: + for binary in updater.binaries: + if binary["Package"] == package: + raise CircularDependency(package) + if minimum_version: + updater.source["Build-Depends"] = ensure_minimum_version( + updater.source.get("Build-Depends", ""), + package, 
minimum_version) + else: + updater.source["Build-Depends"] = ensure_some_version( + updater.source.get("Build-Depends", ""), package) + except FormattingUnpreservable as e: + logging.info( + 'Unable to edit %s in a way that preserves formatting.', + e.path) + return False + + if minimum_version: + desc = "%s (>= %s)" % (package, minimum_version) + else: + desc = package + + if not updater.changed: + logging.info('Giving up; dependency %s was already present.', desc) + return False + + logging.info("Adding build dependency: %s", desc) + return commit_debian_changes( + tree, subpath, "Add missing build dependency on %s." % desc, + committer=committer, update_changelog=update_changelog) + + +def add_test_dependency(tree, testname, package, minimum_version=None, + committer=None, subpath='', update_changelog=True): + if not isinstance(package, str): + raise TypeError(package) + + tests_control_path = os.path.join( + tree.abspath(subpath), 'debian/tests/control') + + try: + with Deb822Editor(path=tests_control_path) as updater: + command_counter = 1 + for control in updater.paragraphs: + try: + name = control["Tests"] + except KeyError: + name = "command%d" % command_counter + command_counter += 1 + if name != testname: + continue + if minimum_version: + control["Depends"] = ensure_minimum_version( + control.get("Depends", ""), + package, minimum_version) + else: + control["Depends"] = ensure_some_version( + control.get("Depends", ""), package) + except FormattingUnpreservable as e: + logging.info( + 'Unable to edit %s in a way that preserves formatting.', + e.path) + return False + if not updater.changed: + return False + + if minimum_version: + desc = "%s (>= %s)" % (package, minimum_version) + else: + desc = package + + logging.info("Adding dependency to test %s: %s", testname, desc) + return commit_debian_changes( + tree, subpath, + "Add missing dependency for test %s on %s." 
% (testname, desc), + update_changelog=update_changelog) + + +def commit_debian_changes(tree, subpath, summary, committer=None, + update_changelog=True): + with tree.lock_write(): + try: + if update_changelog: + add_changelog_entry( + tree, os.path.join(subpath, 'debian/changelog'), [summary]) + debcommit(tree, committer=committer, subpath=subpath) + else: + tree.commit(message=summary, committer=committer, + specific_files=[subpath]) + except PointlessCommit: + return False + else: + return True + + +class FileSearcher(object): + + def search_files(self, path, regex=False): + raise NotImplementedError(self.search_files) + + +class ContentsFileNotFound(Exception): + """The contents file was not found.""" + + +class AptContentsFileSearcher(FileSearcher): + + def __init__(self): + self._db = {} + + @classmethod + def from_env(cls): + sources = os.environ['REPOSITORIES'].split(':') + return cls.from_repositories(sources) + + def __setitem__(self, path, package): + self._db[path] = package + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._db.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + def load_file(self, f): + for line in f: + (path, rest) = line.rsplit(maxsplit=1) + package = rest.split(b'/')[-1] + decoded_path = '/' + path.decode('utf-8', 'surrogateescape') + self[decoded_path] = package.decode('utf-8') + + @classmethod + def from_urls(cls, urls): + self = cls() + for url in urls: + self.load_url(url) + return self + + @classmethod + def from_repositories(cls, sources): + # TODO(jelmer): Verify signatures, etc. 
+ urls = [] + arches = [get_build_architecture(), 'all'] + for source in sources: + parts = source.split(' ') + if parts[0] != 'deb': + logging.warning('Invalid line in sources: %r', source) + continue + base_url = parts[1] + name = parts[2] + components = parts[3:] + response = cls._get('%s/%s/Release' % (base_url, name)) + r = Release(response) + desired_files = set() + for component in components: + for arch in arches: + desired_files.add('%s/Contents-%s' % (component, arch)) + for entry in r['MD5Sum']: + if entry['name'] in desired_files: + urls.append('%s/%s/%s' % (base_url, name, entry['name'])) + return cls.from_urls(urls) + + @staticmethod + def _get(url): + from urllib.request import urlopen, Request + request = Request(url, headers={'User-Agent': 'Debian Janitor'}) + return urlopen(request) + + def load_url(self, url): + from urllib.error import HTTPError + try: + response = self._get(url) + except HTTPError as e: + if e.status == 404: + raise ContentsFileNotFound(url) + raise + if url.endswith('.gz'): + import gzip + f = gzip.GzipFile(fileobj=response) + elif response.headers.get_content_type() == 'text/plain': + f = response + else: + raise Exception( + 'Unknown content type %r' % + response.headers.get_content_type()) + self.load_file(f) + + +class GeneratedFileSearcher(FileSearcher): + + def __init__(self, db): + self._db = db + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._db.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + +# TODO(jelmer): read from a file +GENERATED_FILE_SEARCHER = GeneratedFileSearcher({ + '/etc/locale.gen': 'locales', + # Alternative + '/usr/bin/rst2html': '/usr/share/docutils/scripts/python3/rst2html'}) + + +_apt_file_searcher = None + + +def search_apt_file(path: str, regex: bool = False) -> Iterator[FileSearcher]: + global _apt_file_searcher + if _apt_file_searcher is None: + # TODO(jelmer): cache file + _apt_file_searcher = 
AptContentsFileSearcher.from_env() + if _apt_file_searcher: + yield from _apt_file_searcher.search_files(path, regex=regex) + yield from GENERATED_FILE_SEARCHER.search_files(path, regex=regex) + + +def get_package_for_paths(paths, regex=False): + candidates = set() + for path in paths: + candidates.update(search_apt_file(path, regex=regex)) + if candidates: + break + if len(candidates) == 0: + logging.warning('No packages found that contain %r', paths) + return None + if len(candidates) > 1: + logging.warning( + 'More than 1 packages found that contain %r: %r', + path, candidates) + # Euhr. Pick the one with the shortest name? + return sorted(candidates, key=len)[0] + else: + return candidates.pop() + + +def get_package_for_python_module(module, python_version): + if python_version == 'python3': + paths = [ + os.path.join( + '/usr/lib/python3/dist-packages', + module.replace('.', '/'), + '__init__.py'), + os.path.join( + '/usr/lib/python3/dist-packages', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/python3\\.[0-9]+/lib-dynload', + module.replace('.', '/') + '\\.cpython-.*\\.so'), + os.path.join( + '/usr/lib/python3\\.[0-9]+/', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/python3\\.[0-9]+/', + module.replace('.', '/'), '__init__.py'), + ] + elif python_version == 'python2': + paths = [ + os.path.join( + '/usr/lib/python2\\.[0-9]/dist-packages', + module.replace('.', '/'), + '__init__.py'), + os.path.join( + '/usr/lib/python2\\.[0-9]/dist-packages', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/python2.\\.[0-9]/lib-dynload', + module.replace('.', '/') + '.so')] + elif python_version == 'pypy': + paths = [ + os.path.join( + '/usr/lib/pypy/dist-packages', + module.replace('.', '/'), + '__init__.py'), + os.path.join( + '/usr/lib/pypy/dist-packages', + module.replace('.', '/') + '.py'), + os.path.join( + '/usr/lib/pypy/dist-packages', + module.replace('.', '/') + '\\.pypy-.*\\.so'), + ] + else: + raise 
AssertionError( + 'unknown python version %r' % python_version) + return get_package_for_paths(paths, regex=True) + + +def targeted_python_versions(tree: Tree) -> Set[str]: + with tree.get_file('debian/control') as f: + control = Deb822(f) + build_depends = PkgRelation.parse_relations( + control.get('Build-Depends', '')) + all_build_deps: Set[str] = set() + for or_deps in build_depends: + all_build_deps.update(or_dep['name'] for or_dep in or_deps) + targeted = set() + if any(x.startswith('pypy') for x in all_build_deps): + targeted.add('pypy') + if any(x.startswith('python-') for x in all_build_deps): + targeted.add('cpython2') + if any(x.startswith('python3-') for x in all_build_deps): + targeted.add('cpython3') + return targeted + + +apt_cache = None + + +def package_exists(package): + global apt_cache + if apt_cache is None: + import apt_pkg + apt_cache = apt_pkg.Cache() + for p in apt_cache.packages: + if p.name == package: + return True + return False + + +def fix_missing_javascript_runtime(error, context): + package = get_package_for_paths( + ['/usr/bin/node', '/usr/bin/duk'], + regex=False) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_python_distribution(error, context): + targeted = targeted_python_versions(context.tree) + default = not targeted + + pypy_pkg = get_package_for_paths( + ['/usr/lib/pypy/dist-packages/%s-.*.egg-info' % error.distribution], + regex=True) + if pypy_pkg is None: + pypy_pkg = 'pypy-%s' % error.distribution + if not package_exists(pypy_pkg): + pypy_pkg = None + + py2_pkg = get_package_for_paths( + ['/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info' % + error.distribution], regex=True) + if py2_pkg is None: + py2_pkg = 'python-%s' % error.distribution + if not package_exists(py2_pkg): + py2_pkg = None + + py3_pkg = get_package_for_paths( + ['/usr/lib/python3/dist-packages/%s-.*.egg-info' % + error.distribution], regex=True) + if py3_pkg is None: + py3_pkg = 'python3-%s' % 
error.distribution + if not package_exists(py3_pkg): + py3_pkg = None + + extra_build_deps = [] + if error.python_version == 2: + if 'pypy' in targeted: + if not pypy_pkg: + logging.warning('no pypy package found for %s', error.module) + else: + extra_build_deps.append(pypy_pkg) + if 'cpython2' in targeted or default: + if not py2_pkg: + logging.warning( + 'no python 2 package found for %s', error.module) + return False + extra_build_deps.append(py2_pkg) + elif error.python_version == 3: + if not py3_pkg: + logging.warning('no python 3 package found for %s', error.module) + return False + extra_build_deps.append(py3_pkg) + else: + if py3_pkg and ('cpython3' in targeted or default): + extra_build_deps.append(py3_pkg) + if py2_pkg and ('cpython2' in targeted or default): + extra_build_deps.append(py2_pkg) + if pypy_pkg and 'pypy' in targeted: + extra_build_deps.append(pypy_pkg) + + if not extra_build_deps: + return False + + for dep_pkg in extra_build_deps: + assert dep_pkg is not None + if not context.add_dependency( + dep_pkg, minimum_version=error.minimum_version): + return False + return True + + +def fix_missing_python_module(error, context): + if getattr(context, 'tree', None) is not None: + targeted = targeted_python_versions(context.tree) + else: + targeted = set() + default = (not targeted) + + pypy_pkg = get_package_for_python_module(error.module, 'pypy') + py2_pkg = get_package_for_python_module(error.module, 'python2') + py3_pkg = get_package_for_python_module(error.module, 'python3') + + extra_build_deps = [] + if error.python_version == 2: + if 'pypy' in targeted: + if not pypy_pkg: + logging.warning('no pypy package found for %s', error.module) + else: + extra_build_deps.append(pypy_pkg) + if 'cpython2' in targeted or default: + if not py2_pkg: + logging.warning( + 'no python 2 package found for %s', error.module) + return False + extra_build_deps.append(py2_pkg) + elif error.python_version == 3: + if not py3_pkg: + logging.warning( + 'no python 3 
package found for %s', error.module) + return False + extra_build_deps.append(py3_pkg) + else: + if py3_pkg and ('cpython3' in targeted or default): + extra_build_deps.append(py3_pkg) + if py2_pkg and ('cpython2' in targeted or default): + extra_build_deps.append(py2_pkg) + if pypy_pkg and 'pypy' in targeted: + extra_build_deps.append(pypy_pkg) + + if not extra_build_deps: + return False + + for dep_pkg in extra_build_deps: + assert dep_pkg is not None + if not context.add_dependency(dep_pkg, error.minimum_version): + return False + return True + + +def fix_missing_go_package(error, context): + package = get_package_for_paths( + [os.path.join('/usr/share/gocode/src', error.package, '.*')], + regex=True) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_c_header(error, context): + package = get_package_for_paths( + [os.path.join('/usr/include', error.header)], regex=False) + if package is None: + package = get_package_for_paths( + [os.path.join('/usr/include', '.*', error.header)], regex=True) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_pkg_config(error, context): + package = get_package_for_paths( + [os.path.join('/usr/lib/pkgconfig', error.module + '.pc')]) + if package is None: + package = get_package_for_paths( + [os.path.join('/usr/lib', '.*', 'pkgconfig', + error.module + '.pc')], + regex=True) + if package is None: + return False + return context.add_dependency( + package, minimum_version=error.minimum_version) + + +def fix_missing_command(error, context): + if os.path.isabs(error.command): + paths = [error.command] + else: + paths = [ + os.path.join(dirname, error.command) + for dirname in ['/usr/bin', '/bin']] + package = get_package_for_paths(paths) + if package is None: + logging.info('No packages found that contain %r', paths) + return False + return context.add_dependency(package) + + +def fix_missing_file(error, context): + package = 
get_package_for_paths([error.path]) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_sprockets_file(error, context): + if error.content_type == 'application/javascript': + path = '/usr/share/.*/app/assets/javascripts/%s.js$' % error.name + else: + logging.warning('unable to handle content type %s', error.content_type) + return False + package = get_package_for_paths([path], regex=True) + if package is None: + return False + return context.add_dependency(package) + + +DEFAULT_PERL_PATHS = ['/usr/share/perl5'] + + +def fix_missing_perl_file(error, context): + + if (error.filename == 'Makefile.PL' and + not context.tree.has_filename('Makefile.PL') and + context.tree.has_filename('dist.ini')): + # TODO(jelmer): add dist-zilla add-on to debhelper + raise NotImplementedError + + if error.inc is None: + if error.filename is None: + filename = error.module.replace('::', '/') + '.pm' + paths = [os.path.join(inc, filename) + for inc in DEFAULT_PERL_PATHS] + elif not os.path.isabs(error.filename): + return False + else: + paths = [error.filename] + else: + paths = [os.path.join(inc, error.filename) for inc in error.inc] + package = get_package_for_paths(paths, regex=False) + if package is None: + if getattr(error, 'module', None): + logging.warning( + 'no perl package found for %s (%r).', + error.module, error.filename) + else: + logging.warning( + 'perl file %s not found (paths searched for: %r).', + error.filename, paths) + return False + return context.add_dependency(package) + + +def get_package_for_node_package(node_package): + paths = [ + '/usr/share/nodejs/.*/node_modules/%s/package.json' % node_package, + '/usr/lib/nodejs/%s/package.json' % node_package, + '/usr/share/nodejs/%s/package.json' % node_package] + return get_package_for_paths(paths, regex=True) + + +def fix_missing_node_module(error, context): + package = get_package_for_node_package(error.module) + if package is None: + logging.warning( + 'no node package 
found for %s.', + error.module) + return False + return context.add_dependency(package) + + +def fix_missing_dh_addon(error, context): + paths = [os.path.join('/usr/share/perl5', error.path)] + package = get_package_for_paths(paths) + if package is None: + logging.warning('no package for debhelper addon %s', error.name) + return False + return context.add_dependency(package) + + +def retry_apt_failure(error, context): + return True + + +def fix_missing_php_class(error, context): + path = '/usr/share/php/%s.php' % error.php_class.replace('\\', '/') + package = get_package_for_paths([path]) + if package is None: + logging.warning('no package for PHP class %s', error.php_class) + return False + return context.add_dependency(package) + + +def fix_missing_jdk_file(error, context): + path = error.jdk_path + '.*/' + error.filename + package = get_package_for_paths([path], regex=True) + if package is None: + logging.warning( + 'no package found for %s (JDK: %s) - regex %s', + error.filename, error.jdk_path, path) + return False + return context.add_dependency(package) + + +def fix_missing_vala_package(error, context): + path = '/usr/share/vala-[0-9.]+/vapi/%s.vapi' % error.package + package = get_package_for_paths([path], regex=True) + if package is None: + logging.warning( + 'no file found for package %s - regex %s', + error.package, path) + return False + return context.add_dependency(package) + + +def fix_missing_xml_entity(error, context): + # Ideally we should be using the XML catalog for this, but hardcoding + # a few URLs will do for now.. 
+ URL_MAP = { + 'http://www.oasis-open.org/docbook/xml/': + '/usr/share/xml/docbook/schema/dtd/' + } + for url, path in URL_MAP.items(): + if error.url.startswith(url): + search_path = os.path.join(path, error.url[len(url):]) + break + else: + return False + + package = get_package_for_paths([search_path], regex=False) + if package is None: + return False + return context.add_dependency(package) + + +def fix_missing_library(error, context): + paths = [os.path.join('/usr/lib/lib%s.so$' % error.library), + os.path.join('/usr/lib/.*/lib%s.so$' % error.library), + os.path.join('/usr/lib/lib%s.a$' % error.library), + os.path.join('/usr/lib/.*/lib%s.a$' % error.library)] + package = get_package_for_paths(paths, regex=True) + if package is None: + logging.warning('no package for library %s', error.library) + return False + return context.add_dependency(package) + + +def fix_missing_ruby_gem(error, context): + paths = [os.path.join( + '/usr/share/rubygems-integration/all/' + 'specifications/%s-.*\\.gemspec' % error.gem)] + package = get_package_for_paths(paths, regex=True) + if package is None: + logging.warning('no package for gem %s', error.gem) + return False + return context.add_dependency(package, minimum_version=error.version) + + +def fix_missing_ruby_file(error, context): + paths = [ + os.path.join('/usr/lib/ruby/vendor_ruby/%s.rb' % error.filename)] + package = get_package_for_paths(paths) + if package is not None: + return context.add_dependency(package) + paths = [ + os.path.join(r'/usr/share/rubygems-integration/all/gems/([^/]+)/' + 'lib/%s.rb' % error.filename)] + package = get_package_for_paths(paths, regex=True) + if package is not None: + return context.add_dependency(package) + + logging.warning('no package for ruby file %s', error.filename) + return False + + +def fix_missing_r_package(error, context): + paths = [os.path.join('/usr/lib/R/site-library/.*/R/%s$' % error.package)] + package = get_package_for_paths(paths, regex=True) + if package is None: + 
logging.warning('no package for R package %s', error.package) + return False + return context.add_dependency( + package, minimum_version=error.minimum_version) + + +def fix_missing_java_class(error, context): + # Unfortunately this only finds classes in jars installed on the host + # system :( + output = subprocess.check_output( + ["java-propose-classpath", "-c" + error.classname]) + classpath = [ + p for p in output.decode().strip(":").strip().split(':') if p] + if not classpath: + logging.warning('unable to find classpath for %s', error.classname) + return False + logging.info('Classpath for %s: %r', error.classname, classpath) + package = get_package_for_paths(classpath) + if package is None: + logging.warning('no package for files in %r', classpath) + return False + return context.add_dependency(package) + + +def enable_dh_autoreconf(context): + # Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by + # default. + debhelper_compat_version = get_debhelper_compat_level( + context.tree.abspath('.')) + if debhelper_compat_version is not None and debhelper_compat_version < 10: + def add_with_autoreconf(line, target): + if target != b'%': + return line + if not line.startswith(b'dh '): + return line + return dh_invoke_add_with(line, b'autoreconf') + + if update_rules(command_line_cb=add_with_autoreconf): + return context.add_dependency('dh-autoreconf') + + return False + + +def fix_missing_configure(error, context): + if (not context.tree.has_filename('configure.ac') and + not context.tree.has_filename('configure.in')): + return False + + return enable_dh_autoreconf(context) + + +def fix_missing_automake_input(error, context): + # TODO(jelmer): If it's ./NEWS, ./AUTHORS or ./README that's missing, then + # try to set 'export AUTOMAKE = automake --foreign' in debian/rules. 
+ # https://salsa.debian.org/jelmer/debian-janitor/issues/88 + return enable_dh_autoreconf(context) + + +def fix_missing_maven_artifacts(error, context): + artifact = error.artifacts[0] + parts = artifact.split(':') + if len(parts) == 4: + (group_id, artifact_id, kind, version) = parts + regex = False + elif len(parts) == 3: + (group_id, artifact_id, version) = parts + kind = 'jar' + regex = False + elif len(parts) == 2: + version = '.*' + (group_id, artifact_id) = parts + kind = 'jar' + regex = True + else: + raise AssertionError( + 'invalid number of parts to artifact %s' % artifact) + paths = [os.path.join( + '/usr/share/maven-repo', group_id.replace('.', '/'), + artifact_id, version, '%s-%s.%s' % (artifact_id, version, kind))] + package = get_package_for_paths(paths, regex=regex) + if package is None: + logging.warning('no package for artifact %s', artifact) + return False + return context.add_dependency(package) + + +def install_gnome_common(error, context): + return context.add_dependency('gnome-common') + + +def install_gnome_common_dep(error, context): + if error.package == 'glib-gettext': + package = get_package_for_paths(['/usr/bin/glib-gettextize']) + else: + package = None + if package is None: + logging.warning('No debian package for package %s', error.package) + return False + return context.add_dependency( + package=package, + minimum_version=error.minimum_version) + + +def install_xfce_dep(error, context): + if error.package == 'gtk-doc': + package = get_package_for_paths(['/usr/bin/gtkdocize']) + else: + package = None + if package is None: + logging.warning('No debian package for package %s', error.package) + return False + return context.add_dependency(package=package) + + +def fix_missing_config_status_input(error, context): + autogen_path = 'autogen.sh' + rules_path = 'debian/rules' + if context.subpath not in ('.', ''): + autogen_path = os.path.join(context.subpath, autogen_path) + rules_path = os.path.join(context.subpath, rules_path) + if 
not context.tree.has_filename(autogen_path): + return False + + def add_autogen(mf): + rule = any(mf.iter_rules(b'override_dh_autoreconf')) + if rule: + return + rule = mf.add_rule(b'override_dh_autoreconf') + rule.append_command(b'dh_autoreconf ./autogen.sh') + + if not update_rules(makefile_cb=add_autogen, path=rules_path): + return False + + if context.update_changelog: + commit_debian_changes( + context.tree, context.subpath, + 'Run autogen.sh during build.', committer=context.committer, + update_changelog=context.update_changelog) + + return True + + +def _find_aclocal_fun(macro): + # TODO(jelmer): Use the API for codesearch.debian.net instead? + defun_prefix = b'AC_DEFUN([%s],' % macro.encode('ascii') + for entry in os.scandir('/usr/share/aclocal'): + if not entry.is_file(): + continue + with open(entry.path, 'rb') as f: + for line in f: + if line.startswith(defun_prefix): + return entry.path + raise KeyError + + +def run_pgbuildext_updatecontrol(error, context): + logging.info("Running 'pg_buildext updatecontrol'") + # TODO(jelmer): run in the schroot + pg_buildext_updatecontrol(context.tree.abspath(context.subpath)) + return commit_debian_changes( + context.tree, context.subpath, "Run 'pgbuildext updatecontrol'.", + committer=context.committer, update_changelog=False) + + +def fix_missing_autoconf_macro(error, context): + try: + path = _find_aclocal_fun(error.macro) + except KeyError: + logging.info('No local m4 file found defining %s', error.macro) + return False + package = get_package_for_paths([path]) + if package is None: + logging.warning('no package for macro file %s', path) + return False + return context.add_dependency(package) + + +def fix_missing_c_sharp_compiler(error, context): + return context.add_dependency('mono-mcs') + + +def fix_missing_haskell_dependencies(error, context): + path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % error.deps[0][0] + package = get_package_for_paths([path], regex=True) + if package is None: + logging.warning('no 
package for Haskell package file %s', path)
+        return False
+    return context.add_dependency(package)
+
+
+VERSIONED_PACKAGE_FIXERS: List[
+    Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [
+    (NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol),
+    (MissingConfigure, fix_missing_configure),
+    (MissingAutomakeInput, fix_missing_automake_input),
+]
+
+
+APT_FIXERS: List[
+    Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [
+    (MissingPythonModule, fix_missing_python_module),
+    (MissingPythonDistribution, fix_missing_python_distribution),
+    (MissingCHeader, fix_missing_c_header),
+    (MissingPkgConfig, fix_missing_pkg_config),
+    (MissingCommand, fix_missing_command),
+    (MissingFile, fix_missing_file),
+    (MissingSprocketsFile, fix_missing_sprockets_file),
+    (MissingGoPackage, fix_missing_go_package),
+    (MissingPerlFile, fix_missing_perl_file),
+    (MissingPerlModule, fix_missing_perl_file),
+    (MissingXmlEntity, fix_missing_xml_entity),
+    (MissingNodeModule, fix_missing_node_module),
+    (MissingRubyGem, fix_missing_ruby_gem),
+    (MissingRPackage, fix_missing_r_package),
+    (MissingLibrary, fix_missing_library),
+    (MissingJavaClass, fix_missing_java_class),
+    (DhAddonLoadFailure, fix_missing_dh_addon),
+    (MissingPhpClass, fix_missing_php_class),
+    (AptFetchFailure, retry_apt_failure),
+    (MissingMavenArtifacts, fix_missing_maven_artifacts),
+    (GnomeCommonMissing, install_gnome_common),
+    (MissingGnomeCommonDependency, install_gnome_common_dep),
+    (MissingXfceDependency, install_xfce_dep),
+    (MissingConfigStatusInput, fix_missing_config_status_input),
+    (MissingJDKFile, fix_missing_jdk_file),
+    (MissingRubyFile, fix_missing_ruby_file),
+    (MissingJavaScriptRuntime, fix_missing_javascript_runtime),
+    (MissingAutoconfMacro, fix_missing_autoconf_macro),
+    (MissingValaPackage, fix_missing_vala_package),
+    (MissingCSharpCompiler, fix_missing_c_sharp_compiler),
+    (MissingHaskellDependencies, fix_missing_haskell_dependencies),
+]
+
+
+def 
resolve_error(error, context, fixers): + relevant_fixers = [] + for error_cls, fixer in fixers: + if isinstance(error, error_cls): + relevant_fixers.append(fixer) + if not relevant_fixers: + logging.warning('No fixer found for %r', error) + return False + for fixer in relevant_fixers: + logging.info( + 'Attempting to use fixer %r to address %r', + fixer, error) + try: + made_changes = fixer(error, context) + except GeneratedFile: + logging.warning('Control file is generated, unable to edit.') + return False + if made_changes: + return True + return False + + +def build_incrementally( + local_tree, suffix, build_suite, output_directory, build_command, + build_changelog_entry='Build for debian-janitor apt repository.', + committer=None, max_iterations=DEFAULT_MAX_ITERATIONS, + subpath='', source_date_epoch=None, update_changelog=True): + fixed_errors = [] + while True: + try: + return attempt_build( + local_tree, suffix, build_suite, output_directory, + build_command, build_changelog_entry, subpath=subpath, + source_date_epoch=source_date_epoch) + except SbuildFailure as e: + if e.error is None: + logging.warning( + 'Build failed with unidentified error. Giving up.') + raise + if e.context is None: + logging.info('No relevant context, not making any changes.') + raise + if (e.error, e.context) in fixed_errors: + logging.warning( + 'Error was still not fixed on second try. Giving up.') + raise + if max_iterations is not None \ + and len(fixed_errors) > max_iterations: + logging.warning( + 'Last fix did not address the issue. 
Giving up.') + raise + reset_tree(local_tree, local_tree.basis_tree(), subpath=subpath) + if e.context[0] == 'build': + context = BuildDependencyContext( + local_tree, subpath=subpath, committer=committer, + update_changelog=update_changelog) + elif e.context[0] == 'autopkgtest': + context = AutopkgtestDependencyContext( + e.context[1], + local_tree, subpath=subpath, committer=committer, + update_changelog=update_changelog) + else: + logging.warning('unable to install for context %r', e.context) + raise + try: + if not resolve_error( + e.error, context, + VERSIONED_PACKAGE_FIXERS + APT_FIXERS): + logging.warning( + 'Failed to resolve error %r. Giving up.', e.error) + raise + except CircularDependency: + logging.warning( + 'Unable to fix %r; it would introduce a circular ' + 'dependency.', e.error) + raise e + fixed_errors.append((e.error, e.context)) + if os.path.exists(os.path.join(output_directory, 'build.log')): + i = 1 + while os.path.exists( + os.path.join(output_directory, 'build.log.%d' % i)): + i += 1 + os.rename(os.path.join(output_directory, 'build.log'), + os.path.join(output_directory, 'build.log.%d' % i)) + + +def main(argv=None): + import argparse + parser = argparse.ArgumentParser('janitor.fix_build') + parser.add_argument('--suffix', type=str, + help="Suffix to use for test builds.", + default='fixbuild1') + parser.add_argument('--suite', type=str, + help="Suite to target.", + default='unstable') + parser.add_argument('--output-directory', type=str, + help="Output directory.", default=None) + parser.add_argument('--committer', type=str, + help='Committer string (name and email)', + default=None) + parser.add_argument( + '--build-command', type=str, + help='Build command', + default=(DEFAULT_BUILDER + ' -A -s -v')) + parser.add_argument( + '--no-update-changelog', action="store_false", default=None, + dest="update_changelog", help="do not update the changelog") + parser.add_argument( + '--update-changelog', action="store_true", 
dest="update_changelog", + help="force updating of the changelog", default=None) + + args = parser.parse_args() + from breezy.workingtree import WorkingTree + tree = WorkingTree.open('.') + build_incrementally( + tree, args.suffix, args.suite, args.output_directory, + args.build_command, committer=args.committer, + update_changelog=args.update_changelog) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/ognibuild/dist.py b/ognibuild/dist.py new file mode 100644 index 0000000..6770ca8 --- /dev/null +++ b/ognibuild/dist.py @@ -0,0 +1,524 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import errno +import logging +import os +import re +import shutil +import subprocess +import sys +import tempfile +from typing import Optional, List, Tuple, Callable, Type + +from debian.deb822 import Deb822 + +from breezy.export import export +from breezy.tree import Tree +from breezy.workingtree import WorkingTree + +from breezy.plugins.debian.repack_tarball import get_filetype + +from .fix_build import ( + DependencyContext, + resolve_error, + APT_FIXERS, + ) +from buildlog_consultant.sbuild import ( + find_apt_get_failure, + find_build_failure_description, + Problem, + MissingPerlModule, + MissingCommand, + NoSpaceOnDevice, + ) +from ognibuild import shebang_binary +from ognibuild.session import Session +from ognibuild.session.schroot import SchrootSession + + +def run_apt(session: Session, args: List[str]) -> None: + args = ['apt', '-y'] + args + retcode, lines = run_with_tee(session, args, cwd='/', user='root') + if retcode == 0: + return + offset, line, error = find_apt_get_failure(lines) + if error is not None: + raise DetailedDistCommandFailed(retcode, args, error) + if line is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(offset, line)) + raise UnidentifiedError(retcode, args, lines) + + +def apt_install(session: Session, packages: List[str]) -> None: + run_apt(session, ['install'] + packages) + + +def apt_satisfy(session: Session, deps: List[str]) -> None: + run_apt(session, ['satisfy'] + deps) + + +def satisfy_build_deps(session: Session, tree): + source = Deb822(tree.get_file('debian/control')) + deps = [] + for name in ['Build-Depends', 'Build-Depends-Indep', 'Build-Depends-Arch']: + try: + deps.append(source[name].strip().strip(',')) + except KeyError: + pass + for name in ['Build-Conflicts', 
'Build-Conflicts-Indep',
+                 'Build-Conflicts-Arch']:
+        try:
+            deps.append('Conflicts: ' + source[name])
+        except KeyError:
+            pass
+    deps = [
+        dep.strip().strip(',')
+        for dep in deps]
+    apt_satisfy(session, deps)
+
+
+def run_with_tee(session: Session, args: List[str], **kwargs):
+    p = session.Popen(
+        args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
+    contents = []
+    while p.poll() is None:
+        line = p.stdout.readline()
+        sys.stdout.buffer.write(line)
+        sys.stdout.buffer.flush()
+        contents.append(line.decode('utf-8', 'surrogateescape'))
+    return p.returncode, contents
+
+
+class SchrootDependencyContext(DependencyContext):
+
+    def __init__(self, session):
+        self.session = session
+
+    def add_dependency(self, package, minimum_version=None):
+        # TODO(jelmer): Handle minimum_version
+        apt_install(self.session, [package])
+        return True
+
+
+class DetailedDistCommandFailed(Exception):
+
+    def __init__(self, retcode, argv, error):
+        self.retcode = retcode
+        self.argv = argv
+        self.error = error
+
+
+class UnidentifiedError(Exception):
+
+    def __init__(self, retcode, argv, lines, secondary=None):
+        self.retcode = retcode
+        self.argv = argv
+        self.lines = lines
+        self.secondary = secondary
+
+
+def fix_perl_module_from_cpan(error, context):
+    # TODO(jelmer): Specify -T to skip tests? 
+ context.session.check_call( + ['cpan', '-i', error.module], user='root', + env={'PERL_MM_USE_DEFAULT': '1'}) + return True + + +NPM_COMMAND_PACKAGES = { + 'del-cli': 'del-cli', + } + + +def fix_npm_missing_command(error, context): + try: + package = NPM_COMMAND_PACKAGES[error.command] + except KeyError: + return False + + context.session.check_call(['npm', '-g', 'install', package]) + return True + + +GENERIC_INSTALL_FIXERS: List[ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + (MissingPerlModule, fix_perl_module_from_cpan), + (MissingCommand, fix_npm_missing_command), +] + + +def run_with_build_fixer(session: Session, args: List[str]): + logging.info('Running %r', args) + fixed_errors = [] + while True: + retcode, lines = run_with_tee(session, args) + if retcode == 0: + return + offset, line, error = find_build_failure_description(lines) + if error is None: + logging.warning('Build failed with unidentified error. Giving up.') + if line is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(offset, line)) + raise UnidentifiedError(retcode, args, lines) + + logging.info('Identified error: %r', error) + if error in fixed_errors: + logging.warning( + 'Failed to resolve error %r, it persisted. Giving up.', + error) + raise DetailedDistCommandFailed(retcode, args, error) + if not resolve_error( + error, SchrootDependencyContext(session), + fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS)): + logging.warning( + 'Failed to find resolution for error %r. Giving up.', + error) + raise DetailedDistCommandFailed(retcode, args, error) + fixed_errors.append(error) + + +class NoBuildToolsFound(Exception): + """No supported build tools were found.""" + + +def run_dist_in_chroot(session): + apt_install(session, ['git']) + + # Some things want to write to the user's home directory, + # e.g. 
pip caches in ~/.cache + session.create_home() + + if os.path.exists('package.xml'): + apt_install(session, ['php-pear', 'php-horde-core']) + logging.info('Found package.xml, assuming pear package.') + session.check_call(['pear', 'package']) + return + + if os.path.exists('pyproject.toml'): + import toml + with open('pyproject.toml', 'r') as pf: + pyproject = toml.load(pf) + if 'poetry' in pyproject.get('tool', []): + logging.info( + 'Found pyproject.toml with poetry section, ' + 'assuming poetry project.') + apt_install(session, ['python3-venv', 'python3-pip']) + session.check_call(['pip3', 'install', 'poetry'], user='root') + session.check_call(['poetry', 'build', '-f', 'sdist']) + return + + if os.path.exists('setup.py'): + logging.info('Found setup.py, assuming python project.') + apt_install(session, ['python3', 'python3-pip']) + with open('setup.py', 'r') as f: + setup_py_contents = f.read() + try: + with open('setup.cfg', 'r') as f: + setup_cfg_contents = f.read() + except FileNotFoundError: + setup_cfg_contents = '' + if 'setuptools' in setup_py_contents: + logging.info('Reference to setuptools found, installing.') + apt_install(session, ['python3-setuptools']) + if ('setuptools_scm' in setup_py_contents or + 'setuptools_scm' in setup_cfg_contents): + logging.info('Reference to setuptools-scm found, installing.') + apt_install( + session, ['python3-setuptools-scm', 'git', 'mercurial']) + + # TODO(jelmer): Install setup_requires + + interpreter = shebang_binary('setup.py') + if interpreter is not None: + if interpreter == 'python3': + apt_install(session, ['python3']) + elif interpreter == 'python2': + apt_install(session, ['python2']) + elif interpreter == 'python': + apt_install(session, ['python']) + else: + raise ValueError('Unknown interpreter %s' % interpreter) + apt_install(session, ['python2', 'python3']) + run_with_build_fixer(session, ['./setup.py', 'sdist']) + else: + # Just assume it's Python 3 + apt_install(session, ['python3']) + 
run_with_build_fixer(session, ['python3', './setup.py', 'sdist']) + return + + if os.path.exists('setup.cfg'): + logging.info('Found setup.cfg, assuming python project.') + apt_install(session, ['python3-pep517', 'python3-pip']) + session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) + return + + if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): + apt_install(session, ['libdist-inkt-perl']) + with open('dist.ini', 'rb') as f: + for line in f: + if not line.startswith(b';;'): + continue + try: + (key, value) = line[2:].split(b'=', 1) + except ValueError: + continue + if (key.strip() == b'class' and + value.strip().startswith(b"'Dist::Inkt")): + logging.info( + 'Found Dist::Inkt section in dist.ini, ' + 'assuming distinkt.') + # TODO(jelmer): install via apt if possible + session.check_call( + ['cpan', 'install', value.decode().strip("'")], + user='root') + run_with_build_fixer(session, ['distinkt-dist']) + return + # Default to invoking Dist::Zilla + logging.info('Found dist.ini, assuming dist-zilla.') + apt_install(session, ['libdist-zilla-perl']) + run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) + return + + if os.path.exists('package.json'): + apt_install(session, ['npm']) + run_with_build_fixer(session, ['npm', 'pack']) + return + + gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] + if gemfiles: + apt_install(session, ['gem2deb']) + if len(gemfiles) > 1: + logging.warning('More than one gemfile. 
Trying the first?') + run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) + return + + if os.path.exists('waf'): + apt_install(session, ['python3']) + run_with_build_fixer(session, ['./waf', 'dist']) + return + + if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): + apt_install(session, ['perl']) + run_with_build_fixer(session, ['perl', 'Makefile.PL']) + + if not os.path.exists('Makefile') and not os.path.exists('configure'): + if os.path.exists('autogen.sh'): + if shebang_binary('autogen.sh') is None: + run_with_build_fixer(session, ['/bin/sh', './autogen.sh']) + try: + run_with_build_fixer(session, ['./autogen.sh']) + except UnidentifiedError as e: + if ("Gnulib not yet bootstrapped; " + "run ./bootstrap instead.\n" in e.lines): + run_with_build_fixer(session, ["./bootstrap"]) + run_with_build_fixer(session, ['./autogen.sh']) + else: + raise + + elif os.path.exists('configure.ac') or os.path.exists('configure.in'): + apt_install(session, [ + 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) + run_with_build_fixer(session, ['autoreconf', '-i']) + + if not os.path.exists('Makefile') and os.path.exists('configure'): + session.check_call(['./configure']) + + if os.path.exists('Makefile'): + apt_install(session, ['make']) + try: + run_with_build_fixer(session, ['make', 'dist']) + except UnidentifiedError as e: + if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: + pass + elif ("make[1]: *** No rule to make target 'dist'. 
Stop.\n" + in e.lines): + pass + elif ("Reconfigure the source tree " + "(via './config' or 'perl Configure'), please.\n" + ) in e.lines: + run_with_build_fixer(session, ['./config']) + run_with_build_fixer(session, ['make', 'dist']) + elif ( + "Please try running 'make manifest' and then run " + "'make dist' again.\n" in e.lines): + run_with_build_fixer(session, ['make', 'manifest']) + run_with_build_fixer(session, ['make', 'dist']) + elif "Please run ./configure first\n" in e.lines: + run_with_build_fixer(session, ['./configure']) + run_with_build_fixer(session, ['make', 'dist']) + elif any([re.match( + r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' + r'Run \'./configure \[options\]\' and retry. Stop.\n', + line) for line in e.lines]): + run_with_build_fixer(session, ['./configure']) + run_with_build_fixer(session, ['make', 'dist']) + elif any([re.match( + r'Problem opening MANIFEST: No such file or directory ' + r'at .* line [0-9]+\.', line) for line in e.lines]): + run_with_build_fixer(session, ['make', 'manifest']) + run_with_build_fixer(session, ['make', 'dist']) + else: + raise + else: + return + + raise NoBuildToolsFound() + + +def export_vcs_tree(tree, directory): + try: + export(tree, directory, 'dir', None) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedDistCommandFailed( + 1, ['export'], NoSpaceOnDevice()) + raise + + +def dupe_vcs_tree(tree, directory): + with tree.lock_read(): + if isinstance(tree, WorkingTree): + tree = tree.basis_tree() + try: + result = tree._repository.controldir.sprout( + directory, create_tree_if_local=True, + revision_id=tree.get_revision_id()) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedDistCommandFailed( + 1, ['sprout'], NoSpaceOnDevice()) + raise + # Copy parent location - some scripts need this + base_branch = tree._repository.controldir.open_branch() + parent = base_branch.get_parent() + if parent: + result.open_branch().set_parent(parent) + + +def create_dist_schroot( + 
tree: Tree, target_dir: str, + chroot: str, packaging_tree: Optional[Tree] = None, + include_controldir: bool = True, + subdir: Optional[str] = None) -> Optional[str]: + if subdir is None: + subdir = 'package' + with SchrootSession(chroot) as session: + if packaging_tree is not None: + satisfy_build_deps(session, packaging_tree) + build_dir = os.path.join(session.location, 'build') + + try: + directory = tempfile.mkdtemp(dir=build_dir) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedDistCommandFailed( + 1, ['mkdtemp'], NoSpaceOnDevice()) + reldir = '/' + os.path.relpath(directory, session.location) + + export_directory = os.path.join(directory, subdir) + if not include_controldir: + export_vcs_tree(tree, export_directory) + else: + dupe_vcs_tree(tree, export_directory) + + existing_files = os.listdir(export_directory) + + oldcwd = os.getcwd() + os.chdir(export_directory) + try: + session.chdir(os.path.join(reldir, subdir)) + run_dist_in_chroot(session) + except NoBuildToolsFound: + logging.info( + 'No build tools found, falling back to simple export.') + return None + finally: + os.chdir(oldcwd) + + new_files = os.listdir(export_directory) + diff_files = set(new_files) - set(existing_files) + diff = set([n for n in diff_files if get_filetype(n) is not None]) + if len(diff) == 1: + fn = diff.pop() + logging.info('Found tarball %s in package directory.', fn) + shutil.copy( + os.path.join(export_directory, fn), + target_dir) + return fn + if 'dist' in diff_files: + for entry in os.scandir(os.path.join(export_directory, 'dist')): + if get_filetype(entry.name) is not None: + logging.info( + 'Found tarball %s in dist directory.', entry.name) + shutil.copy(entry.path, target_dir) + return entry.name + logging.info('No tarballs found in dist directory.') + + diff = set(os.listdir(directory)) - set([subdir]) + if len(diff) == 1: + fn = diff.pop() + logging.info('Found tarball %s in parent directory.', fn) + shutil.copy( + os.path.join(directory, fn), + 
target_dir) + return fn + + logging.info('No tarball created :(') + return None + + +if __name__ == '__main__': + import argparse + import breezy.bzr + import breezy.git # noqa: F401 + + parser = argparse.ArgumentParser() + parser.add_argument( + '--chroot', default='unstable-amd64-sbuild', type=str, + help='Name of chroot to use') + parser.add_argument( + 'directory', default='.', type=str, nargs='?', + help='Directory with upstream source.') + parser.add_argument( + '--packaging-directory', type=str, + help='Path to packaging directory.') + parser.add_argument( + '--target-directory', type=str, default='..', + help='Target directory') + args = parser.parse_args() + tree = WorkingTree.open(args.directory) + if args.packaging_directory: + packaging_tree = WorkingTree.open(args.packaging_directory) + with packaging_tree.lock_read(): + source = Deb822(packaging_tree.get_file('debian/control')) + package = source['Source'] + subdir = package + else: + packaging_tree = None + subdir = None + + ret = create_dist_schroot( + tree, subdir=subdir, target_dir=os.path.abspath(args.target_directory), + packaging_tree=packaging_tree, + chroot=args.chroot) + if ret: + sys.exit(0) + else: + sys.exit(1) diff --git a/ognibuild/tests/__init__.py b/ognibuild/tests/__init__.py index 49e3b94..0072367 100644 --- a/ognibuild/tests/__init__.py +++ b/ognibuild/tests/__init__.py @@ -22,6 +22,8 @@ import unittest def test_suite(): names = [ + 'debian_build', + 'debian_fix_build', ] module_names = ['ognibuild.tests.test_' + name for name in names] loader = unittest.TestLoader() diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py new file mode 100644 index 0000000..da2541a --- /dev/null +++ b/ognibuild/tests/test_debian_build.py @@ -0,0 +1,108 @@ +#!/usr/bin/python +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the 
Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import datetime +from ..debian.build import add_dummy_changelog_entry + +from breezy.tests import TestCaseWithTransport + + +class AddDummyChangelogEntryTests(TestCaseWithTransport): + + def test_simple(self): + tree = self.make_branch_and_tree('.') + self.build_tree_contents([('debian/', ), ('debian/changelog', """\ +janitor (0.1-1) UNRELEASED; urgency=medium + + * Initial release. (Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""")]) + tree.add(['debian', 'debian/changelog']) + add_dummy_changelog_entry( + tree, '', 'jan+some', 'some-fixes', 'Dummy build.', + timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654), + maintainer=("Jelmer Vernooij", "jelmer@debian.org")) + self.assertFileEqual("""\ +janitor (0.1-1jan+some1) some-fixes; urgency=low + + * Dummy build. + + -- Jelmer Vernooij Sat, 05 Sep 2020 12:35:04 -0000 + +janitor (0.1-1) UNRELEASED; urgency=medium + + * Initial release. (Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""", 'debian/changelog') + + def test_native(self): + tree = self.make_branch_and_tree('.') + self.build_tree_contents([('debian/', ), ('debian/changelog', """\ +janitor (0.1) UNRELEASED; urgency=medium + + * Initial release. 
(Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""")]) + tree.add(['debian', 'debian/changelog']) + add_dummy_changelog_entry( + tree, '', 'jan+some', 'some-fixes', 'Dummy build.', + timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654), + maintainer=("Jelmer Vernooij", "jelmer@debian.org")) + self.assertFileEqual("""\ +janitor (0.1jan+some1) some-fixes; urgency=low + + * Dummy build. + + -- Jelmer Vernooij Sat, 05 Sep 2020 12:35:04 -0000 + +janitor (0.1) UNRELEASED; urgency=medium + + * Initial release. (Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""", 'debian/changelog') + + def test_exists(self): + tree = self.make_branch_and_tree('.') + self.build_tree_contents([('debian/', ), ('debian/changelog', """\ +janitor (0.1-1jan+some1) UNRELEASED; urgency=medium + + * Initial release. (Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""")]) + tree.add(['debian', 'debian/changelog']) + add_dummy_changelog_entry( + tree, '', 'jan+some', 'some-fixes', 'Dummy build.', + timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654), + maintainer=("Jelmer Vernooij", "jelmer@debian.org")) + self.assertFileEqual("""\ +janitor (0.1-1jan+some2) some-fixes; urgency=low + + * Dummy build. + + -- Jelmer Vernooij Sat, 05 Sep 2020 12:35:04 -0000 + +janitor (0.1-1jan+some1) UNRELEASED; urgency=medium + + * Initial release. 
(Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""", 'debian/changelog') diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py new file mode 100644 index 0000000..7b0fa18 --- /dev/null +++ b/ognibuild/tests/test_debian_fix_build.py @@ -0,0 +1,201 @@ +#!/usr/bin/python +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import re + +from debian.deb822 import Deb822 + +from buildlog_consultant.sbuild import ( + MissingCommand, + MissingGoPackage, + MissingPerlModule, + MissingPkgConfig, + MissingPythonModule, + MissingRubyFile, + MissingRubyGem, + MissingValaPackage, + ) +from ..debian import fix_build +from ..debian.fix_build import ( + resolve_error, + VERSIONED_PACKAGE_FIXERS, + APT_FIXERS, + BuildDependencyContext, + ) +from breezy.tests import TestCaseWithTransport + + +class ResolveErrorTests(TestCaseWithTransport): + + def setUp(self): + super(ResolveErrorTests, self).setUp() + self.tree = self.make_branch_and_tree('.') + self.build_tree_contents([('debian/', ), ('debian/control', """\ +Source: blah +Build-Depends: libc6 + +Package: python-blah +Depends: ${python3:Depends} +Description: A python package + Foo +"""), ('debian/changelog', """\ +blah (0.1) UNRELEASED; urgency=medium + + * Initial 
release. (Closes: #XXXXXX) + + -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 +""")]) + self.tree.add(['debian', 'debian/control', 'debian/changelog']) + self.tree.commit('Initial commit') + self.overrideAttr(fix_build, 'search_apt_file', self._search_apt_file) + self._apt_files = {} + + def _search_apt_file(self, path, regex=False): + for p, pkg in sorted(self._apt_files.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + def resolve(self, error, context=('build', )): + context = BuildDependencyContext( + self.tree, subpath='', committer='Janitor ', + update_changelog=True) + return resolve_error( + error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS) + + def get_build_deps(self): + with open(self.tree.abspath('debian/control'), 'r') as f: + return next(Deb822.iter_paragraphs(f)).get('Build-Depends', '') + + def test_missing_command_unknown(self): + self._apt_files = {} + self.assertFalse(self.resolve( + MissingCommand('acommandthatdoesnotexist'))) + + def test_missing_command_brz(self): + self._apt_files = { + '/usr/bin/b': 'bash', + '/usr/bin/brz': 'brz', + '/usr/bin/brzier': 'bash', + } + self.assertTrue(self.resolve(MissingCommand('brz'))) + self.assertEqual('libc6, brz', self.get_build_deps()) + rev = self.tree.branch.repository.get_revision( + self.tree.branch.last_revision()) + self.assertEqual( + 'Add missing build dependency on brz.\n', + rev.message) + self.assertFalse(self.resolve(MissingCommand('brz'))) + self.assertEqual('libc6, brz', self.get_build_deps()) + + def test_missing_command_ps(self): + self._apt_files = { + '/bin/ps': 'procps', + '/usr/bin/pscal': 'xcal', + } + self.assertTrue(self.resolve(MissingCommand('ps'))) + self.assertEqual('libc6, procps', self.get_build_deps()) + + def test_missing_ruby_file(self): + self._apt_files = { + '/usr/lib/ruby/vendor_ruby/rake/testtask.rb': 'rake', + } + self.assertTrue(self.resolve(MissingRubyFile('rake/testtask'))) + self.assertEqual('libc6, rake', 
self.get_build_deps()) + + def test_missing_ruby_file_from_gem(self): + self._apt_files = { + '/usr/share/rubygems-integration/all/gems/activesupport-' + '5.2.3/lib/active_support/core_ext/string/strip.rb': + 'ruby-activesupport'} + self.assertTrue(self.resolve( + MissingRubyFile('active_support/core_ext/string/strip'))) + self.assertEqual('libc6, ruby-activesupport', self.get_build_deps()) + + def test_missing_ruby_gem(self): + self._apt_files = { + '/usr/share/rubygems-integration/all/specifications/' + 'bio-1.5.2.gemspec': 'ruby-bio', + '/usr/share/rubygems-integration/all/specifications/' + 'bio-2.0.2.gemspec': 'ruby-bio', + } + self.assertTrue(self.resolve(MissingRubyGem('bio', None))) + self.assertEqual('libc6, ruby-bio', self.get_build_deps()) + self.assertTrue(self.resolve(MissingRubyGem('bio', '2.0.3'))) + self.assertEqual('libc6, ruby-bio (>= 2.0.3)', self.get_build_deps()) + + def test_missing_perl_module(self): + self._apt_files = { + '/usr/share/perl5/App/cpanminus/fatscript.pm': 'cpanminus'} + self.assertTrue(self.resolve(MissingPerlModule( + 'App/cpanminus/fatscript.pm', 'App::cpanminus::fatscript', [ + '/<>/blib/lib', + '/<>/blib/arch', + '/etc/perl', + '/usr/local/lib/x86_64-linux-gnu/perl/5.30.0', + '/usr/local/share/perl/5.30.0', + '/usr/lib/x86_64-linux-gnu/perl5/5.30', + '/usr/share/perl5', + '/usr/lib/x86_64-linux-gnu/perl/5.30', + '/usr/share/perl/5.30', + '/usr/local/lib/site_perl', + '/usr/lib/x86_64-linux-gnu/perl-base', + '.']))) + self.assertEqual('libc6, cpanminus', self.get_build_deps()) + + def test_missing_pkg_config(self): + self._apt_files = { + '/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc': + 'libxcb-xfixes0-dev'} + self.assertTrue(self.resolve(MissingPkgConfig('xcb-xfixes'))) + self.assertEqual('libc6, libxcb-xfixes0-dev', self.get_build_deps()) + + def test_missing_pkg_config_versioned(self): + self._apt_files = { + '/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc': + 'libxcb-xfixes0-dev'} + 
self.assertTrue(self.resolve(MissingPkgConfig('xcb-xfixes', '1.0'))) + self.assertEqual( + 'libc6, libxcb-xfixes0-dev (>= 1.0)', self.get_build_deps()) + + def test_missing_python_module(self): + self._apt_files = { + '/usr/lib/python3/dist-packages/m2r.py': 'python3-m2r' + } + self.assertTrue(self.resolve(MissingPythonModule('m2r'))) + self.assertEqual('libc6, python3-m2r', self.get_build_deps()) + + def test_missing_go_package(self): + self._apt_files = { + '/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go': + 'golang-github-chzyer-readline-dev', + } + self.assertTrue(self.resolve( + MissingGoPackage('github.com/chzyer/readline'))) + self.assertEqual( + 'libc6, golang-github-chzyer-readline-dev', + self.get_build_deps()) + + def test_missing_vala_package(self): + self._apt_files = { + '/usr/share/vala-0.48/vapi/posix.vapi': 'valac-0.48-vapi', + } + self.assertTrue(self.resolve(MissingValaPackage('posix'))) + self.assertEqual('libc6, valac-0.48-vapi', self.get_build_deps()) diff --git a/setup.cfg b/setup.cfg index eb9fed9..fc17caa 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,6 @@ [flake8] application-package-names = ognibuild +banned-modules = silver-platter = Should not use silver-platter [mypy] # A number of ognibuilds' dependencies don't have type hints yet diff --git a/setup.py b/setup.py index dacac1f..55075f2 100755 --- a/setup.py +++ b/setup.py @@ -23,5 +23,10 @@ setup(name="ognibuild", ], entry_points={ "console_scripts": [ - "ogni=ognibuild.__main__:main"] - }) + "ogni=ognibuild.__main__:main", + "deb-fix-build=ognibuild.debian.fix_build:main", + ] + }, + install_requires=['breezy', 'buildlog-consultant'], + test_suite='ognibuild.tests.test_suite', + ) From 7f90276cd85c8dbccbae2424cb2b1e22ee8a7cbb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 14:19:24 +0000 Subject: [PATCH 02/83] Fix import. 
--- ognibuild/debian/fix_build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 18c31bc..9aa728d 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -57,7 +57,7 @@ from lintian_brush.changelog import ( add_changelog_entry, ) -from lintian_brush.rules import ( +from debmutate._rules import ( dh_invoke_add_with, update_rules, ) From e098fa66f53304904c75dd13ec29680d0e8a2ceb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 14:20:35 +0000 Subject: [PATCH 03/83] Fix import. --- ognibuild/dist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 6770ca8..8d8c688 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -33,7 +33,7 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype -from .fix_build import ( +from .debian.fix_build import ( DependencyContext, resolve_error, APT_FIXERS, From a0a494b4f8d188bbc39dec170d2ba40f7fcc9e3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 15:56:24 +0000 Subject: [PATCH 04/83] Add stubs for clean/test/install. --- ognibuild/__init__.py | 16 ++++++++++++++++ ognibuild/__main__.py | 17 +++++++++++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index a9dfcac..b4f0492 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -87,6 +87,22 @@ def run_with_build_fixer(session, args): session.check_call(args) +def run_build(session): + raise NotImplementedError + + +def run_clean(session): + raise NotImplementedError + + +def run_test(session): + raise NotImplementedError + + +def run_install(session): + raise NotImplementedError + + def run_dist(session): # TODO(jelmer): Check $PATH rather than hardcoding? 
if not os.path.exists('/usr/bin/git'): diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index d2b5f3a..2571abc 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -17,13 +17,18 @@ import os import sys -from . import run_dist, NoBuildToolsFound, note +from . import ( + run_dist, run_build, run_clean, run_install, run_test, NoBuildToolsFound, + note + ) def main(): import argparse parser = argparse.ArgumentParser() - parser.add_argument('subcommand', type=str, choices=['dist']) + parser.add_argument( + 'subcommand', type=str, + choices=['dist', 'build', 'clean', 'test', 'install']) parser.add_argument( '--directory', '-d', type=str, help='Directory for project.', default='.') @@ -41,6 +46,14 @@ def main(): try: if args.subcommand == 'dist': run_dist(session) + if args.subcommand == 'build': + run_build(session) + if args.subcommand == 'clean': + run_clean(session) + if args.subcommand == 'install': + run_install(session) + if args.subcommand == 'test': + run_test(session) except NoBuildToolsFound: note('No build tools found.') return 1 From ac69987508e9a4dd156f9405c89a1401ebb5e7e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 16:25:06 +0000 Subject: [PATCH 05/83] Add missing dependencies. --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 55075f2..e7e819d 100755 --- a/setup.py +++ b/setup.py @@ -28,5 +28,6 @@ setup(name="ognibuild", ] }, install_requires=['breezy', 'buildlog-consultant'], + test_requires=['python_debian', 'buildlog-consultant', 'breezy'], test_suite='ognibuild.tests.test_suite', ) From 75dcd97776f6a2515b9fd58da9de5c97a769d91b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:05:12 +0000 Subject: [PATCH 06/83] Fix typo. 
--- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e7e819d..1e6d85e 100755 --- a/setup.py +++ b/setup.py @@ -28,6 +28,6 @@ setup(name="ognibuild", ] }, install_requires=['breezy', 'buildlog-consultant'], - test_requires=['python_debian', 'buildlog-consultant', 'breezy'], + tests_require=['python_debian', 'buildlog-consultant', 'breezy'], test_suite='ognibuild.tests.test_suite', ) From 3df3935b8d2efae3d81a845500a93c33b025ccb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:06:57 +0000 Subject: [PATCH 07/83] Install cython. --- .github/workflows/pythonpackage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index a50607f..2837b1d 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,7 +20,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip flake8 + python -m pip install --upgrade pip flake8 cython python setup.py develop - name: Style checks run: | From db07573ca9b8e6f26acb88240878de19a93aabce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:10:14 +0000 Subject: [PATCH 08/83] Add python-debian dependency. 
--- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1e6d85e..724d40a 100755 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ setup(name="ognibuild", "deb-fix-build=ognibuild.debian.fix_build:main", ] }, - install_requires=['breezy', 'buildlog-consultant'], + install_requires=['breezy', 'buildlog-consultant', 'python_debian'], tests_require=['python_debian', 'buildlog-consultant', 'breezy'], test_suite='ognibuild.tests.test_suite', ) From 49b90ef7a4a3e018d429d8ba9601c12af008abc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:13:48 +0000 Subject: [PATCH 09/83] Add debmutate dependency. --- setup.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 724d40a..d78a7ae 100755 --- a/setup.py +++ b/setup.py @@ -27,7 +27,12 @@ setup(name="ognibuild", "deb-fix-build=ognibuild.debian.fix_build:main", ] }, - install_requires=['breezy', 'buildlog-consultant', 'python_debian'], + install_requires=[ + 'breezy', + 'buildlog-consultant', + 'python_debian', + 'debmutate', + ], tests_require=['python_debian', 'buildlog-consultant', 'breezy'], test_suite='ognibuild.tests.test_suite', ) From 8d5dc49b3929eee0889f1a76a9e4bd4d5a60a81f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:19:53 +0000 Subject: [PATCH 10/83] Add releaser config. 
--- releaser.conf | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 releaser.conf diff --git a/releaser.conf b/releaser.conf new file mode 100644 index 0000000..8b1386f --- /dev/null +++ b/releaser.conf @@ -0,0 +1,10 @@ +name: "ognibuild" +pypi: "ognibuild" +timeout_days: 5 +tag_name: "v$VERSION" +verify_command: "python3 setup.py test" +update_version { + path: "setup.py" + match: "^ version=\"(.*)\",$" + new_line: " version=\"$VERSION\"," +} From b7f2f8dbe3f1f0cefe048014a4e848b2a94142cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:34:12 +0000 Subject: [PATCH 11/83] Install brz-debian. --- .github/workflows/pythonpackage.yml | 2 ++ .gitignore | 1 + 2 files changed, 3 insertions(+) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 2837b1d..31bc00e 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -21,6 +21,8 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip flake8 cython + mkdir -p $HOME/.config/breezy/plugins + bzr branch lp:brz-debian $HOME/.config/breezy/plugins/debian python setup.py develop - name: Style checks run: | diff --git a/.gitignore b/.gitignore index ff722b0..989ed96 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ build ognibuild.egg-info dist __pycache__ +.eggs From f6f6b5a696c121ad4d45beb567cb39170d2fc730 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 17:41:24 +0000 Subject: [PATCH 12/83] Remove duplicate run_with_tee. 
--- ognibuild/__init__.py | 13 +------------ ognibuild/dist.py | 13 +------------ ognibuild/session/__init__.py | 14 ++++++++++++++ 3 files changed, 16 insertions(+), 24 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index b4f0492..1261574 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -21,6 +21,7 @@ import stat import subprocess import sys from typing import List +from .session import run_with_tee DEFAULT_PYTHON = 'python3' @@ -59,18 +60,6 @@ def warning(m): sys.stderr.write('WARNING: %s\n' % m) -def run_with_tee(session, args: List[str], **kwargs): - p = session.Popen( - args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) - contents = [] - while p.poll() is None: - line = p.stdout.readline() - sys.stdout.buffer.write(line) - sys.stdout.buffer.flush() - contents.append(line.decode('utf-8', 'surrogateescape')) - return p.returncode, contents - - def run_apt(session, args: List[str]) -> None: args = ['apt', '-y'] + args retcode, lines = run_with_tee(session, args, cwd='/', user='root') diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 8d8c688..2213076 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -33,6 +33,7 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype +from .session import run_with_tee from .debian.fix_build import ( DependencyContext, resolve_error, @@ -93,18 +94,6 @@ def satisfy_build_deps(session: Session, tree): apt_satisfy(session, deps) -def run_with_tee(session: Session, args: List[str], **kwargs): - p = session.Popen( - args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) - contents = [] - while p.poll() is None: - line = p.stdout.readline() - sys.stdout.buffer.write(line) - sys.stdout.buffer.flush() - contents.append(line.decode('utf-8', 'surrogateescape')) - return p.returncode, contents - - class SchrootDependencyContext(DependencyContext): def __init__(self, session): diff --git 
a/ognibuild/session/__init__.py b/ognibuild/session/__init__.py index 6917def..3cc87af 100644 --- a/ognibuild/session/__init__.py +++ b/ognibuild/session/__init__.py @@ -17,6 +17,8 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA from typing import Optional, List, Dict +import sys +import subprocess class Session(object): @@ -64,3 +66,15 @@ class Session(object): class SessionSetupFailure(Exception): """Session failed to be set up.""" + + +def run_with_tee(session: Session, args: List[str], **kwargs): + p = session.Popen( + args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) + contents = [] + while p.poll() is None: + line = p.stdout.readline() + sys.stdout.buffer.write(line) + sys.stdout.buffer.flush() + contents.append(line.decode('utf-8', 'surrogateescape')) + return p.returncode, contents From 5693f20de648d0b6ff3ab00a4e89c0b2a6a5784a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 5 Feb 2021 18:01:56 +0000 Subject: [PATCH 13/83] Add UnidentifiedError. --- ognibuild/apt.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 ognibuild/apt.py diff --git a/ognibuild/apt.py b/ognibuild/apt.py new file mode 100644 index 0000000..7454307 --- /dev/null +++ b/ognibuild/apt.py @@ -0,0 +1,26 @@ +#!/usr/bin/python +# Copyright (C) 2019-2020 Jelmer Vernooij +# encoding: utf-8 +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + + +class UnidentifiedError(Exception): + + def __init__(self, retcode, argv, lines, secondary=None): + self.retcode = retcode + self.argv = argv + self.lines = lines + self.secondary = secondary From 7919d42f0520dcca87782ebdae2fee4b88b16e37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 03:45:26 +0000 Subject: [PATCH 14/83] Some more work on integrating dist. --- ognibuild/__init__.py | 175 ++--------------------------------- ognibuild/__main__.py | 3 +- ognibuild/apt.py | 35 +++++++ ognibuild/dist.py | 116 ++++++++--------------- ognibuild/session/schroot.py | 10 +- 5 files changed, 90 insertions(+), 249 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 1261574..d769c5e 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -18,27 +18,24 @@ import os import stat -import subprocess import sys -from typing import List -from .session import run_with_tee DEFAULT_PYTHON = 'python3' -class UnidentifiedError(Exception): - - def __init__(self, retcode, argv, lines): - self.retcode = retcode - self.argv = argv - self.lines = lines - - class NoBuildToolsFound(Exception): """No supported build tools were found.""" +class DetailedFailure(Exception): + + def __init__(self, retcode, argv, error): + self.retcode = retcode + self.argv = argv + self.error = error + + def shebang_binary(p): if not (os.stat(p).st_mode & stat.S_IEXEC): return None @@ -48,8 +45,8 @@ def shebang_binary(p): return None args = firstline[2:].split(b' ') if args[0] in (b'/usr/bin/env', b'env'): - return os.path.basename(args[1].decode()) - return os.path.basename(args[0].decode()) + return os.path.basename(args[1].decode()).strip() + return os.path.basename(args[0].decode()).strip() def note(m): @@ -60,18 +57,6 @@ 
def warning(m): sys.stderr.write('WARNING: %s\n' % m) -def run_apt(session, args: List[str]) -> None: - args = ['apt', '-y'] + args - retcode, lines = run_with_tee(session, args, cwd='/', user='root') - if retcode == 0: - return - raise UnidentifiedError(retcode, args, lines) - - -def apt_install(session, packages: List[str]) -> None: - run_apt(session, ['install'] + packages) - - def run_with_build_fixer(session, args): session.check_call(args) @@ -90,143 +75,3 @@ def run_test(session): def run_install(session): raise NotImplementedError - - -def run_dist(session): - # TODO(jelmer): Check $PATH rather than hardcoding? - if not os.path.exists('/usr/bin/git'): - apt_install(session, ['git']) - - # Some things want to write to the user's home directory, - # e.g. pip caches in ~/.cache - session.create_home() - - if os.path.exists('package.xml'): - apt_install(session, ['php-pear', 'php-horde-core']) - note('Found package.xml, assuming pear package.') - session.check_call(['pear', 'package']) - return - - if os.path.exists('pyproject.toml'): - import toml - with open('pyproject.toml', 'r') as pf: - pyproject = toml.load(pf) - if 'poetry' in pyproject.get('tool', []): - note('Found pyproject.toml with poetry section, ' - 'assuming poetry project.') - apt_install(session, ['python3-venv', 'python3-pip']) - session.check_call(['pip3', 'install', 'poetry'], user='root') - session.check_call(['poetry', 'build', '-f', 'sdist']) - return - - if os.path.exists('setup.py'): - note('Found setup.py, assuming python project.') - apt_install(session, ['python3', 'python3-pip']) - with open('setup.py', 'r') as f: - setup_py_contents = f.read() - try: - with open('setup.cfg', 'r') as f: - setup_cfg_contents = f.read() - except FileNotFoundError: - setup_cfg_contents = '' - if 'setuptools' in setup_py_contents: - note('Reference to setuptools found, installing.') - apt_install(session, ['python3-setuptools']) - if ('setuptools_scm' in setup_py_contents or - 'setuptools_scm' in 
setup_cfg_contents): - note('Reference to setuptools-scm found, installing.') - apt_install( - session, ['python3-setuptools-scm', 'git', 'mercurial']) - - # TODO(jelmer): Install setup_requires - - interpreter = shebang_binary('setup.py') - if interpreter is not None: - if interpreter == 'python2' or interpreter.startswith('python2.'): - apt_install(session, [interpreter]) - elif (interpreter == 'python3' or - interpreter.startswith('python3.')): - apt_install(session, [interpreter]) - else: - apt_install(session, [DEFAULT_PYTHON]) - run_with_build_fixer(session, ['./setup.py', 'sdist']) - else: - # Just assume it's Python 3 - apt_install(session, ['python3']) - run_with_build_fixer(session, ['python3', './setup.py', 'sdist']) - return - - if os.path.exists('setup.cfg'): - note('Found setup.cfg, assuming python project.') - apt_install(session, ['python3-pep517', 'python3-pip']) - session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) - return - - if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): - apt_install(session, ['libdist-inkt-perl']) - with open('dist.ini', 'rb') as f: - for line in f: - if not line.startswith(b';;'): - continue - try: - (key, value) = line[2:].split(b'=', 1) - except ValueError: - continue - if (key.strip() == b'class' and - value.strip().startswith(b"'Dist::Inkt")): - note('Found Dist::Inkt section in dist.ini, ' - 'assuming distinkt.') - # TODO(jelmer): install via apt if possible - session.check_call( - ['cpan', 'install', value.decode().strip("'")], - user='root') - run_with_build_fixer(session, ['distinkt-dist']) - return - # Default to invoking Dist::Zilla - note('Found dist.ini, assuming dist-zilla.') - apt_install(session, ['libdist-zilla-perl']) - run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) - return - - if os.path.exists('package.json'): - apt_install(session, ['npm']) - run_with_build_fixer(session, ['npm', 'pack']) - return - - gemfiles = [name for name in os.listdir('.') if 
name.endswith('.gem')] - if gemfiles: - apt_install(session, ['gem2deb']) - if len(gemfiles) > 1: - warning('More than one gemfile. Trying the first?') - run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) - return - - if os.path.exists('waf'): - apt_install(session, ['python3']) - run_with_build_fixer(session, ['./waf', 'dist']) - return - - if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): - apt_install(session, ['perl']) - run_with_build_fixer(session, ['perl', 'Makefile.PL']) - - if not os.path.exists('Makefile') and not os.path.exists('configure'): - if os.path.exists('autogen.sh'): - if shebang_binary('autogen.sh') is None: - run_with_build_fixer(session, ['/bin/sh', './autogen.sh']) - else: - run_with_build_fixer(session, ['./autogen.sh']) - - elif os.path.exists('configure.ac') or os.path.exists('configure.in'): - apt_install(session, [ - 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) - run_with_build_fixer(session, ['autoreconf', '-i']) - - if not os.path.exists('Makefile') and os.path.exists('configure'): - session.check_call(['./configure']) - - if os.path.exists('Makefile'): - apt_install(session, ['make']) - run_with_build_fixer(session, ['make', 'dist']) - - raise NoBuildToolsFound() diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 2571abc..f4b6ba1 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -18,9 +18,10 @@ import os import sys from . import ( - run_dist, run_build, run_clean, run_install, run_test, NoBuildToolsFound, + run_build, run_clean, run_install, run_test, NoBuildToolsFound, note ) +from .dist import run_dist def main(): diff --git a/ognibuild/apt.py b/ognibuild/apt.py index 7454307..90510fc 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -17,6 +17,16 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +from typing import List + +from buildlog_consultant.sbuild import ( + find_apt_get_failure, + ) + +from . 
import DetailedFailure +from .session import Session, run_with_tee + + class UnidentifiedError(Exception): def __init__(self, retcode, argv, lines, secondary=None): @@ -24,3 +34,28 @@ class UnidentifiedError(Exception): self.argv = argv self.lines = lines self.secondary = secondary + + +def run_apt(session: Session, args: List[str]) -> None: + """Run apt.""" + args = ['apt', '-y'] + args + retcode, lines = run_with_tee(session, args, cwd='/', user='root') + if retcode == 0: + return + offset, line, error = find_apt_get_failure(lines) + if error is not None: + raise DetailedFailure(retcode, args, error) + if line is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(offset, line)) + while lines and lines[-1] == '': + lines.pop(-1) + raise UnidentifiedError(retcode, args, lines) + + +def install(session: Session, packages: List[str]) -> None: + run_apt(session, ['install'] + packages) + + +def satisfy(session: Session, deps: List[str]) -> None: + run_apt(session, ['satisfy'] + deps) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 2213076..ee61f77 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -20,7 +20,6 @@ import logging import os import re import shutil -import subprocess import sys import tempfile from typing import Optional, List, Tuple, Callable, Type @@ -33,45 +32,21 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype -from .session import run_with_tee +from . 
import apt, DetailedFailure, shebang_binary +from .session import run_with_tee, Session +from .session.schroot import SchrootSession from .debian.fix_build import ( DependencyContext, resolve_error, APT_FIXERS, ) from buildlog_consultant.sbuild import ( - find_apt_get_failure, find_build_failure_description, Problem, MissingPerlModule, MissingCommand, NoSpaceOnDevice, ) -from ognibuild import shebang_binary -from ognibuild.session import Session -from ognibuild.session.schroot import SchrootSession - - -def run_apt(session: Session, args: List[str]) -> None: - args = ['apt', '-y'] + args - retcode, lines = run_with_tee(session, args, cwd='/', user='root') - if retcode == 0: - return - offset, line, error = find_apt_get_failure(lines) - if error is not None: - raise DetailedDistCommandFailed(retcode, args, error) - if line is not None: - raise UnidentifiedError( - retcode, args, lines, secondary=(offset, line)) - raise UnidentifiedError(retcode, args, lines) - - -def apt_install(session: Session, packages: List[str]) -> None: - run_apt(session, ['install'] + packages) - - -def apt_satisfy(session: Session, deps: List[str]) -> None: - run_apt(session, ['satisfy'] + deps) def satisfy_build_deps(session: Session, tree): @@ -91,7 +66,7 @@ def satisfy_build_deps(session: Session, tree): deps = [ dep.strip().strip(',') for dep in deps] - apt_satisfy(session, deps) + apt.satisfy(session, deps) class SchrootDependencyContext(DependencyContext): @@ -101,27 +76,10 @@ class SchrootDependencyContext(DependencyContext): def add_dependency(self, package, minimum_version=None): # TODO(jelmer): Handle minimum_version - apt_install(self.session, [package]) + apt.install(self.session, [package]) return True -class DetailedDistCommandFailed(Exception): - - def __init__(self, retcode, argv, error): - self.retcode = retcode - self.argv = argv - self.error = error - - -class UnidentifiedError(Exception): - - def __init__(self, retcode, argv, lines, secondary=None): - self.retcode = 
retcode - self.argv = argv - self.lines = lines - self.secondary = secondary - - def fix_perl_module_from_cpan(error, context): # TODO(jelmer): Specify -T to skip tests? context.session.check_call( @@ -163,23 +121,23 @@ def run_with_build_fixer(session: Session, args: List[str]): if error is None: logging.warning('Build failed with unidentified error. Giving up.') if line is not None: - raise UnidentifiedError( + raise apt.UnidentifiedError( retcode, args, lines, secondary=(offset, line)) - raise UnidentifiedError(retcode, args, lines) + raise apt.UnidentifiedError(retcode, args, lines) logging.info('Identified error: %r', error) if error in fixed_errors: logging.warning( 'Failed to resolve error %r, it persisted. Giving up.', error) - raise DetailedDistCommandFailed(retcode, args, error) + raise DetailedFailure(retcode, args, error) if not resolve_error( error, SchrootDependencyContext(session), fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS)): logging.warning( 'Failed to find resolution for error %r. Giving up.', error) - raise DetailedDistCommandFailed(retcode, args, error) + raise DetailedFailure(retcode, args, error) fixed_errors.append(error) @@ -187,15 +145,15 @@ class NoBuildToolsFound(Exception): """No supported build tools were found.""" -def run_dist_in_chroot(session): - apt_install(session, ['git']) +def run_dist(session): + apt.install(session, ['git']) # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() if os.path.exists('package.xml'): - apt_install(session, ['php-pear', 'php-horde-core']) + apt.install(session, ['php-pear', 'php-horde-core']) logging.info('Found package.xml, assuming pear package.') session.check_call(['pear', 'package']) return @@ -208,14 +166,14 @@ def run_dist_in_chroot(session): logging.info( 'Found pyproject.toml with poetry section, ' 'assuming poetry project.') - apt_install(session, ['python3-venv', 'python3-pip']) + apt.install(session, ['python3-venv', 'python3-pip']) session.check_call(['pip3', 'install', 'poetry'], user='root') session.check_call(['poetry', 'build', '-f', 'sdist']) return if os.path.exists('setup.py'): logging.info('Found setup.py, assuming python project.') - apt_install(session, ['python3', 'python3-pip']) + apt.install(session, ['python3', 'python3-pip']) with open('setup.py', 'r') as f: setup_py_contents = f.read() try: @@ -225,11 +183,11 @@ def run_dist_in_chroot(session): setup_cfg_contents = '' if 'setuptools' in setup_py_contents: logging.info('Reference to setuptools found, installing.') - apt_install(session, ['python3-setuptools']) + apt.install(session, ['python3-setuptools']) if ('setuptools_scm' in setup_py_contents or 'setuptools_scm' in setup_cfg_contents): logging.info('Reference to setuptools-scm found, installing.') - apt_install( + apt.install( session, ['python3-setuptools-scm', 'git', 'mercurial']) # TODO(jelmer): Install setup_requires @@ -237,29 +195,29 @@ def run_dist_in_chroot(session): interpreter = shebang_binary('setup.py') if interpreter is not None: if interpreter == 'python3': - apt_install(session, ['python3']) + apt.install(session, ['python3']) elif interpreter == 'python2': - apt_install(session, ['python2']) + apt.install(session, ['python2']) elif interpreter == 'python': - apt_install(session, ['python']) + apt.install(session, ['python']) else: - raise ValueError('Unknown interpreter %s' % interpreter) - apt_install(session, 
['python2', 'python3']) + raise ValueError('Unknown interpreter %r' % interpreter) + apt.install(session, ['python2', 'python3']) run_with_build_fixer(session, ['./setup.py', 'sdist']) else: # Just assume it's Python 3 - apt_install(session, ['python3']) + apt.install(session, ['python3']) run_with_build_fixer(session, ['python3', './setup.py', 'sdist']) return if os.path.exists('setup.cfg'): logging.info('Found setup.cfg, assuming python project.') - apt_install(session, ['python3-pep517', 'python3-pip']) + apt.install(session, ['python3-pep517', 'python3-pip']) session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) return if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): - apt_install(session, ['libdist-inkt-perl']) + apt.install(session, ['libdist-inkt-perl']) with open('dist.ini', 'rb') as f: for line in f: if not line.startswith(b';;'): @@ -281,30 +239,30 @@ def run_dist_in_chroot(session): return # Default to invoking Dist::Zilla logging.info('Found dist.ini, assuming dist-zilla.') - apt_install(session, ['libdist-zilla-perl']) + apt.install(session, ['libdist-zilla-perl']) run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) return if os.path.exists('package.json'): - apt_install(session, ['npm']) + apt.install(session, ['npm']) run_with_build_fixer(session, ['npm', 'pack']) return gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] if gemfiles: - apt_install(session, ['gem2deb']) + apt.install(session, ['gem2deb']) if len(gemfiles) > 1: logging.warning('More than one gemfile. 
Trying the first?') run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) return if os.path.exists('waf'): - apt_install(session, ['python3']) + apt.install(session, ['python3']) run_with_build_fixer(session, ['./waf', 'dist']) return if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): - apt_install(session, ['perl']) + apt.install(session, ['perl']) run_with_build_fixer(session, ['perl', 'Makefile.PL']) if not os.path.exists('Makefile') and not os.path.exists('configure'): @@ -313,7 +271,7 @@ def run_dist_in_chroot(session): run_with_build_fixer(session, ['/bin/sh', './autogen.sh']) try: run_with_build_fixer(session, ['./autogen.sh']) - except UnidentifiedError as e: + except apt.UnidentifiedError as e: if ("Gnulib not yet bootstrapped; " "run ./bootstrap instead.\n" in e.lines): run_with_build_fixer(session, ["./bootstrap"]) @@ -322,7 +280,7 @@ def run_dist_in_chroot(session): raise elif os.path.exists('configure.ac') or os.path.exists('configure.in'): - apt_install(session, [ + apt.install(session, [ 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) run_with_build_fixer(session, ['autoreconf', '-i']) @@ -330,10 +288,10 @@ def run_dist_in_chroot(session): session.check_call(['./configure']) if os.path.exists('Makefile'): - apt_install(session, ['make']) + apt.install(session, ['make']) try: run_with_build_fixer(session, ['make', 'dist']) - except UnidentifiedError as e: + except apt.UnidentifiedError as e: if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass elif ("make[1]: *** No rule to make target 'dist'. 
Stop.\n" @@ -376,7 +334,7 @@ def export_vcs_tree(tree, directory): export(tree, directory, 'dir', None) except OSError as e: if e.errno == errno.ENOSPC: - raise DetailedDistCommandFailed( + raise DetailedFailure( 1, ['export'], NoSpaceOnDevice()) raise @@ -391,7 +349,7 @@ def dupe_vcs_tree(tree, directory): revision_id=tree.get_revision_id()) except OSError as e: if e.errno == errno.ENOSPC: - raise DetailedDistCommandFailed( + raise DetailedFailure( 1, ['sprout'], NoSpaceOnDevice()) raise # Copy parent location - some scripts need this @@ -417,7 +375,7 @@ def create_dist_schroot( directory = tempfile.mkdtemp(dir=build_dir) except OSError as e: if e.errno == errno.ENOSPC: - raise DetailedDistCommandFailed( + raise DetailedFailure( 1, ['mkdtemp'], NoSpaceOnDevice()) reldir = '/' + os.path.relpath(directory, session.location) @@ -433,7 +391,7 @@ def create_dist_schroot( os.chdir(export_directory) try: session.chdir(os.path.join(reldir, subdir)) - run_dist_in_chroot(session) + run_dist(session) except NoBuildToolsFound: logging.info( 'No build tools found, falling back to simple export.') diff --git a/ognibuild/session/schroot.py b/ognibuild/session/schroot.py index cb03f6f..2a7388c 100644 --- a/ognibuild/session/schroot.py +++ b/ognibuild/session/schroot.py @@ -15,6 +15,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import logging import shlex import subprocess @@ -120,8 +121,9 @@ class SchrootSession(Session): def create_home(self) -> None: """Create the user's home directory.""" home = self.check_output( - ['sh', '-c', 'echo $HOME']).decode().rstrip('\n') + ['sh', '-c', 'echo $HOME'], cwd='/').decode().rstrip('\n') user = self.check_output( - ['sh', '-c', 'echo $LOGNAME']).decode().rstrip('\n') - self.check_call(['mkdir', '-p', home], user='root') - self.check_call(['chown', user, home], user='root') + ['sh', '-c', 'echo $LOGNAME'], cwd='/').decode().rstrip('\n') + 
logging.info('Creating directory %s', home) + self.check_call(['mkdir', '-p', home], cwd='/', user='root') + self.check_call(['chown', user, home], cwd='/', user='root') From 69ae73b960ef5a50b7100ffa3ee7c833b3084b81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 14:59:49 +0000 Subject: [PATCH 15/83] Split out build system. --- ognibuild/__init__.py | 4 ---- ognibuild/buildsystem.py | 39 +++++++++++++++++++++++++++++++++++++++ ognibuild/dist.py | 9 +++++---- 3 files changed, 44 insertions(+), 8 deletions(-) create mode 100644 ognibuild/buildsystem.py diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index d769c5e..c0a0b30 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -24,10 +24,6 @@ import sys DEFAULT_PYTHON = 'python3' -class NoBuildToolsFound(Exception): - """No supported build tools were found.""" - - class DetailedFailure(Exception): def __init__(self, retcode, argv, error): diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py new file mode 100644 index 0000000..a68a9a4 --- /dev/null +++ b/ognibuild/buildsystem.py @@ -0,0 +1,39 @@ +#!/usr/bin/python +# Copyright (C) 2019-2020 Jelmer Vernooij +# encoding: utf-8 +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + + +import logging + + +class NoBuildToolsFound(Exception): + """No supported build tools were found.""" + + +class BuildSystem(object): + """A particular buildsystem.""" + + def __init__(self, session): + self.session = session + + def dist(self): + raise NotImplementedError(self.dist) + + +def detect_buildsystems(session): + """Detect build systems.""" + return [] diff --git a/ognibuild/dist.py b/ognibuild/dist.py index ee61f77..75294c9 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -33,6 +33,7 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype from . import apt, DetailedFailure, shebang_binary +from .buildsystem import detect_buildsystems from .session import run_with_tee, Session from .session.schroot import SchrootSession from .debian.fix_build import ( @@ -141,10 +142,6 @@ def run_with_build_fixer(session: Session, args: List[str]): fixed_errors.append(error) -class NoBuildToolsFound(Exception): - """No supported build tools were found.""" - - def run_dist(session): apt.install(session, ['git']) @@ -152,6 +149,10 @@ def run_dist(session): # e.g. pip caches in ~/.cache session.create_home() + for buildsystem in detect_buildsystems(session): + buildsystem.dist() + return + if os.path.exists('package.xml'): apt.install(session, ['php-pear', 'php-horde-core']) logging.info('Found package.xml, assuming pear package.') From 093343b0852c7011ad3a0f0a93a4fbfcd60c3b26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 15:20:55 +0000 Subject: [PATCH 16/83] Move some more things around. 
--- ognibuild/dist.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 75294c9..9899e79 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -26,6 +26,7 @@ from typing import Optional, List, Tuple, Callable, Type from debian.deb822 import Deb822 +from breezy.errors import NotBranchError from breezy.export import export from breezy.tree import Tree from breezy.workingtree import WorkingTree @@ -33,7 +34,7 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype from . import apt, DetailedFailure, shebang_binary -from .buildsystem import detect_buildsystems +from .buildsystem import detect_buildsystems, NoBuildToolsFound from .session import run_with_tee, Session from .session.schroot import SchrootSession from .debian.fix_build import ( @@ -50,6 +51,10 @@ from buildlog_consultant.sbuild import ( ) +class DistNoTarball(Exception): + """Dist operation did not create a tarball.""" + + def satisfy_build_deps(session: Session, tree): source = Deb822(tree.get_file('debian/control')) deps = [] @@ -353,9 +358,16 @@ def dupe_vcs_tree(tree, directory): raise DetailedFailure( 1, ['sprout'], NoSpaceOnDevice()) raise + if not result.has_workingtree(): + raise AssertionError # Copy parent location - some scripts need this - base_branch = tree._repository.controldir.open_branch() - parent = base_branch.get_parent() + if isinstance(tree, WorkingTree): + parent = tree.branch.get_parent() + else: + try: + parent = tree._repository.controldir.open_branch() + except NotBranchError: + parent = None if parent: result.open_branch().set_parent(parent) @@ -429,12 +441,12 @@ def create_dist_schroot( return fn logging.info('No tarball created :(') - return None + raise DistNoTarball() if __name__ == '__main__': import argparse - import breezy.bzr + import breezy.bzr # noqa: F401 import breezy.git # noqa: F401 parser = argparse.ArgumentParser() @@ 
-451,6 +463,9 @@ if __name__ == '__main__': '--target-directory', type=str, default='..', help='Target directory') args = parser.parse_args() + + logging.basicConfig(level=logging.INFO) + tree = WorkingTree.open(args.directory) if args.packaging_directory: packaging_tree = WorkingTree.open(args.packaging_directory) From 1dac0c527aa16d8217e0629facbc83f2c0fc8fef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 15:30:31 +0000 Subject: [PATCH 17/83] Split out DistCatcher. --- ognibuild/dist.py | 100 +++++++++++++++++++++++++++------------------- 1 file changed, 59 insertions(+), 41 deletions(-) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 9899e79..bd5520d 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -365,13 +365,56 @@ def dupe_vcs_tree(tree, directory): parent = tree.branch.get_parent() else: try: - parent = tree._repository.controldir.open_branch() + parent = tree._repository.controldir.open_branch().get_parent() except NotBranchError: parent = None if parent: result.open_branch().set_parent(parent) +class DistCatcher(object): + + def __init__(self, directory): + self.export_directory = directory + self.files = [] + self.existing_files = None + + def __enter__(self): + self.existing_files = os.listdir(self.export_directory) + return self + + def find_files(self): + new_files = os.listdir(self.export_directory) + diff_files = set(new_files) - set(self.existing_files) + diff = set([n for n in diff_files if get_filetype(n) is not None]) + if len(diff) == 1: + fn = diff.pop() + logging.info('Found tarball %s in package directory.', fn) + self.files.append(os.path.join(self.export_directory, fn)) + return fn + if 'dist' in diff_files: + for entry in os.scandir( + os.path.join(self.export_directory, 'dist')): + if get_filetype(entry.name) is not None: + logging.info( + 'Found tarball %s in dist directory.', entry.name) + self.files.append(entry.path) + return entry.name + logging.info('No tarballs found in dist 
directory.') + + parent_directory = os.path.dirname(self.export_directory) + diff = set(os.listdir(parent_directory)) - set([subdir]) + if len(diff) == 1: + fn = diff.pop() + logging.info('Found tarball %s in parent directory.', fn) + self.files.append(os.path.join(parent_directory, fn)) + return fn + + def __exit__(self, exc_type, exc_val, exc_tb): + self.find_files() + return False + + def create_dist_schroot( tree: Tree, target_dir: str, chroot: str, packaging_tree: Optional[Tree] = None, @@ -398,47 +441,22 @@ def create_dist_schroot( else: dupe_vcs_tree(tree, export_directory) - existing_files = os.listdir(export_directory) + with DistCatcher(export_directory) as dc: + oldcwd = os.getcwd() + os.chdir(export_directory) + try: + session.chdir(os.path.join(reldir, subdir)) + run_dist(session) + except NoBuildToolsFound: + logging.info( + 'No build tools found, falling back to simple export.') + return None + finally: + os.chdir(oldcwd) - oldcwd = os.getcwd() - os.chdir(export_directory) - try: - session.chdir(os.path.join(reldir, subdir)) - run_dist(session) - except NoBuildToolsFound: - logging.info( - 'No build tools found, falling back to simple export.') - return None - finally: - os.chdir(oldcwd) - - new_files = os.listdir(export_directory) - diff_files = set(new_files) - set(existing_files) - diff = set([n for n in diff_files if get_filetype(n) is not None]) - if len(diff) == 1: - fn = diff.pop() - logging.info('Found tarball %s in package directory.', fn) - shutil.copy( - os.path.join(export_directory, fn), - target_dir) - return fn - if 'dist' in diff_files: - for entry in os.scandir(os.path.join(export_directory, 'dist')): - if get_filetype(entry.name) is not None: - logging.info( - 'Found tarball %s in dist directory.', entry.name) - shutil.copy(entry.path, target_dir) - return entry.name - logging.info('No tarballs found in dist directory.') - - diff = set(os.listdir(directory)) - set([subdir]) - if len(diff) == 1: - fn = diff.pop() - 
logging.info('Found tarball %s in parent directory.', fn) - shutil.copy( - os.path.join(directory, fn), - target_dir) - return fn + for path in dc.files: + shutil.copy(path, target_dir) + return os.path.join(target_dir, os.path.basename(path)) logging.info('No tarball created :(') raise DistNoTarball() From 8133a5fa6899c83bf254ba28469859088377499a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 16:09:49 +0000 Subject: [PATCH 18/83] Raise NoBuildToolsFound. --- ognibuild/__main__.py | 6 +++--- ognibuild/dist.py | 24 ++++++++++++------------ 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index f4b6ba1..3cb47ae 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -18,9 +18,9 @@ import os import sys from . import ( - run_build, run_clean, run_install, run_test, NoBuildToolsFound, - note - ) + run_build, run_clean, run_install, run_test, + note) +from .buildsystem import NoBuildToolsFound from .dist import run_dist diff --git a/ognibuild/dist.py b/ognibuild/dist.py index bd5520d..62837b7 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -419,7 +419,7 @@ def create_dist_schroot( tree: Tree, target_dir: str, chroot: str, packaging_tree: Optional[Tree] = None, include_controldir: bool = True, - subdir: Optional[str] = None) -> Optional[str]: + subdir: Optional[str] = None) -> str: if subdir is None: subdir = 'package' with SchrootSession(chroot) as session: @@ -447,10 +447,6 @@ def create_dist_schroot( try: session.chdir(os.path.join(reldir, subdir)) run_dist(session) - except NoBuildToolsFound: - logging.info( - 'No build tools found, falling back to simple export.') - return None finally: os.chdir(oldcwd) @@ -495,11 +491,15 @@ if __name__ == '__main__': packaging_tree = None subdir = None - ret = create_dist_schroot( - tree, subdir=subdir, target_dir=os.path.abspath(args.target_directory), - packaging_tree=packaging_tree, - chroot=args.chroot) - if 
ret: - sys.exit(0) + try: + ret = create_dist_schroot( + tree, subdir=subdir, + target_dir=os.path.abspath(args.target_directory), + packaging_tree=packaging_tree, + chroot=args.chroot) + except NoBuildToolsFound: + logging.info('No build tools found, falling back to simple export.') + export(tree, 'dist.tar.gz', 'tgz', None) else: - sys.exit(1) + print('Created %s' % ret) + sys.exit(0) From db616ee3b0785299a7284339f3c012c8d2c18246 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 18:35:23 +0000 Subject: [PATCH 19/83] Don't invoke apt unless we have to. --- ognibuild/apt.py | 30 +++++++++++++++--- ognibuild/dist.py | 59 ++++++++++++++++++----------------- ognibuild/session/__init__.py | 2 +- ognibuild/session/plain.py | 2 ++ 4 files changed, 60 insertions(+), 33 deletions(-) diff --git a/ognibuild/apt.py b/ognibuild/apt.py index 90510fc..f38a04e 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -19,6 +19,8 @@ from typing import List +import apt_pkg +import os from buildlog_consultant.sbuild import ( find_apt_get_failure, ) @@ -53,9 +55,29 @@ def run_apt(session: Session, args: List[str]) -> None: raise UnidentifiedError(retcode, args, lines) -def install(session: Session, packages: List[str]) -> None: - run_apt(session, ['install'] + packages) +class AptResolver(object): + session: Session -def satisfy(session: Session, deps: List[str]) -> None: - run_apt(session, ['satisfy'] + deps) + def __init__(self, session): + self.session = session + + def missing(self, packages): + root = getattr(self.session, 'location', '/') + status_path = os.path.join(root, 'var/lib/dpkg/status') + missing = set(packages) + with apt_pkg.TagFile(status_path) as tagf: + while tagf and missing: + tagf.step() + if tagf.section['Package'] in missing: + if tagf.section['Status'] == 'install ok installed': + missing.remove(tagf.section['Package']) + return list(missing) + + def install(self, packages: List[str]) -> None: + packages = 
self.missing(packages) + if packages: + run_apt(self.session, ['install'] + packages) + + def satisfy(self, deps: List[str]) -> None: + run_apt(self.session, ['satisfy'] + deps) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 62837b7..01f7d8e 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -33,7 +33,8 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype -from . import apt, DetailedFailure, shebang_binary +from . import DetailedFailure, shebang_binary +from .apt import AptResolver, UnidentifiedError from .buildsystem import detect_buildsystems, NoBuildToolsFound from .session import run_with_tee, Session from .session.schroot import SchrootSession @@ -72,17 +73,19 @@ def satisfy_build_deps(session: Session, tree): deps = [ dep.strip().strip(',') for dep in deps] - apt.satisfy(session, deps) + apt = AptResolver(session) + apt.satisfy(deps) class SchrootDependencyContext(DependencyContext): def __init__(self, session): self.session = session + self.apt = AptResolver(session) def add_dependency(self, package, minimum_version=None): # TODO(jelmer): Handle minimum_version - apt.install(self.session, [package]) + self.apt.install([package]) return True @@ -127,9 +130,9 @@ def run_with_build_fixer(session: Session, args: List[str]): if error is None: logging.warning('Build failed with unidentified error. Giving up.') if line is not None: - raise apt.UnidentifiedError( + raise UnidentifiedError( retcode, args, lines, secondary=(offset, line)) - raise apt.UnidentifiedError(retcode, args, lines) + raise UnidentifiedError(retcode, args, lines) logging.info('Identified error: %r', error) if error in fixed_errors: @@ -148,7 +151,8 @@ def run_with_build_fixer(session: Session, args: List[str]): def run_dist(session): - apt.install(session, ['git']) + apt = AptResolver(session) + apt.install(['git']) # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache @@ -159,7 +163,7 @@ def run_dist(session): return if os.path.exists('package.xml'): - apt.install(session, ['php-pear', 'php-horde-core']) + apt.install(['php-pear', 'php-horde-core']) logging.info('Found package.xml, assuming pear package.') session.check_call(['pear', 'package']) return @@ -172,14 +176,14 @@ def run_dist(session): logging.info( 'Found pyproject.toml with poetry section, ' 'assuming poetry project.') - apt.install(session, ['python3-venv', 'python3-pip']) + apt.install(['python3-venv', 'python3-pip']) session.check_call(['pip3', 'install', 'poetry'], user='root') session.check_call(['poetry', 'build', '-f', 'sdist']) return if os.path.exists('setup.py'): logging.info('Found setup.py, assuming python project.') - apt.install(session, ['python3', 'python3-pip']) + apt.install(['python3', 'python3-pip']) with open('setup.py', 'r') as f: setup_py_contents = f.read() try: @@ -189,41 +193,40 @@ def run_dist(session): setup_cfg_contents = '' if 'setuptools' in setup_py_contents: logging.info('Reference to setuptools found, installing.') - apt.install(session, ['python3-setuptools']) + apt.install(['python3-setuptools']) if ('setuptools_scm' in setup_py_contents or 'setuptools_scm' in setup_cfg_contents): logging.info('Reference to setuptools-scm found, installing.') - apt.install( - session, ['python3-setuptools-scm', 'git', 'mercurial']) + apt.install(['python3-setuptools-scm', 'git', 'mercurial']) # TODO(jelmer): Install setup_requires interpreter = shebang_binary('setup.py') if interpreter is not None: if interpreter == 'python3': - apt.install(session, ['python3']) + apt.install(['python3']) elif interpreter == 'python2': - apt.install(session, ['python2']) + apt.install(['python2']) elif interpreter == 'python': - apt.install(session, ['python']) + apt.install(['python']) else: raise ValueError('Unknown interpreter %r' % interpreter) - apt.install(session, ['python2', 'python3']) + apt.install(['python2', 'python3']) 
run_with_build_fixer(session, ['./setup.py', 'sdist']) else: # Just assume it's Python 3 - apt.install(session, ['python3']) + apt.install(['python3']) run_with_build_fixer(session, ['python3', './setup.py', 'sdist']) return if os.path.exists('setup.cfg'): logging.info('Found setup.cfg, assuming python project.') - apt.install(session, ['python3-pep517', 'python3-pip']) + apt.install(['python3-pep517', 'python3-pip']) session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) return if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): - apt.install(session, ['libdist-inkt-perl']) + apt.install(['libdist-inkt-perl']) with open('dist.ini', 'rb') as f: for line in f: if not line.startswith(b';;'): @@ -245,30 +248,30 @@ def run_dist(session): return # Default to invoking Dist::Zilla logging.info('Found dist.ini, assuming dist-zilla.') - apt.install(session, ['libdist-zilla-perl']) + apt.install(['libdist-zilla-perl']) run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) return if os.path.exists('package.json'): - apt.install(session, ['npm']) + apt.install(['npm']) run_with_build_fixer(session, ['npm', 'pack']) return gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] if gemfiles: - apt.install(session, ['gem2deb']) + apt.install(['gem2deb']) if len(gemfiles) > 1: logging.warning('More than one gemfile. 
Trying the first?') run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) return if os.path.exists('waf'): - apt.install(session, ['python3']) + apt.install(['python3']) run_with_build_fixer(session, ['./waf', 'dist']) return if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): - apt.install(session, ['perl']) + apt.install(['perl']) run_with_build_fixer(session, ['perl', 'Makefile.PL']) if not os.path.exists('Makefile') and not os.path.exists('configure'): @@ -277,7 +280,7 @@ def run_dist(session): run_with_build_fixer(session, ['/bin/sh', './autogen.sh']) try: run_with_build_fixer(session, ['./autogen.sh']) - except apt.UnidentifiedError as e: + except UnidentifiedError as e: if ("Gnulib not yet bootstrapped; " "run ./bootstrap instead.\n" in e.lines): run_with_build_fixer(session, ["./bootstrap"]) @@ -286,7 +289,7 @@ def run_dist(session): raise elif os.path.exists('configure.ac') or os.path.exists('configure.in'): - apt.install(session, [ + apt.install([ 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) run_with_build_fixer(session, ['autoreconf', '-i']) @@ -294,10 +297,10 @@ def run_dist(session): session.check_call(['./configure']) if os.path.exists('Makefile'): - apt.install(session, ['make']) + apt.install(['make']) try: run_with_build_fixer(session, ['make', 'dist']) - except apt.UnidentifiedError as e: + except UnidentifiedError as e: if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass elif ("make[1]: *** No rule to make target 'dist'. 
Stop.\n" diff --git a/ognibuild/session/__init__.py b/ognibuild/session/__init__.py index 3cc87af..e78510b 100644 --- a/ognibuild/session/__init__.py +++ b/ognibuild/session/__init__.py @@ -34,7 +34,7 @@ class Session(object): @property def location(self) -> str: - raise NotImplementedError(self.location) + raise NotImplementedError def check_call( self, diff --git a/ognibuild/session/plain.py b/ognibuild/session/plain.py index deb27cb..1b4fbfb 100644 --- a/ognibuild/session/plain.py +++ b/ognibuild/session/plain.py @@ -24,6 +24,8 @@ import subprocess class PlainSession(Session): """Session ignoring user.""" + location = '/' + def create_home(self): pass From 3b71585f6cb7b4553d8d73d1427721d15df9e570 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 18:48:00 +0000 Subject: [PATCH 20/83] Split out fix_build module from dist. --- .gitignore | 3 ++ ognibuild/apt.py | 2 +- ognibuild/dist.py | 98 ++++--------------------------------- ognibuild/fix_build.py | 108 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 122 insertions(+), 89 deletions(-) create mode 100644 ognibuild/fix_build.py diff --git a/.gitignore b/.gitignore index 989ed96..13818ea 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,6 @@ ognibuild.egg-info dist __pycache__ .eggs +*.swp +*.swo +*.swn diff --git a/ognibuild/apt.py b/ognibuild/apt.py index f38a04e..ad29696 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -55,7 +55,7 @@ def run_apt(session: Session, args: List[str]) -> None: raise UnidentifiedError(retcode, args, lines) -class AptResolver(object): +class AptManager(object): session: Session diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 01f7d8e..cf270b1 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -33,29 +33,24 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype +from buildlog_consultant.sbuild import ( + NoSpaceOnDevice, + ) + from . 
import DetailedFailure, shebang_binary -from .apt import AptResolver, UnidentifiedError +from .apt import AptManager, UnidentifiedError +from .fix_build import run_with_build_fixer from .buildsystem import detect_buildsystems, NoBuildToolsFound from .session import run_with_tee, Session from .session.schroot import SchrootSession -from .debian.fix_build import ( - DependencyContext, - resolve_error, - APT_FIXERS, - ) -from buildlog_consultant.sbuild import ( - find_build_failure_description, - Problem, - MissingPerlModule, - MissingCommand, - NoSpaceOnDevice, - ) +from .debian.fix_build import DependencyContext class DistNoTarball(Exception): """Dist operation did not create a tarball.""" +# TODO(jelmer): move this to debian/ def satisfy_build_deps(session: Session, tree): source = Deb822(tree.get_file('debian/control')) deps = [] @@ -73,85 +68,12 @@ def satisfy_build_deps(session: Session, tree): deps = [ dep.strip().strip(',') for dep in deps] - apt = AptResolver(session) + apt = AptManager(session) apt.satisfy(deps) -class SchrootDependencyContext(DependencyContext): - - def __init__(self, session): - self.session = session - self.apt = AptResolver(session) - - def add_dependency(self, package, minimum_version=None): - # TODO(jelmer): Handle minimum_version - self.apt.install([package]) - return True - - -def fix_perl_module_from_cpan(error, context): - # TODO(jelmer): Specify -T to skip tests? 
- context.session.check_call( - ['cpan', '-i', error.module], user='root', - env={'PERL_MM_USE_DEFAULT': '1'}) - return True - - -NPM_COMMAND_PACKAGES = { - 'del-cli': 'del-cli', - } - - -def fix_npm_missing_command(error, context): - try: - package = NPM_COMMAND_PACKAGES[error.command] - except KeyError: - return False - - context.session.check_call(['npm', '-g', 'install', package]) - return True - - -GENERIC_INSTALL_FIXERS: List[ - Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ - (MissingPerlModule, fix_perl_module_from_cpan), - (MissingCommand, fix_npm_missing_command), -] - - -def run_with_build_fixer(session: Session, args: List[str]): - logging.info('Running %r', args) - fixed_errors = [] - while True: - retcode, lines = run_with_tee(session, args) - if retcode == 0: - return - offset, line, error = find_build_failure_description(lines) - if error is None: - logging.warning('Build failed with unidentified error. Giving up.') - if line is not None: - raise UnidentifiedError( - retcode, args, lines, secondary=(offset, line)) - raise UnidentifiedError(retcode, args, lines) - - logging.info('Identified error: %r', error) - if error in fixed_errors: - logging.warning( - 'Failed to resolve error %r, it persisted. Giving up.', - error) - raise DetailedFailure(retcode, args, error) - if not resolve_error( - error, SchrootDependencyContext(session), - fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS)): - logging.warning( - 'Failed to find resolution for error %r. 
Giving up.', - error) - raise DetailedFailure(retcode, args, error) - fixed_errors.append(error) - - def run_dist(session): - apt = AptResolver(session) + apt = AptManager(session) apt.install(['git']) # Some things want to write to the user's home directory, diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py new file mode 100644 index 0000000..af4f338 --- /dev/null +++ b/ognibuild/fix_build.py @@ -0,0 +1,108 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import logging +from typing import List, Tuple, Callable, Type + +from buildlog_consultant.sbuild import ( + find_build_failure_description, + Problem, + MissingPerlModule, + MissingCommand, + ) + +from . 
import DetailedFailure +from .apt import UnidentifiedError, AptManager +from .debian.fix_build import ( + DependencyContext, + resolve_error, + APT_FIXERS, + ) +from .session import Session, run_with_tee + + +class SchrootDependencyContext(DependencyContext): + + def __init__(self, session): + self.session = session + self.apt = AptManager(session) + + def add_dependency(self, package, minimum_version=None): + # TODO(jelmer): Handle minimum_version + self.apt.install([package]) + return True + + +def fix_perl_module_from_cpan(error, context): + # TODO(jelmer): Specify -T to skip tests? + context.session.check_call( + ['cpan', '-i', error.module], user='root', + env={'PERL_MM_USE_DEFAULT': '1'}) + return True + + +NPM_COMMAND_PACKAGES = { + 'del-cli': 'del-cli', + } + + +def fix_npm_missing_command(error, context): + try: + package = NPM_COMMAND_PACKAGES[error.command] + except KeyError: + return False + + context.session.check_call(['npm', '-g', 'install', package]) + return True + + +GENERIC_INSTALL_FIXERS: List[ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + (MissingPerlModule, fix_perl_module_from_cpan), + (MissingCommand, fix_npm_missing_command), +] + + +def run_with_build_fixer(session: Session, args: List[str]): + logging.info('Running %r', args) + fixed_errors = [] + while True: + retcode, lines = run_with_tee(session, args) + if retcode == 0: + return + offset, line, error = find_build_failure_description(lines) + if error is None: + logging.warning('Build failed with unidentified error. Giving up.') + if line is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(offset, line)) + raise UnidentifiedError(retcode, args, lines) + + logging.info('Identified error: %r', error) + if error in fixed_errors: + logging.warning( + 'Failed to resolve error %r, it persisted. 
Giving up.', + error) + raise DetailedFailure(retcode, args, error) + if not resolve_error( + error, SchrootDependencyContext(session), + fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS)): + logging.warning( + 'Failed to find resolution for error %r. Giving up.', + error) + raise DetailedFailure(retcode, args, error) + fixed_errors.append(error) From 88213d8500b5771d6ba35d55f64cdae4b0662229 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 6 Feb 2021 18:50:52 +0000 Subject: [PATCH 21/83] Split out vcs module. --- ognibuild/dist.py | 40 +---------------------------- ognibuild/vcs.py | 65 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 39 deletions(-) create mode 100644 ognibuild/vcs.py diff --git a/ognibuild/dist.py b/ognibuild/dist.py index cf270b1..0000eee 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -26,8 +26,6 @@ from typing import Optional, List, Tuple, Callable, Type from debian.deb822 import Deb822 -from breezy.errors import NotBranchError -from breezy.export import export from breezy.tree import Tree from breezy.workingtree import WorkingTree @@ -44,6 +42,7 @@ from .buildsystem import detect_buildsystems, NoBuildToolsFound from .session import run_with_tee, Session from .session.schroot import SchrootSession from .debian.fix_build import DependencyContext +from .vcs import dupe_vcs_tree, export_vcs_tree class DistNoTarball(Exception): @@ -260,43 +259,6 @@ def run_dist(session): raise NoBuildToolsFound() -def export_vcs_tree(tree, directory): - try: - export(tree, directory, 'dir', None) - except OSError as e: - if e.errno == errno.ENOSPC: - raise DetailedFailure( - 1, ['export'], NoSpaceOnDevice()) - raise - - -def dupe_vcs_tree(tree, directory): - with tree.lock_read(): - if isinstance(tree, WorkingTree): - tree = tree.basis_tree() - try: - result = tree._repository.controldir.sprout( - directory, create_tree_if_local=True, - revision_id=tree.get_revision_id()) - except OSError as e: - if 
e.errno == errno.ENOSPC: - raise DetailedFailure( - 1, ['sprout'], NoSpaceOnDevice()) - raise - if not result.has_workingtree(): - raise AssertionError - # Copy parent location - some scripts need this - if isinstance(tree, WorkingTree): - parent = tree.branch.get_parent() - else: - try: - parent = tree._repository.controldir.open_branch().get_parent() - except NotBranchError: - parent = None - if parent: - result.open_branch().set_parent(parent) - - class DistCatcher(object): def __init__(self, directory): diff --git a/ognibuild/vcs.py b/ognibuild/vcs.py new file mode 100644 index 0000000..23994e5 --- /dev/null +++ b/ognibuild/vcs.py @@ -0,0 +1,65 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import errno + +from breezy.errors import NotBranchError +from breezy.export import export +from breezy.workingtree import WorkingTree + +from buildlog_consultant.sbuild import ( + NoSpaceOnDevice, + ) + +from . 
import DetailedFailure + + +def export_vcs_tree(tree, directory): + try: + export(tree, directory, 'dir', None) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedFailure( + 1, ['export'], NoSpaceOnDevice()) + raise + + +def dupe_vcs_tree(tree, directory): + with tree.lock_read(): + if isinstance(tree, WorkingTree): + tree = tree.basis_tree() + try: + result = tree._repository.controldir.sprout( + directory, create_tree_if_local=True, + revision_id=tree.get_revision_id()) + except OSError as e: + if e.errno == errno.ENOSPC: + raise DetailedFailure( + 1, ['sprout'], NoSpaceOnDevice()) + raise + if not result.has_workingtree(): + raise AssertionError + # Copy parent location - some scripts need this + if isinstance(tree, WorkingTree): + parent = tree.branch.get_parent() + else: + try: + parent = tree._repository.controldir.open_branch().get_parent() + except NotBranchError: + parent = None + if parent: + result.open_branch().set_parent(parent) From 21ed7c4a7d0100b1455eeedec5434e100fd60d73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 7 Feb 2021 21:55:47 +0000 Subject: [PATCH 22/83] Implement basic subcommands. --- ognibuild/__main__.py | 8 ++++--- ognibuild/apt.py | 4 +++- ognibuild/build.py | 34 ++++++++++++++++++++++++++++ ognibuild/buildsystem.py | 48 +++++++++++++++++++++++++++++++++++++++- ognibuild/clean.py | 34 ++++++++++++++++++++++++++++ ognibuild/install.py | 31 ++++++++++++++++++++++++++ ognibuild/test.py | 34 ++++++++++++++++++++++++++++ 7 files changed, 188 insertions(+), 5 deletions(-) create mode 100644 ognibuild/build.py create mode 100644 ognibuild/clean.py create mode 100644 ognibuild/install.py create mode 100644 ognibuild/test.py diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 3cb47ae..aef0c6f 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -17,11 +17,13 @@ import os import sys -from . import ( - run_build, run_clean, run_install, run_test, - note) +from . 
import note from .buildsystem import NoBuildToolsFound +from .build import run_build +from .clean import run_clean from .dist import run_dist +from .install import run_install +from .test import run_test def main(): diff --git a/ognibuild/apt.py b/ognibuild/apt.py index ad29696..00ca1a7 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -67,8 +67,10 @@ class AptManager(object): status_path = os.path.join(root, 'var/lib/dpkg/status') missing = set(packages) with apt_pkg.TagFile(status_path) as tagf: - while tagf and missing: + while missing: tagf.step() + if not tagf.section: + break if tagf.section['Package'] in missing: if tagf.section['Status'] == 'install ok installed': missing.remove(tagf.section['Package']) diff --git a/ognibuild/build.py b/ognibuild/build.py new file mode 100644 index 0000000..405a5b4 --- /dev/null +++ b/ognibuild/build.py @@ -0,0 +1,34 @@ +#!/usr/bin/python3 +# Copyright (C) 2020-2021 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from .apt import AptManager +from .buildsystem import detect_buildsystems, NoBuildToolsFound + + +def run_build(session): + apt = AptManager(session) + apt.install(['git']) + + # Some things want to write to the user's home directory, + # e.g. 
pip caches in ~/.cache + session.create_home() + + for buildsystem in detect_buildsystems(session): + buildsystem.build() + return + + raise NoBuildToolsFound() diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index a68a9a4..9ce72be 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -18,6 +18,10 @@ import logging +import os + +from .apt import AptManager +from .fix_build import run_with_build_fixer class NoBuildToolsFound(Exception): @@ -33,7 +37,49 @@ class BuildSystem(object): def dist(self): raise NotImplementedError(self.dist) + def test(self): + raise NotImplementedError(self.test) + + def build(self): + raise NotImplementedError(self.build) + + def clean(self): + raise NotImplementedError(self.clean) + + def install(self): + raise NotImplementedError(self.install) + + +class Pear(BuildSystem): + + def dist(self): + apt = AptManager(self.session) + apt.install(['php-pear']) + run_with_build_fixer(self.session, ['pear', 'package']) + + def test(self): + apt = AptManager(self.session) + apt.install(['php-pear']) + run_with_build_fixer(self.session, ['pear', 'run-tests']) + + def build(self): + apt = AptManager(self.session) + apt.install(['php-pear']) + run_with_build_fixer(self.session, ['pear', 'build']) + + def clean(self): + apt = AptManager(self.session) + apt.install(['php-pear']) + # TODO + + def install(self): + apt = AptManager(self.session) + apt.install(['php-pear']) + run_with_build_fixer(self.session, ['pear', 'install']) + def detect_buildsystems(session): """Detect build systems.""" - return [] + if os.path.exists('package.xml'): + logging.info('Found package.xml, assuming pear package.') + yield Pear(session) diff --git a/ognibuild/clean.py b/ognibuild/clean.py new file mode 100644 index 0000000..80bfaa0 --- /dev/null +++ b/ognibuild/clean.py @@ -0,0 +1,34 @@ +#!/usr/bin/python3 +# Copyright (C) 2020-2021 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the 
terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from .apt import AptManager +from .buildsystem import detect_buildsystems, NoBuildToolsFound + + +def run_clean(session): + apt = AptManager(session) + apt.install(['git']) + + # Some things want to write to the user's home directory, + # e.g. pip caches in ~/.cache + session.create_home() + + for buildsystem in detect_buildsystems(session): + buildsystem.clean() + return + + raise NoBuildToolsFound() diff --git a/ognibuild/install.py b/ognibuild/install.py new file mode 100644 index 0000000..27ec180 --- /dev/null +++ b/ognibuild/install.py @@ -0,0 +1,31 @@ +#!/usr/bin/python3 +# Copyright (C) 2020-2021 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from .apt import AptManager +from .buildsystem import detect_buildsystems, NoBuildToolsFound + + +def run_install(session): + # Some things want to write to the user's home directory, + # e.g. pip caches in ~/.cache + session.create_home() + + for buildsystem in detect_buildsystems(session): + buildsystem.install() + return + + raise NoBuildToolsFound() diff --git a/ognibuild/test.py b/ognibuild/test.py new file mode 100644 index 0000000..10c90d7 --- /dev/null +++ b/ognibuild/test.py @@ -0,0 +1,34 @@ +#!/usr/bin/python3 +# Copyright (C) 2020-2021 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from .apt import AptManager +from .buildsystem import detect_buildsystems, NoBuildToolsFound + + +def run_test(session): + apt = AptManager(session) + apt.install(['git']) + + # Some things want to write to the user's home directory, + # e.g. 
pip caches in ~/.cache + session.create_home() + + for buildsystem in detect_buildsystems(session): + buildsystem.test() + return + + raise NoBuildToolsFound() From 3c884f2c51d5d2f35f756193c1c8c65e3f1cd482 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 7 Feb 2021 22:08:29 +0000 Subject: [PATCH 23/83] Some more improvements. --- ognibuild/__main__.py | 3 ++ ognibuild/buildsystem.py | 90 ++++++++++++++++++++++++++++++++++++++++ ognibuild/dist.py | 64 ++-------------------------- 3 files changed, 96 insertions(+), 61 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index aef0c6f..32a1739 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -37,6 +37,9 @@ def main(): default='.') parser.add_argument( '--schroot', type=str, help='schroot to run in.') + parser.add_argument( + '--resolve', choices=['explain', 'apt', 'native'], + help='What to do about missing dependencies') args = parser.parse_args() if args.schroot: from .session.schroot import SchrootSession diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 9ce72be..31e8b41 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -20,6 +20,7 @@ import logging import os +from . 
import shebang_binary from .apt import AptManager from .fix_build import run_with_build_fixer @@ -78,8 +79,97 @@ class Pear(BuildSystem): run_with_build_fixer(self.session, ['pear', 'install']) +class SetupPy(BuildSystem): + + def prereqs(self): + apt = AptManager(self.session) + apt.install(['python3', 'python3-pip']) + with open('setup.py', 'r') as f: + setup_py_contents = f.read() + try: + with open('setup.cfg', 'r') as f: + setup_cfg_contents = f.read() + except FileNotFoundError: + setup_cfg_contents = '' + if 'setuptools' in setup_py_contents: + logging.info('Reference to setuptools found, installing.') + apt.install(['python3-setuptools']) + if ('setuptools_scm' in setup_py_contents or + 'setuptools_scm' in setup_cfg_contents): + logging.info('Reference to setuptools-scm found, installing.') + apt.install(['python3-setuptools-scm', 'git', 'mercurial']) + + # TODO(jelmer): Install setup_requires + + def test(self): + self.prereqs() + self._run_setup(['test']) + + def dist(self): + self.prereqs() + self._run_setup(['sdist']) + + def clean(self): + self.prereqs() + self._run_setup(['clean']) + + def install(self): + self.prereqs() + self._run_setup(['install']) + + def _run_setup(self, args): + apt = AptManager(self.session) + interpreter = shebang_binary('setup.py') + if interpreter is not None: + if interpreter == 'python3': + apt.install(['python3']) + elif interpreter == 'python2': + apt.install(['python2']) + elif interpreter == 'python': + apt.install(['python']) + else: + raise ValueError('Unknown interpreter %r' % interpreter) + apt.install(['python2', 'python3']) + run_with_build_fixer( + self.session, ['./setup.py'] + args) + else: + # Just assume it's Python 3 + apt.install(['python3']) + run_with_build_fixer( + self.session, ['python3', './setup.py'] + args) + + +class PyProject(BuildSystem): + + def load_toml(self): + import toml + with open('pyproject.toml', 'r') as pf: + return toml.load(pf) + + def dist(self): + apt = AptManager(self.session) + 
pyproject = self.load_toml() + if 'poetry' in pyproject.get('tool', []): + logging.info( + 'Found pyproject.toml with poetry section, ' + 'assuming poetry project.') + apt.install(['python3-venv', 'python3-pip']) + self.session.check_call(['pip3', 'install', 'poetry'], user='root') + self.session.check_call(['poetry', 'build', '-f', 'sdist']) + return + raise AssertionError('no supported section in pyproject.toml') + + def detect_buildsystems(session): """Detect build systems.""" if os.path.exists('package.xml'): logging.info('Found package.xml, assuming pear package.') yield Pear(session) + + if os.path.exists('setup.py'): + logging.info('Found setup.py, assuming python project.') + yield SetupPy(session) + + if os.path.exists('pyproject.toml'): + logging.info('Found pyproject.toml, assuming python project.') + yield PyProject(session) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 0000eee..dcab9b1 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -22,7 +22,7 @@ import re import shutil import sys import tempfile -from typing import Optional, List, Tuple, Callable, Type +from typing import Optional, List, Tuple from debian.deb822 import Deb822 @@ -39,9 +39,8 @@ from . 
import DetailedFailure, shebang_binary from .apt import AptManager, UnidentifiedError from .fix_build import run_with_build_fixer from .buildsystem import detect_buildsystems, NoBuildToolsFound -from .session import run_with_tee, Session +from .session import Session from .session.schroot import SchrootSession -from .debian.fix_build import DependencyContext from .vcs import dupe_vcs_tree, export_vcs_tree @@ -58,7 +57,7 @@ def satisfy_build_deps(session: Session, tree): deps.append(source[name].strip().strip(',')) except KeyError: pass - for name in ['Build-Conflicts', 'Build-Conflicts-Indeo', + for name in ['Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Conflicts-Arch']: try: deps.append('Conflicts: ' + source[name]) @@ -83,63 +82,6 @@ def run_dist(session): buildsystem.dist() return - if os.path.exists('package.xml'): - apt.install(['php-pear', 'php-horde-core']) - logging.info('Found package.xml, assuming pear package.') - session.check_call(['pear', 'package']) - return - - if os.path.exists('pyproject.toml'): - import toml - with open('pyproject.toml', 'r') as pf: - pyproject = toml.load(pf) - if 'poetry' in pyproject.get('tool', []): - logging.info( - 'Found pyproject.toml with poetry section, ' - 'assuming poetry project.') - apt.install(['python3-venv', 'python3-pip']) - session.check_call(['pip3', 'install', 'poetry'], user='root') - session.check_call(['poetry', 'build', '-f', 'sdist']) - return - - if os.path.exists('setup.py'): - logging.info('Found setup.py, assuming python project.') - apt.install(['python3', 'python3-pip']) - with open('setup.py', 'r') as f: - setup_py_contents = f.read() - try: - with open('setup.cfg', 'r') as f: - setup_cfg_contents = f.read() - except FileNotFoundError: - setup_cfg_contents = '' - if 'setuptools' in setup_py_contents: - logging.info('Reference to setuptools found, installing.') - apt.install(['python3-setuptools']) - if ('setuptools_scm' in setup_py_contents or - 'setuptools_scm' in setup_cfg_contents): - 
logging.info('Reference to setuptools-scm found, installing.') - apt.install(['python3-setuptools-scm', 'git', 'mercurial']) - - # TODO(jelmer): Install setup_requires - - interpreter = shebang_binary('setup.py') - if interpreter is not None: - if interpreter == 'python3': - apt.install(['python3']) - elif interpreter == 'python2': - apt.install(['python2']) - elif interpreter == 'python': - apt.install(['python']) - else: - raise ValueError('Unknown interpreter %r' % interpreter) - apt.install(['python2', 'python3']) - run_with_build_fixer(session, ['./setup.py', 'sdist']) - else: - # Just assume it's Python 3 - apt.install(['python3']) - run_with_build_fixer(session, ['python3', './setup.py', 'sdist']) - return - if os.path.exists('setup.cfg'): logging.info('Found setup.cfg, assuming python project.') apt.install(['python3-pep517', 'python3-pip']) From 4da2bd76337ed49e290f03f43a413c61446e23a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 7 Feb 2021 23:55:21 +0000 Subject: [PATCH 24/83] Finish splitting build systems. --- ognibuild/buildsystem.py | 169 ++++++++++++++++++++++++++++++++++++++- ognibuild/dist.py | 116 --------------------------- 2 files changed, 168 insertions(+), 117 deletions(-) diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 31e8b41..e963db3 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -18,10 +18,11 @@ import logging +import re import os from . 
import shebang_binary -from .apt import AptManager +from .apt import AptManager, UnidentifiedError from .fix_build import run_with_build_fixer @@ -160,6 +161,148 @@ class PyProject(BuildSystem): raise AssertionError('no supported section in pyproject.toml') +class SetupCfg(BuildSystem): + + def dist(self): + apt = AptManager(self.session) + apt.install(['python3-pep517', 'python3-pip']) + self.session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) + + +class NpmPackage(BuildSystem): + + def dist(self): + apt = AptManager(self.session) + apt.install(['npm']) + run_with_build_fixer(self.session, ['npm', 'pack']) + + +class Waf(BuildSystem): + + def dist(self): + apt = AptManager(self.session) + apt.install(['python3']) + run_with_build_fixer(self.session, ['./waf', 'dist']) + + +class Gem(BuildSystem): + + def dist(self): + apt = AptManager(self.session) + apt.install(['gem2deb']) + gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] + if len(gemfiles) > 1: + logging.warning('More than one gemfile. 
Trying the first?') + run_with_build_fixer(self.session, ['gem2tgz', gemfiles[0]]) + + +class DistInkt(BuildSystem): + + def dist(self): + apt = AptManager(self.session) + apt.install(['libdist-inkt-perl']) + with open('dist.ini', 'rb') as f: + for line in f: + if not line.startswith(b';;'): + continue + try: + (key, value) = line[2:].split(b'=', 1) + except ValueError: + continue + if (key.strip() == b'class' and + value.strip().startswith(b"'Dist::Inkt")): + logging.info( + 'Found Dist::Inkt section in dist.ini, ' + 'assuming distinkt.') + # TODO(jelmer): install via apt if possible + self.session.check_call( + ['cpan', 'install', value.decode().strip("'")], + user='root') + run_with_build_fixer(self.session, ['distinkt-dist']) + return + # Default to invoking Dist::Zilla + logging.info('Found dist.ini, assuming dist-zilla.') + apt.install(['libdist-zilla-perl']) + run_with_build_fixer(self.session, ['dzil', 'build', '--in', '..']) + + +class Make(BuildSystem): + + def setup(self): + apt = AptManager(self.session) + if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): + apt.install(['perl']) + run_with_build_fixer(self.session, ['perl', 'Makefile.PL']) + + if not os.path.exists('Makefile') and not os.path.exists('configure'): + if os.path.exists('autogen.sh'): + if shebang_binary('autogen.sh') is None: + run_with_build_fixer( + self.session, ['/bin/sh', './autogen.sh']) + try: + run_with_build_fixer( + self.session, ['./autogen.sh']) + except UnidentifiedError as e: + if ("Gnulib not yet bootstrapped; " + "run ./bootstrap instead.\n" in e.lines): + run_with_build_fixer(self.session, ["./bootstrap"]) + run_with_build_fixer(self.session, ['./autogen.sh']) + else: + raise + + elif (os.path.exists('configure.ac') or + os.path.exists('configure.in')): + apt.install([ + 'autoconf', 'automake', 'gettext', 'libtool', + 'gnu-standards']) + run_with_build_fixer(self.session, ['autoreconf', '-i']) + + if not os.path.exists('Makefile') and 
os.path.exists('configure'): + self.session.check_call(['./configure']) + + def dist(self): + self.setup() + apt = AptManager(self.session) + apt.install(['make']) + try: + run_with_build_fixer(self.session, ['make', 'dist']) + except UnidentifiedError as e: + if ("make: *** No rule to make target 'dist'. Stop.\n" + in e.lines): + pass + elif ("make[1]: *** No rule to make target 'dist'. Stop.\n" + in e.lines): + pass + elif ("Reconfigure the source tree " + "(via './config' or 'perl Configure'), please.\n" + ) in e.lines: + run_with_build_fixer(self.session, ['./config']) + run_with_build_fixer(self.session, ['make', 'dist']) + elif ( + "Please try running 'make manifest' and then run " + "'make dist' again.\n" in e.lines): + run_with_build_fixer(self.session, ['make', 'manifest']) + run_with_build_fixer(self.session, ['make', 'dist']) + elif "Please run ./configure first\n" in e.lines: + run_with_build_fixer(self.session, ['./configure']) + run_with_build_fixer(self.session, ['make', 'dist']) + elif any([re.match( + r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' + r'Run \'./configure \[options\]\' and retry. 
Stop.\n', + line) for line in e.lines]): + run_with_build_fixer(self.session, ['./configure']) + run_with_build_fixer(self.session, ['make', 'dist']) + elif any([re.match( + r'Problem opening MANIFEST: No such file or directory ' + r'at .* line [0-9]+\.', line) for line in e.lines]): + run_with_build_fixer(self.session, ['make', 'manifest']) + run_with_build_fixer(self.session, ['make', 'dist']) + else: + raise + else: + return + + def detect_buildsystems(session): """Detect build systems.""" if os.path.exists('package.xml'): @@ -173,3 +316,27 @@ def detect_buildsystems(session): if os.path.exists('pyproject.toml'): logging.info('Found pyproject.toml, assuming python project.') yield PyProject(session) + + if os.path.exists('setup.cfg'): + logging.info('Found setup.cfg, assuming python project.') + yield SetupCfg(session) + + if os.path.exists('package.json'): + logging.info('Found package.json, assuming node package.') + yield NpmPackage(session) + + if os.path.exists('waf'): + logging.info('Found waf, assuming waf package.') + yield Waf(session) + + gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] + if gemfiles: + yield Gem(session) + + if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): + yield DistInkt(session) + + if any([os.path.exists(p) for p in [ + 'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac', + 'configure.in']]): + yield Make(session) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index dcab9b1..247f5c1 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -82,122 +82,6 @@ def run_dist(session): buildsystem.dist() return - if os.path.exists('setup.cfg'): - logging.info('Found setup.cfg, assuming python project.') - apt.install(['python3-pep517', 'python3-pip']) - session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) - return - - if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): - apt.install(['libdist-inkt-perl']) - with open('dist.ini', 'rb') as f: - for line in f: - 
if not line.startswith(b';;'): - continue - try: - (key, value) = line[2:].split(b'=', 1) - except ValueError: - continue - if (key.strip() == b'class' and - value.strip().startswith(b"'Dist::Inkt")): - logging.info( - 'Found Dist::Inkt section in dist.ini, ' - 'assuming distinkt.') - # TODO(jelmer): install via apt if possible - session.check_call( - ['cpan', 'install', value.decode().strip("'")], - user='root') - run_with_build_fixer(session, ['distinkt-dist']) - return - # Default to invoking Dist::Zilla - logging.info('Found dist.ini, assuming dist-zilla.') - apt.install(['libdist-zilla-perl']) - run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) - return - - if os.path.exists('package.json'): - apt.install(['npm']) - run_with_build_fixer(session, ['npm', 'pack']) - return - - gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] - if gemfiles: - apt.install(['gem2deb']) - if len(gemfiles) > 1: - logging.warning('More than one gemfile. Trying the first?') - run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) - return - - if os.path.exists('waf'): - apt.install(['python3']) - run_with_build_fixer(session, ['./waf', 'dist']) - return - - if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): - apt.install(['perl']) - run_with_build_fixer(session, ['perl', 'Makefile.PL']) - - if not os.path.exists('Makefile') and not os.path.exists('configure'): - if os.path.exists('autogen.sh'): - if shebang_binary('autogen.sh') is None: - run_with_build_fixer(session, ['/bin/sh', './autogen.sh']) - try: - run_with_build_fixer(session, ['./autogen.sh']) - except UnidentifiedError as e: - if ("Gnulib not yet bootstrapped; " - "run ./bootstrap instead.\n" in e.lines): - run_with_build_fixer(session, ["./bootstrap"]) - run_with_build_fixer(session, ['./autogen.sh']) - else: - raise - - elif os.path.exists('configure.ac') or os.path.exists('configure.in'): - apt.install([ - 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) 
- run_with_build_fixer(session, ['autoreconf', '-i']) - - if not os.path.exists('Makefile') and os.path.exists('configure'): - session.check_call(['./configure']) - - if os.path.exists('Makefile'): - apt.install(['make']) - try: - run_with_build_fixer(session, ['make', 'dist']) - except UnidentifiedError as e: - if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: - pass - elif ("make[1]: *** No rule to make target 'dist'. Stop.\n" - in e.lines): - pass - elif ("Reconfigure the source tree " - "(via './config' or 'perl Configure'), please.\n" - ) in e.lines: - run_with_build_fixer(session, ['./config']) - run_with_build_fixer(session, ['make', 'dist']) - elif ( - "Please try running 'make manifest' and then run " - "'make dist' again.\n" in e.lines): - run_with_build_fixer(session, ['make', 'manifest']) - run_with_build_fixer(session, ['make', 'dist']) - elif "Please run ./configure first\n" in e.lines: - run_with_build_fixer(session, ['./configure']) - run_with_build_fixer(session, ['make', 'dist']) - elif any([re.match( - r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' - r'Run \'./configure \[options\]\' and retry. Stop.\n', - line) for line in e.lines]): - run_with_build_fixer(session, ['./configure']) - run_with_build_fixer(session, ['make', 'dist']) - elif any([re.match( - r'Problem opening MANIFEST: No such file or directory ' - r'at .* line [0-9]+\.', line) for line in e.lines]): - run_with_build_fixer(session, ['make', 'manifest']) - run_with_build_fixer(session, ['make', 'dist']) - else: - raise - else: - return - raise NoBuildToolsFound() From 83c3a7bd0dbbe7b9f88bc58e54ca374a2b4ccd80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 02:25:34 +0000 Subject: [PATCH 25/83] Drop unnecessary git deps. 
--- ognibuild/build.py | 4 ---- ognibuild/clean.py | 4 ---- ognibuild/dist.py | 35 ++++------------------------------- ognibuild/install.py | 1 - ognibuild/test.py | 4 ---- 5 files changed, 4 insertions(+), 44 deletions(-) diff --git a/ognibuild/build.py b/ognibuild/build.py index 405a5b4..b582b11 100644 --- a/ognibuild/build.py +++ b/ognibuild/build.py @@ -15,14 +15,10 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .apt import AptManager from .buildsystem import detect_buildsystems, NoBuildToolsFound def run_build(session): - apt = AptManager(session) - apt.install(['git']) - # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() diff --git a/ognibuild/clean.py b/ognibuild/clean.py index 80bfaa0..67cf27a 100644 --- a/ognibuild/clean.py +++ b/ognibuild/clean.py @@ -15,14 +15,10 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .apt import AptManager from .buildsystem import detect_buildsystems, NoBuildToolsFound def run_clean(session): - apt = AptManager(session) - apt.install(['git']) - # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 247f5c1..8499e0c 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -18,11 +18,10 @@ import errno import logging import os -import re import shutil import sys import tempfile -from typing import Optional, List, Tuple +from typing import Optional from debian.deb822 import Deb822 @@ -35,11 +34,8 @@ from buildlog_consultant.sbuild import ( NoSpaceOnDevice, ) -from . import DetailedFailure, shebang_binary -from .apt import AptManager, UnidentifiedError -from .fix_build import run_with_build_fixer +from . 
import DetailedFailure from .buildsystem import detect_buildsystems, NoBuildToolsFound -from .session import Session from .session.schroot import SchrootSession from .vcs import dupe_vcs_tree, export_vcs_tree @@ -48,32 +44,7 @@ class DistNoTarball(Exception): """Dist operation did not create a tarball.""" -# TODO(jelmer): move this to debian/ -def satisfy_build_deps(session: Session, tree): - source = Deb822(tree.get_file('debian/control')) - deps = [] - for name in ['Build-Depends', 'Build-Depends-Indep', 'Build-Depends-Arch']: - try: - deps.append(source[name].strip().strip(',')) - except KeyError: - pass - for name in ['Build-Conflicts', 'Build-Conflicts-Indep', - 'Build-Conflicts-Arch']: - try: - deps.append('Conflicts: ' + source[name]) - except KeyError: - pass - deps = [ - dep.strip().strip(',') - for dep in deps] - apt = AptManager(session) - apt.satisfy(deps) - - def run_dist(session): - apt = AptManager(session) - apt.install(['git']) - # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() @@ -137,6 +108,7 @@ def create_dist_schroot( subdir = 'package' with SchrootSession(chroot) as session: if packaging_tree is not None: + from .debian import satisfy_build_deps satisfy_build_deps(session, packaging_tree) build_dir = os.path.join(session.location, 'build') @@ -175,6 +147,7 @@ if __name__ == '__main__': import argparse import breezy.bzr # noqa: F401 import breezy.git # noqa: F401 + from breezy.export import export parser = argparse.ArgumentParser() parser.add_argument( diff --git a/ognibuild/install.py b/ognibuild/install.py index 27ec180..b2c3922 100644 --- a/ognibuild/install.py +++ b/ognibuild/install.py @@ -15,7 +15,6 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .apt import AptManager from .buildsystem import detect_buildsystems, NoBuildToolsFound diff --git a/ognibuild/test.py b/ognibuild/test.py index 10c90d7..eb60e40 100644 --- a/ognibuild/test.py +++ b/ognibuild/test.py @@ -15,14 +15,10 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .apt import AptManager from .buildsystem import detect_buildsystems, NoBuildToolsFound def run_test(session): - apt = AptManager(session) - apt.install(['git']) - # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() From 26548c45e9fba754bf64b040e690bf69530dbc37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 02:27:24 +0000 Subject: [PATCH 26/83] Factor out setup. 
--- ognibuild/buildsystem.py | 52 +++++++++++++++++++++++++--------------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index e963db3..fb1300c 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -54,35 +54,34 @@ class BuildSystem(object): class Pear(BuildSystem): - def dist(self): + def setup(self): apt = AptManager(self.session) apt.install(['php-pear']) + + def dist(self): + self.setup() run_with_build_fixer(self.session, ['pear', 'package']) def test(self): - apt = AptManager(self.session) - apt.install(['php-pear']) + self.setup() run_with_build_fixer(self.session, ['pear', 'run-tests']) def build(self): - apt = AptManager(self.session) - apt.install(['php-pear']) + self.setup() run_with_build_fixer(self.session, ['pear', 'build']) def clean(self): - apt = AptManager(self.session) - apt.install(['php-pear']) + self.setup() # TODO def install(self): - apt = AptManager(self.session) - apt.install(['php-pear']) + self.setup() run_with_build_fixer(self.session, ['pear', 'install']) class SetupPy(BuildSystem): - def prereqs(self): + def setup(self): apt = AptManager(self.session) apt.install(['python3', 'python3-pip']) with open('setup.py', 'r') as f: @@ -103,19 +102,19 @@ class SetupPy(BuildSystem): # TODO(jelmer): Install setup_requires def test(self): - self.prereqs() + self.setup() self._run_setup(['test']) def dist(self): - self.prereqs() + self.setup() self._run_setup(['sdist']) def clean(self): - self.prereqs() + self.setup() self._run_setup(['clean']) def install(self): - self.prereqs() + self.setup() self._run_setup(['install']) def _run_setup(self, args): @@ -163,33 +162,44 @@ class PyProject(BuildSystem): class SetupCfg(BuildSystem): - def dist(self): + def setup(self): apt = AptManager(self.session) apt.install(['python3-pep517', 'python3-pip']) + + def dist(self): self.session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) class NpmPackage(BuildSystem): - 
def dist(self): + def setup(self): apt = AptManager(self.session) apt.install(['npm']) + + def dist(self): + self.setup() run_with_build_fixer(self.session, ['npm', 'pack']) class Waf(BuildSystem): - def dist(self): + def setup(self): apt = AptManager(self.session) apt.install(['python3']) + + def dist(self): + self.setup() run_with_build_fixer(self.session, ['./waf', 'dist']) class Gem(BuildSystem): - def dist(self): + def setup(self): apt = AptManager(self.session) apt.install(['gem2deb']) + + def dist(self): + self.setup() gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] if len(gemfiles) > 1: logging.warning('More than one gemfile. Trying the first?') @@ -198,9 +208,13 @@ class Gem(BuildSystem): class DistInkt(BuildSystem): - def dist(self): + def setup(self): apt = AptManager(self.session) apt.install(['libdist-inkt-perl']) + + def dist(self): + self.setup() + apt = AptManager(self.session) with open('dist.ini', 'rb') as f: for line in f: if not line.startswith(b';;'): From d47bd99e21ada6553758ce3e3cd82f6bcd1e20bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 02:27:57 +0000 Subject: [PATCH 27/83] Factor out satisfy_build_depends. --- ognibuild/debian/__init__.py | 43 ++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 ognibuild/debian/__init__.py diff --git a/ognibuild/debian/__init__.py b/ognibuild/debian/__init__.py new file mode 100644 index 0000000..3fffa9a --- /dev/null +++ b/ognibuild/debian/__init__.py @@ -0,0 +1,43 @@ +#!/usr/bin/python +# Copyright (C) 2018 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from debian.deb822 import Deb822 + +from .apt import AptManager +from .session import Session + + +# TODO(jelmer): move this to debian/ +def satisfy_build_deps(session: Session, tree): + source = Deb822(tree.get_file('debian/control')) + deps = [] + for name in ['Build-Depends', 'Build-Depends-Indep', 'Build-Depends-Arch']: + try: + deps.append(source[name].strip().strip(',')) + except KeyError: + pass + for name in ['Build-Conflicts', 'Build-Conflicts-Indep', + 'Build-Conflicts-Arch']: + try: + deps.append('Conflicts: ' + source[name]) + except KeyError: + pass + deps = [ + dep.strip().strip(',') + for dep in deps] + apt = AptManager(session) + apt.satisfy(deps) From ab99144388fe3890a4478ac9ef60ce7cda97f065 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 02:29:17 +0000 Subject: [PATCH 28/83] Drop stubs. 
--- ognibuild/__init__.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index c0a0b30..3822fbe 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -51,23 +51,3 @@ def note(m): def warning(m): sys.stderr.write('WARNING: %s\n' % m) - - -def run_with_build_fixer(session, args): - session.check_call(args) - - -def run_build(session): - raise NotImplementedError - - -def run_clean(session): - raise NotImplementedError - - -def run_test(session): - raise NotImplementedError - - -def run_install(session): - raise NotImplementedError From 9870c12a33c2eb87320d718b3d47b7313c21c4a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 17:35:08 +0000 Subject: [PATCH 29/83] Fix compatibility with newer buildlog-consultant. --- ognibuild/debian/__init__.py | 4 ++-- ognibuild/debian/fix_build.py | 12 ++++++++---- ognibuild/dist.py | 15 ++++++++++++++- ognibuild/tests/test_debian_fix_build.py | 2 +- 4 files changed, 25 insertions(+), 8 deletions(-) diff --git a/ognibuild/debian/__init__.py b/ognibuild/debian/__init__.py index 3fffa9a..9d64ab8 100644 --- a/ognibuild/debian/__init__.py +++ b/ognibuild/debian/__init__.py @@ -17,8 +17,8 @@ from debian.deb822 import Deb822 -from .apt import AptManager -from .session import Session +from ..apt import AptManager +from ..session import Session # TODO(jelmer): move this to debian/ diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 9aa728d..16b4e92 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -68,8 +68,8 @@ from silver_platter.debian import ( from breezy.plugins.debian.util import get_build_architecture from .build import attempt_build -from buildlog_consultant.sbuild import ( - Problem, +from buildlog_consultant import Problem +from buildlog_consultant.common import ( MissingConfigStatusInput, MissingPythonModule, MissingPythonDistribution, @@ -99,13 +99,17 
@@ from buildlog_consultant.sbuild import ( MissingXfceDependency, MissingHaskellDependencies, NeedPgBuildExtUpdateControl, - SbuildFailure, DhAddonLoadFailure, - AptFetchFailure, MissingMavenArtifacts, GnomeCommonMissing, MissingGnomeCommonDependency, ) +from buildlog_consultant.apt import ( + AptFetchFailure, + ) +from buildlog_consultant.sbuild import ( + SbuildFailure, + ) DEFAULT_MAX_ITERATIONS = 10 diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 8499e0c..a7130da 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -30,7 +30,20 @@ from breezy.workingtree import WorkingTree from breezy.plugins.debian.repack_tarball import get_filetype -from buildlog_consultant.sbuild import ( +from . import apt, DetailedFailure, shebang_binary +from .buildsystem import detect_buildsystems, NoBuildToolsFound +from .session import run_with_tee, Session +from .session.schroot import SchrootSession +from .debian.fix_build import ( + DependencyContext, + resolve_error, + APT_FIXERS, + ) +from buildlog_consultant.common import ( + find_build_failure_description, + Problem, + MissingPerlModule, + MissingCommand, NoSpaceOnDevice, ) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 7b0fa18..68e1c63 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -19,7 +19,7 @@ import re from debian.deb822 import Deb822 -from buildlog_consultant.sbuild import ( +from buildlog_consultant.common import ( MissingCommand, MissingGoPackage, MissingPerlModule, From 68a4964d5961ce0437c8cfddf1e9aaed22ccb28e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 17:48:12 +0000 Subject: [PATCH 30/83] Fix import. 
--- ognibuild/fix_build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index af4f338..899c34f 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -18,7 +18,7 @@ import logging from typing import List, Tuple, Callable, Type -from buildlog_consultant.sbuild import ( +from buildlog_consultant.common import ( find_build_failure_description, Problem, MissingPerlModule, From dc29ed8b1d94a42f287502d8492ec3859a3deb91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 18:06:21 +0000 Subject: [PATCH 31/83] Add Session.exists / Session.scandir. --- ognibuild/buildsystem.py | 36 ++++++++++++++++++----------------- ognibuild/session/__init__.py | 7 +++++++ ognibuild/session/plain.py | 7 +++++++ ognibuild/session/schroot.py | 14 ++++++++++++++ 4 files changed, 47 insertions(+), 17 deletions(-) diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index fb1300c..784376b 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -19,7 +19,6 @@ import logging import re -import os from . import shebang_binary from .apt import AptManager, UnidentifiedError @@ -200,7 +199,8 @@ class Gem(BuildSystem): def dist(self): self.setup() - gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] + gemfiles = [entry.name for entry in self.session.scandir('.') + if entry.name.endswith('.gem')] if len(gemfiles) > 1: logging.warning('More than one gemfile. 
Trying the first?') run_with_build_fixer(self.session, ['gem2tgz', gemfiles[0]]) @@ -244,12 +244,12 @@ class Make(BuildSystem): def setup(self): apt = AptManager(self.session) - if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'): + if self.session.exists('Makefile.PL') and not self.session.exists('Makefile'): apt.install(['perl']) run_with_build_fixer(self.session, ['perl', 'Makefile.PL']) - if not os.path.exists('Makefile') and not os.path.exists('configure'): - if os.path.exists('autogen.sh'): + if not self.session.exists('Makefile') and not self.session.exists('configure'): + if self.session.exists('autogen.sh'): if shebang_binary('autogen.sh') is None: run_with_build_fixer( self.session, ['/bin/sh', './autogen.sh']) @@ -264,14 +264,14 @@ class Make(BuildSystem): else: raise - elif (os.path.exists('configure.ac') or - os.path.exists('configure.in')): + elif (self.session.exists('configure.ac') or + self.session.exists('configure.in')): apt.install([ 'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards']) run_with_build_fixer(self.session, ['autoreconf', '-i']) - if not os.path.exists('Makefile') and os.path.exists('configure'): + if not self.session.exists('Makefile') and self.session.exists('configure'): self.session.check_call(['./configure']) def dist(self): @@ -319,38 +319,40 @@ class Make(BuildSystem): def detect_buildsystems(session): """Detect build systems.""" - if os.path.exists('package.xml'): + if session.exists('package.xml'): logging.info('Found package.xml, assuming pear package.') yield Pear(session) - if os.path.exists('setup.py'): + if session.exists('setup.py'): logging.info('Found setup.py, assuming python project.') yield SetupPy(session) - if os.path.exists('pyproject.toml'): + if session.exists('pyproject.toml'): logging.info('Found pyproject.toml, assuming python project.') yield PyProject(session) - if os.path.exists('setup.cfg'): + if session.exists('setup.cfg'): logging.info('Found setup.cfg, assuming python 
project.') yield SetupCfg(session) - if os.path.exists('package.json'): + if session.exists('package.json'): logging.info('Found package.json, assuming node package.') yield NpmPackage(session) - if os.path.exists('waf'): + if session.exists('waf'): logging.info('Found waf, assuming waf package.') yield Waf(session) - gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')] + gemfiles = [ + entry.name for entry in session.scandir('.') + if entry.name.endswith('.gem')] if gemfiles: yield Gem(session) - if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'): + if session.exists('dist.ini') and not session.exists('Makefile.PL'): yield DistInkt(session) - if any([os.path.exists(p) for p in [ + if any([session.exists(p) for p in [ 'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac', 'configure.in']]): yield Make(session) diff --git a/ognibuild/session/__init__.py b/ognibuild/session/__init__.py index e78510b..f8feaf0 100644 --- a/ognibuild/session/__init__.py +++ b/ognibuild/session/__init__.py @@ -63,6 +63,13 @@ class Session(object): """Create the user's home directory.""" raise NotImplementedError(self.create_home) + def exists(self, path: str) -> bool: + """Check whether a path exists in the chroot.""" + raise NotImplementedError(self.exists) + + def scandir(self, path: str): + raise NotImplementedError(self.scandir) + class SessionSetupFailure(Exception): """Session failed to be set up.""" diff --git a/ognibuild/session/plain.py b/ognibuild/session/plain.py index 1b4fbfb..1f71b59 100644 --- a/ognibuild/session/plain.py +++ b/ognibuild/session/plain.py @@ -18,6 +18,7 @@ from . 
import Session +import os import subprocess @@ -35,3 +36,9 @@ class PlainSession(Session): def Popen(self, args, stdout=None, stderr=None, user=None, cwd=None): return subprocess.Popen( args, stdout=stdout, stderr=stderr, cwd=cwd) + + def exists(self, path): + return os.path.exists(path) + + def scandir(self, path): + return os.scandir(path) diff --git a/ognibuild/session/schroot.py b/ognibuild/session/schroot.py index 2a7388c..4e75a91 100644 --- a/ognibuild/session/schroot.py +++ b/ognibuild/session/schroot.py @@ -16,6 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import logging +import os import shlex import subprocess @@ -127,3 +128,16 @@ class SchrootSession(Session): logging.info('Creating directory %s', home) self.check_call(['mkdir', '-p', home], cwd='/', user='root') self.check_call(['chown', user, home], cwd='/', user='root') + + def _fullpath(self, path: str) -> str: + return os.path.join( + self.location, + os.path.join(self._cwd, path).lstrip('/')) + + def exists(self, path: str) -> bool: + fullpath = self._fullpath(path) + return os.path.exists(fullpath) + + def scandir(self, path: str): + fullpath = self._fullpath(path) + return os.scandir(fullpath) From 3fe7cb2a7eb54245180b08bd851a7c1923f58749 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 01:54:29 +0000 Subject: [PATCH 32/83] Avoid use of breezy-debian. 
--- .github/workflows/pythonpackage.yml | 5 ++-- ognibuild/apt.py | 2 +- ognibuild/debian/build.py | 20 ++++++++++--- ognibuild/debian/fix_build.py | 1 - ognibuild/dist.py | 44 ++++++++++++++++------------ ognibuild/tests/test_debian_build.py | 14 +++++++-- 6 files changed, 55 insertions(+), 31 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 31bc00e..8705708 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,13 +20,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip flake8 cython - mkdir -p $HOME/.config/breezy/plugins - bzr branch lp:brz-debian $HOME/.config/breezy/plugins/debian + python -m pip install --upgrade pip flake8 cython mypy python setup.py develop - name: Style checks run: | python -m flake8 + python -m mypy - name: Typing checks run: | pip install -U mypy diff --git a/ognibuild/apt.py b/ognibuild/apt.py index 00ca1a7..acd52b4 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -21,7 +21,7 @@ from typing import List import apt_pkg import os -from buildlog_consultant.sbuild import ( +from buildlog_consultant.apt import ( find_apt_get_failure, ) diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index 5445278..f82f1d1 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -34,10 +34,6 @@ from debian.changelog import Changelog from debmutate.changelog import get_maintainer, format_datetime from breezy import osutils -from breezy.plugins.debian.util import ( - changes_filename, - get_build_architecture, - ) from breezy.mutabletree import MutableTree from silver_platter.debian import ( BuildFailedError, @@ -57,6 +53,22 @@ class MissingChangesFile(Exception): self.filename = filename +def changes_filename(package, version, arch): + non_epoch_version = version.upstream_version + if version.debian_version is not None: + non_epoch_version += 
"-%s" % version.debian_version + return "%s_%s_%s.changes" % (package, non_epoch_version, arch) + + +def get_build_architecture(): + try: + return subprocess.check_output( + ['dpkg-architecture', '-qDEB_BUILD_ARCH']).strip().decode() + except subprocess.CalledProcessError as e: + raise Exception( + "Could not find the build architecture: %s" % e) + + def add_dummy_changelog_entry( tree: MutableTree, subpath: str, suffix: str, suite: str, message: str, timestamp=None, maintainer=None): diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 16b4e92..3e1d7b1 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -66,7 +66,6 @@ from silver_platter.debian import ( DEFAULT_BUILDER, ) -from breezy.plugins.debian.util import get_build_architecture from .build import attempt_build from buildlog_consultant import Problem from buildlog_consultant.common import ( diff --git a/ognibuild/dist.py b/ognibuild/dist.py index a7130da..1448f36 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -28,17 +28,8 @@ from debian.deb822 import Deb822 from breezy.tree import Tree from breezy.workingtree import WorkingTree -from breezy.plugins.debian.repack_tarball import get_filetype - -from . import apt, DetailedFailure, shebang_binary +from . import DetailedFailure from .buildsystem import detect_buildsystems, NoBuildToolsFound -from .session import run_with_tee, Session -from .session.schroot import SchrootSession -from .debian.fix_build import ( - DependencyContext, - resolve_error, - APT_FIXERS, - ) from buildlog_consultant.common import ( find_build_failure_description, Problem, @@ -47,12 +38,29 @@ from buildlog_consultant.common import ( NoSpaceOnDevice, ) -from . 
import DetailedFailure -from .buildsystem import detect_buildsystems, NoBuildToolsFound from .session.schroot import SchrootSession from .vcs import dupe_vcs_tree, export_vcs_tree +SUPPORTED_DIST_EXTENSIONS = [ + ".tar.gz", + ".tgz", + ".tar.bz2", + ".tar.xz", + ".tar.lzma", + ".tbz2", + ".tar", + ".zip", + ] + + +def is_dist_file(fn): + for ext in SUPPORTED_DIST_EXTENSIONS: + if fn.endswith(ext): + return True + return False + + class DistNoTarball(Exception): """Dist operation did not create a tarball.""" @@ -83,18 +91,16 @@ class DistCatcher(object): def find_files(self): new_files = os.listdir(self.export_directory) diff_files = set(new_files) - set(self.existing_files) - diff = set([n for n in diff_files if get_filetype(n) is not None]) + diff = set([n for n in diff_files if is_dist_file(n)]) if len(diff) == 1: fn = diff.pop() logging.info('Found tarball %s in package directory.', fn) self.files.append(os.path.join(self.export_directory, fn)) return fn - if 'dist' in diff_files: - for entry in os.scandir( - os.path.join(self.export_directory, 'dist')): - if get_filetype(entry.name) is not None: - logging.info( - 'Found tarball %s in dist directory.', entry.name) + if "dist" in diff_files: + for entry in os.scandir(os.path.join(self.export_directory, "dist")): + if is_dist_file(entry.name): + logging.info("Found tarball %s in dist directory.", entry.name) self.files.append(entry.path) return entry.name logging.info('No tarballs found in dist directory.') diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py index da2541a..1203c3d 100644 --- a/ognibuild/tests/test_debian_build.py +++ b/ognibuild/tests/test_debian_build.py @@ -16,9 +16,9 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import datetime -from ..debian.build import add_dummy_changelog_entry +from ..debian.build import add_dummy_changelog_entry, get_build_architecture -from breezy.tests import TestCaseWithTransport +from breezy.tests 
import TestCaseWithTransport, TestCase class AddDummyChangelogEntryTests(TestCaseWithTransport): @@ -105,4 +105,12 @@ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium * Initial release. (Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""", 'debian/changelog') +""", + "debian/changelog", + ) + + +class BuildArchitectureTests(TestCase): + + def test_is_str(self): + self.assertIsInstance(get_build_architecture(), str) From 43dfe81f66998c1b89d9065fc0fbc72885b27a56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 02:19:06 +0000 Subject: [PATCH 33/83] Fix formatting. --- .flake8 | 5 + ognibuild/__init__.py | 15 +- ognibuild/__main__.py | 32 +- ognibuild/apt.py | 26 +- ognibuild/buildsystem.py | 303 ++++----- ognibuild/debian/__init__.py | 15 +- ognibuild/debian/build.py | 167 +++-- ognibuild/debian/fix_build.py | 783 +++++++++++++---------- ognibuild/dist.py | 79 +-- ognibuild/fix_build.py | 41 +- ognibuild/session/__init__.py | 37 +- ognibuild/session/plain.py | 5 +- ognibuild/session/schroot.py | 111 ++-- ognibuild/tests/__init__.py | 6 +- ognibuild/tests/test_debian_build.py | 94 ++- ognibuild/tests/test_debian_fix_build.py | 206 +++--- ognibuild/vcs.py | 14 +- 17 files changed, 1085 insertions(+), 854 deletions(-) create mode 100644 .flake8 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..bde4be3 --- /dev/null +++ b/.flake8 @@ -0,0 +1,5 @@ +[flake8] +extend-ignore = E203, E266, E501, W293, W291 +max-line-length = 88 +max-complexity = 18 +select = B,C,E,F,W,T4,B9 diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 3822fbe..f9e1a36 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -21,11 +21,10 @@ import stat import sys -DEFAULT_PYTHON = 'python3' +DEFAULT_PYTHON = "python3" class DetailedFailure(Exception): - def __init__(self, retcode, argv, error): self.retcode = retcode self.argv = argv @@ -35,19 +34,19 @@ class DetailedFailure(Exception): def 
shebang_binary(p): if not (os.stat(p).st_mode & stat.S_IEXEC): return None - with open(p, 'rb') as f: + with open(p, "rb") as f: firstline = f.readline() - if not firstline.startswith(b'#!'): + if not firstline.startswith(b"#!"): return None - args = firstline[2:].split(b' ') - if args[0] in (b'/usr/bin/env', b'env'): + args = firstline[2:].split(b" ") + if args[0] in (b"/usr/bin/env", b"env"): return os.path.basename(args[1].decode()).strip() return os.path.basename(args[0].decode()).strip() def note(m): - sys.stdout.write('%s\n' % m) + sys.stdout.write("%s\n" % m) def warning(m): - sys.stderr.write('WARNING: %s\n' % m) + sys.stderr.write("WARNING: %s\n" % m) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 32a1739..c2b3939 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -28,40 +28,44 @@ from .test import run_test def main(): import argparse + parser = argparse.ArgumentParser() parser.add_argument( - 'subcommand', type=str, - choices=['dist', 'build', 'clean', 'test', 'install']) + "subcommand", type=str, choices=["dist", "build", "clean", "test", "install"] + ) parser.add_argument( - '--directory', '-d', type=str, help='Directory for project.', - default='.') + "--directory", "-d", type=str, help="Directory for project.", default="." 
+ ) + parser.add_argument("--schroot", type=str, help="schroot to run in.") parser.add_argument( - '--schroot', type=str, help='schroot to run in.') - parser.add_argument( - '--resolve', choices=['explain', 'apt', 'native'], - help='What to do about missing dependencies') + "--resolve", + choices=["explain", "apt", "native"], + help="What to do about missing dependencies", + ) args = parser.parse_args() if args.schroot: from .session.schroot import SchrootSession + session = SchrootSession(args.schroot) else: from .session.plain import PlainSession + session = PlainSession() with session: os.chdir(args.directory) try: - if args.subcommand == 'dist': + if args.subcommand == "dist": run_dist(session) - if args.subcommand == 'build': + if args.subcommand == "build": run_build(session) - if args.subcommand == 'clean': + if args.subcommand == "clean": run_clean(session) - if args.subcommand == 'install': + if args.subcommand == "install": run_install(session) - if args.subcommand == 'test': + if args.subcommand == "test": run_test(session) except NoBuildToolsFound: - note('No build tools found.') + note("No build tools found.") return 1 return 0 diff --git a/ognibuild/apt.py b/ognibuild/apt.py index acd52b4..3f849c2 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -23,14 +23,13 @@ import apt_pkg import os from buildlog_consultant.apt import ( find_apt_get_failure, - ) +) from . 
import DetailedFailure from .session import Session, run_with_tee class UnidentifiedError(Exception): - def __init__(self, retcode, argv, lines, secondary=None): self.retcode = retcode self.argv = argv @@ -40,17 +39,16 @@ class UnidentifiedError(Exception): def run_apt(session: Session, args: List[str]) -> None: """Run apt.""" - args = ['apt', '-y'] + args - retcode, lines = run_with_tee(session, args, cwd='/', user='root') + args = ["apt", "-y"] + args + retcode, lines = run_with_tee(session, args, cwd="/", user="root") if retcode == 0: return offset, line, error = find_apt_get_failure(lines) if error is not None: raise DetailedFailure(retcode, args, error) if line is not None: - raise UnidentifiedError( - retcode, args, lines, secondary=(offset, line)) - while lines and lines[-1] == '': + raise UnidentifiedError(retcode, args, lines, secondary=(offset, line)) + while lines and lines[-1] == "": lines.pop(-1) raise UnidentifiedError(retcode, args, lines) @@ -63,23 +61,23 @@ class AptManager(object): self.session = session def missing(self, packages): - root = getattr(self.session, 'location', '/') - status_path = os.path.join(root, 'var/lib/dpkg/status') + root = getattr(self.session, "location", "/") + status_path = os.path.join(root, "var/lib/dpkg/status") missing = set(packages) with apt_pkg.TagFile(status_path) as tagf: while missing: tagf.step() if not tagf.section: break - if tagf.section['Package'] in missing: - if tagf.section['Status'] == 'install ok installed': - missing.remove(tagf.section['Package']) + if tagf.section["Package"] in missing: + if tagf.section["Status"] == "install ok installed": + missing.remove(tagf.section["Package"]) return list(missing) def install(self, packages: List[str]) -> None: packages = self.missing(packages) if packages: - run_apt(self.session, ['install'] + packages) + run_apt(self.session, ["install"] + packages) def satisfy(self, deps: List[str]) -> None: - run_apt(self.session, ['satisfy'] + deps) + run_apt(self.session, 
["satisfy"] + deps) diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 784376b..998803e 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -52,22 +52,21 @@ class BuildSystem(object): class Pear(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['php-pear']) + apt.install(["php-pear"]) def dist(self): self.setup() - run_with_build_fixer(self.session, ['pear', 'package']) + run_with_build_fixer(self.session, ["pear", "package"]) def test(self): self.setup() - run_with_build_fixer(self.session, ['pear', 'run-tests']) + run_with_build_fixer(self.session, ["pear", "run-tests"]) def build(self): self.setup() - run_with_build_fixer(self.session, ['pear', 'build']) + run_with_build_fixer(self.session, ["pear", "build"]) def clean(self): self.setup() @@ -75,242 +74,251 @@ class Pear(BuildSystem): def install(self): self.setup() - run_with_build_fixer(self.session, ['pear', 'install']) + run_with_build_fixer(self.session, ["pear", "install"]) class SetupPy(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['python3', 'python3-pip']) - with open('setup.py', 'r') as f: + apt.install(["python3", "python3-pip"]) + with open("setup.py", "r") as f: setup_py_contents = f.read() try: - with open('setup.cfg', 'r') as f: + with open("setup.cfg", "r") as f: setup_cfg_contents = f.read() except FileNotFoundError: - setup_cfg_contents = '' - if 'setuptools' in setup_py_contents: - logging.info('Reference to setuptools found, installing.') - apt.install(['python3-setuptools']) - if ('setuptools_scm' in setup_py_contents or - 'setuptools_scm' in setup_cfg_contents): - logging.info('Reference to setuptools-scm found, installing.') - apt.install(['python3-setuptools-scm', 'git', 'mercurial']) + setup_cfg_contents = "" + if "setuptools" in setup_py_contents: + logging.info("Reference to setuptools found, installing.") + apt.install(["python3-setuptools"]) + if ( + "setuptools_scm" in 
setup_py_contents + or "setuptools_scm" in setup_cfg_contents + ): + logging.info("Reference to setuptools-scm found, installing.") + apt.install(["python3-setuptools-scm", "git", "mercurial"]) # TODO(jelmer): Install setup_requires def test(self): self.setup() - self._run_setup(['test']) + self._run_setup(["test"]) def dist(self): self.setup() - self._run_setup(['sdist']) + self._run_setup(["sdist"]) def clean(self): self.setup() - self._run_setup(['clean']) + self._run_setup(["clean"]) def install(self): self.setup() - self._run_setup(['install']) + self._run_setup(["install"]) def _run_setup(self, args): apt = AptManager(self.session) - interpreter = shebang_binary('setup.py') + interpreter = shebang_binary("setup.py") if interpreter is not None: - if interpreter == 'python3': - apt.install(['python3']) - elif interpreter == 'python2': - apt.install(['python2']) - elif interpreter == 'python': - apt.install(['python']) + if interpreter == "python3": + apt.install(["python3"]) + elif interpreter == "python2": + apt.install(["python2"]) + elif interpreter == "python": + apt.install(["python"]) else: - raise ValueError('Unknown interpreter %r' % interpreter) - apt.install(['python2', 'python3']) - run_with_build_fixer( - self.session, ['./setup.py'] + args) + raise ValueError("Unknown interpreter %r" % interpreter) + apt.install(["python2", "python3"]) + run_with_build_fixer(self.session, ["./setup.py"] + args) else: # Just assume it's Python 3 - apt.install(['python3']) - run_with_build_fixer( - self.session, ['python3', './setup.py'] + args) + apt.install(["python3"]) + run_with_build_fixer(self.session, ["python3", "./setup.py"] + args) class PyProject(BuildSystem): - def load_toml(self): import toml - with open('pyproject.toml', 'r') as pf: + + with open("pyproject.toml", "r") as pf: return toml.load(pf) def dist(self): apt = AptManager(self.session) pyproject = self.load_toml() - if 'poetry' in pyproject.get('tool', []): + if "poetry" in pyproject.get("tool", 
[]): logging.info( - 'Found pyproject.toml with poetry section, ' - 'assuming poetry project.') - apt.install(['python3-venv', 'python3-pip']) - self.session.check_call(['pip3', 'install', 'poetry'], user='root') - self.session.check_call(['poetry', 'build', '-f', 'sdist']) + "Found pyproject.toml with poetry section, " "assuming poetry project." + ) + apt.install(["python3-venv", "python3-pip"]) + self.session.check_call(["pip3", "install", "poetry"], user="root") + self.session.check_call(["poetry", "build", "-f", "sdist"]) return - raise AssertionError('no supported section in pyproject.toml') + raise AssertionError("no supported section in pyproject.toml") class SetupCfg(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['python3-pep517', 'python3-pip']) + apt.install(["python3-pep517", "python3-pip"]) def dist(self): - self.session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) + self.session.check_call(["python3", "-m", "pep517.build", "-s", "."]) class NpmPackage(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['npm']) + apt.install(["npm"]) def dist(self): self.setup() - run_with_build_fixer(self.session, ['npm', 'pack']) + run_with_build_fixer(self.session, ["npm", "pack"]) class Waf(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['python3']) + apt.install(["python3"]) def dist(self): self.setup() - run_with_build_fixer(self.session, ['./waf', 'dist']) + run_with_build_fixer(self.session, ["./waf", "dist"]) class Gem(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['gem2deb']) + apt.install(["gem2deb"]) def dist(self): self.setup() - gemfiles = [entry.name for entry in self.session.scandir('.') - if entry.name.endswith('.gem')] + gemfiles = [ + entry.name + for entry in self.session.scandir(".") + if entry.name.endswith(".gem") + ] if len(gemfiles) > 1: - logging.warning('More than one gemfile. 
Trying the first?') - run_with_build_fixer(self.session, ['gem2tgz', gemfiles[0]]) + logging.warning("More than one gemfile. Trying the first?") + run_with_build_fixer(self.session, ["gem2tgz", gemfiles[0]]) class DistInkt(BuildSystem): - def setup(self): apt = AptManager(self.session) - apt.install(['libdist-inkt-perl']) + apt.install(["libdist-inkt-perl"]) def dist(self): self.setup() apt = AptManager(self.session) - with open('dist.ini', 'rb') as f: + with open("dist.ini", "rb") as f: for line in f: - if not line.startswith(b';;'): + if not line.startswith(b";;"): continue try: - (key, value) = line[2:].split(b'=', 1) + (key, value) = line[2:].split(b"=", 1) except ValueError: continue - if (key.strip() == b'class' and - value.strip().startswith(b"'Dist::Inkt")): + if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"): logging.info( - 'Found Dist::Inkt section in dist.ini, ' - 'assuming distinkt.') + "Found Dist::Inkt section in dist.ini, " "assuming distinkt." + ) # TODO(jelmer): install via apt if possible self.session.check_call( - ['cpan', 'install', value.decode().strip("'")], - user='root') - run_with_build_fixer(self.session, ['distinkt-dist']) + ["cpan", "install", value.decode().strip("'")], user="root" + ) + run_with_build_fixer(self.session, ["distinkt-dist"]) return # Default to invoking Dist::Zilla - logging.info('Found dist.ini, assuming dist-zilla.') - apt.install(['libdist-zilla-perl']) - run_with_build_fixer(self.session, ['dzil', 'build', '--in', '..']) + logging.info("Found dist.ini, assuming dist-zilla.") + apt.install(["libdist-zilla-perl"]) + run_with_build_fixer(self.session, ["dzil", "build", "--in", ".."]) class Make(BuildSystem): - def setup(self): apt = AptManager(self.session) - if self.session.exists('Makefile.PL') and not self.session.exists('Makefile'): - apt.install(['perl']) - run_with_build_fixer(self.session, ['perl', 'Makefile.PL']) + if self.session.exists("Makefile.PL") and not 
self.session.exists("Makefile"): + apt.install(["perl"]) + run_with_build_fixer(self.session, ["perl", "Makefile.PL"]) - if not self.session.exists('Makefile') and not self.session.exists('configure'): - if self.session.exists('autogen.sh'): - if shebang_binary('autogen.sh') is None: - run_with_build_fixer( - self.session, ['/bin/sh', './autogen.sh']) + if not self.session.exists("Makefile") and not self.session.exists("configure"): + if self.session.exists("autogen.sh"): + if shebang_binary("autogen.sh") is None: + run_with_build_fixer(self.session, ["/bin/sh", "./autogen.sh"]) try: - run_with_build_fixer( - self.session, ['./autogen.sh']) + run_with_build_fixer(self.session, ["./autogen.sh"]) except UnidentifiedError as e: - if ("Gnulib not yet bootstrapped; " - "run ./bootstrap instead.\n" in e.lines): + if ( + "Gnulib not yet bootstrapped; " + "run ./bootstrap instead.\n" in e.lines + ): run_with_build_fixer(self.session, ["./bootstrap"]) - run_with_build_fixer(self.session, ['./autogen.sh']) + run_with_build_fixer(self.session, ["./autogen.sh"]) else: raise - elif (self.session.exists('configure.ac') or - self.session.exists('configure.in')): - apt.install([ - 'autoconf', 'automake', 'gettext', 'libtool', - 'gnu-standards']) - run_with_build_fixer(self.session, ['autoreconf', '-i']) + elif self.session.exists("configure.ac") or self.session.exists( + "configure.in" + ): + apt.install( + ["autoconf", "automake", "gettext", "libtool", "gnu-standards"] + ) + run_with_build_fixer(self.session, ["autoreconf", "-i"]) - if not self.session.exists('Makefile') and self.session.exists('configure'): - self.session.check_call(['./configure']) + if not self.session.exists("Makefile") and self.session.exists("configure"): + self.session.check_call(["./configure"]) def dist(self): self.setup() apt = AptManager(self.session) - apt.install(['make']) + apt.install(["make"]) try: - run_with_build_fixer(self.session, ['make', 'dist']) + run_with_build_fixer(self.session, ["make", 
"dist"]) except UnidentifiedError as e: - if ("make: *** No rule to make target 'dist'. Stop.\n" - in e.lines): + if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass - elif ("make[1]: *** No rule to make target 'dist'. Stop.\n" - in e.lines): + elif "make[1]: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass - elif ("Reconfigure the source tree " - "(via './config' or 'perl Configure'), please.\n" - ) in e.lines: - run_with_build_fixer(self.session, ['./config']) - run_with_build_fixer(self.session, ['make', 'dist']) elif ( - "Please try running 'make manifest' and then run " - "'make dist' again.\n" in e.lines): - run_with_build_fixer(self.session, ['make', 'manifest']) - run_with_build_fixer(self.session, ['make', 'dist']) + "Reconfigure the source tree " + "(via './config' or 'perl Configure'), please.\n" + ) in e.lines: + run_with_build_fixer(self.session, ["./config"]) + run_with_build_fixer(self.session, ["make", "dist"]) + elif ( + "Please try running 'make manifest' and then run " + "'make dist' again.\n" in e.lines + ): + run_with_build_fixer(self.session, ["make", "manifest"]) + run_with_build_fixer(self.session, ["make", "dist"]) elif "Please run ./configure first\n" in e.lines: - run_with_build_fixer(self.session, ['./configure']) - run_with_build_fixer(self.session, ['make', 'dist']) - elif any([re.match( - r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' - r'Run \'./configure \[options\]\' and retry. 
Stop.\n', - line) for line in e.lines]): - run_with_build_fixer(self.session, ['./configure']) - run_with_build_fixer(self.session, ['make', 'dist']) - elif any([re.match( - r'Problem opening MANIFEST: No such file or directory ' - r'at .* line [0-9]+\.', line) for line in e.lines]): - run_with_build_fixer(self.session, ['make', 'manifest']) - run_with_build_fixer(self.session, ['make', 'dist']) + run_with_build_fixer(self.session, ["./configure"]) + run_with_build_fixer(self.session, ["make", "dist"]) + elif any( + [ + re.match( + r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' " + r"Run \'./configure \[options\]\' and retry. Stop.\n", + line, + ) + for line in e.lines + ] + ): + run_with_build_fixer(self.session, ["./configure"]) + run_with_build_fixer(self.session, ["make", "dist"]) + elif any( + [ + re.match( + r"Problem opening MANIFEST: No such file or directory " + r"at .* line [0-9]+\.", + line, + ) + for line in e.lines + ] + ): + run_with_build_fixer(self.session, ["make", "manifest"]) + run_with_build_fixer(self.session, ["make", "dist"]) else: raise else: @@ -319,40 +327,49 @@ class Make(BuildSystem): def detect_buildsystems(session): """Detect build systems.""" - if session.exists('package.xml'): - logging.info('Found package.xml, assuming pear package.') + if session.exists("package.xml"): + logging.info("Found package.xml, assuming pear package.") yield Pear(session) - if session.exists('setup.py'): - logging.info('Found setup.py, assuming python project.') + if session.exists("setup.py"): + logging.info("Found setup.py, assuming python project.") yield SetupPy(session) - if session.exists('pyproject.toml'): - logging.info('Found pyproject.toml, assuming python project.') + if session.exists("pyproject.toml"): + logging.info("Found pyproject.toml, assuming python project.") yield PyProject(session) - if session.exists('setup.cfg'): - logging.info('Found setup.cfg, assuming python project.') + if session.exists("setup.cfg"): + logging.info("Found 
setup.cfg, assuming python project.") yield SetupCfg(session) - if session.exists('package.json'): - logging.info('Found package.json, assuming node package.') + if session.exists("package.json"): + logging.info("Found package.json, assuming node package.") yield NpmPackage(session) - if session.exists('waf'): - logging.info('Found waf, assuming waf package.') + if session.exists("waf"): + logging.info("Found waf, assuming waf package.") yield Waf(session) gemfiles = [ - entry.name for entry in session.scandir('.') - if entry.name.endswith('.gem')] + entry.name for entry in session.scandir(".") if entry.name.endswith(".gem") + ] if gemfiles: yield Gem(session) - if session.exists('dist.ini') and not session.exists('Makefile.PL'): + if session.exists("dist.ini") and not session.exists("Makefile.PL"): yield DistInkt(session) - if any([session.exists(p) for p in [ - 'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac', - 'configure.in']]): + if any( + [ + session.exists(p) + for p in [ + "Makefile", + "Makefile.PL", + "autogen.sh", + "configure.ac", + "configure.in", + ] + ] + ): yield Make(session) diff --git a/ognibuild/debian/__init__.py b/ognibuild/debian/__init__.py index 9d64ab8..449cea0 100644 --- a/ognibuild/debian/__init__.py +++ b/ognibuild/debian/__init__.py @@ -23,21 +23,18 @@ from ..session import Session # TODO(jelmer): move this to debian/ def satisfy_build_deps(session: Session, tree): - source = Deb822(tree.get_file('debian/control')) + source = Deb822(tree.get_file("debian/control")) deps = [] - for name in ['Build-Depends', 'Build-Depends-Indep', 'Build-Depends-Arch']: + for name in ["Build-Depends", "Build-Depends-Indep", "Build-Depends-Arch"]: try: - deps.append(source[name].strip().strip(',')) + deps.append(source[name].strip().strip(",")) except KeyError: pass - for name in ['Build-Conflicts', 'Build-Conflicts-Indep', - 'Build-Conflicts-Arch']: + for name in ["Build-Conflicts", "Build-Conflicts-Indep", "Build-Conflicts-Arch"]: try: - 
deps.append('Conflicts: ' + source[name]) + deps.append("Conflicts: " + source[name]) except KeyError: pass - deps = [ - dep.strip().strip(',') - for dep in deps] + deps = [dep.strip().strip(",") for dep in deps] apt = AptManager(session) apt.satisfy(deps) diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index f82f1d1..fab4568 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -16,11 +16,11 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA __all__ = [ - 'changes_filename', - 'get_build_architecture', - 'add_dummy_changelog_entry', - 'build', - 'SbuildFailure', + "changes_filename", + "get_build_architecture", + "add_dummy_changelog_entry", + "build", + "SbuildFailure", ] from datetime import datetime @@ -38,12 +38,12 @@ from breezy.mutabletree import MutableTree from silver_platter.debian import ( BuildFailedError, DEFAULT_BUILDER, - ) +) from buildlog_consultant.sbuild import ( worker_failure_from_sbuild_log, SbuildFailure, - ) +) class MissingChangesFile(Exception): @@ -62,16 +62,24 @@ def changes_filename(package, version, arch): def get_build_architecture(): try: - return subprocess.check_output( - ['dpkg-architecture', '-qDEB_BUILD_ARCH']).strip().decode() + return ( + subprocess.check_output(["dpkg-architecture", "-qDEB_BUILD_ARCH"]) + .strip() + .decode() + ) except subprocess.CalledProcessError as e: - raise Exception( - "Could not find the build architecture: %s" % e) + raise Exception("Could not find the build architecture: %s" % e) def add_dummy_changelog_entry( - tree: MutableTree, subpath: str, suffix: str, suite: str, - message: str, timestamp=None, maintainer=None): + tree: MutableTree, + subpath: str, + suffix: str, + suite: str, + message: str, + timestamp=None, + maintainer=None, +): """Add a dummy changelog entry to a package. 
Args: @@ -80,87 +88,111 @@ def add_dummy_changelog_entry( suite: Debian suite message: Changelog message """ - def add_suffix(v, suffix): - m = re.fullmatch('(.*)(' + re.escape(suffix) + ')([0-9]+)', v,) - if m: - return (m.group(1) + m.group(2) + '%d' % (int(m.group(3)) + 1)) - else: - return v + suffix + '1' - path = os.path.join(subpath, 'debian', 'changelog') + def add_suffix(v, suffix): + m = re.fullmatch( + "(.*)(" + re.escape(suffix) + ")([0-9]+)", + v, + ) + if m: + return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1) + else: + return v + suffix + "1" + + path = os.path.join(subpath, "debian", "changelog") if maintainer is None: maintainer = get_maintainer() if timestamp is None: timestamp = datetime.now() with tree.get_file(path) as f: cl = Changelog() - cl.parse_changelog( - f, max_blocks=None, allow_empty_author=True, strict=False) + cl.parse_changelog(f, max_blocks=None, allow_empty_author=True, strict=False) version = cl[0].version if version.debian_revision: - version.debian_revision = add_suffix( - version.debian_revision, suffix) + version.debian_revision = add_suffix(version.debian_revision, suffix) else: - version.upstream_version = add_suffix( - version.upstream_version, suffix) + version.upstream_version = add_suffix(version.upstream_version, suffix) cl.new_block( package=cl[0].package, version=version, - urgency='low', + urgency="low", distributions=suite, - author='%s <%s>' % maintainer, + author="%s <%s>" % maintainer, date=format_datetime(timestamp), - changes=['', ' * ' + message, '']) + changes=["", " * " + message, ""], + ) cl_str = cl._format(allow_missing_author=True) tree.put_file_bytes_non_atomic(path, cl_str.encode(cl._encoding)) -def get_latest_changelog_version(local_tree, subpath=''): - path = osutils.pathjoin(subpath, 'debian/changelog') +def get_latest_changelog_version(local_tree, subpath=""): + path = osutils.pathjoin(subpath, "debian/changelog") with local_tree.get_file(path) as f: cl = Changelog(f, max_blocks=1) 
return cl.package, cl.version -def build(local_tree, outf, build_command=DEFAULT_BUILDER, result_dir=None, - distribution=None, subpath='', source_date_epoch=None): - args = [sys.executable, '-m', 'breezy', 'builddeb', - '--guess-upstream-branch-url', '--builder=%s' % build_command] +def build( + local_tree, + outf, + build_command=DEFAULT_BUILDER, + result_dir=None, + distribution=None, + subpath="", + source_date_epoch=None, +): + args = [ + sys.executable, + "-m", + "breezy", + "builddeb", + "--guess-upstream-branch-url", + "--builder=%s" % build_command, + ] if result_dir: - args.append('--result-dir=%s' % result_dir) - outf.write('Running %r\n' % (build_command, )) + args.append("--result-dir=%s" % result_dir) + outf.write("Running %r\n" % (build_command,)) outf.flush() env = dict(os.environ.items()) if distribution is not None: - env['DISTRIBUTION'] = distribution + env["DISTRIBUTION"] = distribution if source_date_epoch is not None: - env['SOURCE_DATE_EPOCH'] = '%d' % source_date_epoch - logging.info('Building debian packages, running %r.', build_command) + env["SOURCE_DATE_EPOCH"] = "%d" % source_date_epoch + logging.info("Building debian packages, running %r.", build_command) try: subprocess.check_call( - args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, - env=env) + args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, env=env + ) except subprocess.CalledProcessError: raise BuildFailedError() def build_once( - local_tree, build_suite, output_directory, build_command, - subpath='', source_date_epoch=None): - build_log_path = os.path.join(output_directory, 'build.log') + local_tree, + build_suite, + output_directory, + build_command, + subpath="", + source_date_epoch=None, +): + build_log_path = os.path.join(output_directory, "build.log") try: - with open(build_log_path, 'w') as f: - build(local_tree, outf=f, build_command=build_command, - result_dir=output_directory, distribution=build_suite, - subpath=subpath, 
source_date_epoch=source_date_epoch) + with open(build_log_path, "w") as f: + build( + local_tree, + outf=f, + build_command=build_command, + result_dir=output_directory, + distribution=build_suite, + subpath=subpath, + source_date_epoch=source_date_epoch, + ) except BuildFailedError: - with open(build_log_path, 'rb') as f: + with open(build_log_path, "rb") as f: raise worker_failure_from_sbuild_log(f) - (cl_package, cl_version) = get_latest_changelog_version( - local_tree, subpath) - changes_name = changes_filename( - cl_package, cl_version, get_build_architecture()) + (cl_package, cl_version) = get_latest_changelog_version(local_tree, subpath) + changes_name = changes_filename(cl_package, cl_version, get_build_architecture()) changes_path = os.path.join(output_directory, changes_name) if not os.path.exists(changes_path): raise MissingChangesFile(changes_name) @@ -168,13 +200,19 @@ def build_once( def gbp_dch(path): - subprocess.check_call(['gbp', 'dch'], cwd=path) + subprocess.check_call(["gbp", "dch"], cwd=path) def attempt_build( - local_tree, suffix, build_suite, output_directory, build_command, - build_changelog_entry='Build for debian-janitor apt repository.', - subpath='', source_date_epoch=None): + local_tree, + suffix, + build_suite, + output_directory, + build_command, + build_changelog_entry="Build for debian-janitor apt repository.", + subpath="", + source_date_epoch=None, +): """Attempt a build, with a custom distribution set. 
Args: @@ -189,8 +227,13 @@ def attempt_build( Returns: Tuple with (changes_name, cl_version) """ add_dummy_changelog_entry( - local_tree, subpath, suffix, build_suite, - build_changelog_entry) + local_tree, subpath, suffix, build_suite, build_changelog_entry + ) return build_once( - local_tree, build_suite, output_directory, build_command, subpath, - source_date_epoch=source_date_epoch) + local_tree, + build_suite, + output_directory, + build_command, + subpath, + source_date_epoch=source_date_epoch, + ) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 3e1d7b1..cf88224 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -16,7 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA __all__ = [ - 'build_incrementally', + "build_incrementally", ] import logging @@ -30,7 +30,7 @@ from debian.deb822 import ( Deb822, PkgRelation, Release, - ) +) from breezy.commit import PointlessCommit from breezy.tree import Tree @@ -39,34 +39,34 @@ from debmutate.control import ( ensure_minimum_version, pg_buildext_updatecontrol, ControlEditor, - ) +) from debmutate.debhelper import ( get_debhelper_compat_level, - ) +) from debmutate.deb822 import ( Deb822Editor, - ) +) from debmutate.reformatting import ( FormattingUnpreservable, GeneratedFile, - ) +) from lintian_brush import ( reset_tree, - ) +) from lintian_brush.changelog import ( add_changelog_entry, - ) +) from debmutate._rules import ( dh_invoke_add_with, update_rules, - ) +) from silver_platter.debian import ( debcommit, DEFAULT_BUILDER, - ) +) -from .build import attempt_build +from .build import attempt_build, get_build_architecture from buildlog_consultant import Problem from buildlog_consultant.common import ( MissingConfigStatusInput, @@ -102,13 +102,13 @@ from buildlog_consultant.common import ( MissingMavenArtifacts, GnomeCommonMissing, MissingGnomeCommonDependency, - ) +) from buildlog_consultant.apt import ( AptFetchFailure, 
- ) +) from buildlog_consultant.sbuild import ( SbuildFailure, - ) +) DEFAULT_MAX_ITERATIONS = 10 @@ -122,9 +122,7 @@ class CircularDependency(Exception): class DependencyContext(object): - - def __init__(self, tree, subpath='', committer=None, - update_changelog=True): + def __init__(self, tree, subpath="", committer=None, update_changelog=True): self.tree = tree self.subpath = subpath self.committer = committer @@ -135,36 +133,50 @@ class DependencyContext(object): class BuildDependencyContext(DependencyContext): - def add_dependency(self, package, minimum_version=None): return add_build_dependency( - self.tree, package, minimum_version=minimum_version, - committer=self.committer, subpath=self.subpath, - update_changelog=self.update_changelog) + self.tree, + package, + minimum_version=minimum_version, + committer=self.committer, + subpath=self.subpath, + update_changelog=self.update_changelog, + ) class AutopkgtestDependencyContext(DependencyContext): - - def __init__(self, testname, tree, subpath='', committer=None, - update_changelog=True): + def __init__( + self, testname, tree, subpath="", committer=None, update_changelog=True + ): self.testname = testname super(AutopkgtestDependencyContext, self).__init__( - tree, subpath, committer, update_changelog) + tree, subpath, committer, update_changelog + ) def add_dependency(self, package, minimum_version=None): return add_test_dependency( - self.tree, self.testname, package, + self.tree, + self.testname, + package, minimum_version=minimum_version, - committer=self.committer, subpath=self.subpath, - update_changelog=self.update_changelog) + committer=self.committer, + subpath=self.subpath, + update_changelog=self.update_changelog, + ) -def add_build_dependency(tree, package, minimum_version=None, - committer=None, subpath='', update_changelog=True): +def add_build_dependency( + tree, + package, + minimum_version=None, + committer=None, + subpath="", + update_changelog=True, +): if not isinstance(package, str): 
raise TypeError(package) - control_path = os.path.join(tree.abspath(subpath), 'debian/control') + control_path = os.path.join(tree.abspath(subpath), "debian/control") try: with ControlEditor(path=control_path) as updater: for binary in updater.binaries: @@ -172,15 +184,14 @@ def add_build_dependency(tree, package, minimum_version=None, raise CircularDependency(package) if minimum_version: updater.source["Build-Depends"] = ensure_minimum_version( - updater.source.get("Build-Depends", ""), - package, minimum_version) + updater.source.get("Build-Depends", ""), package, minimum_version + ) else: updater.source["Build-Depends"] = ensure_some_version( - updater.source.get("Build-Depends", ""), package) + updater.source.get("Build-Depends", ""), package + ) except FormattingUnpreservable as e: - logging.info( - 'Unable to edit %s in a way that preserves formatting.', - e.path) + logging.info("Unable to edit %s in a way that preserves formatting.", e.path) return False if minimum_version: @@ -189,22 +200,32 @@ def add_build_dependency(tree, package, minimum_version=None, desc = package if not updater.changed: - logging.info('Giving up; dependency %s was already present.', desc) + logging.info("Giving up; dependency %s was already present.", desc) return False logging.info("Adding build dependency: %s", desc) return commit_debian_changes( - tree, subpath, "Add missing build dependency on %s." % desc, - committer=committer, update_changelog=update_changelog) + tree, + subpath, + "Add missing build dependency on %s." 
% desc, + committer=committer, + update_changelog=update_changelog, + ) -def add_test_dependency(tree, testname, package, minimum_version=None, - committer=None, subpath='', update_changelog=True): +def add_test_dependency( + tree, + testname, + package, + minimum_version=None, + committer=None, + subpath="", + update_changelog=True, +): if not isinstance(package, str): raise TypeError(package) - tests_control_path = os.path.join( - tree.abspath(subpath), 'debian/tests/control') + tests_control_path = os.path.join(tree.abspath(subpath), "debian/tests/control") try: with Deb822Editor(path=tests_control_path) as updater: @@ -219,15 +240,14 @@ def add_test_dependency(tree, testname, package, minimum_version=None, continue if minimum_version: control["Depends"] = ensure_minimum_version( - control.get("Depends", ""), - package, minimum_version) + control.get("Depends", ""), package, minimum_version + ) else: control["Depends"] = ensure_some_version( - control.get("Depends", ""), package) + control.get("Depends", ""), package + ) except FormattingUnpreservable as e: - logging.info( - 'Unable to edit %s in a way that preserves formatting.', - e.path) + logging.info("Unable to edit %s in a way that preserves formatting.", e.path) return False if not updater.changed: return False @@ -239,22 +259,27 @@ def add_test_dependency(tree, testname, package, minimum_version=None, logging.info("Adding dependency to test %s: %s", testname, desc) return commit_debian_changes( - tree, subpath, + tree, + subpath, "Add missing dependency for test %s on %s." 
% (testname, desc), - update_changelog=update_changelog) + update_changelog=update_changelog, + ) -def commit_debian_changes(tree, subpath, summary, committer=None, - update_changelog=True): +def commit_debian_changes( + tree, subpath, summary, committer=None, update_changelog=True +): with tree.lock_write(): try: if update_changelog: add_changelog_entry( - tree, os.path.join(subpath, 'debian/changelog'), [summary]) + tree, os.path.join(subpath, "debian/changelog"), [summary] + ) debcommit(tree, committer=committer, subpath=subpath) else: - tree.commit(message=summary, committer=committer, - specific_files=[subpath]) + tree.commit( + message=summary, committer=committer, specific_files=[subpath] + ) except PointlessCommit: return False else: @@ -262,7 +287,6 @@ def commit_debian_changes(tree, subpath, summary, committer=None, class FileSearcher(object): - def search_files(self, path, regex=False): raise NotImplementedError(self.search_files) @@ -272,13 +296,12 @@ class ContentsFileNotFound(Exception): class AptContentsFileSearcher(FileSearcher): - def __init__(self): self._db = {} @classmethod def from_env(cls): - sources = os.environ['REPOSITORIES'].split(':') + sources = os.environ["REPOSITORIES"].split(":") return cls.from_repositories(sources) def __setitem__(self, path, package): @@ -296,9 +319,9 @@ class AptContentsFileSearcher(FileSearcher): def load_file(self, f): for line in f: (path, rest) = line.rsplit(maxsplit=1) - package = rest.split(b'/')[-1] - decoded_path = '/' + path.decode('utf-8', 'surrogateescape') - self[decoded_path] = package.decode('utf-8') + package = rest.split(b"/")[-1] + decoded_path = "/" + path.decode("utf-8", "surrogateescape") + self[decoded_path] = package.decode("utf-8") @classmethod def from_urls(cls, urls): @@ -311,54 +334,56 @@ class AptContentsFileSearcher(FileSearcher): def from_repositories(cls, sources): # TODO(jelmer): Verify signatures, etc. 
urls = [] - arches = [get_build_architecture(), 'all'] + arches = [get_build_architecture(), "all"] for source in sources: - parts = source.split(' ') - if parts[0] != 'deb': - logging.warning('Invalid line in sources: %r', source) + parts = source.split(" ") + if parts[0] != "deb": + logging.warning("Invalid line in sources: %r", source) continue base_url = parts[1] name = parts[2] components = parts[3:] - response = cls._get('%s/%s/Release' % (base_url, name)) + response = cls._get("%s/%s/Release" % (base_url, name)) r = Release(response) desired_files = set() for component in components: for arch in arches: - desired_files.add('%s/Contents-%s' % (component, arch)) - for entry in r['MD5Sum']: - if entry['name'] in desired_files: - urls.append('%s/%s/%s' % (base_url, name, entry['name'])) + desired_files.add("%s/Contents-%s" % (component, arch)) + for entry in r["MD5Sum"]: + if entry["name"] in desired_files: + urls.append("%s/%s/%s" % (base_url, name, entry["name"])) return cls.from_urls(urls) @staticmethod def _get(url): from urllib.request import urlopen, Request - request = Request(url, headers={'User-Agent': 'Debian Janitor'}) + + request = Request(url, headers={"User-Agent": "Debian Janitor"}) return urlopen(request) def load_url(self, url): from urllib.error import HTTPError + try: response = self._get(url) except HTTPError as e: if e.status == 404: raise ContentsFileNotFound(url) raise - if url.endswith('.gz'): + if url.endswith(".gz"): import gzip + f = gzip.GzipFile(fileobj=response) - elif response.headers.get_content_type() == 'text/plain': + elif response.headers.get_content_type() == "text/plain": f = response else: raise Exception( - 'Unknown content type %r' % - response.headers.get_content_type()) + "Unknown content type %r" % response.headers.get_content_type() + ) self.load_file(f) class GeneratedFileSearcher(FileSearcher): - def __init__(self, db): self._db = db @@ -373,10 +398,13 @@ class GeneratedFileSearcher(FileSearcher): # TODO(jelmer): 
read from a file -GENERATED_FILE_SEARCHER = GeneratedFileSearcher({ - '/etc/locale.gen': 'locales', - # Alternative - '/usr/bin/rst2html': '/usr/share/docutils/scripts/python3/rst2html'}) +GENERATED_FILE_SEARCHER = GeneratedFileSearcher( + { + "/etc/locale.gen": "locales", + # Alternative + "/usr/bin/rst2html": "/usr/share/docutils/scripts/python3/rst2html", + } +) _apt_file_searcher = None @@ -399,12 +427,12 @@ def get_package_for_paths(paths, regex=False): if candidates: break if len(candidates) == 0: - logging.warning('No packages found that contain %r', paths) + logging.warning("No packages found that contain %r", paths) return None if len(candidates) > 1: logging.warning( - 'More than 1 packages found that contain %r: %r', - path, candidates) + "More than 1 packages found that contain %r: %r", path, candidates + ) # Euhr. Pick the one with the shortest name? return sorted(candidates, key=len)[0] else: @@ -412,71 +440,75 @@ def get_package_for_paths(paths, regex=False): def get_package_for_python_module(module, python_version): - if python_version == 'python3': + if python_version == "python3": paths = [ os.path.join( - '/usr/lib/python3/dist-packages', - module.replace('.', '/'), - '__init__.py'), + "/usr/lib/python3/dist-packages", + module.replace(".", "/"), + "__init__.py", + ), os.path.join( - '/usr/lib/python3/dist-packages', - module.replace('.', '/') + '.py'), + "/usr/lib/python3/dist-packages", module.replace(".", "/") + ".py" + ), os.path.join( - '/usr/lib/python3\\.[0-9]+/lib-dynload', - module.replace('.', '/') + '\\.cpython-.*\\.so'), + "/usr/lib/python3\\.[0-9]+/lib-dynload", + module.replace(".", "/") + "\\.cpython-.*\\.so", + ), os.path.join( - '/usr/lib/python3\\.[0-9]+/', - module.replace('.', '/') + '.py'), + "/usr/lib/python3\\.[0-9]+/", module.replace(".", "/") + ".py" + ), os.path.join( - '/usr/lib/python3\\.[0-9]+/', - module.replace('.', '/'), '__init__.py'), - ] - elif python_version == 'python2': + "/usr/lib/python3\\.[0-9]+/", 
module.replace(".", "/"), "__init__.py" + ), + ] + elif python_version == "python2": paths = [ os.path.join( - '/usr/lib/python2\\.[0-9]/dist-packages', - module.replace('.', '/'), - '__init__.py'), + "/usr/lib/python2\\.[0-9]/dist-packages", + module.replace(".", "/"), + "__init__.py", + ), os.path.join( - '/usr/lib/python2\\.[0-9]/dist-packages', - module.replace('.', '/') + '.py'), + "/usr/lib/python2\\.[0-9]/dist-packages", + module.replace(".", "/") + ".py", + ), os.path.join( - '/usr/lib/python2.\\.[0-9]/lib-dynload', - module.replace('.', '/') + '.so')] - elif python_version == 'pypy': + "/usr/lib/python2.\\.[0-9]/lib-dynload", + module.replace(".", "/") + ".so", + ), + ] + elif python_version == "pypy": paths = [ os.path.join( - '/usr/lib/pypy/dist-packages', - module.replace('.', '/'), - '__init__.py'), + "/usr/lib/pypy/dist-packages", module.replace(".", "/"), "__init__.py" + ), os.path.join( - '/usr/lib/pypy/dist-packages', - module.replace('.', '/') + '.py'), + "/usr/lib/pypy/dist-packages", module.replace(".", "/") + ".py" + ), os.path.join( - '/usr/lib/pypy/dist-packages', - module.replace('.', '/') + '\\.pypy-.*\\.so'), - ] + "/usr/lib/pypy/dist-packages", + module.replace(".", "/") + "\\.pypy-.*\\.so", + ), + ] else: - raise AssertionError( - 'unknown python version %r' % python_version) + raise AssertionError("unknown python version %r" % python_version) return get_package_for_paths(paths, regex=True) def targeted_python_versions(tree: Tree) -> Set[str]: - with tree.get_file('debian/control') as f: + with tree.get_file("debian/control") as f: control = Deb822(f) - build_depends = PkgRelation.parse_relations( - control.get('Build-Depends', '')) + build_depends = PkgRelation.parse_relations(control.get("Build-Depends", "")) all_build_deps: Set[str] = set() for or_deps in build_depends: - all_build_deps.update(or_dep['name'] for or_dep in or_deps) + all_build_deps.update(or_dep["name"] for or_dep in or_deps) targeted = set() - if 
any(x.startswith('pypy') for x in all_build_deps): - targeted.add('pypy') - if any(x.startswith('python-') for x in all_build_deps): - targeted.add('cpython2') - if any(x.startswith('python3-') for x in all_build_deps): - targeted.add('cpython3') + if any(x.startswith("pypy") for x in all_build_deps): + targeted.add("pypy") + if any(x.startswith("python-") for x in all_build_deps): + targeted.add("cpython2") + if any(x.startswith("python3-") for x in all_build_deps): + targeted.add("cpython3") return targeted @@ -487,6 +519,7 @@ def package_exists(package): global apt_cache if apt_cache is None: import apt_pkg + apt_cache = apt_pkg.Cache() for p in apt_cache.packages: if p.name == package: @@ -495,66 +528,65 @@ def package_exists(package): def fix_missing_javascript_runtime(error, context): - package = get_package_for_paths( - ['/usr/bin/node', '/usr/bin/duk'], - regex=False) + package = get_package_for_paths(["/usr/bin/node", "/usr/bin/duk"], regex=False) if package is None: return False return context.add_dependency(package) -def fix_missing_python_distribution(error, context): +def fix_missing_python_distribution(error, context): # noqa: C901 targeted = targeted_python_versions(context.tree) default = not targeted pypy_pkg = get_package_for_paths( - ['/usr/lib/pypy/dist-packages/%s-.*.egg-info' % error.distribution], - regex=True) + ["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % error.distribution], regex=True + ) if pypy_pkg is None: - pypy_pkg = 'pypy-%s' % error.distribution + pypy_pkg = "pypy-%s" % error.distribution if not package_exists(pypy_pkg): pypy_pkg = None py2_pkg = get_package_for_paths( - ['/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info' % - error.distribution], regex=True) + ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info" % error.distribution], + regex=True, + ) if py2_pkg is None: - py2_pkg = 'python-%s' % error.distribution + py2_pkg = "python-%s" % error.distribution if not package_exists(py2_pkg): py2_pkg = None py3_pkg = 
get_package_for_paths( - ['/usr/lib/python3/dist-packages/%s-.*.egg-info' % - error.distribution], regex=True) + ["/usr/lib/python3/dist-packages/%s-.*.egg-info" % error.distribution], + regex=True, + ) if py3_pkg is None: - py3_pkg = 'python3-%s' % error.distribution + py3_pkg = "python3-%s" % error.distribution if not package_exists(py3_pkg): py3_pkg = None extra_build_deps = [] if error.python_version == 2: - if 'pypy' in targeted: + if "pypy" in targeted: if not pypy_pkg: - logging.warning('no pypy package found for %s', error.module) + logging.warning("no pypy package found for %s", error.module) else: extra_build_deps.append(pypy_pkg) - if 'cpython2' in targeted or default: + if "cpython2" in targeted or default: if not py2_pkg: - logging.warning( - 'no python 2 package found for %s', error.module) + logging.warning("no python 2 package found for %s", error.module) return False extra_build_deps.append(py2_pkg) elif error.python_version == 3: if not py3_pkg: - logging.warning('no python 3 package found for %s', error.module) + logging.warning("no python 3 package found for %s", error.module) return False extra_build_deps.append(py3_pkg) else: - if py3_pkg and ('cpython3' in targeted or default): + if py3_pkg and ("cpython3" in targeted or default): extra_build_deps.append(py3_pkg) - if py2_pkg and ('cpython2' in targeted or default): + if py2_pkg and ("cpython2" in targeted or default): extra_build_deps.append(py2_pkg) - if pypy_pkg and 'pypy' in targeted: + if pypy_pkg and "pypy" in targeted: extra_build_deps.append(pypy_pkg) if not extra_build_deps: @@ -562,48 +594,45 @@ def fix_missing_python_distribution(error, context): for dep_pkg in extra_build_deps: assert dep_pkg is not None - if not context.add_dependency( - dep_pkg, minimum_version=error.minimum_version): + if not context.add_dependency(dep_pkg, minimum_version=error.minimum_version): return False return True def fix_missing_python_module(error, context): - if getattr(context, 'tree', None) is not 
None: + if getattr(context, "tree", None) is not None: targeted = targeted_python_versions(context.tree) else: targeted = set() - default = (not targeted) + default = not targeted - pypy_pkg = get_package_for_python_module(error.module, 'pypy') - py2_pkg = get_package_for_python_module(error.module, 'python2') - py3_pkg = get_package_for_python_module(error.module, 'python3') + pypy_pkg = get_package_for_python_module(error.module, "pypy") + py2_pkg = get_package_for_python_module(error.module, "python2") + py3_pkg = get_package_for_python_module(error.module, "python3") extra_build_deps = [] if error.python_version == 2: - if 'pypy' in targeted: + if "pypy" in targeted: if not pypy_pkg: - logging.warning('no pypy package found for %s', error.module) + logging.warning("no pypy package found for %s", error.module) else: extra_build_deps.append(pypy_pkg) - if 'cpython2' in targeted or default: + if "cpython2" in targeted or default: if not py2_pkg: - logging.warning( - 'no python 2 package found for %s', error.module) + logging.warning("no python 2 package found for %s", error.module) return False extra_build_deps.append(py2_pkg) elif error.python_version == 3: if not py3_pkg: - logging.warning( - 'no python 3 package found for %s', error.module) + logging.warning("no python 3 package found for %s", error.module) return False extra_build_deps.append(py3_pkg) else: - if py3_pkg and ('cpython3' in targeted or default): + if py3_pkg and ("cpython3" in targeted or default): extra_build_deps.append(py3_pkg) - if py2_pkg and ('cpython2' in targeted or default): + if py2_pkg and ("cpython2" in targeted or default): extra_build_deps.append(py2_pkg) - if pypy_pkg and 'pypy' in targeted: + if pypy_pkg and "pypy" in targeted: extra_build_deps.append(pypy_pkg) if not extra_build_deps: @@ -618,8 +647,8 @@ def fix_missing_python_module(error, context): def fix_missing_go_package(error, context): package = get_package_for_paths( - [os.path.join('/usr/share/gocode/src', 
error.package, '.*')], - regex=True) + [os.path.join("/usr/share/gocode/src", error.package, ".*")], regex=True + ) if package is None: return False return context.add_dependency(package) @@ -627,10 +656,12 @@ def fix_missing_go_package(error, context): def fix_missing_c_header(error, context): package = get_package_for_paths( - [os.path.join('/usr/include', error.header)], regex=False) + [os.path.join("/usr/include", error.header)], regex=False + ) if package is None: package = get_package_for_paths( - [os.path.join('/usr/include', '.*', error.header)], regex=True) + [os.path.join("/usr/include", ".*", error.header)], regex=True + ) if package is None: return False return context.add_dependency(package) @@ -638,16 +669,16 @@ def fix_missing_c_header(error, context): def fix_missing_pkg_config(error, context): package = get_package_for_paths( - [os.path.join('/usr/lib/pkgconfig', error.module + '.pc')]) + [os.path.join("/usr/lib/pkgconfig", error.module + ".pc")] + ) if package is None: package = get_package_for_paths( - [os.path.join('/usr/lib', '.*', 'pkgconfig', - error.module + '.pc')], - regex=True) + [os.path.join("/usr/lib", ".*", "pkgconfig", error.module + ".pc")], + regex=True, + ) if package is None: return False - return context.add_dependency( - package, minimum_version=error.minimum_version) + return context.add_dependency(package, minimum_version=error.minimum_version) def fix_missing_command(error, context): @@ -655,11 +686,11 @@ def fix_missing_command(error, context): paths = [error.command] else: paths = [ - os.path.join(dirname, error.command) - for dirname in ['/usr/bin', '/bin']] + os.path.join(dirname, error.command) for dirname in ["/usr/bin", "/bin"] + ] package = get_package_for_paths(paths) if package is None: - logging.info('No packages found that contain %r', paths) + logging.info("No packages found that contain %r", paths) return False return context.add_dependency(package) @@ -672,10 +703,10 @@ def fix_missing_file(error, context): 
def fix_missing_sprockets_file(error, context): - if error.content_type == 'application/javascript': - path = '/usr/share/.*/app/assets/javascripts/%s.js$' % error.name + if error.content_type == "application/javascript": + path = "/usr/share/.*/app/assets/javascripts/%s.js$" % error.name else: - logging.warning('unable to handle content type %s', error.content_type) + logging.warning("unable to handle content type %s", error.content_type) return False package = get_package_for_paths([path], regex=True) if package is None: @@ -683,22 +714,23 @@ def fix_missing_sprockets_file(error, context): return context.add_dependency(package) -DEFAULT_PERL_PATHS = ['/usr/share/perl5'] +DEFAULT_PERL_PATHS = ["/usr/share/perl5"] def fix_missing_perl_file(error, context): - if (error.filename == 'Makefile.PL' and - not context.tree.has_filename('Makefile.PL') and - context.tree.has_filename('dist.ini')): + if ( + error.filename == "Makefile.PL" + and not context.tree.has_filename("Makefile.PL") + and context.tree.has_filename("dist.ini") + ): # TODO(jelmer): add dist-zilla add-on to debhelper raise NotImplementedError if error.inc is None: if error.filename is None: - filename = error.module.replace('::', '/') + '.pm' - paths = [os.path.join(inc, filename) - for inc in DEFAULT_PERL_PATHS] + filename = error.module.replace("::", "/") + ".pm" + paths = [os.path.join(inc, filename) for inc in DEFAULT_PERL_PATHS] elif not os.path.isabs(error.filename): return False else: @@ -707,41 +739,42 @@ def fix_missing_perl_file(error, context): paths = [os.path.join(inc, error.filename) for inc in error.inc] package = get_package_for_paths(paths, regex=False) if package is None: - if getattr(error, 'module', None): + if getattr(error, "module", None): logging.warning( - 'no perl package found for %s (%r).', - error.module, error.filename) + "no perl package found for %s (%r).", error.module, error.filename + ) else: logging.warning( - 'perl file %s not found (paths searched for: %r).', - 
error.filename, paths) + "perl file %s not found (paths searched for: %r).", + error.filename, + paths, + ) return False return context.add_dependency(package) def get_package_for_node_package(node_package): paths = [ - '/usr/share/nodejs/.*/node_modules/%s/package.json' % node_package, - '/usr/lib/nodejs/%s/package.json' % node_package, - '/usr/share/nodejs/%s/package.json' % node_package] + "/usr/share/nodejs/.*/node_modules/%s/package.json" % node_package, + "/usr/lib/nodejs/%s/package.json" % node_package, + "/usr/share/nodejs/%s/package.json" % node_package, + ] return get_package_for_paths(paths, regex=True) def fix_missing_node_module(error, context): package = get_package_for_node_package(error.module) if package is None: - logging.warning( - 'no node package found for %s.', - error.module) + logging.warning("no node package found for %s.", error.module) return False return context.add_dependency(package) def fix_missing_dh_addon(error, context): - paths = [os.path.join('/usr/share/perl5', error.path)] + paths = [os.path.join("/usr/share/perl5", error.path)] package = get_package_for_paths(paths) if package is None: - logging.warning('no package for debhelper addon %s', error.name) + logging.warning("no package for debhelper addon %s", error.name) return False return context.add_dependency(package) @@ -751,32 +784,33 @@ def retry_apt_failure(error, context): def fix_missing_php_class(error, context): - path = '/usr/share/php/%s.php' % error.php_class.replace('\\', '/') + path = "/usr/share/php/%s.php" % error.php_class.replace("\\", "/") package = get_package_for_paths([path]) if package is None: - logging.warning('no package for PHP class %s', error.php_class) + logging.warning("no package for PHP class %s", error.php_class) return False return context.add_dependency(package) def fix_missing_jdk_file(error, context): - path = error.jdk_path + '.*/' + error.filename + path = error.jdk_path + ".*/" + error.filename package = get_package_for_paths([path], 
regex=True) if package is None: logging.warning( - 'no package found for %s (JDK: %s) - regex %s', - error.filename, error.jdk_path, path) + "no package found for %s (JDK: %s) - regex %s", + error.filename, + error.jdk_path, + path, + ) return False return context.add_dependency(package) def fix_missing_vala_package(error, context): - path = '/usr/share/vala-[0-9.]+/vapi/%s.vapi' % error.package + path = "/usr/share/vala-[0-9.]+/vapi/%s.vapi" % error.package package = get_package_for_paths([path], regex=True) if package is None: - logging.warning( - 'no file found for package %s - regex %s', - error.package, path) + logging.warning("no file found for package %s - regex %s", error.package, path) return False return context.add_dependency(package) @@ -785,12 +819,11 @@ def fix_missing_xml_entity(error, context): # Ideally we should be using the XML catalog for this, but hardcoding # a few URLs will do for now.. URL_MAP = { - 'http://www.oasis-open.org/docbook/xml/': - '/usr/share/xml/docbook/schema/dtd/' + "http://www.oasis-open.org/docbook/xml/": "/usr/share/xml/docbook/schema/dtd/" } for url, path in URL_MAP.items(): if error.url.startswith(url): - search_path = os.path.join(path, error.url[len(url):]) + search_path = os.path.join(path, error.url[len(url) :]) break else: return False @@ -802,69 +835,73 @@ def fix_missing_xml_entity(error, context): def fix_missing_library(error, context): - paths = [os.path.join('/usr/lib/lib%s.so$' % error.library), - os.path.join('/usr/lib/.*/lib%s.so$' % error.library), - os.path.join('/usr/lib/lib%s.a$' % error.library), - os.path.join('/usr/lib/.*/lib%s.a$' % error.library)] + paths = [ + os.path.join("/usr/lib/lib%s.so$" % error.library), + os.path.join("/usr/lib/.*/lib%s.so$" % error.library), + os.path.join("/usr/lib/lib%s.a$" % error.library), + os.path.join("/usr/lib/.*/lib%s.a$" % error.library), + ] package = get_package_for_paths(paths, regex=True) if package is None: - logging.warning('no package for library %s', 
error.library) + logging.warning("no package for library %s", error.library) return False return context.add_dependency(package) def fix_missing_ruby_gem(error, context): - paths = [os.path.join( - '/usr/share/rubygems-integration/all/' - 'specifications/%s-.*\\.gemspec' % error.gem)] + paths = [ + os.path.join( + "/usr/share/rubygems-integration/all/" + "specifications/%s-.*\\.gemspec" % error.gem + ) + ] package = get_package_for_paths(paths, regex=True) if package is None: - logging.warning('no package for gem %s', error.gem) + logging.warning("no package for gem %s", error.gem) return False return context.add_dependency(package, minimum_version=error.version) def fix_missing_ruby_file(error, context): - paths = [ - os.path.join('/usr/lib/ruby/vendor_ruby/%s.rb' % error.filename)] + paths = [os.path.join("/usr/lib/ruby/vendor_ruby/%s.rb" % error.filename)] package = get_package_for_paths(paths) if package is not None: return context.add_dependency(package) paths = [ - os.path.join(r'/usr/share/rubygems-integration/all/gems/([^/]+)/' - 'lib/%s.rb' % error.filename)] + os.path.join( + r"/usr/share/rubygems-integration/all/gems/([^/]+)/" + "lib/%s.rb" % error.filename + ) + ] package = get_package_for_paths(paths, regex=True) if package is not None: return context.add_dependency(package) - logging.warning('no package for ruby file %s', error.filename) + logging.warning("no package for ruby file %s", error.filename) return False def fix_missing_r_package(error, context): - paths = [os.path.join('/usr/lib/R/site-library/.*/R/%s$' % error.package)] + paths = [os.path.join("/usr/lib/R/site-library/.*/R/%s$" % error.package)] package = get_package_for_paths(paths, regex=True) if package is None: - logging.warning('no package for R package %s', error.package) + logging.warning("no package for R package %s", error.package) return False - return context.add_dependency( - package, minimum_version=error.minimum_version) + return context.add_dependency(package, 
minimum_version=error.minimum_version) def fix_missing_java_class(error, context): # Unfortunately this only finds classes in jars installed on the host # system :( - output = subprocess.check_output( - ["java-propose-classpath", "-c" + error.classname]) - classpath = [ - p for p in output.decode().strip(":").strip().split(':') if p] + output = subprocess.check_output(["java-propose-classpath", "-c" + error.classname]) + classpath = [p for p in output.decode().strip(":").strip().split(":") if p] if not classpath: - logging.warning('unable to find classpath for %s', error.classname) + logging.warning("unable to find classpath for %s", error.classname) return False - logging.info('Classpath for %s: %r', error.classname, classpath) + logging.info("Classpath for %s: %r", error.classname, classpath) package = get_package_for_paths(classpath) if package is None: - logging.warning('no package for files in %r', classpath) + logging.warning("no package for files in %r", classpath) return False return context.add_dependency(package) @@ -872,25 +909,26 @@ def fix_missing_java_class(error, context): def enable_dh_autoreconf(context): # Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by # default. 
- debhelper_compat_version = get_debhelper_compat_level( - context.tree.abspath('.')) + debhelper_compat_version = get_debhelper_compat_level(context.tree.abspath(".")) if debhelper_compat_version is not None and debhelper_compat_version < 10: + def add_with_autoreconf(line, target): - if target != b'%': + if target != b"%": return line - if not line.startswith(b'dh '): + if not line.startswith(b"dh "): return line - return dh_invoke_add_with(line, b'autoreconf') + return dh_invoke_add_with(line, b"autoreconf") if update_rules(command_line_cb=add_with_autoreconf): - return context.add_dependency('dh-autoreconf') + return context.add_dependency("dh-autoreconf") return False def fix_missing_configure(error, context): - if (not context.tree.has_filename('configure.ac') and - not context.tree.has_filename('configure.in')): + if not context.tree.has_filename("configure.ac") and not context.tree.has_filename( + "configure.in" + ): return False return enable_dh_autoreconf(context) @@ -905,95 +943,103 @@ def fix_missing_automake_input(error, context): def fix_missing_maven_artifacts(error, context): artifact = error.artifacts[0] - parts = artifact.split(':') + parts = artifact.split(":") if len(parts) == 4: (group_id, artifact_id, kind, version) = parts regex = False elif len(parts) == 3: (group_id, artifact_id, version) = parts - kind = 'jar' + kind = "jar" regex = False elif len(parts) == 2: - version = '.*' + version = ".*" (group_id, artifact_id) = parts - kind = 'jar' + kind = "jar" regex = True else: - raise AssertionError( - 'invalid number of parts to artifact %s' % artifact) - paths = [os.path.join( - '/usr/share/maven-repo', group_id.replace('.', '/'), - artifact_id, version, '%s-%s.%s' % (artifact_id, version, kind))] + raise AssertionError("invalid number of parts to artifact %s" % artifact) + paths = [ + os.path.join( + "/usr/share/maven-repo", + group_id.replace(".", "/"), + artifact_id, + version, + "%s-%s.%s" % (artifact_id, version, kind), + ) + ] package 
= get_package_for_paths(paths, regex=regex) if package is None: - logging.warning('no package for artifact %s', artifact) + logging.warning("no package for artifact %s", artifact) return False return context.add_dependency(package) def install_gnome_common(error, context): - return context.add_dependency('gnome-common') + return context.add_dependency("gnome-common") def install_gnome_common_dep(error, context): - if error.package == 'glib-gettext': - package = get_package_for_paths(['/usr/bin/glib-gettextize']) + if error.package == "glib-gettext": + package = get_package_for_paths(["/usr/bin/glib-gettextize"]) else: package = None if package is None: - logging.warning('No debian package for package %s', error.package) + logging.warning("No debian package for package %s", error.package) return False return context.add_dependency( - package=package, - minimum_version=error.minimum_version) + package=package, minimum_version=error.minimum_version + ) def install_xfce_dep(error, context): - if error.package == 'gtk-doc': - package = get_package_for_paths(['/usr/bin/gtkdocize']) + if error.package == "gtk-doc": + package = get_package_for_paths(["/usr/bin/gtkdocize"]) else: package = None if package is None: - logging.warning('No debian package for package %s', error.package) + logging.warning("No debian package for package %s", error.package) return False return context.add_dependency(package=package) def fix_missing_config_status_input(error, context): - autogen_path = 'autogen.sh' - rules_path = 'debian/rules' - if context.subpath not in ('.', ''): + autogen_path = "autogen.sh" + rules_path = "debian/rules" + if context.subpath not in (".", ""): autogen_path = os.path.join(context.subpath, autogen_path) rules_path = os.path.join(context.subpath, rules_path) if not context.tree.has_filename(autogen_path): return False def add_autogen(mf): - rule = any(mf.iter_rules(b'override_dh_autoreconf')) + rule = any(mf.iter_rules(b"override_dh_autoreconf")) if rule: return - 
rule = mf.add_rule(b'override_dh_autoreconf') - rule.append_command(b'dh_autoreconf ./autogen.sh') + rule = mf.add_rule(b"override_dh_autoreconf") + rule.append_command(b"dh_autoreconf ./autogen.sh") if not update_rules(makefile_cb=add_autogen, path=rules_path): return False if context.update_changelog: commit_debian_changes( - context.tree, context.subpath, - 'Run autogen.sh during build.', committer=context.committer, - update_changelog=context.update_changelog) + context.tree, + context.subpath, + "Run autogen.sh during build.", + committer=context.committer, + update_changelog=context.update_changelog, + ) return True def _find_aclocal_fun(macro): # TODO(jelmer): Use the API for codesearch.debian.net instead? - defun_prefix = b'AC_DEFUN([%s],' % macro.encode('ascii') - for entry in os.scandir('/usr/share/aclocal'): + defun_prefix = b"AC_DEFUN([%s]," % macro.encode("ascii") + for entry in os.scandir("/usr/share/aclocal"): if not entry.is_file(): continue - with open(entry.path, 'rb') as f: + with open(entry.path, "rb") as f: for line in f: if line.startswith(defun_prefix): return entry.path @@ -1005,46 +1051,50 @@ def run_pgbuildext_updatecontrol(error, context): # TODO(jelmer): run in the schroot pg_buildext_updatecontrol(context.tree.abspath(context.subpath)) return commit_debian_changes( - context.tree, context.subpath, "Run 'pgbuildext updatecontrol'.", - committer=context.committer, update_changelog=False) + context.tree, + context.subpath, + "Run 'pgbuildext updatecontrol'.", + committer=context.committer, + update_changelog=False, + ) def fix_missing_autoconf_macro(error, context): try: path = _find_aclocal_fun(error.macro) except KeyError: - logging.info('No local m4 file found defining %s', error.macro) + logging.info("No local m4 file found defining %s", error.macro) return False package = get_package_for_paths([path]) if package is None: - logging.warning('no package for macro file %s', path) + logging.warning("no package for macro file %s", path) 
return False return context.add_dependency(package) def fix_missing_c_sharp_compiler(error, context): - return context.add_dependency('mono-mcs') + return context.add_dependency("mono-mcs") def fix_missing_haskell_dependencies(error, context): path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % error.deps[0][0] package = get_package_for_paths([path], regex=True) if package is None: - logging.warning('no package for macro file %s', path) + logging.warning("no package for macro file %s", path) return False return context.add_dependency(package) VERSIONED_PACKAGE_FIXERS: List[ - Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]] +] = [ (NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), (MissingConfigure, fix_missing_configure), (MissingAutomakeInput, fix_missing_automake_input), ] -APT_FIXERS: List[ - Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ +APT_FIXERS: List[Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ (MissingPythonModule, fix_missing_python_module), (MissingPythonDistribution, fix_missing_python_distribution), (MissingCHeader, fix_missing_c_header), @@ -1085,16 +1135,14 @@ def resolve_error(error, context, fixers): if isinstance(error, error_cls): relevant_fixers.append(fixer) if not relevant_fixers: - logging.warning('No fixer found for %r', error) + logging.warning("No fixer found for %r", error) return False for fixer in relevant_fixers: - logging.info( - 'Attempting to use fixer %r to address %r', - fixer, error) + logging.info("Attempting to use fixer %r to address %r", fixer, error) try: made_changes = fixer(error, context) except GeneratedFile: - logging.warning('Control file is generated, unable to edit.') + logging.warning("Control file is generated, unable to edit.") return False if made_changes: return True @@ -1102,102 +1150,139 @@ def resolve_error(error, context, fixers): def build_incrementally( - 
local_tree, suffix, build_suite, output_directory, build_command, - build_changelog_entry='Build for debian-janitor apt repository.', - committer=None, max_iterations=DEFAULT_MAX_ITERATIONS, - subpath='', source_date_epoch=None, update_changelog=True): + local_tree, + suffix, + build_suite, + output_directory, + build_command, + build_changelog_entry="Build for debian-janitor apt repository.", + committer=None, + max_iterations=DEFAULT_MAX_ITERATIONS, + subpath="", + source_date_epoch=None, + update_changelog=True, +): fixed_errors = [] while True: try: return attempt_build( - local_tree, suffix, build_suite, output_directory, - build_command, build_changelog_entry, subpath=subpath, - source_date_epoch=source_date_epoch) + local_tree, + suffix, + build_suite, + output_directory, + build_command, + build_changelog_entry, + subpath=subpath, + source_date_epoch=source_date_epoch, + ) except SbuildFailure as e: if e.error is None: - logging.warning( - 'Build failed with unidentified error. Giving up.') + logging.warning("Build failed with unidentified error. Giving up.") raise if e.context is None: - logging.info('No relevant context, not making any changes.') + logging.info("No relevant context, not making any changes.") raise if (e.error, e.context) in fixed_errors: - logging.warning( - 'Error was still not fixed on second try. Giving up.') + logging.warning("Error was still not fixed on second try. Giving up.") raise - if max_iterations is not None \ - and len(fixed_errors) > max_iterations: - logging.warning( - 'Last fix did not address the issue. Giving up.') + if max_iterations is not None and len(fixed_errors) > max_iterations: + logging.warning("Last fix did not address the issue. 
Giving up.") raise reset_tree(local_tree, local_tree.basis_tree(), subpath=subpath) - if e.context[0] == 'build': + if e.context[0] == "build": context = BuildDependencyContext( - local_tree, subpath=subpath, committer=committer, - update_changelog=update_changelog) - elif e.context[0] == 'autopkgtest': + local_tree, + subpath=subpath, + committer=committer, + update_changelog=update_changelog, + ) + elif e.context[0] == "autopkgtest": context = AutopkgtestDependencyContext( e.context[1], - local_tree, subpath=subpath, committer=committer, - update_changelog=update_changelog) + local_tree, + subpath=subpath, + committer=committer, + update_changelog=update_changelog, + ) else: - logging.warning('unable to install for context %r', e.context) + logging.warning("unable to install for context %r", e.context) raise try: if not resolve_error( - e.error, context, - VERSIONED_PACKAGE_FIXERS + APT_FIXERS): - logging.warning( - 'Failed to resolve error %r. Giving up.', e.error) + e.error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS + ): + logging.warning("Failed to resolve error %r. 
Giving up.", e.error) raise except CircularDependency: logging.warning( - 'Unable to fix %r; it would introduce a circular ' - 'dependency.', e.error) + "Unable to fix %r; it would introduce a circular " "dependency.", + e.error, + ) raise e fixed_errors.append((e.error, e.context)) - if os.path.exists(os.path.join(output_directory, 'build.log')): + if os.path.exists(os.path.join(output_directory, "build.log")): i = 1 while os.path.exists( - os.path.join(output_directory, 'build.log.%d' % i)): + os.path.join(output_directory, "build.log.%d" % i) + ): i += 1 - os.rename(os.path.join(output_directory, 'build.log'), - os.path.join(output_directory, 'build.log.%d' % i)) + os.rename( + os.path.join(output_directory, "build.log"), + os.path.join(output_directory, "build.log.%d" % i), + ) def main(argv=None): import argparse - parser = argparse.ArgumentParser('janitor.fix_build') - parser.add_argument('--suffix', type=str, - help="Suffix to use for test builds.", - default='fixbuild1') - parser.add_argument('--suite', type=str, - help="Suite to target.", - default='unstable') - parser.add_argument('--output-directory', type=str, - help="Output directory.", default=None) - parser.add_argument('--committer', type=str, - help='Committer string (name and email)', - default=None) + + parser = argparse.ArgumentParser("janitor.fix_build") parser.add_argument( - '--build-command', type=str, - help='Build command', - default=(DEFAULT_BUILDER + ' -A -s -v')) + "--suffix", type=str, help="Suffix to use for test builds.", default="fixbuild1" + ) parser.add_argument( - '--no-update-changelog', action="store_false", default=None, - dest="update_changelog", help="do not update the changelog") + "--suite", type=str, help="Suite to target.", default="unstable" + ) parser.add_argument( - '--update-changelog', action="store_true", dest="update_changelog", - help="force updating of the changelog", default=None) + "--output-directory", type=str, help="Output directory.", default=None + ) + 
parser.add_argument( + "--committer", type=str, help="Committer string (name and email)", default=None + ) + parser.add_argument( + "--build-command", + type=str, + help="Build command", + default=(DEFAULT_BUILDER + " -A -s -v"), + ) + parser.add_argument( + "--no-update-changelog", + action="store_false", + default=None, + dest="update_changelog", + help="do not update the changelog", + ) + parser.add_argument( + "--update-changelog", + action="store_true", + dest="update_changelog", + help="force updating of the changelog", + default=None, + ) args = parser.parse_args() from breezy.workingtree import WorkingTree - tree = WorkingTree.open('.') + + tree = WorkingTree.open(".") build_incrementally( - tree, args.suffix, args.suite, args.output_directory, - args.build_command, committer=args.committer, - update_changelog=args.update_changelog) + tree, + args.suffix, + args.suite, + args.output_directory, + args.build_command, + committer=args.committer, + update_changelog=args.update_changelog, + ) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.argv)) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 1448f36..a473c22 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -31,12 +31,8 @@ from breezy.workingtree import WorkingTree from . 
import DetailedFailure from .buildsystem import detect_buildsystems, NoBuildToolsFound from buildlog_consultant.common import ( - find_build_failure_description, - Problem, - MissingPerlModule, - MissingCommand, NoSpaceOnDevice, - ) +) from .session.schroot import SchrootSession from .vcs import dupe_vcs_tree, export_vcs_tree @@ -51,7 +47,7 @@ SUPPORTED_DIST_EXTENSIONS = [ ".tbz2", ".tar", ".zip", - ] +] def is_dist_file(fn): @@ -78,7 +74,6 @@ def run_dist(session): class DistCatcher(object): - def __init__(self, directory): self.export_directory = directory self.files = [] @@ -94,7 +89,7 @@ class DistCatcher(object): diff = set([n for n in diff_files if is_dist_file(n)]) if len(diff) == 1: fn = diff.pop() - logging.info('Found tarball %s in package directory.', fn) + logging.info("Found tarball %s in package directory.", fn) self.files.append(os.path.join(self.export_directory, fn)) return fn if "dist" in diff_files: @@ -103,13 +98,13 @@ class DistCatcher(object): logging.info("Found tarball %s in dist directory.", entry.name) self.files.append(entry.path) return entry.name - logging.info('No tarballs found in dist directory.') + logging.info("No tarballs found in dist directory.") parent_directory = os.path.dirname(self.export_directory) diff = set(os.listdir(parent_directory)) - set([subdir]) if len(diff) == 1: fn = diff.pop() - logging.info('Found tarball %s in parent directory.', fn) + logging.info("Found tarball %s in parent directory.", fn) self.files.append(os.path.join(parent_directory, fn)) return fn @@ -119,25 +114,28 @@ class DistCatcher(object): def create_dist_schroot( - tree: Tree, target_dir: str, - chroot: str, packaging_tree: Optional[Tree] = None, - include_controldir: bool = True, - subdir: Optional[str] = None) -> str: + tree: Tree, + target_dir: str, + chroot: str, + packaging_tree: Optional[Tree] = None, + include_controldir: bool = True, + subdir: Optional[str] = None, +) -> str: if subdir is None: - subdir = 'package' + subdir = "package" 
with SchrootSession(chroot) as session: if packaging_tree is not None: from .debian import satisfy_build_deps + satisfy_build_deps(session, packaging_tree) - build_dir = os.path.join(session.location, 'build') + build_dir = os.path.join(session.location, "build") try: directory = tempfile.mkdtemp(dir=build_dir) except OSError as e: if e.errno == errno.ENOSPC: - raise DetailedFailure( - 1, ['mkdtemp'], NoSpaceOnDevice()) - reldir = '/' + os.path.relpath(directory, session.location) + raise DetailedFailure(1, ["mkdtemp"], NoSpaceOnDevice()) + reldir = "/" + os.path.relpath(directory, session.location) export_directory = os.path.join(directory, subdir) if not include_controldir: @@ -158,11 +156,11 @@ def create_dist_schroot( shutil.copy(path, target_dir) return os.path.join(target_dir, os.path.basename(path)) - logging.info('No tarball created :(') + logging.info("No tarball created :(") raise DistNoTarball() -if __name__ == '__main__': +if __name__ == "__main__": import argparse import breezy.bzr # noqa: F401 import breezy.git # noqa: F401 @@ -170,17 +168,24 @@ if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument( - '--chroot', default='unstable-amd64-sbuild', type=str, - help='Name of chroot to use') + "--chroot", + default="unstable-amd64-sbuild", + type=str, + help="Name of chroot to use", + ) parser.add_argument( - 'directory', default='.', type=str, nargs='?', - help='Directory with upstream source.') + "directory", + default=".", + type=str, + nargs="?", + help="Directory with upstream source.", + ) parser.add_argument( - '--packaging-directory', type=str, - help='Path to packaging directory.') + "--packaging-directory", type=str, help="Path to packaging directory." 
+ ) parser.add_argument( - '--target-directory', type=str, default='..', - help='Target directory') + "--target-directory", type=str, default="..", help="Target directory" + ) args = parser.parse_args() logging.basicConfig(level=logging.INFO) @@ -189,8 +194,8 @@ if __name__ == '__main__': if args.packaging_directory: packaging_tree = WorkingTree.open(args.packaging_directory) with packaging_tree.lock_read(): - source = Deb822(packaging_tree.get_file('debian/control')) - package = source['Source'] + source = Deb822(packaging_tree.get_file("debian/control")) + package = source["Source"] subdir = package else: packaging_tree = None @@ -198,13 +203,15 @@ if __name__ == '__main__': try: ret = create_dist_schroot( - tree, subdir=subdir, + tree, + subdir=subdir, target_dir=os.path.abspath(args.target_directory), packaging_tree=packaging_tree, - chroot=args.chroot) + chroot=args.chroot, + ) except NoBuildToolsFound: - logging.info('No build tools found, falling back to simple export.') - export(tree, 'dist.tar.gz', 'tgz', None) + logging.info("No build tools found, falling back to simple export.") + export(tree, "dist.tar.gz", "tgz", None) else: - print('Created %s' % ret) + print("Created %s" % ret) sys.exit(0) diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index 899c34f..9b02ed6 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -23,7 +23,7 @@ from buildlog_consultant.common import ( Problem, MissingPerlModule, MissingCommand, - ) +) from . 
import DetailedFailure from .apt import UnidentifiedError, AptManager @@ -31,12 +31,11 @@ from .debian.fix_build import ( DependencyContext, resolve_error, APT_FIXERS, - ) +) from .session import Session, run_with_tee class SchrootDependencyContext(DependencyContext): - def __init__(self, session): self.session = session self.apt = AptManager(session) @@ -50,14 +49,14 @@ class SchrootDependencyContext(DependencyContext): def fix_perl_module_from_cpan(error, context): # TODO(jelmer): Specify -T to skip tests? context.session.check_call( - ['cpan', '-i', error.module], user='root', - env={'PERL_MM_USE_DEFAULT': '1'}) + ["cpan", "-i", error.module], user="root", env={"PERL_MM_USE_DEFAULT": "1"} + ) return True NPM_COMMAND_PACKAGES = { - 'del-cli': 'del-cli', - } + "del-cli": "del-cli", +} def fix_npm_missing_command(error, context): @@ -66,19 +65,20 @@ def fix_npm_missing_command(error, context): except KeyError: return False - context.session.check_call(['npm', '-g', 'install', package]) + context.session.check_call(["npm", "-g", "install", package]) return True GENERIC_INSTALL_FIXERS: List[ - Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]] +] = [ (MissingPerlModule, fix_perl_module_from_cpan), (MissingCommand, fix_npm_missing_command), ] def run_with_build_fixer(session: Session, args: List[str]): - logging.info('Running %r', args) + logging.info("Running %r", args) fixed_errors = [] while True: retcode, lines = run_with_tee(session, args) @@ -86,23 +86,22 @@ def run_with_build_fixer(session: Session, args: List[str]): return offset, line, error = find_build_failure_description(lines) if error is None: - logging.warning('Build failed with unidentified error. Giving up.') + logging.warning("Build failed with unidentified error. 
Giving up.") if line is not None: - raise UnidentifiedError( - retcode, args, lines, secondary=(offset, line)) + raise UnidentifiedError(retcode, args, lines, secondary=(offset, line)) raise UnidentifiedError(retcode, args, lines) - logging.info('Identified error: %r', error) + logging.info("Identified error: %r", error) if error in fixed_errors: logging.warning( - 'Failed to resolve error %r, it persisted. Giving up.', - error) + "Failed to resolve error %r, it persisted. Giving up.", error + ) raise DetailedFailure(retcode, args, error) if not resolve_error( - error, SchrootDependencyContext(session), - fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS)): - logging.warning( - 'Failed to find resolution for error %r. Giving up.', - error) + error, + SchrootDependencyContext(session), + fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS), + ): + logging.warning("Failed to find resolution for error %r. Giving up.", error) raise DetailedFailure(retcode, args, error) fixed_errors.append(error) diff --git a/ognibuild/session/__init__.py b/ognibuild/session/__init__.py index f8feaf0..7cdd15f 100644 --- a/ognibuild/session/__init__.py +++ b/ognibuild/session/__init__.py @@ -22,8 +22,7 @@ import subprocess class Session(object): - - def __enter__(self) -> 'Session': + def __enter__(self) -> "Session": return self def __exit__(self, exc_type, exc_val, exc_tb): @@ -37,26 +36,31 @@ class Session(object): raise NotImplementedError def check_call( - self, - argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None, - env: Optional[Dict[str, str]] = None): + self, + argv: List[str], + cwd: Optional[str] = None, + user: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + ): raise NotImplementedError(self.check_call) def check_output( - self, - argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None, - env: Optional[Dict[str, str]] = None) -> bytes: + self, + argv: List[str], + cwd: Optional[str] = None, + user: Optional[str] = None, + env: 
Optional[Dict[str, str]] = None, + ) -> bytes: raise NotImplementedError(self.check_output) - def Popen(self, argv, cwd: Optional[str] = None, - user: Optional[str] = None, **kwargs): + def Popen( + self, argv, cwd: Optional[str] = None, user: Optional[str] = None, **kwargs + ): raise NotImplementedError(self.Popen) def call( - self, argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None): + self, argv: List[str], cwd: Optional[str] = None, user: Optional[str] = None + ): raise NotImplementedError(self.call) def create_home(self) -> None: @@ -76,12 +80,11 @@ class SessionSetupFailure(Exception): def run_with_tee(session: Session, args: List[str], **kwargs): - p = session.Popen( - args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) + p = session.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) contents = [] while p.poll() is None: line = p.stdout.readline() sys.stdout.buffer.write(line) sys.stdout.buffer.flush() - contents.append(line.decode('utf-8', 'surrogateescape')) + contents.append(line.decode("utf-8", "surrogateescape")) return p.returncode, contents diff --git a/ognibuild/session/plain.py b/ognibuild/session/plain.py index 1f71b59..7a1eb6c 100644 --- a/ognibuild/session/plain.py +++ b/ognibuild/session/plain.py @@ -25,7 +25,7 @@ import subprocess class PlainSession(Session): """Session ignoring user.""" - location = '/' + location = "/" def create_home(self): pass @@ -34,8 +34,7 @@ class PlainSession(Session): return subprocess.check_call(args) def Popen(self, args, stdout=None, stderr=None, user=None, cwd=None): - return subprocess.Popen( - args, stdout=stdout, stderr=stderr, cwd=cwd) + return subprocess.Popen(args, stdout=stdout, stderr=stderr, cwd=cwd) def exists(self, path): return os.path.exists(path) diff --git a/ognibuild/session/schroot.py b/ognibuild/session/schroot.py index 4e75a91..ac979b4 100644 --- a/ognibuild/session/schroot.py +++ b/ognibuild/session/schroot.py @@ -34,24 +34,30 @@ class 
SchrootSession(Session): def __init__(self, chroot: str): if not isinstance(chroot, str): - raise TypeError('not a valid chroot: %r' % chroot) + raise TypeError("not a valid chroot: %r" % chroot) self.chroot = chroot self._location = None self._cwd = None def _get_location(self) -> str: - return subprocess.check_output( - ['schroot', '--location', '-c', 'session:' + self.session_id - ]).strip().decode() + return ( + subprocess.check_output( + ["schroot", "--location", "-c", "session:" + self.session_id] + ) + .strip() + .decode() + ) def _end_session(self) -> None: - subprocess.check_output( - ['schroot', '-c', 'session:' + self.session_id, '-e']) + subprocess.check_output(["schroot", "-c", "session:" + self.session_id, "-e"]) - def __enter__(self) -> 'Session': + def __enter__(self) -> "Session": try: - self.session_id = subprocess.check_output( - ['schroot', '-c', self.chroot, '-b']).strip().decode() + self.session_id = ( + subprocess.check_output(["schroot", "-c", self.chroot, "-b"]) + .strip() + .decode() + ) except subprocess.CalledProcessError: # TODO(jelmer): Capture stderr and forward in SessionSetupFailure raise SessionSetupFailure() @@ -70,69 +76,84 @@ class SchrootSession(Session): self._location = self._get_location() return self._location - def _run_argv(self, argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None, - env: Optional[Dict[str, str]] = None): - base_argv = ['schroot', '-r', '-c', 'session:' + self.session_id] + def _run_argv( + self, + argv: List[str], + cwd: Optional[str] = None, + user: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + ): + base_argv = ["schroot", "-r", "-c", "session:" + self.session_id] if cwd is None: cwd = self._cwd if cwd is not None: - base_argv.extend(['-d', cwd]) + base_argv.extend(["-d", cwd]) if user is not None: - base_argv.extend(['-u', user]) + base_argv.extend(["-u", user]) if env: argv = [ - 'sh', '-c', - ' '.join( - ['%s=%s ' % (key, shlex.quote(value)) - for (key, value) 
in env.items()] + - [shlex.quote(arg) for arg in argv])] - return base_argv + ['--'] + argv + "sh", + "-c", + " ".join( + [ + "%s=%s " % (key, shlex.quote(value)) + for (key, value) in env.items() + ] + + [shlex.quote(arg) for arg in argv] + ), + ] + return base_argv + ["--"] + argv def check_call( - self, - argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None, - env: Optional[Dict[str, str]] = None): + self, + argv: List[str], + cwd: Optional[str] = None, + user: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + ): try: subprocess.check_call(self._run_argv(argv, cwd, user, env=env)) except subprocess.CalledProcessError as e: raise subprocess.CalledProcessError(e.returncode, argv) def check_output( - self, - argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None, - env: Optional[Dict[str, str]] = None) -> bytes: + self, + argv: List[str], + cwd: Optional[str] = None, + user: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + ) -> bytes: try: - return subprocess.check_output( - self._run_argv(argv, cwd, user, env=env)) + return subprocess.check_output(self._run_argv(argv, cwd, user, env=env)) except subprocess.CalledProcessError as e: raise subprocess.CalledProcessError(e.returncode, argv) - def Popen(self, argv, cwd: Optional[str] = None, - user: Optional[str] = None, **kwargs): + def Popen( + self, argv, cwd: Optional[str] = None, user: Optional[str] = None, **kwargs + ): return subprocess.Popen(self._run_argv(argv, cwd, user), **kwargs) def call( - self, argv: List[str], cwd: Optional[str] = None, - user: Optional[str] = None): + self, argv: List[str], cwd: Optional[str] = None, user: Optional[str] = None + ): return subprocess.call(self._run_argv(argv, cwd, user)) def create_home(self) -> None: """Create the user's home directory.""" - home = self.check_output( - ['sh', '-c', 'echo $HOME'], cwd='/').decode().rstrip('\n') - user = self.check_output( - ['sh', '-c', 'echo $LOGNAME'], 
cwd='/').decode().rstrip('\n') - logging.info('Creating directory %s', home) - self.check_call(['mkdir', '-p', home], cwd='/', user='root') - self.check_call(['chown', user, home], cwd='/', user='root') + home = ( + self.check_output(["sh", "-c", "echo $HOME"], cwd="/").decode().rstrip("\n") + ) + user = ( + self.check_output(["sh", "-c", "echo $LOGNAME"], cwd="/") + .decode() + .rstrip("\n") + ) + logging.info("Creating directory %s", home) + self.check_call(["mkdir", "-p", home], cwd="/", user="root") + self.check_call(["chown", user, home], cwd="/", user="root") def _fullpath(self, path: str) -> str: - return os.path.join( - self.location, - os.path.join(self._cwd, path).lstrip('/')) + return os.path.join(self.location, os.path.join(self._cwd, path).lstrip("/")) def exists(self, path: str) -> bool: fullpath = self._fullpath(path) diff --git a/ognibuild/tests/__init__.py b/ognibuild/tests/__init__.py index 0072367..42d25c4 100644 --- a/ognibuild/tests/__init__.py +++ b/ognibuild/tests/__init__.py @@ -22,9 +22,9 @@ import unittest def test_suite(): names = [ - 'debian_build', - 'debian_fix_build', + "debian_build", + "debian_fix_build", ] - module_names = ['ognibuild.tests.test_' + name for name in names] + module_names = ["ognibuild.tests.test_" + name for name in names] loader = unittest.TestLoader() return loader.loadTestsFromNames(module_names) diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py index 1203c3d..8280946 100644 --- a/ognibuild/tests/test_debian_build.py +++ b/ognibuild/tests/test_debian_build.py @@ -22,22 +22,35 @@ from breezy.tests import TestCaseWithTransport, TestCase class AddDummyChangelogEntryTests(TestCaseWithTransport): - def test_simple(self): - tree = self.make_branch_and_tree('.') - self.build_tree_contents([('debian/', ), ('debian/changelog', """\ + tree = self.make_branch_and_tree(".") + self.build_tree_contents( + [ + ("debian/",), + ( + "debian/changelog", + """\ janitor (0.1-1) UNRELEASED; 
urgency=medium * Initial release. (Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""")]) - tree.add(['debian', 'debian/changelog']) +""", + ), + ] + ) + tree.add(["debian", "debian/changelog"]) add_dummy_changelog_entry( - tree, '', 'jan+some', 'some-fixes', 'Dummy build.', + tree, + "", + "jan+some", + "some-fixes", + "Dummy build.", timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654), - maintainer=("Jelmer Vernooij", "jelmer@debian.org")) - self.assertFileEqual("""\ + maintainer=("Jelmer Vernooij", "jelmer@debian.org"), + ) + self.assertFileEqual( + """\ janitor (0.1-1jan+some1) some-fixes; urgency=low * Dummy build. @@ -49,23 +62,39 @@ janitor (0.1-1) UNRELEASED; urgency=medium * Initial release. (Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""", 'debian/changelog') +""", + "debian/changelog", + ) def test_native(self): - tree = self.make_branch_and_tree('.') - self.build_tree_contents([('debian/', ), ('debian/changelog', """\ + tree = self.make_branch_and_tree(".") + self.build_tree_contents( + [ + ("debian/",), + ( + "debian/changelog", + """\ janitor (0.1) UNRELEASED; urgency=medium * Initial release. (Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""")]) - tree.add(['debian', 'debian/changelog']) +""", + ), + ] + ) + tree.add(["debian", "debian/changelog"]) add_dummy_changelog_entry( - tree, '', 'jan+some', 'some-fixes', 'Dummy build.', + tree, + "", + "jan+some", + "some-fixes", + "Dummy build.", timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654), - maintainer=("Jelmer Vernooij", "jelmer@debian.org")) - self.assertFileEqual("""\ + maintainer=("Jelmer Vernooij", "jelmer@debian.org"), + ) + self.assertFileEqual( + """\ janitor (0.1jan+some1) some-fixes; urgency=low * Dummy build. @@ -77,23 +106,39 @@ janitor (0.1) UNRELEASED; urgency=medium * Initial release. 
(Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""", 'debian/changelog') +""", + "debian/changelog", + ) def test_exists(self): - tree = self.make_branch_and_tree('.') - self.build_tree_contents([('debian/', ), ('debian/changelog', """\ + tree = self.make_branch_and_tree(".") + self.build_tree_contents( + [ + ("debian/",), + ( + "debian/changelog", + """\ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium * Initial release. (Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""")]) - tree.add(['debian', 'debian/changelog']) +""", + ), + ] + ) + tree.add(["debian", "debian/changelog"]) add_dummy_changelog_entry( - tree, '', 'jan+some', 'some-fixes', 'Dummy build.', + tree, + "", + "jan+some", + "some-fixes", + "Dummy build.", timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654), - maintainer=("Jelmer Vernooij", "jelmer@debian.org")) - self.assertFileEqual("""\ + maintainer=("Jelmer Vernooij", "jelmer@debian.org"), + ) + self.assertFileEqual( + """\ janitor (0.1-1jan+some2) some-fixes; urgency=low * Dummy build. 
@@ -111,6 +156,5 @@ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium class BuildArchitectureTests(TestCase): - def test_is_str(self): self.assertIsInstance(get_build_architecture(), str) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 68e1c63..0e8d716 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -28,23 +28,27 @@ from buildlog_consultant.common import ( MissingRubyFile, MissingRubyGem, MissingValaPackage, - ) +) from ..debian import fix_build from ..debian.fix_build import ( resolve_error, VERSIONED_PACKAGE_FIXERS, APT_FIXERS, BuildDependencyContext, - ) +) from breezy.tests import TestCaseWithTransport class ResolveErrorTests(TestCaseWithTransport): - def setUp(self): super(ResolveErrorTests, self).setUp() - self.tree = self.make_branch_and_tree('.') - self.build_tree_contents([('debian/', ), ('debian/control', """\ + self.tree = self.make_branch_and_tree(".") + self.build_tree_contents( + [ + ("debian/",), + ( + "debian/control", + """\ Source: blah Build-Depends: libc6 @@ -52,16 +56,23 @@ Package: python-blah Depends: ${python3:Depends} Description: A python package Foo -"""), ('debian/changelog', """\ +""", + ), + ( + "debian/changelog", + """\ blah (0.1) UNRELEASED; urgency=medium * Initial release. 
(Closes: #XXXXXX) -- Jelmer Vernooij Sat, 04 Apr 2020 14:12:13 +0000 -""")]) - self.tree.add(['debian', 'debian/control', 'debian/changelog']) - self.tree.commit('Initial commit') - self.overrideAttr(fix_build, 'search_apt_file', self._search_apt_file) +""", + ), + ] + ) + self.tree.add(["debian", "debian/control", "debian/changelog"]) + self.tree.commit("Initial commit") + self.overrideAttr(fix_build, "search_apt_file", self._search_apt_file) self._apt_files = {} def _search_apt_file(self, path, regex=False): @@ -73,129 +84,130 @@ blah (0.1) UNRELEASED; urgency=medium if path == p: yield pkg - def resolve(self, error, context=('build', )): + def resolve(self, error, context=("build",)): context = BuildDependencyContext( - self.tree, subpath='', committer='Janitor ', - update_changelog=True) - return resolve_error( - error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS) + self.tree, + subpath="", + committer="Janitor ", + update_changelog=True, + ) + return resolve_error(error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS) def get_build_deps(self): - with open(self.tree.abspath('debian/control'), 'r') as f: - return next(Deb822.iter_paragraphs(f)).get('Build-Depends', '') + with open(self.tree.abspath("debian/control"), "r") as f: + return next(Deb822.iter_paragraphs(f)).get("Build-Depends", "") def test_missing_command_unknown(self): self._apt_files = {} - self.assertFalse(self.resolve( - MissingCommand('acommandthatdoesnotexist'))) + self.assertFalse(self.resolve(MissingCommand("acommandthatdoesnotexist"))) def test_missing_command_brz(self): self._apt_files = { - '/usr/bin/b': 'bash', - '/usr/bin/brz': 'brz', - '/usr/bin/brzier': 'bash', - } - self.assertTrue(self.resolve(MissingCommand('brz'))) - self.assertEqual('libc6, brz', self.get_build_deps()) - rev = self.tree.branch.repository.get_revision( - self.tree.branch.last_revision()) - self.assertEqual( - 'Add missing build dependency on brz.\n', - rev.message) - 
self.assertFalse(self.resolve(MissingCommand('brz'))) - self.assertEqual('libc6, brz', self.get_build_deps()) + "/usr/bin/b": "bash", + "/usr/bin/brz": "brz", + "/usr/bin/brzier": "bash", + } + self.assertTrue(self.resolve(MissingCommand("brz"))) + self.assertEqual("libc6, brz", self.get_build_deps()) + rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision()) + self.assertEqual("Add missing build dependency on brz.\n", rev.message) + self.assertFalse(self.resolve(MissingCommand("brz"))) + self.assertEqual("libc6, brz", self.get_build_deps()) def test_missing_command_ps(self): self._apt_files = { - '/bin/ps': 'procps', - '/usr/bin/pscal': 'xcal', + "/bin/ps": "procps", + "/usr/bin/pscal": "xcal", } - self.assertTrue(self.resolve(MissingCommand('ps'))) - self.assertEqual('libc6, procps', self.get_build_deps()) + self.assertTrue(self.resolve(MissingCommand("ps"))) + self.assertEqual("libc6, procps", self.get_build_deps()) def test_missing_ruby_file(self): self._apt_files = { - '/usr/lib/ruby/vendor_ruby/rake/testtask.rb': 'rake', - } - self.assertTrue(self.resolve(MissingRubyFile('rake/testtask'))) - self.assertEqual('libc6, rake', self.get_build_deps()) + "/usr/lib/ruby/vendor_ruby/rake/testtask.rb": "rake", + } + self.assertTrue(self.resolve(MissingRubyFile("rake/testtask"))) + self.assertEqual("libc6, rake", self.get_build_deps()) def test_missing_ruby_file_from_gem(self): self._apt_files = { - '/usr/share/rubygems-integration/all/gems/activesupport-' - '5.2.3/lib/active_support/core_ext/string/strip.rb': - 'ruby-activesupport'} - self.assertTrue(self.resolve( - MissingRubyFile('active_support/core_ext/string/strip'))) - self.assertEqual('libc6, ruby-activesupport', self.get_build_deps()) + "/usr/share/rubygems-integration/all/gems/activesupport-" + "5.2.3/lib/active_support/core_ext/string/strip.rb": "ruby-activesupport" + } + self.assertTrue( + self.resolve(MissingRubyFile("active_support/core_ext/string/strip")) + ) + 
self.assertEqual("libc6, ruby-activesupport", self.get_build_deps()) def test_missing_ruby_gem(self): self._apt_files = { - '/usr/share/rubygems-integration/all/specifications/' - 'bio-1.5.2.gemspec': 'ruby-bio', - '/usr/share/rubygems-integration/all/specifications/' - 'bio-2.0.2.gemspec': 'ruby-bio', - } - self.assertTrue(self.resolve(MissingRubyGem('bio', None))) - self.assertEqual('libc6, ruby-bio', self.get_build_deps()) - self.assertTrue(self.resolve(MissingRubyGem('bio', '2.0.3'))) - self.assertEqual('libc6, ruby-bio (>= 2.0.3)', self.get_build_deps()) + "/usr/share/rubygems-integration/all/specifications/" + "bio-1.5.2.gemspec": "ruby-bio", + "/usr/share/rubygems-integration/all/specifications/" + "bio-2.0.2.gemspec": "ruby-bio", + } + self.assertTrue(self.resolve(MissingRubyGem("bio", None))) + self.assertEqual("libc6, ruby-bio", self.get_build_deps()) + self.assertTrue(self.resolve(MissingRubyGem("bio", "2.0.3"))) + self.assertEqual("libc6, ruby-bio (>= 2.0.3)", self.get_build_deps()) def test_missing_perl_module(self): - self._apt_files = { - '/usr/share/perl5/App/cpanminus/fatscript.pm': 'cpanminus'} - self.assertTrue(self.resolve(MissingPerlModule( - 'App/cpanminus/fatscript.pm', 'App::cpanminus::fatscript', [ - '/<>/blib/lib', - '/<>/blib/arch', - '/etc/perl', - '/usr/local/lib/x86_64-linux-gnu/perl/5.30.0', - '/usr/local/share/perl/5.30.0', - '/usr/lib/x86_64-linux-gnu/perl5/5.30', - '/usr/share/perl5', - '/usr/lib/x86_64-linux-gnu/perl/5.30', - '/usr/share/perl/5.30', - '/usr/local/lib/site_perl', - '/usr/lib/x86_64-linux-gnu/perl-base', - '.']))) - self.assertEqual('libc6, cpanminus', self.get_build_deps()) + self._apt_files = {"/usr/share/perl5/App/cpanminus/fatscript.pm": "cpanminus"} + self.assertTrue( + self.resolve( + MissingPerlModule( + "App/cpanminus/fatscript.pm", + "App::cpanminus::fatscript", + [ + "/<>/blib/lib", + "/<>/blib/arch", + "/etc/perl", + "/usr/local/lib/x86_64-linux-gnu/perl/5.30.0", + "/usr/local/share/perl/5.30.0", + 
"/usr/lib/x86_64-linux-gnu/perl5/5.30", + "/usr/share/perl5", + "/usr/lib/x86_64-linux-gnu/perl/5.30", + "/usr/share/perl/5.30", + "/usr/local/lib/site_perl", + "/usr/lib/x86_64-linux-gnu/perl-base", + ".", + ], + ) + ) + ) + self.assertEqual("libc6, cpanminus", self.get_build_deps()) def test_missing_pkg_config(self): self._apt_files = { - '/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc': - 'libxcb-xfixes0-dev'} - self.assertTrue(self.resolve(MissingPkgConfig('xcb-xfixes'))) - self.assertEqual('libc6, libxcb-xfixes0-dev', self.get_build_deps()) + "/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev" + } + self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes"))) + self.assertEqual("libc6, libxcb-xfixes0-dev", self.get_build_deps()) def test_missing_pkg_config_versioned(self): self._apt_files = { - '/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc': - 'libxcb-xfixes0-dev'} - self.assertTrue(self.resolve(MissingPkgConfig('xcb-xfixes', '1.0'))) - self.assertEqual( - 'libc6, libxcb-xfixes0-dev (>= 1.0)', self.get_build_deps()) + "/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev" + } + self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes", "1.0"))) + self.assertEqual("libc6, libxcb-xfixes0-dev (>= 1.0)", self.get_build_deps()) def test_missing_python_module(self): - self._apt_files = { - '/usr/lib/python3/dist-packages/m2r.py': 'python3-m2r' - } - self.assertTrue(self.resolve(MissingPythonModule('m2r'))) - self.assertEqual('libc6, python3-m2r', self.get_build_deps()) + self._apt_files = {"/usr/lib/python3/dist-packages/m2r.py": "python3-m2r"} + self.assertTrue(self.resolve(MissingPythonModule("m2r"))) + self.assertEqual("libc6, python3-m2r", self.get_build_deps()) def test_missing_go_package(self): self._apt_files = { - '/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go': - 'golang-github-chzyer-readline-dev', - } - self.assertTrue(self.resolve( - MissingGoPackage('github.com/chzyer/readline'))) + 
"/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go": "golang-github-chzyer-readline-dev", + } + self.assertTrue(self.resolve(MissingGoPackage("github.com/chzyer/readline"))) self.assertEqual( - 'libc6, golang-github-chzyer-readline-dev', - self.get_build_deps()) + "libc6, golang-github-chzyer-readline-dev", self.get_build_deps() + ) def test_missing_vala_package(self): self._apt_files = { - '/usr/share/vala-0.48/vapi/posix.vapi': 'valac-0.48-vapi', - } - self.assertTrue(self.resolve(MissingValaPackage('posix'))) - self.assertEqual('libc6, valac-0.48-vapi', self.get_build_deps()) + "/usr/share/vala-0.48/vapi/posix.vapi": "valac-0.48-vapi", + } + self.assertTrue(self.resolve(MissingValaPackage("posix"))) + self.assertEqual("libc6, valac-0.48-vapi", self.get_build_deps()) diff --git a/ognibuild/vcs.py b/ognibuild/vcs.py index 23994e5..b9bbf77 100644 --- a/ognibuild/vcs.py +++ b/ognibuild/vcs.py @@ -23,18 +23,17 @@ from breezy.workingtree import WorkingTree from buildlog_consultant.sbuild import ( NoSpaceOnDevice, - ) +) from . 
import DetailedFailure def export_vcs_tree(tree, directory): try: - export(tree, directory, 'dir', None) + export(tree, directory, "dir", None) except OSError as e: if e.errno == errno.ENOSPC: - raise DetailedFailure( - 1, ['export'], NoSpaceOnDevice()) + raise DetailedFailure(1, ["export"], NoSpaceOnDevice()) raise @@ -44,12 +43,11 @@ def dupe_vcs_tree(tree, directory): tree = tree.basis_tree() try: result = tree._repository.controldir.sprout( - directory, create_tree_if_local=True, - revision_id=tree.get_revision_id()) + directory, create_tree_if_local=True, revision_id=tree.get_revision_id() + ) except OSError as e: if e.errno == errno.ENOSPC: - raise DetailedFailure( - 1, ['sprout'], NoSpaceOnDevice()) + raise DetailedFailure(1, ["sprout"], NoSpaceOnDevice()) raise if not result.has_workingtree(): raise AssertionError From b26ba81bf6f0fda0997d6739b8ea038a51b3f07c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 02:24:32 +0000 Subject: [PATCH 34/83] Fix mypy install. --- .github/workflows/pythonpackage.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 8705708..2837b1d 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,12 +20,11 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip flake8 cython mypy + python -m pip install --upgrade pip flake8 cython python setup.py develop - name: Style checks run: | python -m flake8 - python -m mypy - name: Typing checks run: | pip install -U mypy From 7bfa909c0090248ea70b4185288275eb58ddaff7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 02:34:57 +0000 Subject: [PATCH 35/83] Raise error when no cwd is set. 
--- ognibuild/session/schroot.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ognibuild/session/schroot.py b/ognibuild/session/schroot.py index ac979b4..1b1b645 100644 --- a/ognibuild/session/schroot.py +++ b/ognibuild/session/schroot.py @@ -153,6 +153,8 @@ class SchrootSession(Session): self.check_call(["chown", user, home], cwd="/", user="root") def _fullpath(self, path: str) -> str: + if self._cwd is None: + raise ValueError('no cwd set') return os.path.join(self.location, os.path.join(self._cwd, path).lstrip("/")) def exists(self, path: str) -> bool: From efa2bdb894bb00ac593c890456d603a092eb813d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 02:39:31 +0000 Subject: [PATCH 36/83] Avoid apt_pkg. --- ognibuild/apt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ognibuild/apt.py b/ognibuild/apt.py index 3f849c2..3509c73 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -19,7 +19,6 @@ from typing import List -import apt_pkg import os from buildlog_consultant.apt import ( find_apt_get_failure, @@ -64,6 +63,7 @@ class AptManager(object): root = getattr(self.session, "location", "/") status_path = os.path.join(root, "var/lib/dpkg/status") missing = set(packages) + import apt_pkg with apt_pkg.TagFile(status_path) as tagf: while missing: tagf.step() From 0133fb90ec04192bbfa258995de99f0fd53fe3ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 13 Feb 2021 14:53:42 +0000 Subject: [PATCH 37/83] Avoid imorting from silver-platter. 
--- ognibuild/debian/build.py | 7 +++---- ognibuild/debian/fix_build.py | 12 ++++++------ 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index fab4568..2345981 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -35,16 +35,15 @@ from debmutate.changelog import get_maintainer, format_datetime from breezy import osutils from breezy.mutabletree import MutableTree -from silver_platter.debian import ( - BuildFailedError, - DEFAULT_BUILDER, -) +from breezy.plugins.debian.builder import BuildFailedError from buildlog_consultant.sbuild import ( worker_failure_from_sbuild_log, SbuildFailure, ) +DEFAULT_BUILDER = "sbuild --no-clean-source" + class MissingChangesFile(Exception): """Expected changes file was not written.""" diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index cf88224..4233126 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -61,12 +61,13 @@ from debmutate._rules import ( dh_invoke_add_with, update_rules, ) -from silver_platter.debian import ( - debcommit, - DEFAULT_BUILDER, -) -from .build import attempt_build, get_build_architecture +from .build import ( + attempt_build, + get_build_architecture, + DEFAULT_BUILDER, + ) +from breezy.plugins.debian.changelog import debcommit from buildlog_consultant import Problem from buildlog_consultant.common import ( MissingConfigStatusInput, @@ -110,7 +111,6 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) - DEFAULT_MAX_ITERATIONS = 10 From 43cb8ebb1be5f214a55fecb77a0f1bb99fbc0cc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 01:17:03 +0000 Subject: [PATCH 38/83] Install python-debian from git, to fix build on Windows. 
--- .github/workflows/pythonpackage.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 2837b1d..fd830ce 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,7 +20,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip flake8 cython + python -m pip install --upgrade pip flake8 cython \ + git+https://salsa.debian.org/python-debian-team/python-debian python setup.py develop - name: Style checks run: | From ecd4906be1c39d81870b2873eade8ea8fae5547c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 17:40:58 +0000 Subject: [PATCH 39/83] Dro pypi. --- releaser.conf | 1 - 1 file changed, 1 deletion(-) diff --git a/releaser.conf b/releaser.conf index 8b1386f..deea576 100644 --- a/releaser.conf +++ b/releaser.conf @@ -1,5 +1,4 @@ name: "ognibuild" -pypi: "ognibuild" timeout_days: 5 tag_name: "v$VERSION" verify_command: "python3 setup.py test" From 503537327a3d5b5dfd8cf56fcf6c22e7a02402e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 18:54:04 +0000 Subject: [PATCH 40/83] Add missing deps. 
--- .github/workflows/pythonpackage.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index fd830ce..b2ea1cb 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -21,8 +21,12 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip flake8 cython \ - git+https://salsa.debian.org/python-debian-team/python-debian + git+https://salsa.debian.org/python-debian-team/python-debian \ + git+https://salsa.debian.org/jelmer/lintian-brush \ + brz python setup.py develop + mkdir -p ~/.config/breezy/plugins + brz branch lp:brz-debian ~/.config/breezy/plugins/debian - name: Style checks run: | python -m flake8 From bb16d21668bc03f2c2fbcfed44b599ab97d73546 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 19:02:11 +0000 Subject: [PATCH 41/83] breezy, not brz --- .github/workflows/pythonpackage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index b2ea1cb..3a283a3 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -23,7 +23,7 @@ jobs: python -m pip install --upgrade pip flake8 cython \ git+https://salsa.debian.org/python-debian-team/python-debian \ git+https://salsa.debian.org/jelmer/lintian-brush \ - brz + breezy python setup.py develop mkdir -p ~/.config/breezy/plugins brz branch lp:brz-debian ~/.config/breezy/plugins/debian From 3181bcf75679482ff0c7bcb181988a373fbaffe5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 20:03:31 +0000 Subject: [PATCH 42/83] Install distro-info from git. 
--- .github/workflows/pythonpackage.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 3a283a3..81584aa 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -22,6 +22,7 @@ jobs: run: | python -m pip install --upgrade pip flake8 cython \ git+https://salsa.debian.org/python-debian-team/python-debian \ + "https://salsa.debian.org/debian/distro-info#subdirectory=python" \ git+https://salsa.debian.org/jelmer/lintian-brush \ breezy python setup.py develop From ccef935c67163e303ded1b0b8c3e1bcf883c8ec0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 20:05:17 +0000 Subject: [PATCH 43/83] Set egg=. --- .github/workflows/pythonpackage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 81584aa..d7ba1e1 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -22,7 +22,7 @@ jobs: run: | python -m pip install --upgrade pip flake8 cython \ git+https://salsa.debian.org/python-debian-team/python-debian \ - "https://salsa.debian.org/debian/distro-info#subdirectory=python" \ + "git+https://salsa.debian.org/debian/distro-info#subdirectory=python&egg=distro-info" \ git+https://salsa.debian.org/jelmer/lintian-brush \ breezy python setup.py develop From 3751f56628b00587af62a140437ec4b46c5e82af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 20:12:40 +0000 Subject: [PATCH 44/83] Add dependency on testtools. 
--- .github/workflows/pythonpackage.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index d7ba1e1..2a87030 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -24,7 +24,8 @@ jobs: git+https://salsa.debian.org/python-debian-team/python-debian \ "git+https://salsa.debian.org/debian/distro-info#subdirectory=python&egg=distro-info" \ git+https://salsa.debian.org/jelmer/lintian-brush \ - breezy + breezy \ + testtools python setup.py develop mkdir -p ~/.config/breezy/plugins brz branch lp:brz-debian ~/.config/breezy/plugins/debian From 9b211ee38c333e8cecf0d209d1c12b3c508ab77a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 23:15:09 +0000 Subject: [PATCH 45/83] Fix tests when DEBFULLNAME is not me. --- ognibuild/tests/test_debian_fix_build.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 0e8d716..0000b13 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -107,6 +107,8 @@ blah (0.1) UNRELEASED; urgency=medium "/usr/bin/brz": "brz", "/usr/bin/brzier": "bash", } + self.overrideEnv('DEBEMAIL', 'jelmer@debian.org') + self.overrideEnv('DEBFULLNAME', 'Jelmer Vernooij') self.assertTrue(self.resolve(MissingCommand("brz"))) self.assertEqual("libc6, brz", self.get_build_deps()) rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision()) From 0a2cc19f35368f3eb8ab8822cd2e137d67ed2721 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 23:30:17 +0000 Subject: [PATCH 46/83] Skip some tests on non-debian systems. 
--- ognibuild/tests/test_debian_fix_build.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 0000b13..bcb1421 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -42,6 +42,8 @@ from breezy.tests import TestCaseWithTransport class ResolveErrorTests(TestCaseWithTransport): def setUp(self): super(ResolveErrorTests, self).setUp() + if os.path.exists('/usr/bin/dpkg-architecture'): + self.skipTest('not a debian system') self.tree = self.make_branch_and_tree(".") self.build_tree_contents( [ From d34c76972c28b9c780016e7fa4704a124f895bf5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 23:31:58 +0000 Subject: [PATCH 47/83] Drop pypy support for now. --- .github/workflows/pythonpackage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 2a87030..2c21781 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -9,7 +9,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.7, 3.8, pypy3] + python-version: [3.7, 3.8] fail-fast: false steps: From 1aeadfdf5d0cfed23f253f653024ea49942650d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 23:32:38 +0000 Subject: [PATCH 48/83] Drop windows support. 
--- .github/workflows/pythonpackage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 2c21781..864f28f 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -8,7 +8,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, macos-latest] python-version: [3.7, 3.8] fail-fast: false From 17b79eb3264eaf3b3d9ad343aa1a0e76ba678cda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 23:37:44 +0000 Subject: [PATCH 49/83] Missing import. --- ognibuild/tests/test_debian_fix_build.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index bcb1421..7cb3eac 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -15,6 +15,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import os import re from debian.deb822 import Deb822 From 49e6237ef3620f2a8ebc12cb2da4ffe065409b9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 16 Feb 2021 23:46:44 +0000 Subject: [PATCH 50/83] Oops. 
--- ognibuild/tests/test_debian_fix_build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 7cb3eac..ad5c035 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -43,7 +43,7 @@ from breezy.tests import TestCaseWithTransport class ResolveErrorTests(TestCaseWithTransport): def setUp(self): super(ResolveErrorTests, self).setUp() - if os.path.exists('/usr/bin/dpkg-architecture'): + if not os.path.exists('/usr/bin/dpkg-architecture'): self.skipTest('not a debian system') self.tree = self.make_branch_and_tree(".") self.build_tree_contents( From 8669257e13188395e9700ab66ebd6d59512c1dbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 17 Feb 2021 00:06:48 +0000 Subject: [PATCH 51/83] Skip tests on non-debian systems. --- ognibuild/tests/test_debian_build.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py index 8280946..8cacfc9 100644 --- a/ognibuild/tests/test_debian_build.py +++ b/ognibuild/tests/test_debian_build.py @@ -156,5 +156,11 @@ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium class BuildArchitectureTests(TestCase): + + def setUp(self): + super(BuildArchitectureTests, self).setUp() + if not os.path.exists('/usr/bin/dpkg-architecture'): + self.skipTest('not a debian system') + def test_is_str(self): self.assertIsInstance(get_build_architecture(), str) From a411b4dc385fc009e1e18e912ce6f20b3f5fd99d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 17 Feb 2021 02:35:55 +0000 Subject: [PATCH 52/83] add missing import --- ognibuild/tests/test_debian_build.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py index 8cacfc9..274f5f8 100644 --- a/ognibuild/tests/test_debian_build.py +++ 
b/ognibuild/tests/test_debian_build.py @@ -16,6 +16,8 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import datetime +import os + from ..debian.build import add_dummy_changelog_entry, get_build_architecture from breezy.tests import TestCaseWithTransport, TestCase From 95f5bc2a4c21f5e943adecca955c8ccc279dc4a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 22 Feb 2021 17:17:12 +0000 Subject: [PATCH 53/83] Import some fixes from resolver branch. --- .github/workflows/pythonpackage.yml | 8 ++---- AUTHORS | 1 + ROADMAP | 44 +++++++++++++++++++++++++++++ notes/architecture.md | 30 ++++++++++++++++++++ ognibuild/apt.py | 13 +++++---- ognibuild/debian/build.py | 10 +++---- ognibuild/dist.py | 7 +++-- setup.cfg | 1 - 8 files changed, 93 insertions(+), 21 deletions(-) create mode 100644 AUTHORS create mode 100644 ROADMAP create mode 100644 notes/architecture.md diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 864f28f..123fa45 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,12 +20,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip flake8 cython \ - git+https://salsa.debian.org/python-debian-team/python-debian \ - "git+https://salsa.debian.org/debian/distro-info#subdirectory=python&egg=distro-info" \ - git+https://salsa.debian.org/jelmer/lintian-brush \ - breezy \ - testtools + python -m pip install --upgrade pip flake8 cython + python -m pip install git+https://github.com/jelmer/buildlog-consultant python setup.py develop mkdir -p ~/.config/breezy/plugins brz branch lp:brz-debian ~/.config/breezy/plugins/debian diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..b1ac686 --- /dev/null +++ b/AUTHORS @@ -0,0 +1 @@ +Jelmer Vernooij diff --git a/ROADMAP b/ROADMAP new file mode 100644 index 0000000..f48556d --- /dev/null +++ b/ROADMAP 
@@ -0,0 +1,44 @@ +class UpstreamRequirement(object): + + family: str + + +class PythonPackageRequirement(UpstreamRequirement): + + package: str + + +SetupPy.get_build_requirements() yields some PythonPackageRequirement objects + +apt_resolver.install([PythonPackageRequirement(...)]) then: + + * needs to translate to apt package name + + +Once we find errors during build, buildlog consultant extracts them ("MissingPythonPackage", "configure.ac needs updating"). + +fix_build then takes the problem found and converts it to an action: + + * modifying some of the source files + * resolving requirements + +Resolving requirements dependencies means creating e.g. a PythonPackageRequirement() object and feeding it to resolver.install() + +we have specific handlers for each kind of thingy + +resolver.install() needs to translate the upstream information to an apt name or a cpan name or update dependencies or raise an exception or.. + +MissingPythonPackage() -> PythonPackageRequirement() + +PythonPackageRequirement() can either: + + * directly provide apt names, if they are known + * look up apt names + +We specifically want to support multiple resolvers. In some cases a resolver can't deal with a particular kind of requirement. + +Who is responsible for taking a PythonPackageRequirement and translating it to an apt package name? + + 1) PythonPackageRequirement itself? That would mean knowledge about package naming etc, is with the requirement object, which seems wrong. + 2) PythonPackageRequirement.apt_name(apt_archive) - i.e. find the package name given an archive object of some sort + 3) The apt resolver has a list of callbacks to map requirements to apt package names diff --git a/notes/architecture.md b/notes/architecture.md new file mode 100644 index 0000000..960892c --- /dev/null +++ b/notes/architecture.md @@ -0,0 +1,30 @@ +Upstream requirements are expressed as objects derived from UpstreamRequirement. 
+ +They can either be: + + * extracted from the build system + * extracted from errors in build logs + +The details of UpstreamRequirements are specific to the kind of requirement, +and otherwise opaque to ognibuild. + +When building a package, we first make sure that all declared upstream +requirements are met. + +Then we attempt to build. + +If any problems are found in the log, buildlog-consultant will report them. + +ognibuild can then invoke "fixers" to address Problems. + +Problems can be converted to UpstreamRequirements by UpstreamRequirementFixer + +Other Fixer can do things like e.g. upgrade configure.ac to a newer version. + +UpstreamRequirementFixer uses a UpstreamRequirementResolver object that +can translate UpstreamRequirement objects into apt package names or +e.g. cpan commands. + +ognibuild keeps finding problems, resolving them and rebuilding until it finds +a problem it can not resolve or that it thinks it has already resolved +(i.e. seen before). diff --git a/ognibuild/apt.py b/ognibuild/apt.py index 3509c73..8783a49 100644 --- a/ognibuild/apt.py +++ b/ognibuild/apt.py @@ -16,19 +16,22 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - -from typing import List +import logging +import re +from typing import List, Iterator, Optional, Set import os from buildlog_consultant.apt import ( find_apt_get_failure, ) +from debian.deb822 import Release from . 
import DetailedFailure from .session import Session, run_with_tee class UnidentifiedError(Exception): + def __init__(self, retcode, argv, lines, secondary=None): self.retcode = retcode self.argv = argv @@ -42,11 +45,11 @@ def run_apt(session: Session, args: List[str]) -> None: retcode, lines = run_with_tee(session, args, cwd="/", user="root") if retcode == 0: return - offset, line, error = find_apt_get_failure(lines) + match, error = find_apt_get_failure(lines) if error is not None: raise DetailedFailure(retcode, args, error) - if line is not None: - raise UnidentifiedError(retcode, args, lines, secondary=(offset, line)) + if match is not None: + raise UnidentifiedError(retcode, args, lines, secondary=(match.lineno, match.line)) while lines and lines[-1] == "": lines.pop(-1) raise UnidentifiedError(retcode, args, lines) diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index 2345981..122f477 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -61,13 +61,11 @@ def changes_filename(package, version, arch): def get_build_architecture(): try: - return ( - subprocess.check_output(["dpkg-architecture", "-qDEB_BUILD_ARCH"]) - .strip() - .decode() - ) + return subprocess.check_output( + ['dpkg-architecture', '-qDEB_BUILD_ARCH']).strip().decode() except subprocess.CalledProcessError as e: - raise Exception("Could not find the build architecture: %s" % e) + raise Exception( + "Could not find the build architecture: %s" % e) def add_dummy_changelog_entry( diff --git a/ognibuild/dist.py b/ognibuild/dist.py index a473c22..e52dc25 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -28,12 +28,13 @@ from debian.deb822 import Deb822 from breezy.tree import Tree from breezy.workingtree import WorkingTree -from . import DetailedFailure -from .buildsystem import detect_buildsystems, NoBuildToolsFound from buildlog_consultant.common import ( NoSpaceOnDevice, ) + +from . 
import DetailedFailure +from .buildsystem import detect_buildsystems, NoBuildToolsFound from .session.schroot import SchrootSession from .vcs import dupe_vcs_tree, export_vcs_tree @@ -47,7 +48,7 @@ SUPPORTED_DIST_EXTENSIONS = [ ".tbz2", ".tar", ".zip", -] + ] def is_dist_file(fn): diff --git a/setup.cfg b/setup.cfg index fc17caa..99a5d9d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,4 @@ [flake8] -application-package-names = ognibuild banned-modules = silver-platter = Should not use silver-platter [mypy] From 7d996d007b5aa7dda14bc74c4ff25a7411df1ba6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 22 Feb 2021 17:19:29 +0000 Subject: [PATCH 54/83] Use logging. --- ognibuild/__init__.py | 8 -------- ognibuild/__main__.py | 4 ++-- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index f9e1a36..9a0c5a8 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -42,11 +42,3 @@ def shebang_binary(p): if args[0] in (b"/usr/bin/env", b"env"): return os.path.basename(args[1].decode()).strip() return os.path.basename(args[0].decode()).strip() - - -def note(m): - sys.stdout.write("%s\n" % m) - - -def warning(m): - sys.stderr.write("WARNING: %s\n" % m) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index c2b3939..1c82b1e 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -15,9 +15,9 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import logging import os import sys -from . 
import note from .buildsystem import NoBuildToolsFound from .build import run_build from .clean import run_clean @@ -65,7 +65,7 @@ def main(): if args.subcommand == "test": run_test(session) except NoBuildToolsFound: - note("No build tools found.") + logging.info("No build tools found.") return 1 return 0 From 2aab09121d0d67d97f34f1a62bdf583a82ebe37b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 23 Feb 2021 03:22:41 +0000 Subject: [PATCH 55/83] Move code to .apt. --- ognibuild/apt.py | 86 --------- ognibuild/debian/__init__.py | 2 +- ognibuild/debian/apt.py | 225 +++++++++++++++++++++++ ognibuild/debian/fix_build.py | 135 +------------- ognibuild/tests/test_debian_fix_build.py | 4 +- 5 files changed, 229 insertions(+), 223 deletions(-) delete mode 100644 ognibuild/apt.py create mode 100644 ognibuild/debian/apt.py diff --git a/ognibuild/apt.py b/ognibuild/apt.py deleted file mode 100644 index 8783a49..0000000 --- a/ognibuild/apt.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/python -# Copyright (C) 2019-2020 Jelmer Vernooij -# encoding: utf-8 -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - -import logging -import re -from typing import List, Iterator, Optional, Set - -import os -from buildlog_consultant.apt import ( - find_apt_get_failure, -) -from debian.deb822 import Release - -from . import DetailedFailure -from .session import Session, run_with_tee - - -class UnidentifiedError(Exception): - - def __init__(self, retcode, argv, lines, secondary=None): - self.retcode = retcode - self.argv = argv - self.lines = lines - self.secondary = secondary - - -def run_apt(session: Session, args: List[str]) -> None: - """Run apt.""" - args = ["apt", "-y"] + args - retcode, lines = run_with_tee(session, args, cwd="/", user="root") - if retcode == 0: - return - match, error = find_apt_get_failure(lines) - if error is not None: - raise DetailedFailure(retcode, args, error) - if match is not None: - raise UnidentifiedError(retcode, args, lines, secondary=(match.lineno, match.line)) - while lines and lines[-1] == "": - lines.pop(-1) - raise UnidentifiedError(retcode, args, lines) - - -class AptManager(object): - - session: Session - - def __init__(self, session): - self.session = session - - def missing(self, packages): - root = getattr(self.session, "location", "/") - status_path = os.path.join(root, "var/lib/dpkg/status") - missing = set(packages) - import apt_pkg - with apt_pkg.TagFile(status_path) as tagf: - while missing: - tagf.step() - if not tagf.section: - break - if tagf.section["Package"] in missing: - if tagf.section["Status"] == "install ok installed": - missing.remove(tagf.section["Package"]) - return list(missing) - - def install(self, packages: List[str]) -> None: - packages = self.missing(packages) - if packages: - run_apt(self.session, ["install"] + packages) - - def satisfy(self, deps: List[str]) -> None: - run_apt(self.session, 
["satisfy"] + deps) diff --git a/ognibuild/debian/__init__.py b/ognibuild/debian/__init__.py index 449cea0..4578b6d 100644 --- a/ognibuild/debian/__init__.py +++ b/ognibuild/debian/__init__.py @@ -17,7 +17,6 @@ from debian.deb822 import Deb822 -from ..apt import AptManager from ..session import Session @@ -36,5 +35,6 @@ def satisfy_build_deps(session: Session, tree): except KeyError: pass deps = [dep.strip().strip(",") for dep in deps] + from .apt import AptManager apt = AptManager(session) apt.satisfy(deps) diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py new file mode 100644 index 0000000..4f16ef7 --- /dev/null +++ b/ognibuild/debian/apt.py @@ -0,0 +1,225 @@ +#!/usr/bin/python +# Copyright (C) 2019-2020 Jelmer Vernooij +# encoding: utf-8 +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import logging +import re +from typing import List, Iterator, Optional, Set + +import os +from buildlog_consultant.apt import ( + find_apt_get_failure, +) +from debian.deb822 import Release + +from .. 
import DetailedFailure +from ..session import Session, run_with_tee + +from .build import get_build_architecture + + +class UnidentifiedError(Exception): + + def __init__(self, retcode, argv, lines, secondary=None): + self.retcode = retcode + self.argv = argv + self.lines = lines + self.secondary = secondary + + +def run_apt(session: Session, args: List[str]) -> None: + """Run apt.""" + args = ["apt", "-y"] + args + retcode, lines = run_with_tee(session, args, cwd="/", user="root") + if retcode == 0: + return + match, error = find_apt_get_failure(lines) + if error is not None: + raise DetailedFailure(retcode, args, error) + if match is not None: + raise UnidentifiedError(retcode, args, lines, secondary=(match.lineno, match.line)) + while lines and lines[-1] == "": + lines.pop(-1) + raise UnidentifiedError(retcode, args, lines) + + +class AptManager(object): + + session: Session + + def __init__(self, session): + self.session = session + + def missing(self, packages): + root = getattr(self.session, "location", "/") + status_path = os.path.join(root, "var/lib/dpkg/status") + missing = set(packages) + import apt_pkg + with apt_pkg.TagFile(status_path) as tagf: + while missing: + tagf.step() + if not tagf.section: + break + if tagf.section["Package"] in missing: + if tagf.section["Status"] == "install ok installed": + missing.remove(tagf.section["Package"]) + return list(missing) + + def install(self, packages: List[str]) -> None: + packages = self.missing(packages) + if packages: + run_apt(self.session, ["install"] + packages) + + def satisfy(self, deps: List[str]) -> None: + run_apt(self.session, ["satisfy"] + deps) + + +class FileSearcher(object): + def search_files(self, path, regex=False): + raise NotImplementedError(self.search_files) + + +class ContentsFileNotFound(Exception): + """The contents file was not found.""" + + +class AptContentsFileSearcher(FileSearcher): + + _user_agent = 'ognibuild/0.1' + + def __init__(self): + self._db = {} + + @classmethod + def 
from_env(cls): + sources = os.environ["REPOSITORIES"].split(":") + return cls.from_repositories(sources) + + def __setitem__(self, path, package): + self._db[path] = package + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._db.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + def load_file(self, f): + for line in f: + (path, rest) = line.rsplit(maxsplit=1) + package = rest.split(b"/")[-1] + decoded_path = "/" + path.decode("utf-8", "surrogateescape") + self[decoded_path] = package.decode("utf-8") + + @classmethod + def from_urls(cls, urls): + self = cls() + for url in urls: + self.load_url(url) + return self + + @classmethod + def from_repositories(cls, sources): + # TODO(jelmer): Verify signatures, etc. + urls = [] + arches = [get_build_architecture(), "all"] + for source in sources: + parts = source.split(" ") + if parts[0] != "deb": + logging.warning("Invalid line in sources: %r", source) + continue + base_url = parts[1] + name = parts[2] + components = parts[3:] + response = cls._get("%s/%s/Release" % (base_url, name)) + r = Release(response) + desired_files = set() + for component in components: + for arch in arches: + desired_files.add("%s/Contents-%s" % (component, arch)) + for entry in r["MD5Sum"]: + if entry["name"] in desired_files: + urls.append("%s/%s/%s" % (base_url, name, entry["name"])) + return cls.from_urls(urls) + + @classmethod + def _get(cls, url): + from urllib.request import urlopen, Request + + request = Request(url, headers={"User-Agent": cls._user_agent}) + return urlopen(request) + + def load_url(self, url): + from urllib.error import HTTPError + + try: + response = self._get(url) + except HTTPError as e: + if e.status == 404: + raise ContentsFileNotFound(url) + raise + if url.endswith(".gz"): + import gzip + + f = gzip.GzipFile(fileobj=response) + elif response.headers.get_content_type() == "text/plain": + f = response + else: + raise Exception( + "Unknown content 
type %r" % response.headers.get_content_type() + ) + self.load_file(f) + + +class GeneratedFileSearcher(FileSearcher): + def __init__(self, db): + self._db = db + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._db.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + +# TODO(jelmer): read from a file +GENERATED_FILE_SEARCHER = GeneratedFileSearcher( + { + "/etc/locale.gen": "locales", + # Alternative + "/usr/bin/rst2html": "/usr/share/docutils/scripts/python3/rst2html", + } +) + + +_apt_file_searcher = None + + +def search_apt_file(path: str, regex: bool = False) -> Iterator[FileSearcher]: + global _apt_file_searcher + if _apt_file_searcher is None: + # TODO(jelmer): cache file + _apt_file_searcher = AptContentsFileSearcher.from_env() + if _apt_file_searcher: + yield from _apt_file_searcher.search_files(path, regex=regex) + yield from GENERATED_FILE_SEARCHER.search_files(path, regex=regex) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 4233126..a5ef5ae 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -286,141 +286,8 @@ def commit_debian_changes( return True -class FileSearcher(object): - def search_files(self, path, regex=False): - raise NotImplementedError(self.search_files) - - -class ContentsFileNotFound(Exception): - """The contents file was not found.""" - - -class AptContentsFileSearcher(FileSearcher): - def __init__(self): - self._db = {} - - @classmethod - def from_env(cls): - sources = os.environ["REPOSITORIES"].split(":") - return cls.from_repositories(sources) - - def __setitem__(self, path, package): - self._db[path] = package - - def search_files(self, path, regex=False): - for p, pkg in sorted(self._db.items()): - if regex: - if re.match(path, p): - yield pkg - else: - if path == p: - yield pkg - - def load_file(self, f): - for line in f: - (path, rest) = line.rsplit(maxsplit=1) - package = rest.split(b"/")[-1] - 
decoded_path = "/" + path.decode("utf-8", "surrogateescape") - self[decoded_path] = package.decode("utf-8") - - @classmethod - def from_urls(cls, urls): - self = cls() - for url in urls: - self.load_url(url) - return self - - @classmethod - def from_repositories(cls, sources): - # TODO(jelmer): Verify signatures, etc. - urls = [] - arches = [get_build_architecture(), "all"] - for source in sources: - parts = source.split(" ") - if parts[0] != "deb": - logging.warning("Invalid line in sources: %r", source) - continue - base_url = parts[1] - name = parts[2] - components = parts[3:] - response = cls._get("%s/%s/Release" % (base_url, name)) - r = Release(response) - desired_files = set() - for component in components: - for arch in arches: - desired_files.add("%s/Contents-%s" % (component, arch)) - for entry in r["MD5Sum"]: - if entry["name"] in desired_files: - urls.append("%s/%s/%s" % (base_url, name, entry["name"])) - return cls.from_urls(urls) - - @staticmethod - def _get(url): - from urllib.request import urlopen, Request - - request = Request(url, headers={"User-Agent": "Debian Janitor"}) - return urlopen(request) - - def load_url(self, url): - from urllib.error import HTTPError - - try: - response = self._get(url) - except HTTPError as e: - if e.status == 404: - raise ContentsFileNotFound(url) - raise - if url.endswith(".gz"): - import gzip - - f = gzip.GzipFile(fileobj=response) - elif response.headers.get_content_type() == "text/plain": - f = response - else: - raise Exception( - "Unknown content type %r" % response.headers.get_content_type() - ) - self.load_file(f) - - -class GeneratedFileSearcher(FileSearcher): - def __init__(self, db): - self._db = db - - def search_files(self, path, regex=False): - for p, pkg in sorted(self._db.items()): - if regex: - if re.match(path, p): - yield pkg - else: - if path == p: - yield pkg - - -# TODO(jelmer): read from a file -GENERATED_FILE_SEARCHER = GeneratedFileSearcher( - { - "/etc/locale.gen": "locales", - # 
Alternative - "/usr/bin/rst2html": "/usr/share/docutils/scripts/python3/rst2html", - } -) - - -_apt_file_searcher = None - - -def search_apt_file(path: str, regex: bool = False) -> Iterator[FileSearcher]: - global _apt_file_searcher - if _apt_file_searcher is None: - # TODO(jelmer): cache file - _apt_file_searcher = AptContentsFileSearcher.from_env() - if _apt_file_searcher: - yield from _apt_file_searcher.search_files(path, regex=regex) - yield from GENERATED_FILE_SEARCHER.search_files(path, regex=regex) - - def get_package_for_paths(paths, regex=False): + from .apt import search_apt_file candidates = set() for path in paths: candidates.update(search_apt_file(path, regex=regex)) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index ad5c035..d95bbe3 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -30,7 +30,7 @@ from buildlog_consultant.common import ( MissingRubyGem, MissingValaPackage, ) -from ..debian import fix_build +from ..debian import apt from ..debian.fix_build import ( resolve_error, VERSIONED_PACKAGE_FIXERS, @@ -75,7 +75,7 @@ blah (0.1) UNRELEASED; urgency=medium ) self.tree.add(["debian", "debian/control", "debian/changelog"]) self.tree.commit("Initial commit") - self.overrideAttr(fix_build, "search_apt_file", self._search_apt_file) + self.overrideAttr(apt, "search_apt_file", self._search_apt_file) self._apt_files = {} def _search_apt_file(self, path, regex=False): From d89426738fa8e0ad85aa536d5be39a89d1aa4a61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 17:47:39 +0000 Subject: [PATCH 56/83] More work on resolvers. 
--- notes/structure.md | 30 +++ ognibuild/__main__.py | 36 +++--- ognibuild/build.py | 4 +- ognibuild/buildsystem.py | 243 ++++++++++++++++++++------------------- ognibuild/clean.py | 4 +- ognibuild/dist.py | 4 +- ognibuild/install.py | 4 +- ognibuild/resolver.py | 81 +++++++++++++ ognibuild/test.py | 4 +- 9 files changed, 267 insertions(+), 143 deletions(-) create mode 100644 notes/structure.md create mode 100644 ognibuild/resolver.py diff --git a/notes/structure.md b/notes/structure.md new file mode 100644 index 0000000..960892c --- /dev/null +++ b/notes/structure.md @@ -0,0 +1,30 @@ +Upstream requirements are expressed as objects derived from UpstreamRequirement. + +They can either be: + + * extracted from the build system + * extracted from errors in build logs + +The details of UpstreamRequirements are specific to the kind of requirement, +and otherwise opaque to ognibuild. + +When building a package, we first make sure that all declared upstream +requirements are met. + +Then we attempt to build. + +If any problems are found in the log, buildlog-consultant will report them. + +ognibuild can then invoke "fixers" to address Problems. + +Problems can be converted to UpstreamRequirements by UpstreamRequirementFixer. + +Other Fixers can do things like e.g. upgrade configure.ac to a newer version. + +UpstreamRequirementFixer uses an UpstreamRequirementResolver object that +can translate UpstreamRequirement objects into apt package names or +e.g. cpan commands. + +ognibuild keeps finding problems, resolving them and rebuilding until it finds +a problem it cannot resolve or that it thinks it has already resolved +(i.e. seen before). 
diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 1c82b1e..b0a330b 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -38,10 +38,9 @@ def main(): ) parser.add_argument("--schroot", type=str, help="schroot to run in.") parser.add_argument( - "--resolve", - choices=["explain", "apt", "native"], - help="What to do about missing dependencies", - ) + '--resolve', choices=['explain', 'apt', 'native'], + default='apt', + help='What to do about missing dependencies') args = parser.parse_args() if args.schroot: from .session.schroot import SchrootSession @@ -52,18 +51,27 @@ def main(): session = PlainSession() with session: + if args.resolve == 'apt': + from .resolver import AptResolver + resolver = AptResolver.from_session(session) + elif args.resolve == 'explain': + from .resolver import ExplainResolver + resolver = ExplainResolver.from_session(session) + elif args.resolve == 'native': + from .resolver import NativeResolver + resolver = NativeResolver.from_session(session) os.chdir(args.directory) try: - if args.subcommand == "dist": - run_dist(session) - if args.subcommand == "build": - run_build(session) - if args.subcommand == "clean": - run_clean(session) - if args.subcommand == "install": - run_install(session) - if args.subcommand == "test": - run_test(session) + if args.subcommand == 'dist': + run_dist(session=session, resolver=resolver) + if args.subcommand == 'build': + run_build(session, resolver=resolver) + if args.subcommand == 'clean': + run_clean(session, resolver=resolver) + if args.subcommand == 'install': + run_install(session, resolver=resolver) + if args.subcommand == 'test': + run_test(session, resolver=resolver) except NoBuildToolsFound: logging.info("No build tools found.") return 1 diff --git a/ognibuild/build.py b/ognibuild/build.py index b582b11..ea3fe03 100644 --- a/ognibuild/build.py +++ b/ognibuild/build.py @@ -18,13 +18,13 @@ from .buildsystem import detect_buildsystems, NoBuildToolsFound -def run_build(session): 
+def run_build(session, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() for buildsystem in detect_buildsystems(session): - buildsystem.build() + buildsystem.build(resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 998803e..77fc2dd 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -20,8 +20,8 @@ import logging import re -from . import shebang_binary -from .apt import AptManager, UnidentifiedError +from . import shebang_binary, UpstreamPackage +from .apt import UnidentifiedError from .fix_build import run_with_build_fixer @@ -35,105 +35,106 @@ class BuildSystem(object): def __init__(self, session): self.session = session - def dist(self): + def dist(self, resolver): raise NotImplementedError(self.dist) - def test(self): + def test(self, resolver): raise NotImplementedError(self.test) - def build(self): + def build(self, resolver): raise NotImplementedError(self.build) - def clean(self): + def clean(self, resolver): raise NotImplementedError(self.clean) - def install(self): + def install(self, resolver): raise NotImplementedError(self.install) class Pear(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["php-pear"]) - def dist(self): - self.setup() - run_with_build_fixer(self.session, ["pear", "package"]) + def setup(self, resolver): + resolver.install([UpstreamPackage('binary', 'pear')]) - def test(self): + def dist(self, resolver): + self.setup(resolver) + run_with_build_fixer(self.session, ['pear', 'package']) + + def test(self, resolver): self.setup() run_with_build_fixer(self.session, ["pear", "run-tests"]) - def build(self): - self.setup() - run_with_build_fixer(self.session, ["pear", "build"]) + def build(self, resolver): + self.setup(resolver) + run_with_build_fixer(self.session, ['pear', 'build']) - def clean(self): - self.setup() + def clean(self, resolver): + 
self.setup(resolver) # TODO - def install(self): - self.setup() - run_with_build_fixer(self.session, ["pear", "install"]) + def install(self, resolver): + self.setup(resolver) + run_with_build_fixer(self.session, ['pear', 'install']) class SetupPy(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["python3", "python3-pip"]) - with open("setup.py", "r") as f: + + def setup(self, resolver): + resolver.install([ + UpstreamPackage('python3', 'pip'), + UpstreamPackage('binary', 'python3'), + ]) + with open('setup.py', 'r') as f: setup_py_contents = f.read() try: with open("setup.cfg", "r") as f: setup_cfg_contents = f.read() except FileNotFoundError: - setup_cfg_contents = "" - if "setuptools" in setup_py_contents: - logging.info("Reference to setuptools found, installing.") - apt.install(["python3-setuptools"]) - if ( - "setuptools_scm" in setup_py_contents - or "setuptools_scm" in setup_cfg_contents - ): - logging.info("Reference to setuptools-scm found, installing.") - apt.install(["python3-setuptools-scm", "git", "mercurial"]) + setup_cfg_contents = '' + if 'setuptools' in setup_py_contents: + logging.info('Reference to setuptools found, installing.') + resolver.install([UpstreamPackage('python3', 'setuptools')]) + if ('setuptools_scm' in setup_py_contents or + 'setuptools_scm' in setup_cfg_contents): + logging.info('Reference to setuptools-scm found, installing.') + resolver.install([ + UpstreamPackage('python3', 'setuptools-scm'), + UpstreamPackage('binary', 'git'), + UpstreamPackage('binary', 'mercurial'), + ]) # TODO(jelmer): Install setup_requires - def test(self): - self.setup() - self._run_setup(["test"]) + def test(self, resolver): + self.setup(resolver) + self._run_setup(resolver, ['test']) - def dist(self): - self.setup() - self._run_setup(["sdist"]) + def dist(self, resolver): + self.setup(resolver) + self._run_setup(resolver, ['sdist']) - def clean(self): - self.setup() - self._run_setup(["clean"]) + def clean(self, 
resolver): + self.setup(resolver) + self._run_setup(resolver, ['clean']) - def install(self): - self.setup() - self._run_setup(["install"]) + def install(self, resolver): + self.setup(resolver) + self._run_setup(resolver, ['install']) - def _run_setup(self, args): - apt = AptManager(self.session) - interpreter = shebang_binary("setup.py") + def _run_setup(self, resolver, args): + interpreter = shebang_binary('setup.py') if interpreter is not None: - if interpreter == "python3": - apt.install(["python3"]) - elif interpreter == "python2": - apt.install(["python2"]) - elif interpreter == "python": - apt.install(["python"]) + if interpreter in ('python3', 'python2', 'python'): + resolver.install([UpstreamPackage('binary', interpreter)]) else: - raise ValueError("Unknown interpreter %r" % interpreter) - apt.install(["python2", "python3"]) - run_with_build_fixer(self.session, ["./setup.py"] + args) + raise ValueError('Unknown interpreter %r' % interpreter) + run_with_build_fixer( + self.session, ['./setup.py'] + args) else: # Just assume it's Python 3 - apt.install(["python3"]) - run_with_build_fixer(self.session, ["python3", "./setup.py"] + args) + resolver.install([UpstreamPackage('binary', 'python3')]) + run_with_build_fixer( + self.session, ['python3', './setup.py'] + args) class PyProject(BuildSystem): @@ -143,75 +144,79 @@ class PyProject(BuildSystem): with open("pyproject.toml", "r") as pf: return toml.load(pf) - def dist(self): - apt = AptManager(self.session) + def dist(self, resolver): pyproject = self.load_toml() if "poetry" in pyproject.get("tool", []): logging.info( - "Found pyproject.toml with poetry section, " "assuming poetry project." 
- ) - apt.install(["python3-venv", "python3-pip"]) - self.session.check_call(["pip3", "install", "poetry"], user="root") - self.session.check_call(["poetry", "build", "-f", "sdist"]) + 'Found pyproject.toml with poetry section, ' + 'assuming poetry project.') + resolver.install([ + UpstreamPackage('python3', 'venv'), + UpstreamPackage('python3', 'pip'), + ]) + self.session.check_call(['pip3', 'install', 'poetry'], user='root') + self.session.check_call(['poetry', 'build', '-f', 'sdist']) return raise AssertionError("no supported section in pyproject.toml") class SetupCfg(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["python3-pep517", "python3-pip"]) - def dist(self): - self.session.check_call(["python3", "-m", "pep517.build", "-s", "."]) + def setup(self, resolver): + resolver.install([ + UpstreamPackage('python3', 'pep517'), + UpstreamPackage('python3', 'pip'), + ]) + + def dist(self, resolver): + self.setup(resolver) + self.session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) class NpmPackage(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["npm"]) - def dist(self): - self.setup() - run_with_build_fixer(self.session, ["npm", "pack"]) + def setup(self, resolver): + resolver.install([UpstreamPackage('binary', 'npm')]) + + def dist(self, resolver): + self.setup(resolver) + run_with_build_fixer(self.session, ['npm', 'pack']) class Waf(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["python3"]) - def dist(self): - self.setup() - run_with_build_fixer(self.session, ["./waf", "dist"]) + def setup(self, resolver): + resolver.install([UpstreamPackage('binary', 'python3')]) + + def dist(self, resolver): + self.setup(resolver) + run_with_build_fixer(self.session, ['./waf', 'dist']) class Gem(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["gem2deb"]) - def dist(self): - self.setup() - gemfiles = [ - entry.name - for entry in 
self.session.scandir(".") - if entry.name.endswith(".gem") - ] + def setup(self, resolver): + resolver.install([UpstreamPackage('binary', 'gem2deb')]) + + def dist(self, resolver): + self.setup(resolver) + gemfiles = [entry.name for entry in self.session.scandir('.') + if entry.name.endswith('.gem')] if len(gemfiles) > 1: logging.warning("More than one gemfile. Trying the first?") run_with_build_fixer(self.session, ["gem2tgz", gemfiles[0]]) class DistInkt(BuildSystem): - def setup(self): - apt = AptManager(self.session) - apt.install(["libdist-inkt-perl"]) - def dist(self): - self.setup() - apt = AptManager(self.session) - with open("dist.ini", "rb") as f: + def setup(self, resolver): + resolver.install([ + UpstreamPackage('perl', 'Dist::Inkt'), + ]) + + def dist(self, resolver): + self.setup(resolver) + with open('dist.ini', 'rb') as f: for line in f: if not line.startswith(b";;"): continue @@ -230,22 +235,23 @@ class DistInkt(BuildSystem): run_with_build_fixer(self.session, ["distinkt-dist"]) return # Default to invoking Dist::Zilla - logging.info("Found dist.ini, assuming dist-zilla.") - apt.install(["libdist-zilla-perl"]) - run_with_build_fixer(self.session, ["dzil", "build", "--in", ".."]) + logging.info('Found dist.ini, assuming dist-zilla.') + resolver.install([UpstreamPackage('perl', 'Dist::Zilla')]) + run_with_build_fixer(self.session, ['dzil', 'build', '--in', '..']) class Make(BuildSystem): - def setup(self): - apt = AptManager(self.session) - if self.session.exists("Makefile.PL") and not self.session.exists("Makefile"): - apt.install(["perl"]) - run_with_build_fixer(self.session, ["perl", "Makefile.PL"]) - if not self.session.exists("Makefile") and not self.session.exists("configure"): - if self.session.exists("autogen.sh"): - if shebang_binary("autogen.sh") is None: - run_with_build_fixer(self.session, ["/bin/sh", "./autogen.sh"]) + def setup(self, resolver): + if self.session.exists('Makefile.PL') and not self.session.exists('Makefile'): + 
resolver.install([UpstreamPackage('binary', 'perl')]) + run_with_build_fixer(self.session, ['perl', 'Makefile.PL']) + + if not self.session.exists('Makefile') and not self.session.exists('configure'): + if self.session.exists('autogen.sh'): + if shebang_binary('autogen.sh') is None: + run_with_build_fixer( + self.session, ['/bin/sh', './autogen.sh']) try: run_with_build_fixer(self.session, ["./autogen.sh"]) except UnidentifiedError as e: @@ -269,10 +275,9 @@ class Make(BuildSystem): if not self.session.exists("Makefile") and self.session.exists("configure"): self.session.check_call(["./configure"]) - def dist(self): - self.setup() - apt = AptManager(self.session) - apt.install(["make"]) + def dist(self, resolver): + self.setup(resolver) + resolver.install([UpstreamPackage('binary', 'make')]) try: run_with_build_fixer(self.session, ["make", "dist"]) except UnidentifiedError as e: diff --git a/ognibuild/clean.py b/ognibuild/clean.py index 67cf27a..cabf76f 100644 --- a/ognibuild/clean.py +++ b/ognibuild/clean.py @@ -18,13 +18,13 @@ from .buildsystem import detect_buildsystems, NoBuildToolsFound -def run_clean(session): +def run_clean(session, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() for buildsystem in detect_buildsystems(session): - buildsystem.clean() + buildsystem.clean(resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/dist.py b/ognibuild/dist.py index e52dc25..1fa77cd 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -62,13 +62,13 @@ class DistNoTarball(Exception): """Dist operation did not create a tarball.""" -def run_dist(session): +def run_dist(session, resolver): # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() for buildsystem in detect_buildsystems(session): - buildsystem.dist() + buildsystem.dist(resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/install.py b/ognibuild/install.py index b2c3922..5d386c0 100644 --- a/ognibuild/install.py +++ b/ognibuild/install.py @@ -18,13 +18,13 @@ from .buildsystem import detect_buildsystems, NoBuildToolsFound -def run_install(session): +def run_install(session, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() for buildsystem in detect_buildsystems(session): - buildsystem.install() + buildsystem.install(resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/resolver.py b/ognibuild/resolver.py new file mode 100644 index 0000000..288e9d7 --- /dev/null +++ b/ognibuild/resolver.py @@ -0,0 +1,81 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + + +class Resolver(object): + + def install(self, requirements): + raise NotImplementedError(self.install) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + +class AptResolver(Resolver): + + def __init__(self, apt): + self.apt = apt + + @classmethod + def from_session(cls, session): + from .apt import AptManager + return cls(AptManager(session)) + + def install(self, requirements): + self.apt.install(list(self.resolve(requirements))) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + def resolve(self, requirements): + for req in requirements: + if req.family == 'python3': + yield 'python3-%s' % req.name + else: + yield self.apt.find_file('/usr/bin/%s' % req.name) + + +class NativeResolver(Resolver): + + def __init__(self, session): + self.session = session + + @classmethod + def from_session(cls, session): + return cls(session) + + def install(self, requirements): + raise NotImplementedError(self.install) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + +class ExplainResolver(Resolver): + + def __init__(self, session): + self.session = session + + @classmethod + def from_session(cls, session): + return cls(session) + + def install(self, requirements): + raise NotImplementedError(self.install) + + def explain(self, requirements): + raise NotImplementedError(self.explain) diff --git a/ognibuild/test.py b/ognibuild/test.py index eb60e40..8f7ca08 100644 --- a/ognibuild/test.py +++ b/ognibuild/test.py @@ -18,13 +18,13 @@ from .buildsystem import detect_buildsystems, NoBuildToolsFound -def run_test(session): +def run_test(session, resolver): # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() for buildsystem in detect_buildsystems(session): - buildsystem.test() + buildsystem.test(resolver) return raise NoBuildToolsFound() From 713a624b0038f8bc67457d44bdfd6564a1f9b077 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 8 Feb 2021 20:48:23 +0000 Subject: [PATCH 57/83] More work on resolvers. --- ognibuild/__main__.py | 44 ++++++++++++++++++++++++++++++++++++++++--- ognibuild/resolver.py | 39 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 78 insertions(+), 5 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index b0a330b..7dff0e3 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -18,14 +18,30 @@ import logging import os import sys +from . import UpstreamPackage from .buildsystem import NoBuildToolsFound from .build import run_build from .clean import run_clean from .dist import run_dist from .install import run_install +from .resolver import ( + AptResolver, + ExplainResolver, + AutoResolver, + NativeResolver, + MissingDependencies, + ) from .test import run_test +def install_declared_requirements(resolver, requirements, subcommand): + missing = [] + for req in requirements: + # TODO(jelmer): Look at stage + missing.append(UpstreamPackage(req.package.family, req.package.name)) + resolver.install(missing) + + def main(): import argparse @@ -41,6 +57,10 @@ def main(): '--resolve', choices=['explain', 'apt', 'native'], default='apt', help='What to do about missing dependencies') + parser.add_argument( + '--ignore-declared-dependencies', + action='store_true', + help='Ignore declared dependencies, follow build errors only') args = parser.parse_args() if args.schroot: from .session.schroot import SchrootSession @@ -52,16 +72,25 @@ def main(): session = PlainSession() with session: if args.resolve == 'apt': - from .resolver import AptResolver resolver = AptResolver.from_session(session) elif args.resolve == 'explain': - from .resolver import 
ExplainResolver resolver = ExplainResolver.from_session(session) elif args.resolve == 'native': - from .resolver import NativeResolver resolver = NativeResolver.from_session(session) + elif args.resolver == 'auto': + resolver = AutoResolver.from_session(session) os.chdir(args.directory) try: + if not args.ignore_declared_dependencies: + from upstream_ontologist.guess import get_upstream_info + buildsystem, requirements, metadata = get_upstream_info( + path=args.directory, + trust_package=True, + net_access=True, + consult_external_directory=True, + check=True) + install_declared_requirements( + resolver, requirements, args.subcommand) if args.subcommand == 'dist': run_dist(session=session, resolver=resolver) if args.subcommand == 'build': @@ -75,6 +104,15 @@ def main(): except NoBuildToolsFound: logging.info("No build tools found.") return 1 + except MissingDependencies as e: + for req in e.requirements: + note('Missing dependency (%s:%s)' % ( + req.family, req.name)) + for resolver in [ + AptResolver.from_session(session), + NativeResolver.from_session(session)]: + note(' %s' % (resolver.explain([req]), )) + return 2 return 0 diff --git a/ognibuild/resolver.py b/ognibuild/resolver.py index 288e9d7..dffb263 100644 --- a/ognibuild/resolver.py +++ b/ognibuild/resolver.py @@ -16,6 +16,12 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +class MissingDependencies(Exception): + + def __init__(self, reqs): + self.requirements = reqs + + class Resolver(object): def install(self, requirements): @@ -36,17 +42,31 @@ class AptResolver(Resolver): return cls(AptManager(session)) def install(self, requirements): - self.apt.install(list(self.resolve(requirements))) + missing = [] + for req in requirements: + pps = list(self._possible_paths(req)) + if (not pps or + not any(self.apt.session.exists(p) for p in pps)): + missing.append(req) + if missing: + self.apt.install(list(self.resolve(missing))) def explain(self, requirements): raise 
NotImplementedError(self.explain) + def _possible_paths(self, req): + if req.family == 'binary': + yield '/usr/bin/%s' % req.name + else: + return + def resolve(self, requirements): for req in requirements: if req.family == 'python3': yield 'python3-%s' % req.name else: - yield self.apt.find_file('/usr/bin/%s' % req.name) + path = list(self._possible_paths(req)) + raise NotImplementedError class NativeResolver(Resolver): @@ -67,6 +87,21 @@ class NativeResolver(Resolver): class ExplainResolver(Resolver): + def __init__(self, session): + self.session = session + + @classmethod + def from_session(cls, session): + return cls(session) + + def install(self, requirements): + raise MissingDependencies(requirements) + + +class AutoResolver(Resolver): + """Automatically find out the most appropriate way to instal dependencies. + """ + def __init__(self, session): self.session = session From fa222a6ae4a934dc026687c149e4415ac20e56bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 9 Feb 2021 02:11:02 +0000 Subject: [PATCH 58/83] More work on factoring out resolver. 
--- ognibuild/__init__.py | 14 ++ ognibuild/__main__.py | 50 ++-- ognibuild/build.py | 8 +- ognibuild/buildsystem.py | 503 +++++++++++++++++++++++++-------------- ognibuild/clean.py | 8 +- ognibuild/dist.py | 26 +- ognibuild/install.py | 8 +- ognibuild/test.py | 8 +- 8 files changed, 399 insertions(+), 226 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 9a0c5a8..45b2b82 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -42,3 +42,17 @@ def shebang_binary(p): if args[0] in (b"/usr/bin/env", b"env"): return os.path.basename(args[1].decode()).strip() return os.path.basename(args[0].decode()).strip() + + +class UpstreamRequirement(object): + + def __init__(self, family, name): + self.family = family + self.name = name + + +class UpstreamOutput(object): + + def __init__(self, family, name): + self.family = family + self.name = name diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 7dff0e3..71e3f63 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -34,14 +34,31 @@ from .resolver import ( from .test import run_test -def install_declared_requirements(resolver, requirements, subcommand): +def get_necessary_declared_requirements(resolver, requirements, stages): missing = [] - for req in requirements: - # TODO(jelmer): Look at stage - missing.append(UpstreamPackage(req.package.family, req.package.name)) + for stage, req in requirements: + if stage in stages: + missing.append(req) + return missing + + +def install_necessary_declared_requirements(resolver, buildsystem, stages): + missing = [] + missing.extend(get_necessary_declared_requirements( + resolver, buildsystem.get_declared_dependencies(), + stages)) resolver.install(missing) +STAGE_MAP = { + 'dist': [], + 'install': ['build'], + 'test': ['test', 'dev'], + 'build': ['build'], + 'clean': [] +} + + def main(): import argparse @@ -81,26 +98,23 @@ def main(): resolver = AutoResolver.from_session(session) os.chdir(args.directory) try: + bss = 
list(detect_buildsystems(args.directory)) if not args.ignore_declared_dependencies: - from upstream_ontologist.guess import get_upstream_info - buildsystem, requirements, metadata = get_upstream_info( - path=args.directory, - trust_package=True, - net_access=True, - consult_external_directory=True, - check=True) - install_declared_requirements( - resolver, requirements, args.subcommand) + stages = STAGE_MAP[args.subcommand] + if stages: + for bs in bss: + install_necessary_declared_requirements( + resolver, bs, stages) if args.subcommand == 'dist': - run_dist(session=session, resolver=resolver) + run_dist(session=session, buildsystems=bss, resolver=resolver) if args.subcommand == 'build': - run_build(session, resolver=resolver) + run_build(session, buildsystems=bss, resolver=resolver) if args.subcommand == 'clean': - run_clean(session, resolver=resolver) + run_clean(session, buildsystems=bss, resolver=resolver) if args.subcommand == 'install': - run_install(session, resolver=resolver) + run_install(session, buildsystems=bss, resolver=resolver) if args.subcommand == 'test': - run_test(session, resolver=resolver) + run_test(session, buildsystems=bss, resolver=resolver) except NoBuildToolsFound: logging.info("No build tools found.") return 1 diff --git a/ognibuild/build.py b/ognibuild/build.py index ea3fe03..b58db3a 100644 --- a/ognibuild/build.py +++ b/ognibuild/build.py @@ -15,16 +15,16 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .buildsystem import detect_buildsystems, NoBuildToolsFound +from .buildsystem import NoBuildToolsFound -def run_build(session, resolver): +def run_build(session, buildsystems, resolver): # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() - for buildsystem in detect_buildsystems(session): - buildsystem.build(resolver) + for buildsystem in buildsystems: + buildsystem.build(session, resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 77fc2dd..fa070e3 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -18,9 +18,11 @@ import logging +import os import re +import warnings -from . import shebang_binary, UpstreamPackage +from . import shebang_binary, UpstreamRequirement, UpstreamOutput from .apt import UnidentifiedError from .fix_build import run_with_build_fixer @@ -32,57 +34,73 @@ class NoBuildToolsFound(Exception): class BuildSystem(object): """A particular buildsystem.""" - def __init__(self, session): - self.session = session + name: str - def dist(self, resolver): + def dist(self, session, resolver): raise NotImplementedError(self.dist) - def test(self, resolver): + def test(self, session, resolver): raise NotImplementedError(self.test) - def build(self, resolver): + def build(self, session, resolver): raise NotImplementedError(self.build) - def clean(self, resolver): + def clean(self, session, resolver): raise NotImplementedError(self.clean) - def install(self, resolver): + def install(self, session, resolver): raise NotImplementedError(self.install) + def get_declared_dependencies(self): + raise NotImplementedError(self.get_declared_dependencies) + + def get_declared_outputs(self): + raise NotImplementedError(self.get_declared_outputs) + class Pear(BuildSystem): + name = 'pear' + + def __init__(self, path): + self.path = path + def setup(self, resolver): - resolver.install([UpstreamPackage('binary', 'pear')]) + resolver.install([UpstreamRequirement('binary', 'pear')]) - def dist(self, resolver): + def dist(self, session, resolver): self.setup(resolver) - run_with_build_fixer(self.session, ['pear', 'package']) + run_with_build_fixer(session, ['pear', 'package']) - def 
test(self, resolver): - self.setup() - run_with_build_fixer(self.session, ["pear", "run-tests"]) - - def build(self, resolver): + def test(self, session, resolver): self.setup(resolver) - run_with_build_fixer(self.session, ['pear', 'build']) + run_with_build_fixer(session, ['pear', 'run-tests']) - def clean(self, resolver): + def build(self, session, resolver): + self.setup(resolver) + run_with_build_fixer(session, ['pear', 'build']) + + def clean(self, session, resolver): self.setup(resolver) # TODO - def install(self, resolver): + def install(self, session, resolver): self.setup(resolver) - run_with_build_fixer(self.session, ['pear', 'install']) + run_with_build_fixer(session, ['pear', 'install']) class SetupPy(BuildSystem): + name = 'setup.py' + + def __init__(self, path): + from distutils.core import run_setup + self.result = run_setup(os.path.abspath(path), stop_after="init") + def setup(self, resolver): resolver.install([ - UpstreamPackage('python3', 'pip'), - UpstreamPackage('binary', 'python3'), + UpstreamRequirement('python3', 'pip'), + UpstreamRequirement('binary', 'python3'), ]) with open('setup.py', 'r') as f: setup_py_contents = f.read() @@ -93,129 +111,175 @@ class SetupPy(BuildSystem): setup_cfg_contents = '' if 'setuptools' in setup_py_contents: logging.info('Reference to setuptools found, installing.') - resolver.install([UpstreamPackage('python3', 'setuptools')]) + resolver.install([UpstreamRequirement('python3', 'setuptools')]) if ('setuptools_scm' in setup_py_contents or 'setuptools_scm' in setup_cfg_contents): logging.info('Reference to setuptools-scm found, installing.') resolver.install([ - UpstreamPackage('python3', 'setuptools-scm'), - UpstreamPackage('binary', 'git'), - UpstreamPackage('binary', 'mercurial'), + UpstreamRequirement('python3', 'setuptools-scm'), + UpstreamRequirement('binary', 'git'), + UpstreamRequirement('binary', 'mercurial'), ]) # TODO(jelmer): Install setup_requires - def test(self, resolver): + def test(self, session, 
resolver): self.setup(resolver) - self._run_setup(resolver, ['test']) + self._run_setup(session, resolver, ['test']) - def dist(self, resolver): + def dist(self, session, resolver): self.setup(resolver) - self._run_setup(resolver, ['sdist']) + self._run_setup(session, resolver, ['sdist']) - def clean(self, resolver): + def clean(self, session, resolver): self.setup(resolver) - self._run_setup(resolver, ['clean']) + self._run_setup(session, resolver, ['clean']) - def install(self, resolver): + def install(self, session, resolver): self.setup(resolver) - self._run_setup(resolver, ['install']) + self._run_setup(session, resolver, ['install']) - def _run_setup(self, resolver, args): + def _run_setup(self, session, resolver, args): interpreter = shebang_binary('setup.py') if interpreter is not None: if interpreter in ('python3', 'python2', 'python'): - resolver.install([UpstreamPackage('binary', interpreter)]) + resolver.install([UpstreamRequirement('binary', interpreter)]) else: raise ValueError('Unknown interpreter %r' % interpreter) run_with_build_fixer( - self.session, ['./setup.py'] + args) + session, ['./setup.py'] + args) else: # Just assume it's Python 3 - resolver.install([UpstreamPackage('binary', 'python3')]) + resolver.install([UpstreamRequirement('binary', 'python3')]) run_with_build_fixer( - self.session, ['python3', './setup.py'] + args) + session, ['python3', './setup.py'] + args) + + def get_declared_dependencies(self): + for require in self.result.get_requires(): + yield 'build', UpstreamRequirement('python3', require) + for require in self.result.install_requires: + yield 'install', UpstreamRequirement('python3', require) + for require in self.result.tests_require: + yield 'test', UpstreamRequirement('python3', require) + + def get_declared_outputs(self): + for script in (self.result.scripts or []): + yield UpstreamOutput('binary', os.path.basename(script)) + entry_points = self.result.entry_points or {} + for script in 
entry_points.get('console_scripts', []): + yield UpstreamOutput('binary', script.split('=')[0]) + for package in self.result.packages or []: + yield UpstreamOutput('python3', package) class PyProject(BuildSystem): + + name = 'pyproject' + def load_toml(self): import toml with open("pyproject.toml", "r") as pf: return toml.load(pf) - def dist(self, resolver): + def dist(self, session, resolver): pyproject = self.load_toml() if "poetry" in pyproject.get("tool", []): logging.info( 'Found pyproject.toml with poetry section, ' 'assuming poetry project.') resolver.install([ - UpstreamPackage('python3', 'venv'), - UpstreamPackage('python3', 'pip'), + UpstreamRequirement('python3', 'venv'), + UpstreamRequirement('python3', 'pip'), ]) - self.session.check_call(['pip3', 'install', 'poetry'], user='root') - self.session.check_call(['poetry', 'build', '-f', 'sdist']) + session.check_call(['pip3', 'install', 'poetry'], user='root') + session.check_call(['poetry', 'build', '-f', 'sdist']) return raise AssertionError("no supported section in pyproject.toml") class SetupCfg(BuildSystem): + name = 'setup.cfg' + + def __init__(self, path): + self.path = path + def setup(self, resolver): resolver.install([ - UpstreamPackage('python3', 'pep517'), - UpstreamPackage('python3', 'pip'), + UpstreamRequirement('python3', 'pep517'), + UpstreamRequirement('python3', 'pip'), ]) - def dist(self, resolver): + def dist(self, session, resolver): self.setup(resolver) - self.session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) + session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) -class NpmPackage(BuildSystem): +class Npm(BuildSystem): + + name = 'npm' + + def __init__(self, path): + import json + with open(path, 'r') as f: + self.package = json.load(f) + + def get_declared_dependencies(self): + if 'devDependencies' in self.package: + for name, unused_version in ( + self.package['devDependencies'].items()): + # TODO(jelmer): Look at version + yield 'dev', 
UpstreamRequirement('npm', name) def setup(self, resolver): - resolver.install([UpstreamPackage('binary', 'npm')]) + resolver.install([UpstreamRequirement('binary', 'npm')]) - def dist(self, resolver): + def dist(self, session, resolver): self.setup(resolver) - run_with_build_fixer(self.session, ['npm', 'pack']) + run_with_build_fixer(session, ['npm', 'pack']) class Waf(BuildSystem): - def setup(self, resolver): - resolver.install([UpstreamPackage('binary', 'python3')]) + name = 'waf' - def dist(self, resolver): + def __init__(self, path): + self.path = path + + def setup(self, resolver): + resolver.install([UpstreamRequirement('binary', 'python3')]) + + def dist(self, session, resolver): self.setup(resolver) - run_with_build_fixer(self.session, ['./waf', 'dist']) + run_with_build_fixer(session, ['./waf', 'dist']) class Gem(BuildSystem): - def setup(self, resolver): - resolver.install([UpstreamPackage('binary', 'gem2deb')]) + name = 'gem' - def dist(self, resolver): + def __init__(self, path): + self.path = path + + def setup(self, resolver): + resolver.install([UpstreamRequirement('binary', 'gem2deb')]) + + def dist(self, session, resolver): self.setup(resolver) - gemfiles = [entry.name for entry in self.session.scandir('.') + gemfiles = [entry.name for entry in session.scandir('.') if entry.name.endswith('.gem')] if len(gemfiles) > 1: - logging.warning("More than one gemfile. Trying the first?") - run_with_build_fixer(self.session, ["gem2tgz", gemfiles[0]]) + logging.warning('More than one gemfile. 
Trying the first?') + run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) class DistInkt(BuildSystem): - def setup(self, resolver): - resolver.install([ - UpstreamPackage('perl', 'Dist::Inkt'), - ]) - - def dist(self, resolver): - self.setup(resolver) + def __init__(self, path): + self.path = path + self.name = 'dist-zilla' + self.dist_inkt_class = None with open('dist.ini', 'rb') as f: for line in f: if not line.startswith(b";;"): @@ -226,155 +290,234 @@ class DistInkt(BuildSystem): continue if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"): logging.info( - "Found Dist::Inkt section in dist.ini, " "assuming distinkt." - ) - # TODO(jelmer): install via apt if possible - self.session.check_call( - ["cpan", "install", value.decode().strip("'")], user="root" - ) - run_with_build_fixer(self.session, ["distinkt-dist"]) + 'Found Dist::Inkt section in dist.ini, ' + 'assuming distinkt.') + self.name = 'dist-inkt' + self.dist_inkt_class = value.decode().strip("'") return - # Default to invoking Dist::Zilla logging.info('Found dist.ini, assuming dist-zilla.') - resolver.install([UpstreamPackage('perl', 'Dist::Zilla')]) - run_with_build_fixer(self.session, ['dzil', 'build', '--in', '..']) + + def setup(self, resolver): + resolver.install([ + UpstreamRequirement('perl', 'Dist::Inkt'), + ]) + + def dist(self, session, resolver): + self.setup(resolver) + if self.name == 'dist-inkt': + resolver.install([ + UpstreamRequirement('perl-module', self.dist_inkt_class)]) + run_with_build_fixer(session, ['distinkt-dist']) + else: + # Default to invoking Dist::Zilla + resolver.install([UpstreamRequirement('perl', 'Dist::Zilla')]) + run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) class Make(BuildSystem): - def setup(self, resolver): - if self.session.exists('Makefile.PL') and not self.session.exists('Makefile'): - resolver.install([UpstreamPackage('binary', 'perl')]) - run_with_build_fixer(self.session, ['perl', 'Makefile.PL']) + name = 'make' - if 
not self.session.exists('Makefile') and not self.session.exists('configure'): - if self.session.exists('autogen.sh'): + def setup(self, session, resolver): + if session.exists('Makefile.PL') and not session.exists('Makefile'): + resolver.install([UpstreamRequirement('binary', 'perl')]) + run_with_build_fixer(session, ['perl', 'Makefile.PL']) + + if not session.exists('Makefile') and not session.exists('configure'): + if session.exists('autogen.sh'): if shebang_binary('autogen.sh') is None: run_with_build_fixer( - self.session, ['/bin/sh', './autogen.sh']) + session, ['/bin/sh', './autogen.sh']) try: - run_with_build_fixer(self.session, ["./autogen.sh"]) + run_with_build_fixer( + session, ['./autogen.sh']) except UnidentifiedError as e: - if ( - "Gnulib not yet bootstrapped; " - "run ./bootstrap instead.\n" in e.lines - ): - run_with_build_fixer(self.session, ["./bootstrap"]) - run_with_build_fixer(self.session, ["./autogen.sh"]) + if ("Gnulib not yet bootstrapped; " + "run ./bootstrap instead.\n" in e.lines): + run_with_build_fixer(session, ["./bootstrap"]) + run_with_build_fixer(session, ['./autogen.sh']) else: raise - elif self.session.exists("configure.ac") or self.session.exists( - "configure.in" - ): - apt.install( - ["autoconf", "automake", "gettext", "libtool", "gnu-standards"] - ) - run_with_build_fixer(self.session, ["autoreconf", "-i"]) + elif (session.exists('configure.ac') or + session.exists('configure.in')): + resolver.install([ + UpstreamRequirement('binary', 'autoconf'), + UpstreamRequirement('binary', 'automake'), + UpstreamRequirement('binary', 'gettextize'), + UpstreamRequirement('binary', 'libtoolize'), + ]) + run_with_build_fixer(session, ['autoreconf', '-i']) - if not self.session.exists("Makefile") and self.session.exists("configure"): - self.session.check_call(["./configure"]) + if not session.exists('Makefile') and session.exists('configure'): + session.check_call(['./configure']) - def dist(self, resolver): - self.setup(resolver) - 
resolver.install([UpstreamPackage('binary', 'make')]) + def dist(self, session, resolver): + self.setup(session, resolver) + resolver.install([UpstreamRequirement('binary', 'make')]) try: - run_with_build_fixer(self.session, ["make", "dist"]) + run_with_build_fixer(session, ['make', 'dist']) except UnidentifiedError as e: if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass elif "make[1]: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass + elif ("Reconfigure the source tree " + "(via './config' or 'perl Configure'), please.\n" + ) in e.lines: + run_with_build_fixer(session, ['./config']) + run_with_build_fixer(session, ['make', 'dist']) elif ( - "Reconfigure the source tree " - "(via './config' or 'perl Configure'), please.\n" - ) in e.lines: - run_with_build_fixer(self.session, ["./config"]) - run_with_build_fixer(self.session, ["make", "dist"]) - elif ( - "Please try running 'make manifest' and then run " - "'make dist' again.\n" in e.lines - ): - run_with_build_fixer(self.session, ["make", "manifest"]) - run_with_build_fixer(self.session, ["make", "dist"]) + "Please try running 'make manifest' and then run " + "'make dist' again.\n" in e.lines): + run_with_build_fixer(session, ['make', 'manifest']) + run_with_build_fixer(session, ['make', 'dist']) elif "Please run ./configure first\n" in e.lines: - run_with_build_fixer(self.session, ["./configure"]) - run_with_build_fixer(self.session, ["make", "dist"]) - elif any( - [ - re.match( - r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' " - r"Run \'./configure \[options\]\' and retry. 
Stop.\n", - line, - ) - for line in e.lines - ] - ): - run_with_build_fixer(self.session, ["./configure"]) - run_with_build_fixer(self.session, ["make", "dist"]) - elif any( - [ - re.match( - r"Problem opening MANIFEST: No such file or directory " - r"at .* line [0-9]+\.", - line, - ) - for line in e.lines - ] - ): - run_with_build_fixer(self.session, ["make", "manifest"]) - run_with_build_fixer(self.session, ["make", "dist"]) + run_with_build_fixer(session, ['./configure']) + run_with_build_fixer(session, ['make', 'dist']) + elif any([re.match( + r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' + r'Run \'./configure \[options\]\' and retry. Stop.\n', + line) for line in e.lines]): + run_with_build_fixer(session, ['./configure']) + run_with_build_fixer(session, ['make', 'dist']) + elif any([re.match( + r'Problem opening MANIFEST: No such file or directory ' + r'at .* line [0-9]+\.', line) for line in e.lines]): + run_with_build_fixer(session, ['make', 'manifest']) + run_with_build_fixer(session, ['make', 'dist']) else: raise else: return + def get_declared_dependencies(self): + # TODO(jelmer): Split out the perl-specific stuff? + if os.path.exists('META.yml'): + # See http://module-build.sourceforge.net/META-spec-v1.4.html for + # the specification of the format. 
+ import ruamel.yaml + import ruamel.yaml.reader + with open('META.yml', 'rb') as f: + try: + data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader) + except ruamel.yaml.reader.ReaderError as e: + warnings.warn('Unable to parse META.yml: %s' % e) + return + for require in data.get('requires', []): + yield 'build', UpstreamRequirement('perl', require) -def detect_buildsystems(session): + +class Cargo(BuildSystem): + + name = 'cargo' + + def __init__(self, path): + from toml.decoder import load, TomlDecodeError + with open(path, 'r') as f: + self.cargo = load(f) + + def get_declared_dependencies(self): + if 'dependencies' in self.cargo: + for name, details in self.cargo['dependencies'].items(): + # TODO(jelmer): Look at details['features'], details['version'] + yield 'build', UpstreamRequirement('cargo-crate', name) + + +class Golang(BuildSystem): + """Go builds.""" + + name = 'golang' + + +class Maven(BuildSystem): + + name = 'maven' + + def __init__(self, path): + self.path = path + + +class Cabal(BuildSystem): + + name = 'cabal' + + def __init__(self, path): + self.path = path + + +def detect_buildsystems(path): """Detect build systems.""" - if session.exists("package.xml"): - logging.info("Found package.xml, assuming pear package.") - yield Pear(session) + if os.path.exists(os.path.join(path, 'package.xml')): + logging.info('Found package.xml, assuming pear package.') + yield Pear('package.xml') - if session.exists("setup.py"): - logging.info("Found setup.py, assuming python project.") - yield SetupPy(session) + if os.path.exists(os.path.join(path, 'setup.py')): + logging.info('Found setup.py, assuming python project.') + yield SetupPy('setup.py') + elif os.path.exists(os.path.join(path, 'pyproject.toml')): + logging.info('Found pyproject.toml, assuming python project.') + yield PyProject() + elif os.path.exists(os.path.join(path, 'setup.cfg')): + logging.info('Found setup.cfg, assuming python project.') + yield SetupCfg('setup.cfg') - if 
session.exists("pyproject.toml"): - logging.info("Found pyproject.toml, assuming python project.") - yield PyProject(session) + if os.path.exists(os.path.join(path, 'package.json')): + logging.info('Found package.json, assuming node package.') + yield Npm('package.json') - if session.exists("setup.cfg"): - logging.info("Found setup.cfg, assuming python project.") - yield SetupCfg(session) + if os.path.exists(os.path.join(path, 'waf')): + logging.info('Found waf, assuming waf package.') + yield Waf('waf') - if session.exists("package.json"): - logging.info("Found package.json, assuming node package.") - yield NpmPackage(session) + if os.path.exists(os.path.join(path, 'Cargo.toml')): + logging.info('Found Cargo.toml, assuming rust cargo package.') + yield Cargo('Cargo.toml') - if session.exists("waf"): - logging.info("Found waf, assuming waf package.") - yield Waf(session) + if os.path.exists(os.path.join(path, 'pom.xml')): + logging.info('Found pom.xml, assuming maven package.') + yield Maven('pom.xml') + + if (os.path.exists(os.path.join(path, 'dist.ini')) and + not os.path.exists(os.path.join(path, 'Makefile.PL'))): + yield DistInkt('dist.ini') gemfiles = [ - entry.name for entry in session.scandir(".") if entry.name.endswith(".gem") - ] + entry.name for entry in os.scandir(path) + if entry.name.endswith('.gem')] if gemfiles: - yield Gem(session) + yield Gem(gemfiles[0]) - if session.exists("dist.ini") and not session.exists("Makefile.PL"): - yield DistInkt(session) + if any([os.path.exists(os.path.join(path, p)) for p in [ + 'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac', + 'configure.in']]): + yield Make() - if any( - [ - session.exists(p) - for p in [ - "Makefile", - "Makefile.PL", - "autogen.sh", - "configure.ac", - "configure.in", - ] - ] - ): - yield Make(session) + cabal_filenames = [ + entry.name for entry in os.scandir(path) + if entry.name.endswith('.cabal')] + if cabal_filenames: + if len(cabal_filenames) == 1: + yield Cabal(cabal_filenames[0]) 
+ else: + warnings.warn( + 'More than one cabal filename, ignoring all: %r' % + cabal_filenames) + + if os.path.exists(os.path.join(path, '.travis.yml')): + import yaml + import ruamel.yaml.reader + with open('.travis.yml', 'rb') as f: + try: + data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader) + except ruamel.yaml.reader.ReaderError as e: + warnings.warn('Unable to parse .travis.yml: %s' % (e, )) + else: + language = data.get('language') + if language == 'go': + yield Golang() + + for entry in os.scandir(path): + if entry.name.endswith('.go'): + yield Golang() + break diff --git a/ognibuild/clean.py b/ognibuild/clean.py index cabf76f..9f1c4d1 100644 --- a/ognibuild/clean.py +++ b/ognibuild/clean.py @@ -15,16 +15,16 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .buildsystem import detect_buildsystems, NoBuildToolsFound +from .buildsystem import NoBuildToolsFound -def run_clean(session, resolver): +def run_clean(session, buildsystems, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() - for buildsystem in detect_buildsystems(session): - buildsystem.clean(resolver) + for buildsystem in buildsystems: + buildsystem.clean(session, resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 1fa77cd..79610d5 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -34,7 +34,7 @@ from buildlog_consultant.common import ( from . 
import DetailedFailure -from .buildsystem import detect_buildsystems, NoBuildToolsFound +from .buildsystem import NoBuildToolsFound from .session.schroot import SchrootSession from .vcs import dupe_vcs_tree, export_vcs_tree @@ -62,13 +62,13 @@ class DistNoTarball(Exception): """Dist operation did not create a tarball.""" -def run_dist(session, resolver): +def run_dist(session, buildsystems, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() - for buildsystem in detect_buildsystems(session): - buildsystem.dist(resolver) + for buildsystem in buildsystems: + buildsystem.dist(session, resolver) return raise NoBuildToolsFound() @@ -115,13 +115,12 @@ class DistCatcher(object): def create_dist_schroot( - tree: Tree, - target_dir: str, - chroot: str, - packaging_tree: Optional[Tree] = None, - include_controldir: bool = True, - subdir: Optional[str] = None, -) -> str: + tree: Tree, target_dir: str, + chroot: str, packaging_tree: Optional[Tree] = None, + include_controldir: bool = True, + subdir: Optional[str] = None) -> str: + from .buildsystem import detect_buildsystems + from .apt import AptResolver if subdir is None: subdir = "package" with SchrootSession(chroot) as session: @@ -144,12 +143,15 @@ def create_dist_schroot( else: dupe_vcs_tree(tree, export_directory) + buildsystems = list(detect_buildsystems(export_directory)) + resolver = AptResolver.from_session(session) + with DistCatcher(export_directory) as dc: oldcwd = os.getcwd() os.chdir(export_directory) try: session.chdir(os.path.join(reldir, subdir)) - run_dist(session) + run_dist(session, buildsystems, resolver) finally: os.chdir(oldcwd) diff --git a/ognibuild/install.py b/ognibuild/install.py index 5d386c0..df0e61f 100644 --- a/ognibuild/install.py +++ b/ognibuild/install.py @@ -15,16 +15,16 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from 
.buildsystem import detect_buildsystems, NoBuildToolsFound +from .buildsystem import NoBuildToolsFound -def run_install(session, resolver): +def run_install(session, buildsystems, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() - for buildsystem in detect_buildsystems(session): - buildsystem.install(resolver) + for buildsystem in buildsystems: + buildsystem.install(session, resolver) return raise NoBuildToolsFound() diff --git a/ognibuild/test.py b/ognibuild/test.py index 8f7ca08..8560347 100644 --- a/ognibuild/test.py +++ b/ognibuild/test.py @@ -15,16 +15,16 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .buildsystem import detect_buildsystems, NoBuildToolsFound +from .buildsystem import NoBuildToolsFound -def run_test(session, resolver): +def run_test(session, buildsystems, resolver): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() - for buildsystem in detect_buildsystems(session): - buildsystem.test(resolver) + for buildsystem in buildsystems: + buildsystem.test(session, resolver) return raise NoBuildToolsFound() From 4b1591d864f6e8f727060725fff74f2776f4cd80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 9 Feb 2021 03:05:16 +0000 Subject: [PATCH 59/83] Fix typing. --- ognibuild/__main__.py | 2 +- ognibuild/buildsystem.py | 3 +-- ognibuild/debian/fix_build.py | 21 ++++++++++++++------- ognibuild/dist.py | 2 +- ognibuild/resolver.py | 2 +- 5 files changed, 18 insertions(+), 12 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 71e3f63..32ebe57 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -19,7 +19,7 @@ import logging import os import sys from . 
import UpstreamPackage -from .buildsystem import NoBuildToolsFound +from .buildsystem import NoBuildToolsFound, detect_buildsystems from .build import run_build from .clean import run_clean from .dist import run_dist diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index fa070e3..63c62bd 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -413,7 +413,7 @@ class Cargo(BuildSystem): name = 'cargo' def __init__(self, path): - from toml.decoder import load, TomlDecodeError + from toml.decoder import load with open(path, 'r') as f: self.cargo = load(f) @@ -505,7 +505,6 @@ def detect_buildsystems(path): cabal_filenames) if os.path.exists(os.path.join(path, '.travis.yml')): - import yaml import ruamel.yaml.reader with open('.travis.yml', 'rb') as f: try: diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index a5ef5ae..211d452 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -24,15 +24,17 @@ import os import re import subprocess import sys -from typing import Iterator, List, Callable, Type, Tuple, Set +from typing import Iterator, List, Callable, Type, Tuple, Set, Optional from debian.deb822 import ( Deb822, PkgRelation, Release, -) + ) +from debian.changelog import Version from breezy.commit import PointlessCommit +from breezy.mutabletree import MutableTree from breezy.tree import Tree from debmutate.control import ( ensure_some_version, @@ -122,13 +124,18 @@ class CircularDependency(Exception): class DependencyContext(object): - def __init__(self, tree, subpath="", committer=None, update_changelog=True): + + def __init__(self, tree: MutableTree, + subpath: str = '', committer: Optional[str] = None, + update_changelog: bool = True): self.tree = tree self.subpath = subpath self.committer = committer self.update_changelog = update_changelog - def add_dependency(self, package, minimum_version=None): + def add_dependency( + self, package: str, + minimum_version: Optional[Version] = 
None) -> bool: raise NotImplementedError(self.add_dependency) @@ -266,9 +273,9 @@ def add_test_dependency( ) -def commit_debian_changes( - tree, subpath, summary, committer=None, update_changelog=True -): +def commit_debian_changes(tree: MutableTree, subpath: str, + summary: str, committer: Optional[str] = None, + update_changelog: bool = True) -> bool: with tree.lock_write(): try: if update_changelog: diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 79610d5..57d1cf9 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -120,7 +120,7 @@ def create_dist_schroot( include_controldir: bool = True, subdir: Optional[str] = None) -> str: from .buildsystem import detect_buildsystems - from .apt import AptResolver + from .resolver import AptResolver if subdir is None: subdir = "package" with SchrootSession(chroot) as session: diff --git a/ognibuild/resolver.py b/ognibuild/resolver.py index dffb263..e663559 100644 --- a/ognibuild/resolver.py +++ b/ognibuild/resolver.py @@ -65,7 +65,7 @@ class AptResolver(Resolver): if req.family == 'python3': yield 'python3-%s' % req.name else: - path = list(self._possible_paths(req)) + list(self._possible_paths(req)) raise NotImplementedError From 8aae9c93d80da379cfbe8bed104f720bce3543de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Tue, 9 Feb 2021 23:19:40 +0000 Subject: [PATCH 60/83] Reformat using black. 
--- ognibuild/__init__.py | 2 - ognibuild/__main__.py | 71 +++--- ognibuild/buildsystem.py | 457 +++++++++++++++++++--------------- ognibuild/debian/build.py | 3 +- ognibuild/debian/fix_build.py | 28 ++- ognibuild/dist.py | 12 +- ognibuild/resolver.py | 20 +- 7 files changed, 327 insertions(+), 266 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 45b2b82..7238a06 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -45,14 +45,12 @@ def shebang_binary(p): class UpstreamRequirement(object): - def __init__(self, family, name): self.family = family self.name = name class UpstreamOutput(object): - def __init__(self, family, name): self.family = family self.name = name diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 32ebe57..af0b214 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -18,7 +18,8 @@ import logging import os import sys -from . import UpstreamPackage +from . import note, UpstreamPackage +from .apt import UnidentifiedError from .buildsystem import NoBuildToolsFound, detect_buildsystems from .build import run_build from .clean import run_clean @@ -30,7 +31,7 @@ from .resolver import ( AutoResolver, NativeResolver, MissingDependencies, - ) +) from .test import run_test @@ -44,18 +45,20 @@ def get_necessary_declared_requirements(resolver, requirements, stages): def install_necessary_declared_requirements(resolver, buildsystem, stages): missing = [] - missing.extend(get_necessary_declared_requirements( - resolver, buildsystem.get_declared_dependencies(), - stages)) + missing.extend( + get_necessary_declared_requirements( + resolver, buildsystem.get_declared_dependencies(), stages + ) + ) resolver.install(missing) STAGE_MAP = { - 'dist': [], - 'install': ['build'], - 'test': ['test', 'dev'], - 'build': ['build'], - 'clean': [] + "dist": [], + "install": ["build"], + "test": ["test", "dev"], + "build": ["build"], + "clean": [], } @@ -71,13 +74,16 @@ def main(): ) 
parser.add_argument("--schroot", type=str, help="schroot to run in.") parser.add_argument( - '--resolve', choices=['explain', 'apt', 'native'], - default='apt', - help='What to do about missing dependencies') + "--resolve", + choices=["explain", "apt", "native"], + default="apt", + help="What to do about missing dependencies", + ) parser.add_argument( - '--ignore-declared-dependencies', - action='store_true', - help='Ignore declared dependencies, follow build errors only') + "--ignore-declared-dependencies", + action="store_true", + help="Ignore declared dependencies, follow build errors only", + ) args = parser.parse_args() if args.schroot: from .session.schroot import SchrootSession @@ -88,13 +94,13 @@ def main(): session = PlainSession() with session: - if args.resolve == 'apt': + if args.resolve == "apt": resolver = AptResolver.from_session(session) - elif args.resolve == 'explain': + elif args.resolve == "explain": resolver = ExplainResolver.from_session(session) - elif args.resolve == 'native': + elif args.resolve == "native": resolver = NativeResolver.from_session(session) - elif args.resolver == 'auto': + elif args.resolver == "auto": resolver = AutoResolver.from_session(session) os.chdir(args.directory) try: @@ -103,29 +109,30 @@ def main(): stages = STAGE_MAP[args.subcommand] if stages: for bs in bss: - install_necessary_declared_requirements( - resolver, bs, stages) - if args.subcommand == 'dist': + install_necessary_declared_requirements(resolver, bs, stages) + if args.subcommand == "dist": run_dist(session=session, buildsystems=bss, resolver=resolver) - if args.subcommand == 'build': + if args.subcommand == "build": run_build(session, buildsystems=bss, resolver=resolver) - if args.subcommand == 'clean': + if args.subcommand == "clean": run_clean(session, buildsystems=bss, resolver=resolver) - if args.subcommand == 'install': + if args.subcommand == "install": run_install(session, buildsystems=bss, resolver=resolver) - if args.subcommand == 'test': + if 
args.subcommand == "test": run_test(session, buildsystems=bss, resolver=resolver) + except UnidentifiedError: + return 1 except NoBuildToolsFound: logging.info("No build tools found.") return 1 except MissingDependencies as e: for req in e.requirements: - note('Missing dependency (%s:%s)' % ( - req.family, req.name)) + note("Missing dependency (%s:%s)" % (req.family, req.name)) for resolver in [ - AptResolver.from_session(session), - NativeResolver.from_session(session)]: - note(' %s' % (resolver.explain([req]), )) + AptResolver.from_session(session), + NativeResolver.from_session(session), + ]: + note(" %s" % (resolver.explain([req]),)) return 2 return 0 diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 63c62bd..54d9791 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -60,25 +60,25 @@ class BuildSystem(object): class Pear(BuildSystem): - name = 'pear' + name = "pear" def __init__(self, path): self.path = path def setup(self, resolver): - resolver.install([UpstreamRequirement('binary', 'pear')]) + resolver.install([UpstreamRequirement("binary", "pear")]) def dist(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ['pear', 'package']) + run_with_build_fixer(session, ["pear", "package"]) def test(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ['pear', 'run-tests']) + run_with_build_fixer(session, ["pear", "run-tests"]) def build(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ['pear', 'build']) + run_with_build_fixer(session, ["pear", "build"]) def clean(self, session, resolver): self.setup(resolver) @@ -86,201 +86,215 @@ class Pear(BuildSystem): def install(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ['pear', 'install']) + run_with_build_fixer(session, ["pear", "install"]) class SetupPy(BuildSystem): - name = 'setup.py' + name = "setup.py" def __init__(self, path): from distutils.core import 
run_setup + self.result = run_setup(os.path.abspath(path), stop_after="init") def setup(self, resolver): - resolver.install([ - UpstreamRequirement('python3', 'pip'), - UpstreamRequirement('binary', 'python3'), - ]) - with open('setup.py', 'r') as f: + resolver.install( + [ + UpstreamRequirement("python3", "pip"), + UpstreamRequirement("binary", "python3"), + ] + ) + with open("setup.py", "r") as f: setup_py_contents = f.read() try: with open("setup.cfg", "r") as f: setup_cfg_contents = f.read() except FileNotFoundError: - setup_cfg_contents = '' - if 'setuptools' in setup_py_contents: - logging.info('Reference to setuptools found, installing.') - resolver.install([UpstreamRequirement('python3', 'setuptools')]) - if ('setuptools_scm' in setup_py_contents or - 'setuptools_scm' in setup_cfg_contents): - logging.info('Reference to setuptools-scm found, installing.') - resolver.install([ - UpstreamRequirement('python3', 'setuptools-scm'), - UpstreamRequirement('binary', 'git'), - UpstreamRequirement('binary', 'mercurial'), - ]) + setup_cfg_contents = "" + if "setuptools" in setup_py_contents: + logging.info("Reference to setuptools found, installing.") + resolver.install([UpstreamRequirement("python3", "setuptools")]) + if ( + "setuptools_scm" in setup_py_contents + or "setuptools_scm" in setup_cfg_contents + ): + logging.info("Reference to setuptools-scm found, installing.") + resolver.install( + [ + UpstreamRequirement("python3", "setuptools-scm"), + UpstreamRequirement("binary", "git"), + UpstreamRequirement("binary", "mercurial"), + ] + ) # TODO(jelmer): Install setup_requires def test(self, session, resolver): self.setup(resolver) - self._run_setup(session, resolver, ['test']) + self._run_setup(session, resolver, ["test"]) def dist(self, session, resolver): self.setup(resolver) - self._run_setup(session, resolver, ['sdist']) + self._run_setup(session, resolver, ["sdist"]) def clean(self, session, resolver): self.setup(resolver) - self._run_setup(session, resolver, 
['clean']) + self._run_setup(session, resolver, ["clean"]) def install(self, session, resolver): self.setup(resolver) - self._run_setup(session, resolver, ['install']) + self._run_setup(session, resolver, ["install"]) def _run_setup(self, session, resolver, args): - interpreter = shebang_binary('setup.py') + interpreter = shebang_binary("setup.py") if interpreter is not None: - if interpreter in ('python3', 'python2', 'python'): - resolver.install([UpstreamRequirement('binary', interpreter)]) + if interpreter in ("python3", "python2", "python"): + resolver.install([UpstreamRequirement("binary", interpreter)]) else: - raise ValueError('Unknown interpreter %r' % interpreter) - run_with_build_fixer( - session, ['./setup.py'] + args) + raise ValueError("Unknown interpreter %r" % interpreter) + run_with_build_fixer(session, ["./setup.py"] + args) else: # Just assume it's Python 3 - resolver.install([UpstreamRequirement('binary', 'python3')]) - run_with_build_fixer( - session, ['python3', './setup.py'] + args) + resolver.install([UpstreamRequirement("binary", "python3")]) + run_with_build_fixer(session, ["python3", "./setup.py"] + args) def get_declared_dependencies(self): for require in self.result.get_requires(): - yield 'build', UpstreamRequirement('python3', require) - for require in self.result.install_requires: - yield 'install', UpstreamRequirement('python3', require) - for require in self.result.tests_require: - yield 'test', UpstreamRequirement('python3', require) + yield "build", UpstreamRequirement("python3", require) + if self.result.install_requires: + for require in self.result.install_requires: + yield "install", UpstreamRequirement("python3", require) + if self.result.tests_require: + for require in self.result.tests_require: + yield "test", UpstreamRequirement("python3", require) def get_declared_outputs(self): - for script in (self.result.scripts or []): - yield UpstreamOutput('binary', os.path.basename(script)) + for script in self.result.scripts or 
[]: + yield UpstreamOutput("binary", os.path.basename(script)) entry_points = self.result.entry_points or {} - for script in entry_points.get('console_scripts', []): - yield UpstreamOutput('binary', script.split('=')[0]) + for script in entry_points.get("console_scripts", []): + yield UpstreamOutput("binary", script.split("=")[0]) for package in self.result.packages or []: - yield UpstreamOutput('python3', package) + yield UpstreamOutput("python3", package) class PyProject(BuildSystem): - name = 'pyproject' + name = "pyproject" + + def __init__(self, path): + self.path = path + self.pyproject = self.load_toml() def load_toml(self): import toml - with open("pyproject.toml", "r") as pf: + with open(self.path, "r") as pf: return toml.load(pf) def dist(self, session, resolver): - pyproject = self.load_toml() - if "poetry" in pyproject.get("tool", []): + if "poetry" in self.pyproject.get("tool", []): logging.info( - 'Found pyproject.toml with poetry section, ' - 'assuming poetry project.') - resolver.install([ - UpstreamRequirement('python3', 'venv'), - UpstreamRequirement('python3', 'pip'), - ]) - session.check_call(['pip3', 'install', 'poetry'], user='root') - session.check_call(['poetry', 'build', '-f', 'sdist']) + "Found pyproject.toml with poetry section, " "assuming poetry project." 
+ ) + resolver.install( + [ + UpstreamRequirement("python3", "venv"), + UpstreamRequirement("python3", "pip"), + ] + ) + session.check_call(["pip3", "install", "poetry"], user="root") + session.check_call(["poetry", "build", "-f", "sdist"]) return raise AssertionError("no supported section in pyproject.toml") class SetupCfg(BuildSystem): - name = 'setup.cfg' + name = "setup.cfg" def __init__(self, path): self.path = path def setup(self, resolver): - resolver.install([ - UpstreamRequirement('python3', 'pep517'), - UpstreamRequirement('python3', 'pip'), - ]) + resolver.install( + [ + UpstreamRequirement("python3", "pep517"), + UpstreamRequirement("python3", "pip"), + ] + ) def dist(self, session, resolver): self.setup(resolver) - session.check_call(['python3', '-m', 'pep517.build', '-s', '.']) + session.check_call(["python3", "-m", "pep517.build", "-s", "."]) class Npm(BuildSystem): - name = 'npm' + name = "npm" def __init__(self, path): import json - with open(path, 'r') as f: + + with open(path, "r") as f: self.package = json.load(f) def get_declared_dependencies(self): - if 'devDependencies' in self.package: - for name, unused_version in ( - self.package['devDependencies'].items()): + if "devDependencies" in self.package: + for name, unused_version in self.package["devDependencies"].items(): # TODO(jelmer): Look at version - yield 'dev', UpstreamRequirement('npm', name) + yield "dev", UpstreamRequirement("npm", name) def setup(self, resolver): - resolver.install([UpstreamRequirement('binary', 'npm')]) + resolver.install([UpstreamRequirement("binary", "npm")]) def dist(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ['npm', 'pack']) + run_with_build_fixer(session, ["npm", "pack"]) class Waf(BuildSystem): - name = 'waf' + name = "waf" def __init__(self, path): self.path = path def setup(self, resolver): - resolver.install([UpstreamRequirement('binary', 'python3')]) + resolver.install([UpstreamRequirement("binary", "python3")]) def 
dist(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ['./waf', 'dist']) + run_with_build_fixer(session, ["./waf", "dist"]) class Gem(BuildSystem): - name = 'gem' + name = "gem" def __init__(self, path): self.path = path def setup(self, resolver): - resolver.install([UpstreamRequirement('binary', 'gem2deb')]) + resolver.install([UpstreamRequirement("binary", "gem2deb")]) def dist(self, session, resolver): self.setup(resolver) - gemfiles = [entry.name for entry in session.scandir('.') - if entry.name.endswith('.gem')] + gemfiles = [ + entry.name for entry in session.scandir(".") if entry.name.endswith(".gem") + ] if len(gemfiles) > 1: - logging.warning('More than one gemfile. Trying the first?') - run_with_build_fixer(session, ['gem2tgz', gemfiles[0]]) + logging.warning("More than one gemfile. Trying the first?") + run_with_build_fixer(session, ["gem2tgz", gemfiles[0]]) class DistInkt(BuildSystem): - def __init__(self, path): self.path = path - self.name = 'dist-zilla' + self.name = "dist-zilla" self.dist_inkt_class = None - with open('dist.ini', 'rb') as f: + with open("dist.ini", "rb") as f: for line in f: if not line.startswith(b";;"): continue @@ -290,102 +304,119 @@ class DistInkt(BuildSystem): continue if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"): logging.info( - 'Found Dist::Inkt section in dist.ini, ' - 'assuming distinkt.') - self.name = 'dist-inkt' + "Found Dist::Inkt section in dist.ini, " "assuming distinkt." 
+ ) + self.name = "dist-inkt" self.dist_inkt_class = value.decode().strip("'") return - logging.info('Found dist.ini, assuming dist-zilla.') + logging.info("Found dist.ini, assuming dist-zilla.") def setup(self, resolver): - resolver.install([ - UpstreamRequirement('perl', 'Dist::Inkt'), - ]) + resolver.install( + [ + UpstreamRequirement("perl", "Dist::Inkt"), + ] + ) def dist(self, session, resolver): self.setup(resolver) - if self.name == 'dist-inkt': - resolver.install([ - UpstreamRequirement('perl-module', self.dist_inkt_class)]) - run_with_build_fixer(session, ['distinkt-dist']) + if self.name == "dist-inkt": + resolver.install([UpstreamRequirement("perl-module", self.dist_inkt_class)]) + run_with_build_fixer(session, ["distinkt-dist"]) else: # Default to invoking Dist::Zilla - resolver.install([UpstreamRequirement('perl', 'Dist::Zilla')]) - run_with_build_fixer(session, ['dzil', 'build', '--in', '..']) + resolver.install([UpstreamRequirement("perl", "Dist::Zilla")]) + run_with_build_fixer(session, ["dzil", "build", "--in", ".."]) class Make(BuildSystem): - name = 'make' + name = "make" def setup(self, session, resolver): - if session.exists('Makefile.PL') and not session.exists('Makefile'): - resolver.install([UpstreamRequirement('binary', 'perl')]) - run_with_build_fixer(session, ['perl', 'Makefile.PL']) + if session.exists("Makefile.PL") and not session.exists("Makefile"): + resolver.install([UpstreamRequirement("binary", "perl")]) + run_with_build_fixer(session, ["perl", "Makefile.PL"]) - if not session.exists('Makefile') and not session.exists('configure'): - if session.exists('autogen.sh'): - if shebang_binary('autogen.sh') is None: - run_with_build_fixer( - session, ['/bin/sh', './autogen.sh']) + if not session.exists("Makefile") and not session.exists("configure"): + if session.exists("autogen.sh"): + if shebang_binary("autogen.sh") is None: + run_with_build_fixer(session, ["/bin/sh", "./autogen.sh"]) try: - run_with_build_fixer( - session, 
['./autogen.sh']) + run_with_build_fixer(session, ["./autogen.sh"]) except UnidentifiedError as e: - if ("Gnulib not yet bootstrapped; " - "run ./bootstrap instead.\n" in e.lines): + if ( + "Gnulib not yet bootstrapped; " + "run ./bootstrap instead.\n" in e.lines + ): run_with_build_fixer(session, ["./bootstrap"]) - run_with_build_fixer(session, ['./autogen.sh']) + run_with_build_fixer(session, ["./autogen.sh"]) else: raise - elif (session.exists('configure.ac') or - session.exists('configure.in')): - resolver.install([ - UpstreamRequirement('binary', 'autoconf'), - UpstreamRequirement('binary', 'automake'), - UpstreamRequirement('binary', 'gettextize'), - UpstreamRequirement('binary', 'libtoolize'), - ]) - run_with_build_fixer(session, ['autoreconf', '-i']) + elif session.exists("configure.ac") or session.exists("configure.in"): + resolver.install( + [ + UpstreamRequirement("binary", "autoconf"), + UpstreamRequirement("binary", "automake"), + UpstreamRequirement("binary", "gettextize"), + UpstreamRequirement("binary", "libtoolize"), + ] + ) + run_with_build_fixer(session, ["autoreconf", "-i"]) - if not session.exists('Makefile') and session.exists('configure'): - session.check_call(['./configure']) + if not session.exists("Makefile") and session.exists("configure"): + session.check_call(["./configure"]) def dist(self, session, resolver): self.setup(session, resolver) - resolver.install([UpstreamRequirement('binary', 'make')]) + resolver.install([UpstreamRequirement("binary", "make")]) try: - run_with_build_fixer(session, ['make', 'dist']) + run_with_build_fixer(session, ["make", "dist"]) except UnidentifiedError as e: if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines: pass elif "make[1]: *** No rule to make target 'dist'. 
Stop.\n" in e.lines: pass - elif ("Reconfigure the source tree " - "(via './config' or 'perl Configure'), please.\n" - ) in e.lines: - run_with_build_fixer(session, ['./config']) - run_with_build_fixer(session, ['make', 'dist']) elif ( - "Please try running 'make manifest' and then run " - "'make dist' again.\n" in e.lines): - run_with_build_fixer(session, ['make', 'manifest']) - run_with_build_fixer(session, ['make', 'dist']) + "Reconfigure the source tree " + "(via './config' or 'perl Configure'), please.\n" + ) in e.lines: + run_with_build_fixer(session, ["./config"]) + run_with_build_fixer(session, ["make", "dist"]) + elif ( + "Please try running 'make manifest' and then run " + "'make dist' again.\n" in e.lines + ): + run_with_build_fixer(session, ["make", "manifest"]) + run_with_build_fixer(session, ["make", "dist"]) elif "Please run ./configure first\n" in e.lines: - run_with_build_fixer(session, ['./configure']) - run_with_build_fixer(session, ['make', 'dist']) - elif any([re.match( - r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' ' - r'Run \'./configure \[options\]\' and retry. Stop.\n', - line) for line in e.lines]): - run_with_build_fixer(session, ['./configure']) - run_with_build_fixer(session, ['make', 'dist']) - elif any([re.match( - r'Problem opening MANIFEST: No such file or directory ' - r'at .* line [0-9]+\.', line) for line in e.lines]): - run_with_build_fixer(session, ['make', 'manifest']) - run_with_build_fixer(session, ['make', 'dist']) + run_with_build_fixer(session, ["./configure"]) + run_with_build_fixer(session, ["make", "dist"]) + elif any( + [ + re.match( + r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' " + r"Run \'./configure \[options\]\' and retry. 
Stop.\n", + line, + ) + for line in e.lines + ] + ): + run_with_build_fixer(session, ["./configure"]) + run_with_build_fixer(session, ["make", "dist"]) + elif any( + [ + re.match( + r"Problem opening MANIFEST: No such file or directory " + r"at .* line [0-9]+\.", + line, + ) + for line in e.lines + ] + ): + run_with_build_fixer(session, ["make", "manifest"]) + run_with_build_fixer(session, ["make", "dist"]) else: raise else: @@ -393,46 +424,48 @@ class Make(BuildSystem): def get_declared_dependencies(self): # TODO(jelmer): Split out the perl-specific stuff? - if os.path.exists('META.yml'): + if os.path.exists("META.yml"): # See http://module-build.sourceforge.net/META-spec-v1.4.html for # the specification of the format. import ruamel.yaml import ruamel.yaml.reader - with open('META.yml', 'rb') as f: + + with open("META.yml", "rb") as f: try: data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader) except ruamel.yaml.reader.ReaderError as e: - warnings.warn('Unable to parse META.yml: %s' % e) + warnings.warn("Unable to parse META.yml: %s" % e) return - for require in data.get('requires', []): - yield 'build', UpstreamRequirement('perl', require) + for require in data.get("requires", []): + yield "build", UpstreamRequirement("perl", require) class Cargo(BuildSystem): - name = 'cargo' + name = "cargo" def __init__(self, path): from toml.decoder import load - with open(path, 'r') as f: + + with open(path, "r") as f: self.cargo = load(f) def get_declared_dependencies(self): - if 'dependencies' in self.cargo: - for name, details in self.cargo['dependencies'].items(): + if "dependencies" in self.cargo: + for name, details in self.cargo["dependencies"].items(): # TODO(jelmer): Look at details['features'], details['version'] - yield 'build', UpstreamRequirement('cargo-crate', name) + yield "build", UpstreamRequirement("cargo-crate", name) class Golang(BuildSystem): """Go builds.""" - name = 'golang' + name = "golang" class Maven(BuildSystem): - name = 'maven' + name = "maven" 
def __init__(self, path): self.path = path @@ -440,83 +473,99 @@ class Maven(BuildSystem): class Cabal(BuildSystem): - name = 'cabal' + name = "cabal" def __init__(self, path): self.path = path -def detect_buildsystems(path): +def detect_buildsystems(path, trust_package=False): """Detect build systems.""" - if os.path.exists(os.path.join(path, 'package.xml')): - logging.info('Found package.xml, assuming pear package.') - yield Pear('package.xml') + if os.path.exists(os.path.join(path, "package.xml")): + logging.info("Found package.xml, assuming pear package.") + yield Pear("package.xml") - if os.path.exists(os.path.join(path, 'setup.py')): - logging.info('Found setup.py, assuming python project.') - yield SetupPy('setup.py') - elif os.path.exists(os.path.join(path, 'pyproject.toml')): - logging.info('Found pyproject.toml, assuming python project.') - yield PyProject() - elif os.path.exists(os.path.join(path, 'setup.cfg')): - logging.info('Found setup.cfg, assuming python project.') - yield SetupCfg('setup.cfg') + if os.path.exists(os.path.join(path, "setup.py")): + logging.info("Found setup.py, assuming python project.") + yield SetupPy("setup.py") + elif os.path.exists(os.path.join(path, "pyproject.toml")): + logging.info("Found pyproject.toml, assuming python project.") + yield PyProject("pyproject.toml") + elif os.path.exists(os.path.join(path, "setup.cfg")): + logging.info("Found setup.cfg, assuming python project.") + yield SetupCfg("setup.cfg") - if os.path.exists(os.path.join(path, 'package.json')): - logging.info('Found package.json, assuming node package.') - yield Npm('package.json') + if os.path.exists(os.path.join(path, "package.json")): + logging.info("Found package.json, assuming node package.") + yield Npm("package.json") - if os.path.exists(os.path.join(path, 'waf')): - logging.info('Found waf, assuming waf package.') - yield Waf('waf') + if os.path.exists(os.path.join(path, "waf")): + logging.info("Found waf, assuming waf package.") + yield 
Waf("waf") - if os.path.exists(os.path.join(path, 'Cargo.toml')): - logging.info('Found Cargo.toml, assuming rust cargo package.') - yield Cargo('Cargo.toml') + if os.path.exists(os.path.join(path, "Cargo.toml")): + logging.info("Found Cargo.toml, assuming rust cargo package.") + yield Cargo("Cargo.toml") - if os.path.exists(os.path.join(path, 'pom.xml')): - logging.info('Found pom.xml, assuming maven package.') - yield Maven('pom.xml') + if os.path.exists(os.path.join(path, "pom.xml")): + logging.info("Found pom.xml, assuming maven package.") + yield Maven("pom.xml") - if (os.path.exists(os.path.join(path, 'dist.ini')) and - not os.path.exists(os.path.join(path, 'Makefile.PL'))): - yield DistInkt('dist.ini') + if os.path.exists(os.path.join(path, "dist.ini")) and not os.path.exists( + os.path.join(path, "Makefile.PL") + ): + yield DistInkt("dist.ini") - gemfiles = [ - entry.name for entry in os.scandir(path) - if entry.name.endswith('.gem')] + gemfiles = [entry.name for entry in os.scandir(path) if entry.name.endswith(".gem")] if gemfiles: yield Gem(gemfiles[0]) - if any([os.path.exists(os.path.join(path, p)) for p in [ - 'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac', - 'configure.in']]): + if any( + [ + os.path.exists(os.path.join(path, p)) + for p in [ + "Makefile", + "Makefile.PL", + "autogen.sh", + "configure.ac", + "configure.in", + ] + ] + ): yield Make() cabal_filenames = [ - entry.name for entry in os.scandir(path) - if entry.name.endswith('.cabal')] + entry.name for entry in os.scandir(path) if entry.name.endswith(".cabal") + ] if cabal_filenames: if len(cabal_filenames) == 1: yield Cabal(cabal_filenames[0]) else: warnings.warn( - 'More than one cabal filename, ignoring all: %r' % - cabal_filenames) + "More than one cabal filename, ignoring all: %r" % cabal_filenames + ) - if os.path.exists(os.path.join(path, '.travis.yml')): + if os.path.exists(os.path.join(path, ".travis.yml")): import ruamel.yaml.reader - with open('.travis.yml', 'rb') as f: 
+ + with open(".travis.yml", "rb") as f: try: data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader) except ruamel.yaml.reader.ReaderError as e: - warnings.warn('Unable to parse .travis.yml: %s' % (e, )) + warnings.warn("Unable to parse .travis.yml: %s" % (e,)) else: - language = data.get('language') - if language == 'go': + language = data.get("language") + if language == "go": yield Golang() for entry in os.scandir(path): - if entry.name.endswith('.go'): + if entry.name.endswith(".go"): yield Golang() break + + +def get_buildsystem(path, trust_package=False): + for buildsystem in detect_buildsystems(path, trust_package=trust_package): + return buildsystem + + raise NoBuildToolsFound() diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index 122f477..04b2fe3 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -35,13 +35,14 @@ from debmutate.changelog import get_maintainer, format_datetime from breezy import osutils from breezy.mutabletree import MutableTree -from breezy.plugins.debian.builder import BuildFailedError +from breezy.plugins.debian.util import BuildFailedError from buildlog_consultant.sbuild import ( worker_failure_from_sbuild_log, SbuildFailure, ) + DEFAULT_BUILDER = "sbuild --no-clean-source" diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 211d452..1a651ad 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -30,7 +30,7 @@ from debian.deb822 import ( Deb822, PkgRelation, Release, - ) +) from debian.changelog import Version from breezy.commit import PointlessCommit @@ -113,6 +113,7 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) + DEFAULT_MAX_ITERATIONS = 10 @@ -124,18 +125,21 @@ class CircularDependency(Exception): class DependencyContext(object): - - def __init__(self, tree: MutableTree, - subpath: str = '', committer: Optional[str] = None, - update_changelog: bool = True): + def __init__( + self, + tree: MutableTree, + subpath: str = "", + 
committer: Optional[str] = None, + update_changelog: bool = True, + ): self.tree = tree self.subpath = subpath self.committer = committer self.update_changelog = update_changelog def add_dependency( - self, package: str, - minimum_version: Optional[Version] = None) -> bool: + self, package: str, minimum_version: Optional[Version] = None + ) -> bool: raise NotImplementedError(self.add_dependency) @@ -273,9 +277,13 @@ def add_test_dependency( ) -def commit_debian_changes(tree: MutableTree, subpath: str, - summary: str, committer: Optional[str] = None, - update_changelog: bool = True) -> bool: +def commit_debian_changes( + tree: MutableTree, + subpath: str, + summary: str, + committer: Optional[str] = None, + update_changelog: bool = True, +) -> bool: with tree.lock_write(): try: if update_changelog: diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 57d1cf9..f9d8dec 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -115,12 +115,16 @@ class DistCatcher(object): def create_dist_schroot( - tree: Tree, target_dir: str, - chroot: str, packaging_tree: Optional[Tree] = None, - include_controldir: bool = True, - subdir: Optional[str] = None) -> str: + tree: Tree, + target_dir: str, + chroot: str, + packaging_tree: Optional[Tree] = None, + include_controldir: bool = True, + subdir: Optional[str] = None, +) -> str: from .buildsystem import detect_buildsystems from .resolver import AptResolver + if subdir is None: subdir = "package" with SchrootSession(chroot) as session: diff --git a/ognibuild/resolver.py b/ognibuild/resolver.py index e663559..63a473a 100644 --- a/ognibuild/resolver.py +++ b/ognibuild/resolver.py @@ -17,13 +17,11 @@ class MissingDependencies(Exception): - def __init__(self, reqs): self.requirements = reqs class Resolver(object): - def install(self, requirements): raise NotImplementedError(self.install) @@ -32,21 +30,20 @@ class Resolver(object): class AptResolver(Resolver): - def __init__(self, apt): self.apt = apt @classmethod def 
from_session(cls, session): from .apt import AptManager + return cls(AptManager(session)) def install(self, requirements): missing = [] for req in requirements: pps = list(self._possible_paths(req)) - if (not pps or - not any(self.apt.session.exists(p) for p in pps)): + if not pps or not any(self.apt.session.exists(p) for p in pps): missing.append(req) if missing: self.apt.install(list(self.resolve(missing))) @@ -55,22 +52,21 @@ class AptResolver(Resolver): raise NotImplementedError(self.explain) def _possible_paths(self, req): - if req.family == 'binary': - yield '/usr/bin/%s' % req.name + if req.family == "binary": + yield "/usr/bin/%s" % req.name else: return def resolve(self, requirements): for req in requirements: - if req.family == 'python3': - yield 'python3-%s' % req.name + if req.family == "python3": + yield "python3-%s" % req.name else: list(self._possible_paths(req)) raise NotImplementedError class NativeResolver(Resolver): - def __init__(self, session): self.session = session @@ -86,7 +82,6 @@ class NativeResolver(Resolver): class ExplainResolver(Resolver): - def __init__(self, session): self.session = session @@ -99,8 +94,7 @@ class ExplainResolver(Resolver): class AutoResolver(Resolver): - """Automatically find out the most appropriate way to instal dependencies. - """ + """Automatically find out the most appropriate way to instal dependencies.""" def __init__(self, session): self.session = session From 73e47483ff537b844550469c97063b90b476759f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 01:33:35 +0000 Subject: [PATCH 61/83] Fix formatting. 
--- ognibuild/__main__.py | 2 +- ognibuild/buildsystem.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index af0b214..0e74b37 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -62,7 +62,7 @@ STAGE_MAP = { } -def main(): +def main(): # noqa: C901 import argparse parser = argparse.ArgumentParser() diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 54d9791..9e2d832 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -479,7 +479,7 @@ class Cabal(BuildSystem): self.path = path -def detect_buildsystems(path, trust_package=False): +def detect_buildsystems(path, trust_package=False): # noqa: C901 """Detect build systems.""" if os.path.exists(os.path.join(path, "package.xml")): logging.info("Found package.xml, assuming pear package.") From 4d4c8d880bcf410c90ade2f5dc9755d06593ec7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 01:54:29 +0000 Subject: [PATCH 62/83] Avoid use of breezy-debian. --- ognibuild/dist.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index f9d8dec..3b47bec 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -75,6 +75,7 @@ def run_dist(session, buildsystems, resolver): class DistCatcher(object): + def __init__(self, directory): self.export_directory = directory self.files = [] From 7359c07b96f99fbd5c83c47e3d4a90d6048fa78b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 10 Feb 2021 14:02:46 +0000 Subject: [PATCH 63/83] Drop warn and note functions. 
--- ognibuild/__init__.py | 6 +++--- ognibuild/__main__.py | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 7238a06..c693ada 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -21,10 +21,8 @@ import stat import sys -DEFAULT_PYTHON = "python3" - - class DetailedFailure(Exception): + def __init__(self, retcode, argv, error): self.retcode = retcode self.argv = argv @@ -45,12 +43,14 @@ def shebang_binary(p): class UpstreamRequirement(object): + def __init__(self, family, name): self.family = family self.name = name class UpstreamOutput(object): + def __init__(self, family, name): self.family = family self.name = name diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 0e74b37..d7061f6 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -18,7 +18,6 @@ import logging import os import sys -from . import note, UpstreamPackage from .apt import UnidentifiedError from .buildsystem import NoBuildToolsFound, detect_buildsystems from .build import run_build @@ -127,12 +126,12 @@ def main(): # noqa: C901 return 1 except MissingDependencies as e: for req in e.requirements: - note("Missing dependency (%s:%s)" % (req.family, req.name)) + logging.info("Missing dependency (%s:%s)", (req.family, req.name)) for resolver in [ AptResolver.from_session(session), NativeResolver.from_session(session), ]: - note(" %s" % (resolver.explain([req]),)) + logging.info(" %s", resolver.explain([req])) return 2 return 0 From ee5a8462f315134e74e98116cf742eb3dc6ed640 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 13 Feb 2021 14:50:09 +0000 Subject: [PATCH 64/83] More work on resolvers. 
--- ognibuild/__init__.py | 7 +- ognibuild/__main__.py | 3 +- ognibuild/buildsystem.py | 78 ++++----- ognibuild/debian/fix_build.py | 149 +++++++----------- ognibuild/dist.py | 2 +- ognibuild/fix_build.py | 14 +- ognibuild/requirements.py | 64 ++++++++ .../{resolver.py => resolver/__init__.py} | 42 +---- ognibuild/resolver/apt.py | 84 ++++++++++ ognibuild/tests/test_debian_fix_build.py | 3 + 10 files changed, 273 insertions(+), 173 deletions(-) create mode 100644 ognibuild/requirements.py rename ognibuild/{resolver.py => resolver/__init__.py} (66%) create mode 100644 ognibuild/resolver/apt.py diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index c693ada..132e417 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -18,7 +18,6 @@ import os import stat -import sys class DetailedFailure(Exception): @@ -44,9 +43,11 @@ def shebang_binary(p): class UpstreamRequirement(object): - def __init__(self, family, name): + # Name of the family of requirements - e.g. "python-package" + family: str + + def __init__(self, family): self.family = family - self.name = name class UpstreamOutput(object): diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index d7061f6..ab562ce 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -25,12 +25,12 @@ from .clean import run_clean from .dist import run_dist from .install import run_install from .resolver import ( - AptResolver, ExplainResolver, AutoResolver, NativeResolver, MissingDependencies, ) +from .resolver.apt import AptResolver from .test import run_test @@ -84,6 +84,7 @@ def main(): # noqa: C901 help="Ignore declared dependencies, follow build errors only", ) args = parser.parse_args() + logging.basicConfig(level=logging.INFO) if args.schroot: from .session.schroot import SchrootSession diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 9e2d832..d36f019 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -22,7 +22,14 @@ import os import re import 
warnings -from . import shebang_binary, UpstreamRequirement, UpstreamOutput +from . import shebang_binary, UpstreamOutput +from .requirements import ( + BinaryRequirement, + PythonPackageRequirement, + PerlModuleRequirement, + NodePackageRequirement, + CargoCrateRequirement, + ) from .apt import UnidentifiedError from .fix_build import run_with_build_fixer @@ -66,7 +73,7 @@ class Pear(BuildSystem): self.path = path def setup(self, resolver): - resolver.install([UpstreamRequirement("binary", "pear")]) + resolver.install([BinaryRequirement("pear")]) def dist(self, session, resolver): self.setup(resolver) @@ -94,18 +101,13 @@ class SetupPy(BuildSystem): name = "setup.py" def __init__(self, path): + self.path = path from distutils.core import run_setup - self.result = run_setup(os.path.abspath(path), stop_after="init") def setup(self, resolver): - resolver.install( - [ - UpstreamRequirement("python3", "pip"), - UpstreamRequirement("binary", "python3"), - ] - ) - with open("setup.py", "r") as f: + resolver.install([PythonPackageRequirement('pip')]) + with open(self.path, "r") as f: setup_py_contents = f.read() try: with open("setup.cfg", "r") as f: @@ -114,7 +116,7 @@ class SetupPy(BuildSystem): setup_cfg_contents = "" if "setuptools" in setup_py_contents: logging.info("Reference to setuptools found, installing.") - resolver.install([UpstreamRequirement("python3", "setuptools")]) + resolver.install([PythonPackageRequirement("setuptools")]) if ( "setuptools_scm" in setup_py_contents or "setuptools_scm" in setup_cfg_contents @@ -122,9 +124,9 @@ class SetupPy(BuildSystem): logging.info("Reference to setuptools-scm found, installing.") resolver.install( [ - UpstreamRequirement("python3", "setuptools-scm"), - UpstreamRequirement("binary", "git"), - UpstreamRequirement("binary", "mercurial"), + PythonPackageRequirement("setuptools-scm"), + BinaryRequirement("git"), + BinaryRequirement("mercurial"), ] ) @@ -150,24 +152,24 @@ class SetupPy(BuildSystem): interpreter = 
shebang_binary("setup.py") if interpreter is not None: if interpreter in ("python3", "python2", "python"): - resolver.install([UpstreamRequirement("binary", interpreter)]) + resolver.install([BinaryRequirement(interpreter)]) else: raise ValueError("Unknown interpreter %r" % interpreter) run_with_build_fixer(session, ["./setup.py"] + args) else: # Just assume it's Python 3 - resolver.install([UpstreamRequirement("binary", "python3")]) + resolver.install([BinaryRequirement("python3")]) run_with_build_fixer(session, ["python3", "./setup.py"] + args) def get_declared_dependencies(self): for require in self.result.get_requires(): - yield "build", UpstreamRequirement("python3", require) + yield "build", PythonPackageRequirement(require) if self.result.install_requires: for require in self.result.install_requires: - yield "install", UpstreamRequirement("python3", require) + yield "install", PythonPackageRequirement(require) if self.result.tests_require: for require in self.result.tests_require: - yield "test", UpstreamRequirement("python3", require) + yield "test", PythonPackageRequirement(require) def get_declared_outputs(self): for script in self.result.scripts or []: @@ -200,8 +202,8 @@ class PyProject(BuildSystem): ) resolver.install( [ - UpstreamRequirement("python3", "venv"), - UpstreamRequirement("python3", "pip"), + PythonPackageRequirement("venv"), + PythonPackageRequirement("pip"), ] ) session.check_call(["pip3", "install", "poetry"], user="root") @@ -220,8 +222,8 @@ class SetupCfg(BuildSystem): def setup(self, resolver): resolver.install( [ - UpstreamRequirement("python3", "pep517"), - UpstreamRequirement("python3", "pip"), + PythonPackageRequirement("pep517"), + PythonPackageRequirement("pip"), ] ) @@ -244,10 +246,10 @@ class Npm(BuildSystem): if "devDependencies" in self.package: for name, unused_version in self.package["devDependencies"].items(): # TODO(jelmer): Look at version - yield "dev", UpstreamRequirement("npm", name) + yield "dev", 
NodePackageRequirement(name) def setup(self, resolver): - resolver.install([UpstreamRequirement("binary", "npm")]) + resolver.install([BinaryRequirement("npm")]) def dist(self, session, resolver): self.setup(resolver) @@ -262,7 +264,7 @@ class Waf(BuildSystem): self.path = path def setup(self, resolver): - resolver.install([UpstreamRequirement("binary", "python3")]) + resolver.install([BinaryRequirement("python3")]) def dist(self, session, resolver): self.setup(resolver) @@ -277,7 +279,7 @@ class Gem(BuildSystem): self.path = path def setup(self, resolver): - resolver.install([UpstreamRequirement("binary", "gem2deb")]) + resolver.install([BinaryRequirement("gem2deb")]) def dist(self, session, resolver): self.setup(resolver) @@ -314,18 +316,18 @@ class DistInkt(BuildSystem): def setup(self, resolver): resolver.install( [ - UpstreamRequirement("perl", "Dist::Inkt"), + PerlModuleRequirement("Dist::Inkt"), ] ) def dist(self, session, resolver): self.setup(resolver) if self.name == "dist-inkt": - resolver.install([UpstreamRequirement("perl-module", self.dist_inkt_class)]) + resolver.install([PerlModuleRequirement(self.dist_inkt_class)]) run_with_build_fixer(session, ["distinkt-dist"]) else: # Default to invoking Dist::Zilla - resolver.install([UpstreamRequirement("perl", "Dist::Zilla")]) + resolver.install([PerlModuleRequirement("Dist::Zilla")]) run_with_build_fixer(session, ["dzil", "build", "--in", ".."]) @@ -335,7 +337,7 @@ class Make(BuildSystem): def setup(self, session, resolver): if session.exists("Makefile.PL") and not session.exists("Makefile"): - resolver.install([UpstreamRequirement("binary", "perl")]) + resolver.install([BinaryRequirement("perl")]) run_with_build_fixer(session, ["perl", "Makefile.PL"]) if not session.exists("Makefile") and not session.exists("configure"): @@ -357,10 +359,10 @@ class Make(BuildSystem): elif session.exists("configure.ac") or session.exists("configure.in"): resolver.install( [ - UpstreamRequirement("binary", "autoconf"), - 
UpstreamRequirement("binary", "automake"), - UpstreamRequirement("binary", "gettextize"), - UpstreamRequirement("binary", "libtoolize"), + BinaryRequirement("autoconf"), + BinaryRequirement("automake"), + BinaryRequirement("gettextize"), + BinaryRequirement("libtoolize"), ] ) run_with_build_fixer(session, ["autoreconf", "-i"]) @@ -370,7 +372,7 @@ class Make(BuildSystem): def dist(self, session, resolver): self.setup(session, resolver) - resolver.install([UpstreamRequirement("binary", "make")]) + resolver.install([BinaryRequirement("make")]) try: run_with_build_fixer(session, ["make", "dist"]) except UnidentifiedError as e: @@ -437,7 +439,7 @@ class Make(BuildSystem): warnings.warn("Unable to parse META.yml: %s" % e) return for require in data.get("requires", []): - yield "build", UpstreamRequirement("perl", require) + yield "build", PerlModuleRequirement(require) class Cargo(BuildSystem): @@ -454,7 +456,7 @@ class Cargo(BuildSystem): if "dependencies" in self.cargo: for name, details in self.cargo["dependencies"].items(): # TODO(jelmer): Look at details['features'], details['version'] - yield "build", UpstreamRequirement("cargo-crate", name) + yield "build", CargoCrateRequirement(name) class Golang(BuildSystem): diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 1a651ad..2e8848e 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -21,15 +21,13 @@ __all__ = [ import logging import os -import re import subprocess import sys -from typing import Iterator, List, Callable, Type, Tuple, Set, Optional +from typing import List, Callable, Type, Tuple, Set, Optional from debian.deb822 import ( Deb822, PkgRelation, - Release, ) from debian.changelog import Version @@ -113,6 +111,11 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) +from ..apt import AptManager, LocalAptManager +from ..resolver.apt import AptResolver +from ..requirements import BinaryRequirement +from .build import attempt_build + 
DEFAULT_MAX_ITERATIONS = 10 @@ -128,15 +131,21 @@ class DependencyContext(object): def __init__( self, tree: MutableTree, + apt: AptManager, subpath: str = "", committer: Optional[str] = None, update_changelog: bool = True, ): self.tree = tree + self.apt = apt + self.resolver = AptResolver(apt) self.subpath = subpath self.committer = committer self.update_changelog = update_changelog + def resolve_apt(self, req): + return self.resolver.resolve(req) + def add_dependency( self, package: str, minimum_version: Optional[Version] = None ) -> bool: @@ -157,11 +166,11 @@ class BuildDependencyContext(DependencyContext): class AutopkgtestDependencyContext(DependencyContext): def __init__( - self, testname, tree, subpath="", committer=None, update_changelog=True + self, testname, tree, apt, subpath="", committer=None, update_changelog=True ): self.testname = testname super(AutopkgtestDependencyContext, self).__init__( - tree, subpath, committer, update_changelog + tree, apt, subpath, committer, update_changelog ) def add_dependency(self, package, minimum_version=None): @@ -301,27 +310,7 @@ def commit_debian_changes( return True -def get_package_for_paths(paths, regex=False): - from .apt import search_apt_file - candidates = set() - for path in paths: - candidates.update(search_apt_file(path, regex=regex)) - if candidates: - break - if len(candidates) == 0: - logging.warning("No packages found that contain %r", paths) - return None - if len(candidates) > 1: - logging.warning( - "More than 1 packages found that contain %r: %r", path, candidates - ) - # Euhr. Pick the one with the shortest name? 
- return sorted(candidates, key=len)[0] - else: - return candidates.pop() - - -def get_package_for_python_module(module, python_version): +def get_package_for_python_module(apt, module, python_version): if python_version == "python3": paths = [ os.path.join( @@ -374,7 +363,7 @@ def get_package_for_python_module(module, python_version): ] else: raise AssertionError("unknown python version %r" % python_version) - return get_package_for_paths(paths, regex=True) + return apt.get_package_for_paths(paths, regex=True) def targeted_python_versions(tree: Tree) -> Set[str]: @@ -394,23 +383,8 @@ def targeted_python_versions(tree: Tree) -> Set[str]: return targeted -apt_cache = None - - -def package_exists(package): - global apt_cache - if apt_cache is None: - import apt_pkg - - apt_cache = apt_pkg.Cache() - for p in apt_cache.packages: - if p.name == package: - return True - return False - - def fix_missing_javascript_runtime(error, context): - package = get_package_for_paths(["/usr/bin/node", "/usr/bin/duk"], regex=False) + package = context.apt.get_package_for_paths(["/usr/bin/node", "/usr/bin/duk"], regex=False) if package is None: return False return context.add_dependency(package) @@ -420,30 +394,30 @@ def fix_missing_python_distribution(error, context): # noqa: C901 targeted = targeted_python_versions(context.tree) default = not targeted - pypy_pkg = get_package_for_paths( + pypy_pkg = context.apt.get_package_for_paths( ["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % error.distribution], regex=True ) if pypy_pkg is None: pypy_pkg = "pypy-%s" % error.distribution - if not package_exists(pypy_pkg): + if not context.apt.package_exists(pypy_pkg): pypy_pkg = None - py2_pkg = get_package_for_paths( + py2_pkg = context.apt.get_package_for_paths( ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info" % error.distribution], regex=True, ) if py2_pkg is None: py2_pkg = "python-%s" % error.distribution - if not package_exists(py2_pkg): + if not 
context.apt.package_exists(py2_pkg): py2_pkg = None - py3_pkg = get_package_for_paths( + py3_pkg = context.apt.get_package_for_paths( ["/usr/lib/python3/dist-packages/%s-.*.egg-info" % error.distribution], regex=True, ) if py3_pkg is None: py3_pkg = "python3-%s" % error.distribution - if not package_exists(py3_pkg): + if not context.apt.package_exists(py3_pkg): py3_pkg = None extra_build_deps = [] @@ -488,9 +462,9 @@ def fix_missing_python_module(error, context): targeted = set() default = not targeted - pypy_pkg = get_package_for_python_module(error.module, "pypy") - py2_pkg = get_package_for_python_module(error.module, "python2") - py3_pkg = get_package_for_python_module(error.module, "python3") + pypy_pkg = get_package_for_python_module(context.apt, error.module, "pypy") + py2_pkg = get_package_for_python_module(context.apt, error.module, "python2") + py3_pkg = get_package_for_python_module(context.apt, error.module, "python3") extra_build_deps = [] if error.python_version == 2: @@ -528,7 +502,7 @@ def fix_missing_python_module(error, context): def fix_missing_go_package(error, context): - package = get_package_for_paths( + package = context.apt.get_package_for_paths( [os.path.join("/usr/share/gocode/src", error.package, ".*")], regex=True ) if package is None: @@ -537,11 +511,11 @@ def fix_missing_go_package(error, context): def fix_missing_c_header(error, context): - package = get_package_for_paths( + package = context.apt.get_package_for_paths( [os.path.join("/usr/include", error.header)], regex=False ) if package is None: - package = get_package_for_paths( + package = context.apt.get_package_for_paths( [os.path.join("/usr/include", ".*", error.header)], regex=True ) if package is None: @@ -550,11 +524,11 @@ def fix_missing_c_header(error, context): def fix_missing_pkg_config(error, context): - package = get_package_for_paths( + package = context.apt.get_package_for_paths( [os.path.join("/usr/lib/pkgconfig", error.module + ".pc")] ) if package is None: - 
package = get_package_for_paths( + package = context.apt.get_package_for_paths( [os.path.join("/usr/lib", ".*", "pkgconfig", error.module + ".pc")], regex=True, ) @@ -564,21 +538,12 @@ def fix_missing_pkg_config(error, context): def fix_missing_command(error, context): - if os.path.isabs(error.command): - paths = [error.command] - else: - paths = [ - os.path.join(dirname, error.command) for dirname in ["/usr/bin", "/bin"] - ] - package = get_package_for_paths(paths) - if package is None: - logging.info("No packages found that contain %r", paths) - return False + package = context.resolve_apt(BinaryRequirement(error.command)) return context.add_dependency(package) def fix_missing_file(error, context): - package = get_package_for_paths([error.path]) + package = context.apt.get_package_for_paths([error.path]) if package is None: return False return context.add_dependency(package) @@ -590,7 +555,7 @@ def fix_missing_sprockets_file(error, context): else: logging.warning("unable to handle content type %s", error.content_type) return False - package = get_package_for_paths([path], regex=True) + package = context.apt.get_package_for_paths([path], regex=True) if package is None: return False return context.add_dependency(package) @@ -619,7 +584,7 @@ def fix_missing_perl_file(error, context): paths = [error.filename] else: paths = [os.path.join(inc, error.filename) for inc in error.inc] - package = get_package_for_paths(paths, regex=False) + package = context.apt.get_package_for_paths(paths, regex=False) if package is None: if getattr(error, "module", None): logging.warning( @@ -635,17 +600,17 @@ def fix_missing_perl_file(error, context): return context.add_dependency(package) -def get_package_for_node_package(node_package): +def get_package_for_node_package(apt, node_package): paths = [ "/usr/share/nodejs/.*/node_modules/%s/package.json" % node_package, "/usr/lib/nodejs/%s/package.json" % node_package, "/usr/share/nodejs/%s/package.json" % node_package, ] - return 
get_package_for_paths(paths, regex=True) + return apt.get_package_for_paths(paths, regex=True) def fix_missing_node_module(error, context): - package = get_package_for_node_package(error.module) + package = get_package_for_node_package(context.apt, error.module) if package is None: logging.warning("no node package found for %s.", error.module) return False @@ -654,7 +619,7 @@ def fix_missing_node_module(error, context): def fix_missing_dh_addon(error, context): paths = [os.path.join("/usr/share/perl5", error.path)] - package = get_package_for_paths(paths) + package = context.apt.get_package_for_paths(paths) if package is None: logging.warning("no package for debhelper addon %s", error.name) return False @@ -667,7 +632,7 @@ def retry_apt_failure(error, context): def fix_missing_php_class(error, context): path = "/usr/share/php/%s.php" % error.php_class.replace("\\", "/") - package = get_package_for_paths([path]) + package = context.apt.get_package_for_paths([path]) if package is None: logging.warning("no package for PHP class %s", error.php_class) return False @@ -676,7 +641,7 @@ def fix_missing_php_class(error, context): def fix_missing_jdk_file(error, context): path = error.jdk_path + ".*/" + error.filename - package = get_package_for_paths([path], regex=True) + package = context.apt.get_package_for_paths([path], regex=True) if package is None: logging.warning( "no package found for %s (JDK: %s) - regex %s", @@ -690,7 +655,7 @@ def fix_missing_jdk_file(error, context): def fix_missing_vala_package(error, context): path = "/usr/share/vala-[0-9.]+/vapi/%s.vapi" % error.package - package = get_package_for_paths([path], regex=True) + package = context.apt.get_package_for_paths([path], regex=True) if package is None: logging.warning("no file found for package %s - regex %s", error.package, path) return False @@ -710,7 +675,7 @@ def fix_missing_xml_entity(error, context): else: return False - package = get_package_for_paths([search_path], regex=False) + package = 
context.apt.get_package_for_paths([search_path], regex=False) if package is None: return False return context.add_dependency(package) @@ -723,7 +688,7 @@ def fix_missing_library(error, context): os.path.join("/usr/lib/lib%s.a$" % error.library), os.path.join("/usr/lib/.*/lib%s.a$" % error.library), ] - package = get_package_for_paths(paths, regex=True) + package = context.apt.get_package_for_paths(paths, regex=True) if package is None: logging.warning("no package for library %s", error.library) return False @@ -737,7 +702,7 @@ def fix_missing_ruby_gem(error, context): "specifications/%s-.*\\.gemspec" % error.gem ) ] - package = get_package_for_paths(paths, regex=True) + package = context.apt.get_package_for_paths(paths, regex=True) if package is None: logging.warning("no package for gem %s", error.gem) return False @@ -746,7 +711,7 @@ def fix_missing_ruby_gem(error, context): def fix_missing_ruby_file(error, context): paths = [os.path.join("/usr/lib/ruby/vendor_ruby/%s.rb" % error.filename)] - package = get_package_for_paths(paths) + package = context.apt.get_package_for_paths(paths) if package is not None: return context.add_dependency(package) paths = [ @@ -755,7 +720,7 @@ def fix_missing_ruby_file(error, context): "lib/%s.rb" % error.filename ) ] - package = get_package_for_paths(paths, regex=True) + package = context.apt.get_package_for_paths(paths, regex=True) if package is not None: return context.add_dependency(package) @@ -765,7 +730,7 @@ def fix_missing_ruby_file(error, context): def fix_missing_r_package(error, context): paths = [os.path.join("/usr/lib/R/site-library/.*/R/%s$" % error.package)] - package = get_package_for_paths(paths, regex=True) + package = context.apt.get_package_for_paths(paths, regex=True) if package is None: logging.warning("no package for R package %s", error.package) return False @@ -781,7 +746,7 @@ def fix_missing_java_class(error, context): logging.warning("unable to find classpath for %s", error.classname) return False 
logging.info("Classpath for %s: %r", error.classname, classpath) - package = get_package_for_paths(classpath) + package = context.apt.get_package_for_paths(classpath) if package is None: logging.warning("no package for files in %r", classpath) return False @@ -849,7 +814,7 @@ def fix_missing_maven_artifacts(error, context): "%s-%s.%s" % (artifact_id, version, kind), ) ] - package = get_package_for_paths(paths, regex=regex) + package = context.apt.get_package_for_paths(paths, regex=regex) if package is None: logging.warning("no package for artifact %s", artifact) return False @@ -862,7 +827,7 @@ def install_gnome_common(error, context): def install_gnome_common_dep(error, context): if error.package == "glib-gettext": - package = get_package_for_paths(["/usr/bin/glib-gettextize"]) + package = context.apt.get_package_for_paths(["/usr/bin/glib-gettextize"]) else: package = None if package is None: @@ -875,7 +840,7 @@ def install_gnome_common_dep(error, context): def install_xfce_dep(error, context): if error.package == "gtk-doc": - package = get_package_for_paths(["/usr/bin/gtkdocize"]) + package = context.apt.get_package_for_paths(["/usr/bin/gtkdocize"]) else: package = None if package is None: @@ -947,7 +912,7 @@ def fix_missing_autoconf_macro(error, context): except KeyError: logging.info("No local m4 file found defining %s", error.macro) return False - package = get_package_for_paths([path]) + package = context.apt.get_package_for_paths([path]) if package is None: logging.warning("no package for macro file %s", path) return False @@ -960,7 +925,7 @@ def fix_missing_c_sharp_compiler(error, context): def fix_missing_haskell_dependencies(error, context): path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % error.deps[0][0] - package = get_package_for_paths([path], regex=True) + package = context.apt.get_package_for_paths([path], regex=True) if package is None: logging.warning("no package for macro file %s", path) return False @@ -1033,6 +998,7 @@ def resolve_error(error, 
context, fixers): def build_incrementally( local_tree, + apt, suffix, build_suite, output_directory, @@ -1074,6 +1040,7 @@ def build_incrementally( if e.context[0] == "build": context = BuildDependencyContext( local_tree, + apt, subpath=subpath, committer=committer, update_changelog=update_changelog, @@ -1082,6 +1049,7 @@ def build_incrementally( context = AutopkgtestDependencyContext( e.context[1], local_tree, + apt, subpath=subpath, committer=committer, update_changelog=update_changelog, @@ -1154,9 +1122,12 @@ def main(argv=None): args = parser.parse_args() from breezy.workingtree import WorkingTree + apt = LocalAptManager() + tree = WorkingTree.open(".") build_incrementally( tree, + apt, args.suffix, args.suite, args.output_directory, diff --git a/ognibuild/dist.py b/ognibuild/dist.py index 3b47bec..d226e0f 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -124,7 +124,7 @@ def create_dist_schroot( subdir: Optional[str] = None, ) -> str: from .buildsystem import detect_buildsystems - from .resolver import AptResolver + from .resolver.apt import AptResolver if subdir is None: subdir = "package" diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index 9b02ed6..c393164 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -22,6 +22,7 @@ from buildlog_consultant.common import ( find_build_failure_description, Problem, MissingPerlModule, + MissingPythonDistribution, MissingCommand, ) @@ -69,10 +70,16 @@ def fix_npm_missing_command(error, context): return True +def fix_python_package_from_pip(error, context): + context.session.check_call(["pip", "install", error.distribution]) + return True + + GENERIC_INSTALL_FIXERS: List[ Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]] ] = [ (MissingPerlModule, fix_perl_module_from_cpan), + (MissingPythonDistribution, fix_python_package_from_pip), (MissingCommand, fix_npm_missing_command), ] @@ -84,11 +91,12 @@ def run_with_build_fixer(session: Session, args: List[str]): retcode, 
lines = run_with_tee(session, args) if retcode == 0: return - offset, line, error = find_build_failure_description(lines) + match, error = find_build_failure_description(lines) if error is None: logging.warning("Build failed with unidentified error. Giving up.") - if line is not None: - raise UnidentifiedError(retcode, args, lines, secondary=(offset, line)) + if match is not None: + raise UnidentifiedError( + retcode, args, lines, secondary=(match.lineno, match.line)) raise UnidentifiedError(retcode, args, lines) logging.info("Identified error: %r", error) diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py new file mode 100644 index 0000000..65bf1d5 --- /dev/null +++ b/ognibuild/requirements.py @@ -0,0 +1,64 @@ +#!/usr/bin/python +# Copyright (C) 2019-2020 Jelmer Vernooij +# encoding: utf-8 +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from . 
import UpstreamRequirement + + +class PythonPackageRequirement(UpstreamRequirement): + + package: str + + def __init__(self, package): + super(PythonPackageRequirement, self).__init__('python-package') + self.package = package + + +class BinaryRequirement(UpstreamRequirement): + + binary_name: str + + def __init__(self, binary_name): + super(BinaryRequirement, self).__init__('binary') + self.binary_name = binary_name + + +class PerlModuleRequirement(UpstreamRequirement): + + module: str + + def __init__(self, module): + super(PerlModuleRequirement, self).__init__('perl-module') + self.module = module + + +class NodePackageRequirement(UpstreamRequirement): + + package: str + + def __init__(self, package): + super(NodePackageRequirement, self).__init__('npm-package') + self.package = package + + +class CargoCrateRequirement(UpstreamRequirement): + + crate: str + + def __init__(self, crate): + super(CargoCrateRequirement, self).__init__('cargo-crate') + self.crate = crate diff --git a/ognibuild/resolver.py b/ognibuild/resolver/__init__.py similarity index 66% rename from ognibuild/resolver.py rename to ognibuild/resolver/__init__.py index 63a473a..9384482 100644 --- a/ognibuild/resolver.py +++ b/ognibuild/resolver/__init__.py @@ -17,11 +17,13 @@ class MissingDependencies(Exception): + def __init__(self, reqs): self.requirements = reqs class Resolver(object): + def install(self, requirements): raise NotImplementedError(self.install) @@ -29,43 +31,6 @@ class Resolver(object): raise NotImplementedError(self.explain) -class AptResolver(Resolver): - def __init__(self, apt): - self.apt = apt - - @classmethod - def from_session(cls, session): - from .apt import AptManager - - return cls(AptManager(session)) - - def install(self, requirements): - missing = [] - for req in requirements: - pps = list(self._possible_paths(req)) - if not pps or not any(self.apt.session.exists(p) for p in pps): - missing.append(req) - if missing: - self.apt.install(list(self.resolve(missing))) - - 
def explain(self, requirements): - raise NotImplementedError(self.explain) - - def _possible_paths(self, req): - if req.family == "binary": - yield "/usr/bin/%s" % req.name - else: - return - - def resolve(self, requirements): - for req in requirements: - if req.family == "python3": - yield "python3-%s" % req.name - else: - list(self._possible_paths(req)) - raise NotImplementedError - - class NativeResolver(Resolver): def __init__(self, session): self.session = session @@ -94,7 +59,8 @@ class ExplainResolver(Resolver): class AutoResolver(Resolver): - """Automatically find out the most appropriate way to instal dependencies.""" + """Automatically find out the most appropriate way to install dependencies. + """ def __init__(self, session): self.session = session diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py new file mode 100644 index 0000000..5fe42d8 --- /dev/null +++ b/ognibuild/resolver/apt.py @@ -0,0 +1,84 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +import posixpath + +from ..apt import AptManager + +from . 
import Resolver +from ..requirements import ( + BinaryRequirement, + PythonPackageRequirement, + ) + + +class NoAptPackage(Exception): + """No apt package.""" + + +def resolve_binary_req(apt_mgr, req): + if posixpath.isabs(req.binary_name): + paths = [req.binary_name] + else: + paths = [ + posixpath.join(dirname, req.binary_name) + for dirname in ["/usr/bin", "/bin"] + ] + return apt_mgr.get_package_for_paths(paths) + + +APT_REQUIREMENT_RESOLVERS = [ + (BinaryRequirement, resolve_binary_req), +] + + +class AptResolver(Resolver): + + def __init__(self, apt): + self.apt = apt + + @classmethod + def from_session(cls, session): + return cls(AptManager(session)) + + def install(self, requirements): + missing = [] + for req in requirements: + try: + pps = list(req.possible_paths()) + except NotImplementedError: + missing.append(req) + else: + if not pps or not any(self.apt.session.exists(p) for p in pps): + missing.append(req) + if missing: + self.apt.install(list(self.resolve(missing))) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + def resolve(self, requirements): + for req in requirements: + for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: + if isinstance(req, rr_class): + package_name = rr_fn(self.apt, req) + if package_name is None: + raise NoAptPackage() + yield package_name + break + else: + raise NotImplementedError diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index d95bbe3..07725f3 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -31,6 +31,7 @@ from buildlog_consultant.common import ( MissingValaPackage, ) from ..debian import apt +from ..debian.apt import LocalAptManager from ..debian.fix_build import ( resolve_error, VERSIONED_PACKAGE_FIXERS, @@ -88,8 +89,10 @@ blah (0.1) UNRELEASED; urgency=medium yield pkg def resolve(self, error, context=("build",)): + apt = LocalAptManager() context = BuildDependencyContext( 
self.tree, + apt, subpath="", committer="Janitor ", update_changelog=True, From b689774fa6b254699ef3765b39bd81e0611fcf13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 13 Feb 2021 14:52:09 +0000 Subject: [PATCH 65/83] Avoid using silver-platter. --- ognibuild/debian/build.py | 4 +++- ognibuild/debian/fix_build.py | 7 +------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index 04b2fe3..b19c640 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -35,13 +35,15 @@ from debmutate.changelog import get_maintainer, format_datetime from breezy import osutils from breezy.mutabletree import MutableTree -from breezy.plugins.debian.util import BuildFailedError +from breezy.plugins.debian.builder import BuildFailedError from buildlog_consultant.sbuild import ( worker_failure_from_sbuild_log, SbuildFailure, ) +DEFAULT_BUILDER = "sbuild --no-clean-source" + DEFAULT_BUILDER = "sbuild --no-clean-source" diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 2e8848e..71ee188 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -62,11 +62,6 @@ from debmutate._rules import ( update_rules, ) -from .build import ( - attempt_build, - get_build_architecture, - DEFAULT_BUILDER, - ) from breezy.plugins.debian.changelog import debcommit from buildlog_consultant import Problem from buildlog_consultant.common import ( @@ -114,7 +109,7 @@ from buildlog_consultant.sbuild import ( from ..apt import AptManager, LocalAptManager from ..resolver.apt import AptResolver from ..requirements import BinaryRequirement -from .build import attempt_build +from .build import attempt_build, DEFAULT_BUILDER DEFAULT_MAX_ITERATIONS = 10 From f0e45ab26ba38af7e57b35e30331c0e26a52f1eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 20 Feb 2021 01:50:29 +0000 Subject: [PATCH 66/83] Rename architecture.md. 
--- notes/structure.md | 30 ------------------------------ ognibuild/resolver/apt.py | 1 - 2 files changed, 31 deletions(-) delete mode 100644 notes/structure.md diff --git a/notes/structure.md b/notes/structure.md deleted file mode 100644 index 960892c..0000000 --- a/notes/structure.md +++ /dev/null @@ -1,30 +0,0 @@ -Upstream requirements are expressed as objects derived from UpstreamRequirement. - -They can either be: - - * extracted from the build system - * extracted from errors in build logs - -The details of UpstreamRequirements are specific to the kind of requirement, -and otherwise opaque to ognibuild. - -When building a package, we first make sure that all declared upstream -requirements are met. - -Then we attempt to build. - -If any problems are found in the log, buildlog-consultant will report them. - -ognibuild can then invoke "fixers" to address Problems. - -Problems can be converted to UpstreamRequirements by UpstreamRequirementFixer - -Other Fixer can do things like e.g. upgrade configure.ac to a newer version. - -UpstreamRequirementFixer uses a UpstreamRequirementResolver object that -can translate UpstreamRequirement objects into apt package names or -e.g. cpan commands. - -ognibuild keeps finding problems, resolving them and rebuilding until it finds -a problem it can not resolve or that it thinks it has already resolved -(i.e. seen before). diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index 5fe42d8..df1adb2 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -22,7 +22,6 @@ from ..apt import AptManager from . import Resolver from ..requirements import ( BinaryRequirement, - PythonPackageRequirement, ) From 9064024b83e0e0e396478ed538c649dabfdd1133 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Mon, 22 Feb 2021 17:14:08 +0000 Subject: [PATCH 67/83] Remove duplicate variable. 
--- ognibuild/debian/build.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py index b19c640..100f56d 100644 --- a/ognibuild/debian/build.py +++ b/ognibuild/debian/build.py @@ -42,8 +42,6 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) -DEFAULT_BUILDER = "sbuild --no-clean-source" - DEFAULT_BUILDER = "sbuild --no-clean-source" From 5beef23fc84338511dfe4311199e92251a0482a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 24 Feb 2021 01:28:49 +0000 Subject: [PATCH 68/83] Use resolver in more cases. --- ognibuild/debian/apt.py | 66 ++++- ognibuild/debian/fix_build.py | 445 +++++++--------------------------- ognibuild/requirements.py | 182 ++++++++++++++ ognibuild/resolver/apt.py | 310 ++++++++++++++++++++++- 4 files changed, 627 insertions(+), 376 deletions(-) diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py index 4f16ef7..e8a6934 100644 --- a/ognibuild/debian/apt.py +++ b/ognibuild/debian/apt.py @@ -29,8 +29,6 @@ from debian.deb822 import Release from .. 
import DetailedFailure from ..session import Session, run_with_tee -from .build import get_build_architecture - class UnidentifiedError(Exception): @@ -64,6 +62,12 @@ class AptManager(object): def __init__(self, session): self.session = session + def package_exists(self, package: str) -> bool: + raise NotImplementedError(self.package_exists) + + def get_package_for_paths(self, paths, regex=False): + raise NotImplementedError(self.get_package_for_paths) + def missing(self, packages): root = getattr(self.session, "location", "/") status_path = os.path.join(root, "var/lib/dpkg/status") @@ -80,6 +84,7 @@ class AptManager(object): return list(missing) def install(self, packages: List[str]) -> None: + logging.info('Installing using apt: %r', packages) packages = self.missing(packages) if packages: run_apt(self.session, ["install"] + packages) @@ -88,8 +93,30 @@ class AptManager(object): run_apt(self.session, ["satisfy"] + deps) +class LocalAptManager(AptManager): + + def __init__(self): + from ..session.plain import PlainSession + self.session = PlainSession() + self._apt_cache = None + + def package_exists(self, package): + if self._apt_cache is None: + import apt_pkg + + self._apt_cache = apt_pkg.Cache() + for p in self._apt_cache.packages: + if p.name == package: + return True + return False + + def get_package_for_paths(self, paths, regex=False): + # TODO(jelmer): Make sure we use whatever is configured in self.session + return get_package_for_paths(paths, regex=regex) + + class FileSearcher(object): - def search_files(self, path, regex=False): + def search_files(self, path: str, regex: bool = False) -> Iterator[str]: raise NotImplementedError(self.search_files) @@ -98,9 +125,6 @@ class ContentsFileNotFound(Exception): class AptContentsFileSearcher(FileSearcher): - - _user_agent = 'ognibuild/0.1' - def __init__(self): self._db = {} @@ -137,6 +161,7 @@ class AptContentsFileSearcher(FileSearcher): @classmethod def from_repositories(cls, sources): + from .debian.build 
import get_build_architecture # TODO(jelmer): Verify signatures, etc. urls = [] arches = [get_build_architecture(), "all"] @@ -159,11 +184,11 @@ class AptContentsFileSearcher(FileSearcher): urls.append("%s/%s/%s" % (base_url, name, entry["name"])) return cls.from_urls(urls) - @classmethod - def _get(cls, url): + @staticmethod + def _get(url): from urllib.request import urlopen, Request - request = Request(url, headers={"User-Agent": cls._user_agent}) + request = Request(url, headers={"User-Agent": "Debian Janitor"}) return urlopen(request) def load_url(self, url): @@ -192,7 +217,7 @@ class GeneratedFileSearcher(FileSearcher): def __init__(self, db): self._db = db - def search_files(self, path, regex=False): + def search_files(self, path: str, regex: bool = False) -> Iterator[str]: for p, pkg in sorted(self._db.items()): if regex: if re.match(path, p): @@ -215,7 +240,7 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher( _apt_file_searcher = None -def search_apt_file(path: str, regex: bool = False) -> Iterator[FileSearcher]: +def search_apt_file(path: str, regex: bool = False) -> Iterator[str]: global _apt_file_searcher if _apt_file_searcher is None: # TODO(jelmer): cache file @@ -223,3 +248,22 @@ def search_apt_file(path: str, regex: bool = False) -> Iterator[FileSearcher]: if _apt_file_searcher: yield from _apt_file_searcher.search_files(path, regex=regex) yield from GENERATED_FILE_SEARCHER.search_files(path, regex=regex) + + +def get_package_for_paths(paths: List[str], regex: bool = False) -> Optional[str]: + candidates: Set[str] = set() + for path in paths: + candidates.update(search_apt_file(path, regex=regex)) + if candidates: + break + if len(candidates) == 0: + logging.warning("No packages found that contain %r", paths) + return None + if len(candidates) > 1: + logging.warning( + "More than 1 packages found that contain %r: %r", path, candidates + ) + # Euhr. Pick the one with the shortest name? 
+ return sorted(candidates, key=len)[0] + else: + return candidates.pop() diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 71ee188..d72721f 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -21,7 +21,6 @@ __all__ = [ import logging import os -import subprocess import sys from typing import List, Callable, Type, Tuple, Set, Optional @@ -106,9 +105,35 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) -from ..apt import AptManager, LocalAptManager -from ..resolver.apt import AptResolver -from ..requirements import BinaryRequirement +from .apt import AptManager, LocalAptManager +from ..resolver.apt import ( + AptResolver, + NoAptPackage, + get_package_for_python_module, + ) +from ..requirements import ( + BinaryRequirement, + PathRequirement, + PkgConfigRequirement, + CHeaderRequirement, + JavaScriptRuntimeRequirement, + ValaPackageRequirement, + RubyGemRequirement, + GoPackageRequirement, + DhAddonRequirement, + PhpClassRequirement, + RPackageRequirement, + NodePackageRequirement, + LibraryRequirement, + RubyFileRequirement, + XmlEntityRequirement, + SprocketsFileRequirement, + JavaClassRequirement, + HaskellPackageRequirement, + MavenArtifactRequirement, + GnomeCommonRequirement, + JDKFileRequirement, + ) from .build import attempt_build, DEFAULT_BUILDER @@ -148,7 +173,7 @@ class DependencyContext(object): class BuildDependencyContext(DependencyContext): - def add_dependency(self, package, minimum_version=None): + def add_dependency(self, package: str, minimum_version: Optional[Version] = None): return add_build_dependency( self.tree, package, @@ -181,12 +206,12 @@ class AutopkgtestDependencyContext(DependencyContext): def add_build_dependency( - tree, - package, - minimum_version=None, - committer=None, - subpath="", - update_changelog=True, + tree: Tree, + package: str, + minimum_version: Optional[Version] = None, + committer: Optional[str] = None, + subpath: str = "", + update_changelog: 
bool = True, ): if not isinstance(package, str): raise TypeError(package) @@ -305,62 +330,6 @@ def commit_debian_changes( return True -def get_package_for_python_module(apt, module, python_version): - if python_version == "python3": - paths = [ - os.path.join( - "/usr/lib/python3/dist-packages", - module.replace(".", "/"), - "__init__.py", - ), - os.path.join( - "/usr/lib/python3/dist-packages", module.replace(".", "/") + ".py" - ), - os.path.join( - "/usr/lib/python3\\.[0-9]+/lib-dynload", - module.replace(".", "/") + "\\.cpython-.*\\.so", - ), - os.path.join( - "/usr/lib/python3\\.[0-9]+/", module.replace(".", "/") + ".py" - ), - os.path.join( - "/usr/lib/python3\\.[0-9]+/", module.replace(".", "/"), "__init__.py" - ), - ] - elif python_version == "python2": - paths = [ - os.path.join( - "/usr/lib/python2\\.[0-9]/dist-packages", - module.replace(".", "/"), - "__init__.py", - ), - os.path.join( - "/usr/lib/python2\\.[0-9]/dist-packages", - module.replace(".", "/") + ".py", - ), - os.path.join( - "/usr/lib/python2.\\.[0-9]/lib-dynload", - module.replace(".", "/") + ".so", - ), - ] - elif python_version == "pypy": - paths = [ - os.path.join( - "/usr/lib/pypy/dist-packages", module.replace(".", "/"), "__init__.py" - ), - os.path.join( - "/usr/lib/pypy/dist-packages", module.replace(".", "/") + ".py" - ), - os.path.join( - "/usr/lib/pypy/dist-packages", - module.replace(".", "/") + "\\.pypy-.*\\.so", - ), - ] - else: - raise AssertionError("unknown python version %r" % python_version) - return apt.get_package_for_paths(paths, regex=True) - - def targeted_python_versions(tree: Tree) -> Set[str]: with tree.get_file("debian/control") as f: control = Deb822(f) @@ -378,13 +347,6 @@ def targeted_python_versions(tree: Tree) -> Set[str]: return targeted -def fix_missing_javascript_runtime(error, context): - package = context.apt.get_package_for_paths(["/usr/bin/node", "/usr/bin/duk"], regex=False) - if package is None: - return False - return context.add_dependency(package) - 
- def fix_missing_python_distribution(error, context): # noqa: C901 targeted = targeted_python_versions(context.tree) default = not targeted @@ -496,62 +458,60 @@ def fix_missing_python_module(error, context): return True -def fix_missing_go_package(error, context): - package = context.apt.get_package_for_paths( - [os.path.join("/usr/share/gocode/src", error.package, ".*")], regex=True - ) - if package is None: - return False - return context.add_dependency(package) - - -def fix_missing_c_header(error, context): - package = context.apt.get_package_for_paths( - [os.path.join("/usr/include", error.header)], regex=False - ) - if package is None: - package = context.apt.get_package_for_paths( - [os.path.join("/usr/include", ".*", error.header)], regex=True - ) - if package is None: - return False - return context.add_dependency(package) - - -def fix_missing_pkg_config(error, context): - package = context.apt.get_package_for_paths( - [os.path.join("/usr/lib/pkgconfig", error.module + ".pc")] - ) - if package is None: - package = context.apt.get_package_for_paths( - [os.path.join("/usr/lib", ".*", "pkgconfig", error.module + ".pc")], - regex=True, - ) - if package is None: - return False - return context.add_dependency(package, minimum_version=error.minimum_version) - - -def fix_missing_command(error, context): - package = context.resolve_apt(BinaryRequirement(error.command)) - return context.add_dependency(package) - - -def fix_missing_file(error, context): - package = context.apt.get_package_for_paths([error.path]) - if package is None: - return False - return context.add_dependency(package) - - -def fix_missing_sprockets_file(error, context): - if error.content_type == "application/javascript": - path = "/usr/share/.*/app/assets/javascripts/%s.js$" % error.name +def fix_missing_requirement(error, context): + if isinstance(error, MissingFile): + req = PathRequirement(error.path) + elif isinstance(error, MissingCommand): + req = BinaryRequirement(error.command) + elif 
isinstance(error, MissingPkgConfig): + req = PkgConfigRequirement( + error.module, error.minimum_version) + elif isinstance(error, MissingCHeader): + req = CHeaderRequirement(error.header) + elif isinstance(error, MissingJavaScriptRuntime): + req = JavaScriptRuntimeRequirement() + elif isinstance(error, MissingRubyGem): + req = RubyGemRequirement(error.gem, error.version) + elif isinstance(error, MissingValaPackage): + req = ValaPackageRequirement(error.package) + elif isinstance(error, MissingGoPackage): + req = GoPackageRequirement(error.package) + elif isinstance(error, DhAddonLoadFailure): + req = DhAddonRequirement(error.path) + elif isinstance(error, MissingPhpClass): + req = PhpClassRequirement(error.php_class) + elif isinstance(error, MissingRPackage): + req = RPackageRequirement(error.package, error.minimum_version) + elif isinstance(error, MissingNodeModule): + req = NodePackageRequirement(error.module) + elif isinstance(error, MissingLibrary): + req = LibraryRequirement(error.library) + elif isinstance(error, MissingRubyFile): + req = RubyFileRequirement(error.filename) + elif isinstance(error, MissingXmlEntity): + req = XmlEntityRequirement(error.url) + elif isinstance(error, MissingSprocketsFile): + req = SprocketsFileRequirement(error.content_type, error.name) + elif isinstance(error, MissingJavaClass): + req = JavaClassRequirement(error.classname) + elif isinstance(error, MissingHaskellDependencies): + # TODO(jelmer): Create multiple HaskellPackageRequirement objects? + req = HaskellPackageRequirement(error.package) + elif isinstance(error, MissingMavenArtifacts): + # TODO(jelmer): Create multiple MavenArtifactRequirement objects? 
+ req = MavenArtifactRequirement(error.artifacts) + elif isinstance(error, MissingCSharpCompiler): + req = BinaryRequirement('msc') + elif isinstance(error, GnomeCommonMissing): + req = GnomeCommonRequirement() + elif isinstance(error, MissingJDKFile): + req = JDKFileRequirement(error.jdk_path, error.filename) else: - logging.warning("unable to handle content type %s", error.content_type) - return False - package = context.apt.get_package_for_paths([path], regex=True) - if package is None: + return None + + try: + package = context.resolve_apt(req) + except NoAptPackage: return False return context.add_dependency(package) @@ -595,159 +555,10 @@ def fix_missing_perl_file(error, context): return context.add_dependency(package) -def get_package_for_node_package(apt, node_package): - paths = [ - "/usr/share/nodejs/.*/node_modules/%s/package.json" % node_package, - "/usr/lib/nodejs/%s/package.json" % node_package, - "/usr/share/nodejs/%s/package.json" % node_package, - ] - return apt.get_package_for_paths(paths, regex=True) - - -def fix_missing_node_module(error, context): - package = get_package_for_node_package(context.apt, error.module) - if package is None: - logging.warning("no node package found for %s.", error.module) - return False - return context.add_dependency(package) - - -def fix_missing_dh_addon(error, context): - paths = [os.path.join("/usr/share/perl5", error.path)] - package = context.apt.get_package_for_paths(paths) - if package is None: - logging.warning("no package for debhelper addon %s", error.name) - return False - return context.add_dependency(package) - - def retry_apt_failure(error, context): return True -def fix_missing_php_class(error, context): - path = "/usr/share/php/%s.php" % error.php_class.replace("\\", "/") - package = context.apt.get_package_for_paths([path]) - if package is None: - logging.warning("no package for PHP class %s", error.php_class) - return False - return context.add_dependency(package) - - -def 
fix_missing_jdk_file(error, context): - path = error.jdk_path + ".*/" + error.filename - package = context.apt.get_package_for_paths([path], regex=True) - if package is None: - logging.warning( - "no package found for %s (JDK: %s) - regex %s", - error.filename, - error.jdk_path, - path, - ) - return False - return context.add_dependency(package) - - -def fix_missing_vala_package(error, context): - path = "/usr/share/vala-[0-9.]+/vapi/%s.vapi" % error.package - package = context.apt.get_package_for_paths([path], regex=True) - if package is None: - logging.warning("no file found for package %s - regex %s", error.package, path) - return False - return context.add_dependency(package) - - -def fix_missing_xml_entity(error, context): - # Ideally we should be using the XML catalog for this, but hardcoding - # a few URLs will do for now.. - URL_MAP = { - "http://www.oasis-open.org/docbook/xml/": "/usr/share/xml/docbook/schema/dtd/" - } - for url, path in URL_MAP.items(): - if error.url.startswith(url): - search_path = os.path.join(path, error.url[len(url) :]) - break - else: - return False - - package = context.apt.get_package_for_paths([search_path], regex=False) - if package is None: - return False - return context.add_dependency(package) - - -def fix_missing_library(error, context): - paths = [ - os.path.join("/usr/lib/lib%s.so$" % error.library), - os.path.join("/usr/lib/.*/lib%s.so$" % error.library), - os.path.join("/usr/lib/lib%s.a$" % error.library), - os.path.join("/usr/lib/.*/lib%s.a$" % error.library), - ] - package = context.apt.get_package_for_paths(paths, regex=True) - if package is None: - logging.warning("no package for library %s", error.library) - return False - return context.add_dependency(package) - - -def fix_missing_ruby_gem(error, context): - paths = [ - os.path.join( - "/usr/share/rubygems-integration/all/" - "specifications/%s-.*\\.gemspec" % error.gem - ) - ] - package = context.apt.get_package_for_paths(paths, regex=True) - if package is None: - 
logging.warning("no package for gem %s", error.gem) - return False - return context.add_dependency(package, minimum_version=error.version) - - -def fix_missing_ruby_file(error, context): - paths = [os.path.join("/usr/lib/ruby/vendor_ruby/%s.rb" % error.filename)] - package = context.apt.get_package_for_paths(paths) - if package is not None: - return context.add_dependency(package) - paths = [ - os.path.join( - r"/usr/share/rubygems-integration/all/gems/([^/]+)/" - "lib/%s.rb" % error.filename - ) - ] - package = context.apt.get_package_for_paths(paths, regex=True) - if package is not None: - return context.add_dependency(package) - - logging.warning("no package for ruby file %s", error.filename) - return False - - -def fix_missing_r_package(error, context): - paths = [os.path.join("/usr/lib/R/site-library/.*/R/%s$" % error.package)] - package = context.apt.get_package_for_paths(paths, regex=True) - if package is None: - logging.warning("no package for R package %s", error.package) - return False - return context.add_dependency(package, minimum_version=error.minimum_version) - - -def fix_missing_java_class(error, context): - # Unfortunately this only finds classes in jars installed on the host - # system :( - output = subprocess.check_output(["java-propose-classpath", "-c" + error.classname]) - classpath = [p for p in output.decode().strip(":").strip().split(":") if p] - if not classpath: - logging.warning("unable to find classpath for %s", error.classname) - return False - logging.info("Classpath for %s: %r", error.classname, classpath) - package = context.apt.get_package_for_paths(classpath) - if package is None: - logging.warning("no package for files in %r", classpath) - return False - return context.add_dependency(package) - - def enable_dh_autoreconf(context): # Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by # default. 
@@ -768,9 +579,8 @@ def enable_dh_autoreconf(context): def fix_missing_configure(error, context): - if not context.tree.has_filename("configure.ac") and not context.tree.has_filename( - "configure.in" - ): + if (not context.tree.has_filename("configure.ac") and + not context.tree.has_filename("configure.in")): return False return enable_dh_autoreconf(context) @@ -783,43 +593,6 @@ def fix_missing_automake_input(error, context): return enable_dh_autoreconf(context) -def fix_missing_maven_artifacts(error, context): - artifact = error.artifacts[0] - parts = artifact.split(":") - if len(parts) == 4: - (group_id, artifact_id, kind, version) = parts - regex = False - elif len(parts) == 3: - (group_id, artifact_id, version) = parts - kind = "jar" - regex = False - elif len(parts) == 2: - version = ".*" - (group_id, artifact_id) = parts - kind = "jar" - regex = True - else: - raise AssertionError("invalid number of parts to artifact %s" % artifact) - paths = [ - os.path.join( - "/usr/share/maven-repo", - group_id.replace(".", "/"), - artifact_id, - version, - "%s-%s.%s" % (artifact_id, version, kind), - ) - ] - package = context.apt.get_package_for_paths(paths, regex=regex) - if package is None: - logging.warning("no package for artifact %s", artifact) - return False - return context.add_dependency(package) - - -def install_gnome_common(error, context): - return context.add_dependency("gnome-common") - - def install_gnome_common_dep(error, context): if error.package == "glib-gettext": package = context.apt.get_package_for_paths(["/usr/bin/glib-gettextize"]) @@ -914,19 +687,6 @@ def fix_missing_autoconf_macro(error, context): return context.add_dependency(package) -def fix_missing_c_sharp_compiler(error, context): - return context.add_dependency("mono-mcs") - - -def fix_missing_haskell_dependencies(error, context): - path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % error.deps[0][0] - package = context.apt.get_package_for_paths([path], regex=True) - if package is None: - 
logging.warning("no package for macro file %s", path) - return False - return context.add_dependency(package) - - VERSIONED_PACKAGE_FIXERS: List[ Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]] ] = [ @@ -939,35 +699,14 @@ VERSIONED_PACKAGE_FIXERS: List[ APT_FIXERS: List[Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ (MissingPythonModule, fix_missing_python_module), (MissingPythonDistribution, fix_missing_python_distribution), - (MissingCHeader, fix_missing_c_header), - (MissingPkgConfig, fix_missing_pkg_config), - (MissingCommand, fix_missing_command), - (MissingFile, fix_missing_file), - (MissingSprocketsFile, fix_missing_sprockets_file), - (MissingGoPackage, fix_missing_go_package), (MissingPerlFile, fix_missing_perl_file), (MissingPerlModule, fix_missing_perl_file), - (MissingXmlEntity, fix_missing_xml_entity), - (MissingNodeModule, fix_missing_node_module), - (MissingRubyGem, fix_missing_ruby_gem), - (MissingRPackage, fix_missing_r_package), - (MissingLibrary, fix_missing_library), - (MissingJavaClass, fix_missing_java_class), - (DhAddonLoadFailure, fix_missing_dh_addon), - (MissingPhpClass, fix_missing_php_class), (AptFetchFailure, retry_apt_failure), - (MissingMavenArtifacts, fix_missing_maven_artifacts), - (GnomeCommonMissing, install_gnome_common), (MissingGnomeCommonDependency, install_gnome_common_dep), (MissingXfceDependency, install_xfce_dep), (MissingConfigStatusInput, fix_missing_config_status_input), - (MissingJDKFile, fix_missing_jdk_file), - (MissingRubyFile, fix_missing_ruby_file), - (MissingJavaScriptRuntime, fix_missing_javascript_runtime), (MissingAutoconfMacro, fix_missing_autoconf_macro), - (MissingValaPackage, fix_missing_vala_package), - (MissingCSharpCompiler, fix_missing_c_sharp_compiler), - (MissingHaskellDependencies, fix_missing_haskell_dependencies), + (Problem, fix_missing_requirement), ] diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py index 65bf1d5..98d929c 100644 --- 
a/ognibuild/requirements.py +++ b/ognibuild/requirements.py @@ -16,6 +16,9 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import posixpath +from typing import Optional, List, Tuple + from . import UpstreamRequirement @@ -62,3 +65,182 @@ class CargoCrateRequirement(UpstreamRequirement): def __init__(self, crate): super(CargoCrateRequirement, self).__init__('cargo-crate') self.crate = crate + + +class PkgConfigRequirement(UpstreamRequirement): + + module: str + + def __init__(self, module, minimum_version=None): + super(PkgConfigRequirement, self).__init__('pkg-config') + self.module = module + self.minimum_version = minimum_version + + +class PathRequirement(UpstreamRequirement): + + path: str + + def __init__(self, path): + super(PathRequirement, self).__init__('path') + self.path = path + + +class CHeaderRequirement(UpstreamRequirement): + + header: str + + def __init__(self, header): + super(CHeaderRequirement, self).__init__('c-header') + self.header = header + + +class JavaScriptRuntimeRequirement(UpstreamRequirement): + + def __init__(self): + super(JavaScriptRuntimeRequirement, self).__init__( + 'javascript-runtime') + + +class ValaPackageRequirement(UpstreamRequirement): + + package: str + + def __init__(self, package: str): + super(ValaPackageRequirement, self).__init__('vala') + self.package = package + + +class RubyGemRequirement(UpstreamRequirement): + + gem: str + minimum_version: Optional[str] + + def __init__(self, gem: str, minimum_version: Optional[str]): + super(RubyGemRequirement, self).__init__('gem') + self.gem = gem + self.minimum_version = minimum_version + + +class GoPackageRequirement(UpstreamRequirement): + + package: str + + def __init__(self, package: str): + super(GoPackageRequirement, self).__init__('go') + self.package = package + + +class DhAddonRequirement(UpstreamRequirement): + + path: str + + def __init__(self, path: str): + 
super(DhAddonRequirement, self).__init__('dh-addon') + self.path = path + + +class PhpClassRequirement(UpstreamRequirement): + + php_class: str + + def __init__(self, php_class: str): + super(PhpClassRequirement, self).__init__('php-class') + self.php_class = php_class + + +class RPackageRequirement(UpstreamRequirement): + + package: str + minimum_version: Optional[str] + + def __init__(self, package: str, minimum_version: Optional[str] = None): + super(RPackageRequirement, self).__init__('r-package') + self.package = package + self.minimum_version = minimum_version + + +class LibraryRequirement(UpstreamRequirement): + + library: str + + def __init__(self, library: str): + super(LibraryRequirement, self).__init__('lib') + self.library = library + + +class RubyFileRequirement(UpstreamRequirement): + + filename: str + + def __init__(self, filename: str): + super(RubyFileRequirement, self).__init__('ruby-file') + self.filename = filename + + +class XmlEntityRequirement(UpstreamRequirement): + + url: str + + def __init__(self, url: str): + super(XmlEntityRequirement, self).__init__('xml-entity') + self.url = url + + +class SprocketsFileRequirement(UpstreamRequirement): + + content_type: str + name: str + + def __init__(self, content_type: str, name: str): + super(SprocketsFileRequirement, self).__init__('sprockets-file') + self.content_type = content_type + self.name = name + + +class JavaClassRequirement(UpstreamRequirement): + + classname: str + + def __init__(self, classname: str): + super(JavaClassRequirement, self).__init__('java-class') + self.classname = classname + + +class HaskellPackageRequirement(UpstreamRequirement): + + package: str + + def __init__(self, package: str): + super(HaskellPackageRequirement, self).__init__('haskell-package') + self.package = package + + +class MavenArtifactRequirement(UpstreamRequirement): + + artifacts: List[Tuple[str, str, str]] + + def __init__(self, artifacts): + super(MavenArtifactRequirement, 
self).__init__('maven-artifact') + self.artifacts = artifacts + + +class GnomeCommonRequirement(UpstreamRequirement): + + def __init__(self): + super(GnomeCommonRequirement, self).__init__('gnome-common') + + +class JDKFileRequirement(UpstreamRequirement): + + jdk_path: str + filename: str + + def __init__(self, jdk_path: str, filename: str): + super(JDKFileRequirement, self).__init__('jdk-file') + self.jdk_path = jdk_path + self.filename = filename + + @property + def path(self): + return posixpath.join(self.jdk_path, self.filename) diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index df1adb2..7d8e444 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -15,13 +15,35 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +import logging import posixpath -from ..apt import AptManager +from ..debian.apt import AptManager from . import Resolver from ..requirements import ( BinaryRequirement, + CHeaderRequirement, + PkgConfigRequirement, + PathRequirement, + UpstreamRequirement, + JavaScriptRuntimeRequirement, + ValaPackageRequirement, + RubyGemRequirement, + GoPackageRequirement, + DhAddonRequirement, + PhpClassRequirement, + RPackageRequirement, + NodePackageRequirement, + LibraryRequirement, + RubyFileRequirement, + XmlEntityRequirement, + SprocketsFileRequirement, + JavaClassRequirement, + HaskellPackageRequirement, + MavenArtifactRequirement, + GnomeCommonRequirement, + JDKFileRequirement, ) @@ -29,6 +51,62 @@ class NoAptPackage(Exception): """No apt package.""" +def get_package_for_python_module(apt_mgr, module, python_version): + if python_version == "python3": + paths = [ + posixpath.join( + "/usr/lib/python3/dist-packages", + module.replace(".", "/"), + "__init__.py", + ), + posixpath.join( + "/usr/lib/python3/dist-packages", module.replace(".", "/") + ".py" + ), + posixpath.join( + "/usr/lib/python3\\.[0-9]+/lib-dynload", + 
module.replace(".", "/") + "\\.cpython-.*\\.so", + ), + posixpath.join( + "/usr/lib/python3\\.[0-9]+/", module.replace(".", "/") + ".py" + ), + posixpath.join( + "/usr/lib/python3\\.[0-9]+/", module.replace(".", "/"), "__init__.py" + ), + ] + elif python_version == "python2": + paths = [ + posixpath.join( + "/usr/lib/python2\\.[0-9]/dist-packages", + module.replace(".", "/"), + "__init__.py", + ), + posixpath.join( + "/usr/lib/python2\\.[0-9]/dist-packages", + module.replace(".", "/") + ".py", + ), + posixpath.join( + "/usr/lib/python2.\\.[0-9]/lib-dynload", + module.replace(".", "/") + ".so", + ), + ] + elif python_version == "pypy": + paths = [ + posixpath.join( + "/usr/lib/pypy/dist-packages", module.replace(".", "/"), "__init__.py" + ), + posixpath.join( + "/usr/lib/pypy/dist-packages", module.replace(".", "/") + ".py" + ), + posixpath.join( + "/usr/lib/pypy/dist-packages", + module.replace(".", "/") + "\\.pypy-.*\\.so", + ), + ] + else: + raise AssertionError("unknown python version %r" % python_version) + return apt_mgr.get_package_for_paths(paths, regex=True) + + def resolve_binary_req(apt_mgr, req): if posixpath.isabs(req.binary_name): paths = [req.binary_name] @@ -40,8 +118,218 @@ def resolve_binary_req(apt_mgr, req): return apt_mgr.get_package_for_paths(paths) +def resolve_pkg_config_req(apt_mgr, req): + package = apt_mgr.get_package_for_paths( + [posixpath.join("/usr/lib/pkgconfig", req.module + ".pc")], + req.minimum_version + ) + if package is None: + package = apt_mgr.get_package_for_paths( + [posixpath.join("/usr/lib", ".*", "pkgconfig", req.module + ".pc")], + regex=True, + minimum_version=req.minimum_version) + return package + + +def resolve_path_req(apt_mgr, req): + return apt_mgr.get_package_for_paths([req.path]) + + +def resolve_c_header_req(apt_mgr, req): + package = apt_mgr.get_package_for_paths( + [posixpath.join("/usr/include", req.header)], regex=False + ) + if package is None: + package = apt_mgr.get_package_for_paths( + 
[posixpath.join("/usr/include", ".*", req.header)], regex=True + ) + return package + + +def resolve_js_runtime_req(apt_mgr, req): + return apt_mgr.get_package_for_paths( + ["/usr/bin/node", "/usr/bin/duk"], regex=False) + + +def resolve_vala_package_req(apt_mgr, req): + path = "/usr/share/vala-[0-9.]+/vapi/%s.vapi" % req.package + return apt_mgr.get_package_for_paths([path], regex=True) + + +def resolve_ruby_gem_req(apt_mgr, req): + paths = [ + posixpath.join( + "/usr/share/rubygems-integration/all/" + "specifications/%s-.*\\.gemspec" % req.gem + ) + ] + return apt_mgr.get_package_for_paths( + paths, regex=True, minimum_version=req.minimum_version) + + +def resolve_go_package_req(apt_mgr, req): + return apt_mgr.get_package_for_paths( + [posixpath.join("/usr/share/gocode/src", req.package, ".*")], + regex=True + ) + + +def resolve_dh_addon_req(apt_mgr, req): + paths = [posixpath.join("/usr/share/perl5", req.path)] + return apt_mgr.get_package_for_paths(paths) + + +def resolve_php_class_req(apt_mgr, req): + path = "/usr/share/php/%s.php" % req.php_class.replace("\\", "/") + return apt_mgr.get_package_for_paths([path]) + + +def resolve_r_package_req(apt_mgr, req): + paths = [posixpath.join("/usr/lib/R/site-library/.*/R/%s$" % req.package)] + return apt_mgr.get_package_for_paths(paths, regex=True) + + +def resolve_node_package_req(apt_mgr, req): + paths = [ + "/usr/share/nodejs/.*/node_modules/%s/package.json" % req.package, + "/usr/lib/nodejs/%s/package.json" % req.package, + "/usr/share/nodejs/%s/package.json" % req.package, + ] + return apt_mgr.get_package_for_paths(paths, regex=True) + + +def resolve_library_req(apt_mgr, req): + paths = [ + posixpath.join("/usr/lib/lib%s.so$" % req.library), + posixpath.join("/usr/lib/.*/lib%s.so$" % req.library), + posixpath.join("/usr/lib/lib%s.a$" % req.library), + posixpath.join("/usr/lib/.*/lib%s.a$" % req.library), + ] + return apt_mgr.get_package_for_paths(paths, regex=True) + + +def resolve_ruby_file_req(apt_mgr, req): + 
paths = [posixpath.join("/usr/lib/ruby/vendor_ruby/%s.rb" % req.filename)] + package = apt_mgr.get_package_for_paths(paths) + if package is not None: + return package + paths = [ + posixpath.join( + r"/usr/share/rubygems-integration/all/gems/([^/]+)/" + "lib/%s.rb" % req.filename + ) + ] + return apt_mgr.get_package_for_paths(paths, regex=True) + + +def resolve_xml_entity_req(apt_mgr, req): + # Ideally we should be using the XML catalog for this, but hardcoding + # a few URLs will do for now.. + URL_MAP = { + "http://www.oasis-open.org/docbook/xml/": "/usr/share/xml/docbook/schema/dtd/" + } + for url, path in URL_MAP.items(): + if req.url.startswith(url): + search_path = posixpath.join(path, req.url[len(url) :]) + break + else: + return None + + return apt_mgr.get_package_for_paths([search_path], regex=False) + + +def resolve_sprockets_file_req(apt_mgr, req): + if req.content_type == "application/javascript": + path = "/usr/share/.*/app/assets/javascripts/%s.js$" % req.name + else: + logging.warning("unable to handle content type %s", req.content_type) + return None + return apt_mgr.get_package_for_paths([path], regex=True) + + +def resolve_java_class_req(apt_mgr, req): + # Unfortunately this only finds classes in jars installed on the host + # system :( + # TODO(jelmer): Call in session + output = apt_mgr.session.check_output( + ["java-propose-classpath", "-c" + req.classname]) + classpath = [p for p in output.decode().strip(":").strip().split(":") if p] + if not classpath: + logging.warning("unable to find classpath for %s", req.classname) + return False + logging.info("Classpath for %s: %r", req.classname, classpath) + package = apt_mgr.get_package_for_paths(classpath) + if package is None: + logging.warning("no package for files in %r", classpath) + return None + return package + + +def resolve_haskell_package_req(apt_mgr, req): + path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % req.deps[0][0] + return apt_mgr.get_package_for_paths([path], regex=True) + + +def 
resolve_maven_artifact_req(apt_mgr, req): + artifact = req.artifacts[0] + parts = artifact.split(":") + if len(parts) == 4: + (group_id, artifact_id, kind, version) = parts + regex = False + elif len(parts) == 3: + (group_id, artifact_id, version) = parts + kind = "jar" + regex = False + elif len(parts) == 2: + version = ".*" + (group_id, artifact_id) = parts + kind = "jar" + regex = True + else: + raise AssertionError("invalid number of parts to artifact %s" % artifact) + paths = [ + posixpath.join( + "/usr/share/maven-repo", + group_id.replace(".", "/"), + artifact_id, + version, + "%s-%s.%s" % (artifact_id, version, kind), + ) + ] + return apt_mgr.get_package_for_paths(paths, regex=regex) + + +def resolve_gnome_common_req(apt_mgr, req): + return 'gnome-common' + + +def resolve_jdk_file_req(apt_mgr, req): + path = req.jdk_path + ".*/" + req.filename + return apt_mgr.get_package_for_paths([path], regex=True) + + APT_REQUIREMENT_RESOLVERS = [ (BinaryRequirement, resolve_binary_req), + (PkgConfigRequirement, resolve_pkg_config_req), + (PathRequirement, resolve_path_req), + (CHeaderRequirement, resolve_c_header_req), + (JavaScriptRuntimeRequirement, resolve_js_runtime_req), + (ValaPackageRequirement, resolve_vala_package_req), + (RubyGemRequirement, resolve_ruby_gem_req), + (GoPackageRequirement, resolve_go_package_req), + (DhAddonRequirement, resolve_dh_addon_req), + (PhpClassRequirement, resolve_php_class_req), + (RPackageRequirement, resolve_r_package_req), + (NodePackageRequirement, resolve_node_package_req), + (LibraryRequirement, resolve_library_req), + (RubyFileRequirement, resolve_ruby_file_req), + (XmlEntityRequirement, resolve_xml_entity_req), + (SprocketsFileRequirement, resolve_sprockets_file_req), + (JavaClassRequirement, resolve_java_class_req), + (HaskellPackageRequirement, resolve_haskell_package_req), + (MavenArtifactRequirement, resolve_maven_artifact_req), + (GnomeCommonRequirement, resolve_gnome_common_req), + (JDKFileRequirement, 
resolve_jdk_file_req), ] @@ -70,14 +358,12 @@ class AptResolver(Resolver): def explain(self, requirements): raise NotImplementedError(self.explain) - def resolve(self, requirements): - for req in requirements: - for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: - if isinstance(req, rr_class): - package_name = rr_fn(self.apt, req) - if package_name is None: - raise NoAptPackage() - yield package_name - break - else: - raise NotImplementedError + def resolve(self, req: UpstreamRequirement): + for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: + if isinstance(req, rr_class): + deb_req = rr_fn(self.apt, req) + if deb_req is None: + raise NoAptPackage() + return deb_req + else: + raise NotImplementedError From c13fcb830638184568b6cb5b9ea5c9d8e7f58094 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 24 Feb 2021 02:54:18 +0000 Subject: [PATCH 69/83] Move more to resolver-apt. --- ognibuild/debian/fix_build.py | 135 +++++++++++----------------------- ognibuild/requirements.py | 27 ++++++- ognibuild/resolver/apt.py | 48 ++++++++++++ 3 files changed, 118 insertions(+), 92 deletions(-) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index d72721f..0d9e0e2 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -133,6 +133,9 @@ from ..requirements import ( MavenArtifactRequirement, GnomeCommonRequirement, JDKFileRequirement, + PerlModuleRequirement, + PerlFileRequirement, + AutoconfMacroRequirement, ) from .build import attempt_build, DEFAULT_BUILDER @@ -506,6 +509,31 @@ def fix_missing_requirement(error, context): req = GnomeCommonRequirement() elif isinstance(error, MissingJDKFile): req = JDKFileRequirement(error.jdk_path, error.filename) + elif isinstance(error, MissingGnomeCommonDependency): + if error.package == "glib-gettext": + req = BinaryRequirement('glib-gettextize') + else: + logging.warning( + "No known command for gnome-common dependency %s", + error.package) + return None + elif 
isinstance(error, MissingXfceDependency): + if error.package == "gtk-doc": + req = BinaryRequirement("gtkdocize") + else: + logging.warning( + "No known command for xfce dependency %s", + error.package) + return None + elif isinstance(error, MissingPerlModule): + req = PerlModuleRequirement( + module=error.module, + filename=error.filename, + inc=error.inc) + elif isinstance(error, MissingPerlFile): + req = PerlFileRequirement(filename=error.filename) + elif isinstance(error, MissingAutoconfMacro): + req = AutoconfMacroRequirement(error.macro) else: return None @@ -519,42 +547,6 @@ def fix_missing_requirement(error, context): DEFAULT_PERL_PATHS = ["/usr/share/perl5"] -def fix_missing_perl_file(error, context): - - if ( - error.filename == "Makefile.PL" - and not context.tree.has_filename("Makefile.PL") - and context.tree.has_filename("dist.ini") - ): - # TODO(jelmer): add dist-zilla add-on to debhelper - raise NotImplementedError - - if error.inc is None: - if error.filename is None: - filename = error.module.replace("::", "/") + ".pm" - paths = [os.path.join(inc, filename) for inc in DEFAULT_PERL_PATHS] - elif not os.path.isabs(error.filename): - return False - else: - paths = [error.filename] - else: - paths = [os.path.join(inc, error.filename) for inc in error.inc] - package = context.apt.get_package_for_paths(paths, regex=False) - if package is None: - if getattr(error, "module", None): - logging.warning( - "no perl package found for %s (%r).", error.module, error.filename - ) - else: - logging.warning( - "perl file %s not found (paths searched for: %r).", - error.filename, - paths, - ) - return False - return context.add_dependency(package) - - def retry_apt_failure(error, context): return True @@ -593,30 +585,6 @@ def fix_missing_automake_input(error, context): return enable_dh_autoreconf(context) -def install_gnome_common_dep(error, context): - if error.package == "glib-gettext": - package = context.apt.get_package_for_paths(["/usr/bin/glib-gettextize"]) - 
else: - package = None - if package is None: - logging.warning("No debian package for package %s", error.package) - return False - return context.add_dependency( - package=package, minimum_version=error.minimum_version - ) - - -def install_xfce_dep(error, context): - if error.package == "gtk-doc": - package = context.apt.get_package_for_paths(["/usr/bin/gtkdocize"]) - else: - package = None - if package is None: - logging.warning("No debian package for package %s", error.package) - return False - return context.add_dependency(package=package) - - def fix_missing_config_status_input(error, context): autogen_path = "autogen.sh" rules_path = "debian/rules" @@ -648,19 +616,6 @@ def fix_missing_config_status_input(error, context): return True -def _find_aclocal_fun(macro): - # TODO(jelmer): Use the API for codesearch.debian.net instead? - defun_prefix = b"AC_DEFUN([%s]," % macro.encode("ascii") - for entry in os.scandir("/usr/share/aclocal"): - if not entry.is_file(): - continue - with open(entry.path, "rb") as f: - for line in f: - if line.startswith(defun_prefix): - return entry.path - raise KeyError - - def run_pgbuildext_updatecontrol(error, context): logging.info("Running 'pg_buildext updatecontrol'") # TODO(jelmer): run in the schroot @@ -674,17 +629,15 @@ def run_pgbuildext_updatecontrol(error, context): ) -def fix_missing_autoconf_macro(error, context): - try: - path = _find_aclocal_fun(error.macro) - except KeyError: - logging.info("No local m4 file found defining %s", error.macro) - return False - package = context.apt.get_package_for_paths([path]) - if package is None: - logging.warning("no package for macro file %s", path) - return False - return context.add_dependency(package) +def fix_missing_makefile_pl(error, context): + if ( + error.filename == "Makefile.PL" + and not context.tree.has_filename("Makefile.PL") + and context.tree.has_filename("dist.ini") + ): + # TODO(jelmer): add dist-zilla add-on to debhelper + raise NotImplementedError + return False 
VERSIONED_PACKAGE_FIXERS: List[ @@ -699,17 +652,17 @@ VERSIONED_PACKAGE_FIXERS: List[ APT_FIXERS: List[Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ (MissingPythonModule, fix_missing_python_module), (MissingPythonDistribution, fix_missing_python_distribution), - (MissingPerlFile, fix_missing_perl_file), - (MissingPerlModule, fix_missing_perl_file), (AptFetchFailure, retry_apt_failure), - (MissingGnomeCommonDependency, install_gnome_common_dep), - (MissingXfceDependency, install_xfce_dep), - (MissingConfigStatusInput, fix_missing_config_status_input), - (MissingAutoconfMacro, fix_missing_autoconf_macro), + (MissingPerlFile, fix_missing_makefile_pl), (Problem, fix_missing_requirement), ] +GENERIC_FIXERS: List[Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ + (MissingConfigStatusInput, fix_missing_config_status_input), +] + + def resolve_error(error, context, fixers): relevant_fixers = [] for error_cls, fixer in fixers: @@ -793,7 +746,7 @@ def build_incrementally( raise try: if not resolve_error( - e.error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS + e.error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS + GENERIC_FIXERS ): logging.warning("Failed to resolve error %r. 
Giving up.", e.error) raise diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py index 98d929c..45c176a 100644 --- a/ognibuild/requirements.py +++ b/ognibuild/requirements.py @@ -43,10 +43,17 @@ class BinaryRequirement(UpstreamRequirement): class PerlModuleRequirement(UpstreamRequirement): module: str + filename: Optional[str] + inc: Optional[List[str]] - def __init__(self, module): + def __init__(self, module, filename=None, inc=None): super(PerlModuleRequirement, self).__init__('perl-module') self.module = module + self.filename = filename + self.inc = inc + + def relfilename(self): + return self.module.replace("::", "/") + ".pm" class NodePackageRequirement(UpstreamRequirement): @@ -244,3 +251,21 @@ class JDKFileRequirement(UpstreamRequirement): @property def path(self): return posixpath.join(self.jdk_path, self.filename) + + +class PerlFileRequirement(UpstreamRequirement): + + filename: str + + def __init__(self, filename: str): + super(PerlFileRequirement, self).__init__('perl-file') + self.filename = filename + + +class AutoconfMacroRequirement(UpstreamRequirement): + + macro: str + + def __init__(self, macro: str): + super(AutoconfMacroRequirement, self).__init__('autoconf-macro') + self.macro = macro diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index 7d8e444..6864119 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -44,6 +44,9 @@ from ..requirements import ( MavenArtifactRequirement, GnomeCommonRequirement, JDKFileRequirement, + PerlModuleRequirement, + PerlFileRequirement, + AutoconfMacroRequirement, ) @@ -308,6 +311,48 @@ def resolve_jdk_file_req(apt_mgr, req): return apt_mgr.get_package_for_paths([path], regex=True) +def resolve_perl_module_req(apt_mgr, req): + DEFAULT_PERL_PATHS = ["/usr/share/perl5"] + + if req.inc is None: + if req.filename is None: + paths = [posixpath.join(inc, req.relfilename) + for inc in DEFAULT_PERL_PATHS] + elif not posixpath.isabs(req.filename): + return False + 
else: + paths = [req.filename] + else: + paths = [posixpath.join(inc, req.filename) for inc in req.inc] + return apt_mgr.get_package_for_paths(paths, regex=False) + + +def resolve_perl_file_req(apt_mgr, req): + return apt_mgr.get_package_for_paths([req.filename], regex=False) + + +def _find_aclocal_fun(macro): + # TODO(jelmer): Use the API for codesearch.debian.net instead? + defun_prefix = b"AC_DEFUN([%s]," % macro.encode("ascii") + for entry in os.scandir("/usr/share/aclocal"): + if not entry.is_file(): + continue + with open(entry.path, "rb") as f: + for line in f: + if line.startswith(defun_prefix): + return entry.path + raise KeyError + + +def resolve_autoconf_macro_req(apt_mgr, req): + try: + path = _find_aclocal_fun(req.macro) + except KeyError: + logging.info("No local m4 file found defining %s", req.macro) + return None + return apt_mgr.get_package_for_paths([path]) + + APT_REQUIREMENT_RESOLVERS = [ (BinaryRequirement, resolve_binary_req), (PkgConfigRequirement, resolve_pkg_config_req), @@ -330,6 +375,9 @@ APT_REQUIREMENT_RESOLVERS = [ (MavenArtifactRequirement, resolve_maven_artifact_req), (GnomeCommonRequirement, resolve_gnome_common_req), (JDKFileRequirement, resolve_jdk_file_req), + (PerlModuleRequirement, resolve_perl_module_req), + (PerlFileRequirement, resolve_perl_file_req), + (AutoconfMacroRequirement, resolve_autoconf_macro_req), ] From eddc4d272bc37e4c40cc361e68964b763283b2bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 24 Feb 2021 03:21:48 +0000 Subject: [PATCH 70/83] Add fixer class. 
--- ognibuild/debian/fix_build.py | 28 ++++++++++---------- ognibuild/fix_build.py | 49 ++++++++++++++++++++++++++++------- 2 files changed, 54 insertions(+), 23 deletions(-) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 0d9e0e2..677de43 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -106,6 +106,7 @@ from buildlog_consultant.sbuild import ( ) from .apt import AptManager, LocalAptManager +from ..fix_build import BuildFixer, SimpleBuildFixer from ..resolver.apt import ( AptResolver, NoAptPackage, @@ -640,26 +641,25 @@ def fix_missing_makefile_pl(error, context): return False -VERSIONED_PACKAGE_FIXERS: List[ - Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]] -] = [ - (NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), - (MissingConfigure, fix_missing_configure), - (MissingAutomakeInput, fix_missing_automake_input), +VERSIONED_PACKAGE_FIXERS: List[BuildFixer] = [ + SimpleBuildFixer( + NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), + SimpleBuildFixer(MissingConfigure, fix_missing_configure), + SimpleBuildFixer(MissingAutomakeInput, fix_missing_automake_input), ] -APT_FIXERS: List[Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ - (MissingPythonModule, fix_missing_python_module), - (MissingPythonDistribution, fix_missing_python_distribution), - (AptFetchFailure, retry_apt_failure), - (MissingPerlFile, fix_missing_makefile_pl), - (Problem, fix_missing_requirement), +APT_FIXERS: List[BuildFixer] = [ + SimpleBuildFixer(MissingPythonModule, fix_missing_python_module), + SimpleBuildFixer(MissingPythonDistribution, fix_missing_python_distribution), + SimpleBuildFixer(AptFetchFailure, retry_apt_failure), + SimpleBuildFixer(MissingPerlFile, fix_missing_makefile_pl), + SimpleBuildFixer(Problem, fix_missing_requirement), ] -GENERIC_FIXERS: List[Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]]] = [ - (MissingConfigStatusInput, 
fix_missing_config_status_input), +GENERIC_FIXERS: List[BuildFixer] = [ + SimpleBuildFixer(MissingConfigStatusInput, fix_missing_config_status_input), ] diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index c393164..a6326f2 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -16,7 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import logging -from typing import List, Tuple, Callable, Type +from typing import List, Tuple, Callable, Type, Optional from buildlog_consultant.common import ( find_build_failure_description, @@ -36,6 +36,34 @@ from .debian.fix_build import ( from .session import Session, run_with_tee +class BuildFixer(object): + """Build fixer.""" + + def can_fix(self, problem): + raise NotImplementedError(self.can_fix) + + def _fix(self, problem, context): + raise NotImplementedError(self._fix) + + def fix(self, problem, context): + if not self.can_fix(problem): + return None + return self._fix(problem, context) + + +class SimpleBuildFixer(BuildFixer): + + def __init__(self, problem_cls, fn): + self._problem_cls = problem_cls + self._fn = fn + + def can_fix(self, problem): + return isinstance(problem, self._problem_cls) + + def _fix(self, problem, context): + return self._fn(problem, context) + + class SchrootDependencyContext(DependencyContext): def __init__(self, session): self.session = session @@ -75,16 +103,19 @@ def fix_python_package_from_pip(error, context): return True -GENERIC_INSTALL_FIXERS: List[ - Tuple[Type[Problem], Callable[[Problem, DependencyContext], bool]] -] = [ - (MissingPerlModule, fix_perl_module_from_cpan), - (MissingPythonDistribution, fix_python_package_from_pip), - (MissingCommand, fix_npm_missing_command), +GENERIC_INSTALL_FIXERS: List[BuildFixer] = [ + SimpleBuildFixer(MissingPerlModule, fix_perl_module_from_cpan), + SimpleBuildFixer(MissingPythonDistribution, fix_python_package_from_pip), + SimpleBuildFixer(MissingCommand, fix_npm_missing_command), ] -def 
run_with_build_fixer(session: Session, args: List[str]): +def run_with_build_fixer( + session: Session, args: List[str], + fixers: Optional[List[BuildFixer]] = None): + if fixers is None: + from .debian.fix_build import APT_FIXERS + fixers = GENERIC_INSTALL_FIXERS + APT_FIXERS logging.info("Running %r", args) fixed_errors = [] while True: @@ -108,7 +139,7 @@ def run_with_build_fixer(session: Session, args: List[str]): if not resolve_error( error, SchrootDependencyContext(session), - fixers=(APT_FIXERS + GENERIC_INSTALL_FIXERS), + fixers=fixers, ): logging.warning("Failed to find resolution for error %r. Giving up.", error) raise DetailedFailure(retcode, args, error) From e3ca48a8e0558c635de55427857c0d4c3ad88aee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 24 Feb 2021 15:15:30 +0000 Subject: [PATCH 71/83] Add SECURITY.md. --- SECURITY.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..c0935a0 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,10 @@ +# Security Policy + +## Supported Versions + +ognibuild is still under heavy development. Only the latest version is security +supported. + +## Reporting a Vulnerability + +Please report security issues by e-mail to jelmer@jelmer.uk, ideally PGP encrypted to the key at https://jelmer.uk/D729A457.asc From 22551c1ffb5adaf7375a4853401bdd804a56bc56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 24 Feb 2021 15:17:06 +0000 Subject: [PATCH 72/83] add code of conduct. 
--- CODE_OF_CONDUCT.md | 76 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 CODE_OF_CONDUCT.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..dd4bb87 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at team@dulwich.io. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq From 8a7ad4fdd881120634085825464cff8bc60a6c88 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Wed, 24 Feb 2021 15:33:44 +0000 Subject: [PATCH 73/83] Update email. --- CODE_OF_CONDUCT.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index dd4bb87..d84740b 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -55,7 +55,7 @@ further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at team@dulwich.io. All +reported by contacting the project lead at jelmer@jelmer.uk. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. From c184e01aef01bc94ec2ffd7ad749b58aaace9af9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Thu, 25 Feb 2021 03:22:55 +0000 Subject: [PATCH 74/83] More refactoring. 
--- notes/architecture.md | 29 ++++- ROADMAP => notes/roadmap.md | 0 ognibuild/__init__.py | 12 ++ ognibuild/__main__.py | 5 +- ognibuild/buildsystem.py | 12 +- ognibuild/debian/apt.py | 11 +- ognibuild/debian/fix_build.py | 222 ++++++++++++++-------------------- ognibuild/fix_build.py | 48 ++++++-- ognibuild/resolver/apt.py | 21 ++-- 9 files changed, 197 insertions(+), 163 deletions(-) rename ROADMAP => notes/roadmap.md (100%) diff --git a/notes/architecture.md b/notes/architecture.md index 960892c..02ee04f 100644 --- a/notes/architecture.md +++ b/notes/architecture.md @@ -13,14 +13,16 @@ requirements are met. Then we attempt to build. -If any problems are found in the log, buildlog-consultant will report them. +If any Problems are found in the log, buildlog-consultant will report them. -ognibuild can then invoke "fixers" to address Problems. +ognibuild can then invoke "fixers" to address Problems. Fixers can do things +like e.g. upgrade configure.ac to a newer version, or invoke autoreconf. + +A list of possible fixers can be provided. Each fixer will be called +(in order) until one of them claims to have fixed the issue. Problems can be converted to UpstreamRequirements by UpstreamRequirementFixer -Other Fixer can do things like e.g. upgrade configure.ac to a newer version. - UpstreamRequirementFixer uses a UpstreamRequirementResolver object that can translate UpstreamRequirement objects into apt package names or e.g. cpan commands. @@ -28,3 +30,22 @@ e.g. cpan commands. ognibuild keeps finding problems, resolving them and rebuilding until it finds a problem it can not resolve or that it thinks it has already resolved (i.e. seen before). + +Operations are run in a Session - this can represent a virtualized +environment of some sort (e.g. a chroot or virtualenv) or simply +on the host machine. + +For e.g.
PerlModuleRequirement, need to be able to: + + * install from apt package + + DebianInstallFixer(AptResolver()).fix(problem) + * update debian package (source, runtime, test) deps to include apt package + + DebianPackageDepFixer(AptResolver()).fix(problem, ('test', 'foo')) + * suggest command to run to install from apt package + + DebianInstallFixer(AptResolver()).command(problem) + * install from cpan + + CpanInstallFixer().fix(problem) + * suggest command to run to install from cpan package + + CpanInstallFixer().command(problem) + * update source package reqs to depend on perl module + + PerlDepFixer().fix(problem) diff --git a/ROADMAP b/notes/roadmap.md similarity index 100% rename from ROADMAP rename to notes/roadmap.md diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 132e417..eb32b9d 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -28,6 +28,15 @@ class DetailedFailure(Exception): self.error = error +class UnidentifiedError(Exception): + + def __init__(self, retcode, argv, lines, secondary=None): + self.retcode = retcode + self.argv = argv + self.lines = lines + self.secondary = secondary + + def shebang_binary(p): if not (os.stat(p).st_mode & stat.S_IEXEC): return None @@ -49,6 +58,9 @@ class UpstreamRequirement(object): def __init__(self, family): self.family = family + def possible_paths(self): + raise NotImplementedError + class UpstreamOutput(object): diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index ab562ce..808eb76 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -18,7 +18,7 @@ import logging import os import sys -from .apt import UnidentifiedError +from . 
import UnidentifiedError from .buildsystem import NoBuildToolsFound, detect_buildsystems from .build import run_build from .clean import run_clean @@ -127,7 +127,8 @@ def main(): # noqa: C901 return 1 except MissingDependencies as e: for req in e.requirements: - logging.info("Missing dependency (%s:%s)", (req.family, req.name)) + logging.info("Missing dependency (%s:%s)", + req.family, req.name) for resolver in [ AptResolver.from_session(session), NativeResolver.from_session(session), diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index d36f019..6d311f8 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -22,7 +22,7 @@ import os import re import warnings -from . import shebang_binary, UpstreamOutput +from . import shebang_binary, UpstreamOutput, UnidentifiedError from .requirements import ( BinaryRequirement, PythonPackageRequirement, @@ -30,7 +30,6 @@ from .requirements import ( NodePackageRequirement, CargoCrateRequirement, ) -from .apt import UnidentifiedError from .fix_build import run_with_build_fixer @@ -136,6 +135,10 @@ class SetupPy(BuildSystem): self.setup(resolver) self._run_setup(session, resolver, ["test"]) + def build(self, session, resolver): + self.setup(resolver) + self._run_setup(session, resolver, ["build"]) + def dist(self, session, resolver): self.setup(resolver) self._run_setup(session, resolver, ["sdist"]) @@ -370,6 +373,11 @@ class Make(BuildSystem): if not session.exists("Makefile") and session.exists("configure"): session.check_call(["./configure"]) + def build(self, session, resolver): + self.setup(session, resolver) + resolver.install([BinaryRequirement("make")]) + run_with_build_fixer(session, ["make", "all"]) + def dist(self, session, resolver): self.setup(session, resolver) resolver.install([BinaryRequirement("make")]) diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py index e8a6934..cd55fa5 100644 --- a/ognibuild/debian/apt.py +++ b/ognibuild/debian/apt.py @@ -26,19 +26,10 @@ from 
buildlog_consultant.apt import ( ) from debian.deb822 import Release -from .. import DetailedFailure +from .. import DetailedFailure, UnidentifiedError from ..session import Session, run_with_tee -class UnidentifiedError(Exception): - - def __init__(self, retcode, argv, lines, secondary=None): - self.retcode = retcode - self.argv = argv - self.lines = lines - self.secondary = secondary - - def run_apt(session: Session, args: List[str]) -> None: """Run apt.""" args = ["apt", "-y"] + args diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 677de43..50ecc24 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -22,7 +22,7 @@ __all__ = [ import logging import os import sys -from typing import List, Callable, Type, Tuple, Set, Optional +from typing import List, Set, Optional from debian.deb822 import ( Deb822, @@ -105,10 +105,9 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) -from .apt import AptManager, LocalAptManager -from ..fix_build import BuildFixer, SimpleBuildFixer +from .apt import LocalAptManager +from ..fix_build import BuildFixer, SimpleBuildFixer, resolve_error, DependencyContext from ..resolver.apt import ( - AptResolver, NoAptPackage, get_package_for_python_module, ) @@ -151,31 +150,6 @@ class CircularDependency(Exception): self.package = package -class DependencyContext(object): - def __init__( - self, - tree: MutableTree, - apt: AptManager, - subpath: str = "", - committer: Optional[str] = None, - update_changelog: bool = True, - ): - self.tree = tree - self.apt = apt - self.resolver = AptResolver(apt) - self.subpath = subpath - self.committer = committer - self.update_changelog = update_changelog - - def resolve_apt(self, req): - return self.resolver.resolve(req) - - def add_dependency( - self, package: str, minimum_version: Optional[Version] = None - ) -> bool: - raise NotImplementedError(self.add_dependency) - - class BuildDependencyContext(DependencyContext): def 
add_dependency(self, package: str, minimum_version: Optional[Version] = None): return add_build_dependency( @@ -462,90 +436,95 @@ def fix_missing_python_module(error, context): return True -def fix_missing_requirement(error, context): - if isinstance(error, MissingFile): - req = PathRequirement(error.path) - elif isinstance(error, MissingCommand): - req = BinaryRequirement(error.command) - elif isinstance(error, MissingPkgConfig): - req = PkgConfigRequirement( - error.module, error.minimum_version) - elif isinstance(error, MissingCHeader): - req = CHeaderRequirement(error.header) - elif isinstance(error, MissingJavaScriptRuntime): - req = JavaScriptRuntimeRequirement() - elif isinstance(error, MissingRubyGem): - req = RubyGemRequirement(error.gem, error.version) - elif isinstance(error, MissingValaPackage): - req = ValaPackageRequirement(error.package) - elif isinstance(error, MissingGoPackage): - req = GoPackageRequirement(error.package) - elif isinstance(error, DhAddonLoadFailure): - req = DhAddonRequirement(error.path) - elif isinstance(error, MissingPhpClass): - req = PhpClassRequirement(error.php_class) - elif isinstance(error, MissingRPackage): - req = RPackageRequirement(error.package, error.minimum_version) - elif isinstance(error, MissingNodeModule): - req = NodePackageRequirement(error.module) - elif isinstance(error, MissingLibrary): - req = LibraryRequirement(error.library) - elif isinstance(error, MissingRubyFile): - req = RubyFileRequirement(error.filename) - elif isinstance(error, MissingXmlEntity): - req = XmlEntityRequirement(error.url) - elif isinstance(error, MissingSprocketsFile): - req = SprocketsFileRequirement(error.content_type, error.name) - elif isinstance(error, MissingJavaClass): - req = JavaClassRequirement(error.classname) - elif isinstance(error, MissingHaskellDependencies): +def problem_to_upstream_requirement(problem, context): + if isinstance(problem, MissingFile): + return PathRequirement(problem.path) + elif isinstance(problem, 
MissingCommand): + return BinaryRequirement(problem.command) + elif isinstance(problem, MissingPkgConfig): + return PkgConfigRequirement( + problem.module, problem.minimum_version) + elif isinstance(problem, MissingCHeader): + return CHeaderRequirement(problem.header) + elif isinstance(problem, MissingJavaScriptRuntime): + return JavaScriptRuntimeRequirement() + elif isinstance(problem, MissingRubyGem): + return RubyGemRequirement(problem.gem, problem.version) + elif isinstance(problem, MissingValaPackage): + return ValaPackageRequirement(problem.package) + elif isinstance(problem, MissingGoPackage): + return GoPackageRequirement(problem.package) + elif isinstance(problem, DhAddonLoadFailure): + return DhAddonRequirement(problem.path) + elif isinstance(problem, MissingPhpClass): + return PhpClassRequirement(problem.php_class) + elif isinstance(problem, MissingRPackage): + return RPackageRequirement(problem.package, problem.minimum_version) + elif isinstance(problem, MissingNodeModule): + return NodePackageRequirement(problem.module) + elif isinstance(problem, MissingLibrary): + return LibraryRequirement(problem.library) + elif isinstance(problem, MissingRubyFile): + return RubyFileRequirement(problem.filename) + elif isinstance(problem, MissingXmlEntity): + return XmlEntityRequirement(problem.url) + elif isinstance(problem, MissingSprocketsFile): + return SprocketsFileRequirement(problem.content_type, problem.name) + elif isinstance(problem, MissingJavaClass): + return JavaClassRequirement(problem.classname) + elif isinstance(problem, MissingHaskellDependencies): # TODO(jelmer): Create multiple HaskellPackageRequirement objects? - req = HaskellPackageRequirement(error.package) - elif isinstance(error, MissingMavenArtifacts): + return HaskellPackageRequirement(problem.package) + elif isinstance(problem, MissingMavenArtifacts): # TODO(jelmer): Create multiple MavenArtifactRequirement objects? 
- req = MavenArtifactRequirement(error.artifacts) - elif isinstance(error, MissingCSharpCompiler): - req = BinaryRequirement('msc') - elif isinstance(error, GnomeCommonMissing): - req = GnomeCommonRequirement() - elif isinstance(error, MissingJDKFile): - req = JDKFileRequirement(error.jdk_path, error.filename) - elif isinstance(error, MissingGnomeCommonDependency): - if error.package == "glib-gettext": - req = BinaryRequirement('glib-gettextize') + return MavenArtifactRequirement(problem.artifacts) + elif isinstance(problem, MissingCSharpCompiler): + return BinaryRequirement('msc') + elif isinstance(problem, GnomeCommonMissing): + return GnomeCommonRequirement() + elif isinstance(problem, MissingJDKFile): + return JDKFileRequirement(problem.jdk_path, problem.filename) + elif isinstance(problem, MissingGnomeCommonDependency): + if problem.package == "glib-gettext": + return BinaryRequirement('glib-gettextize') else: logging.warning( "No known command for gnome-common dependency %s", - error.package) + problem.package) return None - elif isinstance(error, MissingXfceDependency): - if error.package == "gtk-doc": - req = BinaryRequirement("gtkdocize") + elif isinstance(problem, MissingXfceDependency): + if problem.package == "gtk-doc": + return BinaryRequirement("gtkdocize") else: logging.warning( "No known command for xfce dependency %s", - error.package) + problem.package) return None - elif isinstance(error, MissingPerlModule): - req = PerlModuleRequirement( - module=error.module, - filename=error.filename, - inc=error.inc) - elif isinstance(error, MissingPerlFile): - req = PerlFileRequirement(filename=error.filename) - elif isinstance(error, MissingAutoconfMacro): - req = AutoconfMacroRequirement(error.macro) + elif isinstance(problem, MissingPerlModule): + return PerlModuleRequirement( + module=problem.module, + filename=problem.filename, + inc=problem.inc) + elif isinstance(problem, MissingPerlFile): + return PerlFileRequirement(filename=problem.filename) + elif 
isinstance(problem, MissingAutoconfMacro): + return AutoconfMacroRequirement(problem.macro) else: return None - try: - package = context.resolve_apt(req) - except NoAptPackage: - return False - return context.add_dependency(package) +class UpstreamRequirementFixer(BuildFixer): -DEFAULT_PERL_PATHS = ["/usr/share/perl5"] + def fix_missing_requirement(self, error, context): + req = problem_to_upstream_requirement(error) + if req is None: + return False + + try: + package = context.resolver.resolve(req) + except NoAptPackage: + return False + return context.add_dependency(package) def retry_apt_failure(error, context): @@ -646,6 +625,7 @@ VERSIONED_PACKAGE_FIXERS: List[BuildFixer] = [ NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), SimpleBuildFixer(MissingConfigure, fix_missing_configure), SimpleBuildFixer(MissingAutomakeInput, fix_missing_automake_input), + SimpleBuildFixer(MissingConfigStatusInput, fix_missing_config_status_input), ] @@ -653,36 +633,15 @@ APT_FIXERS: List[BuildFixer] = [ SimpleBuildFixer(MissingPythonModule, fix_missing_python_module), SimpleBuildFixer(MissingPythonDistribution, fix_missing_python_distribution), SimpleBuildFixer(AptFetchFailure, retry_apt_failure), - SimpleBuildFixer(MissingPerlFile, fix_missing_makefile_pl), - SimpleBuildFixer(Problem, fix_missing_requirement), + UpstreamRequirementFixer(), ] GENERIC_FIXERS: List[BuildFixer] = [ - SimpleBuildFixer(MissingConfigStatusInput, fix_missing_config_status_input), + SimpleBuildFixer(MissingPerlFile, fix_missing_makefile_pl), ] -def resolve_error(error, context, fixers): - relevant_fixers = [] - for error_cls, fixer in fixers: - if isinstance(error, error_cls): - relevant_fixers.append(fixer) - if not relevant_fixers: - logging.warning("No fixer found for %r", error) - return False - for fixer in relevant_fixers: - logging.info("Attempting to use fixer %r to address %r", fixer, error) - try: - made_changes = fixer(error, context) - except GeneratedFile: - logging.warning("Control 
file is generated, unable to edit.") - return False - if made_changes: - return True - return False - - def build_incrementally( local_tree, apt, @@ -714,17 +673,17 @@ def build_incrementally( if e.error is None: logging.warning("Build failed with unidentified error. Giving up.") raise - if e.context is None: + if e.phase is None: logging.info("No relevant context, not making any changes.") raise - if (e.error, e.context) in fixed_errors: + if (e.error, e.phase) in fixed_errors: logging.warning("Error was still not fixed on second try. Giving up.") raise if max_iterations is not None and len(fixed_errors) > max_iterations: logging.warning("Last fix did not address the issue. Giving up.") raise reset_tree(local_tree, local_tree.basis_tree(), subpath=subpath) - if e.context[0] == "build": + if e.phase[0] == "build": context = BuildDependencyContext( local_tree, apt, @@ -732,9 +691,9 @@ def build_incrementally( committer=committer, update_changelog=update_changelog, ) - elif e.context[0] == "autopkgtest": + elif e.phase[0] == "autopkgtest": context = AutopkgtestDependencyContext( - e.context[1], + e.phase[1], local_tree, apt, subpath=subpath, @@ -742,7 +701,7 @@ def build_incrementally( update_changelog=update_changelog, ) else: - logging.warning("unable to install for context %r", e.context) + logging.warning("unable to install for context %r", e.phase) raise try: if not resolve_error( @@ -750,13 +709,18 @@ def build_incrementally( ): logging.warning("Failed to resolve error %r. 
Giving up.", e.error) raise + except GeneratedFile: + logging.warning( + "Control file is generated, unable to edit to " + "resolver error %r.", e.error) + raise e except CircularDependency: logging.warning( "Unable to fix %r; it would introduce a circular " "dependency.", e.error, ) raise e - fixed_errors.append((e.error, e.context)) + fixed_errors.append((e.error, e.phase)) if os.path.exists(os.path.join(output_directory, "build.log")): i = 1 while os.path.exists( @@ -772,7 +736,7 @@ def build_incrementally( def main(argv=None): import argparse - parser = argparse.ArgumentParser("janitor.fix_build") + parser = argparse.ArgumentParser("ognibuild.debian.fix_build") parser.add_argument( "--suffix", type=str, help="Suffix to use for test builds.", default="fixbuild1" ) diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index a6326f2..5b6aef0 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -25,14 +25,10 @@ from buildlog_consultant.common import ( MissingPythonDistribution, MissingCommand, ) +from breezy.mutabletree import MutableTree -from . import DetailedFailure -from .apt import UnidentifiedError, AptManager -from .debian.fix_build import ( - DependencyContext, - resolve_error, - APT_FIXERS, -) +from . 
import DetailedFailure, UnidentifiedError +from .debian.apt import AptManager from .session import Session, run_with_tee @@ -64,6 +60,28 @@ class SimpleBuildFixer(BuildFixer): return self._fn(problem, context) +class DependencyContext(object): + def __init__( + self, + tree: MutableTree, + apt: AptManager, + subpath: str = "", + committer: Optional[str] = None, + update_changelog: bool = True, + ): + self.tree = tree + self.apt = apt + self.resolver = AptResolver(apt) + self.subpath = subpath + self.committer = committer + self.update_changelog = update_changelog + + def add_dependency( + self, package: str, minimum_version: Optional['Version'] = None + ) -> bool: + raise NotImplementedError(self.add_dependency) + + class SchrootDependencyContext(DependencyContext): def __init__(self, session): self.session = session @@ -144,3 +162,19 @@ def run_with_build_fixer( logging.warning("Failed to find resolution for error %r. Giving up.", error) raise DetailedFailure(retcode, args, error) fixed_errors.append(error) + + +def resolve_error(error, context, fixers): + relevant_fixers = [] + for error_cls, fixer in fixers: + if isinstance(error, error_cls): + relevant_fixers.append(fixer) + if not relevant_fixers: + logging.warning("No fixer found for %r", error) + return False + for fixer in relevant_fixers: + logging.info("Attempting to use fixer %r to address %r", fixer, error) + made_changes = fixer(error, context) + if made_changes: + return True + return False diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index 6864119..a5a5cc5 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -381,6 +381,16 @@ APT_REQUIREMENT_RESOLVERS = [ ] +def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement): + for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: + if isinstance(req, rr_class): + deb_req = rr_fn(apt_mgr, req) + if deb_req is None: + raise NoAptPackage() + return deb_req + raise NotImplementedError(type(req)) + + class 
AptResolver(Resolver): def __init__(self, apt): @@ -401,17 +411,10 @@ class AptResolver(Resolver): if not pps or not any(self.apt.session.exists(p) for p in pps): missing.append(req) if missing: - self.apt.install(list(self.resolve(missing))) + self.apt.install([self.resolve(m) for m in missing]) def explain(self, requirements): raise NotImplementedError(self.explain) def resolve(self, req: UpstreamRequirement): - for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: - if isinstance(req, rr_class): - deb_req = rr_fn(self.apt, req) - if deb_req is None: - raise NoAptPackage() - return deb_req - else: - raise NotImplementedError + return resolve_requirement_apt(self.apt, req) From be24ed6b4fc941b63cd6c808d1f2cac402096e1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Thu, 25 Feb 2021 03:37:09 +0000 Subject: [PATCH 75/83] More refactoring. --- ognibuild/debian/fix_build.py | 12 ++++++++++++ ognibuild/fix_build.py | 1 + ognibuild/requirements.py | 16 +++++++++++++++- ognibuild/resolver/apt.py | 1 + 4 files changed, 29 insertions(+), 1 deletion(-) diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 50ecc24..2a5b4a4 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -136,6 +136,8 @@ from ..requirements import ( PerlModuleRequirement, PerlFileRequirement, AutoconfMacroRequirement, + PythonModuleRequirement, + PythonPackageRequirement, ) from .build import attempt_build, DEFAULT_BUILDER @@ -509,6 +511,16 @@ def problem_to_upstream_requirement(problem, context): return PerlFileRequirement(filename=problem.filename) elif isinstance(problem, MissingAutoconfMacro): return AutoconfMacroRequirement(problem.macro) + elif isinstance(problem, MissingPythonModule): + return PythonModuleRequirement( + problem.module, + python_version=problem.python_version, + minimum_version=problem.minimum_version) + elif isinstance(problem, MissingPythonDistribution): + return PythonPackageRequirement( + problem.module, 
+ python_version=problem.python_version, + minimum_version=problem.minimum_version) else: return None diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index 5b6aef0..4b4bbf0 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -71,6 +71,7 @@ class DependencyContext(object): ): self.tree = tree self.apt = apt + from .resolver.apt import AptResolver self.resolver = AptResolver(apt) self.subpath = subpath self.committer = committer diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py index 45c176a..24e9e88 100644 --- a/ognibuild/requirements.py +++ b/ognibuild/requirements.py @@ -26,9 +26,11 @@ class PythonPackageRequirement(UpstreamRequirement): package: str - def __init__(self, package): + def __init__(self, package, python_version=None, minimum_version=None): super(PythonPackageRequirement, self).__init__('python-package') self.package = package + self.python_version = python_version + self.minimum_version = minimum_version class BinaryRequirement(UpstreamRequirement): @@ -269,3 +271,15 @@ class AutoconfMacroRequirement(UpstreamRequirement): def __init__(self, macro: str): super(AutoconfMacroRequirement, self).__init__('autoconf-macro') self.macro = macro + + +class PythonModuleRequirement(UpstreamRequirement): + + module: str + python_version: Optional[str] + minimum_version: Optional[str] + + def __init__(self, module, python_version=None, minimum_version=None): + super(PythonModuleRequirement, self).__init__('python-module') + self.python_version = python_version + self.minimum_version = minimum_version diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index a5a5cc5..bb5cb5a 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -16,6 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import logging +import os import posixpath from ..debian.apt import AptManager From 795bca3a13b376b9f81fe77f82b34dfbe871103c Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Thu, 25 Feb 2021 23:38:34 +0000 Subject: [PATCH 76/83] Some more refactoring. --- TODO | 1 + ognibuild/__main__.py | 9 +- ognibuild/debian/apt.py | 190 ++++++++++++++++++++------------- ognibuild/debian/fix_build.py | 12 +-- ognibuild/requirements.py | 5 + ognibuild/resolver/__init__.py | 3 + ognibuild/resolver/apt.py | 51 ++++++++- 7 files changed, 183 insertions(+), 88 deletions(-) create mode 100644 TODO diff --git a/TODO b/TODO new file mode 100644 index 0000000..29105b7 --- /dev/null +++ b/TODO @@ -0,0 +1 @@ +- Need to be able to check up front whether a requirement is satisfied, before attempting to install it (which is more expensive) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 808eb76..f395f45 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -83,8 +83,15 @@ def main(): # noqa: C901 action="store_true", help="Ignore declared dependencies, follow build errors only", ) + parser.add_argument( + "--verbose", + action="store_true", + help="Be verbose") args = parser.parse_args() - logging.basicConfig(level=logging.INFO) + if args.verbose: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) if args.schroot: from .session.schroot import SchrootSession diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py index cd55fa5..ab2ff16 100644 --- a/ognibuild/debian/apt.py +++ b/ognibuild/debian/apt.py @@ -46,18 +46,43 @@ def run_apt(session: Session, args: List[str]) -> None: raise UnidentifiedError(retcode, args, lines) +class FileSearcher(object): + def search_files(self, path: str, regex: bool = False) -> Iterator[str]: + raise NotImplementedError(self.search_files) + + class AptManager(object): session: Session + _searchers: Optional[List[FileSearcher]] def __init__(self, session): self.session = session + self._apt_cache = None + self._searchers = None - def package_exists(self, package: str) -> bool: - raise 
NotImplementedError(self.package_exists) + def searchers(self): + if self._searchers is None: + self._searchers = [ + RemoteAptContentsFileSearcher.from_session(self.session), + GENERATED_FILE_SEARCHER] + return self._searchers + + def package_exists(self, package): + if self._apt_cache is None: + import apt_pkg + + # TODO(jelmer): Load from self.session + self._apt_cache = apt_pkg.Cache() + for p in self._apt_cache.packages: + if p.name == package: + return True + return False def get_package_for_paths(self, paths, regex=False): - raise NotImplementedError(self.get_package_for_paths) + logging.debug('Searching for packages containing %r', paths) + # TODO(jelmer): Make sure we use whatever is configured in self.session + return get_package_for_paths(paths, self.searchers(), regex=regex) def missing(self, packages): root = getattr(self.session, "location", "/") @@ -84,45 +109,22 @@ class AptManager(object): run_apt(self.session, ["satisfy"] + deps) -class LocalAptManager(AptManager): - - def __init__(self): - from ..session.plain import PlainSession - self.session = PlainSession() - self._apt_cache = None - - def package_exists(self, package): - if self._apt_cache is None: - import apt_pkg - - self._apt_cache = apt_pkg.Cache() - for p in self._apt_cache.packages: - if p.name == package: - return True - return False - - def get_package_for_paths(self, paths, regex=False): - # TODO(jelmer): Make sure we use whatever is configured in self.session - return get_package_for_paths(paths, regex=regex) - - -class FileSearcher(object): - def search_files(self, path: str, regex: bool = False) -> Iterator[str]: - raise NotImplementedError(self.search_files) - - class ContentsFileNotFound(Exception): """The contents file was not found.""" -class AptContentsFileSearcher(FileSearcher): +class RemoteAptContentsFileSearcher(FileSearcher): def __init__(self): self._db = {} @classmethod - def from_env(cls): - sources = os.environ["REPOSITORIES"].split(":") - return 
cls.from_repositories(sources) + def from_session(cls, session): + logging.info('Loading apt contents information') + # TODO(jelmer): what about sources.list.d? + with open(os.path.join(session.location, 'etc/apt/sources.list'), 'r') as f: + return cls.from_repositories( + f.readlines(), + cache_dir=os.path.join(session.location, 'var/lib/apt/lists')) def __setitem__(self, path, package): self._db[path] = package @@ -144,36 +146,75 @@ class AptContentsFileSearcher(FileSearcher): self[decoded_path] = package.decode("utf-8") @classmethod - def from_urls(cls, urls): + def _load_cache_file(cls, url, cache_dir): + from urllib.parse import urlparse + parsed = urlparse(url) + p = os.path.join( + cache_dir, + parsed.hostname + parsed.path.replace('/', '_') + '.lz4') + logging.debug('Loading cached contents file %s', p) + if not os.path.exists(p): + return None + import lz4.frame + return lz4.frame.open(p, mode='rb') + + @classmethod + def from_urls(cls, urls, cache_dir=None): self = cls() - for url in urls: - self.load_url(url) + for url, mandatory in urls: + f = cls._load_cache_file(url, cache_dir) + if f is not None: + self.load_file(f) + elif not mandatory and self._db: + logging.debug( + 'Not attempting to fetch optional contents file %s', url) + else: + logging.debug('Fetching contents file %s', url) + try: + self.load_url(url) + except ContentsFileNotFound: + if mandatory: + raise + logging.debug( + 'Unable to fetch optional contents file %s', url) return self @classmethod - def from_repositories(cls, sources): - from .debian.build import get_build_architecture + def from_repositories(cls, sources, cache_dir=None): + # TODO(jelmer): Use aptsources.sourceslist.SourcesList + from .build import get_build_architecture # TODO(jelmer): Verify signatures, etc. 
urls = [] - arches = [get_build_architecture(), "all"] + arches = [(get_build_architecture(), True), ("all", False)] for source in sources: + if not source.strip(): + continue + if source.strip().startswith('#'): + continue parts = source.split(" ") + if parts[0] == "deb-src": + continue if parts[0] != "deb": logging.warning("Invalid line in sources: %r", source) continue - base_url = parts[1] - name = parts[2] - components = parts[3:] - response = cls._get("%s/%s/Release" % (base_url, name)) - r = Release(response) - desired_files = set() - for component in components: - for arch in arches: - desired_files.add("%s/Contents-%s" % (component, arch)) - for entry in r["MD5Sum"]: - if entry["name"] in desired_files: - urls.append("%s/%s/%s" % (base_url, name, entry["name"])) - return cls.from_urls(urls) + base_url = parts[1].strip().rstrip("/") + name = parts[2].strip() + components = [c.strip() for c in parts[3:]] + if components: + dists_url = base_url + "/dists" + else: + dists_url = base_url + if components: + for component in components: + for arch, mandatory in arches: + urls.append( + ("%s/%s/%s/Contents-%s" % ( + dists_url, name, component, arch), mandatory)) + else: + for arch, mandatory in arches: + urls.append( + ("%s/%s/Contents-%s" % (dists_url, name.rstrip('/'), arch), mandatory)) + return cls.from_urls(urls, cache_dir=cache_dir) @staticmethod def _get(url): @@ -182,19 +223,27 @@ class AptContentsFileSearcher(FileSearcher): request = Request(url, headers={"User-Agent": "Debian Janitor"}) return urlopen(request) - def load_url(self, url): + def load_url(self, url, allow_cache=True): from urllib.error import HTTPError - try: - response = self._get(url) - except HTTPError as e: - if e.status == 404: - raise ContentsFileNotFound(url) - raise - if url.endswith(".gz"): + for ext in ['.xz', '.gz', '']: + try: + response = self._get(url + ext) + except HTTPError as e: + if e.status == 404: + continue + raise + break + else: + raise ContentsFileNotFound(url) + if 
ext == '.gz': import gzip f = gzip.GzipFile(fileobj=response) + elif ext == '.xz': + import lzma + from io import BytesIO + f = BytesIO(lzma.decompress(response.read())) elif response.headers.get_content_type() == "text/plain": f = response else: @@ -228,23 +277,12 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher( ) -_apt_file_searcher = None - - -def search_apt_file(path: str, regex: bool = False) -> Iterator[str]: - global _apt_file_searcher - if _apt_file_searcher is None: - # TODO(jelmer): cache file - _apt_file_searcher = AptContentsFileSearcher.from_env() - if _apt_file_searcher: - yield from _apt_file_searcher.search_files(path, regex=regex) - yield from GENERATED_FILE_SEARCHER.search_files(path, regex=regex) - - -def get_package_for_paths(paths: List[str], regex: bool = False) -> Optional[str]: +def get_package_for_paths( + paths: List[str], searchers: List[FileSearcher], regex: bool = False) -> Optional[str]: candidates: Set[str] = set() for path in paths: - candidates.update(search_apt_file(path, regex=regex)) + for searcher in searchers: + candidates.update(searcher.search_files(path, regex=regex)) if candidates: break if len(candidates) == 0: diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 2a5b4a4..d76d305 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -105,7 +105,6 @@ from buildlog_consultant.sbuild import ( SbuildFailure, ) -from .apt import LocalAptManager from ..fix_build import BuildFixer, SimpleBuildFixer, resolve_error, DependencyContext from ..resolver.apt import ( NoAptPackage, @@ -332,7 +331,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901 default = not targeted pypy_pkg = context.apt.get_package_for_paths( - ["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % error.distribution], regex=True + ["/usr/lib/pypy/dist-packages/%s-.*.egg-info/PKG-INFO" % error.distribution], regex=True ) if pypy_pkg is None: pypy_pkg = "pypy-%s" % error.distribution @@ -340,7 
+339,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901 pypy_pkg = None py2_pkg = context.apt.get_package_for_paths( - ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info" % error.distribution], + ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info/PKG-INFO" % error.distribution], regex=True, ) if py2_pkg is None: @@ -349,7 +348,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901 py2_pkg = None py3_pkg = context.apt.get_package_for_paths( - ["/usr/lib/python3/dist-packages/%s-.*.egg-info" % error.distribution], + ["/usr/lib/python3/dist-packages/%s-.*.egg-info/PKG-INFO" % error.distribution], regex=True, ) if py3_pkg is None: @@ -784,8 +783,9 @@ def main(argv=None): args = parser.parse_args() from breezy.workingtree import WorkingTree - - apt = LocalAptManager() + from .apt import AptManager + from ..session.plain import PlainSession + apt = AptManager(PlainSession()) tree = WorkingTree.open(".") build_incrementally( diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py index 24e9e88..56483ba 100644 --- a/ognibuild/requirements.py +++ b/ognibuild/requirements.py @@ -32,6 +32,11 @@ class PythonPackageRequirement(UpstreamRequirement): self.python_version = python_version self.minimum_version = minimum_version + def __repr__(self): + return "%s(%r, %r, %r)" % ( + type(self).__name__, self.package, self.python_version, + self.minimum_version) + class BinaryRequirement(UpstreamRequirement): diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py index 9384482..18bbd98 100644 --- a/ognibuild/resolver/__init__.py +++ b/ognibuild/resolver/__init__.py @@ -30,6 +30,9 @@ class Resolver(object): def explain(self, requirements): raise NotImplementedError(self.explain) + def met(self, requirement): + raise NotImplementedError(self.met) + class NativeResolver(Resolver): def __init__(self, session): diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index bb5cb5a..39f4e0f 100644 --- 
a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -48,6 +48,8 @@ from ..requirements import ( PerlModuleRequirement, PerlFileRequirement, AutoconfMacroRequirement, + PythonModuleRequirement, + PythonPackageRequirement, ) @@ -55,6 +57,23 @@ class NoAptPackage(Exception): """No apt package.""" +def get_package_for_python_package(apt_mgr, package, python_version): + if python_version == "pypy": + return apt_mgr.get_package_for_paths( + ["/usr/lib/pypy/dist-packages/%s-.*.egg-info/PKG-INFO" % package], + regex=True) + elif python_version == "cpython2": + return apt_mgr.get_package_for_paths( + ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info/PKG-INFO" % package], + regex=True) + elif python_version == "cpython3": + return apt_mgr.get_package_for_paths( + ["/usr/lib/python3/dist-packages/%s-.*.egg-info/PKG-INFO" % package], + regex=True) + else: + raise NotImplementedError + + def get_package_for_python_module(apt_mgr, module, python_version): if python_version == "python3": paths = [ @@ -354,6 +373,24 @@ def resolve_autoconf_macro_req(apt_mgr, req): return apt_mgr.get_package_for_paths([path]) +def resolve_python_module_req(apt_mgr, req): + if req.python_version == 2: + return get_package_for_python_module(apt_mgr, req.module, "cpython2") + elif req.python_version in (None, 3): + return get_package_for_python_module(apt_mgr, req.module, "cpython3") + else: + return None + + +def resolve_python_package_req(apt_mgr, req): + if req.python_version == 2: + return get_package_for_python_package(apt_mgr, req.package, "cpython2") + elif req.python_version in (None, 3): + return get_package_for_python_package(apt_mgr, req.package, "cpython3") + else: + return None + + APT_REQUIREMENT_RESOLVERS = [ (BinaryRequirement, resolve_binary_req), (PkgConfigRequirement, resolve_pkg_config_req), @@ -379,6 +416,8 @@ APT_REQUIREMENT_RESOLVERS = [ (PerlModuleRequirement, resolve_perl_module_req), (PerlFileRequirement, resolve_perl_file_req), (AutoconfMacroRequirement, 
resolve_autoconf_macro_req), + (PythonModuleRequirement, resolve_python_module_req), + (PythonPackageRequirement, resolve_python_package_req), ] @@ -387,7 +426,7 @@ def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement): if isinstance(req, rr_class): deb_req = rr_fn(apt_mgr, req) if deb_req is None: - raise NoAptPackage() + raise NoAptPackage(req) return deb_req raise NotImplementedError(type(req)) @@ -401,16 +440,18 @@ class AptResolver(Resolver): def from_session(cls, session): return cls(AptManager(session)) + def met(self, requirement): + pps = list(requirement.possible_paths()) + return any(self.apt.session.exists(p) for p in pps) + def install(self, requirements): missing = [] for req in requirements: try: - pps = list(req.possible_paths()) + if not self.met(req): + missing.append(req) except NotImplementedError: missing.append(req) - else: - if not pps or not any(self.apt.session.exists(p) for p in pps): - missing.append(req) if missing: self.apt.install([self.resolve(m) for m in missing]) From aa2a3e47fa8d54f784b40222130f5b67f7360229 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 26 Feb 2021 01:41:05 +0000 Subject: [PATCH 77/83] More refactoring. 
--- TODO | 1 + ognibuild/__init__.py | 4 +-- ognibuild/__main__.py | 32 ++++++++++++----- ognibuild/buildsystem.py | 38 +++++++++++++++----- ognibuild/debian/fix_build.py | 8 +++-- ognibuild/fix_build.py | 6 ++-- ognibuild/info.py | 45 ++++++++++++++++++++++++ ognibuild/install.py | 9 +++-- ognibuild/requirements.py | 5 ++- ognibuild/resolver/apt.py | 23 ++++++------ ognibuild/tests/test_debian_fix_build.py | 5 +-- 11 files changed, 136 insertions(+), 40 deletions(-) create mode 100644 ognibuild/info.py diff --git a/TODO b/TODO index 29105b7..eab3954 100644 --- a/TODO +++ b/TODO @@ -1 +1,2 @@ - Need to be able to check up front whether a requirement is satisfied, before attempting to install it (which is more expensive) +- Cache parsed Contents files during test suite runs and/or speed up reading diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index eb32b9d..552f109 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -58,8 +58,8 @@ class UpstreamRequirement(object): def __init__(self, family): self.family = family - def possible_paths(self): - raise NotImplementedError + def met(self, session): + raise NotImplementedError(self) class UpstreamOutput(object): diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index f395f45..b6dbde2 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -20,10 +20,6 @@ import os import sys from . 
import UnidentifiedError from .buildsystem import NoBuildToolsFound, detect_buildsystems -from .build import run_build -from .clean import run_clean -from .dist import run_dist -from .install import run_install from .resolver import ( ExplainResolver, AutoResolver, @@ -31,7 +27,6 @@ from .resolver import ( MissingDependencies, ) from .resolver.apt import AptResolver -from .test import run_test def get_necessary_declared_requirements(resolver, requirements, stages): @@ -54,6 +49,7 @@ def install_necessary_declared_requirements(resolver, buildsystem, stages): STAGE_MAP = { "dist": [], + "info": [], "install": ["build"], "test": ["test", "dev"], "build": ["build"], @@ -65,9 +61,6 @@ def main(): # noqa: C901 import argparse parser = argparse.ArgumentParser() - parser.add_argument( - "subcommand", type=str, choices=["dist", "build", "clean", "test", "install"] - ) parser.add_argument( "--directory", "-d", type=str, help="Directory for project.", default="." ) @@ -87,6 +80,16 @@ def main(): # noqa: C901 "--verbose", action="store_true", help="Be verbose") + subparsers = parser.add_subparsers(dest='subcommand') + subparsers.add_parser('dist') + subparsers.add_parser('build') + subparsers.add_parser('clean') + subparsers.add_parser('test') + subparsers.add_parser('info') + install_parser = subparsers.add_parser('install') + install_parser.add_argument( + '--user', action='store_true', help='Install in local-user directories.') + args = parser.parse_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) @@ -112,21 +115,32 @@ def main(): # noqa: C901 os.chdir(args.directory) try: bss = list(detect_buildsystems(args.directory)) + logging.info('Detected buildsystems: %r', bss) if not args.ignore_declared_dependencies: stages = STAGE_MAP[args.subcommand] if stages: for bs in bss: install_necessary_declared_requirements(resolver, bs, stages) if args.subcommand == "dist": + from .dist import run_dist run_dist(session=session, buildsystems=bss, resolver=resolver) if 
args.subcommand == "build": + from .build import run_build run_build(session, buildsystems=bss, resolver=resolver) if args.subcommand == "clean": + from .clean import run_clean run_clean(session, buildsystems=bss, resolver=resolver) if args.subcommand == "install": - run_install(session, buildsystems=bss, resolver=resolver) + from .install import run_install + run_install( + session, buildsystems=bss, resolver=resolver, + user=args.user) if args.subcommand == "test": + from .test import run_test run_test(session, buildsystems=bss, resolver=resolver) + if args.subcommand == "info": + from .info import run_info + run_info(session, buildsystems=bss, resolver=resolver) except UnidentifiedError: return 1 except NoBuildToolsFound: diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 6d311f8..8f9f6ba 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -20,6 +20,7 @@ import logging import os import re +from typing import Optional import warnings from . import shebang_binary, UpstreamOutput, UnidentifiedError @@ -37,6 +38,14 @@ class NoBuildToolsFound(Exception): """No supported build tools were found.""" +class InstallTarget(object): + + # Whether to prefer user-specific installation + user: Optional[bool] + + # TODO(jelmer): Add information about target directory, layout, etc. 
+ + class BuildSystem(object): """A particular buildsystem.""" @@ -54,7 +63,7 @@ class BuildSystem(object): def clean(self, session, resolver): raise NotImplementedError(self.clean) - def install(self, session, resolver): + def install(self, session, resolver, install_target): raise NotImplementedError(self.install) def get_declared_dependencies(self): @@ -84,15 +93,15 @@ class Pear(BuildSystem): def build(self, session, resolver): self.setup(resolver) - run_with_build_fixer(session, ["pear", "build"]) + run_with_build_fixer(session, ["pear", "build", self.path]) def clean(self, session, resolver): self.setup(resolver) # TODO - def install(self, session, resolver): + def install(self, session, resolver, install_target): self.setup(resolver) - run_with_build_fixer(session, ["pear", "install"]) + run_with_build_fixer(session, ["pear", "install", self.path]) class SetupPy(BuildSystem): @@ -104,6 +113,9 @@ class SetupPy(BuildSystem): from distutils.core import run_setup self.result = run_setup(os.path.abspath(path), stop_after="init") + def __repr__(self): + return "%s(%r)" % (type(self).__name__, self.path) + def setup(self, resolver): resolver.install([PythonPackageRequirement('pip')]) with open(self.path, "r") as f: @@ -147,9 +159,12 @@ class SetupPy(BuildSystem): self.setup(resolver) self._run_setup(session, resolver, ["clean"]) - def install(self, session, resolver): + def install(self, session, resolver, install_target): self.setup(resolver) - self._run_setup(session, resolver, ["install"]) + extra_args = [] + if install_target.user: + extra_args.append('--user') + self._run_setup(session, resolver, ["install"] + extra_args) def _run_setup(self, session, resolver, args): interpreter = shebang_binary("setup.py") @@ -338,7 +353,12 @@ class Make(BuildSystem): name = "make" + def __repr__(self): + return "%s()" % type(self).__name__ + def setup(self, session, resolver): + resolver.install([BinaryRequirement("make")]) + if session.exists("Makefile.PL") and not 
session.exists("Makefile"): resolver.install([BinaryRequirement("perl")]) run_with_build_fixer(session, ["perl", "Makefile.PL"]) @@ -375,12 +395,14 @@ class Make(BuildSystem): def build(self, session, resolver): self.setup(session, resolver) - resolver.install([BinaryRequirement("make")]) run_with_build_fixer(session, ["make", "all"]) + def install(self, session, resolver, install_target): + self.setup(session, resolver) + run_with_build_fixer(session, ["make", "install"]) + def dist(self, session, resolver): self.setup(session, resolver) - resolver.install([BinaryRequirement("make")]) try: run_with_build_fixer(session, ["make", "dist"]) except UnidentifiedError as e: diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index d76d305..36f6139 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -437,7 +437,7 @@ def fix_missing_python_module(error, context): return True -def problem_to_upstream_requirement(problem, context): +def problem_to_upstream_requirement(problem): if isinstance(problem, MissingFile): return PathRequirement(problem.path) elif isinstance(problem, MissingCommand): @@ -526,7 +526,11 @@ def problem_to_upstream_requirement(problem, context): class UpstreamRequirementFixer(BuildFixer): - def fix_missing_requirement(self, error, context): + def can_fix(self, error): + req = problem_to_upstream_requirement(error) + return req is not None + + def fix(self, error, context): req = problem_to_upstream_requirement(error) if req is None: return False diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index 4b4bbf0..d2d6a9f 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -167,15 +167,15 @@ def run_with_build_fixer( def resolve_error(error, context, fixers): relevant_fixers = [] - for error_cls, fixer in fixers: - if isinstance(error, error_cls): + for fixer in fixers: + if fixer.can_fix(error): relevant_fixers.append(fixer) if not relevant_fixers: logging.warning("No fixer found 
for %r", error) return False for fixer in relevant_fixers: logging.info("Attempting to use fixer %r to address %r", fixer, error) - made_changes = fixer(error, context) + made_changes = fixer.fix(error, context) if made_changes: return True return False diff --git a/ognibuild/info.py b/ognibuild/info.py new file mode 100644 index 0000000..a5e4c9f --- /dev/null +++ b/ognibuild/info.py @@ -0,0 +1,45 @@ +#!/usr/bin/python3 +# Copyright (C) 2020-2021 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +from .buildsystem import NoBuildToolsFound, InstallTarget + + +def run_info(session, buildsystems, resolver): + for buildsystem in buildsystems: + print('%r:' % buildsystem) + deps = {} + try: + for kind, dep in buildsystem.get_declared_dependencies(): + deps.setdefault(kind, []).append(dep) + except NotImplementedError: + print('\tUnable to detect declared dependencies for this type of build system') + if deps: + print('\tDeclared dependencies:') + for kind in deps: + print('\t\t%s:' % kind) + for dep in deps[kind]: + print('\t\t\t%s' % dep) + print('') + try: + outputs = list(buildsystem.get_declared_outputs()) + except NotImplementedError: + print('\tUnable to detect declared outputs for this type of build system') + outputs = [] + if outputs: + print('\tDeclared outputs:') + for output in 
outputs: + print('\t\t%s' % output) diff --git a/ognibuild/install.py b/ognibuild/install.py index df0e61f..c30967a 100644 --- a/ognibuild/install.py +++ b/ognibuild/install.py @@ -15,16 +15,19 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from .buildsystem import NoBuildToolsFound +from .buildsystem import NoBuildToolsFound, InstallTarget -def run_install(session, buildsystems, resolver): +def run_install(session, buildsystems, resolver, user: bool = False): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() + install_target = InstallTarget() + install_target.user = user + for buildsystem in buildsystems: - buildsystem.install(session, resolver) + buildsystem.install(session, resolver, install_target) return raise NoBuildToolsFound() diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py index 56483ba..44c9f27 100644 --- a/ognibuild/requirements.py +++ b/ognibuild/requirements.py @@ -33,10 +33,13 @@ class PythonPackageRequirement(UpstreamRequirement): self.minimum_version = minimum_version def __repr__(self): - return "%s(%r, %r, %r)" % ( + return "%s(%r, python_version=%r, minimum_version=%r)" % ( type(self).__name__, self.package, self.python_version, self.minimum_version) + def __str__(self): + return "python package: %s" % self.package + class BinaryRequirement(UpstreamRequirement): diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index 39f4e0f..0c6a783 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -57,7 +57,7 @@ class NoAptPackage(Exception): """No apt package.""" -def get_package_for_python_package(apt_mgr, package, python_version): +def get_package_for_python_package(apt_mgr, package, python_version, minimum_version=None): if python_version == "pypy": return apt_mgr.get_package_for_paths( ["/usr/lib/pypy/dist-packages/%s-.*.egg-info/PKG-INFO" % 
package], @@ -375,18 +375,18 @@ def resolve_autoconf_macro_req(apt_mgr, req): def resolve_python_module_req(apt_mgr, req): if req.python_version == 2: - return get_package_for_python_module(apt_mgr, req.module, "cpython2") + return get_package_for_python_module(apt_mgr, req.module, "cpython2", req.minimum_version) elif req.python_version in (None, 3): - return get_package_for_python_module(apt_mgr, req.module, "cpython3") + return get_package_for_python_module(apt_mgr, req.module, "cpython3", req.minimum_version) else: return None def resolve_python_package_req(apt_mgr, req): if req.python_version == 2: - return get_package_for_python_package(apt_mgr, req.package, "cpython2") + return get_package_for_python_package(apt_mgr, req.package, "cpython2", req.minimum_version) elif req.python_version in (None, 3): - return get_package_for_python_package(apt_mgr, req.package, "cpython3") + return get_package_for_python_package(apt_mgr, req.package, "cpython3", req.minimum_version) else: return None @@ -421,6 +421,13 @@ APT_REQUIREMENT_RESOLVERS = [ ] +class AptRequirement(object): + + def __init__(self, package, minimum_version=None): + self.package = package + self.minimum_version = minimum_version + + def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement): for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: if isinstance(req, rr_class): @@ -440,15 +447,11 @@ class AptResolver(Resolver): def from_session(cls, session): return cls(AptManager(session)) - def met(self, requirement): - pps = list(requirement.possible_paths()) - return any(self.apt.session.exists(p) for p in pps) - def install(self, requirements): missing = [] for req in requirements: try: - if not self.met(req): + if not req.met(self.apt.session): missing.append(req) except NotImplementedError: missing.append(req) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 07725f3..0cd80ec 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ 
b/ognibuild/tests/test_debian_fix_build.py @@ -31,7 +31,7 @@ from buildlog_consultant.common import ( MissingValaPackage, ) from ..debian import apt -from ..debian.apt import LocalAptManager +from ..debian.apt import AptManager from ..debian.fix_build import ( resolve_error, VERSIONED_PACKAGE_FIXERS, @@ -89,7 +89,8 @@ blah (0.1) UNRELEASED; urgency=medium yield pkg def resolve(self, error, context=("build",)): - apt = LocalAptManager() + from ..session.plain import PlainSession + apt = AptManager(PlainSession()) context = BuildDependencyContext( self.tree, apt, From 6b30479b97defc956a584394cff44e99a2f5436e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 26 Feb 2021 03:19:33 +0000 Subject: [PATCH 78/83] More fixes. --- ognibuild/__main__.py | 11 +- ognibuild/buildlog.py | 192 +++++++++++++++++++ ognibuild/buildsystem.py | 8 +- ognibuild/debian/apt.py | 48 +++-- ognibuild/debian/fix_build.py | 227 ++++------------------- ognibuild/fix_build.py | 61 ++---- ognibuild/resolver/__init__.py | 101 +++++++++- ognibuild/resolver/apt.py | 163 ++++++++++++---- ognibuild/tests/test_debian_fix_build.py | 8 +- 9 files changed, 499 insertions(+), 320 deletions(-) create mode 100644 ognibuild/buildlog.py diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index b6dbde2..d1ffb02 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -23,7 +23,7 @@ from .buildsystem import NoBuildToolsFound, detect_buildsystems from .resolver import ( ExplainResolver, AutoResolver, - NativeResolver, + native_resolvers, MissingDependencies, ) from .resolver.apt import AptResolver @@ -91,6 +91,9 @@ def main(): # noqa: C901 '--user', action='store_true', help='Install in local-user directories.') args = parser.parse_args() + if not args.subcommand: + parser.print_usage() + return 1 if args.verbose: logging.basicConfig(level=logging.DEBUG) else: @@ -109,7 +112,7 @@ def main(): # noqa: C901 elif args.resolve == "explain": resolver = 
ExplainResolver.from_session(session) elif args.resolve == "native": - resolver = NativeResolver.from_session(session) + resolver = native_resolvers(session) elif args.resolver == "auto": resolver = AutoResolver.from_session(session) os.chdir(args.directory) @@ -149,10 +152,10 @@ def main(): # noqa: C901 except MissingDependencies as e: for req in e.requirements: logging.info("Missing dependency (%s:%s)", - req.family, req.name) + req.family, req.package) for resolver in [ AptResolver.from_session(session), - NativeResolver.from_session(session), + native_resolvers(session), ]: logging.info(" %s", resolver.explain([req])) return 2 diff --git a/ognibuild/buildlog.py b/ognibuild/buildlog.py new file mode 100644 index 0000000..0ff19a8 --- /dev/null +++ b/ognibuild/buildlog.py @@ -0,0 +1,192 @@ +#!/usr/bin/python3 +# Copyright (C) 2020 Jelmer Vernooij +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +"""Convert problems found in the buildlog to upstream requirements. 
+""" + +import logging + +from buildlog_consultant.common import ( + MissingConfigStatusInput, + MissingPythonModule, + MissingPythonDistribution, + MissingCHeader, + MissingPkgConfig, + MissingCommand, + MissingFile, + MissingJavaScriptRuntime, + MissingSprocketsFile, + MissingGoPackage, + MissingPerlFile, + MissingPerlModule, + MissingXmlEntity, + MissingJDKFile, + MissingNodeModule, + MissingPhpClass, + MissingRubyGem, + MissingLibrary, + MissingJavaClass, + MissingCSharpCompiler, + MissingConfigure, + MissingAutomakeInput, + MissingRPackage, + MissingRubyFile, + MissingAutoconfMacro, + MissingValaPackage, + MissingXfceDependency, + MissingHaskellDependencies, + NeedPgBuildExtUpdateControl, + DhAddonLoadFailure, + MissingMavenArtifacts, + GnomeCommonMissing, + MissingGnomeCommonDependency, +) + +from .fix_build import BuildFixer +from .requirements import ( + BinaryRequirement, + PathRequirement, + PkgConfigRequirement, + CHeaderRequirement, + JavaScriptRuntimeRequirement, + ValaPackageRequirement, + RubyGemRequirement, + GoPackageRequirement, + DhAddonRequirement, + PhpClassRequirement, + RPackageRequirement, + NodePackageRequirement, + LibraryRequirement, + RubyFileRequirement, + XmlEntityRequirement, + SprocketsFileRequirement, + JavaClassRequirement, + HaskellPackageRequirement, + MavenArtifactRequirement, + GnomeCommonRequirement, + JDKFileRequirement, + PerlModuleRequirement, + PerlFileRequirement, + AutoconfMacroRequirement, + PythonModuleRequirement, + PythonPackageRequirement, + ) + + +def problem_to_upstream_requirement(problem): + if isinstance(problem, MissingFile): + return PathRequirement(problem.path) + elif isinstance(problem, MissingCommand): + return BinaryRequirement(problem.command) + elif isinstance(problem, MissingPkgConfig): + return PkgConfigRequirement( + problem.module, problem.minimum_version) + elif isinstance(problem, MissingCHeader): + return CHeaderRequirement(problem.header) + elif isinstance(problem, MissingJavaScriptRuntime): + 
return JavaScriptRuntimeRequirement() + elif isinstance(problem, MissingRubyGem): + return RubyGemRequirement(problem.gem, problem.version) + elif isinstance(problem, MissingValaPackage): + return ValaPackageRequirement(problem.package) + elif isinstance(problem, MissingGoPackage): + return GoPackageRequirement(problem.package) + elif isinstance(problem, DhAddonLoadFailure): + return DhAddonRequirement(problem.path) + elif isinstance(problem, MissingPhpClass): + return PhpClassRequirement(problem.php_class) + elif isinstance(problem, MissingRPackage): + return RPackageRequirement(problem.package, problem.minimum_version) + elif isinstance(problem, MissingNodeModule): + return NodePackageRequirement(problem.module) + elif isinstance(problem, MissingLibrary): + return LibraryRequirement(problem.library) + elif isinstance(problem, MissingRubyFile): + return RubyFileRequirement(problem.filename) + elif isinstance(problem, MissingXmlEntity): + return XmlEntityRequirement(problem.url) + elif isinstance(problem, MissingSprocketsFile): + return SprocketsFileRequirement(problem.content_type, problem.name) + elif isinstance(problem, MissingJavaClass): + return JavaClassRequirement(problem.classname) + elif isinstance(problem, MissingHaskellDependencies): + # TODO(jelmer): Create multiple HaskellPackageRequirement objects? + return HaskellPackageRequirement(problem.package) + elif isinstance(problem, MissingMavenArtifacts): + # TODO(jelmer): Create multiple MavenArtifactRequirement objects? 
+ return MavenArtifactRequirement(problem.artifacts) + elif isinstance(problem, MissingCSharpCompiler): + return BinaryRequirement('msc') + elif isinstance(problem, GnomeCommonMissing): + return GnomeCommonRequirement() + elif isinstance(problem, MissingJDKFile): + return JDKFileRequirement(problem.jdk_path, problem.filename) + elif isinstance(problem, MissingGnomeCommonDependency): + if problem.package == "glib-gettext": + return BinaryRequirement('glib-gettextize') + else: + logging.warning( + "No known command for gnome-common dependency %s", + problem.package) + return None + elif isinstance(problem, MissingXfceDependency): + if problem.package == "gtk-doc": + return BinaryRequirement("gtkdocize") + else: + logging.warning( + "No known command for xfce dependency %s", + problem.package) + return None + elif isinstance(problem, MissingPerlModule): + return PerlModuleRequirement( + module=problem.module, + filename=problem.filename, + inc=problem.inc) + elif isinstance(problem, MissingPerlFile): + return PerlFileRequirement(filename=problem.filename) + elif isinstance(problem, MissingAutoconfMacro): + return AutoconfMacroRequirement(problem.macro) + elif isinstance(problem, MissingPythonModule): + return PythonModuleRequirement( + problem.module, + python_version=problem.python_version, + minimum_version=problem.minimum_version) + elif isinstance(problem, MissingPythonDistribution): + return PythonPackageRequirement( + problem.module, + python_version=problem.python_version, + minimum_version=problem.minimum_version) + else: + return None + + +class UpstreamRequirementFixer(BuildFixer): + + def __init__(self, resolver): + self.resolver = resolver + + def can_fix(self, error): + req = problem_to_upstream_requirement(error) + return req is not None + + def fix(self, error, context): + req = problem_to_upstream_requirement(error) + if req is None: + return False + + package = self.resolver.resolve(req) + return context.add_dependency(package) diff --git 
a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 8f9f6ba..cc8b30e 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -182,17 +182,19 @@ class SetupPy(BuildSystem): def get_declared_dependencies(self): for require in self.result.get_requires(): yield "build", PythonPackageRequirement(require) - if self.result.install_requires: + # Not present for distutils-only packages + if getattr(self.result, 'install_requires', []): for require in self.result.install_requires: yield "install", PythonPackageRequirement(require) - if self.result.tests_require: + # Not present for distutils-only packages + if getattr(self.result, 'tests_require', []): for require in self.result.tests_require: yield "test", PythonPackageRequirement(require) def get_declared_outputs(self): for script in self.result.scripts or []: yield UpstreamOutput("binary", os.path.basename(script)) - entry_points = self.result.entry_points or {} + entry_points = getattr(self.result, 'entry_points', None) or {} for script in entry_points.get("console_scripts", []): yield UpstreamOutput("binary", script.split("=")[0]) for package in self.result.packages or []: diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py index ab2ff16..c4e2938 100644 --- a/ognibuild/debian/apt.py +++ b/ognibuild/debian/apt.py @@ -70,14 +70,9 @@ class AptManager(object): def package_exists(self, package): if self._apt_cache is None: - import apt_pkg - - # TODO(jelmer): Load from self.session - self._apt_cache = apt_pkg.Cache() - for p in self._apt_cache.packages: - if p.name == package: - return True - return False + import apt + self._apt_cache = apt.Cache(rootdir=self.session.location) + return package in self._apt_cache def get_package_for_paths(self, paths, regex=False): logging.debug('Searching for packages containing %r', paths) @@ -121,10 +116,12 @@ class RemoteAptContentsFileSearcher(FileSearcher): def from_session(cls, session): logging.info('Loading apt contents information') # 
TODO(jelmer): what about sources.list.d? - with open(os.path.join(session.location, 'etc/apt/sources.list'), 'r') as f: - return cls.from_repositories( - f.readlines(), - cache_dir=os.path.join(session.location, 'var/lib/apt/lists')) + from aptsources.sourceslist import SourcesList + sl = SourcesList() + sl.load(os.path.join(session.location, 'etc/apt/sources.list')) + return cls.from_sources_list( + sl, + cache_dir=os.path.join(session.location, 'var/lib/apt/lists')) def __setitem__(self, path, package): self._db[path] = package @@ -174,32 +171,31 @@ class RemoteAptContentsFileSearcher(FileSearcher): self.load_url(url) except ContentsFileNotFound: if mandatory: - raise - logging.debug( - 'Unable to fetch optional contents file %s', url) + logging.warning( + 'Unable to fetch contents file %s', url) + else: + logging.debug( + 'Unable to fetch optional contents file %s', url) return self @classmethod - def from_repositories(cls, sources, cache_dir=None): + def from_sources_list(cls, sl, cache_dir=None): # TODO(jelmer): Use aptsources.sourceslist.SourcesList from .build import get_build_architecture # TODO(jelmer): Verify signatures, etc. 
urls = [] arches = [(get_build_architecture(), True), ("all", False)] - for source in sources: - if not source.strip(): + for source in sl.list: + if source.invalid or source.disabled: continue - if source.strip().startswith('#'): + if source.type == 'deb-src': continue - parts = source.split(" ") - if parts[0] == "deb-src": - continue - if parts[0] != "deb": + if source.type != 'deb': logging.warning("Invalid line in sources: %r", source) continue - base_url = parts[1].strip().rstrip("/") - name = parts[2].strip() - components = [c.strip() for c in parts[3:]] + base_url = source.uri.rstrip('/') + name = source.dist.rstrip('/') + components = source.comps if components: dists_url = base_url + "/dists" else: diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index 36f6139..a32219b 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -63,81 +63,28 @@ from debmutate._rules import ( from breezy.plugins.debian.changelog import debcommit from buildlog_consultant import Problem -from buildlog_consultant.common import ( - MissingConfigStatusInput, - MissingPythonModule, - MissingPythonDistribution, - MissingCHeader, - MissingPkgConfig, - MissingCommand, - MissingFile, - MissingJavaScriptRuntime, - MissingSprocketsFile, - MissingGoPackage, - MissingPerlFile, - MissingPerlModule, - MissingXmlEntity, - MissingJDKFile, - MissingNodeModule, - MissingPhpClass, - MissingRubyGem, - MissingLibrary, - MissingJavaClass, - MissingCSharpCompiler, - MissingConfigure, - MissingAutomakeInput, - MissingRPackage, - MissingRubyFile, - MissingAutoconfMacro, - MissingValaPackage, - MissingXfceDependency, - MissingHaskellDependencies, - NeedPgBuildExtUpdateControl, - DhAddonLoadFailure, - MissingMavenArtifacts, - GnomeCommonMissing, - MissingGnomeCommonDependency, -) from buildlog_consultant.apt import ( AptFetchFailure, ) +from buildlog_consultant.common import ( + MissingConfigStatusInput, + MissingAutomakeInput, + MissingConfigure, + 
NeedPgBuildExtUpdateControl, + MissingPythonModule, + MissingPythonDistribution, + MissingPerlFile, + ) from buildlog_consultant.sbuild import ( SbuildFailure, ) -from ..fix_build import BuildFixer, SimpleBuildFixer, resolve_error, DependencyContext +from ..fix_build import BuildFixer, resolve_error, DependencyContext +from ..buildlog import UpstreamRequirementFixer from ..resolver.apt import ( NoAptPackage, get_package_for_python_module, ) -from ..requirements import ( - BinaryRequirement, - PathRequirement, - PkgConfigRequirement, - CHeaderRequirement, - JavaScriptRuntimeRequirement, - ValaPackageRequirement, - RubyGemRequirement, - GoPackageRequirement, - DhAddonRequirement, - PhpClassRequirement, - RPackageRequirement, - NodePackageRequirement, - LibraryRequirement, - RubyFileRequirement, - XmlEntityRequirement, - SprocketsFileRequirement, - JavaClassRequirement, - HaskellPackageRequirement, - MavenArtifactRequirement, - GnomeCommonRequirement, - JDKFileRequirement, - PerlModuleRequirement, - PerlFileRequirement, - AutoconfMacroRequirement, - PythonModuleRequirement, - PythonPackageRequirement, - ) from .build import attempt_build, DEFAULT_BUILDER @@ -437,111 +384,6 @@ def fix_missing_python_module(error, context): return True -def problem_to_upstream_requirement(problem): - if isinstance(problem, MissingFile): - return PathRequirement(problem.path) - elif isinstance(problem, MissingCommand): - return BinaryRequirement(problem.command) - elif isinstance(problem, MissingPkgConfig): - return PkgConfigRequirement( - problem.module, problem.minimum_version) - elif isinstance(problem, MissingCHeader): - return CHeaderRequirement(problem.header) - elif isinstance(problem, MissingJavaScriptRuntime): - return JavaScriptRuntimeRequirement() - elif isinstance(problem, MissingRubyGem): - return RubyGemRequirement(problem.gem, problem.version) - elif isinstance(problem, MissingValaPackage): - return ValaPackageRequirement(problem.package) - elif isinstance(problem, 
MissingGoPackage): - return GoPackageRequirement(problem.package) - elif isinstance(problem, DhAddonLoadFailure): - return DhAddonRequirement(problem.path) - elif isinstance(problem, MissingPhpClass): - return PhpClassRequirement(problem.php_class) - elif isinstance(problem, MissingRPackage): - return RPackageRequirement(problem.package, problem.minimum_version) - elif isinstance(problem, MissingNodeModule): - return NodePackageRequirement(problem.module) - elif isinstance(problem, MissingLibrary): - return LibraryRequirement(problem.library) - elif isinstance(problem, MissingRubyFile): - return RubyFileRequirement(problem.filename) - elif isinstance(problem, MissingXmlEntity): - return XmlEntityRequirement(problem.url) - elif isinstance(problem, MissingSprocketsFile): - return SprocketsFileRequirement(problem.content_type, problem.name) - elif isinstance(problem, MissingJavaClass): - return JavaClassRequirement(problem.classname) - elif isinstance(problem, MissingHaskellDependencies): - # TODO(jelmer): Create multiple HaskellPackageRequirement objects? - return HaskellPackageRequirement(problem.package) - elif isinstance(problem, MissingMavenArtifacts): - # TODO(jelmer): Create multiple MavenArtifactRequirement objects? 
- return MavenArtifactRequirement(problem.artifacts) - elif isinstance(problem, MissingCSharpCompiler): - return BinaryRequirement('msc') - elif isinstance(problem, GnomeCommonMissing): - return GnomeCommonRequirement() - elif isinstance(problem, MissingJDKFile): - return JDKFileRequirement(problem.jdk_path, problem.filename) - elif isinstance(problem, MissingGnomeCommonDependency): - if problem.package == "glib-gettext": - return BinaryRequirement('glib-gettextize') - else: - logging.warning( - "No known command for gnome-common dependency %s", - problem.package) - return None - elif isinstance(problem, MissingXfceDependency): - if problem.package == "gtk-doc": - return BinaryRequirement("gtkdocize") - else: - logging.warning( - "No known command for xfce dependency %s", - problem.package) - return None - elif isinstance(problem, MissingPerlModule): - return PerlModuleRequirement( - module=problem.module, - filename=problem.filename, - inc=problem.inc) - elif isinstance(problem, MissingPerlFile): - return PerlFileRequirement(filename=problem.filename) - elif isinstance(problem, MissingAutoconfMacro): - return AutoconfMacroRequirement(problem.macro) - elif isinstance(problem, MissingPythonModule): - return PythonModuleRequirement( - problem.module, - python_version=problem.python_version, - minimum_version=problem.minimum_version) - elif isinstance(problem, MissingPythonDistribution): - return PythonPackageRequirement( - problem.module, - python_version=problem.python_version, - minimum_version=problem.minimum_version) - else: - return None - - -class UpstreamRequirementFixer(BuildFixer): - - def can_fix(self, error): - req = problem_to_upstream_requirement(error) - return req is not None - - def fix(self, error, context): - req = problem_to_upstream_requirement(error) - if req is None: - return False - - try: - package = context.resolver.resolve(req) - except NoAptPackage: - return False - return context.add_dependency(package) - - def retry_apt_failure(error, 
context): return True @@ -635,26 +477,39 @@ def fix_missing_makefile_pl(error, context): return False -VERSIONED_PACKAGE_FIXERS: List[BuildFixer] = [ - SimpleBuildFixer( - NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), - SimpleBuildFixer(MissingConfigure, fix_missing_configure), - SimpleBuildFixer(MissingAutomakeInput, fix_missing_automake_input), - SimpleBuildFixer(MissingConfigStatusInput, fix_missing_config_status_input), -] +class SimpleBuildFixer(BuildFixer): + + def __init__(self, problem_cls, fn): + self._problem_cls = problem_cls + self._fn = fn + + def can_fix(self, problem): + return isinstance(problem, self._problem_cls) + + def _fix(self, problem, context): + return self._fn(problem, context) -APT_FIXERS: List[BuildFixer] = [ - SimpleBuildFixer(MissingPythonModule, fix_missing_python_module), - SimpleBuildFixer(MissingPythonDistribution, fix_missing_python_distribution), - SimpleBuildFixer(AptFetchFailure, retry_apt_failure), - UpstreamRequirementFixer(), -] +def versioned_package_fixers(): + return [ + SimpleBuildFixer( + NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol), + SimpleBuildFixer(MissingConfigure, fix_missing_configure), + SimpleBuildFixer(MissingAutomakeInput, fix_missing_automake_input), + SimpleBuildFixer(MissingConfigStatusInput, fix_missing_config_status_input), + SimpleBuildFixer(MissingPerlFile, fix_missing_makefile_pl), + ] -GENERIC_FIXERS: List[BuildFixer] = [ - SimpleBuildFixer(MissingPerlFile, fix_missing_makefile_pl), -] +def apt_fixers(apt) -> List[BuildFixer]: + from ..resolver.apt import AptResolver + resolver = AptResolver(apt) + return [ + SimpleBuildFixer(MissingPythonModule, fix_missing_python_module), + SimpleBuildFixer(MissingPythonDistribution, fix_missing_python_distribution), + SimpleBuildFixer(AptFetchFailure, retry_apt_failure), + UpstreamRequirementFixer(resolver), + ] def build_incrementally( @@ -720,7 +575,7 @@ def build_incrementally( raise try: if not resolve_error( - e.error, context, 
VERSIONED_PACKAGE_FIXERS + APT_FIXERS + GENERIC_FIXERS + e.error, context, versioned_package_fixers() + apt_fixers(apt) ): logging.warning("Failed to resolve error %r. Giving up.", e.error) raise diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index d2d6a9f..c6d25a1 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -47,19 +47,6 @@ class BuildFixer(object): return self._fix(problem, context) -class SimpleBuildFixer(BuildFixer): - - def __init__(self, problem_cls, fn): - self._problem_cls = problem_cls - self._fn = fn - - def can_fix(self, problem): - return isinstance(problem, self._problem_cls) - - def _fix(self, problem, context): - return self._fn(problem, context) - - class DependencyContext(object): def __init__( self, @@ -71,8 +58,6 @@ class DependencyContext(object): ): self.tree = tree self.apt = apt - from .resolver.apt import AptResolver - self.resolver = AptResolver(apt) self.subpath = subpath self.committer = committer self.update_changelog = update_changelog @@ -94,47 +79,23 @@ class SchrootDependencyContext(DependencyContext): return True -def fix_perl_module_from_cpan(error, context): - # TODO(jelmer): Specify -T to skip tests? 
- context.session.check_call( - ["cpan", "-i", error.module], user="root", env={"PERL_MM_USE_DEFAULT": "1"} - ) - return True - - -NPM_COMMAND_PACKAGES = { - "del-cli": "del-cli", -} - - -def fix_npm_missing_command(error, context): - try: - package = NPM_COMMAND_PACKAGES[error.command] - except KeyError: - return False - - context.session.check_call(["npm", "-g", "install", package]) - return True - - -def fix_python_package_from_pip(error, context): - context.session.check_call(["pip", "install", error.distribution]) - return True - - -GENERIC_INSTALL_FIXERS: List[BuildFixer] = [ - SimpleBuildFixer(MissingPerlModule, fix_perl_module_from_cpan), - SimpleBuildFixer(MissingPythonDistribution, fix_python_package_from_pip), - SimpleBuildFixer(MissingCommand, fix_npm_missing_command), -] +def generic_install_fixers(session): + from .buildlog import UpstreamRequirementFixer + from .resolver import CPANResolver, PypiResolver, NpmResolver + return [ + UpstreamRequirementFixer(CPANResolver(session)), + UpstreamRequirementFixer(PypiResolver(session)), + UpstreamRequirementFixer(NpmResolver(session)), + ] def run_with_build_fixer( session: Session, args: List[str], fixers: Optional[List[BuildFixer]] = None): if fixers is None: - from .debian.fix_build import APT_FIXERS - fixers = GENERIC_INSTALL_FIXERS + APT_FIXERS + from .debian.fix_build import apt_fixers + from .resolver.apt import AptResolver + fixers = generic_install_fixers(session) + apt_fixers(AptResolver.from_session(session)) logging.info("Running %r", args) fixed_errors = [] while True: diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py index 18bbd98..90e40c7 100644 --- a/ognibuild/resolver/__init__.py +++ b/ognibuild/resolver/__init__.py @@ -34,20 +34,109 @@ class Resolver(object): raise NotImplementedError(self.met) -class NativeResolver(Resolver): +class CPANResolver(object): + def __init__(self, session): self.session = session - @classmethod - def from_session(cls, session): - return 
cls(session) - def install(self, requirements): - raise NotImplementedError(self.install) + from ..requirements import PerlModuleRequirement + missing = [] + for requirement in requirements: + if not isinstance(requirement, PerlModuleRequirement): + missing.append(requirement) + continue + # TODO(jelmer): Specify -T to skip tests? + self.session.check_call( + ["cpan", "-i", requirement.module], + user="root", env={"PERL_MM_USE_DEFAULT": "1"} + ) + if missing: + raise MissingDependencies(missing) def explain(self, requirements): raise NotImplementedError(self.explain) + def met(self, requirement): + raise NotImplementedError(self.met) + + +class PypiResolver(object): + + def __init__(self, session): + self.session = session + + def install(self, requirements): + from ..requirements import PythonPackageRequirement + missing = [] + for requirement in requirements: + if not isinstance(requirement, PythonPackageRequirement): + missing.append(requirement) + continue + self.session.check_call(["pip", "install", requirement.package]) + if missing: + raise MissingDependencies(missing) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + def met(self, requirement): + raise NotImplementedError(self.met) + + +NPM_COMMAND_PACKAGES = { + "del-cli": "del-cli", +} + + +class NpmResolver(object): + + def __init__(self, session): + self.session = session + + def install(self, requirements): + from ..requirements import NodePackageRequirement + missing = [] + for requirement in requirements: + if not isinstance(requirement, NodePackageRequirement): + missing.append(requirement) + continue + try: + package = NPM_COMMAND_PACKAGES[requirement.command] + except KeyError: + missing.append(requirement) + continue + self.session.check_call(["npm", "-g", "install", package]) + if missing: + raise MissingDependencies(missing) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + def met(self, requirement): + raise 
NotImplementedError(self.met) + + +class StackedResolver(Resolver): + def __init__(self, subs): + self.subs = subs + + def install(self, requirements): + for sub in self.subs: + try: + sub.install(requirements) + except MissingDependencies as e: + requirements = e.requirements + else: + return + + +def native_resolvers(session): + return StackedResolver([ + CPANResolver(session), + PypiResolver(session), + NpmResolver(session)]) + class ExplainResolver(Resolver): def __init__(self, session): diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index 0c6a783..e34be07 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -21,7 +21,7 @@ import posixpath from ..debian.apt import AptManager -from . import Resolver +from . import Resolver, MissingDependencies from ..requirements import ( BinaryRequirement, CHeaderRequirement, @@ -57,24 +57,35 @@ class NoAptPackage(Exception): """No apt package.""" +class AptRequirement(object): + + def __init__(self, package, minimum_version=None): + self.package = package + self.minimum_version = minimum_version + + def get_package_for_python_package(apt_mgr, package, python_version, minimum_version=None): if python_version == "pypy": - return apt_mgr.get_package_for_paths( + pkg_name = apt_mgr.get_package_for_paths( ["/usr/lib/pypy/dist-packages/%s-.*.egg-info/PKG-INFO" % package], regex=True) elif python_version == "cpython2": - return apt_mgr.get_package_for_paths( + pkg_name = apt_mgr.get_package_for_paths( ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info/PKG-INFO" % package], regex=True) elif python_version == "cpython3": - return apt_mgr.get_package_for_paths( + pkg_name = apt_mgr.get_package_for_paths( ["/usr/lib/python3/dist-packages/%s-.*.egg-info/PKG-INFO" % package], regex=True) else: raise NotImplementedError + # TODO(jelmer): Dealing with epoch, etc? 
+ if pkg_name is not None: + return AptRequirement(pkg_name, minimum_version) + return None -def get_package_for_python_module(apt_mgr, module, python_version): +def get_package_for_python_module(apt_mgr, module, python_version, minimum_version): if python_version == "python3": paths = [ posixpath.join( @@ -127,7 +138,10 @@ def get_package_for_python_module(apt_mgr, module, python_version): ] else: raise AssertionError("unknown python version %r" % python_version) - return apt_mgr.get_package_for_paths(paths, regex=True) + pkg_name = apt_mgr.get_package_for_paths(paths, regex=True) + if pkg_name is not None: + return AptRequirement(pkg_name, minimum_version=minimum_version) + return None def resolve_binary_req(apt_mgr, req): @@ -138,7 +152,10 @@ def resolve_binary_req(apt_mgr, req): posixpath.join(dirname, req.binary_name) for dirname in ["/usr/bin", "/bin"] ] - return apt_mgr.get_package_for_paths(paths) + pkg_name = apt_mgr.get_package_for_paths(paths) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_pkg_config_req(apt_mgr, req): @@ -151,11 +168,16 @@ def resolve_pkg_config_req(apt_mgr, req): [posixpath.join("/usr/lib", ".*", "pkgconfig", req.module + ".pc")], regex=True, minimum_version=req.minimum_version) - return package + if package is not None: + return AptRequirement(package) + return None def resolve_path_req(apt_mgr, req): - return apt_mgr.get_package_for_paths([req.path]) + package = apt_mgr.get_package_for_paths([req.path]) + if package is not None: + return AptRequirement(package) + return None def resolve_c_header_req(apt_mgr, req): @@ -166,17 +188,25 @@ def resolve_c_header_req(apt_mgr, req): package = apt_mgr.get_package_for_paths( [posixpath.join("/usr/include", ".*", req.header)], regex=True ) - return package + if package is None: + return None + return AptRequirement(package) def resolve_js_runtime_req(apt_mgr, req): - return apt_mgr.get_package_for_paths( + package = apt_mgr.get_package_for_paths( 
["/usr/bin/node", "/usr/bin/duk"], regex=False) + if package is not None: + return AptRequirement(package) + return None def resolve_vala_package_req(apt_mgr, req): path = "/usr/share/vala-[0-9.]+/vapi/%s.vapi" % req.package - return apt_mgr.get_package_for_paths([path], regex=True) + package = apt_mgr.get_package_for_paths([path], regex=True) + if package is not None: + return AptRequirement(package) + return None def resolve_ruby_gem_req(apt_mgr, req): @@ -186,30 +216,45 @@ def resolve_ruby_gem_req(apt_mgr, req): "specifications/%s-.*\\.gemspec" % req.gem ) ] - return apt_mgr.get_package_for_paths( - paths, regex=True, minimum_version=req.minimum_version) + package = apt_mgr.get_package_for_paths( + paths, regex=True) + if package is not None: + return AptRequirement(package, minimum_version=req.minimum_version) + return None def resolve_go_package_req(apt_mgr, req): - return apt_mgr.get_package_for_paths( + package = apt_mgr.get_package_for_paths( [posixpath.join("/usr/share/gocode/src", req.package, ".*")], regex=True ) + if package is not None: + return AptRequirement(package) + return None def resolve_dh_addon_req(apt_mgr, req): paths = [posixpath.join("/usr/share/perl5", req.path)] - return apt_mgr.get_package_for_paths(paths) + package = apt_mgr.get_package_for_paths(paths) + if package is not None: + return AptRequirement(package) + return None def resolve_php_class_req(apt_mgr, req): path = "/usr/share/php/%s.php" % req.php_class.replace("\\", "/") - return apt_mgr.get_package_for_paths([path]) + package = apt_mgr.get_package_for_paths([path]) + if package is not None: + return AptRequirement(package) + return None def resolve_r_package_req(apt_mgr, req): paths = [posixpath.join("/usr/lib/R/site-library/.*/R/%s$" % req.package)] - return apt_mgr.get_package_for_paths(paths, regex=True) + package = apt_mgr.get_package_for_paths(paths, regex=True) + if package is not None: + return AptRequirement(package) + return None def resolve_node_package_req(apt_mgr, 
req): @@ -218,7 +263,10 @@ def resolve_node_package_req(apt_mgr, req): "/usr/lib/nodejs/%s/package.json" % req.package, "/usr/share/nodejs/%s/package.json" % req.package, ] - return apt_mgr.get_package_for_paths(paths, regex=True) + pkg_name = apt_mgr.get_package_for_paths(paths, regex=True) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_library_req(apt_mgr, req): @@ -228,21 +276,27 @@ def resolve_library_req(apt_mgr, req): posixpath.join("/usr/lib/lib%s.a$" % req.library), posixpath.join("/usr/lib/.*/lib%s.a$" % req.library), ] - return apt_mgr.get_package_for_paths(paths, regex=True) + pkg_name = apt_mgr.get_package_for_paths(paths, regex=True) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_ruby_file_req(apt_mgr, req): paths = [posixpath.join("/usr/lib/ruby/vendor_ruby/%s.rb" % req.filename)] package = apt_mgr.get_package_for_paths(paths) if package is not None: - return package + return AptRequirement(package) paths = [ posixpath.join( r"/usr/share/rubygems-integration/all/gems/([^/]+)/" "lib/%s.rb" % req.filename ) ] - return apt_mgr.get_package_for_paths(paths, regex=True) + pkg_name = apt_mgr.get_package_for_paths(paths, regex=True) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_xml_entity_req(apt_mgr, req): @@ -258,7 +312,10 @@ def resolve_xml_entity_req(apt_mgr, req): else: return None - return apt_mgr.get_package_for_paths([search_path], regex=False) + pkg_name = apt_mgr.get_package_for_paths([search_path], regex=False) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_sprockets_file_req(apt_mgr, req): @@ -267,7 +324,10 @@ def resolve_sprockets_file_req(apt_mgr, req): else: logging.warning("unable to handle content type %s", req.content_type) return None - return apt_mgr.get_package_for_paths([path], regex=True) + pkg_name = apt_mgr.get_package_for_paths([path], regex=True) + if pkg_name is not None: + 
return AptRequirement(pkg_name) + return None def resolve_java_class_req(apt_mgr, req): @@ -285,12 +345,15 @@ def resolve_java_class_req(apt_mgr, req): if package is None: logging.warning("no package for files in %r", classpath) return None - return package + return AptRequirement(package) def resolve_haskell_package_req(apt_mgr, req): path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % req.deps[0][0] - return apt_mgr.get_package_for_paths([path], regex=True) + pkg_name = apt_mgr.get_package_for_paths([path], regex=True) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_maven_artifact_req(apt_mgr, req): @@ -319,16 +382,22 @@ def resolve_maven_artifact_req(apt_mgr, req): "%s-%s.%s" % (artifact_id, version, kind), ) ] - return apt_mgr.get_package_for_paths(paths, regex=regex) + pkg_name = apt_mgr.get_package_for_paths(paths, regex=regex) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_gnome_common_req(apt_mgr, req): - return 'gnome-common' + return AptRequirement('gnome-common') def resolve_jdk_file_req(apt_mgr, req): path = req.jdk_path + ".*/" + req.filename - return apt_mgr.get_package_for_paths([path], regex=True) + pkg_name = apt_mgr.get_package_for_paths([path], regex=True) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_perl_module_req(apt_mgr, req): @@ -344,11 +413,17 @@ def resolve_perl_module_req(apt_mgr, req): paths = [req.filename] else: paths = [posixpath.join(inc, req.filename) for inc in req.inc] - return apt_mgr.get_package_for_paths(paths, regex=False) + pkg_name = apt_mgr.get_package_for_paths(paths, regex=False) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_perl_file_req(apt_mgr, req): - return apt_mgr.get_package_for_paths([req.filename], regex=False) + pkg_name = apt_mgr.get_package_for_paths([req.filename], regex=False) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None 
def _find_aclocal_fun(macro): @@ -370,7 +445,10 @@ def resolve_autoconf_macro_req(apt_mgr, req): except KeyError: logging.info("No local m4 file found defining %s", req.macro) return None - return apt_mgr.get_package_for_paths([path]) + pkg_name = apt_mgr.get_package_for_paths([path]) + if pkg_name is not None: + return AptRequirement(pkg_name) + return None def resolve_python_module_req(apt_mgr, req): @@ -421,14 +499,7 @@ APT_REQUIREMENT_RESOLVERS = [ ] -class AptRequirement(object): - - def __init__(self, package, minimum_version=None): - self.package = package - self.minimum_version = minimum_version - - -def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement): +def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement) -> AptRequirement: for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: if isinstance(req, rr_class): deb_req = rr_fn(apt_mgr, req) @@ -456,7 +527,17 @@ class AptResolver(Resolver): except NotImplementedError: missing.append(req) if missing: - self.apt.install([self.resolve(m) for m in missing]) + still_missing = [] + apt_requirements = [] + for m in missing: + try: + apt_requirements.append(self.resolve(m)) + except NoAptPackage: + still_missing.append(m) + self.apt.install( + [req.package for req in apt_requirements]) + if still_missing: + raise MissingDependencies(still_missing) def explain(self, requirements): raise NotImplementedError(self.explain) diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index 0cd80ec..c978008 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -34,8 +34,8 @@ from ..debian import apt from ..debian.apt import AptManager from ..debian.fix_build import ( resolve_error, - VERSIONED_PACKAGE_FIXERS, - APT_FIXERS, + versioned_package_fixers, + apt_fixers, BuildDependencyContext, ) from breezy.tests import TestCaseWithTransport @@ -95,10 +95,10 @@ blah (0.1) UNRELEASED; urgency=medium self.tree, apt, subpath="", - 
committer="Janitor ", + committer="ognibuild ", update_changelog=True, ) - return resolve_error(error, context, VERSIONED_PACKAGE_FIXERS + APT_FIXERS) + return resolve_error(error, context, versioned_package_fixers() + apt_fixers(apt)) def get_build_deps(self): with open(self.tree.abspath("debian/control"), "r") as f: From dd14deb00d1be5ebff24f2a6edbf248cee940dc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 27 Feb 2021 16:05:36 +0000 Subject: [PATCH 79/83] Fix all tests. --- ognibuild/__init__.py | 3 + ognibuild/__main__.py | 56 +++++----- ognibuild/build.py | 4 +- ognibuild/buildlog.py | 2 + ognibuild/buildsystem.py | 136 ++++++++++++----------- ognibuild/clean.py | 4 +- ognibuild/debian/fix_build.py | 118 +++++++++++--------- ognibuild/dist.py | 4 +- ognibuild/fix_build.py | 19 +--- ognibuild/info.py | 2 +- ognibuild/install.py | 4 +- ognibuild/resolver/__init__.py | 79 +++++++++---- ognibuild/resolver/apt.py | 25 ++--- ognibuild/session/plain.py | 3 + ognibuild/test.py | 4 +- ognibuild/tests/test_debian_fix_build.py | 34 +++--- setup.py | 5 +- 17 files changed, 280 insertions(+), 222 deletions(-) diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py index 552f109..9b7b07f 100644 --- a/ognibuild/__init__.py +++ b/ognibuild/__init__.py @@ -67,3 +67,6 @@ class UpstreamOutput(object): def __init__(self, family, name): self.family = family self.name = name + + def __repr__(self): + return "%s(%r, %r)" % (type(self).__name__, self.family, self.name) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index d1ffb02..3db88bf 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -21,8 +21,7 @@ import sys from . 
import UnidentifiedError from .buildsystem import NoBuildToolsFound, detect_buildsystems from .resolver import ( - ExplainResolver, - AutoResolver, + auto_resolver, native_resolvers, MissingDependencies, ) @@ -56,6 +55,11 @@ STAGE_MAP = { "clean": [], } +def determine_fixers(session, resolver): + from .buildlog import UpstreamRequirementFixer + from .resolver.apt import AptResolver + return [UpstreamRequirementFixer(resolver)] + def main(): # noqa: C901 import argparse @@ -67,12 +71,17 @@ def main(): # noqa: C901 parser.add_argument("--schroot", type=str, help="schroot to run in.") parser.add_argument( "--resolve", - choices=["explain", "apt", "native"], - default="apt", + choices=["apt", "native", "auto"], + default="auto", help="What to do about missing dependencies", ) + parser.add_argument( + "--explain", + action='store_true', + help="Explain what needs to be done rather than making changes") parser.add_argument( "--ignore-declared-dependencies", + "--optimistic", action="store_true", help="Ignore declared dependencies, follow build errors only", ) @@ -109,56 +118,53 @@ def main(): # noqa: C901 with session: if args.resolve == "apt": resolver = AptResolver.from_session(session) - elif args.resolve == "explain": - resolver = ExplainResolver.from_session(session) elif args.resolve == "native": resolver = native_resolvers(session) - elif args.resolver == "auto": - resolver = AutoResolver.from_session(session) + elif args.resolve == "auto": + resolver = auto_resolver(session) + logging.info('Using requirement resolver: %s', resolver) os.chdir(args.directory) try: bss = list(detect_buildsystems(args.directory)) logging.info('Detected buildsystems: %r', bss) - if not args.ignore_declared_dependencies: + if not args.ignore_declared_dependencies and not args.explain: stages = STAGE_MAP[args.subcommand] if stages: for bs in bss: install_necessary_declared_requirements(resolver, bs, stages) + fixers = determine_fixers(session, resolver) if args.subcommand == "dist": 
from .dist import run_dist - run_dist(session=session, buildsystems=bss, resolver=resolver) + run_dist( + session=session, buildsystems=bss, resolver=resolver, + fixers=fixers) if args.subcommand == "build": from .build import run_build - run_build(session, buildsystems=bss, resolver=resolver) + run_build( + session, buildsystems=bss, resolver=resolver, + fixers=fixers) if args.subcommand == "clean": from .clean import run_clean - run_clean(session, buildsystems=bss, resolver=resolver) + run_clean( + session, buildsystems=bss, resolver=resolver, + fixers=fixers) if args.subcommand == "install": from .install import run_install run_install( session, buildsystems=bss, resolver=resolver, - user=args.user) + fixers=fixers, user=args.user) if args.subcommand == "test": from .test import run_test - run_test(session, buildsystems=bss, resolver=resolver) + run_test(session, buildsystems=bss, resolver=resolver, + fixers=fixers) if args.subcommand == "info": from .info import run_info - run_info(session, buildsystems=bss, resolver=resolver) + run_info(session, buildsystems=bss) except UnidentifiedError: return 1 except NoBuildToolsFound: logging.info("No build tools found.") return 1 - except MissingDependencies as e: - for req in e.requirements: - logging.info("Missing dependency (%s:%s)", - req.family, req.package) - for resolver in [ - AptResolver.from_session(session), - native_resolvers(session), - ]: - logging.info(" %s", resolver.explain([req])) - return 2 return 0 diff --git a/ognibuild/build.py b/ognibuild/build.py index b58db3a..1b03bf5 100644 --- a/ognibuild/build.py +++ b/ognibuild/build.py @@ -18,13 +18,13 @@ from .buildsystem import NoBuildToolsFound -def run_build(session, buildsystems, resolver): +def run_build(session, buildsystems, resolver, fixers): # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() for buildsystem in buildsystems: - buildsystem.build(session, resolver) + buildsystem.build(session, resolver, fixers) return raise NoBuildToolsFound() diff --git a/ognibuild/buildlog.py b/ognibuild/buildlog.py index 0ff19a8..ae358d5 100644 --- a/ognibuild/buildlog.py +++ b/ognibuild/buildlog.py @@ -189,4 +189,6 @@ class UpstreamRequirementFixer(BuildFixer): return False package = self.resolver.resolve(req) + if package is None: + return False return context.add_dependency(package) diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index cc8b30e..37261aa 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -31,7 +31,7 @@ from .requirements import ( NodePackageRequirement, CargoCrateRequirement, ) -from .fix_build import run_with_build_fixer +from .fix_build import run_with_build_fixers class NoBuildToolsFound(Exception): @@ -51,19 +51,19 @@ class BuildSystem(object): name: str - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): raise NotImplementedError(self.dist) - def test(self, session, resolver): + def test(self, session, resolver, fixers): raise NotImplementedError(self.test) - def build(self, session, resolver): + def build(self, session, resolver, fixers): raise NotImplementedError(self.build) - def clean(self, session, resolver): + def clean(self, session, resolver, fixers): raise NotImplementedError(self.clean) - def install(self, session, resolver, install_target): + def install(self, session, resolver, fixers, install_target): raise NotImplementedError(self.install) def get_declared_dependencies(self): @@ -83,25 +83,25 @@ class Pear(BuildSystem): def setup(self, resolver): resolver.install([BinaryRequirement("pear")]) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) - run_with_build_fixer(session, ["pear", "package"]) + run_with_build_fixers(session, ["pear", "package"], fixers) - def test(self, 
session, resolver): + def test(self, session, resolver, fixers): self.setup(resolver) - run_with_build_fixer(session, ["pear", "run-tests"]) + run_with_build_fixers(session, ["pear", "run-tests"], fixers) - def build(self, session, resolver): + def build(self, session, resolver, fixers): self.setup(resolver) - run_with_build_fixer(session, ["pear", "build", self.path]) + run_with_build_fixers(session, ["pear", "build", self.path], fixers) - def clean(self, session, resolver): + def clean(self, session, resolver, fixers): self.setup(resolver) # TODO - def install(self, session, resolver, install_target): + def install(self, session, resolver, fixers, install_target): self.setup(resolver) - run_with_build_fixer(session, ["pear", "install", self.path]) + run_with_build_fixers(session, ["pear", "install", self.path], fixers) class SetupPy(BuildSystem): @@ -143,41 +143,40 @@ class SetupPy(BuildSystem): # TODO(jelmer): Install setup_requires - def test(self, session, resolver): + def test(self, session, resolver, fixers): self.setup(resolver) - self._run_setup(session, resolver, ["test"]) + self._run_setup(session, resolver, ["test"], fixers) - def build(self, session, resolver): + def build(self, session, resolver, fixers): self.setup(resolver) - self._run_setup(session, resolver, ["build"]) + self._run_setup(session, resolver, ["build"], fixers) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) - self._run_setup(session, resolver, ["sdist"]) + self._run_setup(session, resolver, ["sdist"], fixers) - def clean(self, session, resolver): + def clean(self, session, resolver, fixers): self.setup(resolver) - self._run_setup(session, resolver, ["clean"]) + self._run_setup(session, resolver, ["clean"], fixers) - def install(self, session, resolver, install_target): + def install(self, session, resolver, fixers, install_target): self.setup(resolver) extra_args = [] if install_target.user: extra_args.append('--user') - 
self._run_setup(session, resolver, ["install"] + extra_args) + self._run_setup(session, resolver, ["install"] + extra_args, fixers) - def _run_setup(self, session, resolver, args): + def _run_setup(self, session, resolver, args, fixers): interpreter = shebang_binary("setup.py") if interpreter is not None: - if interpreter in ("python3", "python2", "python"): - resolver.install([BinaryRequirement(interpreter)]) - else: - raise ValueError("Unknown interpreter %r" % interpreter) - run_with_build_fixer(session, ["./setup.py"] + args) + resolver.install([BinaryRequirement(interpreter)]) + run_with_build_fixers(session, ["./setup.py"] + args, fixers) else: # Just assume it's Python 3 resolver.install([BinaryRequirement("python3")]) - run_with_build_fixer(session, ["python3", "./setup.py"] + args) + run_with_build_fixers( + session, ["python3", "./setup.py"] + args, + fixers) def get_declared_dependencies(self): for require in self.result.get_requires(): @@ -215,7 +214,7 @@ class PyProject(BuildSystem): with open(self.path, "r") as pf: return toml.load(pf) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): if "poetry" in self.pyproject.get("tool", []): logging.info( "Found pyproject.toml with poetry section, " "assuming poetry project." 
@@ -247,7 +246,7 @@ class SetupCfg(BuildSystem): ] ) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) session.check_call(["python3", "-m", "pep517.build", "-s", "."]) @@ -271,9 +270,9 @@ class Npm(BuildSystem): def setup(self, resolver): resolver.install([BinaryRequirement("npm")]) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) - run_with_build_fixer(session, ["npm", "pack"]) + run_with_build_fixers(session, ["npm", "pack"], fixers) class Waf(BuildSystem): @@ -286,9 +285,9 @@ class Waf(BuildSystem): def setup(self, resolver): resolver.install([BinaryRequirement("python3")]) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) - run_with_build_fixer(session, ["./waf", "dist"]) + run_with_build_fixers(session, ["./waf", "dist"], fixers) class Gem(BuildSystem): @@ -301,14 +300,14 @@ class Gem(BuildSystem): def setup(self, resolver): resolver.install([BinaryRequirement("gem2deb")]) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) gemfiles = [ entry.name for entry in session.scandir(".") if entry.name.endswith(".gem") ] if len(gemfiles) > 1: logging.warning("More than one gemfile. 
Trying the first?") - run_with_build_fixer(session, ["gem2tgz", gemfiles[0]]) + run_with_build_fixers(session, ["gem2tgz", gemfiles[0]], fixers) class DistInkt(BuildSystem): @@ -340,15 +339,16 @@ class DistInkt(BuildSystem): ] ) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(resolver) if self.name == "dist-inkt": resolver.install([PerlModuleRequirement(self.dist_inkt_class)]) - run_with_build_fixer(session, ["distinkt-dist"]) + run_with_build_fixers(session, ["distinkt-dist"], fixers) else: # Default to invoking Dist::Zilla resolver.install([PerlModuleRequirement("Dist::Zilla")]) - run_with_build_fixer(session, ["dzil", "build", "--in", ".."]) + run_with_build_fixers( + session, ["dzil", "build", "--in", ".."], fixers) class Make(BuildSystem): @@ -358,26 +358,30 @@ class Make(BuildSystem): def __repr__(self): return "%s()" % type(self).__name__ - def setup(self, session, resolver): + def setup(self, session, resolver, fixers): resolver.install([BinaryRequirement("make")]) if session.exists("Makefile.PL") and not session.exists("Makefile"): resolver.install([BinaryRequirement("perl")]) - run_with_build_fixer(session, ["perl", "Makefile.PL"]) + run_with_build_fixers(session, ["perl", "Makefile.PL"], fixers) if not session.exists("Makefile") and not session.exists("configure"): if session.exists("autogen.sh"): if shebang_binary("autogen.sh") is None: - run_with_build_fixer(session, ["/bin/sh", "./autogen.sh"]) + run_with_build_fixers( + session, ["/bin/sh", "./autogen.sh"], fixers) try: - run_with_build_fixer(session, ["./autogen.sh"]) + run_with_build_fixers( + session, ["./autogen.sh"], fixers) except UnidentifiedError as e: if ( "Gnulib not yet bootstrapped; " "run ./bootstrap instead.\n" in e.lines ): - run_with_build_fixer(session, ["./bootstrap"]) - run_with_build_fixer(session, ["./autogen.sh"]) + run_with_build_fixers( + session, ["./bootstrap"], fixers) + run_with_build_fixers( + session, ["./autogen.sh"], fixers) 
else: raise @@ -390,23 +394,23 @@ class Make(BuildSystem): BinaryRequirement("libtoolize"), ] ) - run_with_build_fixer(session, ["autoreconf", "-i"]) + run_with_build_fixers(session, ["autoreconf", "-i"], fixers) if not session.exists("Makefile") and session.exists("configure"): session.check_call(["./configure"]) - def build(self, session, resolver): + def build(self, session, resolver, fixers): self.setup(session, resolver) - run_with_build_fixer(session, ["make", "all"]) + run_with_build_fixers(session, ["make", "all"], fixers) - def install(self, session, resolver, install_target): + def install(self, session, resolver, fixers, install_target): self.setup(session, resolver) - run_with_build_fixer(session, ["make", "install"]) + run_with_build_fixers(session, ["make", "install"], fixers) - def dist(self, session, resolver): + def dist(self, session, resolver, fixers): self.setup(session, resolver) try: - run_with_build_fixer(session, ["make", "dist"]) + run_with_build_fixers(session, ["make", "dist"], fixers) except UnidentifiedError as e: if "make: *** No rule to make target 'dist'. 
Stop.\n" in e.lines: pass @@ -416,17 +420,17 @@ class Make(BuildSystem): "Reconfigure the source tree " "(via './config' or 'perl Configure'), please.\n" ) in e.lines: - run_with_build_fixer(session, ["./config"]) - run_with_build_fixer(session, ["make", "dist"]) + run_with_build_fixers(session, ["./config"], fixers) + run_with_build_fixers(session, ["make", "dist"], fixers) elif ( "Please try running 'make manifest' and then run " "'make dist' again.\n" in e.lines ): - run_with_build_fixer(session, ["make", "manifest"]) - run_with_build_fixer(session, ["make", "dist"]) + run_with_build_fixers(session, ["make", "manifest"], fixers) + run_with_build_fixers(session, ["make", "dist"], fixers) elif "Please run ./configure first\n" in e.lines: - run_with_build_fixer(session, ["./configure"]) - run_with_build_fixer(session, ["make", "dist"]) + run_with_build_fixers(session, ["./configure"], fixers) + run_with_build_fixers(session, ["make", "dist"], fixers) elif any( [ re.match( @@ -437,8 +441,8 @@ class Make(BuildSystem): for line in e.lines ] ): - run_with_build_fixer(session, ["./configure"]) - run_with_build_fixer(session, ["make", "dist"]) + run_with_build_fixers(session, ["./configure"], fixers) + run_with_build_fixers(session, ["make", "dist"], fixers) elif any( [ re.match( @@ -449,8 +453,8 @@ class Make(BuildSystem): for line in e.lines ] ): - run_with_build_fixer(session, ["make", "manifest"]) - run_with_build_fixer(session, ["make", "dist"]) + run_with_build_fixers(session, ["make", "manifest"], fixers) + run_with_build_fixers(session, ["make", "dist"], fixers) else: raise else: diff --git a/ognibuild/clean.py b/ognibuild/clean.py index 9f1c4d1..6bbb3ee 100644 --- a/ognibuild/clean.py +++ b/ognibuild/clean.py @@ -18,13 +18,13 @@ from .buildsystem import NoBuildToolsFound -def run_clean(session, buildsystems, resolver): +def run_clean(session, buildsystems, resolver, fixers): # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() for buildsystem in buildsystems: - buildsystem.clean(session, resolver) + buildsystem.clean(session, resolver, fixers) return raise NoBuildToolsFound() diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py index a32219b..1ccc9bf 100644 --- a/ognibuild/debian/fix_build.py +++ b/ognibuild/debian/fix_build.py @@ -36,7 +36,6 @@ from breezy.tree import Tree from debmutate.control import ( ensure_some_version, ensure_minimum_version, - pg_buildext_updatecontrol, ControlEditor, ) from debmutate.debhelper import ( @@ -82,7 +81,7 @@ from buildlog_consultant.sbuild import ( from ..fix_build import BuildFixer, resolve_error, DependencyContext from ..buildlog import UpstreamRequirementFixer from ..resolver.apt import ( - NoAptPackage, + AptRequirement, get_package_for_python_module, ) from .build import attempt_build, DEFAULT_BUILDER @@ -99,11 +98,11 @@ class CircularDependency(Exception): class BuildDependencyContext(DependencyContext): - def add_dependency(self, package: str, minimum_version: Optional[Version] = None): + + def add_dependency(self, requirement: AptRequirement): return add_build_dependency( self.tree, - package, - minimum_version=minimum_version, + requirement, committer=self.committer, subpath=self.subpath, update_changelog=self.update_changelog, @@ -119,12 +118,11 @@ class AutopkgtestDependencyContext(DependencyContext): tree, apt, subpath, committer, update_changelog ) - def add_dependency(self, package, minimum_version=None): + def add_dependency(self, requirement): return add_test_dependency( self.tree, self.testname, - package, - minimum_version=minimum_version, + requirement, committer=self.committer, subpath=self.subpath, update_changelog=self.update_changelog, @@ -133,37 +131,38 @@ class AutopkgtestDependencyContext(DependencyContext): def add_build_dependency( tree: Tree, - package: str, - minimum_version: Optional[Version] = None, + requirement: AptRequirement, committer: 
Optional[str] = None, subpath: str = "", update_changelog: bool = True, ): - if not isinstance(package, str): - raise TypeError(package) + if not isinstance(requirement, AptRequirement): + raise TypeError(requirement) control_path = os.path.join(tree.abspath(subpath), "debian/control") try: with ControlEditor(path=control_path) as updater: for binary in updater.binaries: - if binary["Package"] == package: - raise CircularDependency(package) - if minimum_version: + if binary["Package"] == requirement.package: + raise CircularDependency(requirement.package) + if requirement.minimum_version: updater.source["Build-Depends"] = ensure_minimum_version( - updater.source.get("Build-Depends", ""), package, minimum_version + updater.source.get("Build-Depends", ""), + requirement.package, requirement.minimum_version ) else: updater.source["Build-Depends"] = ensure_some_version( - updater.source.get("Build-Depends", ""), package + updater.source.get("Build-Depends", ""), + requirement.package ) except FormattingUnpreservable as e: logging.info("Unable to edit %s in a way that preserves formatting.", e.path) return False - if minimum_version: - desc = "%s (>= %s)" % (package, minimum_version) + if requirement.minimum_version: + desc = "%s (>= %s)" % (requirement.package, requirement.minimum_version) else: - desc = package + desc = requirement.package if not updater.changed: logging.info("Giving up; dependency %s was already present.", desc) @@ -182,14 +181,13 @@ def add_build_dependency( def add_test_dependency( tree, testname, - package, - minimum_version=None, + requirement, committer=None, subpath="", update_changelog=True, ): - if not isinstance(package, str): - raise TypeError(package) + if not isinstance(requirement, AptRequirement): + raise TypeError(requirement) tests_control_path = os.path.join(tree.abspath(subpath), "debian/tests/control") @@ -204,13 +202,14 @@ def add_test_dependency( command_counter += 1 if name != testname: continue - if minimum_version: + if 
requirement.minimum_version: control["Depends"] = ensure_minimum_version( - control.get("Depends", ""), package, minimum_version + control.get("Depends", ""), + requirement.package, requirement.minimum_version ) else: control["Depends"] = ensure_some_version( - control.get("Depends", ""), package + control.get("Depends", ""), requirement.package ) except FormattingUnpreservable as e: logging.info("Unable to edit %s in a way that preserves formatting.", e.path) @@ -218,10 +217,11 @@ def add_test_dependency( if not updater.changed: return False - if minimum_version: - desc = "%s (>= %s)" % (package, minimum_version) + if requirement.minimum_version: + desc = "%s (>= %s)" % ( + requirement.package, requirement.minimum_version) else: - desc = package + desc = requirement.package logging.info("Adding dependency to test %s: %s", testname, desc) return commit_debian_changes( @@ -333,7 +333,9 @@ def fix_missing_python_distribution(error, context): # noqa: C901 for dep_pkg in extra_build_deps: assert dep_pkg is not None - if not context.add_dependency(dep_pkg, minimum_version=error.minimum_version): + if not context.add_dependency( + AptRequirement( + dep_pkg.package, minimum_version=error.minimum_version)): return False return True @@ -345,9 +347,9 @@ def fix_missing_python_module(error, context): targeted = set() default = not targeted - pypy_pkg = get_package_for_python_module(context.apt, error.module, "pypy") - py2_pkg = get_package_for_python_module(context.apt, error.module, "python2") - py3_pkg = get_package_for_python_module(context.apt, error.module, "python3") + pypy_pkg = get_package_for_python_module(context.apt, error.module, "pypy", None) + py2_pkg = get_package_for_python_module(context.apt, error.module, "python2", None) + py3_pkg = get_package_for_python_module(context.apt, error.module, "python3", None) extra_build_deps = [] if error.python_version == 2: @@ -379,7 +381,8 @@ def fix_missing_python_module(error, context): for dep_pkg in extra_build_deps: 
 assert dep_pkg is not None

-        if not context.add_dependency(dep_pkg, error.minimum_version):
+        if not context.add_dependency(
+                AptRequirement(dep_pkg.package, error.minimum_version)):
             return False
     return True
@@ -402,7 +405,7 @@ def enable_dh_autoreconf(context):
             return dh_invoke_add_with(line, b"autoreconf")

     if update_rules(command_line_cb=add_with_autoreconf):
-        return context.add_dependency("dh-autoreconf")
+        return context.add_dependency(AptRequirement("dh-autoreconf"))
     return False


@@ -453,17 +456,27 @@ def fix_missing_config_status_input(error, context):
     return True


-def run_pgbuildext_updatecontrol(error, context):
-    logging.info("Running 'pg_buildext updatecontrol'")
-    # TODO(jelmer): run in the schroot
-    pg_buildext_updatecontrol(context.tree.abspath(context.subpath))
-    return commit_debian_changes(
-        context.tree,
-        context.subpath,
-        "Run 'pgbuildext updatecontrol'.",
-        committer=context.committer,
-        update_changelog=False,
-    )
+class PgBuildExtOutOfDateControlFixer(BuildFixer):
+
+    def __init__(self, session):
+        self.session = session
+
+    def can_fix(self, problem):
+        return isinstance(problem, NeedPgBuildExtUpdateControl)
+
+    # NOTE(review): dropped a stray duplicate ``_fix`` stub here that
+    # delegated to the nonexistent ``self._fn``; real implementation below.
+
+    def _fix(self, error, context):
+        logging.info("Running 'pg_buildext updatecontrol'")
+        self.session.check_call(["pg_buildext", "updatecontrol"])
+        return commit_debian_changes(
+            context.tree,
+            context.subpath,
+            "Run 'pgbuildext updatecontrol'.",
+            committer=context.committer,
+            update_changelog=False,
+        )


 def fix_missing_makefile_pl(error, context):
@@ -490,10 +503,9 @@ class SimpleBuildFixer(BuildFixer):
         return self._fn(problem, context)


-def versioned_package_fixers():
+def versioned_package_fixers(session):
     return [
-        SimpleBuildFixer(
-            NeedPgBuildExtUpdateControl, run_pgbuildext_updatecontrol),
+        PgBuildExtOutOfDateControlFixer(session),
         SimpleBuildFixer(MissingConfigure, fix_missing_configure),
         SimpleBuildFixer(MissingAutomakeInput, 
fix_missing_automake_input), SimpleBuildFixer(MissingConfigStatusInput, fix_missing_config_status_input), @@ -527,6 +539,8 @@ def build_incrementally( update_changelog=True, ): fixed_errors = [] + fixers = versioned_package_fixers(apt.session) + apt_fixers(apt) + logging.info('Using fixers: %r', fixers) while True: try: return attempt_build( @@ -574,9 +588,7 @@ def build_incrementally( logging.warning("unable to install for context %r", e.phase) raise try: - if not resolve_error( - e.error, context, versioned_package_fixers() + apt_fixers(apt) - ): + if not resolve_error(e.error, context, fixers): logging.warning("Failed to resolve error %r. Giving up.", e.error) raise except GeneratedFile: diff --git a/ognibuild/dist.py b/ognibuild/dist.py index d226e0f..cfe38d2 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -62,13 +62,13 @@ class DistNoTarball(Exception): """Dist operation did not create a tarball.""" -def run_dist(session, buildsystems, resolver): +def run_dist(session, buildsystems, resolver, fixers): # Some things want to write to the user's home directory, # e.g. 
pip caches in ~/.cache session.create_home() for buildsystem in buildsystems: - buildsystem.dist(session, resolver) + buildsystem.dist(session, resolver, fixers) return raise NoBuildToolsFound() diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index c6d25a1..5520e31 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -79,23 +79,8 @@ class SchrootDependencyContext(DependencyContext): return True -def generic_install_fixers(session): - from .buildlog import UpstreamRequirementFixer - from .resolver import CPANResolver, PypiResolver, NpmResolver - return [ - UpstreamRequirementFixer(CPANResolver(session)), - UpstreamRequirementFixer(PypiResolver(session)), - UpstreamRequirementFixer(NpmResolver(session)), - ] - - -def run_with_build_fixer( - session: Session, args: List[str], - fixers: Optional[List[BuildFixer]] = None): - if fixers is None: - from .debian.fix_build import apt_fixers - from .resolver.apt import AptResolver - fixers = generic_install_fixers(session) + apt_fixers(AptResolver.from_session(session)) +def run_with_build_fixers( + session: Session, args: List[str], fixers: List[BuildFixer]): logging.info("Running %r", args) fixed_errors = [] while True: diff --git a/ognibuild/info.py b/ognibuild/info.py index a5e4c9f..3848b2b 100644 --- a/ognibuild/info.py +++ b/ognibuild/info.py @@ -18,7 +18,7 @@ from .buildsystem import NoBuildToolsFound, InstallTarget -def run_info(session, buildsystems, resolver): +def run_info(session, buildsystems): for buildsystem in buildsystems: print('%r:' % buildsystem) deps = {} diff --git a/ognibuild/install.py b/ognibuild/install.py index c30967a..bf7bf62 100644 --- a/ognibuild/install.py +++ b/ognibuild/install.py @@ -18,7 +18,7 @@ from .buildsystem import NoBuildToolsFound, InstallTarget -def run_install(session, buildsystems, resolver, user: bool = False): +def run_install(session, buildsystems, resolver, fixers, user: bool = False): # Some things want to write to the user's home directory, # 
e.g. pip caches in ~/.cache session.create_home() @@ -27,7 +27,7 @@ def run_install(session, buildsystems, resolver, user: bool = False): install_target.user = user for buildsystem in buildsystems: - buildsystem.install(session, resolver, install_target) + buildsystem.install(session, resolver, fixers, install_target) return raise NoBuildToolsFound() diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py index 90e40c7..dd12b60 100644 --- a/ognibuild/resolver/__init__.py +++ b/ognibuild/resolver/__init__.py @@ -34,11 +34,14 @@ class Resolver(object): raise NotImplementedError(self.met) -class CPANResolver(object): +class CPANResolver(Resolver): def __init__(self, session): self.session = session + def __str__(self): + return "cpan" + def install(self, requirements): from ..requirements import PerlModuleRequirement missing = [] @@ -61,11 +64,42 @@ class CPANResolver(object): raise NotImplementedError(self.met) -class PypiResolver(object): +class CargoResolver(Resolver): def __init__(self, session): self.session = session + def __str__(self): + return "cargo" + + def install(self, requirements): + from ..requirements import CargoCrateRequirement + missing = [] + for requirement in requirements: + if not isinstance(requirement, CargoCrateRequirement): + missing.append(requirement) + continue + self.session.check_call( + ["cargo", "install", requirement.crate], + user="root") + if missing: + raise MissingDependencies(missing) + + def explain(self, requirements): + raise NotImplementedError(self.explain) + + def met(self, requirement): + raise NotImplementedError(self.met) + + +class PypiResolver(Resolver): + + def __init__(self, session): + self.session = session + + def __str__(self): + return "pypi" + def install(self, requirements): from ..requirements import PythonPackageRequirement missing = [] @@ -89,11 +123,14 @@ NPM_COMMAND_PACKAGES = { } -class NpmResolver(object): +class NpmResolver(Resolver): def __init__(self, session): self.session = 
session + def __str__(self): + return "npm" + def install(self, requirements): from ..requirements import NodePackageRequirement missing = [] @@ -121,6 +158,9 @@ class StackedResolver(Resolver): def __init__(self, subs): self.subs = subs + def __str__(self): + return "[" + ", ".join(map(str, self.subs)) + "]" + def install(self, requirements): for sub in self.subs: try: @@ -135,7 +175,8 @@ def native_resolvers(session): return StackedResolver([ CPANResolver(session), PypiResolver(session), - NpmResolver(session)]) + NpmResolver(session), + CargoResolver(session)]) class ExplainResolver(Resolver): @@ -150,19 +191,17 @@ class ExplainResolver(Resolver): raise MissingDependencies(requirements) -class AutoResolver(Resolver): - """Automatically find out the most appropriate way to install dependencies. - """ - - def __init__(self, session): - self.session = session - - @classmethod - def from_session(cls, session): - return cls(session) - - def install(self, requirements): - raise NotImplementedError(self.install) - - def explain(self, requirements): - raise NotImplementedError(self.explain) +def auto_resolver(session): + # TODO(jelmer): if session is SchrootSession or if we're root, use apt + from .apt import AptResolver + from ..session.schroot import SchrootSession + user = session.check_output(['echo', '$USER']).decode().strip() + resolvers = [] + if isinstance(session, SchrootSession) or user == 'root': + resolvers.append(AptResolver.from_session(session)) + resolvers.extend([ + CPANResolver(session), + PypiResolver(session), + NpmResolver(session), + CargoResolver(session)]) + return StackedResolver(resolvers) diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index e34be07..49d9470 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -53,10 +53,6 @@ from ..requirements import ( ) -class NoAptPackage(Exception): - """No apt package.""" - - class AptRequirement(object): def __init__(self, package, minimum_version=None): @@ 
-161,15 +157,13 @@ def resolve_binary_req(apt_mgr, req): def resolve_pkg_config_req(apt_mgr, req): package = apt_mgr.get_package_for_paths( [posixpath.join("/usr/lib/pkgconfig", req.module + ".pc")], - req.minimum_version ) if package is None: package = apt_mgr.get_package_for_paths( [posixpath.join("/usr/lib", ".*", "pkgconfig", req.module + ".pc")], - regex=True, - minimum_version=req.minimum_version) + regex=True) if package is not None: - return AptRequirement(package) + return AptRequirement(package, minimum_version=req.minimum_version) return None @@ -502,10 +496,7 @@ APT_REQUIREMENT_RESOLVERS = [ def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement) -> AptRequirement: for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS: if isinstance(req, rr_class): - deb_req = rr_fn(apt_mgr, req) - if deb_req is None: - raise NoAptPackage(req) - return deb_req + return rr_fn(apt_mgr, req) raise NotImplementedError(type(req)) @@ -514,6 +505,9 @@ class AptResolver(Resolver): def __init__(self, apt): self.apt = apt + def __str__(self): + return "apt" + @classmethod def from_session(cls, session): return cls(AptManager(session)) @@ -530,10 +524,11 @@ class AptResolver(Resolver): still_missing = [] apt_requirements = [] for m in missing: - try: - apt_requirements.append(self.resolve(m)) - except NoAptPackage: + apt_req = self.resolve(m) + if apt_req is None: still_missing.append(m) + else: + apt_requirements.append(m) self.apt.install( [req.package for req in apt_requirements]) if still_missing: diff --git a/ognibuild/session/plain.py b/ognibuild/session/plain.py index 7a1eb6c..084fa1b 100644 --- a/ognibuild/session/plain.py +++ b/ognibuild/session/plain.py @@ -33,6 +33,9 @@ class PlainSession(Session): def check_call(self, args): return subprocess.check_call(args) + def check_output(self, args): + return subprocess.check_output(args) + def Popen(self, args, stdout=None, stderr=None, user=None, cwd=None): return subprocess.Popen(args, stdout=stdout, stderr=stderr, cwd=cwd) 
diff --git a/ognibuild/test.py b/ognibuild/test.py index 8560347..750143f 100644 --- a/ognibuild/test.py +++ b/ognibuild/test.py @@ -18,13 +18,13 @@ from .buildsystem import NoBuildToolsFound -def run_test(session, buildsystems, resolver): +def run_test(session, buildsystems, resolver, fixers): # Some things want to write to the user's home directory, # e.g. pip caches in ~/.cache session.create_home() for buildsystem in buildsystems: - buildsystem.test(session, resolver) + buildsystem.test(session, resolver, fixers) return raise NoBuildToolsFound() diff --git a/ognibuild/tests/test_debian_fix_build.py b/ognibuild/tests/test_debian_fix_build.py index c978008..6246c03 100644 --- a/ognibuild/tests/test_debian_fix_build.py +++ b/ognibuild/tests/test_debian_fix_build.py @@ -31,7 +31,7 @@ from buildlog_consultant.common import ( MissingValaPackage, ) from ..debian import apt -from ..debian.apt import AptManager +from ..debian.apt import AptManager, FileSearcher from ..debian.fix_build import ( resolve_error, versioned_package_fixers, @@ -41,6 +41,21 @@ from ..debian.fix_build import ( from breezy.tests import TestCaseWithTransport +class DummyAptSearcher(FileSearcher): + + def __init__(self, files): + self._apt_files = files + + def search_files(self, path, regex=False): + for p, pkg in sorted(self._apt_files.items()): + if regex: + if re.match(path, p): + yield pkg + else: + if path == p: + yield pkg + + class ResolveErrorTests(TestCaseWithTransport): def setUp(self): super(ResolveErrorTests, self).setUp() @@ -76,21 +91,13 @@ blah (0.1) UNRELEASED; urgency=medium ) self.tree.add(["debian", "debian/control", "debian/changelog"]) self.tree.commit("Initial commit") - self.overrideAttr(apt, "search_apt_file", self._search_apt_file) self._apt_files = {} - def _search_apt_file(self, path, regex=False): - for p, pkg in sorted(self._apt_files.items()): - if regex: - if re.match(path, p): - yield pkg - else: - if path == p: - yield pkg - def resolve(self, error, 
context=("build",)): from ..session.plain import PlainSession - apt = AptManager(PlainSession()) + session = PlainSession() + apt = AptManager(session) + apt._searchers = [DummyAptSearcher(self._apt_files)] context = BuildDependencyContext( self.tree, apt, @@ -98,7 +105,8 @@ blah (0.1) UNRELEASED; urgency=medium committer="ognibuild ", update_changelog=True, ) - return resolve_error(error, context, versioned_package_fixers() + apt_fixers(apt)) + fixers = versioned_package_fixers(session) + apt_fixers(apt) + return resolve_error(error, context, fixers) def get_build_deps(self): with open(self.tree.abspath("debian/control"), "r") as f: diff --git a/setup.py b/setup.py index d78a7ae..26a9cce 100755 --- a/setup.py +++ b/setup.py @@ -30,9 +30,10 @@ setup(name="ognibuild", install_requires=[ 'breezy', 'buildlog-consultant', - 'python_debian', - 'debmutate', ], + extras_require={ + 'debian': ['debmutate', 'python_debian', 'python_apt'], + }, tests_require=['python_debian', 'buildlog-consultant', 'breezy'], test_suite='ognibuild.tests.test_suite', ) From 7c61fa0e43d496d539e08b186ada46c84d1b4aa1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 27 Feb 2021 16:20:07 +0000 Subject: [PATCH 80/83] Some more refactoring. 
--- ognibuild/__main__.py | 2 +- ognibuild/buildsystem.py | 5 +---- ognibuild/dist.py | 14 ++++++++++++-- ognibuild/resolver/__init__.py | 14 +++++++------- ognibuild/resolver/apt.py | 29 +++++++++++++++-------------- ognibuild/session/schroot.py | 3 +++ 6 files changed, 39 insertions(+), 28 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index 3db88bf..f7b61f7 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -23,7 +23,7 @@ from .buildsystem import NoBuildToolsFound, detect_buildsystems from .resolver import ( auto_resolver, native_resolvers, - MissingDependencies, + UnsatisfiedRequirements, ) from .resolver.apt import AptResolver diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 37261aa..5304573 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -117,7 +117,6 @@ class SetupPy(BuildSystem): return "%s(%r)" % (type(self).__name__, self.path) def setup(self, resolver): - resolver.install([PythonPackageRequirement('pip')]) with open(self.path, "r") as f: setup_py_contents = f.read() try: @@ -222,10 +221,9 @@ class PyProject(BuildSystem): resolver.install( [ PythonPackageRequirement("venv"), - PythonPackageRequirement("pip"), + PythonPackageRequirement("poetry"), ] ) - session.check_call(["pip3", "install", "poetry"], user="root") session.check_call(["poetry", "build", "-f", "sdist"]) return raise AssertionError("no supported section in pyproject.toml") @@ -242,7 +240,6 @@ class SetupCfg(BuildSystem): resolver.install( [ PythonPackageRequirement("pep517"), - PythonPackageRequirement("pip"), ] ) diff --git a/ognibuild/dist.py b/ognibuild/dist.py index cfe38d2..2349fe7 100644 --- a/ognibuild/dist.py +++ b/ognibuild/dist.py @@ -125,6 +125,7 @@ def create_dist_schroot( ) -> str: from .buildsystem import detect_buildsystems from .resolver.apt import AptResolver + from .buildlog import UpstreamRequirementFixer if subdir is None: subdir = "package" @@ -150,13 +151,14 @@ def create_dist_schroot( 
buildsystems = list(detect_buildsystems(export_directory)) resolver = AptResolver.from_session(session) + fixers = [UpstreamRequirementFixer(resolver)] with DistCatcher(export_directory) as dc: oldcwd = os.getcwd() os.chdir(export_directory) try: session.chdir(os.path.join(reldir, subdir)) - run_dist(session, buildsystems, resolver) + run_dist(session, buildsystems, resolver, fixers) finally: os.chdir(oldcwd) @@ -194,9 +196,17 @@ if __name__ == "__main__": parser.add_argument( "--target-directory", type=str, default="..", help="Target directory" ) + parser.add_argument( + "--verbose", + action="store_true", + help="Be verbose") + args = parser.parse_args() - logging.basicConfig(level=logging.INFO) + if args.verbose: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) tree = WorkingTree.open(args.directory) if args.packaging_directory: diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py index dd12b60..93074ab 100644 --- a/ognibuild/resolver/__init__.py +++ b/ognibuild/resolver/__init__.py @@ -16,7 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -class MissingDependencies(Exception): +class UnsatisfiedRequirements(Exception): def __init__(self, reqs): self.requirements = reqs @@ -55,7 +55,7 @@ class CPANResolver(Resolver): user="root", env={"PERL_MM_USE_DEFAULT": "1"} ) if missing: - raise MissingDependencies(missing) + raise UnsatisfiedRequirements(missing) def explain(self, requirements): raise NotImplementedError(self.explain) @@ -83,7 +83,7 @@ class CargoResolver(Resolver): ["cargo", "install", requirement.crate], user="root") if missing: - raise MissingDependencies(missing) + raise UnsatisfiedRequirements(missing) def explain(self, requirements): raise NotImplementedError(self.explain) @@ -109,7 +109,7 @@ class PypiResolver(Resolver): continue self.session.check_call(["pip", "install", requirement.package]) if missing: - raise MissingDependencies(missing) + 
raise UnsatisfiedRequirements(missing) def explain(self, requirements): raise NotImplementedError(self.explain) @@ -145,7 +145,7 @@ class NpmResolver(Resolver): continue self.session.check_call(["npm", "-g", "install", package]) if missing: - raise MissingDependencies(missing) + raise UnsatisfiedRequirements(missing) def explain(self, requirements): raise NotImplementedError(self.explain) @@ -165,7 +165,7 @@ class StackedResolver(Resolver): for sub in self.subs: try: sub.install(requirements) - except MissingDependencies as e: + except UnsatisfiedRequirements as e: requirements = e.requirements else: return @@ -188,7 +188,7 @@ class ExplainResolver(Resolver): return cls(session) def install(self, requirements): - raise MissingDependencies(requirements) + raise UnsatisfiedRequirements(requirements) def auto_resolver(session): diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index 49d9470..fc11525 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -21,7 +21,7 @@ import posixpath from ..debian.apt import AptManager -from . import Resolver, MissingDependencies +from . 
import Resolver, UnsatisfiedRequirements from ..requirements import ( BinaryRequirement, CHeaderRequirement, @@ -520,19 +520,20 @@ class AptResolver(Resolver): missing.append(req) except NotImplementedError: missing.append(req) - if missing: - still_missing = [] - apt_requirements = [] - for m in missing: - apt_req = self.resolve(m) - if apt_req is None: - still_missing.append(m) - else: - apt_requirements.append(m) - self.apt.install( - [req.package for req in apt_requirements]) - if still_missing: - raise MissingDependencies(still_missing) + if not missing: + return + still_missing = [] + apt_requirements = [] + for m in missing: + apt_req = self.resolve(m) + if apt_req is None: + still_missing.append(m) + else: + apt_requirements.append(m) + self.apt.install( + [req.package for req in apt_requirements]) + if still_missing: + raise UnsatisfiedRequirements(still_missing) def explain(self, requirements): raise NotImplementedError(self.explain) diff --git a/ognibuild/session/schroot.py b/ognibuild/session/schroot.py index 1b1b645..8941844 100644 --- a/ognibuild/session/schroot.py +++ b/ognibuild/session/schroot.py @@ -61,6 +61,9 @@ class SchrootSession(Session): except subprocess.CalledProcessError: # TODO(jelmer): Capture stderr and forward in SessionSetupFailure raise SessionSetupFailure() + logging.info( + 'Opened schroot session %s (from %s)', self.session_id, + self.chroot) return self def __exit__(self, exc_type, exc_val, exc_tb): From a963db22beeabf888ce68938480c6303a716c694 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 27 Feb 2021 16:35:32 +0000 Subject: [PATCH 81/83] Fix schroot operation. 
--- ognibuild/debian/apt.py | 52 ++++++++++++++++++++++----------------- ognibuild/resolver/apt.py | 6 ++--- 2 files changed, 33 insertions(+), 25 deletions(-) diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py index c4e2938..1b33327 100644 --- a/ognibuild/debian/apt.py +++ b/ognibuild/debian/apt.py @@ -121,15 +121,18 @@ class RemoteAptContentsFileSearcher(FileSearcher): sl.load(os.path.join(session.location, 'etc/apt/sources.list')) return cls.from_sources_list( sl, - cache_dir=os.path.join(session.location, 'var/lib/apt/lists')) + cache_dirs=[ + os.path.join(session.location, 'var/lib/apt/lists'), + '/var/lib/apt/lists']) def __setitem__(self, path, package): self._db[path] = package def search_files(self, path, regex=False): + c = re.compile(path) for p, pkg in sorted(self._db.items()): if regex: - if re.match(path, p): + if c.match(p): yield pkg else: if path == p: @@ -149,37 +152,42 @@ class RemoteAptContentsFileSearcher(FileSearcher): p = os.path.join( cache_dir, parsed.hostname + parsed.path.replace('/', '_') + '.lz4') - logging.debug('Loading cached contents file %s', p) if not os.path.exists(p): return None + logging.debug('Loading cached contents file %s', p) import lz4.frame return lz4.frame.open(p, mode='rb') @classmethod - def from_urls(cls, urls, cache_dir=None): + def from_urls(cls, urls, cache_dirs=None): self = cls() for url, mandatory in urls: - f = cls._load_cache_file(url, cache_dir) - if f is not None: - self.load_file(f) - elif not mandatory and self._db: - logging.debug( - 'Not attempting to fetch optional contents file %s', url) + for cache_dir in cache_dirs or []: + f = cls._load_cache_file(url, cache_dir) + if f is not None: + self.load_file(f) + break else: - logging.debug('Fetching contents file %s', url) - try: - self.load_url(url) - except ContentsFileNotFound: - if mandatory: - logging.warning( - 'Unable to fetch contents file %s', url) - else: - logging.debug( - 'Unable to fetch optional contents file %s', url) + if not 
mandatory and self._db: + logging.debug( + 'Not attempting to fetch optional contents ' + 'file %s', url) + else: + logging.debug('Fetching contents file %s', url) + try: + self.load_url(url) + except ContentsFileNotFound: + if mandatory: + logging.warning( + 'Unable to fetch contents file %s', url) + else: + logging.debug( + 'Unable to fetch optional contents file %s', + url) return self @classmethod - def from_sources_list(cls, sl, cache_dir=None): + def from_sources_list(cls, sl, cache_dirs=None): # TODO(jelmer): Use aptsources.sourceslist.SourcesList from .build import get_build_architecture # TODO(jelmer): Verify signatures, etc. @@ -210,7 +218,7 @@ class RemoteAptContentsFileSearcher(FileSearcher): for arch, mandatory in arches: urls.append( ("%s/%s/Contents-%s" % (dists_url, name.rstrip('/'), arch), mandatory)) - return cls.from_urls(urls, cache_dir=cache_dir) + return cls.from_urls(urls, cache_dirs=cache_dirs) @staticmethod def _get(url): diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py index fc11525..4439bfa 100644 --- a/ognibuild/resolver/apt.py +++ b/ognibuild/resolver/apt.py @@ -529,9 +529,9 @@ class AptResolver(Resolver): if apt_req is None: still_missing.append(m) else: - apt_requirements.append(m) - self.apt.install( - [req.package for req in apt_requirements]) + apt_requirements.append(apt_req) + if apt_requirements: + self.apt.install([r.package for r in apt_requirements]) if still_missing: raise UnsatisfiedRequirements(still_missing) From 27a02686d6e496998a66d6f33199ac6fffbbde3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sat, 27 Feb 2021 17:23:51 +0000 Subject: [PATCH 82/83] Add hackage repository support. 
--- ognibuild/__main__.py | 14 ++++-- ognibuild/buildlog.py | 32 ++++++++---- ognibuild/buildsystem.py | 89 ++++++++++++++++++++++------------ ognibuild/fix_build.py | 2 +- ognibuild/resolver/__init__.py | 44 ++++++++++++----- 5 files changed, 122 insertions(+), 59 deletions(-) diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py index f7b61f7..f253b97 100644 --- a/ognibuild/__main__.py +++ b/ognibuild/__main__.py @@ -38,11 +38,17 @@ def get_necessary_declared_requirements(resolver, requirements, stages): def install_necessary_declared_requirements(resolver, buildsystem, stages): missing = [] - missing.extend( - get_necessary_declared_requirements( - resolver, buildsystem.get_declared_dependencies(), stages + try: + declared_reqs = buildsystem.get_declared_dependencies() + except NotImplementedError: + logging.warning( + 'Unable to determine declared dependencies from %s', buildsystem) + else: + missing.extend( + get_necessary_declared_requirements( + resolver, declared_reqs, stages + ) ) - ) resolver.install(missing) diff --git a/ognibuild/buildlog.py b/ognibuild/buildlog.py index ae358d5..b2a90db 100644 --- a/ognibuild/buildlog.py +++ b/ognibuild/buildlog.py @@ -124,11 +124,10 @@ def problem_to_upstream_requirement(problem): elif isinstance(problem, MissingJavaClass): return JavaClassRequirement(problem.classname) elif isinstance(problem, MissingHaskellDependencies): - # TODO(jelmer): Create multiple HaskellPackageRequirement objects? - return HaskellPackageRequirement(problem.package) + return [HaskellPackageRequirement(dep) for dep in problem.deps] elif isinstance(problem, MissingMavenArtifacts): - # TODO(jelmer): Create multiple MavenArtifactRequirement objects? 
- return MavenArtifactRequirement(problem.artifacts) + return [MavenArtifactRequirement(artifact) + for artifact in problem.artifacts] elif isinstance(problem, MissingCSharpCompiler): return BinaryRequirement('msc') elif isinstance(problem, GnomeCommonMissing): @@ -179,16 +178,29 @@ class UpstreamRequirementFixer(BuildFixer): def __init__(self, resolver): self.resolver = resolver + def __repr__(self): + return "%s(%r)" % (type(self).__name__, self.resolver) + + def __str__(self): + return "upstream requirement fixer(%s)" % self.resolver + def can_fix(self, error): req = problem_to_upstream_requirement(error) return req is not None def fix(self, error, context): - req = problem_to_upstream_requirement(error) - if req is None: + reqs = problem_to_upstream_requirement(error) + if reqs is None: return False - package = self.resolver.resolve(req) - if package is None: - return False - return context.add_dependency(package) + if not isinstance(reqs, list): + reqs = [reqs] + + changed = False + for req in reqs: + package = self.resolver.resolve(reqs) + if package is None: + return False + if context.add_dependency(package): + changed = True + return changed diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py index 5304573..60309e6 100644 --- a/ognibuild/buildsystem.py +++ b/ognibuild/buildsystem.py @@ -51,6 +51,9 @@ class BuildSystem(object): name: str + def __str__(self): + return self.name + def dist(self, session, resolver, fixers): raise NotImplementedError(self.dist) @@ -125,13 +128,13 @@ class SetupPy(BuildSystem): except FileNotFoundError: setup_cfg_contents = "" if "setuptools" in setup_py_contents: - logging.info("Reference to setuptools found, installing.") + logging.debug("Reference to setuptools found, installing.") resolver.install([PythonPackageRequirement("setuptools")]) if ( "setuptools_scm" in setup_py_contents or "setuptools_scm" in setup_cfg_contents ): - logging.info("Reference to setuptools-scm found, installing.") + 
logging.debug("Reference to setuptools-scm found, installing.") resolver.install( [ PythonPackageRequirement("setuptools-scm"), @@ -215,8 +218,9 @@ class PyProject(BuildSystem): def dist(self, session, resolver, fixers): if "poetry" in self.pyproject.get("tool", []): - logging.info( - "Found pyproject.toml with poetry section, " "assuming poetry project." + logging.debug( + "Found pyproject.toml with poetry section, " + "assuming poetry project." ) resolver.install( [ @@ -279,13 +283,17 @@ class Waf(BuildSystem): def __init__(self, path): self.path = path - def setup(self, resolver): + def setup(self, session, resolver, fixers): resolver.install([BinaryRequirement("python3")]) def dist(self, session, resolver, fixers): - self.setup(resolver) + self.setup(session, resolver, fixers) run_with_build_fixers(session, ["./waf", "dist"], fixers) + def test(self, session, resolver, fixers): + self.setup(session, resolver, fixers) + run_with_build_fixers(session, ["./waf", "test"], fixers) + class Gem(BuildSystem): @@ -321,13 +329,14 @@ class DistInkt(BuildSystem): except ValueError: continue if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"): - logging.info( - "Found Dist::Inkt section in dist.ini, " "assuming distinkt." + logging.debug( + "Found Dist::Inkt section in dist.ini, " + "assuming distinkt." 
) self.name = "dist-inkt" self.dist_inkt_class = value.decode().strip("'") return - logging.info("Found dist.ini, assuming dist-zilla.") + logging.debug("Found dist.ini, assuming dist-zilla.") def setup(self, resolver): resolver.install( @@ -397,15 +406,19 @@ class Make(BuildSystem): session.check_call(["./configure"]) def build(self, session, resolver, fixers): - self.setup(session, resolver) + self.setup(session, resolver, fixers) run_with_build_fixers(session, ["make", "all"], fixers) + def test(self, session, resolver, fixers): + self.setup(session, resolver, fixers) + run_with_build_fixers(session, ["make", "check"], fixers) + def install(self, session, resolver, fixers, install_target): - self.setup(session, resolver) + self.setup(session, resolver, fixers) run_with_build_fixers(session, ["make", "install"], fixers) def dist(self, session, resolver, fixers): - self.setup(session, resolver) + self.setup(session, resolver, fixers) try: run_with_build_fixers(session, ["make", "dist"], fixers) except UnidentifiedError as e: @@ -491,6 +504,9 @@ class Cargo(BuildSystem): # TODO(jelmer): Look at details['features'], details['version'] yield "build", CargoCrateRequirement(name) + def test(self, session, resolver, fixers): + run_with_build_fixers(session, ["cargo", "test"], fixers) + class Golang(BuildSystem): """Go builds.""" @@ -513,37 +529,59 @@ class Cabal(BuildSystem): def __init__(self, path): self.path = path + def __repr__(self): + return "%s(%r)" % (type(self).__name__, self.path) + + def _run(self, session, args, fixers): + try: + run_with_build_fixers( + session, ["runhaskell", "Setup.hs"] + args, fixers) + except UnidentifiedError as e: + if "Run the 'configure' command first.\n" in e.lines: + run_with_build_fixers( + session, ["runhaskell", "Setup.hs", "configure"], fixers) + run_with_build_fixers( + session, ["runhaskell", "Setup.hs"] + args, fixers) + else: + raise + + def test(self, session, resolver, fixers): + self._run(session, ["test"], fixers) def 
detect_buildsystems(path, trust_package=False): # noqa: C901 """Detect build systems.""" if os.path.exists(os.path.join(path, "package.xml")): - logging.info("Found package.xml, assuming pear package.") + logging.debug("Found package.xml, assuming pear package.") yield Pear("package.xml") if os.path.exists(os.path.join(path, "setup.py")): - logging.info("Found setup.py, assuming python project.") + logging.debug("Found setup.py, assuming python project.") yield SetupPy("setup.py") elif os.path.exists(os.path.join(path, "pyproject.toml")): - logging.info("Found pyproject.toml, assuming python project.") + logging.debug("Found pyproject.toml, assuming python project.") yield PyProject("pyproject.toml") elif os.path.exists(os.path.join(path, "setup.cfg")): - logging.info("Found setup.cfg, assuming python project.") + logging.debug("Found setup.cfg, assuming python project.") yield SetupCfg("setup.cfg") if os.path.exists(os.path.join(path, "package.json")): - logging.info("Found package.json, assuming node package.") + logging.debug("Found package.json, assuming node package.") yield Npm("package.json") if os.path.exists(os.path.join(path, "waf")): - logging.info("Found waf, assuming waf package.") + logging.debug("Found waf, assuming waf package.") yield Waf("waf") if os.path.exists(os.path.join(path, "Cargo.toml")): - logging.info("Found Cargo.toml, assuming rust cargo package.") + logging.debug("Found Cargo.toml, assuming rust cargo package.") yield Cargo("Cargo.toml") + if os.path.exists(os.path.join(path, 'Setup.hs')): + logging.debug("Found Setup.hs, assuming haskell package.") + yield Cabal('Setup.hs') + if os.path.exists(os.path.join(path, "pom.xml")): - logging.info("Found pom.xml, assuming maven package.") + logging.debug("Found pom.xml, assuming maven package.") yield Maven("pom.xml") if os.path.exists(os.path.join(path, "dist.ini")) and not os.path.exists( @@ -569,17 +607,6 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901 ): yield Make() 
- cabal_filenames = [ - entry.name for entry in os.scandir(path) if entry.name.endswith(".cabal") - ] - if cabal_filenames: - if len(cabal_filenames) == 1: - yield Cabal(cabal_filenames[0]) - else: - warnings.warn( - "More than one cabal filename, ignoring all: %r" % cabal_filenames - ) - if os.path.exists(os.path.join(path, ".travis.yml")): import ruamel.yaml.reader diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py index 5520e31..d46016d 100644 --- a/ognibuild/fix_build.py +++ b/ognibuild/fix_build.py @@ -120,7 +120,7 @@ def resolve_error(error, context, fixers): logging.warning("No fixer found for %r", error) return False for fixer in relevant_fixers: - logging.info("Attempting to use fixer %r to address %r", fixer, error) + logging.info("Attempting to use fixer %s to address %r", fixer, error) made_changes = fixer.fix(error, context) if made_changes: return True diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py index 93074ab..6dc48cc 100644 --- a/ognibuild/resolver/__init__.py +++ b/ognibuild/resolver/__init__.py @@ -60,8 +60,30 @@ class CPANResolver(Resolver): def explain(self, requirements): raise NotImplementedError(self.explain) - def met(self, requirement): - raise NotImplementedError(self.met) + +class HackageResolver(Resolver): + + def __init__(self, session): + self.session = session + + def __str__(self): + return "hackage" + + def install(self, requirements): + from ..requirements import HaskellPackageRequirement + missing = [] + for requirement in requirements: + if not isinstance(requirement, HaskellPackageRequirement): + missing.append(requirement) + continue + self.session.check_call( + ["cabal", "install", requirement.package], + user="root") + if missing: + raise UnsatisfiedRequirements(missing) + + def explain(self, requirements): + raise NotImplementedError(self.explain) class CargoResolver(Resolver): @@ -88,9 +110,6 @@ class CargoResolver(Resolver): def explain(self, requirements): raise 
NotImplementedError(self.explain) - def met(self, requirement): - raise NotImplementedError(self.met) - class PypiResolver(Resolver): @@ -114,9 +133,6 @@ class PypiResolver(Resolver): def explain(self, requirements): raise NotImplementedError(self.explain) - def met(self, requirement): - raise NotImplementedError(self.met) - NPM_COMMAND_PACKAGES = { "del-cli": "del-cli", @@ -150,14 +166,14 @@ class NpmResolver(Resolver): def explain(self, requirements): raise NotImplementedError(self.explain) - def met(self, requirement): - raise NotImplementedError(self.met) - class StackedResolver(Resolver): def __init__(self, subs): self.subs = subs + def __repr__(self): + return "%s(%r)" % (type(self).__name__, self.subs) + def __str__(self): return "[" + ", ".join(map(str, self.subs)) + "]" @@ -176,7 +192,8 @@ def native_resolvers(session): CPANResolver(session), PypiResolver(session), NpmResolver(session), - CargoResolver(session)]) + CargoResolver(session), + HackageResolver(session)]) class ExplainResolver(Resolver): @@ -203,5 +220,6 @@ def auto_resolver(session): CPANResolver(session), PypiResolver(session), NpmResolver(session), - CargoResolver(session)]) + CargoResolver(session), + HackageResolver(session)]) return StackedResolver(resolvers) From 2528295181266e44bf1c272b00c316406b77f609 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Sun, 28 Feb 2021 14:49:17 +0000 Subject: [PATCH 83/83] Add definition of resolve. --- ognibuild/resolver/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py index 6dc48cc..bd72c51 100644 --- a/ognibuild/resolver/__init__.py +++ b/ognibuild/resolver/__init__.py @@ -27,6 +27,9 @@ class Resolver(object): def install(self, requirements): raise NotImplementedError(self.install) + def resolve(self, requirement): + raise NotImplementedError(self.resolve) + def explain(self, requirements): raise NotImplementedError(self.explain)