diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 9c0a80c..0725c4c 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -28,6 +28,7 @@ jobs:
         python -m pip install wheel
         python -m pip install git+https://salsa.debian.org/apt-team/python-apt
         python -m pip install -e ".[debian]"
+        python -m pip install testtools
         mkdir -p ~/.config/breezy/plugins
         brz branch lp:brz-debian ~/.config/breezy/plugins/debian
       if: "matrix.python-version != 'pypy3' && matrix.os == 'ubuntu-latest'"
diff --git a/PKG-INFO b/PKG-INFO
index f7c7bb4..2ec5144 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ognibuild
-Version: 0.0.3
+Version: 0.0.4
 Summary: Detect and run any build system
 Home-page: https://jelmer.uk/code/ognibuild
 Maintainer: Jelmer Vernooij
diff --git a/README.md b/README.md
index 2eb483d..24951b2 100644
--- a/README.md
+++ b/README.md
@@ -54,6 +54,8 @@ issues (or lack of support for a particular ecosystem), please file a bug.
  - Octave
  - Perl
    - Module::Build::Tiny
+   - Dist::Zilla
+   - Minilla
  - PHP Pear
  - Python - setup.py/setup.cfg/pyproject.toml
  - R
diff --git a/ognibuild.egg-info/PKG-INFO b/ognibuild.egg-info/PKG-INFO
index f7c7bb4..2ec5144 100644
--- a/ognibuild.egg-info/PKG-INFO
+++ b/ognibuild.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ognibuild
-Version: 0.0.3
+Version: 0.0.4
 Summary: Detect and run any build system
 Home-page: https://jelmer.uk/code/ognibuild
 Maintainer: Jelmer Vernooij
diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py
index ab42dbb..02b30ed 100644
--- a/ognibuild/__init__.py
+++ b/ognibuild/__init__.py
@@ -20,7 +20,7 @@
 import os
 import stat

-__version__ = (0, 0, 3)
+__version__ = (0, 0, 4)

 USER_AGENT = "Ognibuild"
diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py
index 8f82ca2..0de3307 100644
--- a/ognibuild/__main__.py
+++ b/ognibuild/__main__.py
@@ -150,6 +150,10 @@ def main():  # noqa: C901
         external_dir, internal_dir = session.setup_from_directory(args.directory)
         session.chdir(internal_dir)
         os.chdir(external_dir)
+
+        if not session.is_temporary and args.subcommand == 'info':
+            args.explain = True
+
         if args.resolve == "apt":
             resolver = AptResolver.from_session(session)
         elif args.resolve == "native":
@@ -211,7 +215,7 @@ def main():  # noqa: C901
             if args.subcommand == "info":
                 from .info import run_info

-                run_info(session, buildsystems=bss)
+                run_info(session, buildsystems=bss, fixers=fixers)
     except ExplainInstall as e:
         display_explain_commands(e.commands)
     except (UnidentifiedError, DetailedFailure):
diff --git a/ognibuild/buildlog.py b/ognibuild/buildlog.py
index fabb279..e3110ee 100644
--- a/ognibuild/buildlog.py
+++ b/ognibuild/buildlog.py
@@ -53,6 +53,7 @@ from buildlog_consultant.common import (
     MissingVagueDependency,
     DhAddonLoadFailure,
     MissingMavenArtifacts,
+    MissingIntrospectionTypelib,
     GnomeCommonMissing,
     MissingGnomeCommonDependency,
     UnknownCertificateAuthority,
@@ -60,7 +61,11 @@ from buildlog_consultant.common import (
     MissingLibtool,
     MissingQt,
     MissingX11,
+    MissingPerlPredeclared,
+    MissingLatexFile,
+    MissingCargoCrate,
 )
+from buildlog_consultant.apt import UnsatisfiedAptDependencies

 from .fix_build import BuildFixer
 from .requirements import (
@@ -99,6 +104,10 @@ from .requirements import (
     X11Requirement,
     LibtoolRequirement,
     VagueDependencyRequirement,
+    IntrospectionTypelibRequirement,
+    PerlPreDeclaredRequirement,
+    LatexPackageRequirement,
+    CargoCrateRequirement,
 )
 from .resolver import UnsatisfiedRequirements
@@ -112,6 +121,8 @@ def problem_to_upstream_requirement(problem):  # noqa: C901
         return PkgConfigRequirement(problem.module, problem.minimum_version)
     elif isinstance(problem, MissingCHeader):
         return CHeaderRequirement(problem.header)
+    elif isinstance(problem, MissingIntrospectionTypelib):
+        return IntrospectionTypelibRequirement(problem.library)
     elif isinstance(problem, MissingJavaScriptRuntime):
         return JavaScriptRuntimeRequirement()
     elif isinstance(problem, MissingRubyGem):
@@ -130,6 +141,10 @@ def problem_to_upstream_requirement(problem):  # noqa: C901
         return NodeModuleRequirement(problem.module)
     elif isinstance(problem, MissingNodePackage):
         return NodePackageRequirement(problem.package)
+    elif isinstance(problem, MissingLatexFile):
+        if problem.filename.endswith('.sty'):
+            return LatexPackageRequirement(problem.filename[:-4])
+        return None
     elif isinstance(problem, MissingVagueDependency):
         return VagueDependencyRequirement(problem.name, minimum_version=problem.minimum_version)
     elif isinstance(problem, MissingLibrary):
@@ -169,6 +184,11 @@ def problem_to_upstream_requirement(problem):  # noqa: C901
         return LibtoolRequirement()
     elif isinstance(problem, UnknownCertificateAuthority):
         return CertificateAuthorityRequirement(problem.url)
+    elif isinstance(problem, MissingPerlPredeclared):
+        return PerlPreDeclaredRequirement(problem.name)
+    elif isinstance(problem, MissingCargoCrate):
+        # TODO(jelmer): handle problem.requirements
+        return CargoCrateRequirement(problem.crate)
     elif isinstance(problem, MissingSetupPyCommand):
         if problem.command == "test":
             return PythonPackageRequirement("setuptools")
@@ -207,6 +227,9 @@ def problem_to_upstream_requirement(problem):  # noqa: C901
             python_version=problem.python_version,
             minimum_version=problem.minimum_version,
         )
+    elif isinstance(problem, UnsatisfiedAptDependencies):
+        from .resolver.apt import AptRequirement
+        return AptRequirement(problem.relations)
     else:
         return None
diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py
index 0776e57..07562df 100644
--- a/ognibuild/buildsystem.py
+++ b/ognibuild/buildsystem.py
@@ -118,6 +118,11 @@ class Pear(BuildSystem):

     name = "pear"

+    PEAR_NAMESPACES = [
+        "http://pear.php.net/dtd/package-2.0",
+        "http://pear.php.net/dtd/package-2.1",
+    ]
+
     def __init__(self, path):
         self.path = path

@@ -146,10 +151,7 @@ class Pear(BuildSystem):
         try:
             root = xmlparse_simplify_namespaces(
                 path,
-                [
-                    "http://pear.php.net/dtd/package-2.0",
-                    "http://pear.php.net/dtd/package-2.1",
-                ],
+                self.PEAR_NAMESPACES
             )
         except ET.ParseError as e:
             logging.warning("Unable to parse package.xml: %s", e)
@@ -173,9 +175,25 @@ class Pear(BuildSystem):

     @classmethod
     def probe(cls, path):
-        if os.path.exists(os.path.join(path, "package.xml")):
-            logging.debug("Found package.xml, assuming pear package.")
-            return cls(os.path.join(path, "package.xml"))
+        package_xml_path = os.path.join(path, "package.xml")
+        if not os.path.exists(package_xml_path):
+            return
+
+        import xml.etree.ElementTree as ET
+        try:
+            tree = ET.iterparse(package_xml_path)
+        except ET.ParseError as e:
+            logging.warning("Unable to parse package.xml: %s", e)
+            return
+
+        if not tree.root:
+            # No root?
+            return
+
+        for ns in cls.PEAR_NAMESPACES:
+            if tree.root.tag == '{%s}package' % ns:
+                logging.debug("Found package.xml with namespace %s, assuming pear package.", ns)
+                return cls(path)


 # run_setup, but setting __name__
@@ -221,7 +239,6 @@
 import sys

 script_name = %(script_name)s

-save_argv = sys.argv.copy()
 g = {"__file__": script_name, "__name__": "__main__"}
 try:
     core._setup_stop_after = "init"
@@ -522,9 +539,6 @@ class SetupPy(BuildSystem):
         if os.path.exists(os.path.join(path, "setup.py")):
             logging.debug("Found setup.py, assuming python project.")
             return cls(path)
-        if os.path.exists(os.path.join(path, "setup.cfg")):
-            logging.debug("Found setup.py, assuming python project.")
-            return cls(path)
         if os.path.exists(os.path.join(path, "pyproject.toml")):
             logging.debug("Found pyproject.toml, assuming python project.")
             return cls(path)
@@ -713,6 +727,10 @@ class R(BuildSystem):
         if "Imports" in description:
             for s in parse_list(description["Imports"]):
                 yield "build", RPackageRequirement.from_str(s)
+        if "LinkingTo" in description:
+            for s in parse_list(description["LinkingTo"]):
+                yield "build", RPackageRequirement.from_str(s)
+        # TODO(jelmer): Suggests

     def get_declared_outputs(self, session, fixers=None):
         description = self._read_description()
@@ -978,7 +996,9 @@ class RunTests(BuildSystem):

 def _read_cpanfile(session, args, kind, fixers):
     for line in run_with_build_fixers(session, ["cpanfile-dump"] + args, fixers):
-        yield kind, PerlModuleRequirement(line)
+        line = line.strip()
+        if line:
+            yield kind, PerlModuleRequirement(line)


 def _declared_deps_from_cpanfile(session, fixers):
@@ -986,6 +1006,26 @@ def _declared_deps_from_cpanfile(session, fixers):
     yield from _read_cpanfile(session, ["--test"], "test", fixers)


+def _declared_deps_from_meta_yml(f):
+    # See http://module-build.sourceforge.net/META-spec-v1.4.html for
+    # the specification of the format.
+    import ruamel.yaml
+    import ruamel.yaml.reader
+
+    try:
+        data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader)
+    except ruamel.yaml.reader.ReaderError as e:
+        warnings.warn("Unable to parse META.yml: %s" % e)
+        return
+    for require in data.get("requires", []):
+        yield "core", PerlModuleRequirement(require)
+    for require in data.get("build_requires", []):
+        yield "build", PerlModuleRequirement(require)
+    for require in data.get("configure_requires", []):
+        yield "build", PerlModuleRequirement(require)
+    # TODO(jelmer): recommends
+
+
 class Make(BuildSystem):

     name = "make"
@@ -1106,20 +1146,9 @@ class Make(BuildSystem):
         something = False
         # TODO(jelmer): Split out the perl-specific stuff?
         if os.path.exists(os.path.join(self.path, "META.yml")):
-            # See http://module-build.sourceforge.net/META-spec-v1.4.html for
-            # the specification of the format.
-            import ruamel.yaml
-            import ruamel.yaml.reader
-
             with open(os.path.join(self.path, "META.yml"), "rb") as f:
-                try:
-                    data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader)
-                except ruamel.yaml.reader.ReaderError as e:
-                    warnings.warn("Unable to parse META.yml: %s" % e)
-                    return
-                for require in data.get("requires", []):
-                    yield "build", PerlModuleRequirement(require)
-            something = True
+                yield from _declared_deps_from_meta_yml(f)
+            something = True
         if os.path.exists(os.path.join(self.path, "cpanfile")):
             yield from _declared_deps_from_cpanfile(session, fixers)
             something = True
@@ -1204,6 +1233,7 @@ def _parse_go_mod(f):
     while line:
         parts = line.strip().split(" ")
         if not parts or parts == [""]:
+            line = readline()
             continue
         if len(parts) == 2 and parts[1] == "(":
             line = readline()
@@ -1393,6 +1423,7 @@ class PerlBuildTiny(BuildSystem):

     def __init__(self, path):
         self.path = path
+        self.minilla = os.path.exists(os.path.join(self.path, "minil.toml"))

     def __repr__(self):
         return "%s(%r)" % (type(self).__name__, self.path)
@@ -1402,7 +1433,10 @@ class PerlBuildTiny(BuildSystem):

     def test(self, session, resolver, fixers):
         self.setup(session, fixers)
-        run_with_build_fixers(session, ["./Build", "test"], fixers)
+        if self.minilla:
+            run_with_build_fixers(session, ["minil", "test"], fixers)
+        else:
+            run_with_build_fixers(session, ["./Build", "test"], fixers)

     def build(self, session, resolver, fixers):
         self.setup(session, fixers)
@@ -1412,9 +1446,45 @@ class PerlBuildTiny(BuildSystem):
         self.setup(session, fixers)
         run_with_build_fixers(session, ["./Build", "clean"], fixers)

+    def dist(self, session, resolver, fixers, target_directory, quiet=False):
+        self.setup(session, fixers)
+        with DistCatcher([session.external_path('.')]) as dc:
+            if self.minilla:
+                run_with_build_fixers(session, ["minil", "dist"], fixers)
+            else:
+                try:
+                    run_with_build_fixers(session, ["./Build", "dist"], fixers)
+                except UnidentifiedError as e:
+                    if "Can't find dist packages without a MANIFEST file" in e.lines:
+                        run_with_build_fixers(session, ["./Build", "manifest"], fixers)
+                        run_with_build_fixers(session, ["./Build", "dist"], fixers)
+                    elif "No such action 'dist'" in e.lines:
+                        raise NotImplementedError
+                    else:
+                        raise
+        return dc.copy_single(target_directory)
+
     def install(self, session, resolver, fixers, install_target):
         self.setup(session, fixers)
-        run_with_build_fixers(session, ["./Build", "install"], fixers)
+        if self.minilla:
+            run_with_build_fixers(session, ["minil", "install"], fixers)
+        else:
+            run_with_build_fixers(session, ["./Build", "install"], fixers)
+
+    def get_declared_dependencies(self, session, fixers=None):
+        self.setup(session, fixers)
+        try:
+            run_with_build_fixers(session, ["./Build", "distmeta"], fixers)
+        except UnidentifiedError as e:
+            if "No such action 'distmeta'" in e.lines:
+                pass
+            else:
+                raise
+        try:
+            with open(os.path.join(self.path, 'META.yml'), 'r') as f:
+                yield from _declared_deps_from_meta_yml(f)
+        except FileNotFoundError:
+            pass

     @classmethod
     def probe(cls, path):
diff --git a/ognibuild/debian/apt.py b/ognibuild/debian/apt.py
index 7c168aa..1e704b0 100644
--- a/ognibuild/debian/apt.py
+++ b/ognibuild/debian/apt.py
@@ -28,7 +28,7 @@
 from .. import DetailedFailure, UnidentifiedError
 from ..session import Session, run_with_tee, get_user
 from .file_search import (
     FileSearcher,
-    AptCachedContentsFileSearcher,
+    get_apt_contents_file_searcher,
     GENERATED_FILE_SEARCHER,
     get_packages_for_paths,
 )
@@ -41,6 +41,7 @@ def run_apt(
     if prefix is None:
         prefix = []
     args = prefix + ["apt", "-y"] + args
+    logging.info('apt: running %r', args)
     retcode, lines = run_with_tee(session, args, cwd="/", user="root")
     if retcode == 0:
         return
@@ -76,21 +77,27 @@ class AptManager(object):
     def searchers(self):
         if self._searchers is None:
             self._searchers = [
-                AptCachedContentsFileSearcher.from_session(self.session),
+                get_apt_contents_file_searcher(self.session),
                 GENERATED_FILE_SEARCHER,
             ]
         return self._searchers

-    def package_exists(self, package):
+    @property
+    def apt_cache(self):
         if self._apt_cache is None:
             import apt

             self._apt_cache = apt.Cache(rootdir=self.session.location)
-        return package in self._apt_cache
+        return self._apt_cache
+
+    def package_exists(self, package):
+        return package in self.apt_cache
+
+    def package_versions(self, package):
+        return list(self.apt_cache[package].versions)

     def get_packages_for_paths(self, paths, regex=False, case_insensitive=False):
         logging.debug("Searching for packages containing %r", paths)
-        # TODO(jelmer): Make sure we use whatever is configured in self.session
         return get_packages_for_paths(
             paths, self.searchers(), regex=regex, case_insensitive=case_insensitive
         )
diff --git a/ognibuild/debian/build.py b/ognibuild/debian/build.py
index cca50fb..189b0ae 100644
--- a/ognibuild/debian/build.py
+++ b/ognibuild/debian/build.py
@@ -16,22 +16,24 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 __all__ = [
-    "changes_filename",
     "get_build_architecture",
     "add_dummy_changelog_entry",
     "build",
-    "SbuildFailure",
+    "DetailedDebianBuildFailure",
+    "UnidentifiedDebianBuildError",
 ]

 from datetime import datetime
+from debmutate.changelog import ChangelogEditor
 import logging
 import os
 import re
+import shlex
 import subprocess
 import sys

 from debian.changelog import Changelog
-from debmutate.changelog import get_maintainer, format_datetime
+from debmutate.changelog import get_maintainer

 from breezy.mutabletree import MutableTree
 from breezy.plugins.debian.builder import BuildFailedError
@@ -39,13 +41,33 @@
 from breezy.tree import Tree

 from buildlog_consultant.sbuild import (
     worker_failure_from_sbuild_log,
-    SbuildFailure,
 )
+from .. import DetailedFailure as DetailedFailure, UnidentifiedError
+

 DEFAULT_BUILDER = "sbuild --no-clean-source"


+class DetailedDebianBuildFailure(DetailedFailure):
+
+    def __init__(self, stage, phase, retcode, argv, error, description):
+        super(DetailedDebianBuildFailure, self).__init__(retcode, argv, error)
+        self.stage = stage
+        self.phase = phase
+        self.description = description
+
+
+class UnidentifiedDebianBuildError(UnidentifiedError):
+
+    def __init__(self, stage, phase, retcode, argv, lines, description, secondary=None):
+        super(UnidentifiedDebianBuildError, self).__init__(
+            retcode, argv, lines, secondary)
+        self.stage = stage
+        self.phase = phase
+        self.description = description
+
+
 class MissingChangesFile(Exception):
     """Expected changes file was not written."""

@@ -53,11 +75,15 @@ class MissingChangesFile(Exception):
         self.filename = filename


-def changes_filename(package, version, arch):
+def find_changes_files(path, package, version):
     non_epoch_version = version.upstream_version
     if version.debian_version is not None:
         non_epoch_version += "-%s" % version.debian_version
-    return "%s_%s_%s.changes" % (package, non_epoch_version, arch)
+    c = re.compile('%s_%s_(.*).changes' % (re.escape(package), re.escape(non_epoch_version)))
+    for entry in os.scandir(path):
+        m = c.match(entry.name)
+        if m:
+            yield m.group(1), entry


 def get_build_architecture():
@@ -119,25 +145,16 @@ def add_dummy_changelog_entry(
         maintainer = get_maintainer()
     if timestamp is None:
         timestamp = datetime.now()
-    with tree.get_file(path) as f:
-        cl = Changelog()
-        cl.parse_changelog(f, max_blocks=None, allow_empty_author=True, strict=False)
-        version = cl[0].version
+    with ChangelogEditor(tree.abspath(os.path.join(path))) as editor:
+        version = editor[0].version
         if version.debian_revision:
             version.debian_revision = add_suffix(version.debian_revision, suffix)
         else:
             version.upstream_version = add_suffix(version.upstream_version, suffix)
-        cl.new_block(
-            package=cl[0].package,
-            version=version,
-            urgency="low",
-            distributions=suite,
-            author="%s <%s>" % maintainer,
-            date=format_datetime(timestamp),
-            changes=["", " * " + message, ""],
-        )
-    cl_str = cl._format(allow_missing_author=True)
-    tree.put_file_bytes_non_atomic(path, cl_str.encode(cl._encoding))
+        editor.auto_version(version, timestamp=timestamp)
+        editor.add_entry(
+            summary=[message], maintainer=maintainer, timestamp=timestamp, urgency='low')
+        editor[0].distributions = suite


 def get_latest_changelog_version(local_tree, subpath=""):
@@ -158,7 +175,10 @@ def build(
     distribution=None,
     subpath="",
     source_date_epoch=None,
+    extra_repositories=None,
 ):
+    for repo in extra_repositories or []:
+        build_command += " --extra-repository=" + shlex.quote(repo)
     args = [
         sys.executable,
         "-m",
@@ -192,8 +212,10 @@ def build_once(
     build_command,
     subpath="",
     source_date_epoch=None,
+    extra_repositories=None
 ):
     build_log_path = os.path.join(output_directory, "build.log")
+    logging.debug("Writing build log to %s", build_log_path)
     try:
         with open(build_log_path, "w") as f:
             build(
@@ -204,21 +226,35 @@ def build_once(
                 distribution=build_suite,
                 subpath=subpath,
                 source_date_epoch=source_date_epoch,
+                extra_repositories=extra_repositories,
             )
-    except BuildFailedError:
+    except BuildFailedError as e:
         with open(build_log_path, "rb") as f:
-            raise worker_failure_from_sbuild_log(f)
+            sbuild_failure = worker_failure_from_sbuild_log(f)
+            retcode = getattr(e, 'returncode', None)
+            if sbuild_failure.error:
+                raise DetailedDebianBuildFailure(
+                    sbuild_failure.stage,
+                    sbuild_failure.phase,
+                    retcode,
+                    shlex.split(build_command),
+                    sbuild_failure.error,
+                    sbuild_failure.description)
+            else:
+                raise UnidentifiedDebianBuildError(
+                    sbuild_failure.stage,
+                    sbuild_failure.phase,
+                    retcode, shlex.split(build_command),
+                    [], sbuild_failure.description)

     (cl_package, cl_version) = get_latest_changelog_version(local_tree, subpath)
-    changes_name = changes_filename(cl_package, cl_version, get_build_architecture())
-    changes_path = os.path.join(output_directory, changes_name)
-    if not os.path.exists(changes_path):
-        raise MissingChangesFile(changes_name)
-    return (changes_name, cl_version)
+    changes_names = []
+    for kind, entry in find_changes_files(output_directory, cl_package, cl_version):
+        changes_names.append((entry.name))
+    return (changes_names, cl_version)


 def gbp_dch(path):
-    subprocess.check_call(["gbp", "dch"], cwd=path)
+    subprocess.check_call(["gbp", "dch", "--ignore-branch"], cwd=path)


 def attempt_build(
@@ -230,6 +266,8 @@
     build_changelog_entry=None,
     subpath="",
     source_date_epoch=None,
+    run_gbp_dch=False,
+    extra_repositories=None
 ):
     """Attempt a build, with a custom distribution set.

@@ -244,6 +282,8 @@
       source_date_epoch: Source date epoch to set
     Returns: Tuple with (changes_name, cl_version)
     """
+    if run_gbp_dch and not subpath:
+        gbp_dch(local_tree.abspath(subpath))
     if build_changelog_entry is not None:
         add_dummy_changelog_entry(
             local_tree, subpath, suffix, build_suite, build_changelog_entry
         )
@@ -255,4 +295,5 @@
         build_command,
         subpath,
         source_date_epoch=source_date_epoch,
+        extra_repositories=extra_repositories,
     )
diff --git a/ognibuild/debian/build_deps.py b/ognibuild/debian/build_deps.py
index 888325f..246a058 100644
--- a/ognibuild/debian/build_deps.py
+++ b/ognibuild/debian/build_deps.py
@@ -65,9 +65,9 @@ class BuildDependencyTieBreaker(object):
             return None
         top = max(by_count.items(), key=lambda k: k[1])
         logging.info(
-            "Breaking tie between %r to %r based on build-depends count",
-            [repr(r) for r in reqs],
-            top[0],
+            "Breaking tie between [%s] to %s based on build-depends count",
+            ', '.join([repr(r.pkg_relation_str()) for r in reqs]),
+            repr(top[0].pkg_relation_str()),
         )
         return top[0]

diff --git a/ognibuild/debian/file_search.py b/ognibuild/debian/file_search.py
index d0d9ff4..4f33ebf 100644
--- a/ognibuild/debian/file_search.py
+++ b/ognibuild/debian/file_search.py
@@ -21,11 +21,13 @@
 from datetime import datetime
 from debian.deb822 import Release
 import os
 import re
+import subprocess
 from typing import Iterator, List
 import logging

 from .. import USER_AGENT
+from ..session import Session


 class FileSearcher(object):
@@ -158,7 +160,68 @@ def load_apt_cache_file(url, cache_dir):
     raise FileNotFoundError(url)


-class AptCachedContentsFileSearcher(FileSearcher):
+class AptFileFileSearcher(FileSearcher):
+
+    CACHE_IS_EMPTY_PATH = '/usr/share/apt-file/is-cache-empty'
+
+    def __init__(self, session: Session):
+        self.session = session
+
+    @classmethod
+    def has_cache(cls, session: Session) -> bool:
+        if not os.path.exists(session.external_path(cls.CACHE_IS_EMPTY_PATH)):
+            return False
+        try:
+            session.check_call([cls.CACHE_IS_EMPTY_PATH])
+        except subprocess.CalledProcessError as e:
+            if e.returncode == 1:
+                return True
+            raise
+        else:
+            return False
+
+    @classmethod
+    def from_session(cls, session):
+        logging.info('Using apt-file to search apt contents')
+        if not os.path.exists(session.external_path(cls.CACHE_IS_EMPTY_PATH)):
+            from .apt import AptManager
+            AptManager.from_session(session).install(['apt-file'])
+        if not cls.has_cache(session):
+            session.check_call(['apt-file', 'update'], user='root')
+        return cls(session)
+
+    def search_files(self, path, regex=False, case_insensitive=False):
+        args = []
+        if regex:
+            args.append('-x')
+        else:
+            args.append('-F')
+        if case_insensitive:
+            args.append('-i')
+        args.append(path)
+        try:
+            output = self.session.check_output(['/usr/bin/apt-file', 'search'] + args)
+        except subprocess.CalledProcessError as e:
+            if e.returncode == 1:
+                # No results
+                return
+            if e.returncode == 3:
+                raise Exception('apt-file cache is empty')
+            raise
+
+        for line in output.splitlines(False):
+            pkg, path = line.split(b': ')
+            yield pkg.decode('utf-8')
+
+
+def get_apt_contents_file_searcher(session):
+    if AptFileFileSearcher.has_cache(session):
+        return AptFileFileSearcher.from_session(session)
+
+    return RemoteContentsFileSearcher.from_session(session)
+
+
+class RemoteContentsFileSearcher(FileSearcher):
     def __init__(self):
         self._db = {}

@@ -268,12 +331,12 @@ class GeneratedFileSearcher(FileSearcher):
         with open(path, "r") as f:
             for line in f:
                 (path, pkg) = line.strip().split(None, 1)
-                self._db[path] = pkg
+                self._db.append((path, pkg))

     def search_files(
         self, path: str, regex: bool = False, case_insensitive: bool = False
     ) -> Iterator[str]:
-        for p, pkg in sorted(self._db.items()):
+        for p, pkg in self._db:
             if regex:
                 flags = 0
                 if case_insensitive:
@@ -290,16 +353,16 @@ class GeneratedFileSearcher(FileSearcher):

 # TODO(jelmer): read from a file
 GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
-    {
-        "/etc/locale.gen": "locales",
+    [
+        ("/etc/locale.gen", "locales"),
         # Alternative
-        "/usr/bin/rst2html": "python3-docutils",
+        ("/usr/bin/rst2html", "python3-docutils"),
         # aclocal is a symlink to aclocal-1.XY
-        "/usr/bin/aclocal": "automake",
-        "/usr/bin/automake": "automake",
+        ("/usr/bin/aclocal", "automake"),
+        ("/usr/bin/automake", "automake"),
         # maven lives in /usr/share
-        "/usr/bin/mvn": "maven",
-    }
+        ("/usr/bin/mvn", "maven"),
+    ]
 )


@@ -322,6 +385,7 @@ def get_packages_for_paths(

 def main(argv):
     import argparse
+    from ..session.plain import PlainSession

     parser = argparse.ArgumentParser()
     parser.add_argument("path", help="Path to search for.", type=str, nargs="*")
@@ -334,7 +398,7 @@ def main(argv):
     else:
         logging.basicConfig(level=logging.INFO)

-    main_searcher = AptCachedContentsFileSearcher()
+    main_searcher = get_apt_contents_file_searcher(PlainSession())
     main_searcher.load_local()
     searchers = [main_searcher, GENERATED_FILE_SEARCHER]

diff --git a/ognibuild/debian/fix_build.py b/ognibuild/debian/fix_build.py
index 5e12726..2b998df 100644
--- a/ognibuild/debian/fix_build.py
+++ b/ognibuild/debian/fix_build.py
@@ -22,6 +22,7 @@ __all__ = [
 from functools import partial
 import logging
 import os
+import re
 import shutil
 import sys
 from typing import List, Set, Optional, Type
@@ -31,7 +32,7 @@ from debian.deb822 import (
     PkgRelation,
 )

-from breezy.commit import PointlessCommit
+from breezy.commit import PointlessCommit, NullCommitReporter
 from breezy.tree import Tree
 from debmutate.changelog import ChangelogEditor
 from debmutate.control import (
@@ -111,9 +112,13 @@ from buildlog_consultant.common import (
     MissingPerlFile,
 )
 from buildlog_consultant.sbuild import (
-    SbuildFailure,
-)
+    DebcargoUnacceptablePredicate,
+    )
+from .build import (
+    DetailedDebianBuildFailure,
+    UnidentifiedDebianBuildError,
+    )
 from ..buildlog import problem_to_upstream_requirement
 from ..fix_build import BuildFixer, resolve_error
 from ..resolver.apt import (
@@ -154,7 +159,10 @@ class DebianPackagingContext(object):
             cl_path = self.abspath("debian/changelog")
             with ChangelogEditor(cl_path) as editor:
                 editor.add_entry([summary])
-            debcommit(self.tree, committer=self.committer, subpath=self.subpath)
+            debcommit(
+                self.tree, committer=self.committer,
+                subpath=self.subpath,
+                reporter=self.commit_reporter)
         else:
             self.tree.commit(
                 message=summary,
@@ -304,6 +312,8 @@ def python_tie_breaker(tree, subpath, reqs):
             return True
         if pkg.startswith("lib%s-" % python_version):
             return True
+        if re.match(r'lib%s\.[0-9]-dev' % python_version, pkg):
+            return True
         return False

     for python_version in targeted:
@@ -423,6 +433,13 @@ def fix_missing_makefile_pl(error, phase, context):
     return False


+def coerce_unacceptable_predicate(error, phase, context):
+    from debmutate.debcargo import DebcargoEditor
+    with DebcargoEditor(context.abspath('debian/debcargo.toml')) as editor:
+        editor['allow_prerelease_deps'] = True
+    return context.commit('Enable allow_prerelease_deps.')
+
+
 class SimpleBuildFixer(BuildFixer):
     def __init__(self, packaging_context, problem_cls: Type[Problem], fn):
         self.context = packaging_context
@@ -475,6 +492,7 @@ def versioned_package_fixers(session, packaging_context, apt):
             packaging_context, MissingConfigStatusInput, fix_missing_config_status_input
         ),
         SimpleBuildFixer(packaging_context, MissingPerlFile, fix_missing_makefile_pl),
+        SimpleBuildFixer(packaging_context, DebcargoUnacceptablePredicate, coerce_unacceptable_predicate),
     ]


@@ -497,6 +515,16 @@ def apt_fixers(apt, packaging_context) -> List[BuildFixer]:
     ]


+def default_fixers(local_tree, subpath, apt, committer=None, update_changelog=None):
+    packaging_context = DebianPackagingContext(
+        local_tree, subpath, committer, update_changelog,
+        commit_reporter=NullCommitReporter()
+    )
+    return versioned_package_fixers(apt.session, packaging_context, apt) + apt_fixers(
+        apt, packaging_context
+    )
+
+
 def build_incrementally(
     local_tree,
     apt,
@@ -510,14 +538,14 @@
     subpath="",
     source_date_epoch=None,
     update_changelog=True,
+    extra_repositories=None,
+    fixers=None
 ):
     fixed_errors = []
-    packaging_context = DebianPackagingContext(
-        local_tree, subpath, committer, update_changelog
-    )
-    fixers = versioned_package_fixers(apt.session, packaging_context, apt) + apt_fixers(
-        apt, packaging_context
-    )
+    if fixers is None:
+        fixers = default_fixers(
+            local_tree, subpath, apt, committer=committer,
+            update_changelog=update_changelog)
     logging.info("Using fixers: %r", fixers)
     while True:
         try:
@@ -530,11 +558,13 @@
                 build_changelog_entry,
                 subpath=subpath,
                 source_date_epoch=source_date_epoch,
+                run_gbp_dch=(update_changelog is False),
+                extra_repositories=extra_repositories,
             )
-        except SbuildFailure as e:
-            if e.error is None:
-                logging.warning("Build failed with unidentified error. Giving up.")
-                raise
+        except UnidentifiedDebianBuildError:
+            logging.warning("Build failed with unidentified error. Giving up.")
+            raise
+        except DetailedDebianBuildFailure as e:
             if e.phase is None:
                 logging.info("No relevant context, not making any changes.")
                 raise
@@ -603,6 +633,11 @@ def main(argv=None):
         dest="update_changelog",
         help="do not update the changelog",
     )
+    parser.add_argument(
+        '--max-iterations',
+        type=int,
+        default=DEFAULT_MAX_ITERATIONS,
+        help='Maximum number of issues to attempt to fix before giving up.')
    parser.add_argument(
        "--update-changelog",
        action="store_true",
@@ -646,7 +681,7 @@ def main(argv=None):
     apt = AptManager(session)

     try:
-        (changes_filename, cl_version) = build_incrementally(
+        (changes_filenames, cl_version) = build_incrementally(
             tree,
             apt,
             args.suffix,
@@ -656,23 +691,30 @@ def main(argv=None):
             None,
             committer=args.committer,
             update_changelog=args.update_changelog,
+            max_iterations=args.max_iterations,
         )
-    except SbuildFailure as e:
+    except DetailedDebianBuildFailure as e:
         if e.phase is None:
             phase = "unknown phase"
         elif len(e.phase) == 1:
             phase = e.phase[0]
         else:
             phase = "%s (%s)" % (e.phase[0], e.phase[1])
-        if e.error:
-            logging.fatal("Error during %s: %s", phase, e.error)
+        logging.fatal("Error during %s: %s", phase, e.error)
+        return 1
+    except UnidentifiedDebianBuildError as e:
+        if e.phase is None:
+            phase = "unknown phase"
+        elif len(e.phase) == 1:
+            phase = e.phase[0]
         else:
-            logging.fatal("Error during %s: %s", phase, e.description)
+            phase = "%s (%s)" % (e.phase[0], e.phase[1])
+        logging.fatal("Error during %s: %s", phase, e.description)
         return 1

     logging.info(
-        'Built %s - changes file at %s.',
-        os.path.join(output_directory, changes_filename))
+        'Built %s - changes file at %r.',
+        cl_version, changes_filenames)


 if __name__ == "__main__":
diff --git a/ognibuild/dist.py b/ognibuild/dist.py
index cdf1265..8206920 100644
--- a/ognibuild/dist.py
+++ b/ognibuild/dist.py
@@ -51,6 +51,8 @@ def run_dist(session, buildsystems, resolver, fixers, target_directory, quiet=Fa
     # e.g. pip caches in ~/.cache
     session.create_home()

+    logging.info('Using dependency resolver: %s', resolver)
+
     for buildsystem in buildsystems:
         filename = buildsystem.dist(
             session, resolver, fixers, target_directory, quiet=quiet
diff --git a/ognibuild/dist_catcher.py b/ognibuild/dist_catcher.py
index 2739773..b2546a1 100644
--- a/ognibuild/dist_catcher.py
+++ b/ognibuild/dist_catcher.py
@@ -94,7 +94,8 @@ class DistCatcher(object):
                 logging.warning(
                     "Found multiple tarballs %r in %s.", possible_new, directory
                 )
-                return
+                self.files.extend([entry.path for entry in possible_new])
+                return possible_new[0].name

             if len(possible_updated) == 1:
                 entry = possible_updated[0]
diff --git a/ognibuild/fix_build.py b/ognibuild/fix_build.py
index e83b84b..90d67a2 100644
--- a/ognibuild/fix_build.py
+++ b/ognibuild/fix_build.py
@@ -17,7 +17,7 @@

 from functools import partial
 import logging
-from typing import List, Tuple, Callable, Any
+from typing import List, Tuple, Callable, Any, Optional

 from buildlog_consultant import Problem
 from buildlog_consultant.common import (
@@ -104,8 +104,10 @@ def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):


 def run_with_build_fixers(
-    session: Session, args: List[str], fixers: List[BuildFixer], **kwargs
+    session: Session, args: List[str], fixers: Optional[List[BuildFixer]], **kwargs
 ):
+    if fixers is None:
+        fixers = []
     return iterate_with_build_fixers(
         fixers, partial(run_detecting_problems, session, args, **kwargs)
     )
diff --git a/ognibuild/info.py b/ognibuild/info.py
index 0ee1cfd..c0bc425 100644
--- a/ognibuild/info.py
+++ b/ognibuild/info.py
@@ -16,12 +16,12 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


-def run_info(session, buildsystems):
+def run_info(session, buildsystems, fixers=None):
     for buildsystem in buildsystems:
         print("%r:" % buildsystem)
         deps = {}
         try:
-            for kind, dep in buildsystem.get_declared_dependencies(session):
+            for kind, dep in buildsystem.get_declared_dependencies(session, fixers=fixers):
                 deps.setdefault(kind, []).append(dep)
         except NotImplementedError:
             print(
@@ -35,7 +35,7 @@ def run_info(session, buildsystems):
                 print("\t\t\t%s" % dep)
             print("")
         try:
-            outputs = list(buildsystem.get_declared_outputs(session))
+            outputs = list(buildsystem.get_declared_outputs(session, fixers=fixers))
         except NotImplementedError:
             print("\tUnable to detect declared outputs for this type of build system")
             outputs = []
diff --git a/ognibuild/requirements.py b/ognibuild/requirements.py
index 014bee9..858ad09 100644
--- a/ognibuild/requirements.py
+++ b/ognibuild/requirements.py
@@ -82,6 +82,15 @@ class PythonPackageRequirement(Requirement):
         return p.returncode == 0


+class LatexPackageRequirement(Requirement):
+
+    def __init__(self, package: str):
+        self.package = package
+
+    def __repr__(self):
+        return "%s(%r)" % (type(self).__name__, self.package)
+
+
 class PhpPackageRequirement(Requirement):
     def __init__(
         self,
@@ -192,6 +201,32 @@ class NodePackageRequirement(Requirement):
         return "%s(%r)" % (type(self).__name__, self.package)


+class PerlPreDeclaredRequirement(Requirement):
+
+    name: str
+
+    # TODO(jelmer): Can we obtain this information elsewhere?
+    KNOWN_MODULES = {
+        'auto_set_repository': 'Module::Install::Repository',
+        'author_tests': 'Module::Install::AuthorTests',
+        'readme_from': 'Module::Install::ReadmeFromPod',
+        'catalyst': 'Module::Install::Catalyst',
+        'githubmeta': 'Module::Install::GithubMeta',
+        'use_ppport': 'Module::Install::XSUtil',
+    }
+
+    def __init__(self, name):
+        super(PerlPreDeclaredRequirement, self).__init__("perl-predeclared")
+        self.name = name
+
+    def lookup_module(self):
+        module = self.KNOWN_MODULES[self.name]
+        return PerlModuleRequirement(module=module)
+
+    def __repr__(self):
+        return "%s(%r)" % (type(self).__name__, self.name)
+
+
 class NodeModuleRequirement(Requirement):

     module: str
@@ -246,6 +281,10 @@ class PkgConfigRequirement(Requirement):
         self.module = module
         self.minimum_version = minimum_version

+    def __repr__(self):
+        return "%s(%r, minimum_version=%r)" % (
+            type(self).__name__, self.module, self.minimum_version)
+

 class PathRequirement(Requirement):

@@ -255,6 +294,9 @@ class PathRequirement(Requirement):
         super(PathRequirement, self).__init__("path")
         self.path = path

+    def __repr__(self):
+        return "%s(%r)" % (type(self).__name__, self.path)
+

 class CHeaderRequirement(Requirement):

@@ -264,6 +306,9 @@ class CHeaderRequirement(Requirement):
         super(CHeaderRequirement, self).__init__("c-header")
         self.header = header

+    def __repr__(self):
+        return "%s(%r)" % (type(self).__name__, self.header)
+

 class JavaScriptRuntimeRequirement(Requirement):
     def __init__(self):
@@ -587,6 +632,11 @@ class LibtoolRequirement(Requirement):
         super(LibtoolRequirement, self).__init__("libtool")


+class IntrospectionTypelibRequirement(Requirement):
+    def __init__(self, library):
+        self.library = library
+
+
 class PythonModuleRequirement(Requirement):

     module: str
@@ -619,3 +669,8 @@ class PythonModuleRequirement(Requirement):
         )
         p.communicate()
         return p.returncode == 0
+
+    def __repr__(self):
+        return "%s(%r, python_version=%r, minimum_version=%r)" % (
+            type(self).__name__, self.module, self.python_version,
+            self.minimum_version)
diff --git a/ognibuild/resolver/__init__.py b/ognibuild/resolver/__init__.py
index 7451481..5402454 100644
--- a/ognibuild/resolver/__init__.py
+++ b/ognibuild/resolver/__init__.py
@@ -16,7 +16,9 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

+import logging
 import subprocess
+from .. import UnidentifiedError
 from ..fix_build import run_detecting_problems


@@ -67,7 +69,7 @@ class CPANResolver(Resolver):
                 continue
             perlreqs.append(requirement)
         if perlreqs:
-            yield (self._cmd(perlreqs), [perlreqs])
+            yield (self._cmd(perlreqs), perlreqs)

     def install(self, requirements):
         from ..requirements import PerlModuleRequirement
@@ -88,9 +90,11 @@ class CPANResolver(Resolver):
             if not isinstance(requirement, PerlModuleRequirement):
                 missing.append(requirement)
                 continue
+            cmd = self._cmd([requirement])
+            logging.info("CPAN: running %r", cmd)
             run_detecting_problems(
                 self.session,
-                self._cmd([requirement]),
+                cmd,
                 env=env,
                 user=user,
             )
@@ -98,6 +102,73 @@ class CPANResolver(Resolver):
             raise UnsatisfiedRequirements(missing)


+class TlmgrResolver(Resolver):
+    def __init__(self, session, repository: str, user_local=False):
+        self.session = session
+        self.user_local = user_local
+        self.repository = repository
+
+    def __str__(self):
+        if self.repository.startswith('http://') or self.repository.startswith('https://'):
+            return 'tlmgr(%r)' % self.repository
+        else:
+            return self.repository
+
+    def __repr__(self):
+        return "%s(%r, %r)" % (
+            type(self).__name__, self.session, self.repository)
+
+    def _cmd(self, reqs):
+        ret = ["tlmgr", "--repository=%s" % self.repository, "install"]
+        if self.user_local:
+            ret.append("--usermode")
+        ret.extend([req.package for req in reqs])
+        return ret
+
+    def explain(self, requirements):
+        from ..requirements import LatexPackageRequirement
+
+        latexreqs = []
+        for requirement in requirements:
+            if not isinstance(requirement, LatexPackageRequirement):
+                continue
+            latexreqs.append(requirement)
+        if latexreqs:
+            yield (self._cmd(latexreqs), latexreqs)
+
+    def install(self, requirements):
+        from ..requirements import LatexPackageRequirement
+
+        if not self.user_local:
+            user = "root"
+        else:
+            user = None
+
+        missing = []
+        for requirement in requirements:
+            if not isinstance(requirement, LatexPackageRequirement):
+                missing.append(requirement)
+                continue
+            cmd = self._cmd([requirement])
+            logging.info("tlmgr: running %r", cmd)
+            try:
+                run_detecting_problems(self.session, cmd, user=user)
+            except UnidentifiedError as e:
+                if "tlmgr: user mode not initialized, please read the documentation!" in e.lines:
+                    self.session.check_call(['tlmgr', 'init-usertree'])
+                else:
+                    raise
+        if missing:
+            raise UnsatisfiedRequirements(missing)
+
+
+class CTANResolver(TlmgrResolver):
+
+    def __init__(self, session, user_local=False):
+        super(CTANResolver, self).__init__(
+            session, "ctan", user_local=user_local)
+
+
 class RResolver(Resolver):
     def __init__(self, session, repos, user_local=False):
         self.session = session
@@ -142,7 +213,9 @@ class RResolver(Resolver):
             if not isinstance(requirement, RPackageRequirement):
                 missing.append(requirement)
                 continue
-            self.session.check_call(self._cmd(requirement), user=user)
+            cmd = self._cmd(requirement)
+            logging.info("RResolver(%r): running %r", self.repos, cmd)
+            run_detecting_problems(self.session, cmd, user=user)
         if missing:
             raise UnsatisfiedRequirements(missing)

@@ -186,7 +259,9 @@ class OctaveForgeResolver(Resolver):
             if not isinstance(requirement, OctavePackageRequirement):
                 missing.append(requirement)
                 continue
-            self.session.check_call(self._cmd(requirement), user=user)
+            cmd = self._cmd(requirement)
+            logging.info("Octave: running %r", cmd)
+            run_detecting_problems(self.session, cmd, user=user)
         if missing:
             raise UnsatisfiedRequirements(missing)

@@ -235,7 +310,9 @@ class HackageResolver(Resolver):
             if not isinstance(requirement, HaskellPackageRequirement):
                 missing.append(requirement)
                 continue
-            self.session.check_call(self._cmd([requirement]), user=user)
+            cmd = self._cmd([requirement])
+            logging.info("Hackage: running %r", cmd)
+            run_detecting_problems(self.session, cmd, user=user)
         if missing:
             raise UnsatisfiedRequirements(missing)

@@ -281,8 +358,10 @@ class PypiResolver(Resolver):
             if not isinstance(requirement, PythonPackageRequirement):
                 missing.append(requirement)
                 continue
+            cmd = self._cmd([requirement])
+            logging.info("pip: running %r", cmd)
             try:
-                self.session.check_call(self._cmd([requirement]), user=user)
+                run_detecting_problems(self.session, cmd, user=user)
             except subprocess.CalledProcessError:
                 missing.append(requirement)
         if missing:
@@ -325,7 +404,9 @@ class GoResolver(Resolver):
             if not isinstance(requirement, GoPackageRequirement):
                 missing.append(requirement)
                 continue
-            self.session.check_call(["go", "get", requirement.package], env=env)
+            cmd = ["go", "get", requirement.package]
+            logging.info("go: running %r", cmd)
+            run_detecting_problems(self.session, cmd, env=env)
         if missing:
             raise UnsatisfiedRequirements(missing)

@@ -344,6 +425,7 @@ class GoResolver(Resolver):
 NPM_COMMAND_PACKAGES = {
     "del-cli": "del-cli",
     "husky": "husky",
+    "cross-env": "cross-env",
 }


@@ -382,13 +464,17 @@ class NpmResolver(Resolver):
                 requirement = NodePackageRequirement(package)
             if isinstance(requirement, NodeModuleRequirement):
                 # TODO: Is this legit?
-                requirement = NodePackageRequirement(requirement.module.split("/")[0])
+                parts = requirement.module.split("/")
+                if parts[0].startswith('@'):
+                    requirement = NodePackageRequirement('/'.join(parts[:2]))
+                else:
+                    requirement = NodePackageRequirement(parts[0])
             if not isinstance(requirement, NodePackageRequirement):
                 missing.append(requirement)
                 continue
-            self.session.check_call(
-                ["npm", "-g", "install", requirement.package], user=user
-            )
+            cmd = ["npm", "-g", "install", requirement.package]
+            logging.info("npm: running %r", cmd)
+            run_detecting_problems(self.session, cmd, user=user)
         if missing:
             raise UnsatisfiedRequirements(missing)

@@ -445,6 +531,7 @@ class StackedResolver(Resolver):

 NATIVE_RESOLVER_CLS = [
     CPANResolver,
+    CTANResolver,
     PypiResolver,
     NpmResolver,
     GoResolver,
diff --git a/ognibuild/resolver/apt.py b/ognibuild/resolver/apt.py
index 618206c..cc2177b 100644
--- a/ognibuild/resolver/apt.py
+++ b/ognibuild/resolver/apt.py
@@ -68,6 +68,8 @@ from ..requirements import (
     CertificateAuthorityRequirement,
     LibtoolRequirement,
     VagueDependencyRequirement,
+    PerlPreDeclaredRequirement,
+    IntrospectionTypelibRequirement,
 )


@@ -115,6 +117,36 @@ class AptRequirement(Requirement):
                     return True
         return False

+    def satisfied_by(self, binaries, version):
+        def binary_pkg_matches(entry, binary):
+            # TODO(jelmer): check versions
+            if entry['name'] == binary['Package']:
+                return True
+            for provides_top in PkgRelation.parse_relations(
+                    binary.get('Provides', '')):
+                for provides in provides_top:
+                    if entry['name'] == provides['name']:
+                        return True
+            return False
+
+        for rel in self.relations:
+            for entry in rel:
+                if any(binary_pkg_matches(entry, binary) for binary in binaries):
+                    break
+            else:
+                return False
+        return True
+
+
+def resolve_perl_predeclared_req(apt_mgr, req):
+    try:
+        req = req.lookup_module()
+    except KeyError:
+        logging.warning(
+            'Unable to map predeclared function %s to a perl module', req.name)
+        return None
+    return resolve_perl_module_req(apt_mgr, req)
+

 def find_package_names(
     apt_mgr: AptManager, paths: List[str], regex: bool = False, case_insensitive=False
@@ -282,6 +314,17 @@ def get_package_for_python_module(apt_mgr, module, python_version, specs):
 vague_map = {
     "the Gnu Scientific Library": "libgsl-dev",
     "the required FreeType library": "libfreetype-dev",
+    "the Boost C++ libraries": "libboost-dev",
+
+    # TODO(jelmer): Support resolving virtual packages
+    "PythonLibs": "libpython3-dev",
+    "ZLIB": "libz3-dev",
+    "Osmium": "libosmium2-dev",
+    "glib": "libglib2.0-dev",
+
+    # TODO(jelmer): For Python, check minimum_version and map to python 2 or python 3
+    "Python": "libpython3-dev",
+    "Lua": "liblua5.4-dev",
 }


@@ -289,7 +332,7 @@ def resolve_vague_dep_req(apt_mgr, req):
     name = req.name
     options = []
     if name in vague_map:
-        options.append(AptRequirement.simple(vague_map[name]))
+        options.append(AptRequirement.simple(vague_map[name], minimum_version=req.minimum_version))
     for x in req.expand():
         options.extend(resolve_requirement_apt(apt_mgr, x))
     return options
@@ -393,9 +436,9 @@ def resolve_php_package_req(apt_mgr, req):

 def resolve_r_package_req(apt_mgr, req):
     paths = [
-        posixpath.join("/usr/lib/R/site-library/.*/R/%s$" % re.escape(req.package))
+        posixpath.join("/usr/lib/R/site-library", req.package, "DESCRIPTION")
     ]
-    return find_reqs_simple(apt_mgr, paths, regex=True)
+    return find_reqs_simple(apt_mgr, paths, minimum_version=req.minimum_version)


 def resolve_node_module_req(apt_mgr, req):
@@ -485,7 +528,7 @@ def resolve_cmake_file_req(apt_mgr, req):


 def resolve_haskell_package_req(apt_mgr, req):
-    path = "/var/lib/ghc/package\\.conf\\.d/%s-.*\\.conf" % re.escape(req.deps[0][0])
+    path = "/var/lib/ghc/package\\.conf\\.d/%s-.*\\.conf" % re.escape(req.package)
     return find_reqs_simple(apt_mgr, [path], regex=True)

@@ -620,6 +663,12 @@ def resolve_ca_req(apt_mgr, req):
     return [AptRequirement.simple("ca-certificates")]


+def resolve_introspection_typelib_req(apt_mgr, req):
+    return find_reqs_simple(
+        apt_mgr, [r'/usr/lib/.*/girepository-.*/%s-.*\.typelib' % re.escape(req.library)],
+        regex=True)
+
+
 def resolve_apt_req(apt_mgr, req):
     # TODO(jelmer): This should be checking whether versions match as well.
     for package_name in req.package_names():
@@ -632,6 +681,7 @@ APT_REQUIREMENT_RESOLVERS = [
     (AptRequirement, resolve_apt_req),
     (BinaryRequirement, resolve_binary_req),
     (VagueDependencyRequirement, resolve_vague_dep_req),
+    (PerlPreDeclaredRequirement, resolve_perl_predeclared_req),
     (PkgConfigRequirement, resolve_pkg_config_req),
     (PathRequirement, resolve_path_req),
     (CHeaderRequirement, resolve_c_header_req),
@@ -668,6 +718,7 @@ APT_REQUIREMENT_RESOLVERS = [
     (PythonPackageRequirement, resolve_python_package_req),
     (CertificateAuthorityRequirement, resolve_ca_req),
     (CargoCrateRequirement, resolve_cargo_crate_req),
+    (IntrospectionTypelibRequirement, resolve_introspection_typelib_req),
 ]


@@ -683,11 +734,20 @@ def resolve_requirement_apt(apt_mgr, req: Requirement) -> List[AptRequirement]:
     raise NotImplementedError(type(req))


+def default_tie_breakers(session):
+    from ..debian.udd import popcon_tie_breaker
+    from ..debian.build_deps import BuildDependencyTieBreaker
+    return [
+        BuildDependencyTieBreaker.from_session(session),
+        popcon_tie_breaker,
+    ]
+
+
 class AptResolver(Resolver):
     def __init__(self, apt, tie_breakers=None):
         self.apt = apt
         if tie_breakers is None:
-            tie_breakers = []
+            tie_breakers = default_tie_breakers(apt.session)
         self.tie_breakers = tie_breakers

     def __str__(self):
diff --git a/ognibuild/session/schroot.py b/ognibuild/session/schroot.py
index f7f45b2..ff5b436 100644
--- a/ognibuild/session/schroot.py
+++ b/ognibuild/session/schroot.py
@@ -185,6 +185,8 @@ class SchrootSession(Session):
             self.check_call(["chown", user, home], cwd="/", user="root")

     def external_path(self, path: str) -> str:
+        if os.path.isabs(path):
+            return os.path.join(self.location, path.lstrip("/"))
         if self._cwd is None:
             raise ValueError("no cwd set")
         return os.path.join(self.location, os.path.join(self._cwd, path).lstrip("/"))
diff --git a/ognibuild/tests/test_debian_build.py b/ognibuild/tests/test_debian_build.py
index 6e5970a..0b06869 100644
--- a/ognibuild/tests/test_debian_build.py
+++ b/ognibuild/tests/test_debian_build.py
@@ -53,17 +53,12 @@ janitor (0.1-1) UNRELEASED; urgency=medium
         )
         self.assertFileEqual(
             """\
-janitor (0.1-1jan+some1) some-fixes; urgency=low
+janitor (0.1-1jan+some1) some-fixes; urgency=medium

+  * Initial release. (Closes: #XXXXXX)
   * Dummy build.

  -- Jelmer Vernooij  Sat, 05 Sep 2020 12:35:04 -0000
-
-janitor (0.1-1) UNRELEASED; urgency=medium
-
-  * Initial release. (Closes: #XXXXXX)
-
- -- Jelmer Vernooij  Sat, 04 Apr 2020 14:12:13 +0000
 """,
             "debian/changelog",
         )
@@ -97,17 +92,12 @@ janitor (0.1) UNRELEASED; urgency=medium
         )
         self.assertFileEqual(
             """\
-janitor (0.1jan+some1) some-fixes; urgency=low
+janitor (0.1jan+some1) some-fixes; urgency=medium

+  * Initial release. (Closes: #XXXXXX)
   * Dummy build.

  -- Jelmer Vernooij  Sat, 05 Sep 2020 12:35:04 -0000
-
-janitor (0.1) UNRELEASED; urgency=medium
-
-  * Initial release. (Closes: #XXXXXX)
-
- -- Jelmer Vernooij  Sat, 04 Apr 2020 14:12:13 +0000
 """,
             "debian/changelog",
         )
@@ -141,17 +131,12 @@ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium
         )
         self.assertFileEqual(
             """\
-janitor (0.1-1jan+some2) some-fixes; urgency=low
+janitor (0.1-1jan+some2) some-fixes; urgency=medium

+  * Initial release. (Closes: #XXXXXX)
   * Dummy build.

  -- Jelmer Vernooij  Sat, 05 Sep 2020 12:35:04 -0000
-
-janitor (0.1-1jan+some1) UNRELEASED; urgency=medium
-
-  * Initial release. (Closes: #XXXXXX)
-
- -- Jelmer Vernooij  Sat, 04 Apr 2020 14:12:13 +0000
 """,
             "debian/changelog",
         )
diff --git a/setup.py b/setup.py
index d3676b2..86ecf2e 100755
--- a/setup.py
+++ b/setup.py
@@ -6,12 +6,12 @@ from setuptools import setup

 setup(name="ognibuild",
       description="Detect and run any build system",
-      version="0.0.3",
+      version="0.0.4",
       maintainer="Jelmer Vernooij",
       maintainer_email="jelmer@jelmer.uk",
       license="GNU GPLv2 or later",
       url="https://jelmer.uk/code/ognibuild",
-      packages=['ognibuild'],
+      packages=['ognibuild', 'ognibuild.tests', 'ognibuild.debian', 'ognibuild.resolver', 'ognibuild.session'],
       classifiers=[
           'Development Status :: 4 - Beta',
           'License :: OSI Approved :: '