New upstream release.

Jelmer Vernooij 2021-03-02 17:56:20 +00:00
commit f367193566
23 changed files with 1145 additions and 535 deletions

debian/changelog (vendored, 6 changed lines)

@ -1,3 +1,9 @@
ognibuild (0.0.2-1) UNRELEASED; urgency=medium
* New upstream release.
-- Jelmer Vernooij <jelmer@debian.org> Tue, 02 Mar 2021 17:55:58 +0000
ognibuild (0.0.1~git20210228.bc79314-1) unstable; urgency=medium
* New upstream snapshot.


@ -1,11 +1,11 @@
Upstream requirements are expressed as objects derived from UpstreamRequirement.
Upstream requirements are expressed as objects derived from Requirement.
They can either be:
* extracted from the build system
* extracted from errors in build logs
The details of UpstreamRequirements are specific to the kind of requirement,
The details of Requirements are specific to the kind of requirement,
and otherwise opaque to ognibuild.
When building a package, we first make sure that all declared upstream
@ -21,10 +21,10 @@ like e.g. upgrade configure.ac to a newer version, or invoke autoreconf.
A list of possible fixers can be provided. Each fixer will be called
(in order) until one of them claims to have fixed the issue.
Problems can be converted to UpstreamRequirements by UpstreamRequirementFixer
Problems can be converted to Requirements by RequirementFixer
UpstreamRequirementFixer uses a UpstreamRequirementResolver object that
can translate UpstreamRequirement objects into apt package names or
InstallFixer uses a Resolver object that
can translate Requirement objects into apt package names or
e.g. cpan commands.
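
To make this flow concrete, here is a minimal, self-contained sketch of the requirement → resolver → fixer loop. All class names and command strings below are illustrative stand-ins, not the actual ognibuild API:

```python
# Minimal illustrative sketch only; hypothetical stand-ins, not the real
# ognibuild classes.
class Requirement:
    def __init__(self, family, name):
        self.family = family
        self.name = name


class Resolver:
    """Translates Requirement objects into install commands (apt, cpan, ...)."""

    def explain(self, req):
        if req.family == "perl-module":
            return ["cpan", req.name]
        return ["apt", "install", req.name]


class InstallFixer:
    def __init__(self, resolver):
        self.resolver = resolver

    def fix(self, req):
        # A real fixer would run the command in a session; here we only report it.
        print("would run:", " ".join(self.resolver.explain(req)))
        return True


# Fixers are tried in order until one of them claims to have fixed the issue.
fixers = [InstallFixer(Resolver())]
requirement = Requirement("perl-module", "JSON::XS")
for fixer in fixers:
    if fixer.fix(requirement):
        break
```

Swapping in a different resolver (one that emits cpan or pip commands instead of apt) changes how a requirement is satisfied without touching the fixer itself.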
ognibuild keeps finding problems, resolving them and rebuilding until it finds
@ -38,14 +38,14 @@ on the host machine.
For e.g. PerlModuleRequirement, need to be able to:
* install from apt package
+ DebianInstallFixer(AptResolver()).fix(problem)
+ InstallFixer(AptResolver()).fix(problem)
* update debian package (source, runtime, test) deps to include apt package
+ DebianPackageDepFixer(AptResolver()).fix(problem, ('test', 'foo'))
* suggest command to run to install from apt package
+ DebianInstallFixer(AptResolver()).command(problem)
+ InstallFixer(AptResolver()).command(problem)
* install from cpan
+ CpanInstallFixer().fix(problem)
+ InstallFixer(CpanResolver()).fix(problem)
* suggest command to run to install from cpan package
+ CpanInstallFixer().command(problem)
+ InstallFixer(CpanResolver()).command(problem)
* update source package reqs to depend on perl module
+ PerlDepFixer().fix(problem)

notes/concepts.md (new file, 49 lines)

@ -0,0 +1,49 @@
Requirement
===========
Some sort of constraint about the environment that can be specified and satisfied.
Examples:
* a dependency on version 1.3 of the python package "foo"
* a dependency on the apt package "blah"
Requirements can be discovered from build system metadata files and from build logs.
Different kinds of requirements are subclassed from the main Requirement class.
Output
======
A build artifact that can be produced by a build system, e.g. an
executable file or a Perl module.
Problem
=======
An issue found in a build log by buildlog-consultant.
BuildFixer
==========
Takes a build problem and tries to resolve it in some way.
This can mean changing the project that's being built
(by modifying the source tree), or changing the environment
(e.g. by installing packages from apt).
Common fixers:
+ InstallFixer([(resolver, repository)])
+ DebianDependencyFixer(tree, resolver)
Repository
==========
Some sort of provider of external requirements. Can satisfy environment
requirements.
Resolver
========
Can take one kind of upstream requirement and turn it into another. E.g.
converting missing Python modules to apt or pypi packages.
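
As a rough illustration of how these concepts relate, the sketch below stubs them out in Python. It is a hypothetical outline, not the actual class hierarchy in ognibuild or buildlog-consultant:

```python
# Hypothetical outline of the concepts above; not the real ognibuild API.
class Requirement:
    """A constraint about the environment that can be specified and satisfied."""

    family = "abstract"


class PythonPackageRequirement(Requirement):
    family = "python-package"

    def __init__(self, package, minimum_version=None):
        self.package = package
        self.minimum_version = minimum_version


class Output:
    """A build artifact that a build system can produce, e.g. a binary."""


class Problem:
    """An issue found in a build log (in practice by buildlog-consultant)."""


class BuildFixer:
    """Takes a build problem and tries to resolve it in some way."""

    def can_fix(self, problem):
        raise NotImplementedError(self.can_fix)

    def fix(self, problem):
        raise NotImplementedError(self.fix)


class Resolver:
    """Turns one kind of requirement into another, e.g. a missing Python
    module into an apt or pypi package name."""

    def resolve(self, requirement):
        raise NotImplementedError(self.resolve)
```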


@ -21,7 +21,6 @@ import stat
class DetailedFailure(Exception):
def __init__(self, retcode, argv, error):
self.retcode = retcode
self.argv = argv
@ -29,7 +28,6 @@ class DetailedFailure(Exception):
class UnidentifiedError(Exception):
def __init__(self, retcode, argv, lines, secondary=None):
self.retcode = retcode
self.argv = argv
@ -50,7 +48,7 @@ def shebang_binary(p):
return os.path.basename(args[0].decode()).strip()
class UpstreamRequirement(object):
class Requirement(object):
# Name of the family of requirements - e.g. "python-package"
family: str
@ -63,10 +61,8 @@ class UpstreamRequirement(object):
class UpstreamOutput(object):
def __init__(self, family, name):
def __init__(self, family):
self.family = family
self.name = name
def __repr__(self):
return "%s(%r, %r)" % (type(self).__name__, self.family, self.name)
def get_declared_dependencies(self):
raise NotImplementedError(self.get_declared_dependencies)


@ -17,8 +17,10 @@
import logging
import os
import shlex
import sys
from . import UnidentifiedError
from . import UnidentifiedError, DetailedFailure
from .buildlog import InstallFixer, ExplainInstallFixer, ExplainInstall
from .buildsystem import NoBuildToolsFound, detect_buildsystems
from .resolver import (
auto_resolver,
@ -28,6 +30,15 @@ from .resolver import (
from .resolver.apt import AptResolver
def display_explain_commands(commands):
logging.info("Run one or more of the following commands:")
for command, reqs in commands:
if isinstance(command, list):
command = shlex.join(command)
logging.info(
' %s (to install %s)', command, ', '.join(map(str, reqs)))
def get_necessary_declared_requirements(resolver, requirements, stages):
missing = []
for stage, req in requirements:
@ -36,35 +47,57 @@ def get_necessary_declared_requirements(resolver, requirements, stages):
return missing
def install_necessary_declared_requirements(resolver, buildsystem, stages):
missing = []
try:
declared_reqs = buildsystem.get_declared_dependencies()
except NotImplementedError:
logging.warning(
'Unable to determine declared dependencies from %s', buildsystem)
else:
missing.extend(
get_necessary_declared_requirements(
resolver, declared_reqs, stages
def install_necessary_declared_requirements(session, resolver, buildsystems, stages, explain=False):
relevant = []
declared_reqs = []
for buildsystem in buildsystems:
try:
declared_reqs.extend(buildsystem.get_declared_dependencies())
except NotImplementedError:
logging.warning(
"Unable to determine declared dependencies from %r", buildsystem
)
)
resolver.install(missing)
relevant.extend(
get_necessary_declared_requirements(resolver, declared_reqs, stages)
)
missing = []
for req in relevant:
try:
if not req.met(session):
missing.append(req)
except NotImplementedError:
missing.append(req)
if missing:
if explain:
commands = resolver.explain(missing)
if not commands:
raise UnsatisfiedRequirements(missing)
raise ExplainInstall(commands)
else:
resolver.install(missing)
# Types of dependencies:
# - core: necessary to do anything with the package
# - build: necessary to build the package
# - test: necessary to run the tests
# - dev: necessary for development (e.g. linters, yacc)
STAGE_MAP = {
"dist": [],
"info": [],
"install": ["build"],
"test": ["test", "dev"],
"build": ["build"],
"install": ["core", "build"],
"test": ["test", "build", "core"],
"build": ["build", "core"],
"clean": [],
}
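
As a small, hypothetical illustration of how this mapping is used: a subcommand selects its stages, and only declared dependencies belonging to those stages are checked. The `declared` list and helper below are made up for the example:

```python
# Illustrative only; mirrors the mapping above with made-up declared deps.
STAGE_MAP = {
    "dist": [],
    "info": [],
    "install": ["core", "build"],
    "test": ["test", "build", "core"],
    "build": ["build", "core"],
    "clean": [],
}

# Hypothetical declared dependencies as (stage, name) pairs.
declared = [("core", "setuptools"), ("build", "cython"), ("test", "pytest")]


def requirements_for(subcommand):
    # Keep only requirements whose stage is relevant to this subcommand.
    stages = STAGE_MAP[subcommand]
    return [name for stage, name in declared if stage in stages]


print(requirements_for("build"))  # ['setuptools', 'cython']
print(requirements_for("test"))   # ['setuptools', 'cython', 'pytest']
```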
def determine_fixers(session, resolver):
from .buildlog import UpstreamRequirementFixer
from .resolver.apt import AptResolver
return [UpstreamRequirementFixer(resolver)]
def determine_fixers(session, resolver, explain=False):
if explain:
return [ExplainInstallFixer(resolver)]
else:
return [InstallFixer(resolver)]
def main(): # noqa: C901
@ -83,36 +116,35 @@ def main(): # noqa: C901
)
parser.add_argument(
"--explain",
action='store_true',
help="Explain what needs to be done rather than making changes")
action="store_true",
help="Explain what needs to be done rather than making changes",
)
parser.add_argument(
"--ignore-declared-dependencies",
"--optimistic",
action="store_true",
help="Ignore declared dependencies, follow build errors only",
)
parser.add_argument(
"--verbose",
action="store_true",
help="Be verbose")
subparsers = parser.add_subparsers(dest='subcommand')
subparsers.add_parser('dist')
subparsers.add_parser('build')
subparsers.add_parser('clean')
subparsers.add_parser('test')
subparsers.add_parser('info')
install_parser = subparsers.add_parser('install')
parser.add_argument("--verbose", action="store_true", help="Be verbose")
subparsers = parser.add_subparsers(dest="subcommand")
subparsers.add_parser("dist")
subparsers.add_parser("build")
subparsers.add_parser("clean")
subparsers.add_parser("test")
subparsers.add_parser("info")
install_parser = subparsers.add_parser("install")
install_parser.add_argument(
'--user', action='store_true', help='Install in local-user directories.')
"--user", action="store_true", help="Install in local-user directories."
)
args = parser.parse_args()
if not args.subcommand:
parser.print_usage()
return 1
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
else:
logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.INFO, format="%(message)s")
if args.schroot:
from .session.schroot import SchrootSession
@ -128,45 +160,58 @@ def main(): # noqa: C901
resolver = native_resolvers(session)
elif args.resolve == "auto":
resolver = auto_resolver(session)
logging.info('Using requirement resolver: %s', resolver)
logging.info("Using requirement resolver: %s", resolver)
os.chdir(args.directory)
try:
bss = list(detect_buildsystems(args.directory))
logging.info('Detected buildsystems: %r', bss)
if not args.ignore_declared_dependencies and not args.explain:
logging.info(
"Detected buildsystems: %s", ', '.join(map(str, bss)))
if not args.ignore_declared_dependencies:
stages = STAGE_MAP[args.subcommand]
if stages:
for bs in bss:
install_necessary_declared_requirements(resolver, bs, stages)
fixers = determine_fixers(session, resolver)
logging.info("Checking that declared requirements are present")
try:
install_necessary_declared_requirements(
session, resolver, bss, stages, explain=args.explain)
except ExplainInstall as e:
display_explain_commands(e.commands)
return 1
fixers = determine_fixers(session, resolver, explain=args.explain)
if args.subcommand == "dist":
from .dist import run_dist
run_dist(
session=session, buildsystems=bss, resolver=resolver,
fixers=fixers)
session=session, buildsystems=bss, resolver=resolver, fixers=fixers
)
if args.subcommand == "build":
from .build import run_build
run_build(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
run_build(session, buildsystems=bss, resolver=resolver, fixers=fixers)
if args.subcommand == "clean":
from .clean import run_clean
run_clean(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
run_clean(session, buildsystems=bss, resolver=resolver, fixers=fixers)
if args.subcommand == "install":
from .install import run_install
run_install(
session, buildsystems=bss, resolver=resolver,
fixers=fixers, user=args.user)
session,
buildsystems=bss,
resolver=resolver,
fixers=fixers,
user=args.user,
)
if args.subcommand == "test":
from .test import run_test
run_test(session, buildsystems=bss, resolver=resolver,
fixers=fixers)
run_test(session, buildsystems=bss, resolver=resolver, fixers=fixers)
if args.subcommand == "info":
from .info import run_info
run_info(session, buildsystems=bss)
except UnidentifiedError:
except ExplainInstall as e:
display_explain_commands(e.commands)
except (UnidentifiedError, DetailedFailure):
return 1
except NoBuildToolsFound:
logging.info("No build tools found.")


@ -21,7 +21,6 @@
import logging
from buildlog_consultant.common import (
MissingConfigStatusInput,
MissingPythonModule,
MissingPythonDistribution,
MissingCHeader,
@ -41,15 +40,12 @@ from buildlog_consultant.common import (
MissingLibrary,
MissingJavaClass,
MissingCSharpCompiler,
MissingConfigure,
MissingAutomakeInput,
MissingRPackage,
MissingRubyFile,
MissingAutoconfMacro,
MissingValaPackage,
MissingXfceDependency,
MissingHaskellDependencies,
NeedPgBuildExtUpdateControl,
DhAddonLoadFailure,
MissingMavenArtifacts,
GnomeCommonMissing,
@ -84,17 +80,17 @@ from .requirements import (
AutoconfMacroRequirement,
PythonModuleRequirement,
PythonPackageRequirement,
)
)
from .resolver import UnsatisfiedRequirements
def problem_to_upstream_requirement(problem):
def problem_to_upstream_requirement(problem): # noqa: C901
if isinstance(problem, MissingFile):
return PathRequirement(problem.path)
elif isinstance(problem, MissingCommand):
return BinaryRequirement(problem.command)
elif isinstance(problem, MissingPkgConfig):
return PkgConfigRequirement(
problem.module, problem.minimum_version)
return PkgConfigRequirement(problem.module, problem.minimum_version)
elif isinstance(problem, MissingCHeader):
return CHeaderRequirement(problem.header)
elif isinstance(problem, MissingJavaScriptRuntime):
@ -124,37 +120,33 @@ def problem_to_upstream_requirement(problem):
elif isinstance(problem, MissingJavaClass):
return JavaClassRequirement(problem.classname)
elif isinstance(problem, MissingHaskellDependencies):
return [HaskellPackageRequirement(dep) for dep in problem.deps]
return [HaskellPackageRequirement.from_string(dep) for dep in problem.deps]
elif isinstance(problem, MissingMavenArtifacts):
return [MavenArtifactRequirement(artifact)
for artifact in problem.artifacts]
return [MavenArtifactRequirement(artifact) for artifact in problem.artifacts]
elif isinstance(problem, MissingCSharpCompiler):
return BinaryRequirement('msc')
return BinaryRequirement("msc")
elif isinstance(problem, GnomeCommonMissing):
return GnomeCommonRequirement()
elif isinstance(problem, MissingJDKFile):
return JDKFileRequirement(problem.jdk_path, problem.filename)
elif isinstance(problem, MissingGnomeCommonDependency):
if problem.package == "glib-gettext":
return BinaryRequirement('glib-gettextize')
return BinaryRequirement("glib-gettextize")
else:
logging.warning(
"No known command for gnome-common dependency %s",
problem.package)
"No known command for gnome-common dependency %s", problem.package
)
return None
elif isinstance(problem, MissingXfceDependency):
if problem.package == "gtk-doc":
return BinaryRequirement("gtkdocize")
else:
logging.warning(
"No known command for xfce dependency %s",
problem.package)
logging.warning("No known command for xfce dependency %s", problem.package)
return None
elif isinstance(problem, MissingPerlModule):
return PerlModuleRequirement(
module=problem.module,
filename=problem.filename,
inc=problem.inc)
module=problem.module, filename=problem.filename, inc=problem.inc
)
elif isinstance(problem, MissingPerlFile):
return PerlFileRequirement(filename=problem.filename)
elif isinstance(problem, MissingAutoconfMacro):
@ -163,18 +155,19 @@ def problem_to_upstream_requirement(problem):
return PythonModuleRequirement(
problem.module,
python_version=problem.python_version,
minimum_version=problem.minimum_version)
minimum_version=problem.minimum_version,
)
elif isinstance(problem, MissingPythonDistribution):
return PythonPackageRequirement(
problem.module,
python_version=problem.python_version,
minimum_version=problem.minimum_version)
minimum_version=problem.minimum_version,
)
else:
return None
class UpstreamRequirementFixer(BuildFixer):
class InstallFixer(BuildFixer):
def __init__(self, resolver):
self.resolver = resolver
@ -196,11 +189,42 @@ class UpstreamRequirementFixer(BuildFixer):
if not isinstance(reqs, list):
reqs = [reqs]
changed = False
for req in reqs:
package = self.resolver.resolve(req)
if package is None:
return False
if context.add_dependency(package):
changed = True
return changed
try:
self.resolver.install(reqs)
except UnsatisfiedRequirements:
return False
return True
class ExplainInstall(Exception):
def __init__(self, commands):
self.commands = commands
class ExplainInstallFixer(BuildFixer):
def __init__(self, resolver):
self.resolver = resolver
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.resolver)
def __str__(self):
return "upstream requirement install explainer(%s)" % self.resolver
def can_fix(self, error):
req = problem_to_upstream_requirement(error)
return req is not None
def fix(self, error, context):
reqs = problem_to_upstream_requirement(error)
if reqs is None:
return False
if not isinstance(reqs, list):
reqs = [reqs]
explanations = list(self.resolver.explain(reqs))
if not explanations:
return False
raise ExplainInstall(explanations)


@ -23,14 +23,18 @@ import re
from typing import Optional
import warnings
from . import shebang_binary, UpstreamOutput, UnidentifiedError
from . import shebang_binary, UnidentifiedError
from .outputs import (
BinaryOutput,
PythonPackageOutput,
)
from .requirements import (
BinaryRequirement,
PythonPackageRequirement,
PerlModuleRequirement,
NodePackageRequirement,
CargoCrateRequirement,
)
)
from .fix_build import run_with_build_fixers
@ -54,7 +58,7 @@ class BuildSystem(object):
def __str__(self):
return self.name
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
raise NotImplementedError(self.dist)
def test(self, session, resolver, fixers):
@ -86,7 +90,7 @@ class Pear(BuildSystem):
def setup(self, resolver):
resolver.install([BinaryRequirement("pear")])
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(resolver)
run_with_build_fixers(session, ["pear", "package"], fixers)
@ -107,14 +111,61 @@ class Pear(BuildSystem):
run_with_build_fixers(session, ["pear", "install", self.path], fixers)
# run_setup, but setting __name__
# Imported from Python's distutils.core, Copyright (C) PSF
def run_setup(script_name, script_args=None, stop_after="run"):
from distutils import core
import sys
if stop_after not in ("init", "config", "commandline", "run"):
raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))
core._setup_stop_after = stop_after
save_argv = sys.argv.copy()
g = {"__file__": script_name, "__name__": "__main__"}
try:
try:
sys.argv[0] = script_name
if script_args is not None:
sys.argv[1:] = script_args
with open(script_name, "rb") as f:
exec(f.read(), g)
finally:
sys.argv = save_argv
core._setup_stop_after = None
except SystemExit:
# Hmm, should we do something if exiting with a non-zero code
# (ie. error)?
pass
if core._setup_distribution is None:
raise RuntimeError(
(
"'distutils.core.setup()' was never called -- "
"perhaps '%s' is not a Distutils setup script?"
)
% script_name
)
return core._setup_distribution
class SetupPy(BuildSystem):
name = "setup.py"
def __init__(self, path):
self.path = path
from distutils.core import run_setup
self.result = run_setup(os.path.abspath(path), stop_after="init")
# TODO(jelmer): Perhaps run this in session, so we can install
# missing dependencies?
try:
self.result = run_setup(os.path.abspath(path), stop_after="init")
except RuntimeError as e:
logging.warning("Unable to load setup.py metadata: %s", e)
self.result = None
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.path)
@ -137,9 +188,9 @@ class SetupPy(BuildSystem):
logging.debug("Reference to setuptools-scm found, installing.")
resolver.install(
[
PythonPackageRequirement("setuptools-scm"),
PythonPackageRequirement("setuptools_scm"),
BinaryRequirement("git"),
BinaryRequirement("mercurial"),
BinaryRequirement("hg"),
]
)
@ -153,9 +204,12 @@ class SetupPy(BuildSystem):
self.setup(resolver)
self._run_setup(session, resolver, ["build"], fixers)
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(resolver)
self._run_setup(session, resolver, ["sdist"], fixers)
preargs = []
if quiet:
preargs.append("--quiet")
self._run_setup(session, resolver, preargs + ["sdist"], fixers)
def clean(self, session, resolver, fixers):
self.setup(resolver)
@ -165,7 +219,7 @@ class SetupPy(BuildSystem):
self.setup(resolver)
extra_args = []
if install_target.user:
extra_args.append('--user')
extra_args.append("--user")
self._run_setup(session, resolver, ["install"] + extra_args, fixers)
def _run_setup(self, session, resolver, args, fixers):
@ -176,30 +230,85 @@ class SetupPy(BuildSystem):
else:
# Just assume it's Python 3
resolver.install([BinaryRequirement("python3")])
run_with_build_fixers(
session, ["python3", "./setup.py"] + args,
fixers)
run_with_build_fixers(session, ["python3", "./setup.py"] + args, fixers)
def get_declared_dependencies(self):
if self.result is None:
raise NotImplementedError
for require in self.result.get_requires():
yield "build", PythonPackageRequirement(require)
yield "core", PythonPackageRequirement.from_requirement_str(require)
# Not present for distutils-only packages
if getattr(self.result, 'install_requires', []):
if getattr(self.result, "install_requires", []):
for require in self.result.install_requires:
yield "install", PythonPackageRequirement(require)
yield "core", PythonPackageRequirement.from_requirement_str(require)
# Not present for distutils-only packages
if getattr(self.result, 'tests_require', []):
if getattr(self.result, "tests_require", []):
for require in self.result.tests_require:
yield "test", PythonPackageRequirement(require)
yield "test", PythonPackageRequirement.from_requirement_str(require)
def get_declared_outputs(self):
if self.result is None:
raise NotImplementedError
for script in self.result.scripts or []:
yield UpstreamOutput("binary", os.path.basename(script))
entry_points = getattr(self.result, 'entry_points', None) or {}
yield BinaryOutput(os.path.basename(script))
entry_points = getattr(self.result, "entry_points", None) or {}
for script in entry_points.get("console_scripts", []):
yield UpstreamOutput("binary", script.split("=")[0])
yield BinaryOutput(script.split("=")[0])
for package in self.result.packages or []:
yield UpstreamOutput("python3", package)
yield PythonPackageOutput(package, python_version="cpython3")
class Gradle(BuildSystem):
name = "gradle"
def __init__(self, path):
self.path = path
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.path)
def clean(self, session, resolver, fixers):
run_with_build_fixers(session, ["gradle", "clean"], fixers)
def build(self, session, resolver, fixers):
run_with_build_fixers(session, ["gradle", "build"], fixers)
def test(self, session, resolver, fixers):
run_with_build_fixers(session, ["gradle", "test"], fixers)
class Meson(BuildSystem):
name = "meson"
def __init__(self, path):
self.path = path
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.path)
def _setup(self, session, fixers):
if session.exists("build"):
return
session.mkdir("build")
run_with_build_fixers(session, ["meson", "setup", "build"], fixers)
def clean(self, session, resolver, fixers):
self._setup(session, fixers)
run_with_build_fixers(session, ["ninja", "-C", "build", "clean"], fixers)
def build(self, session, resolver, fixers):
self._setup(session, fixers)
run_with_build_fixers(session, ["ninja", "-C", "build"], fixers)
def test(self, session, resolver, fixers):
self._setup(session, fixers)
run_with_build_fixers(session, ["ninja", "-C", "build", "test"], fixers)
def install(self, session, resolver, fixers, install_target):
self._setup(session, fixers)
run_with_build_fixers(session, ["ninja", "-C", "build", "install"], fixers)
class PyProject(BuildSystem):
@ -210,17 +319,19 @@ class PyProject(BuildSystem):
self.path = path
self.pyproject = self.load_toml()
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.path)
def load_toml(self):
import toml
with open(self.path, "r") as pf:
return toml.load(pf)
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
if "poetry" in self.pyproject.get("tool", []):
logging.debug(
"Found pyproject.toml with poetry section, "
"assuming poetry project."
"Found pyproject.toml with poetry section, " "assuming poetry project."
)
resolver.install(
[
@ -247,7 +358,7 @@ class SetupCfg(BuildSystem):
]
)
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(resolver)
session.check_call(["python3", "-m", "pep517.build", "-s", "."])
@ -271,7 +382,7 @@ class Npm(BuildSystem):
def setup(self, resolver):
resolver.install([BinaryRequirement("npm")])
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(resolver)
run_with_build_fixers(session, ["npm", "pack"], fixers)
@ -286,7 +397,7 @@ class Waf(BuildSystem):
def setup(self, session, resolver, fixers):
resolver.install([BinaryRequirement("python3")])
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(session, resolver, fixers)
run_with_build_fixers(session, ["./waf", "dist"], fixers)
@ -305,7 +416,7 @@ class Gem(BuildSystem):
def setup(self, resolver):
resolver.install([BinaryRequirement("gem2deb")])
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(resolver)
gemfiles = [
entry.name for entry in session.scandir(".") if entry.name.endswith(".gem")
@ -330,8 +441,7 @@ class DistInkt(BuildSystem):
continue
if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"):
logging.debug(
"Found Dist::Inkt section in dist.ini, "
"assuming distinkt."
"Found Dist::Inkt section in dist.ini, " "assuming distinkt."
)
self.name = "dist-inkt"
self.dist_inkt_class = value.decode().strip("'")
@ -345,7 +455,7 @@ class DistInkt(BuildSystem):
]
)
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(resolver)
if self.name == "dist-inkt":
resolver.install([PerlModuleRequirement(self.dist_inkt_class)])
@ -353,8 +463,7 @@ class DistInkt(BuildSystem):
else:
# Default to invoking Dist::Zilla
resolver.install([PerlModuleRequirement("Dist::Zilla")])
run_with_build_fixers(
session, ["dzil", "build", "--in", ".."], fixers)
run_with_build_fixers(session, ["dzil", "build", "--in", ".."], fixers)
class Make(BuildSystem):
@ -367,27 +476,28 @@ class Make(BuildSystem):
def setup(self, session, resolver, fixers):
resolver.install([BinaryRequirement("make")])
if session.exists("Makefile.PL") and not session.exists("Makefile"):
def makefile_exists():
return any(
[session.exists(p) for p in ["Makefile", "GNUmakefile", "makefile"]]
)
if session.exists("Makefile.PL") and not makefile_exists():
resolver.install([BinaryRequirement("perl")])
run_with_build_fixers(session, ["perl", "Makefile.PL"], fixers)
if not session.exists("Makefile") and not session.exists("configure"):
if not makefile_exists() and not session.exists("configure"):
if session.exists("autogen.sh"):
if shebang_binary("autogen.sh") is None:
run_with_build_fixers(
session, ["/bin/sh", "./autogen.sh"], fixers)
run_with_build_fixers(session, ["/bin/sh", "./autogen.sh"], fixers)
try:
run_with_build_fixers(
session, ["./autogen.sh"], fixers)
run_with_build_fixers(session, ["./autogen.sh"], fixers)
except UnidentifiedError as e:
if (
"Gnulib not yet bootstrapped; "
"run ./bootstrap instead.\n" in e.lines
):
run_with_build_fixers(
session, ["./bootstrap"], fixers)
run_with_build_fixers(
session, ["./autogen.sh"], fixers)
run_with_build_fixers(session, ["./bootstrap"], fixers)
run_with_build_fixers(session, ["./autogen.sh"], fixers)
else:
raise
@ -402,13 +512,17 @@ class Make(BuildSystem):
)
run_with_build_fixers(session, ["autoreconf", "-i"], fixers)
if not session.exists("Makefile") and session.exists("configure"):
session.check_call(["./configure"])
if not makefile_exists() and session.exists("configure"):
run_with_build_fixers(session, ["./configure"], fixers)
def build(self, session, resolver, fixers):
self.setup(session, resolver, fixers)
run_with_build_fixers(session, ["make", "all"], fixers)
def clean(self, session, resolver, fixers):
self.setup(session, resolver, fixers)
run_with_build_fixers(session, ["make", "clean"], fixers)
def test(self, session, resolver, fixers):
self.setup(session, resolver, fixers)
run_with_build_fixers(session, ["make", "check"], fixers)
@ -417,7 +531,7 @@ class Make(BuildSystem):
self.setup(session, resolver, fixers)
run_with_build_fixers(session, ["make", "install"], fixers)
def dist(self, session, resolver, fixers):
def dist(self, session, resolver, fixers, quiet=False):
self.setup(session, resolver, fixers)
try:
run_with_build_fixers(session, ["make", "dist"], fixers)
@ -444,7 +558,8 @@ class Make(BuildSystem):
elif any(
[
re.match(
r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' "
r"(Makefile|GNUmakefile|makefile):[0-9]+: "
r"\*\*\* Missing \'Make.inc\' "
r"Run \'./configure \[options\]\' and retry. Stop.\n",
line,
)
@ -486,15 +601,22 @@ class Make(BuildSystem):
return
for require in data.get("requires", []):
yield "build", PerlModuleRequirement(require)
else:
raise NotImplementedError
class Cargo(BuildSystem):
name = "cargo"
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.path)
def __init__(self, path):
from toml.decoder import load
self.path = path
with open(path, "r") as f:
self.cargo = load(f)
@ -507,12 +629,33 @@ class Cargo(BuildSystem):
def test(self, session, resolver, fixers):
run_with_build_fixers(session, ["cargo", "test"], fixers)
def clean(self, session, resolver, fixers):
run_with_build_fixers(session, ["cargo", "clean"], fixers)
def build(self, session, resolver, fixers):
run_with_build_fixers(session, ["cargo", "build"], fixers)
class Golang(BuildSystem):
"""Go builds."""
name = "golang"
def __repr__(self):
return "%s()" % (type(self).__name__)
def test(self, session, resolver, fixers):
run_with_build_fixers(session, ["go", "test"], fixers)
def build(self, session, resolver, fixers):
run_with_build_fixers(session, ["go", "build"], fixers)
def install(self, session, resolver, fixers):
run_with_build_fixers(session, ["go", "install"], fixers)
def clean(self, session, resolver, fixers):
session.check_call(["go", "clean"])
class Maven(BuildSystem):
@ -534,20 +677,25 @@ class Cabal(BuildSystem):
def _run(self, session, args, fixers):
try:
run_with_build_fixers(
session, ["runhaskell", "Setup.hs"] + args, fixers)
run_with_build_fixers(session, ["runhaskell", "Setup.hs"] + args, fixers)
except UnidentifiedError as e:
if "Run the 'configure' command first.\n" in e.lines:
run_with_build_fixers(
session, ["runhaskell", "Setup.hs", "configure"], fixers)
session, ["runhaskell", "Setup.hs", "configure"], fixers
)
run_with_build_fixers(
session, ["runhaskell", "Setup.hs"] + args, fixers)
session, ["runhaskell", "Setup.hs"] + args, fixers
)
else:
raise
def test(self, session, resolver, fixers):
self._run(session, ["test"], fixers)
def dist(self, session, resolver, fixers, quiet=False):
self._run(session, ["sdist"], fixers)
def detect_buildsystems(path, trust_package=False): # noqa: C901
"""Detect build systems."""
if os.path.exists(os.path.join(path, "package.xml")):
@ -576,9 +724,17 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901
logging.debug("Found Cargo.toml, assuming rust cargo package.")
yield Cargo("Cargo.toml")
if os.path.exists(os.path.join(path, 'Setup.hs')):
if os.path.exists(os.path.join(path, "build.gradle")):
logging.debug("Found build.gradle, assuming gradle package.")
yield Gradle("build.gradle")
if os.path.exists(os.path.join(path, "meson.build")):
logging.debug("Found meson.build, assuming meson package.")
yield Meson("meson.build")
if os.path.exists(os.path.join(path, "Setup.hs")):
logging.debug("Found Setup.hs, assuming haskell package.")
yield Cabal('Setup.hs')
yield Cabal("Setup.hs")
if os.path.exists(os.path.join(path, "pom.xml")):
logging.debug("Found pom.xml, assuming maven package.")
@ -598,7 +754,10 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901
os.path.exists(os.path.join(path, p))
for p in [
"Makefile",
"GNUmakefile",
"makefile",
"Makefile.PL",
"CMakeLists.txt",
"autogen.sh",
"configure.ac",
"configure.in",
@ -607,6 +766,7 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901
):
yield Make()
seen_golang = False
if os.path.exists(os.path.join(path, ".travis.yml")):
import ruamel.yaml.reader
@ -619,11 +779,13 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901
language = data.get("language")
if language == "go":
yield Golang()
seen_golang = True
for entry in os.scandir(path):
if entry.name.endswith(".go"):
yield Golang()
break
if not seen_golang:
for entry in os.scandir(path):
if entry.name.endswith(".go"):
yield Golang()
break
def get_buildsystem(path, trust_package=False):


@ -36,5 +36,6 @@ def satisfy_build_deps(session: Session, tree):
pass
deps = [dep.strip().strip(",") for dep in deps]
from .apt import AptManager
apt = AptManager(session)
apt.satisfy(deps)


@ -24,7 +24,6 @@ import os
from buildlog_consultant.apt import (
find_apt_get_failure,
)
from debian.deb822 import Release
from .. import DetailedFailure, UnidentifiedError
from ..session import Session, run_with_tee
@ -39,11 +38,9 @@ def run_apt(session: Session, args: List[str]) -> None:
match, error = find_apt_get_failure(lines)
if error is not None:
raise DetailedFailure(retcode, args, error)
if match is not None:
raise UnidentifiedError(retcode, args, lines, secondary=(match.lineno, match.line))
while lines and lines[-1] == "":
lines.pop(-1)
raise UnidentifiedError(retcode, args, lines)
raise UnidentifiedError(retcode, args, lines, secondary=match)
class FileSearcher(object):
@ -64,18 +61,20 @@ class AptManager(object):
def searchers(self):
if self._searchers is None:
self._searchers = [
RemoteAptContentsFileSearcher.from_session(self.session),
GENERATED_FILE_SEARCHER]
AptContentsFileSearcher.from_session(self.session),
GENERATED_FILE_SEARCHER,
]
return self._searchers
def package_exists(self, package):
if self._apt_cache is None:
import apt
self._apt_cache = apt.Cache(rootdir=self.session.location)
return package in self._apt_cache
def get_package_for_paths(self, paths, regex=False):
logging.debug('Searching for packages containing %r', paths)
logging.debug("Searching for packages containing %r", paths)
# TODO(jelmer): Make sure we use whatever is configured in self.session
return get_package_for_paths(paths, self.searchers(), regex=regex)
@ -84,6 +83,7 @@ class AptManager(object):
status_path = os.path.join(root, "var/lib/dpkg/status")
missing = set(packages)
import apt_pkg
with apt_pkg.TagFile(status_path) as tagf:
while missing:
tagf.step()
@ -95,7 +95,7 @@ class AptManager(object):
return list(missing)
def install(self, packages: List[str]) -> None:
logging.info('Installing using apt: %r', packages)
logging.info("Installing using apt: %r", packages)
packages = self.missing(packages)
if packages:
run_apt(self.session, ["install"] + packages)
@ -108,22 +108,25 @@ class ContentsFileNotFound(Exception):
"""The contents file was not found."""
class RemoteAptContentsFileSearcher(FileSearcher):
class AptContentsFileSearcher(FileSearcher):
def __init__(self):
self._db = {}
@classmethod
def from_session(cls, session):
logging.info('Loading apt contents information')
logging.info("Loading apt contents information")
# TODO(jelmer): what about sources.list.d?
from aptsources.sourceslist import SourcesList
sl = SourcesList()
sl.load(os.path.join(session.location, 'etc/apt/sources.list'))
sl.load(os.path.join(session.location, "etc/apt/sources.list"))
return cls.from_sources_list(
sl,
cache_dirs=[
os.path.join(session.location, 'var/lib/apt/lists'),
'/var/lib/apt/lists'])
os.path.join(session.location, "var/lib/apt/lists"),
"/var/lib/apt/lists",
],
)
def __setitem__(self, path, package):
self._db[path] = package
@ -148,15 +151,17 @@ class RemoteAptContentsFileSearcher(FileSearcher):
@classmethod
def _load_cache_file(cls, url, cache_dir):
from urllib.parse import urlparse
parsed = urlparse(url)
p = os.path.join(
cache_dir,
parsed.hostname + parsed.path.replace('/', '_') + '.lz4')
cache_dir, parsed.hostname + parsed.path.replace("/", "_") + ".lz4"
)
if not os.path.exists(p):
return None
logging.debug('Loading cached contents file %s', p)
logging.debug("Loading cached contents file %s", p)
import lz4.frame
return lz4.frame.open(p, mode='rb')
return lz4.frame.open(p, mode="rb")
@classmethod
def from_urls(cls, urls, cache_dirs=None):
@ -170,39 +175,39 @@ class RemoteAptContentsFileSearcher(FileSearcher):
else:
if not mandatory and self._db:
logging.debug(
'Not attempting to fetch optional contents '
'file %s', url)
"Not attempting to fetch optional contents " "file %s", url
)
else:
logging.debug('Fetching contents file %s', url)
logging.debug("Fetching contents file %s", url)
try:
self.load_url(url)
except ContentsFileNotFound:
if mandatory:
logging.warning(
'Unable to fetch contents file %s', url)
logging.warning("Unable to fetch contents file %s", url)
else:
logging.debug(
'Unable to fetch optional contents file %s',
url)
"Unable to fetch optional contents file %s", url
)
return self
@classmethod
def from_sources_list(cls, sl, cache_dirs=None):
# TODO(jelmer): Use aptsources.sourceslist.SourcesList
from .build import get_build_architecture
# TODO(jelmer): Verify signatures, etc.
urls = []
arches = [(get_build_architecture(), True), ("all", False)]
for source in sl.list:
if source.invalid or source.disabled:
continue
if source.type == 'deb-src':
if source.type == "deb-src":
continue
if source.type != 'deb':
if source.type != "deb":
logging.warning("Invalid line in sources: %r", source)
continue
base_url = source.uri.rstrip('/')
name = source.dist.rstrip('/')
base_url = source.uri.rstrip("/")
name = source.dist.rstrip("/")
components = source.comps
if components:
dists_url = base_url + "/dists"
@ -212,12 +217,20 @@ class RemoteAptContentsFileSearcher(FileSearcher):
for component in components:
for arch, mandatory in arches:
urls.append(
("%s/%s/%s/Contents-%s" % (
dists_url, name, component, arch), mandatory))
(
"%s/%s/%s/Contents-%s"
% (dists_url, name, component, arch),
mandatory,
)
)
else:
for arch, mandatory in arches:
urls.append(
("%s/%s/Contents-%s" % (dists_url, name.rstrip('/'), arch), mandatory))
(
"%s/%s/Contents-%s" % (dists_url, name.rstrip("/"), arch),
mandatory,
)
)
return cls.from_urls(urls, cache_dirs=cache_dirs)
@staticmethod
@ -230,23 +243,24 @@ class RemoteAptContentsFileSearcher(FileSearcher):
def load_url(self, url, allow_cache=True):
from urllib.error import HTTPError
for ext in ['.xz', '.gz', '']:
for ext in [".xz", ".gz", ""]:
try:
response = self._get(url + ext)
except HTTPError as e:
if e.status == 404:
continue
continue
raise
break
else:
raise ContentsFileNotFound(url)
if ext == '.gz':
if ext == ".gz":
import gzip
f = gzip.GzipFile(fileobj=response)
elif ext == '.xz':
elif ext == ".xz":
import lzma
from io import BytesIO
f = BytesIO(lzma.decompress(response.read()))
elif response.headers.get_content_type() == "text/plain":
f = response
@ -282,7 +296,8 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
def get_package_for_paths(
paths: List[str], searchers: List[FileSearcher], regex: bool = False) -> Optional[str]:
paths: List[str], searchers: List[FileSearcher], regex: bool = False
) -> Optional[str]:
candidates: Set[str] = set()
for path in paths:
for searcher in searchers:


@ -62,11 +62,13 @@ def changes_filename(package, version, arch):
def get_build_architecture():
try:
return subprocess.check_output(
['dpkg-architecture', '-qDEB_BUILD_ARCH']).strip().decode()
return (
subprocess.check_output(["dpkg-architecture", "-qDEB_BUILD_ARCH"])
.strip()
.decode()
)
except subprocess.CalledProcessError as e:
raise Exception(
"Could not find the build architecture: %s" % e)
raise Exception("Could not find the build architecture: %s" % e)
def add_dummy_changelog_entry(
@ -207,7 +209,7 @@ def attempt_build(
build_suite,
output_directory,
build_command,
build_changelog_entry="Build for debian-janitor apt repository.",
build_changelog_entry=None,
subpath="",
source_date_epoch=None,
):
@ -224,9 +226,10 @@ def attempt_build(
source_date_epoch: Source date epoch to set
Returns: Tuple with (changes_name, cl_version)
"""
add_dummy_changelog_entry(
local_tree, subpath, suffix, build_suite, build_changelog_entry
)
if build_changelog_entry is not None:
add_dummy_changelog_entry(
local_tree, subpath, suffix, build_suite, build_changelog_entry
)
return build_once(
local_tree,
build_suite,


@ -22,20 +22,18 @@ __all__ = [
import logging
import os
import sys
from typing import List, Set, Optional
from typing import List, Set, Optional, Type
from debian.deb822 import (
Deb822,
PkgRelation,
)
from debian.changelog import Version
from breezy.commit import PointlessCommit
from breezy.mutabletree import MutableTree
from breezy.tree import Tree
from debmutate.control import (
ensure_some_version,
ensure_minimum_version,
ensure_relation,
ControlEditor,
)
from debmutate.debhelper import (
@ -48,9 +46,12 @@ from debmutate.reformatting import (
FormattingUnpreservable,
GeneratedFile,
)
from lintian_brush import (
reset_tree,
)
try:
from breezy.workspace import reset_tree
except ImportError:
from lintian_brush import reset_tree
from lintian_brush.changelog import (
add_changelog_entry,
)
@ -73,17 +74,17 @@ from buildlog_consultant.common import (
MissingPythonModule,
MissingPythonDistribution,
MissingPerlFile,
)
)
from buildlog_consultant.sbuild import (
SbuildFailure,
)
from ..buildlog import problem_to_upstream_requirement
from ..fix_build import BuildFixer, resolve_error, DependencyContext
from ..buildlog import UpstreamRequirementFixer
from ..resolver.apt import (
AptRequirement,
get_package_for_python_module,
)
)
from .build import attempt_build, DEFAULT_BUILDER
@ -97,7 +98,65 @@ class CircularDependency(Exception):
self.package = package
class PackageDependencyFixer(BuildFixer):
def __init__(self, apt_resolver):
self.apt_resolver = apt_resolver
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.apt_resolver)
def __str__(self):
return "upstream requirement fixer(%s)" % self.apt_resolver
def can_fix(self, error):
req = problem_to_upstream_requirement(error)
return req is not None
def fix(self, error, context):
reqs = problem_to_upstream_requirement(error)
if reqs is None:
return False
if not isinstance(reqs, list):
reqs = [reqs]
changed = False
for req in reqs:
package = self.apt_resolver.resolve(req)
if package is None:
return False
if context.phase[0] == "autopkgtest":
return add_test_dependency(
context.tree,
context.phase[1],
package,
committer=context.committer,
subpath=context.subpath,
update_changelog=context.update_changelog,
)
elif context.phase[0] == "build":
return add_build_dependency(
context.tree,
package,
committer=context.committer,
subpath=context.subpath,
update_changelog=context.update_changelog,
)
else:
logging.warning('Unknown phase %r', context.phase)
return False
return changed
class BuildDependencyContext(DependencyContext):
def __init__(
self, phase, tree, apt, subpath="", committer=None, update_changelog=True
):
self.phase = phase
super(BuildDependencyContext, self).__init__(
tree, apt, subpath, committer, update_changelog
)
def add_dependency(self, requirement: AptRequirement):
return add_build_dependency(
@ -111,9 +170,9 @@ class BuildDependencyContext(DependencyContext):
class AutopkgtestDependencyContext(DependencyContext):
def __init__(
self, testname, tree, apt, subpath="", committer=None, update_changelog=True
self, phase, tree, apt, subpath="", committer=None, update_changelog=True
):
self.testname = testname
self.phase = phase
super(AutopkgtestDependencyContext, self).__init__(
tree, apt, subpath, committer, update_changelog
)
@ -143,26 +202,17 @@ def add_build_dependency(
try:
with ControlEditor(path=control_path) as updater:
for binary in updater.binaries:
if binary["Package"] == requirement.package:
raise CircularDependency(requirement.package)
if requirement.minimum_version:
updater.source["Build-Depends"] = ensure_minimum_version(
updater.source.get("Build-Depends", ""),
requirement.package, requirement.minimum_version
)
else:
updater.source["Build-Depends"] = ensure_some_version(
updater.source.get("Build-Depends", ""),
requirement.package
if requirement.touches_package(binary["Package"]):
raise CircularDependency(binary["Package"])
for rel in requirement.relations:
updater.source["Build-Depends"] = ensure_relation(
updater.source.get("Build-Depends", ""), PkgRelation.str([rel])
)
except FormattingUnpreservable as e:
logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
return False
if requirement.minimum_version:
desc = "%s (>= %s)" % (requirement.package, requirement.minimum_version)
else:
desc = requirement.package
desc = requirement.pkg_relation_str()
if not updater.changed:
logging.info("Giving up; dependency %s was already present.", desc)
@ -202,14 +252,9 @@ def add_test_dependency(
command_counter += 1
if name != testname:
continue
if requirement.minimum_version:
control["Depends"] = ensure_minimum_version(
control.get("Depends", ""),
requirement.package, requirement.minimum_version
)
else:
control["Depends"] = ensure_some_version(
control.get("Depends", ""), requirement.package
for rel in requirement.relations:
control["Depends"] = ensure_relation(
control.get("Depends", ""), PkgRelation.str([rel])
)
except FormattingUnpreservable as e:
logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
@ -217,11 +262,7 @@ def add_test_dependency(
if not updater.changed:
return False
if requirement.minimum_version:
desc = "%s (>= %s)" % (
requirement.package, requirement.minimum_version)
else:
desc = requirement.package
desc = requirement.pkg_relation_str()
logging.info("Adding dependency to test %s: %s", testname, desc)
return commit_debian_changes(
@ -278,7 +319,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901
default = not targeted
pypy_pkg = context.apt.get_package_for_paths(
["/usr/lib/pypy/dist-packages/%s-.*.egg-info/PKG-INFO" % error.distribution], regex=True
["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % error.distribution], regex=True
)
if pypy_pkg is None:
pypy_pkg = "pypy-%s" % error.distribution
@ -286,7 +327,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901
pypy_pkg = None
py2_pkg = context.apt.get_package_for_paths(
["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info/PKG-INFO" % error.distribution],
["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info" % error.distribution],
regex=True,
)
if py2_pkg is None:
@ -295,7 +336,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901
py2_pkg = None
py3_pkg = context.apt.get_package_for_paths(
["/usr/lib/python3/dist-packages/%s-.*.egg-info/PKG-INFO" % error.distribution],
["/usr/lib/python3/dist-packages/%s-.*.egg-info" % error.distribution],
regex=True,
)
if py3_pkg is None:
@ -333,9 +374,7 @@ def fix_missing_python_distribution(error, context): # noqa: C901
for dep_pkg in extra_build_deps:
assert dep_pkg is not None
if not context.add_dependency(
AptRequirement(
dep_pkg.package, minimum_version=error.minimum_version)):
if not context.add_dependency(dep_pkg):
return False
return True
@ -347,9 +386,14 @@ def fix_missing_python_module(error, context):
targeted = set()
default = not targeted
pypy_pkg = get_package_for_python_module(context.apt, error.module, "pypy", None)
py2_pkg = get_package_for_python_module(context.apt, error.module, "python2", None)
py3_pkg = get_package_for_python_module(context.apt, error.module, "python3", None)
if error.minimum_version:
specs = [(">=", error.minimum_version)]
else:
specs = []
pypy_pkg = get_package_for_python_module(context.apt, error.module, "pypy", specs)
py2_pkg = get_package_for_python_module(context.apt, error.module, "python2", specs)
py3_pkg = get_package_for_python_module(context.apt, error.module, "python3", specs)
extra_build_deps = []
if error.python_version == 2:
@ -381,8 +425,7 @@ def fix_missing_python_module(error, context):
for dep_pkg in extra_build_deps:
assert dep_pkg is not None
if not context.add_dependency(
AptRequirement(dep_pkg.package, error.minimum_version)):
if not context.add_dependency(dep_pkg):
return False
return True
@ -405,14 +448,15 @@ def enable_dh_autoreconf(context):
return dh_invoke_add_with(line, b"autoreconf")
if update_rules(command_line_cb=add_with_autoreconf):
return context.add_dependency(AptRequirement("dh-autoreconf"))
return context.add_dependency(AptRequirement.simple("dh-autoreconf"))
return False
def fix_missing_configure(error, context):
if (not context.tree.has_filename("configure.ac") and
not context.tree.has_filename("configure.in")):
if not context.tree.has_filename("configure.ac") and not context.tree.has_filename(
"configure.in"
):
return False
return enable_dh_autoreconf(context)
@ -457,16 +501,12 @@ def fix_missing_config_status_input(error, context):
class PgBuildExtOutOfDateControlFixer(BuildFixer):
def __init__(self, session):
self.session = session
def can_fix(self, problem):
return isinstance(problem, NeedPgBuildExtUpdateControl)
def _fix(self, problem, context):
return self._fn(problem, context)
def _fix(self, error, context):
logging.info("Running 'pg_buildext updatecontrol'")
self.session.check_call(["pg_buildext", "updatecontrol"])
@ -491,15 +531,17 @@ def fix_missing_makefile_pl(error, context):
class SimpleBuildFixer(BuildFixer):
def __init__(self, problem_cls, fn):
def __init__(self, problem_cls: Type[Problem], fn):
self._problem_cls = problem_cls
self._fn = fn
def can_fix(self, problem):
def __repr__(self):
return "%s(%r, %r)" % (type(self).__name__, self._problem_cls, self._fn)
def can_fix(self, problem: Problem):
return isinstance(problem, self._problem_cls)
def _fix(self, problem, context):
def _fix(self, problem: Problem, context):
return self._fn(problem, context)
@ -520,7 +562,7 @@ def apt_fixers(apt) -> List[BuildFixer]:
SimpleBuildFixer(MissingPythonModule, fix_missing_python_module),
SimpleBuildFixer(MissingPythonDistribution, fix_missing_python_distribution),
SimpleBuildFixer(AptFetchFailure, retry_apt_failure),
UpstreamRequirementFixer(resolver),
PackageDependencyFixer(resolver),
]
@ -531,7 +573,7 @@ def build_incrementally(
build_suite,
output_directory,
build_command,
build_changelog_entry="Build for debian-janitor apt repository.",
build_changelog_entry,
committer=None,
max_iterations=DEFAULT_MAX_ITERATIONS,
subpath="",
@ -540,7 +582,7 @@ def build_incrementally(
):
fixed_errors = []
fixers = versioned_package_fixers(apt.session) + apt_fixers(apt)
logging.info('Using fixers: %r', fixers)
logging.info("Using fixers: %r", fixers)
while True:
try:
return attempt_build(
@ -569,6 +611,7 @@ def build_incrementally(
reset_tree(local_tree, local_tree.basis_tree(), subpath=subpath)
if e.phase[0] == "build":
context = BuildDependencyContext(
e.phase,
local_tree,
apt,
subpath=subpath,
@ -577,7 +620,7 @@ def build_incrementally(
)
elif e.phase[0] == "autopkgtest":
context = AutopkgtestDependencyContext(
e.phase[1],
e.phase,
local_tree,
apt,
subpath=subpath,
@ -594,7 +637,9 @@ def build_incrementally(
except GeneratedFile:
logging.warning(
"Control file is generated, unable to edit to "
"resolver error %r.", e.error)
"resolver error %r.",
e.error,
)
raise e
except CircularDependency:
logging.warning(
@ -656,19 +701,32 @@ def main(argv=None):
from breezy.workingtree import WorkingTree
from .apt import AptManager
from ..session.plain import PlainSession
import tempfile
import contextlib
apt = AptManager(PlainSession())
tree = WorkingTree.open(".")
build_incrementally(
tree,
apt,
args.suffix,
args.suite,
args.output_directory,
args.build_command,
committer=args.committer,
update_changelog=args.update_changelog,
)
logging.basicConfig(level=logging.INFO, format="%(message)s")
with contextlib.ExitStack() as es:
if args.output_directory is None:
output_directory = es.enter_context(tempfile.TemporaryDirectory())
logging.info("Using output directory %s", output_directory)
else:
output_directory = args.output_directory
tree = WorkingTree.open(".")
build_incrementally(
tree,
apt,
args.suffix,
args.suite,
output_directory,
args.build_command,
None,
committer=args.committer,
update_changelog=args.update_changelog,
)
if __name__ == "__main__":


@ -21,6 +21,7 @@ import os
import shutil
import sys
import tempfile
import time
from typing import Optional
from debian.deb822 import Deb822
@ -48,7 +49,7 @@ SUPPORTED_DIST_EXTENSIONS = [
".tbz2",
".tar",
".zip",
]
]
def is_dist_file(fn):
@ -62,24 +63,24 @@ class DistNoTarball(Exception):
"""Dist operation did not create a tarball."""
def run_dist(session, buildsystems, resolver, fixers):
def run_dist(session, buildsystems, resolver, fixers, quiet=False):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
for buildsystem in buildsystems:
buildsystem.dist(session, resolver, fixers)
buildsystem.dist(session, resolver, fixers, quiet=quiet)
return
raise NoBuildToolsFound()
class DistCatcher(object):
def __init__(self, directory):
self.export_directory = directory
self.files = []
self.existing_files = None
self.start_time = time.time()
def __enter__(self):
self.existing_files = os.listdir(self.export_directory)
@ -103,13 +104,20 @@ class DistCatcher(object):
logging.info("No tarballs found in dist directory.")
parent_directory = os.path.dirname(self.export_directory)
diff = set(os.listdir(parent_directory)) - set([subdir])
diff = set(os.listdir(parent_directory)) - set([self.export_directory])
if len(diff) == 1:
fn = diff.pop()
logging.info("Found tarball %s in parent directory.", fn)
self.files.append(os.path.join(parent_directory, fn))
return fn
if "dist" in new_files:
for entry in os.scandir(os.path.join(self.export_directory, "dist")):
if is_dist_file(entry.name) and entry.stat().st_mtime > self.start_time:
logging.info("Found tarball %s in dist directory.", entry.name)
self.files.append(entry.path)
return entry.name
def __exit__(self, exc_type, exc_val, exc_tb):
self.find_files()
return False
@ -125,7 +133,7 @@ def create_dist_schroot(
) -> str:
from .buildsystem import detect_buildsystems
from .resolver.apt import AptResolver
from .buildlog import UpstreamRequirementFixer
from .buildlog import InstallFixer
if subdir is None:
subdir = "package"
@ -151,7 +159,7 @@ def create_dist_schroot(
buildsystems = list(detect_buildsystems(export_directory))
resolver = AptResolver.from_session(session)
fixers = [UpstreamRequirementFixer(resolver)]
fixers = [InstallFixer(resolver)]
with DistCatcher(export_directory) as dc:
oldcwd = os.getcwd()
@ -196,17 +204,14 @@ if __name__ == "__main__":
parser.add_argument(
"--target-directory", type=str, default="..", help="Target directory"
)
parser.add_argument(
"--verbose",
action="store_true",
help="Be verbose")
parser.add_argument("--verbose", action="store_true", help="Be verbose")
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
else:
logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.INFO, format="%(message)s")
tree = WorkingTree.open(args.directory)
if args.packaging_directory:


@ -16,14 +16,10 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from typing import List, Tuple, Callable, Type, Optional
from typing import List, Optional
from buildlog_consultant.common import (
find_build_failure_description,
Problem,
MissingPerlModule,
MissingPythonDistribution,
MissingCommand,
)
from breezy.mutabletree import MutableTree
@ -62,25 +58,11 @@ class DependencyContext(object):
self.committer = committer
self.update_changelog = update_changelog
def add_dependency(
self, package: str, minimum_version: Optional['Version'] = None
) -> bool:
def add_dependency(self, package) -> bool:
raise NotImplementedError(self.add_dependency)
class SchrootDependencyContext(DependencyContext):
def __init__(self, session):
self.session = session
self.apt = AptManager(session)
def add_dependency(self, package, minimum_version=None):
# TODO(jelmer): Handle minimum_version
self.apt.install([package])
return True
def run_with_build_fixers(
session: Session, args: List[str], fixers: List[BuildFixer]):
def run_with_build_fixers(session: Session, args: List[str], fixers: List[BuildFixer]):
logging.info("Running %r", args)
fixed_errors = []
while True:
@ -89,11 +71,12 @@ def run_with_build_fixers(
return
match, error = find_build_failure_description(lines)
if error is None:
logging.warning("Build failed with unidentified error. Giving up.")
if match is not None:
raise UnidentifiedError(
retcode, args, lines, secondary=(match.lineno, match.line))
raise UnidentifiedError(retcode, args, lines)
if match:
logging.warning("Build failed with unidentified error:")
logging.warning("%s", match.line.rstrip("\n"))
else:
logging.warning("Build failed and unable to find cause. Giving up.")
raise UnidentifiedError(retcode, args, lines, secondary=match)
logging.info("Identified error: %r", error)
if error in fixed_errors:
@ -103,7 +86,7 @@ def run_with_build_fixers(
raise DetailedFailure(retcode, args, error)
if not resolve_error(
error,
SchrootDependencyContext(session),
None,
fixers=fixers,
):
logging.warning("Failed to find resolution for error %r. Giving up.", error)

View file

@ -15,31 +15,31 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .buildsystem import NoBuildToolsFound, InstallTarget
def run_info(session, buildsystems):
for buildsystem in buildsystems:
print('%r:' % buildsystem)
print("%r:" % buildsystem)
deps = {}
try:
for kind, dep in buildsystem.get_declared_dependencies():
deps.setdefault(kind, []).append(dep)
except NotImplementedError:
print('\tUnable to detect declared dependencies for this type of build system')
print(
"\tUnable to detect declared dependencies for this type of build system"
)
if deps:
print('\tDeclared dependencies:')
print("\tDeclared dependencies:")
for kind in deps:
print('\t\t%s:' % kind)
print("\t\t%s:" % kind)
for dep in deps[kind]:
print('\t\t\t%s' % dep)
print('')
print("\t\t\t%s" % dep)
print("")
try:
outputs = list(buildsystem.get_declared_outputs())
except NotImplementedError:
print('\tUnable to detect declared outputs for this type of build system')
print("\tUnable to detect declared outputs for this type of build system")
outputs = []
if outputs:
print('\tDeclared outputs:')
print("\tDeclared outputs:")
for output in outputs:
print('\t\t%s' % output)
print("\t\t%s" % output)

48
ognibuild/outputs.py Normal file
View file

@ -0,0 +1,48 @@
#!/usr/bin/python
# Copyright (C) 2019-2020 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from . import UpstreamOutput
class BinaryOutput(UpstreamOutput):
def __init__(self, name):
super(BinaryOutput, self).__init__("binary")
self.name = name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.name)
def __str__(self):
return "binary: %s" % self.name
class PythonPackageOutput(UpstreamOutput):
def __init__(self, name, python_version=None):
super(PythonPackageOutput, self).__init__("python-package")
self.name = name
self.python_version = python_version
def __str__(self):
return "python package: %s" % self.name
def __repr__(self):
return "%s(%r, python_version=%r)" % (
type(self).__name__,
self.name,
self.python_version,
)
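
The two new output classes in use (illustrative values only):

    outputs = [
        BinaryOutput("ogni"),
        PythonPackageOutput("ognibuild", python_version="cpython3"),
    ]
    for output in outputs:
        print(output)
    # binary: ogni
    # python package: ognibuild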

View file

@ -17,47 +17,92 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import posixpath
import subprocess
from typing import Optional, List, Tuple
from . import UpstreamRequirement
from . import Requirement
class PythonPackageRequirement(UpstreamRequirement):
class PythonPackageRequirement(Requirement):
package: str
def __init__(self, package, python_version=None, minimum_version=None):
super(PythonPackageRequirement, self).__init__('python-package')
def __init__(self, package, python_version=None, specs=None, minimum_version=None):
super(PythonPackageRequirement, self).__init__("python-package")
self.package = package
self.python_version = python_version
self.minimum_version = minimum_version
if minimum_version is not None:
specs = [(">=", minimum_version)]
if specs is None:
specs = []
self.specs = specs
def __repr__(self):
return "%s(%r, python_version=%r, minimum_version=%r)" % (
type(self).__name__, self.package, self.python_version,
self.minimum_version)
return "%s(%r, python_version=%r, specs=%r)" % (
type(self).__name__,
self.package,
self.python_version,
self.specs,
)
def __str__(self):
return "python package: %s" % self.package
if self.specs:
return "python package: %s (%r)" % (self.package, self.specs)
else:
return "python package: %s" % (self.package,)
@classmethod
def from_requirement_str(cls, text):
from requirements.requirement import Requirement
req = Requirement.parse(text)
return cls(package=req.name, specs=req.specs)
def met(self, session):
if self.python_version == "cpython3":
cmd = "python3"
elif self.python_version == "cpython2":
cmd = "python2"
elif self.python_version == "pypy":
cmd = "pypy"
elif self.python_version == "pypy3":
cmd = "pypy3"
elif self.python_version is None:
cmd = "python3"
else:
raise NotImplementedError
text = self.package + ','.join([''.join(spec) for spec in self.specs])
p = session.Popen(
[cmd, "-c", "import pkg_resources; pkg_resources.require(%r)" % text],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
p.communicate()
return p.returncode == 0
class BinaryRequirement(UpstreamRequirement):
class BinaryRequirement(Requirement):
binary_name: str
def __init__(self, binary_name):
super(BinaryRequirement, self).__init__('binary')
super(BinaryRequirement, self).__init__("binary")
self.binary_name = binary_name
def met(self, session):
p = session.Popen(
["which", self.binary_name], stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
p.communicate()
return p.returncode == 0
class PerlModuleRequirement(UpstreamRequirement):
class PerlModuleRequirement(Requirement):
module: str
filename: Optional[str]
inc: Optional[List[str]]
def __init__(self, module, filename=None, inc=None):
super(PerlModuleRequirement, self).__init__('perl-module')
super(PerlModuleRequirement, self).__init__("perl-module")
self.module = module
self.filename = filename
self.inc = inc
@ -66,195 +111,207 @@ class PerlModuleRequirement(UpstreamRequirement):
return self.module.replace("::", "/") + ".pm"
class NodePackageRequirement(UpstreamRequirement):
class NodePackageRequirement(Requirement):
package: str
def __init__(self, package):
super(NodePackageRequirement, self).__init__('npm-package')
super(NodePackageRequirement, self).__init__("npm-package")
self.package = package
class CargoCrateRequirement(UpstreamRequirement):
class CargoCrateRequirement(Requirement):
crate: str
def __init__(self, crate):
super(CargoCrateRequirement, self).__init__('cargo-crate')
super(CargoCrateRequirement, self).__init__("cargo-crate")
self.crate = crate
def __repr__(self):
return "%s(%r)" % (
type(self).__name__,
self.crate,
)
class PkgConfigRequirement(UpstreamRequirement):
def __str__(self):
return "cargo crate: %s" % self.crate
class PkgConfigRequirement(Requirement):
module: str
def __init__(self, module, minimum_version=None):
super(PkgConfigRequirement, self).__init__('pkg-config')
super(PkgConfigRequirement, self).__init__("pkg-config")
self.module = module
self.minimum_version = minimum_version
class PathRequirement(UpstreamRequirement):
class PathRequirement(Requirement):
path: str
def __init__(self, path):
super(PathRequirement, self).__init__('path')
super(PathRequirement, self).__init__("path")
self.path = path
class CHeaderRequirement(UpstreamRequirement):
class CHeaderRequirement(Requirement):
header: str
def __init__(self, header):
super(CHeaderRequirement, self).__init__('c-header')
super(CHeaderRequirement, self).__init__("c-header")
self.header = header
class JavaScriptRuntimeRequirement(UpstreamRequirement):
class JavaScriptRuntimeRequirement(Requirement):
def __init__(self):
super(JavaScriptRuntimeRequirement, self).__init__(
'javascript-runtime')
super(JavaScriptRuntimeRequirement, self).__init__("javascript-runtime")
class ValaPackageRequirement(UpstreamRequirement):
class ValaPackageRequirement(Requirement):
package: str
def __init__(self, package: str):
super(ValaPackageRequirement, self).__init__('vala')
super(ValaPackageRequirement, self).__init__("vala")
self.package = package
class RubyGemRequirement(UpstreamRequirement):
class RubyGemRequirement(Requirement):
gem: str
minimum_version: Optional[str]
def __init__(self, gem: str, minimum_version: Optional[str]):
super(RubyGemRequirement, self).__init__('gem')
super(RubyGemRequirement, self).__init__("gem")
self.gem = gem
self.minimum_version = minimum_version
class GoPackageRequirement(UpstreamRequirement):
class GoPackageRequirement(Requirement):
package: str
def __init__(self, package: str):
super(GoPackageRequirement, self).__init__('go')
super(GoPackageRequirement, self).__init__("go")
self.package = package
class DhAddonRequirement(UpstreamRequirement):
class DhAddonRequirement(Requirement):
path: str
def __init__(self, path: str):
super(DhAddonRequirement, self).__init__('dh-addon')
super(DhAddonRequirement, self).__init__("dh-addon")
self.path = path
class PhpClassRequirement(UpstreamRequirement):
class PhpClassRequirement(Requirement):
php_class: str
def __init__(self, php_class: str):
super(PhpClassRequirement, self).__init__('php-class')
super(PhpClassRequirement, self).__init__("php-class")
self.php_class = php_class
class RPackageRequirement(UpstreamRequirement):
class RPackageRequirement(Requirement):
package: str
minimum_version: Optional[str]
def __init__(self, package: str, minimum_version: Optional[str] = None):
super(RPackageRequirement, self).__init__('r-package')
super(RPackageRequirement, self).__init__("r-package")
self.package = package
self.minimum_version = minimum_version
class LibraryRequirement(UpstreamRequirement):
class LibraryRequirement(Requirement):
library: str
def __init__(self, library: str):
super(LibraryRequirement, self).__init__('lib')
super(LibraryRequirement, self).__init__("lib")
self.library = library
class RubyFileRequirement(UpstreamRequirement):
class RubyFileRequirement(Requirement):
filename: str
def __init__(self, filename: str):
super(RubyFileRequirement, self).__init__('ruby-file')
super(RubyFileRequirement, self).__init__("ruby-file")
self.filename = filename
class XmlEntityRequirement(UpstreamRequirement):
class XmlEntityRequirement(Requirement):
url: str
def __init__(self, url: str):
super(XmlEntityRequirement, self).__init__('xml-entity')
super(XmlEntityRequirement, self).__init__("xml-entity")
self.url = url
class SprocketsFileRequirement(UpstreamRequirement):
class SprocketsFileRequirement(Requirement):
content_type: str
name: str
def __init__(self, content_type: str, name: str):
super(SprocketsFileRequirement, self).__init__('sprockets-file')
super(SprocketsFileRequirement, self).__init__("sprockets-file")
self.content_type = content_type
self.name = name
class JavaClassRequirement(UpstreamRequirement):
class JavaClassRequirement(Requirement):
classname: str
def __init__(self, classname: str):
super(JavaClassRequirement, self).__init__('java-class')
super(JavaClassRequirement, self).__init__("java-class")
self.classname = classname
class HaskellPackageRequirement(UpstreamRequirement):
class HaskellPackageRequirement(Requirement):
package: str
def __init__(self, package: str):
super(HaskellPackageRequirement, self).__init__('haskell-package')
def __init__(self, package: str, specs=None):
super(HaskellPackageRequirement, self).__init__("haskell-package")
self.package = package
self.specs = specs
@classmethod
def from_string(cls, text):
parts = text.split()
return cls(parts[0], specs=parts[1:])
class MavenArtifactRequirement(UpstreamRequirement):
class MavenArtifactRequirement(Requirement):
artifacts: List[Tuple[str, str, str]]
def __init__(self, artifacts):
super(MavenArtifactRequirement, self).__init__('maven-artifact')
super(MavenArtifactRequirement, self).__init__("maven-artifact")
self.artifacts = artifacts
class GnomeCommonRequirement(UpstreamRequirement):
class GnomeCommonRequirement(Requirement):
def __init__(self):
super(GnomeCommonRequirement, self).__init__('gnome-common')
super(GnomeCommonRequirement, self).__init__("gnome-common")
class JDKFileRequirement(UpstreamRequirement):
class JDKFileRequirement(Requirement):
jdk_path: str
filename: str
def __init__(self, jdk_path: str, filename: str):
super(JDKFileRequirement, self).__init__('jdk-file')
super(JDKFileRequirement, self).__init__("jdk-file")
self.jdk_path = jdk_path
self.filename = filename
@ -263,31 +320,50 @@ class JDKFileRequirement(UpstreamRequirement):
return posixpath.join(self.jdk_path, self.filename)
class PerlFileRequirement(UpstreamRequirement):
class PerlFileRequirement(Requirement):
filename: str
def __init__(self, filename: str):
super(PerlFileRequirement, self).__init__('perl-file')
super(PerlFileRequirement, self).__init__("perl-file")
self.filename = filename
class AutoconfMacroRequirement(UpstreamRequirement):
class AutoconfMacroRequirement(Requirement):
macro: str
def __init__(self, macro: str):
super(AutoconfMacroRequirement, self).__init__('autoconf-macro')
super(AutoconfMacroRequirement, self).__init__("autoconf-macro")
self.macro = macro
class PythonModuleRequirement(UpstreamRequirement):
class PythonModuleRequirement(Requirement):
module: str
python_version: Optional[str]
minimum_version: Optional[str]
def __init__(self, module, python_version=None, minimum_version=None):
super(PythonModuleRequirement, self).__init__('python-module')
super(PythonModuleRequirement, self).__init__("python-module")
self.python_version = python_version
self.minimum_version = minimum_version
def met(self, session):
if self.python_version == "cpython3":
cmd = "python3"
elif self.python_version == "cpython2":
cmd = "python2"
elif self.python_version == "pypy":
cmd = "pypy"
elif self.python_version == "pypy3":
cmd = "pypy3"
elif self.python_version is None:
cmd = "python3"
else:
raise NotImplementedError
p = session.Popen(
[cmd, "-c", "import %s" % self.module],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
p.communicate()
return p.returncode == 0
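
A short sketch of the new specs handling in PythonPackageRequirement. The shape of requirements-parser's Requirement.parse() result (.name plus a list of (operator, version) tuples in .specs) is an assumption based on that library's documented behaviour, and `session` stands for whatever Session implementation is in use.

    req = PythonPackageRequirement.from_requirement_str("Django>=2.2")
    print(req.package)  # Django
    print(req.specs)    # [('>=', '2.2')]
    print(req)          # python package: Django ([('>=', '2.2')])

    # met() runs the interpreter inside the session and asks pkg_resources
    # whether "Django>=2.2" is already satisfied there:
    if not req.met(session):
        ...  # hand the requirement to a resolver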

View file

@ -16,14 +16,15 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
class UnsatisfiedRequirements(Exception):
import subprocess
class UnsatisfiedRequirements(Exception):
def __init__(self, reqs):
self.requirements = reqs
class Resolver(object):
def install(self, requirements):
raise NotImplementedError(self.install)
@ -38,15 +39,29 @@ class Resolver(object):
class CPANResolver(Resolver):
def __init__(self, session):
self.session = session
def __str__(self):
return "cpan"
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
def explain(self, requirements):
from ..requirements import PerlModuleRequirement
perlreqs = []
for requirement in requirements:
if not isinstance(requirement, PerlModuleRequirement):
continue
perlreqs.append(requirement)
if perlreqs:
yield (["cpan", "-i"] + [req.module for req in perlreqs], [perlreqs])
def install(self, requirements):
from ..requirements import PerlModuleRequirement
missing = []
for requirement in requirements:
if not isinstance(requirement, PerlModuleRequirement):
@ -55,86 +70,122 @@ class CPANResolver(Resolver):
# TODO(jelmer): Specify -T to skip tests?
self.session.check_call(
["cpan", "-i", requirement.module],
user="root", env={"PERL_MM_USE_DEFAULT": "1"}
env={"PERL_MM_USE_DEFAULT": "1"},
)
if missing:
raise UnsatisfiedRequirements(missing)
def explain(self, requirements):
raise NotImplementedError(self.explain)
class HackageResolver(Resolver):
def __init__(self, session):
self.session = session
def __str__(self):
return "hackage"
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
def install(self, requirements):
from ..requirements import HaskellPackageRequirement
missing = []
for requirement in requirements:
if not isinstance(requirement, HaskellPackageRequirement):
missing.append(requirement)
continue
self.session.check_call(
["cabal", "install", requirement.package],
user="root")
["cabal", "install", requirement.package]
)
if missing:
raise UnsatisfiedRequirements(missing)
def explain(self, requirements):
raise NotImplementedError(self.explain)
from ..requirements import HaskellPackageRequirement
class CargoResolver(Resolver):
def __init__(self, session):
self.session = session
def __str__(self):
return "cargo"
def install(self, requirements):
from ..requirements import CargoCrateRequirement
missing = []
haskellreqs = []
for requirement in requirements:
if not isinstance(requirement, CargoCrateRequirement):
missing.append(requirement)
if not isinstance(requirement, HaskellPackageRequirement):
continue
self.session.check_call(
["cargo", "install", requirement.crate],
user="root")
if missing:
raise UnsatisfiedRequirements(missing)
def explain(self, requirements):
raise NotImplementedError(self.explain)
haskellreqs.append(requirement)
if haskellreqs:
yield (["cabal", "install"] + [req.package for req in haskellreqs],
haskellreqs)
class PypiResolver(Resolver):
def __init__(self, session):
self.session = session
def __str__(self):
return "pypi"
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
def install(self, requirements):
from ..requirements import PythonPackageRequirement
missing = []
for requirement in requirements:
if not isinstance(requirement, PythonPackageRequirement):
missing.append(requirement)
continue
self.session.check_call(["pip", "install", requirement.package])
try:
self.session.check_call(
["pip", "install", requirement.package])
except subprocess.CalledProcessError:
missing.append(requirement)
if missing:
raise UnsatisfiedRequirements(missing)
def explain(self, requirements):
raise NotImplementedError(self.explain)
from ..requirements import PythonPackageRequirement
pyreqs = []
for requirement in requirements:
if not isinstance(requirement, PythonPackageRequirement):
continue
pyreqs.append(requirement)
if pyreqs:
yield (["pip", "install"] + [req.package for req in pyreqs],
pyreqs)
class GoResolver(Resolver):
def __init__(self, session):
self.session = session
def __str__(self):
return "go"
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
def install(self, requirements):
from ..requirements import GoPackageRequirement
missing = []
for requirement in requirements:
if not isinstance(requirement, GoPackageRequirement):
missing.append(requirement)
continue
self.session.check_call(["go", "get", requirement.package])
if missing:
raise UnsatisfiedRequirements(missing)
def explain(self, requirements):
from ..requirements import GoPackageRequirement
goreqs = []
for requirement in requirements:
if not isinstance(requirement, GoPackageRequirement):
continue
goreqs.append(requirement)
if goreqs:
yield (["go", "get"] + [req.package for req in goreqs],
goreqs)
NPM_COMMAND_PACKAGES = {
@ -143,15 +194,18 @@ NPM_COMMAND_PACKAGES = {
class NpmResolver(Resolver):
def __init__(self, session):
self.session = session
def __str__(self):
return "npm"
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
def install(self, requirements):
from ..requirements import NodePackageRequirement
missing = []
for requirement in requirements:
if not isinstance(requirement, NodePackageRequirement):
@ -167,7 +221,21 @@ class NpmResolver(Resolver):
raise UnsatisfiedRequirements(missing)
def explain(self, requirements):
raise NotImplementedError(self.explain)
from ..requirements import NodePackageRequirement
nodereqs = []
packages = []
for requirement in requirements:
if not isinstance(requirement, NodePackageRequirement):
continue
try:
package = NPM_COMMAND_PACKAGES[requirement.command]
except KeyError:
continue
nodereqs.append(requirement)
packages.append(package)
if nodereqs:
yield (["npm", "-g", "install"] + packages, nodereqs)
class StackedResolver(Resolver):
@ -180,6 +248,10 @@ class StackedResolver(Resolver):
def __str__(self):
return "[" + ", ".join(map(str, self.subs)) + "]"
def explain(self, requirements):
for sub in self.subs:
yield from sub.explain(requirements)
def install(self, requirements):
for sub in self.subs:
try:
@ -190,13 +262,17 @@ class StackedResolver(Resolver):
return
NATIVE_RESOLVER_CLS = [
CPANResolver,
PypiResolver,
NpmResolver,
GoResolver,
HackageResolver,
]
def native_resolvers(session):
return StackedResolver([
CPANResolver(session),
PypiResolver(session),
NpmResolver(session),
CargoResolver(session),
HackageResolver(session)])
return StackedResolver([kls(session) for kls in NATIVE_RESOLVER_CLS])
class ExplainResolver(Resolver):
@ -212,17 +288,15 @@ class ExplainResolver(Resolver):
def auto_resolver(session):
# TODO(jelmer): if session is SchrootSession or if we're root, use apt
# if session is SchrootSession or if we're root, use apt
from .apt import AptResolver
from ..session.schroot import SchrootSession
user = session.check_output(['echo', '$USER']).decode().strip()
user = session.check_output(["echo", "$USER"]).decode().strip()
resolvers = []
if isinstance(session, SchrootSession) or user == 'root':
# TODO(jelmer): Check VIRTUAL_ENV, and prioritize PypiResolver if
# present?
if isinstance(session, SchrootSession) or user == "root":
resolvers.append(AptResolver.from_session(session))
resolvers.extend([
CPANResolver(session),
PypiResolver(session),
NpmResolver(session),
CargoResolver(session),
HackageResolver(session)])
resolvers.extend([kls(session) for kls in NATIVE_RESOLVER_CLS])
return StackedResolver(resolvers)
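
A sketch of consuming the new explain()/install() split; `requirements` is assumed to be a list of Requirement objects, while auto_resolver and UnsatisfiedRequirements come from the hunks above.

    resolver = auto_resolver(session)

    # Dry run: each sub-resolver yields (argv, requirements) pairs.
    for argv, reqs in resolver.explain(requirements):
        print("would run %s for: %s" % (" ".join(argv), ", ".join(map(str, reqs))))

    # Or install for real, falling through the stacked resolvers:
    try:
        resolver.install(requirements)
    except UnsatisfiedRequirements as e:
        print("could not satisfy: %r" % e.requirements)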

View file

@ -15,19 +15,23 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from itertools import chain
import logging
import os
import posixpath
from debian.changelog import Version
from debian.deb822 import PkgRelation
from ..debian.apt import AptManager
from . import Resolver, UnsatisfiedRequirements
from ..requirements import (
Requirement,
BinaryRequirement,
CHeaderRequirement,
PkgConfigRequirement,
PathRequirement,
UpstreamRequirement,
JavaScriptRuntimeRequirement,
ValaPackageRequirement,
RubyGemRequirement,
@ -50,38 +54,82 @@ from ..requirements import (
AutoconfMacroRequirement,
PythonModuleRequirement,
PythonPackageRequirement,
)
)
class AptRequirement(object):
class AptRequirement(Requirement):
def __init__(self, relations):
super(AptRequirement, self).__init__("apt")
self.relations = relations
def __init__(self, package, minimum_version=None):
self.package = package
self.minimum_version = minimum_version
@classmethod
def simple(cls, package, minimum_version=None):
rel = {"name": package}
if minimum_version is not None:
rel["version"] = (">=", minimum_version)
return cls([[rel]])
@classmethod
def from_str(cls, text):
return cls(PkgRelation.parse_relations(text))
def pkg_relation_str(self):
return PkgRelation.str(self.relations)
def __str__(self):
return "apt requirement: %s" % self.pkg_relation_str()
def touches_package(self, package):
for rel in self.relations:
for entry in rel:
if entry["name"] == package:
return True
return False
def get_package_for_python_package(apt_mgr, package, python_version, minimum_version=None):
def python_spec_to_apt_rels(pkg_name, specs):
# TODO(jelmer): Dealing with epoch, etc?
if not specs:
return [[{"name": pkg_name}]]
else:
rels = []
for spec in specs:
c = {">=": ">=", "<=": "<=", "<": "<<", ">": ">>", "=": "="}[spec[0]]
rels.append([{"name": pkg_name, "version": (c, Version(spec[1]))}])
return rels
def get_package_for_python_package(apt_mgr, package, python_version, specs=None):
if python_version == "pypy":
pkg_name = apt_mgr.get_package_for_paths(
["/usr/lib/pypy/dist-packages/%s-.*.egg-info/PKG-INFO" % package],
regex=True)
["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % package.replace("-", "_")],
regex=True,
)
elif python_version == "cpython2":
pkg_name = apt_mgr.get_package_for_paths(
["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info/PKG-INFO" % package],
regex=True)
[
"/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info"
% package.replace("-", "_")
],
regex=True,
)
elif python_version == "cpython3":
pkg_name = apt_mgr.get_package_for_paths(
["/usr/lib/python3/dist-packages/%s-.*.egg-info/PKG-INFO" % package],
regex=True)
[
"/usr/lib/python3/dist-packages/%s-.*.egg-info"
% package.replace("-", "_")
],
regex=True,
)
else:
raise NotImplementedError
# TODO(jelmer): Dealing with epoch, etc?
if pkg_name is not None:
return AptRequirement(pkg_name, minimum_version)
return None
if pkg_name is None:
return None
rels = python_spec_to_apt_rels(pkg_name, specs)
return AptRequirement(rels)
def get_package_for_python_module(apt_mgr, module, python_version, minimum_version):
def get_package_for_python_module(apt_mgr, module, python_version, specs):
if python_version == "python3":
paths = [
posixpath.join(
@ -135,9 +183,10 @@ def get_package_for_python_module(apt_mgr, module, python_version, minimum_versi
else:
raise AssertionError("unknown python version %r" % python_version)
pkg_name = apt_mgr.get_package_for_paths(paths, regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name, minimum_version=minimum_version)
return None
if pkg_name is None:
return None
rels = python_spec_to_apt_rels(pkg_name, specs)
return AptRequirement(rels)
def resolve_binary_req(apt_mgr, req):
@ -145,12 +194,11 @@ def resolve_binary_req(apt_mgr, req):
paths = [req.binary_name]
else:
paths = [
posixpath.join(dirname, req.binary_name)
for dirname in ["/usr/bin", "/bin"]
posixpath.join(dirname, req.binary_name) for dirname in ["/usr/bin", "/bin"]
]
pkg_name = apt_mgr.get_package_for_paths(paths)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -161,16 +209,17 @@ def resolve_pkg_config_req(apt_mgr, req):
if package is None:
package = apt_mgr.get_package_for_paths(
[posixpath.join("/usr/lib", ".*", "pkgconfig", req.module + ".pc")],
regex=True)
regex=True,
)
if package is not None:
return AptRequirement(package, minimum_version=req.minimum_version)
return AptRequirement.simple(package, minimum_version=req.minimum_version)
return None
def resolve_path_req(apt_mgr, req):
package = apt_mgr.get_package_for_paths([req.path])
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -184,14 +233,15 @@ def resolve_c_header_req(apt_mgr, req):
)
if package is None:
return None
return AptRequirement(package)
return AptRequirement.simple(package)
def resolve_js_runtime_req(apt_mgr, req):
package = apt_mgr.get_package_for_paths(
["/usr/bin/node", "/usr/bin/duk"], regex=False)
["/usr/bin/node", "/usr/bin/duk"], regex=False
)
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -199,7 +249,7 @@ def resolve_vala_package_req(apt_mgr, req):
path = "/usr/share/vala-[0-9.]+/vapi/%s.vapi" % req.package
package = apt_mgr.get_package_for_paths([path], regex=True)
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -210,20 +260,18 @@ def resolve_ruby_gem_req(apt_mgr, req):
"specifications/%s-.*\\.gemspec" % req.gem
)
]
package = apt_mgr.get_package_for_paths(
paths, regex=True)
package = apt_mgr.get_package_for_paths(paths, regex=True)
if package is not None:
return AptRequirement(package, minimum_version=req.minimum_version)
return AptRequirement.simple(package, minimum_version=req.minimum_version)
return None
def resolve_go_package_req(apt_mgr, req):
package = apt_mgr.get_package_for_paths(
[posixpath.join("/usr/share/gocode/src", req.package, ".*")],
regex=True
[posixpath.join("/usr/share/gocode/src", req.package, ".*")], regex=True
)
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -231,7 +279,7 @@ def resolve_dh_addon_req(apt_mgr, req):
paths = [posixpath.join("/usr/share/perl5", req.path)]
package = apt_mgr.get_package_for_paths(paths)
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -239,7 +287,7 @@ def resolve_php_class_req(apt_mgr, req):
path = "/usr/share/php/%s.php" % req.php_class.replace("\\", "/")
package = apt_mgr.get_package_for_paths([path])
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -247,7 +295,7 @@ def resolve_r_package_req(apt_mgr, req):
paths = [posixpath.join("/usr/lib/R/site-library/.*/R/%s$" % req.package)]
package = apt_mgr.get_package_for_paths(paths, regex=True)
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
return None
@ -259,7 +307,7 @@ def resolve_node_package_req(apt_mgr, req):
]
pkg_name = apt_mgr.get_package_for_paths(paths, regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -272,7 +320,7 @@ def resolve_library_req(apt_mgr, req):
]
pkg_name = apt_mgr.get_package_for_paths(paths, regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -280,7 +328,7 @@ def resolve_ruby_file_req(apt_mgr, req):
paths = [posixpath.join("/usr/lib/ruby/vendor_ruby/%s.rb" % req.filename)]
package = apt_mgr.get_package_for_paths(paths)
if package is not None:
return AptRequirement(package)
return AptRequirement.simple(package)
paths = [
posixpath.join(
r"/usr/share/rubygems-integration/all/gems/([^/]+)/"
@ -289,7 +337,7 @@ def resolve_ruby_file_req(apt_mgr, req):
]
pkg_name = apt_mgr.get_package_for_paths(paths, regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -308,7 +356,7 @@ def resolve_xml_entity_req(apt_mgr, req):
pkg_name = apt_mgr.get_package_for_paths([search_path], regex=False)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -320,7 +368,7 @@ def resolve_sprockets_file_req(apt_mgr, req):
return None
pkg_name = apt_mgr.get_package_for_paths([path], regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -329,7 +377,8 @@ def resolve_java_class_req(apt_mgr, req):
# system :(
# TODO(jelmer): Call in session
output = apt_mgr.session.check_output(
["java-propose-classpath", "-c" + req.classname])
["java-propose-classpath", "-c" + req.classname]
)
classpath = [p for p in output.decode().strip(":").strip().split(":") if p]
if not classpath:
logging.warning("unable to find classpath for %s", req.classname)
@ -339,14 +388,14 @@ def resolve_java_class_req(apt_mgr, req):
if package is None:
logging.warning("no package for files in %r", classpath)
return None
return AptRequirement(package)
return AptRequirement.simple(package)
def resolve_haskell_package_req(apt_mgr, req):
path = "/var/lib/ghc/package.conf.d/%s-.*.conf" % req.deps[0][0]
pkg_name = apt_mgr.get_package_for_paths([path], regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -378,19 +427,19 @@ def resolve_maven_artifact_req(apt_mgr, req):
]
pkg_name = apt_mgr.get_package_for_paths(paths, regex=regex)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
def resolve_gnome_common_req(apt_mgr, req):
return AptRequirement('gnome-common')
return AptRequirement.simple("gnome-common")
def resolve_jdk_file_req(apt_mgr, req):
path = req.jdk_path + ".*/" + req.filename
pkg_name = apt_mgr.get_package_for_paths([path], regex=True)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -399,8 +448,7 @@ def resolve_perl_module_req(apt_mgr, req):
if req.inc is None:
if req.filename is None:
paths = [posixpath.join(inc, req.relfilename)
for inc in DEFAULT_PERL_PATHS]
paths = [posixpath.join(inc, req.relfilename) for inc in DEFAULT_PERL_PATHS]
elif not posixpath.isabs(req.filename):
return False
else:
@ -409,14 +457,14 @@ def resolve_perl_module_req(apt_mgr, req):
paths = [posixpath.join(inc, req.filename) for inc in req.inc]
pkg_name = apt_mgr.get_package_for_paths(paths, regex=False)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
def resolve_perl_file_req(apt_mgr, req):
pkg_name = apt_mgr.get_package_for_paths([req.filename], regex=False)
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
@ -441,24 +489,28 @@ def resolve_autoconf_macro_req(apt_mgr, req):
return None
pkg_name = apt_mgr.get_package_for_paths([path])
if pkg_name is not None:
return AptRequirement(pkg_name)
return AptRequirement.simple(pkg_name)
return None
def resolve_python_module_req(apt_mgr, req):
if req.python_version == 2:
return get_package_for_python_module(apt_mgr, req.module, "cpython2", req.minimum_version)
return get_package_for_python_module(apt_mgr, req.module, "cpython2", req.specs)
elif req.python_version in (None, 3):
return get_package_for_python_module(apt_mgr, req.module, "cpython3", req.minimum_version)
return get_package_for_python_module(apt_mgr, req.module, "cpython3", req.specs)
else:
return None
def resolve_python_package_req(apt_mgr, req):
if req.python_version == 2:
return get_package_for_python_package(apt_mgr, req.package, "cpython2", req.minimum_version)
return get_package_for_python_package(
apt_mgr, req.package, "cpython2", req.specs
)
elif req.python_version in (None, 3):
return get_package_for_python_package(apt_mgr, req.package, "cpython3", req.minimum_version)
return get_package_for_python_package(
apt_mgr, req.package, "cpython3", req.specs
)
else:
return None
@ -493,7 +545,7 @@ APT_REQUIREMENT_RESOLVERS = [
]
def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement) -> AptRequirement:
def resolve_requirement_apt(apt_mgr, req: Requirement) -> AptRequirement:
for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS:
if isinstance(req, rr_class):
return rr_fn(apt_mgr, req)
@ -501,7 +553,6 @@ def resolve_requirement_apt(apt_mgr, req: UpstreamRequirement) -> AptRequirement
class AptResolver(Resolver):
def __init__(self, apt):
self.apt = apt
@ -531,12 +582,20 @@ class AptResolver(Resolver):
else:
apt_requirements.append(apt_req)
if apt_requirements:
self.apt.install([r.package for r in apt_requirements])
self.apt.satisfy(
[PkgRelation.str(chain(*[r.relations for r in apt_requirements]))]
)
if still_missing:
raise UnsatisfiedRequirements(still_missing)
def explain(self, requirements):
raise NotImplementedError(self.explain)
apt_requirements = []
for r in requirements:
apt_req = self.resolve(r)
if apt_req is not None:
apt_requirements.append((r, apt_req))
if apt_requirements:
yield (["apt", "satisfy"] + [PkgRelation.str(chain(*[r.relations for o, r in apt_requirements]))], [o for o, r in apt_requirements])
def resolve(self, req: UpstreamRequirement):
def resolve(self, req: Requirement):
return resolve_requirement_apt(self.apt, req)
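
For concreteness, how the relation-based AptRequirement behaves according to the helpers above; the results are inferred from the code, not captured from a run, and PkgRelation/Version come from python-debian as imported in this file.

    python_spec_to_apt_rels("python3-django", [(">=", "2.2"), ("<", "3.0")])
    # -> [[{'name': 'python3-django', 'version': ('>=', Version('2.2'))}],
    #     [{'name': 'python3-django', 'version': ('<<', Version('3.0'))}]]

    req = AptRequirement.simple("brz", minimum_version="3.1")
    print(req.pkg_relation_str())      # brz (>= 3.1)
    print(req.touches_package("brz"))  # True

    req2 = AptRequirement.from_str("libfoo-dev (>= 1.2) | libfoo1-dev")
    print(req2.touches_package("libfoo1-dev"))  # True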

View file

@ -27,6 +27,9 @@ class PlainSession(Session):
location = "/"
def __repr__(self):
return "%s()" % (type(self).__name__, )
def create_home(self):
pass
@ -44,3 +47,6 @@ class PlainSession(Session):
def scandir(self, path):
return os.scandir(path)
def chdir(self, path):
os.chdir(path)

View file

@ -62,8 +62,8 @@ class SchrootSession(Session):
# TODO(jelmer): Capture stderr and forward in SessionSetupFailure
raise SessionSetupFailure()
logging.info(
'Opened schroot session %s (from %s)', self.session_id,
self.chroot)
"Opened schroot session %s (from %s)", self.session_id, self.chroot
)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
@ -157,7 +157,7 @@ class SchrootSession(Session):
def _fullpath(self, path: str) -> str:
if self._cwd is None:
raise ValueError('no cwd set')
raise ValueError("no cwd set")
return os.path.join(self.location, os.path.join(self._cwd, path).lstrip("/"))
def exists(self, path: str) -> bool:

View file

@ -158,11 +158,10 @@ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium
class BuildArchitectureTests(TestCase):
def setUp(self):
super(BuildArchitectureTests, self).setUp()
if not os.path.exists('/usr/bin/dpkg-architecture'):
self.skipTest('not a debian system')
if not os.path.exists("/usr/bin/dpkg-architecture"):
self.skipTest("not a debian system")
def test_is_str(self):
self.assertIsInstance(get_build_architecture(), str)

View file

@ -30,7 +30,6 @@ from buildlog_consultant.common import (
MissingRubyGem,
MissingValaPackage,
)
from ..debian import apt
from ..debian.apt import AptManager, FileSearcher
from ..debian.fix_build import (
resolve_error,
@ -42,7 +41,6 @@ from breezy.tests import TestCaseWithTransport
class DummyAptSearcher(FileSearcher):
def __init__(self, files):
self._apt_files = files
@ -59,8 +57,8 @@ class DummyAptSearcher(FileSearcher):
class ResolveErrorTests(TestCaseWithTransport):
def setUp(self):
super(ResolveErrorTests, self).setUp()
if not os.path.exists('/usr/bin/dpkg-architecture'):
self.skipTest('not a debian system')
if not os.path.exists("/usr/bin/dpkg-architecture"):
self.skipTest("not a debian system")
self.tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
@ -95,10 +93,12 @@ blah (0.1) UNRELEASED; urgency=medium
def resolve(self, error, context=("build",)):
from ..session.plain import PlainSession
session = PlainSession()
apt = AptManager(session)
apt._searchers = [DummyAptSearcher(self._apt_files)]
context = BuildDependencyContext(
("build", ),
self.tree,
apt,
subpath="",
@ -122,8 +122,8 @@ blah (0.1) UNRELEASED; urgency=medium
"/usr/bin/brz": "brz",
"/usr/bin/brzier": "bash",
}
self.overrideEnv('DEBEMAIL', 'jelmer@debian.org')
self.overrideEnv('DEBFULLNAME', 'Jelmer Vernooij')
self.overrideEnv("DEBEMAIL", "jelmer@debian.org")
self.overrideEnv("DEBFULLNAME", "Jelmer Vernooij")
self.assertTrue(self.resolve(MissingCommand("brz")))
self.assertEqual("libc6, brz", self.get_build_deps())
rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision())

View file

@ -6,7 +6,7 @@ from setuptools import setup
setup(name="ognibuild",
description="Detect and run any build system",
version="0.0.1",
version="0.0.2",
maintainer="Jelmer Vernooij",
maintainer_email="jelmer@jelmer.uk",
license="GNU GPLv2 or later",
@ -30,6 +30,7 @@ setup(name="ognibuild",
install_requires=[
'breezy',
'buildlog-consultant',
'requirements-parser',
],
extras_require={
'debian': ['debmutate', 'python_debian', 'python_apt'],