Fix style.
parent 1741622d85
commit f8d269b6e5

18 changed files with 337 additions and 317 deletions
@@ -21,7 +21,6 @@ import stat
class DetailedFailure(Exception):
    def __init__(self, retcode, argv, error):
        self.retcode = retcode
        self.argv = argv
@@ -29,7 +28,6 @@ class DetailedFailure(Exception):
class UnidentifiedError(Exception):
    def __init__(self, retcode, argv, lines, secondary=None):
        self.retcode = retcode
        self.argv = argv
@@ -63,7 +61,6 @@ class Requirement(object):
class UpstreamOutput(object):
    def __init__(self, family):
        self.family = family

@@ -23,7 +23,6 @@ from .buildsystem import NoBuildToolsFound, detect_buildsystems
from .resolver import (
    auto_resolver,
    native_resolvers,
    UnsatisfiedRequirements,
)
from .resolver.apt import AptResolver
@@ -39,15 +38,14 @@ def get_necessary_declared_requirements(resolver, requirements, stages):
def install_necessary_declared_requirements(resolver, buildsystem, stages):
    missing = []
    try:
        declared_reqs = buildsystem.get_declared_dependencies()
        declared_reqs = list(buildsystem.get_declared_dependencies())
    except NotImplementedError:
        logging.warning(
            'Unable to determine declared dependencies from %s', buildsystem)
            "Unable to determine declared dependencies from %s", buildsystem
        )
    else:
        missing.extend(
            get_necessary_declared_requirements(
                resolver, declared_reqs, stages
            )
            get_necessary_declared_requirements(resolver, declared_reqs, stages)
        )
    resolver.install(missing)
@@ -70,7 +68,6 @@ STAGE_MAP = {
def determine_fixers(session, resolver):
    from .buildlog import RequirementFixer
    from .resolver.apt import AptResolver
    return [RequirementFixer(resolver)]
@@ -90,36 +87,35 @@ def main(): # noqa: C901
    )
    parser.add_argument(
        "--explain",
        action='store_true',
        help="Explain what needs to be done rather than making changes")
        action="store_true",
        help="Explain what needs to be done rather than making changes",
    )
    parser.add_argument(
        "--ignore-declared-dependencies",
        "--optimistic",
        action="store_true",
        help="Ignore declared dependencies, follow build errors only",
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="Be verbose")
    subparsers = parser.add_subparsers(dest='subcommand')
    subparsers.add_parser('dist')
    subparsers.add_parser('build')
    subparsers.add_parser('clean')
    subparsers.add_parser('test')
    subparsers.add_parser('info')
    install_parser = subparsers.add_parser('install')
    parser.add_argument("--verbose", action="store_true", help="Be verbose")
    subparsers = parser.add_subparsers(dest="subcommand")
    subparsers.add_parser("dist")
    subparsers.add_parser("build")
    subparsers.add_parser("clean")
    subparsers.add_parser("test")
    subparsers.add_parser("info")
    install_parser = subparsers.add_parser("install")
    install_parser.add_argument(
        '--user', action='store_true', help='Install in local-user directories.')
        "--user", action="store_true", help="Install in local-user directories."
    )

    args = parser.parse_args()
    if not args.subcommand:
        parser.print_usage()
        return 1
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format='%(message)s')
        logging.basicConfig(level=logging.DEBUG, format="%(message)s")
    else:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        logging.basicConfig(level=logging.INFO, format="%(message)s")
    if args.schroot:
        from .session.schroot import SchrootSession
@@ -135,46 +131,51 @@ def main(): # noqa: C901
        resolver = native_resolvers(session)
    elif args.resolve == "auto":
        resolver = auto_resolver(session)
    logging.info('Using requirement resolver: %s', resolver)
    logging.info("Using requirement resolver: %s", resolver)
    os.chdir(args.directory)
    try:
        bss = list(detect_buildsystems(args.directory))
        logging.info('Detected buildsystems: %r', bss)
        logging.info("Detected buildsystems: %r", bss)
        if not args.ignore_declared_dependencies and not args.explain:
            stages = STAGE_MAP[args.subcommand]
            if stages:
                logging.info('Checking that declared requirements are present')
                logging.info("Checking that declared requirements are present")
                for bs in bss:
                    install_necessary_declared_requirements(resolver, bs, stages)
        fixers = determine_fixers(session, resolver)
        if args.subcommand == "dist":
            from .dist import run_dist

            run_dist(
                session=session, buildsystems=bss, resolver=resolver,
                fixers=fixers)
                session=session, buildsystems=bss, resolver=resolver, fixers=fixers
            )
        if args.subcommand == "build":
            from .build import run_build
            run_build(
                session, buildsystems=bss, resolver=resolver,
                fixers=fixers)

            run_build(session, buildsystems=bss, resolver=resolver, fixers=fixers)
        if args.subcommand == "clean":
            from .clean import run_clean
            run_clean(
                session, buildsystems=bss, resolver=resolver,
                fixers=fixers)

            run_clean(session, buildsystems=bss, resolver=resolver, fixers=fixers)
        if args.subcommand == "install":
            from .install import run_install

            run_install(
                session, buildsystems=bss, resolver=resolver,
                fixers=fixers, user=args.user)
                session,
                buildsystems=bss,
                resolver=resolver,
                fixers=fixers,
                user=args.user,
            )
        if args.subcommand == "test":
            from .test import run_test
            run_test(session, buildsystems=bss, resolver=resolver,
                fixers=fixers)

            run_test(session, buildsystems=bss, resolver=resolver, fixers=fixers)
        if args.subcommand == "info":
            from .info import run_info

            run_info(session, buildsystems=bss)
    except UnidentifiedError as e:
    except UnidentifiedError:
        return 1
    except NoBuildToolsFound:
        logging.info("No build tools found.")

@@ -21,7 +21,6 @@
import logging

from buildlog_consultant.common import (
    MissingConfigStatusInput,
    MissingPythonModule,
    MissingPythonDistribution,
    MissingCHeader,
@@ -41,15 +40,12 @@ from buildlog_consultant.common import (
    MissingLibrary,
    MissingJavaClass,
    MissingCSharpCompiler,
    MissingConfigure,
    MissingAutomakeInput,
    MissingRPackage,
    MissingRubyFile,
    MissingAutoconfMacro,
    MissingValaPackage,
    MissingXfceDependency,
    MissingHaskellDependencies,
    NeedPgBuildExtUpdateControl,
    DhAddonLoadFailure,
    MissingMavenArtifacts,
    GnomeCommonMissing,
@@ -84,17 +80,16 @@ from .requirements import (
    AutoconfMacroRequirement,
    PythonModuleRequirement,
    PythonPackageRequirement,
    )
)


def problem_to_upstream_requirement(problem):
def problem_to_upstream_requirement(problem):  # noqa: C901
    if isinstance(problem, MissingFile):
        return PathRequirement(problem.path)
    elif isinstance(problem, MissingCommand):
        return BinaryRequirement(problem.command)
    elif isinstance(problem, MissingPkgConfig):
        return PkgConfigRequirement(
            problem.module, problem.minimum_version)
        return PkgConfigRequirement(problem.module, problem.minimum_version)
    elif isinstance(problem, MissingCHeader):
        return CHeaderRequirement(problem.header)
    elif isinstance(problem, MissingJavaScriptRuntime):
@@ -126,35 +121,31 @@ def problem_to_upstream_requirement(problem):
    elif isinstance(problem, MissingHaskellDependencies):
        return [HaskellPackageRequirement(dep) for dep in problem.deps]
    elif isinstance(problem, MissingMavenArtifacts):
        return [MavenArtifactRequirement(artifact)
                for artifact in problem.artifacts]
        return [MavenArtifactRequirement(artifact) for artifact in problem.artifacts]
    elif isinstance(problem, MissingCSharpCompiler):
        return BinaryRequirement('msc')
        return BinaryRequirement("msc")
    elif isinstance(problem, GnomeCommonMissing):
        return GnomeCommonRequirement()
    elif isinstance(problem, MissingJDKFile):
        return JDKFileRequirement(problem.jdk_path, problem.filename)
    elif isinstance(problem, MissingGnomeCommonDependency):
        if problem.package == "glib-gettext":
            return BinaryRequirement('glib-gettextize')
            return BinaryRequirement("glib-gettextize")
        else:
            logging.warning(
                "No known command for gnome-common dependency %s",
                problem.package)
                "No known command for gnome-common dependency %s", problem.package
            )
            return None
    elif isinstance(problem, MissingXfceDependency):
        if problem.package == "gtk-doc":
            return BinaryRequirement("gtkdocize")
        else:
            logging.warning(
                "No known command for xfce dependency %s",
                problem.package)
            logging.warning("No known command for xfce dependency %s", problem.package)
            return None
    elif isinstance(problem, MissingPerlModule):
        return PerlModuleRequirement(
            module=problem.module,
            filename=problem.filename,
            inc=problem.inc)
            module=problem.module, filename=problem.filename, inc=problem.inc
        )
    elif isinstance(problem, MissingPerlFile):
        return PerlFileRequirement(filename=problem.filename)
    elif isinstance(problem, MissingAutoconfMacro):
@@ -163,18 +154,19 @@ def problem_to_upstream_requirement(problem):
        return PythonModuleRequirement(
            problem.module,
            python_version=problem.python_version,
            minimum_version=problem.minimum_version)
            minimum_version=problem.minimum_version,
        )
    elif isinstance(problem, MissingPythonDistribution):
        return PythonPackageRequirement(
            problem.module,
            python_version=problem.python_version,
            minimum_version=problem.minimum_version)
            minimum_version=problem.minimum_version,
        )
    else:
        return None


class RequirementFixer(BuildFixer):
    def __init__(self, resolver):
        self.resolver = resolver

@ -27,14 +27,14 @@ from . import shebang_binary, UnidentifiedError
|
|||
from .outputs import (
|
||||
BinaryOutput,
|
||||
PythonPackageOutput,
|
||||
)
|
||||
)
|
||||
from .requirements import (
|
||||
BinaryRequirement,
|
||||
PythonPackageRequirement,
|
||||
PerlModuleRequirement,
|
||||
NodePackageRequirement,
|
||||
CargoCrateRequirement,
|
||||
)
|
||||
)
|
||||
from .fix_build import run_with_build_fixers
|
||||
|
||||
|
||||
|
@ -114,22 +114,24 @@ class Pear(BuildSystem):
|
|||
# run_setup, but setting __name__
|
||||
# Imported from Python's distutils.core, Copyright (C) PSF
|
||||
|
||||
|
||||
def run_setup(script_name, script_args=None, stop_after="run"):
|
||||
from distutils import core
|
||||
import sys
|
||||
if stop_after not in ('init', 'config', 'commandline', 'run'):
|
||||
|
||||
if stop_after not in ("init", "config", "commandline", "run"):
|
||||
raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))
|
||||
|
||||
core._setup_stop_after = stop_after
|
||||
|
||||
save_argv = sys.argv.copy()
|
||||
g = {'__file__': script_name, '__name__': '__main__'}
|
||||
g = {"__file__": script_name, "__name__": "__main__"}
|
||||
try:
|
||||
try:
|
||||
sys.argv[0] = script_name
|
||||
if script_args is not None:
|
||||
sys.argv[1:] = script_args
|
||||
with open(script_name, 'rb') as f:
|
||||
with open(script_name, "rb") as f:
|
||||
exec(f.read(), g)
|
||||
finally:
|
||||
sys.argv = save_argv
|
||||
|
@ -140,9 +142,13 @@ def run_setup(script_name, script_args=None, stop_after="run"):
|
|||
pass
|
||||
|
||||
if core._setup_distribution is None:
|
||||
raise RuntimeError(("'distutils.core.setup()' was never called -- "
|
||||
"perhaps '%s' is not a Distutils setup script?") % \
|
||||
script_name)
|
||||
raise RuntimeError(
|
||||
(
|
||||
"'distutils.core.setup()' was never called -- "
|
||||
"perhaps '%s' is not a Distutils setup script?"
|
||||
)
|
||||
% script_name
|
||||
)
|
||||
|
||||
return core._setup_distribution
|
||||
|
||||
|
@ -158,7 +164,7 @@ class SetupPy(BuildSystem):
|
|||
try:
|
||||
self.result = run_setup(os.path.abspath(path), stop_after="init")
|
||||
except RuntimeError as e:
|
||||
logging.warning('Unable to load setup.py metadata: %s', e)
|
||||
logging.warning("Unable to load setup.py metadata: %s", e)
|
||||
self.result = None
|
||||
|
||||
def __repr__(self):
|
||||
|
@ -202,7 +208,7 @@ class SetupPy(BuildSystem):
|
|||
self.setup(resolver)
|
||||
preargs = []
|
||||
if quiet:
|
||||
preargs.append('--quiet')
|
||||
preargs.append("--quiet")
|
||||
self._run_setup(session, resolver, preargs + ["sdist"], fixers)
|
||||
|
||||
def clean(self, session, resolver, fixers):
|
||||
|
@ -213,7 +219,7 @@ class SetupPy(BuildSystem):
|
|||
self.setup(resolver)
|
||||
extra_args = []
|
||||
if install_target.user:
|
||||
extra_args.append('--user')
|
||||
extra_args.append("--user")
|
||||
self._run_setup(session, resolver, ["install"] + extra_args, fixers)
|
||||
|
||||
def _run_setup(self, session, resolver, args, fixers):
|
||||
|
@ -224,9 +230,7 @@ class SetupPy(BuildSystem):
|
|||
else:
|
||||
# Just assume it's Python 3
|
||||
resolver.install([BinaryRequirement("python3")])
|
||||
run_with_build_fixers(
|
||||
session, ["python3", "./setup.py"] + args,
|
||||
fixers)
|
||||
run_with_build_fixers(session, ["python3", "./setup.py"] + args, fixers)
|
||||
|
||||
def get_declared_dependencies(self):
|
||||
if self.result is None:
|
||||
|
@ -234,11 +238,11 @@ class SetupPy(BuildSystem):
|
|||
for require in self.result.get_requires():
|
||||
yield "core", PythonPackageRequirement.from_requirement_str(require)
|
||||
# Not present for distutils-only packages
|
||||
if getattr(self.result, 'install_requires', []):
|
||||
if getattr(self.result, "install_requires", []):
|
||||
for require in self.result.install_requires:
|
||||
yield "core", PythonPackageRequirement.from_requirement_str(require)
|
||||
# Not present for distutils-only packages
|
||||
if getattr(self.result, 'tests_require', []):
|
||||
if getattr(self.result, "tests_require", []):
|
||||
for require in self.result.tests_require:
|
||||
yield "test", PythonPackageRequirement.from_requirement_str(require)
|
||||
|
||||
|
@ -247,7 +251,7 @@ class SetupPy(BuildSystem):
|
|||
raise NotImplementedError
|
||||
for script in self.result.scripts or []:
|
||||
yield BinaryOutput(os.path.basename(script))
|
||||
entry_points = getattr(self.result, 'entry_points', None) or {}
|
||||
entry_points = getattr(self.result, "entry_points", None) or {}
|
||||
for script in entry_points.get("console_scripts", []):
|
||||
yield BinaryOutput(script.split("=")[0])
|
||||
for package in self.result.packages or []:
|
||||
|
@ -271,8 +275,7 @@ class PyProject(BuildSystem):
|
|||
def dist(self, session, resolver, fixers, quiet=False):
|
||||
if "poetry" in self.pyproject.get("tool", []):
|
||||
logging.debug(
|
||||
"Found pyproject.toml with poetry section, "
|
||||
"assuming poetry project."
|
||||
"Found pyproject.toml with poetry section, " "assuming poetry project."
|
||||
)
|
||||
resolver.install(
|
||||
[
|
||||
|
@ -382,8 +385,7 @@ class DistInkt(BuildSystem):
|
|||
continue
|
||||
if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"):
|
||||
logging.debug(
|
||||
"Found Dist::Inkt section in dist.ini, "
|
||||
"assuming distinkt."
|
||||
"Found Dist::Inkt section in dist.ini, " "assuming distinkt."
|
||||
)
|
||||
self.name = "dist-inkt"
|
||||
self.dist_inkt_class = value.decode().strip("'")
|
||||
|
@ -405,8 +407,7 @@ class DistInkt(BuildSystem):
|
|||
else:
|
||||
# Default to invoking Dist::Zilla
|
||||
resolver.install([PerlModuleRequirement("Dist::Zilla")])
|
||||
run_with_build_fixers(
|
||||
session, ["dzil", "build", "--in", ".."], fixers)
|
||||
run_with_build_fixers(session, ["dzil", "build", "--in", ".."], fixers)
|
||||
|
||||
|
||||
class Make(BuildSystem):
|
||||
|
@ -419,27 +420,28 @@ class Make(BuildSystem):
|
|||
def setup(self, session, resolver, fixers):
|
||||
resolver.install([BinaryRequirement("make")])
|
||||
|
||||
if session.exists("Makefile.PL") and not session.exists("Makefile"):
|
||||
def makefile_exists():
|
||||
return any(
|
||||
[session.exists(p) for p in ["Makefile", "GNUmakefile", "makefile"]]
|
||||
)
|
||||
|
||||
if session.exists("Makefile.PL") and not makefile_exists():
|
||||
resolver.install([BinaryRequirement("perl")])
|
||||
run_with_build_fixers(session, ["perl", "Makefile.PL"], fixers)
|
||||
|
||||
if not session.exists("Makefile") and not session.exists("configure"):
|
||||
if not makefile_exists() and not session.exists("configure"):
|
||||
if session.exists("autogen.sh"):
|
||||
if shebang_binary("autogen.sh") is None:
|
||||
run_with_build_fixers(
|
||||
session, ["/bin/sh", "./autogen.sh"], fixers)
|
||||
run_with_build_fixers(session, ["/bin/sh", "./autogen.sh"], fixers)
|
||||
try:
|
||||
run_with_build_fixers(
|
||||
session, ["./autogen.sh"], fixers)
|
||||
run_with_build_fixers(session, ["./autogen.sh"], fixers)
|
||||
except UnidentifiedError as e:
|
||||
if (
|
||||
"Gnulib not yet bootstrapped; "
|
||||
"run ./bootstrap instead.\n" in e.lines
|
||||
):
|
||||
run_with_build_fixers(
|
||||
session, ["./bootstrap"], fixers)
|
||||
run_with_build_fixers(
|
||||
session, ["./autogen.sh"], fixers)
|
||||
run_with_build_fixers(session, ["./bootstrap"], fixers)
|
||||
run_with_build_fixers(session, ["./autogen.sh"], fixers)
|
||||
else:
|
||||
raise
|
||||
|
||||
|
@ -454,7 +456,7 @@ class Make(BuildSystem):
|
|||
)
|
||||
run_with_build_fixers(session, ["autoreconf", "-i"], fixers)
|
||||
|
||||
if not session.exists("Makefile") and session.exists("configure"):
|
||||
if not makefile_exists() and session.exists("configure"):
|
||||
session.check_call(["./configure"])
|
||||
|
||||
def build(self, session, resolver, fixers):
|
||||
|
@ -500,7 +502,8 @@ class Make(BuildSystem):
|
|||
elif any(
|
||||
[
|
||||
re.match(
|
||||
r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' "
|
||||
r"(Makefile|GNUmakefile|makefile):[0-9]+: "
|
||||
r"\*\*\* Missing \'Make.inc\' "
|
||||
r"Run \'./configure \[options\]\' and retry. Stop.\n",
|
||||
line,
|
||||
)
|
||||
|
@ -592,20 +595,22 @@ class Cabal(BuildSystem):
|
|||
|
||||
def _run(self, session, args, fixers):
|
||||
try:
|
||||
run_with_build_fixers(
|
||||
session, ["runhaskell", "Setup.hs"] + args, fixers)
|
||||
run_with_build_fixers(session, ["runhaskell", "Setup.hs"] + args, fixers)
|
||||
except UnidentifiedError as e:
|
||||
if "Run the 'configure' command first.\n" in e.lines:
|
||||
run_with_build_fixers(
|
||||
session, ["runhaskell", "Setup.hs", "configure"], fixers)
|
||||
session, ["runhaskell", "Setup.hs", "configure"], fixers
|
||||
)
|
||||
run_with_build_fixers(
|
||||
session, ["runhaskell", "Setup.hs"] + args, fixers)
|
||||
session, ["runhaskell", "Setup.hs"] + args, fixers
|
||||
)
|
||||
else:
|
||||
raise
|
||||
|
||||
def test(self, session, resolver, fixers):
|
||||
self._run(session, ["test"], fixers)
|
||||
|
||||
|
||||
def detect_buildsystems(path, trust_package=False): # noqa: C901
|
||||
"""Detect build systems."""
|
||||
if os.path.exists(os.path.join(path, "package.xml")):
|
||||
|
@ -634,9 +639,9 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901
|
|||
logging.debug("Found Cargo.toml, assuming rust cargo package.")
|
||||
yield Cargo("Cargo.toml")
|
||||
|
||||
if os.path.exists(os.path.join(path, 'Setup.hs')):
|
||||
if os.path.exists(os.path.join(path, "Setup.hs")):
|
||||
logging.debug("Found Setup.hs, assuming haskell package.")
|
||||
yield Cabal('Setup.hs')
|
||||
yield Cabal("Setup.hs")
|
||||
|
||||
if os.path.exists(os.path.join(path, "pom.xml")):
|
||||
logging.debug("Found pom.xml, assuming maven package.")
|
||||
|
@ -656,6 +661,8 @@ def detect_buildsystems(path, trust_package=False): # noqa: C901
|
|||
os.path.exists(os.path.join(path, p))
|
||||
for p in [
|
||||
"Makefile",
|
||||
"GNUmakefile",
|
||||
"makefile",
|
||||
"Makefile.PL",
|
||||
"autogen.sh",
|
||||
"configure.ac",
|
||||
|
|
|
@ -36,5 +36,6 @@ def satisfy_build_deps(session: Session, tree):
|
|||
pass
|
||||
deps = [dep.strip().strip(",") for dep in deps]
|
||||
from .apt import AptManager
|
||||
|
||||
apt = AptManager(session)
|
||||
apt.satisfy(deps)
|
||||
|
|
|
@ -24,7 +24,6 @@ import os
|
|||
from buildlog_consultant.apt import (
|
||||
find_apt_get_failure,
|
||||
)
|
||||
from debian.deb822 import Release
|
||||
|
||||
from .. import DetailedFailure, UnidentifiedError
|
||||
from ..session import Session, run_with_tee
|
||||
|
@ -63,17 +62,19 @@ class AptManager(object):
|
|||
if self._searchers is None:
|
||||
self._searchers = [
|
||||
AptContentsFileSearcher.from_session(self.session),
|
||||
GENERATED_FILE_SEARCHER]
|
||||
GENERATED_FILE_SEARCHER,
|
||||
]
|
||||
return self._searchers
|
||||
|
||||
def package_exists(self, package):
|
||||
if self._apt_cache is None:
|
||||
import apt
|
||||
|
||||
self._apt_cache = apt.Cache(rootdir=self.session.location)
|
||||
return package in self._apt_cache
|
||||
|
||||
def get_package_for_paths(self, paths, regex=False):
|
||||
logging.debug('Searching for packages containing %r', paths)
|
||||
logging.debug("Searching for packages containing %r", paths)
|
||||
# TODO(jelmer): Make sure we use whatever is configured in self.session
|
||||
return get_package_for_paths(paths, self.searchers(), regex=regex)
|
||||
|
||||
|
@ -82,6 +83,7 @@ class AptManager(object):
|
|||
status_path = os.path.join(root, "var/lib/dpkg/status")
|
||||
missing = set(packages)
|
||||
import apt_pkg
|
||||
|
||||
with apt_pkg.TagFile(status_path) as tagf:
|
||||
while missing:
|
||||
tagf.step()
|
||||
|
@ -93,7 +95,7 @@ class AptManager(object):
|
|||
return list(missing)
|
||||
|
||||
def install(self, packages: List[str]) -> None:
|
||||
logging.info('Installing using apt: %r', packages)
|
||||
logging.info("Installing using apt: %r", packages)
|
||||
packages = self.missing(packages)
|
||||
if packages:
|
||||
run_apt(self.session, ["install"] + packages)
|
||||
|
@ -112,16 +114,19 @@ class AptContentsFileSearcher(FileSearcher):
|
|||
|
||||
@classmethod
|
||||
def from_session(cls, session):
|
||||
logging.info('Loading apt contents information')
|
||||
logging.info("Loading apt contents information")
|
||||
# TODO(jelmer): what about sources.list.d?
|
||||
from aptsources.sourceslist import SourcesList
|
||||
|
||||
sl = SourcesList()
|
||||
sl.load(os.path.join(session.location, 'etc/apt/sources.list'))
|
||||
sl.load(os.path.join(session.location, "etc/apt/sources.list"))
|
||||
return cls.from_sources_list(
|
||||
sl,
|
||||
cache_dirs=[
|
||||
os.path.join(session.location, 'var/lib/apt/lists'),
|
||||
'/var/lib/apt/lists'])
|
||||
os.path.join(session.location, "var/lib/apt/lists"),
|
||||
"/var/lib/apt/lists",
|
||||
],
|
||||
)
|
||||
|
||||
def __setitem__(self, path, package):
|
||||
self._db[path] = package
|
||||
|
@ -146,15 +151,17 @@ class AptContentsFileSearcher(FileSearcher):
|
|||
@classmethod
|
||||
def _load_cache_file(cls, url, cache_dir):
|
||||
from urllib.parse import urlparse
|
||||
|
||||
parsed = urlparse(url)
|
||||
p = os.path.join(
|
||||
cache_dir,
|
||||
parsed.hostname + parsed.path.replace('/', '_') + '.lz4')
|
||||
cache_dir, parsed.hostname + parsed.path.replace("/", "_") + ".lz4"
|
||||
)
|
||||
if not os.path.exists(p):
|
||||
return None
|
||||
logging.debug('Loading cached contents file %s', p)
|
||||
logging.debug("Loading cached contents file %s", p)
|
||||
import lz4.frame
|
||||
return lz4.frame.open(p, mode='rb')
|
||||
|
||||
return lz4.frame.open(p, mode="rb")
|
||||
|
||||
@classmethod
|
||||
def from_urls(cls, urls, cache_dirs=None):
|
||||
|
@ -168,39 +175,39 @@ class AptContentsFileSearcher(FileSearcher):
|
|||
else:
|
||||
if not mandatory and self._db:
|
||||
logging.debug(
|
||||
'Not attempting to fetch optional contents '
|
||||
'file %s', url)
|
||||
"Not attempting to fetch optional contents " "file %s", url
|
||||
)
|
||||
else:
|
||||
logging.debug('Fetching contents file %s', url)
|
||||
logging.debug("Fetching contents file %s", url)
|
||||
try:
|
||||
self.load_url(url)
|
||||
except ContentsFileNotFound:
|
||||
if mandatory:
|
||||
logging.warning(
|
||||
'Unable to fetch contents file %s', url)
|
||||
logging.warning("Unable to fetch contents file %s", url)
|
||||
else:
|
||||
logging.debug(
|
||||
'Unable to fetch optional contents file %s',
|
||||
url)
|
||||
"Unable to fetch optional contents file %s", url
|
||||
)
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def from_sources_list(cls, sl, cache_dirs=None):
|
||||
# TODO(jelmer): Use aptsources.sourceslist.SourcesList
|
||||
from .build import get_build_architecture
|
||||
|
||||
# TODO(jelmer): Verify signatures, etc.
|
||||
urls = []
|
||||
arches = [(get_build_architecture(), True), ("all", False)]
|
||||
for source in sl.list:
|
||||
if source.invalid or source.disabled:
|
||||
continue
|
||||
if source.type == 'deb-src':
|
||||
if source.type == "deb-src":
|
||||
continue
|
||||
if source.type != 'deb':
|
||||
if source.type != "deb":
|
||||
logging.warning("Invalid line in sources: %r", source)
|
||||
continue
|
||||
base_url = source.uri.rstrip('/')
|
||||
name = source.dist.rstrip('/')
|
||||
base_url = source.uri.rstrip("/")
|
||||
name = source.dist.rstrip("/")
|
||||
components = source.comps
|
||||
if components:
|
||||
dists_url = base_url + "/dists"
|
||||
|
@ -210,12 +217,20 @@ class AptContentsFileSearcher(FileSearcher):
|
|||
for component in components:
|
||||
for arch, mandatory in arches:
|
||||
urls.append(
|
||||
("%s/%s/%s/Contents-%s" % (
|
||||
dists_url, name, component, arch), mandatory))
|
||||
(
|
||||
"%s/%s/%s/Contents-%s"
|
||||
% (dists_url, name, component, arch),
|
||||
mandatory,
|
||||
)
|
||||
)
|
||||
else:
|
||||
for arch, mandatory in arches:
|
||||
urls.append(
|
||||
("%s/%s/Contents-%s" % (dists_url, name.rstrip('/'), arch), mandatory))
|
||||
(
|
||||
"%s/%s/Contents-%s" % (dists_url, name.rstrip("/"), arch),
|
||||
mandatory,
|
||||
)
|
||||
)
|
||||
return cls.from_urls(urls, cache_dirs=cache_dirs)
|
||||
|
||||
@staticmethod
|
||||
|
@ -228,7 +243,7 @@ class AptContentsFileSearcher(FileSearcher):
|
|||
def load_url(self, url, allow_cache=True):
|
||||
from urllib.error import HTTPError
|
||||
|
||||
for ext in ['.xz', '.gz', '']:
|
||||
for ext in [".xz", ".gz", ""]:
|
||||
try:
|
||||
response = self._get(url + ext)
|
||||
except HTTPError as e:
|
||||
|
@ -238,13 +253,14 @@ class AptContentsFileSearcher(FileSearcher):
|
|||
break
|
||||
else:
|
||||
raise ContentsFileNotFound(url)
|
||||
if ext == '.gz':
|
||||
if ext == ".gz":
|
||||
import gzip
|
||||
|
||||
f = gzip.GzipFile(fileobj=response)
|
||||
elif ext == '.xz':
|
||||
elif ext == ".xz":
|
||||
import lzma
|
||||
from io import BytesIO
|
||||
|
||||
f = BytesIO(lzma.decompress(response.read()))
|
||||
elif response.headers.get_content_type() == "text/plain":
|
||||
f = response
|
||||
|
@ -280,7 +296,8 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
|
|||
|
||||
|
||||
def get_package_for_paths(
|
||||
paths: List[str], searchers: List[FileSearcher], regex: bool = False) -> Optional[str]:
|
||||
paths: List[str], searchers: List[FileSearcher], regex: bool = False
|
||||
) -> Optional[str]:
|
||||
candidates: Set[str] = set()
|
||||
for path in paths:
|
||||
for searcher in searchers:
|
||||
|
|
|
@ -62,11 +62,13 @@ def changes_filename(package, version, arch):
|
|||
|
||||
def get_build_architecture():
|
||||
try:
|
||||
return subprocess.check_output(
|
||||
['dpkg-architecture', '-qDEB_BUILD_ARCH']).strip().decode()
|
||||
return (
|
||||
subprocess.check_output(["dpkg-architecture", "-qDEB_BUILD_ARCH"])
|
||||
.strip()
|
||||
.decode()
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise Exception(
|
||||
"Could not find the build architecture: %s" % e)
|
||||
raise Exception("Could not find the build architecture: %s" % e)
|
||||
|
||||
|
||||
def add_dummy_changelog_entry(
|
||||
|
|
|
@ -22,13 +22,12 @@ __all__ = [
|
|||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import List, Set, Optional
|
||||
from typing import List, Set, Optional, Type
|
||||
|
||||
from debian.deb822 import (
|
||||
Deb822,
|
||||
PkgRelation,
|
||||
)
|
||||
from debian.changelog import Version
|
||||
|
||||
from breezy.commit import PointlessCommit
|
||||
from breezy.mutabletree import MutableTree
|
||||
|
@ -37,7 +36,6 @@ from debmutate.control import (
|
|||
ensure_relation,
|
||||
ControlEditor,
|
||||
)
|
||||
from debian.deb822 import PkgRelation
|
||||
from debmutate.debhelper import (
|
||||
get_debhelper_compat_level,
|
||||
)
|
||||
|
@ -48,6 +46,7 @@ from debmutate.reformatting import (
|
|||
FormattingUnpreservable,
|
||||
GeneratedFile,
|
||||
)
|
||||
|
||||
try:
|
||||
from breezy.workspace import reset_tree
|
||||
except ImportError:
|
||||
|
@ -75,7 +74,7 @@ from buildlog_consultant.common import (
|
|||
MissingPythonModule,
|
||||
MissingPythonDistribution,
|
||||
MissingPerlFile,
|
||||
)
|
||||
)
|
||||
from buildlog_consultant.sbuild import (
|
||||
SbuildFailure,
|
||||
)
|
||||
|
@ -85,7 +84,7 @@ from ..buildlog import RequirementFixer
|
|||
from ..resolver.apt import (
|
||||
AptRequirement,
|
||||
get_package_for_python_module,
|
||||
)
|
||||
)
|
||||
from .build import attempt_build, DEFAULT_BUILDER
|
||||
|
||||
|
||||
|
@ -100,7 +99,6 @@ class CircularDependency(Exception):
|
|||
|
||||
|
||||
class BuildDependencyContext(DependencyContext):
|
||||
|
||||
def add_dependency(self, requirement: AptRequirement):
|
||||
return add_build_dependency(
|
||||
self.tree,
|
||||
|
@ -149,8 +147,8 @@ def add_build_dependency(
|
|||
raise CircularDependency(binary["Package"])
|
||||
for rel in requirement.relations:
|
||||
updater.source["Build-Depends"] = ensure_relation(
|
||||
updater.source.get("Build-Depends", ""),
|
||||
PkgRelation.str([rel]))
|
||||
updater.source.get("Build-Depends", ""), PkgRelation.str([rel])
|
||||
)
|
||||
except FormattingUnpreservable as e:
|
||||
logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
|
||||
return False
|
||||
|
@ -197,8 +195,8 @@ def add_test_dependency(
|
|||
continue
|
||||
for rel in requirement.relations:
|
||||
control["Depends"] = ensure_relation(
|
||||
control.get("Depends", ""),
|
||||
PkgRelation.str([rel]))
|
||||
control.get("Depends", ""), PkgRelation.str([rel])
|
||||
)
|
||||
except FormattingUnpreservable as e:
|
||||
logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
|
||||
return False
|
||||
|
@ -330,7 +328,7 @@ def fix_missing_python_module(error, context):
|
|||
default = not targeted
|
||||
|
||||
if error.minimum_version:
|
||||
specs = [('>=', error.minimum_version)]
|
||||
specs = [(">=", error.minimum_version)]
|
||||
else:
|
||||
specs = []
|
||||
|
||||
|
@ -397,8 +395,9 @@ def enable_dh_autoreconf(context):
|
|||
|
||||
|
||||
def fix_missing_configure(error, context):
|
||||
if (not context.tree.has_filename("configure.ac") and
|
||||
not context.tree.has_filename("configure.in")):
|
||||
if not context.tree.has_filename("configure.ac") and not context.tree.has_filename(
|
||||
"configure.in"
|
||||
):
|
||||
return False
|
||||
|
||||
return enable_dh_autoreconf(context)
|
||||
|
@ -443,16 +442,12 @@ def fix_missing_config_status_input(error, context):
|
|||
|
||||
|
||||
class PgBuildExtOutOfDateControlFixer(BuildFixer):
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
def can_fix(self, problem):
|
||||
return isinstance(problem, NeedPgBuildExtUpdateControl)
|
||||
|
||||
def _fix(self, problem, context):
|
||||
return self._fn(problem, context)
|
||||
|
||||
def _fix(self, error, context):
|
||||
logging.info("Running 'pg_buildext updatecontrol'")
|
||||
self.session.check_call(["pg_buildext", "updatecontrol"])
|
||||
|
@ -477,18 +472,17 @@ def fix_missing_makefile_pl(error, context):
|
|||
|
||||
|
||||
class SimpleBuildFixer(BuildFixer):
|
||||
|
||||
def __init__(self, problem_cls, fn):
|
||||
def __init__(self, problem_cls: Type[Problem], fn):
|
||||
self._problem_cls = problem_cls
|
||||
self._fn = fn
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r, %r)" % (type(self).__name__, self._problem_cls, self._fn)
|
||||
|
||||
def can_fix(self, problem):
|
||||
def can_fix(self, problem: Problem):
|
||||
return isinstance(problem, self._problem_cls)
|
||||
|
||||
def _fix(self, problem, context):
|
||||
def _fix(self, problem: Problem, context):
|
||||
return self._fn(problem, context)
|
||||
|
||||
|
||||
|
@ -504,6 +498,7 @@ def versioned_package_fixers(session):
|
|||
|
||||
def apt_fixers(apt) -> List[BuildFixer]:
|
||||
from ..resolver.apt import AptResolver
|
||||
|
||||
resolver = AptResolver(apt)
|
||||
return [
|
||||
SimpleBuildFixer(MissingPythonModule, fix_missing_python_module),
|
||||
|
@ -529,7 +524,7 @@ def build_incrementally(
|
|||
):
|
||||
fixed_errors = []
|
||||
fixers = versioned_package_fixers(apt.session) + apt_fixers(apt)
|
||||
logging.info('Using fixers: %r', fixers)
|
||||
logging.info("Using fixers: %r", fixers)
|
||||
while True:
|
||||
try:
|
||||
return attempt_build(
|
||||
|
@ -583,7 +578,9 @@ def build_incrementally(
|
|||
except GeneratedFile:
|
||||
logging.warning(
|
||||
"Control file is generated, unable to edit to "
|
||||
"resolver error %r.", e.error)
|
||||
"resolver error %r.",
|
||||
e.error,
|
||||
)
|
||||
raise e
|
||||
except CircularDependency:
|
||||
logging.warning(
|
||||
|
@ -647,14 +644,15 @@ def main(argv=None):
|
|||
from ..session.plain import PlainSession
|
||||
import tempfile
|
||||
import contextlib
|
||||
|
||||
apt = AptManager(PlainSession())
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(message)s')
|
||||
logging.basicConfig(level=logging.INFO, format="%(message)s")
|
||||
|
||||
with contextlib.ExitStack() as es:
|
||||
if args.output_directory is None:
|
||||
output_directory = es.enter_context(tempfile.TemporaryDirectory())
|
||||
logging.info('Using output directory %s', output_directory)
|
||||
logging.info("Using output directory %s", output_directory)
|
||||
else:
|
||||
output_directory = args.output_directory
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ SUPPORTED_DIST_EXTENSIONS = [
|
|||
".tbz2",
|
||||
".tar",
|
||||
".zip",
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
def is_dist_file(fn):
|
||||
|
@ -76,7 +76,6 @@ def run_dist(session, buildsystems, resolver, fixers, quiet=False):
|
|||
|
||||
|
||||
class DistCatcher(object):
|
||||
|
||||
def __init__(self, directory):
|
||||
self.export_directory = directory
|
||||
self.files = []
|
||||
|
@ -205,17 +204,14 @@ if __name__ == "__main__":
|
|||
parser.add_argument(
|
||||
"--target-directory", type=str, default="..", help="Target directory"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="Be verbose")
|
||||
parser.add_argument("--verbose", action="store_true", help="Be verbose")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.verbose:
|
||||
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
|
||||
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
|
||||
else:
|
||||
logging.basicConfig(level=logging.INFO, format='%(message)s')
|
||||
logging.basicConfig(level=logging.INFO, format="%(message)s")
|
||||
|
||||
tree = WorkingTree.open(args.directory)
|
||||
if args.packaging_directory:
|
||||
|
|
|
@ -16,14 +16,10 @@
|
|||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
|
||||
import logging
|
||||
from typing import List, Tuple, Callable, Type, Optional
|
||||
from typing import List, Optional
|
||||
|
||||
from buildlog_consultant.common import (
|
||||
find_build_failure_description,
|
||||
Problem,
|
||||
MissingPerlModule,
|
||||
MissingPythonDistribution,
|
||||
MissingCommand,
|
||||
)
|
||||
from breezy.mutabletree import MutableTree
|
||||
|
||||
|
@ -63,7 +59,7 @@ class DependencyContext(object):
|
|||
self.update_changelog = update_changelog
|
||||
|
||||
def add_dependency(
|
||||
self, package: str, minimum_version: Optional['Version'] = None
|
||||
self, package: str, minimum_version=None
|
||||
) -> bool:
|
||||
raise NotImplementedError(self.add_dependency)
|
||||
|
||||
|
@ -79,8 +75,7 @@ class SchrootDependencyContext(DependencyContext):
|
|||
return True
|
||||
|
||||
|
||||
def run_with_build_fixers(
|
||||
session: Session, args: List[str], fixers: List[BuildFixer]):
|
||||
def run_with_build_fixers(session: Session, args: List[str], fixers: List[BuildFixer]):
|
||||
logging.info("Running %r", args)
|
||||
fixed_errors = []
|
||||
while True:
|
||||
|
@ -91,7 +86,7 @@ def run_with_build_fixers(
|
|||
if error is None:
|
||||
if match:
|
||||
logging.warning("Build failed with unidentified error:")
|
||||
logging.warning('%s', match.line.rstrip('\n'))
|
||||
logging.warning("%s", match.line.rstrip("\n"))
|
||||
else:
|
||||
logging.warning("Build failed and unable to find cause. Giving up.")
|
||||
raise UnidentifiedError(retcode, args, lines, secondary=match)
|
||||
|
|
|
@ -15,31 +15,31 @@
|
|||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
|
||||
from .buildsystem import NoBuildToolsFound, InstallTarget
|
||||
|
||||
|
||||
def run_info(session, buildsystems):
|
||||
for buildsystem in buildsystems:
|
||||
print('%r:' % buildsystem)
|
||||
print("%r:" % buildsystem)
|
||||
deps = {}
|
||||
try:
|
||||
for kind, dep in buildsystem.get_declared_dependencies():
|
||||
deps.setdefault(kind, []).append(dep)
|
||||
except NotImplementedError:
|
||||
print('\tUnable to detect declared dependencies for this type of build system')
|
||||
print(
|
||||
"\tUnable to detect declared dependencies for this type of build system"
|
||||
)
|
||||
if deps:
|
||||
print('\tDeclared dependencies:')
|
||||
print("\tDeclared dependencies:")
|
||||
for kind in deps:
|
||||
print('\t\t%s:' % kind)
|
||||
print("\t\t%s:" % kind)
|
||||
for dep in deps[kind]:
|
||||
print('\t\t\t%s' % dep)
|
||||
print('')
|
||||
print("\t\t\t%s" % dep)
|
||||
print("")
|
||||
try:
|
||||
outputs = list(buildsystem.get_declared_outputs())
|
||||
except NotImplementedError:
|
||||
print('\tUnable to detect declared outputs for this type of build system')
|
||||
print("\tUnable to detect declared outputs for this type of build system")
|
||||
outputs = []
|
||||
if outputs:
|
||||
print('\tDeclared outputs:')
|
||||
print("\tDeclared outputs:")
|
||||
for output in outputs:
|
||||
print('\t\t%s' % output)
|
||||
print("\t\t%s" % output)
|
||||
|
|
|
@ -20,9 +20,8 @@ from . import UpstreamOutput
|
|||
|
||||
|
||||
class BinaryOutput(UpstreamOutput):
|
||||
|
||||
def __init__(self, name):
|
||||
super(BinaryOutput, self).__init__('binary')
|
||||
super(BinaryOutput, self).__init__("binary")
|
||||
self.name = name
|
||||
|
||||
def __repr__(self):
|
||||
|
@ -33,9 +32,8 @@ class BinaryOutput(UpstreamOutput):
|
|||
|
||||
|
||||
class PythonPackageOutput(UpstreamOutput):
|
||||
|
||||
def __init__(self, name, python_version=None):
|
||||
super(PythonPackageOutput, self).__init__('python-package')
|
||||
super(PythonPackageOutput, self).__init__("python-package")
|
||||
self.name = name
|
||||
self.python_version = python_version
|
||||
|
||||
|
@ -44,4 +42,7 @@ class PythonPackageOutput(UpstreamOutput):
|
|||
|
||||
def __repr__(self):
|
||||
return "%s(%r, python_version=%r)" % (
|
||||
type(self).__name__, self.name, self.python_version)
|
||||
type(self).__name__,
|
||||
self.name,
|
||||
self.python_version,
|
||||
)
|
||||
|
|
|
@ -26,30 +26,32 @@ class PythonPackageRequirement(Requirement):
|
|||
|
||||
package: str
|
||||
|
||||
def __init__(self, package, python_version=None, specs=None,
|
||||
minimum_version=None):
|
||||
super(PythonPackageRequirement, self).__init__('python-package')
|
||||
def __init__(self, package, python_version=None, specs=None, minimum_version=None):
|
||||
super(PythonPackageRequirement, self).__init__("python-package")
|
||||
self.package = package
|
||||
self.python_version = python_version
|
||||
if minimum_version is not None:
|
||||
specs = [('>=', minimum_version)]
|
||||
specs = [(">=", minimum_version)]
|
||||
self.specs = specs
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r, python_version=%r, specs=%r)" % (
|
||||
type(self).__name__, self.package, self.python_version,
|
||||
self.specs)
|
||||
type(self).__name__,
|
||||
self.package,
|
||||
self.python_version,
|
||||
self.specs,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
if self.specs:
|
||||
return "python package: %s (%r)" % (self.package, self.specs)
|
||||
else:
|
||||
return "python package: %s" % (self.package, )
|
||||
|
||||
return "python package: %s" % (self.package,)
|
||||
|
||||
@classmethod
|
||||
def from_requirement_str(cls, text):
|
||||
from requirements.requirement import Requirement
|
||||
|
||||
req = Requirement.parse(text)
|
||||
return cls(package=req.name, specs=req.specs)
|
||||
|
||||
|
@ -59,7 +61,7 @@ class BinaryRequirement(Requirement):
|
|||
binary_name: str
|
||||
|
||||
def __init__(self, binary_name):
|
||||
super(BinaryRequirement, self).__init__('binary')
|
||||
super(BinaryRequirement, self).__init__("binary")
|
||||
self.binary_name = binary_name
|
||||
|
||||
|
||||
|
@ -70,7 +72,7 @@ class PerlModuleRequirement(Requirement):
|
|||
inc: Optional[List[str]]
|
||||
|
||||
def __init__(self, module, filename=None, inc=None):
|
||||
super(PerlModuleRequirement, self).__init__('perl-module')
|
||||
super(PerlModuleRequirement, self).__init__("perl-module")
|
||||
self.module = module
|
||||
self.filename = filename
|
||||
self.inc = inc
|
||||
|
@ -84,7 +86,7 @@ class NodePackageRequirement(Requirement):
|
|||
package: str
|
||||
|
||||
def __init__(self, package):
|
||||
super(NodePackageRequirement, self).__init__('npm-package')
|
||||
super(NodePackageRequirement, self).__init__("npm-package")
|
||||
self.package = package
|
||||
|
||||
|
||||
|
@ -93,7 +95,7 @@ class CargoCrateRequirement(Requirement):
|
|||
crate: str
|
||||
|
||||
def __init__(self, crate):
|
||||
super(CargoCrateRequirement, self).__init__('cargo-crate')
|
||||
super(CargoCrateRequirement, self).__init__("cargo-crate")
|
||||
self.crate = crate
|
||||
|
||||
|
||||
|
@ -102,7 +104,7 @@ class PkgConfigRequirement(Requirement):
|
|||
module: str
|
||||
|
||||
def __init__(self, module, minimum_version=None):
|
||||
super(PkgConfigRequirement, self).__init__('pkg-config')
|
||||
super(PkgConfigRequirement, self).__init__("pkg-config")
|
||||
self.module = module
|
||||
self.minimum_version = minimum_version
|
||||
|
||||
|
@ -112,7 +114,7 @@ class PathRequirement(Requirement):
|
|||
path: str
|
||||
|
||||
def __init__(self, path):
|
||||
super(PathRequirement, self).__init__('path')
|
||||
super(PathRequirement, self).__init__("path")
|
||||
self.path = path
|
||||
|
||||
|
||||
|
@ -121,15 +123,13 @@ class CHeaderRequirement(Requirement):
|
|||
header: str
|
||||
|
||||
def __init__(self, header):
|
||||
super(CHeaderRequirement, self).__init__('c-header')
|
||||
super(CHeaderRequirement, self).__init__("c-header")
|
||||
self.header = header
|
||||
|
||||
|
||||
class JavaScriptRuntimeRequirement(Requirement):
|
||||
|
||||
def __init__(self):
|
||||
super(JavaScriptRuntimeRequirement, self).__init__(
|
||||
'javascript-runtime')
|
||||
super(JavaScriptRuntimeRequirement, self).__init__("javascript-runtime")
|
||||
|
||||
|
||||
class ValaPackageRequirement(Requirement):
|
||||
|
@ -137,7 +137,7 @@ class ValaPackageRequirement(Requirement):
|
|||
package: str
|
||||
|
||||
def __init__(self, package: str):
|
||||
super(ValaPackageRequirement, self).__init__('vala')
|
||||
super(ValaPackageRequirement, self).__init__("vala")
|
||||
self.package = package
|
||||
|
||||
|
||||
|
@ -147,7 +147,7 @@ class RubyGemRequirement(Requirement):
|
|||
minimum_version: Optional[str]
|
||||
|
||||
def __init__(self, gem: str, minimum_version: Optional[str]):
|
||||
super(RubyGemRequirement, self).__init__('gem')
|
||||
super(RubyGemRequirement, self).__init__("gem")
|
||||
self.gem = gem
|
||||
self.minimum_version = minimum_version
|
||||
|
||||
|
@ -157,7 +157,7 @@ class GoPackageRequirement(Requirement):
|
|||
package: str
|
||||
|
||||
def __init__(self, package: str):
|
||||
super(GoPackageRequirement, self).__init__('go')
|
||||
super(GoPackageRequirement, self).__init__("go")
|
||||
self.package = package
|
||||
|
||||
|
||||
|
@ -166,7 +166,7 @@ class DhAddonRequirement(Requirement):
|
|||
path: str
|
||||
|
||||
def __init__(self, path: str):
|
||||
super(DhAddonRequirement, self).__init__('dh-addon')
|
||||
super(DhAddonRequirement, self).__init__("dh-addon")
|
||||
self.path = path
|
||||
|
||||
|
||||
|
@ -175,7 +175,7 @@ class PhpClassRequirement(Requirement):
|
|||
php_class: str
|
||||
|
||||
def __init__(self, php_class: str):
|
||||
super(PhpClassRequirement, self).__init__('php-class')
|
||||
super(PhpClassRequirement, self).__init__("php-class")
|
||||
self.php_class = php_class
|
||||
|
||||
|
||||
|
@ -185,7 +185,7 @@ class RPackageRequirement(Requirement):
|
|||
minimum_version: Optional[str]
|
||||
|
||||
def __init__(self, package: str, minimum_version: Optional[str] = None):
|
||||
super(RPackageRequirement, self).__init__('r-package')
|
||||
super(RPackageRequirement, self).__init__("r-package")
|
||||
self.package = package
|
||||
self.minimum_version = minimum_version
|
||||
|
||||
|
@ -195,7 +195,7 @@ class LibraryRequirement(Requirement):
|
|||
library: str
|
||||
|
||||
def __init__(self, library: str):
|
||||
super(LibraryRequirement, self).__init__('lib')
|
||||
super(LibraryRequirement, self).__init__("lib")
|
||||
self.library = library
|
||||
|
||||
|
||||
|
@ -204,7 +204,7 @@ class RubyFileRequirement(Requirement):
|
|||
filename: str
|
||||
|
||||
def __init__(self, filename: str):
|
||||
super(RubyFileRequirement, self).__init__('ruby-file')
|
||||
super(RubyFileRequirement, self).__init__("ruby-file")
|
||||
self.filename = filename
|
||||
|
||||
|
||||
|
@ -213,7 +213,7 @@ class XmlEntityRequirement(Requirement):
|
|||
url: str
|
||||
|
||||
def __init__(self, url: str):
|
||||
super(XmlEntityRequirement, self).__init__('xml-entity')
|
||||
super(XmlEntityRequirement, self).__init__("xml-entity")
|
||||
self.url = url
|
||||
|
||||
|
||||
|
@ -223,7 +223,7 @@ class SprocketsFileRequirement(Requirement):
|
|||
name: str
|
||||
|
||||
def __init__(self, content_type: str, name: str):
|
||||
super(SprocketsFileRequirement, self).__init__('sprockets-file')
|
||||
super(SprocketsFileRequirement, self).__init__("sprockets-file")
|
||||
self.content_type = content_type
|
||||
self.name = name
|
||||
|
||||
|
@ -233,7 +233,7 @@ class JavaClassRequirement(Requirement):
|
|||
classname: str
|
||||
|
||||
def __init__(self, classname: str):
|
||||
super(JavaClassRequirement, self).__init__('java-class')
|
||||
super(JavaClassRequirement, self).__init__("java-class")
|
||||
self.classname = classname
|
||||
|
||||
|
||||
|
@ -242,7 +242,7 @@ class HaskellPackageRequirement(Requirement):
|
|||
package: str
|
||||
|
||||
def __init__(self, package: str):
|
||||
super(HaskellPackageRequirement, self).__init__('haskell-package')
|
||||
super(HaskellPackageRequirement, self).__init__("haskell-package")
|
||||
self.package = package
|
||||
|
||||
|
||||
|
@ -251,14 +251,13 @@ class MavenArtifactRequirement(Requirement):
|
|||
artifacts: List[Tuple[str, str, str]]
|
||||
|
||||
def __init__(self, artifacts):
|
||||
super(MavenArtifactRequirement, self).__init__('maven-artifact')
|
||||
super(MavenArtifactRequirement, self).__init__("maven-artifact")
|
||||
self.artifacts = artifacts
|
||||
|
||||
|
||||
class GnomeCommonRequirement(Requirement):
|
||||
|
||||
def __init__(self):
|
||||
super(GnomeCommonRequirement, self).__init__('gnome-common')
|
||||
super(GnomeCommonRequirement, self).__init__("gnome-common")
|
||||
|
||||
|
||||
class JDKFileRequirement(Requirement):
|
||||
|
@ -267,7 +266,7 @@ class JDKFileRequirement(Requirement):
|
|||
filename: str
|
||||
|
||||
def __init__(self, jdk_path: str, filename: str):
|
||||
super(JDKFileRequirement, self).__init__('jdk-file')
|
||||
super(JDKFileRequirement, self).__init__("jdk-file")
|
||||
self.jdk_path = jdk_path
|
||||
self.filename = filename
|
||||
|
||||
|
@ -281,7 +280,7 @@ class PerlFileRequirement(Requirement):
|
|||
filename: str
|
||||
|
||||
def __init__(self, filename: str):
|
||||
super(PerlFileRequirement, self).__init__('perl-file')
|
||||
super(PerlFileRequirement, self).__init__("perl-file")
|
||||
self.filename = filename
|
||||
|
||||
|
||||
|
@ -290,7 +289,7 @@ class AutoconfMacroRequirement(Requirement):
|
|||
macro: str
|
||||
|
||||
def __init__(self, macro: str):
|
||||
super(AutoconfMacroRequirement, self).__init__('autoconf-macro')
|
||||
super(AutoconfMacroRequirement, self).__init__("autoconf-macro")
|
||||
self.macro = macro
|
||||
|
||||
|
||||
|
@ -301,6 +300,6 @@ class PythonModuleRequirement(Requirement):
|
|||
minimum_version: Optional[str]
|
||||
|
||||
def __init__(self, module, python_version=None, minimum_version=None):
|
||||
super(PythonModuleRequirement, self).__init__('python-module')
|
||||
super(PythonModuleRequirement, self).__init__("python-module")
|
||||
self.python_version = python_version
|
||||
self.minimum_version = minimum_version
|
||||
|
|
|
@ -17,13 +17,11 @@
|
|||
|
||||
|
||||
class UnsatisfiedRequirements(Exception):
|
||||
|
||||
def __init__(self, reqs):
|
||||
self.requirements = reqs
|
||||
|
||||
|
||||
class Resolver(object):
|
||||
|
||||
def install(self, requirements):
|
||||
raise NotImplementedError(self.install)
|
||||
|
||||
|
@ -38,7 +36,6 @@ class Resolver(object):
|
|||
|
||||
|
||||
class CPANResolver(Resolver):
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
|
@ -47,6 +44,7 @@ class CPANResolver(Resolver):
|
|||
|
||||
def install(self, requirements):
|
||||
from ..requirements import PerlModuleRequirement
|
||||
|
||||
missing = []
|
||||
for requirement in requirements:
|
||||
if not isinstance(requirement, PerlModuleRequirement):
|
||||
|
@ -55,7 +53,8 @@ class CPANResolver(Resolver):
|
|||
# TODO(jelmer): Specify -T to skip tests?
|
||||
self.session.check_call(
|
||||
["cpan", "-i", requirement.module],
|
||||
user="root", env={"PERL_MM_USE_DEFAULT": "1"}
|
||||
user="root",
|
||||
env={"PERL_MM_USE_DEFAULT": "1"},
|
||||
)
|
||||
if missing:
|
||||
raise UnsatisfiedRequirements(missing)
|
||||
|
@ -65,7 +64,6 @@ class CPANResolver(Resolver):
|
|||
|
||||
|
||||
class HackageResolver(Resolver):
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
|
@ -74,14 +72,15 @@ class HackageResolver(Resolver):
|
|||
|
||||
def install(self, requirements):
|
||||
from ..requirements import HaskellPackageRequirement
|
||||
|
||||
missing = []
|
||||
for requirement in requirements:
|
||||
if not isinstance(requirement, HaskellPackageRequirement):
|
||||
missing.append(requirement)
|
||||
continue
|
||||
self.session.check_call(
|
||||
["cabal", "install", requirement.package],
|
||||
user="root")
|
||||
["cabal", "install", requirement.package], user="root"
|
||||
)
|
||||
if missing:
|
||||
raise UnsatisfiedRequirements(missing)
|
||||
|
||||
|
@ -90,7 +89,6 @@ class HackageResolver(Resolver):
|
|||
|
||||
|
||||
class CargoResolver(Resolver):
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
|
@ -99,14 +97,15 @@ class CargoResolver(Resolver):
|
|||
|
||||
def install(self, requirements):
|
||||
from ..requirements import CargoCrateRequirement
|
||||
|
||||
missing = []
|
||||
for requirement in requirements:
|
||||
if not isinstance(requirement, CargoCrateRequirement):
|
||||
missing.append(requirement)
|
||||
continue
|
||||
self.session.check_call(
|
||||
["cargo", "install", requirement.crate],
|
||||
user="root")
|
||||
["cargo", "install", requirement.crate], user="root"
|
||||
)
|
||||
if missing:
|
||||
raise UnsatisfiedRequirements(missing)
|
||||
|
||||
|
@ -115,7 +114,6 @@ class CargoResolver(Resolver):
|
|||
|
||||
|
||||
class PypiResolver(Resolver):
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
|
@ -124,6 +122,7 @@ class PypiResolver(Resolver):
|
|||
|
||||
def install(self, requirements):
|
||||
from ..requirements import PythonPackageRequirement
|
||||
|
||||
missing = []
|
||||
for requirement in requirements:
|
||||
if not isinstance(requirement, PythonPackageRequirement):
|
||||
|
@ -143,7 +142,6 @@ NPM_COMMAND_PACKAGES = {
|
|||
|
||||
|
||||
class NpmResolver(Resolver):
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
|
||||
|
@ -152,6 +150,7 @@ class NpmResolver(Resolver):
|
|||
|
||||
def install(self, requirements):
|
||||
from ..requirements import NodePackageRequirement
|
||||
|
||||
missing = []
|
||||
for requirement in requirements:
|
||||
if not isinstance(requirement, NodePackageRequirement):
|
||||
|
@ -191,12 +190,15 @@ class StackedResolver(Resolver):
|
|||
|
||||
|
||||
def native_resolvers(session):
|
||||
return StackedResolver([
|
||||
CPANResolver(session),
|
||||
PypiResolver(session),
|
||||
NpmResolver(session),
|
||||
CargoResolver(session),
|
||||
HackageResolver(session)])
|
||||
return StackedResolver(
|
||||
[
|
||||
CPANResolver(session),
|
||||
PypiResolver(session),
|
||||
NpmResolver(session),
|
||||
CargoResolver(session),
|
||||
HackageResolver(session),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class ExplainResolver(Resolver):
|
||||
|
@ -215,14 +217,18 @@ def auto_resolver(session):
|
|||
# TODO(jelmer): if session is SchrootSession or if we're root, use apt
|
||||
from .apt import AptResolver
|
||||
from ..session.schroot import SchrootSession
|
||||
user = session.check_output(['echo', '$USER']).decode().strip()
|
||||
|
||||
user = session.check_output(["echo", "$USER"]).decode().strip()
|
||||
resolvers = []
|
||||
if isinstance(session, SchrootSession) or user == 'root':
|
||||
if isinstance(session, SchrootSession) or user == "root":
|
||||
resolvers.append(AptResolver.from_session(session))
|
||||
resolvers.extend([
|
||||
CPANResolver(session),
|
||||
PypiResolver(session),
|
||||
NpmResolver(session),
|
||||
CargoResolver(session),
|
||||
HackageResolver(session)])
|
||||
resolvers.extend(
|
||||
[
|
||||
CPANResolver(session),
|
||||
PypiResolver(session),
|
||||
NpmResolver(session),
|
||||
CargoResolver(session),
|
||||
HackageResolver(session),
|
||||
]
|
||||
)
|
||||
return StackedResolver(resolvers)
|
||||
|
|
|
@ -32,7 +32,6 @@ from ..requirements import (
|
|||
CHeaderRequirement,
|
||||
PkgConfigRequirement,
|
||||
PathRequirement,
|
||||
Requirement,
|
||||
JavaScriptRuntimeRequirement,
|
||||
ValaPackageRequirement,
|
||||
RubyGemRequirement,
|
||||
|
@ -55,20 +54,19 @@ from ..requirements import (
|
|||
AutoconfMacroRequirement,
|
||||
PythonModuleRequirement,
|
||||
PythonPackageRequirement,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class AptRequirement(Requirement):
|
||||
|
||||
def __init__(self, relations):
|
||||
super(AptRequirement, self).__init__('apt')
|
||||
super(AptRequirement, self).__init__("apt")
|
||||
self.relations = relations
|
||||
|
||||
@classmethod
|
||||
def simple(cls, package, minimum_version=None):
|
||||
rel = {'name': package}
|
||||
rel = {"name": package}
|
||||
if minimum_version is not None:
|
||||
rel['version'] = ('>=', minimum_version)
|
||||
rel["version"] = (">=", minimum_version)
|
||||
return cls([[rel]])
|
||||
|
||||
@classmethod
|
||||
|
@@ -81,35 +79,50 @@ class AptRequirement(Requirement):
    def touches_package(self, package):
        for rel in self.relations:
            for entry in rel:
                if entry['name'] == package:
                if entry["name"] == package:
                    return True
        return False


def python_spec_to_apt_rels(pkg_name, specs):
    # TODO(jelmer): Dealing with epoch, etc?
    if not specs:
        return [[{"name": pkg_name}]]
    else:
        rels = []
        for spec in specs:
            c = {">=": ">=", "<=": "<=", "<": "<<", ">": ">>", "=": "="}[spec[0]]
            rels.append([{"name": pkg_name, "version": (c, Version(spec[1]))}])
        return rels


def get_package_for_python_package(apt_mgr, package, python_version, specs=None):
    if python_version == "pypy":
        pkg_name = apt_mgr.get_package_for_paths(
            ["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % package],
            regex=True)
            ["/usr/lib/pypy/dist-packages/%s-.*.egg-info" % package.replace("-", "_")],
            regex=True,
        )
    elif python_version == "cpython2":
        pkg_name = apt_mgr.get_package_for_paths(
            ["/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info" % package],
            regex=True)
            [
                "/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info"
                % package.replace("-", "_")
            ],
            regex=True,
        )
    elif python_version == "cpython3":
        pkg_name = apt_mgr.get_package_for_paths(
            ["/usr/lib/python3/dist-packages/%s-.*.egg-info" % package],
            regex=True)
            [
                "/usr/lib/python3/dist-packages/%s-.*.egg-info"
                % package.replace("-", "_")
            ],
            regex=True,
        )
    else:
        raise NotImplementedError
    if pkg_name is None:
        return None
    # TODO(jelmer): Dealing with epoch, etc?
    if not specs:
        rels = [[{'name': pkg_name}]]
    else:
        rels = []
        for spec in specs:
            rels.append([{'name': pkg_name, 'version': (spec[0], Version(spec[1]))}])
    rels = python_spec_to_apt_rels(pkg_name, specs)
    return AptRequirement(rels)
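A quick illustration of the operator mapping performed by the new python_spec_to_apt_rels helper, which turns pip-style comparators into Debian relation operators (strict less/greater become << and >>). Version is assumed to be python-debian's debian.debian_support.Version, and the package name is made up:

from debian.debian_support import Version  # assumed import

rels = python_spec_to_apt_rels("python3-flask", [(">=", "1.0"), ("<", "2.0")])
# rels == [
#     [{"name": "python3-flask", "version": (">=", Version("1.0"))}],
#     [{"name": "python3-flask", "version": ("<<", Version("2.0"))}],
# ]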
@@ -169,13 +182,7 @@ def get_package_for_python_module(apt_mgr, module, python_version, specs):
    pkg_name = apt_mgr.get_package_for_paths(paths, regex=True)
    if pkg_name is None:
        return None
    rels = []
    if not specs:
        rels = [[{'name': pkg_name}]]
    else:
        rels = []
        for spec in specs:
            rels.append([{'name': pkg_name, 'version': (spec[0], Version(spec[1]))}])
    rels = python_spec_to_apt_rels(pkg_name, specs)
    return AptRequirement(rels)
@@ -184,8 +191,7 @@ def resolve_binary_req(apt_mgr, req):
        paths = [req.binary_name]
    else:
        paths = [
            posixpath.join(dirname, req.binary_name)
            for dirname in ["/usr/bin", "/bin"]
            posixpath.join(dirname, req.binary_name) for dirname in ["/usr/bin", "/bin"]
        ]
    pkg_name = apt_mgr.get_package_for_paths(paths)
    if pkg_name is not None:
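The path-based lookup that resolve_binary_req and the other resolver functions rely on can be pictured with a stub apt manager; the stub class below and the package name it returns are purely illustrative and stand in for the real AptManager.get_package_for_paths:

# Illustrative stub only: maps candidate paths straight to a package name.
class StubAptManager:
    def get_package_for_paths(self, paths, regex=False):
        known = {"/usr/bin/make": "make", "/bin/make": "make"}
        for path in paths:
            if path in known:
                return known[path]
        return None

# resolve_binary_req builds /usr/bin and /bin candidates for a binary name
# and would wrap whatever package this lookup finds in an AptRequirement.
assert StubAptManager().get_package_for_paths(["/usr/bin/make", "/bin/make"]) == "make"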
@@ -200,7 +206,8 @@ def resolve_pkg_config_req(apt_mgr, req):
    if package is None:
        package = apt_mgr.get_package_for_paths(
            [posixpath.join("/usr/lib", ".*", "pkgconfig", req.module + ".pc")],
            regex=True)
            regex=True,
        )
    if package is not None:
        return AptRequirement.simple(package, minimum_version=req.minimum_version)
    return None
@@ -228,7 +235,8 @@ def resolve_c_header_req(apt_mgr, req):

def resolve_js_runtime_req(apt_mgr, req):
    package = apt_mgr.get_package_for_paths(
        ["/usr/bin/node", "/usr/bin/duk"], regex=False)
        ["/usr/bin/node", "/usr/bin/duk"], regex=False
    )
    if package is not None:
        return AptRequirement.simple(package)
    return None
@@ -249,8 +257,7 @@ def resolve_ruby_gem_req(apt_mgr, req):
            "specifications/%s-.*\\.gemspec" % req.gem
        )
    ]
    package = apt_mgr.get_package_for_paths(
        paths, regex=True)
    package = apt_mgr.get_package_for_paths(paths, regex=True)
    if package is not None:
        return AptRequirement.simple(package, minimum_version=req.minimum_version)
    return None
@@ -258,8 +265,7 @@ def resolve_ruby_gem_req(apt_mgr, req):

def resolve_go_package_req(apt_mgr, req):
    package = apt_mgr.get_package_for_paths(
        [posixpath.join("/usr/share/gocode/src", req.package, ".*")],
        regex=True
        [posixpath.join("/usr/share/gocode/src", req.package, ".*")], regex=True
    )
    if package is not None:
        return AptRequirement.simple(package)
@@ -368,7 +374,8 @@ def resolve_java_class_req(apt_mgr, req):
    # system :(
    # TODO(jelmer): Call in session
    output = apt_mgr.session.check_output(
        ["java-propose-classpath", "-c" + req.classname])
        ["java-propose-classpath", "-c" + req.classname]
    )
    classpath = [p for p in output.decode().strip(":").strip().split(":") if p]
    if not classpath:
        logging.warning("unable to find classpath for %s", req.classname)
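The classpath handling above is plain string processing; a worked example with invented jar paths shows what the list comprehension yields:

# Example of the parsing expression used above, with made-up tool output.
output = b"/usr/share/java/junit4.jar:/usr/share/java/hamcrest-core.jar:\n"
classpath = [p for p in output.decode().strip(":").strip().split(":") if p]
# classpath == ["/usr/share/java/junit4.jar", "/usr/share/java/hamcrest-core.jar"]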
@@ -422,7 +429,7 @@ def resolve_maven_artifact_req(apt_mgr, req):


def resolve_gnome_common_req(apt_mgr, req):
    return AptRequirement.simple('gnome-common')
    return AptRequirement.simple("gnome-common")


def resolve_jdk_file_req(apt_mgr, req):
@@ -438,8 +445,7 @@ def resolve_perl_module_req(apt_mgr, req):

    if req.inc is None:
        if req.filename is None:
            paths = [posixpath.join(inc, req.relfilename)
                     for inc in DEFAULT_PERL_PATHS]
            paths = [posixpath.join(inc, req.relfilename) for inc in DEFAULT_PERL_PATHS]
        elif not posixpath.isabs(req.filename):
            return False
        else:
@@ -495,9 +501,13 @@ def resolve_python_module_req(apt_mgr, req):

def resolve_python_package_req(apt_mgr, req):
    if req.python_version == 2:
        return get_package_for_python_package(apt_mgr, req.package, "cpython2", req.specs)
        return get_package_for_python_package(
            apt_mgr, req.package, "cpython2", req.specs
        )
    elif req.python_version in (None, 3):
        return get_package_for_python_package(apt_mgr, req.package, "cpython3", req.specs)
        return get_package_for_python_package(
            apt_mgr, req.package, "cpython3", req.specs
        )
    else:
        return None
@@ -540,7 +550,6 @@ def resolve_requirement_apt(apt_mgr, req: Requirement) -> AptRequirement:


class AptResolver(Resolver):
    def __init__(self, apt):
        self.apt = apt
@@ -570,8 +579,9 @@ class AptResolver(Resolver):
            else:
                apt_requirements.append(apt_req)
        if apt_requirements:
            self.apt.satisfy([PkgRelation.str(chain(*[
                r.relations for r in apt_requirements]))])
            self.apt.satisfy(
                [PkgRelation.str(chain(*[r.relations for r in apt_requirements]))]
            )
        if still_missing:
            raise UnsatisfiedRequirements(still_missing)
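For context on the satisfy call above, a rough sketch of how the collected relations are flattened and rendered; the requirement names are invented and the exact string produced by python-debian's PkgRelation.str should be treated as approximate:

from itertools import chain
from debian.deb822 import PkgRelation

# Two hypothetical AptRequirement.relations values (each a list of or-groups):
relations = [
    [[{"name": "libc6"}]],
    [[{"name": "brz", "version": (">=", "3.0")}]],
]
# chain(*relations) concatenates the or-groups into one dependency list,
# which PkgRelation.str renders roughly as "libc6, brz (>= 3.0)".
dep_line = PkgRelation.str(list(chain(*relations)))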
@@ -62,8 +62,8 @@ class SchrootSession(Session):
            # TODO(jelmer): Capture stderr and forward in SessionSetupFailure
            raise SessionSetupFailure()
        logging.info(
            'Opened schroot session %s (from %s)', self.session_id,
            self.chroot)
            "Opened schroot session %s (from %s)", self.session_id, self.chroot
        )
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
@@ -157,7 +157,7 @@ class SchrootSession(Session):

    def _fullpath(self, path: str) -> str:
        if self._cwd is None:
            raise ValueError('no cwd set')
            raise ValueError("no cwd set")
        return os.path.join(self.location, os.path.join(self._cwd, path).lstrip("/"))

    def exists(self, path: str) -> bool:
@@ -158,11 +158,10 @@ janitor (0.1-1jan+some1) UNRELEASED; urgency=medium


class BuildArchitectureTests(TestCase):
    def setUp(self):
        super(BuildArchitectureTests, self).setUp()
        if not os.path.exists('/usr/bin/dpkg-architecture'):
            self.skipTest('not a debian system')
        if not os.path.exists("/usr/bin/dpkg-architecture"):
            self.skipTest("not a debian system")

    def test_is_str(self):
        self.assertIsInstance(get_build_architecture(), str)
@@ -30,7 +30,6 @@ from buildlog_consultant.common import (
    MissingRubyGem,
    MissingValaPackage,
)
from ..debian import apt
from ..debian.apt import AptManager, FileSearcher
from ..debian.fix_build import (
    resolve_error,
@@ -42,7 +41,6 @@ from breezy.tests import TestCaseWithTransport


class DummyAptSearcher(FileSearcher):
    def __init__(self, files):
        self._apt_files = files
@@ -59,8 +57,8 @@ class DummyAptSearcher(FileSearcher):
class ResolveErrorTests(TestCaseWithTransport):
    def setUp(self):
        super(ResolveErrorTests, self).setUp()
        if not os.path.exists('/usr/bin/dpkg-architecture'):
            self.skipTest('not a debian system')
        if not os.path.exists("/usr/bin/dpkg-architecture"):
            self.skipTest("not a debian system")
        self.tree = self.make_branch_and_tree(".")
        self.build_tree_contents(
            [
@@ -95,6 +93,7 @@ blah (0.1) UNRELEASED; urgency=medium

    def resolve(self, error, context=("build",)):
        from ..session.plain import PlainSession

        session = PlainSession()
        apt = AptManager(session)
        apt._searchers = [DummyAptSearcher(self._apt_files)]
@@ -122,8 +121,8 @@ blah (0.1) UNRELEASED; urgency=medium
            "/usr/bin/brz": "brz",
            "/usr/bin/brzier": "bash",
        }
        self.overrideEnv('DEBEMAIL', 'jelmer@debian.org')
        self.overrideEnv('DEBFULLNAME', 'Jelmer Vernooij')
        self.overrideEnv("DEBEMAIL", "jelmer@debian.org")
        self.overrideEnv("DEBFULLNAME", "Jelmer Vernooij")
        self.assertTrue(self.resolve(MissingCommand("brz")))
        self.assertEqual("libc6, brz", self.get_build_deps())
        rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision())