Reformat using black.

parent 4b1591d864
commit 8aae9c93d8

7 changed files with 327 additions and 266 deletions
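The hunks below are mechanical rewrites by black (assuming its defaults: double quotes, 88-column lines, exploded calls with trailing commas). A minimal sketch of the two most common transformations in this commit, using the SetupPy.setup() lines from the diff itself:

    # before (hand-formatted, single quotes):
    resolver.install([
        UpstreamRequirement('python3', 'pip'),
        UpstreamRequirement('binary', 'python3'),
    ])

    # after black (double quotes, one argument per line, trailing comma):
    resolver.install(
        [
            UpstreamRequirement("python3", "pip"),
            UpstreamRequirement("binary", "python3"),
        ]
    )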
@@ -45,14 +45,12 @@ def shebang_binary(p):
class UpstreamRequirement(object):
def __init__(self, family, name):
self.family = family
self.name = name
class UpstreamOutput(object):
def __init__(self, family, name):
self.family = family
self.name = name
@@ -18,7 +18,8 @@
import logging
import os
import sys
from . import UpstreamPackage
from . import note, UpstreamPackage
from .apt import UnidentifiedError
from .buildsystem import NoBuildToolsFound, detect_buildsystems
from .build import run_build
from .clean import run_clean
@@ -30,7 +31,7 @@ from .resolver import (
AutoResolver,
NativeResolver,
MissingDependencies,
)
)
from .test import run_test
@@ -44,18 +45,20 @@ def get_necessary_declared_requirements(resolver, requirements, stages):
def install_necessary_declared_requirements(resolver, buildsystem, stages):
missing = []
missing.extend(get_necessary_declared_requirements(
resolver, buildsystem.get_declared_dependencies(),
stages))
missing.extend(
get_necessary_declared_requirements(
resolver, buildsystem.get_declared_dependencies(), stages
)
)
resolver.install(missing)
STAGE_MAP = {
'dist': [],
'install': ['build'],
'test': ['test', 'dev'],
'build': ['build'],
'clean': []
"dist": [],
"install": ["build"],
"test": ["test", "dev"],
"build": ["build"],
"clean": [],
}
@@ -71,13 +74,16 @@ def main():
)
parser.add_argument("--schroot", type=str, help="schroot to run in.")
parser.add_argument(
'--resolve', choices=['explain', 'apt', 'native'],
default='apt',
help='What to do about missing dependencies')
"--resolve",
choices=["explain", "apt", "native"],
default="apt",
help="What to do about missing dependencies",
)
parser.add_argument(
'--ignore-declared-dependencies',
action='store_true',
help='Ignore declared dependencies, follow build errors only')
"--ignore-declared-dependencies",
action="store_true",
help="Ignore declared dependencies, follow build errors only",
)
args = parser.parse_args()
if args.schroot:
from .session.schroot import SchrootSession
@@ -88,13 +94,13 @@ def main():
session = PlainSession()
with session:
if args.resolve == 'apt':
if args.resolve == "apt":
resolver = AptResolver.from_session(session)
elif args.resolve == 'explain':
elif args.resolve == "explain":
resolver = ExplainResolver.from_session(session)
elif args.resolve == 'native':
elif args.resolve == "native":
resolver = NativeResolver.from_session(session)
elif args.resolver == 'auto':
elif args.resolver == "auto":
resolver = AutoResolver.from_session(session)
os.chdir(args.directory)
try:
@@ -103,29 +109,30 @@ def main():
stages = STAGE_MAP[args.subcommand]
if stages:
for bs in bss:
install_necessary_declared_requirements(
resolver, bs, stages)
if args.subcommand == 'dist':
install_necessary_declared_requirements(resolver, bs, stages)
if args.subcommand == "dist":
run_dist(session=session, buildsystems=bss, resolver=resolver)
if args.subcommand == 'build':
if args.subcommand == "build":
run_build(session, buildsystems=bss, resolver=resolver)
if args.subcommand == 'clean':
if args.subcommand == "clean":
run_clean(session, buildsystems=bss, resolver=resolver)
if args.subcommand == 'install':
if args.subcommand == "install":
run_install(session, buildsystems=bss, resolver=resolver)
if args.subcommand == 'test':
if args.subcommand == "test":
run_test(session, buildsystems=bss, resolver=resolver)
except UnidentifiedError:
return 1
except NoBuildToolsFound:
logging.info("No build tools found.")
return 1
except MissingDependencies as e:
for req in e.requirements:
note('Missing dependency (%s:%s)' % (
req.family, req.name))
note("Missing dependency (%s:%s)" % (req.family, req.name))
for resolver in [
AptResolver.from_session(session),
NativeResolver.from_session(session)]:
note(' %s' % (resolver.explain([req]), ))
NativeResolver.from_session(session),
]:
note(" %s" % (resolver.explain([req]),))
return 2
return 0
@@ -60,25 +60,25 @@ class BuildSystem(object):
class Pear(BuildSystem):
name = 'pear'
name = "pear"
def __init__(self, path):
self.path = path
def setup(self, resolver):
resolver.install([UpstreamRequirement('binary', 'pear')])
resolver.install([UpstreamRequirement("binary", "pear")])
def dist(self, session, resolver):
self.setup(resolver)
run_with_build_fixer(session, ['pear', 'package'])
run_with_build_fixer(session, ["pear", "package"])
def test(self, session, resolver):
self.setup(resolver)
run_with_build_fixer(session, ['pear', 'run-tests'])
run_with_build_fixer(session, ["pear", "run-tests"])
def build(self, session, resolver):
self.setup(resolver)
run_with_build_fixer(session, ['pear', 'build'])
run_with_build_fixer(session, ["pear", "build"])
def clean(self, session, resolver):
self.setup(resolver)
@@ -86,201 +86,215 @@ class Pear(BuildSystem):
def install(self, session, resolver):
self.setup(resolver)
run_with_build_fixer(session, ['pear', 'install'])
run_with_build_fixer(session, ["pear", "install"])
class SetupPy(BuildSystem):
name = 'setup.py'
name = "setup.py"
def __init__(self, path):
from distutils.core import run_setup
self.result = run_setup(os.path.abspath(path), stop_after="init")
def setup(self, resolver):
resolver.install([
UpstreamRequirement('python3', 'pip'),
UpstreamRequirement('binary', 'python3'),
])
with open('setup.py', 'r') as f:
resolver.install(
[
UpstreamRequirement("python3", "pip"),
UpstreamRequirement("binary", "python3"),
]
)
with open("setup.py", "r") as f:
setup_py_contents = f.read()
try:
with open("setup.cfg", "r") as f:
setup_cfg_contents = f.read()
except FileNotFoundError:
setup_cfg_contents = ''
if 'setuptools' in setup_py_contents:
logging.info('Reference to setuptools found, installing.')
resolver.install([UpstreamRequirement('python3', 'setuptools')])
if ('setuptools_scm' in setup_py_contents or
'setuptools_scm' in setup_cfg_contents):
logging.info('Reference to setuptools-scm found, installing.')
resolver.install([
UpstreamRequirement('python3', 'setuptools-scm'),
UpstreamRequirement('binary', 'git'),
UpstreamRequirement('binary', 'mercurial'),
])
setup_cfg_contents = ""
if "setuptools" in setup_py_contents:
logging.info("Reference to setuptools found, installing.")
resolver.install([UpstreamRequirement("python3", "setuptools")])
if (
"setuptools_scm" in setup_py_contents
or "setuptools_scm" in setup_cfg_contents
):
logging.info("Reference to setuptools-scm found, installing.")
resolver.install(
[
UpstreamRequirement("python3", "setuptools-scm"),
UpstreamRequirement("binary", "git"),
UpstreamRequirement("binary", "mercurial"),
]
)
# TODO(jelmer): Install setup_requires
def test(self, session, resolver):
self.setup(resolver)
self._run_setup(session, resolver, ['test'])
self._run_setup(session, resolver, ["test"])
def dist(self, session, resolver):
self.setup(resolver)
self._run_setup(session, resolver, ['sdist'])
self._run_setup(session, resolver, ["sdist"])
def clean(self, session, resolver):
self.setup(resolver)
self._run_setup(session, resolver, ['clean'])
self._run_setup(session, resolver, ["clean"])
def install(self, session, resolver):
self.setup(resolver)
self._run_setup(session, resolver, ['install'])
self._run_setup(session, resolver, ["install"])
def _run_setup(self, session, resolver, args):
interpreter = shebang_binary('setup.py')
interpreter = shebang_binary("setup.py")
if interpreter is not None:
if interpreter in ('python3', 'python2', 'python'):
resolver.install([UpstreamRequirement('binary', interpreter)])
if interpreter in ("python3", "python2", "python"):
resolver.install([UpstreamRequirement("binary", interpreter)])
else:
raise ValueError('Unknown interpreter %r' % interpreter)
run_with_build_fixer(
session, ['./setup.py'] + args)
raise ValueError("Unknown interpreter %r" % interpreter)
run_with_build_fixer(session, ["./setup.py"] + args)
else:
# Just assume it's Python 3
resolver.install([UpstreamRequirement('binary', 'python3')])
run_with_build_fixer(
session, ['python3', './setup.py'] + args)
resolver.install([UpstreamRequirement("binary", "python3")])
run_with_build_fixer(session, ["python3", "./setup.py"] + args)
def get_declared_dependencies(self):
for require in self.result.get_requires():
yield 'build', UpstreamRequirement('python3', require)
yield "build", UpstreamRequirement("python3", require)
if self.result.install_requires:
for require in self.result.install_requires:
yield 'install', UpstreamRequirement('python3', require)
yield "install", UpstreamRequirement("python3", require)
if self.result.tests_require:
for require in self.result.tests_require:
yield 'test', UpstreamRequirement('python3', require)
yield "test", UpstreamRequirement("python3", require)
def get_declared_outputs(self):
for script in (self.result.scripts or []):
yield UpstreamOutput('binary', os.path.basename(script))
for script in self.result.scripts or []:
yield UpstreamOutput("binary", os.path.basename(script))
entry_points = self.result.entry_points or {}
for script in entry_points.get('console_scripts', []):
yield UpstreamOutput('binary', script.split('=')[0])
for script in entry_points.get("console_scripts", []):
yield UpstreamOutput("binary", script.split("=")[0])
for package in self.result.packages or []:
yield UpstreamOutput('python3', package)
yield UpstreamOutput("python3", package)
class PyProject(BuildSystem):
name = 'pyproject'
name = "pyproject"
def __init__(self, path):
self.path = path
self.pyproject = self.load_toml()
def load_toml(self):
import toml
with open("pyproject.toml", "r") as pf:
with open(self.path, "r") as pf:
return toml.load(pf)
def dist(self, session, resolver):
pyproject = self.load_toml()
if "poetry" in pyproject.get("tool", []):
if "poetry" in self.pyproject.get("tool", []):
logging.info(
'Found pyproject.toml with poetry section, '
'assuming poetry project.')
resolver.install([
UpstreamRequirement('python3', 'venv'),
UpstreamRequirement('python3', 'pip'),
])
session.check_call(['pip3', 'install', 'poetry'], user='root')
session.check_call(['poetry', 'build', '-f', 'sdist'])
"Found pyproject.toml with poetry section, " "assuming poetry project."
)
resolver.install(
[
UpstreamRequirement("python3", "venv"),
UpstreamRequirement("python3", "pip"),
]
)
session.check_call(["pip3", "install", "poetry"], user="root")
session.check_call(["poetry", "build", "-f", "sdist"])
return
raise AssertionError("no supported section in pyproject.toml")
class SetupCfg(BuildSystem):
name = 'setup.cfg'
name = "setup.cfg"
def __init__(self, path):
self.path = path
def setup(self, resolver):
resolver.install([
UpstreamRequirement('python3', 'pep517'),
UpstreamRequirement('python3', 'pip'),
])
resolver.install(
[
UpstreamRequirement("python3", "pep517"),
UpstreamRequirement("python3", "pip"),
]
)
def dist(self, session, resolver):
self.setup(resolver)
session.check_call(['python3', '-m', 'pep517.build', '-s', '.'])
session.check_call(["python3", "-m", "pep517.build", "-s", "."])
class Npm(BuildSystem):
name = 'npm'
name = "npm"
def __init__(self, path):
import json
with open(path, 'r') as f:
with open(path, "r") as f:
self.package = json.load(f)
def get_declared_dependencies(self):
if 'devDependencies' in self.package:
for name, unused_version in (
self.package['devDependencies'].items()):
if "devDependencies" in self.package:
for name, unused_version in self.package["devDependencies"].items():
# TODO(jelmer): Look at version
yield 'dev', UpstreamRequirement('npm', name)
yield "dev", UpstreamRequirement("npm", name)
def setup(self, resolver):
resolver.install([UpstreamRequirement('binary', 'npm')])
resolver.install([UpstreamRequirement("binary", "npm")])
def dist(self, session, resolver):
self.setup(resolver)
run_with_build_fixer(session, ['npm', 'pack'])
run_with_build_fixer(session, ["npm", "pack"])
class Waf(BuildSystem):
name = 'waf'
name = "waf"
def __init__(self, path):
self.path = path
def setup(self, resolver):
resolver.install([UpstreamRequirement('binary', 'python3')])
resolver.install([UpstreamRequirement("binary", "python3")])
def dist(self, session, resolver):
self.setup(resolver)
run_with_build_fixer(session, ['./waf', 'dist'])
run_with_build_fixer(session, ["./waf", "dist"])
class Gem(BuildSystem):
name = 'gem'
name = "gem"
def __init__(self, path):
self.path = path
def setup(self, resolver):
resolver.install([UpstreamRequirement('binary', 'gem2deb')])
resolver.install([UpstreamRequirement("binary", "gem2deb")])
def dist(self, session, resolver):
self.setup(resolver)
gemfiles = [entry.name for entry in session.scandir('.')
if entry.name.endswith('.gem')]
gemfiles = [
entry.name for entry in session.scandir(".") if entry.name.endswith(".gem")
]
if len(gemfiles) > 1:
logging.warning('More than one gemfile. Trying the first?')
run_with_build_fixer(session, ['gem2tgz', gemfiles[0]])
logging.warning("More than one gemfile. Trying the first?")
run_with_build_fixer(session, ["gem2tgz", gemfiles[0]])
class DistInkt(BuildSystem):
def __init__(self, path):
self.path = path
self.name = 'dist-zilla'
self.name = "dist-zilla"
self.dist_inkt_class = None
with open('dist.ini', 'rb') as f:
with open("dist.ini", "rb") as f:
for line in f:
if not line.startswith(b";;"):
continue
@@ -290,102 +304,119 @@ class DistInkt(BuildSystem):
continue
if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"):
logging.info(
'Found Dist::Inkt section in dist.ini, '
'assuming distinkt.')
self.name = 'dist-inkt'
"Found Dist::Inkt section in dist.ini, " "assuming distinkt."
)
self.name = "dist-inkt"
self.dist_inkt_class = value.decode().strip("'")
return
logging.info('Found dist.ini, assuming dist-zilla.')
logging.info("Found dist.ini, assuming dist-zilla.")
def setup(self, resolver):
resolver.install([
UpstreamRequirement('perl', 'Dist::Inkt'),
])
resolver.install(
[
UpstreamRequirement("perl", "Dist::Inkt"),
]
)
def dist(self, session, resolver):
self.setup(resolver)
if self.name == 'dist-inkt':
resolver.install([
UpstreamRequirement('perl-module', self.dist_inkt_class)])
run_with_build_fixer(session, ['distinkt-dist'])
if self.name == "dist-inkt":
resolver.install([UpstreamRequirement("perl-module", self.dist_inkt_class)])
run_with_build_fixer(session, ["distinkt-dist"])
else:
# Default to invoking Dist::Zilla
resolver.install([UpstreamRequirement('perl', 'Dist::Zilla')])
run_with_build_fixer(session, ['dzil', 'build', '--in', '..'])
resolver.install([UpstreamRequirement("perl", "Dist::Zilla")])
run_with_build_fixer(session, ["dzil", "build", "--in", ".."])
class Make(BuildSystem):
name = 'make'
name = "make"
def setup(self, session, resolver):
if session.exists('Makefile.PL') and not session.exists('Makefile'):
resolver.install([UpstreamRequirement('binary', 'perl')])
run_with_build_fixer(session, ['perl', 'Makefile.PL'])
if session.exists("Makefile.PL") and not session.exists("Makefile"):
resolver.install([UpstreamRequirement("binary", "perl")])
run_with_build_fixer(session, ["perl", "Makefile.PL"])
if not session.exists('Makefile') and not session.exists('configure'):
if session.exists('autogen.sh'):
if shebang_binary('autogen.sh') is None:
run_with_build_fixer(
session, ['/bin/sh', './autogen.sh'])
if not session.exists("Makefile") and not session.exists("configure"):
if session.exists("autogen.sh"):
if shebang_binary("autogen.sh") is None:
run_with_build_fixer(session, ["/bin/sh", "./autogen.sh"])
try:
run_with_build_fixer(
session, ['./autogen.sh'])
run_with_build_fixer(session, ["./autogen.sh"])
except UnidentifiedError as e:
if ("Gnulib not yet bootstrapped; "
"run ./bootstrap instead.\n" in e.lines):
if (
"Gnulib not yet bootstrapped; "
"run ./bootstrap instead.\n" in e.lines
):
run_with_build_fixer(session, ["./bootstrap"])
run_with_build_fixer(session, ['./autogen.sh'])
run_with_build_fixer(session, ["./autogen.sh"])
else:
raise
elif (session.exists('configure.ac') or
session.exists('configure.in')):
resolver.install([
UpstreamRequirement('binary', 'autoconf'),
UpstreamRequirement('binary', 'automake'),
UpstreamRequirement('binary', 'gettextize'),
UpstreamRequirement('binary', 'libtoolize'),
])
run_with_build_fixer(session, ['autoreconf', '-i'])
elif session.exists("configure.ac") or session.exists("configure.in"):
resolver.install(
[
UpstreamRequirement("binary", "autoconf"),
UpstreamRequirement("binary", "automake"),
UpstreamRequirement("binary", "gettextize"),
UpstreamRequirement("binary", "libtoolize"),
]
)
run_with_build_fixer(session, ["autoreconf", "-i"])
if not session.exists('Makefile') and session.exists('configure'):
session.check_call(['./configure'])
if not session.exists("Makefile") and session.exists("configure"):
session.check_call(["./configure"])
def dist(self, session, resolver):
self.setup(session, resolver)
resolver.install([UpstreamRequirement('binary', 'make')])
resolver.install([UpstreamRequirement("binary", "make")])
try:
run_with_build_fixer(session, ['make', 'dist'])
run_with_build_fixer(session, ["make", "dist"])
except UnidentifiedError as e:
if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines:
pass
elif "make[1]: *** No rule to make target 'dist'. Stop.\n" in e.lines:
pass
elif ("Reconfigure the source tree "
elif (
"Reconfigure the source tree "
"(via './config' or 'perl Configure'), please.\n"
) in e.lines:
run_with_build_fixer(session, ['./config'])
run_with_build_fixer(session, ['make', 'dist'])
run_with_build_fixer(session, ["./config"])
run_with_build_fixer(session, ["make", "dist"])
elif (
"Please try running 'make manifest' and then run "
"'make dist' again.\n" in e.lines):
run_with_build_fixer(session, ['make', 'manifest'])
run_with_build_fixer(session, ['make', 'dist'])
"'make dist' again.\n" in e.lines
):
run_with_build_fixer(session, ["make", "manifest"])
run_with_build_fixer(session, ["make", "dist"])
elif "Please run ./configure first\n" in e.lines:
run_with_build_fixer(session, ['./configure'])
run_with_build_fixer(session, ['make', 'dist'])
elif any([re.match(
r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' '
r'Run \'./configure \[options\]\' and retry. Stop.\n',
line) for line in e.lines]):
run_with_build_fixer(session, ['./configure'])
run_with_build_fixer(session, ['make', 'dist'])
elif any([re.match(
r'Problem opening MANIFEST: No such file or directory '
r'at .* line [0-9]+\.', line) for line in e.lines]):
run_with_build_fixer(session, ['make', 'manifest'])
run_with_build_fixer(session, ['make', 'dist'])
run_with_build_fixer(session, ["./configure"])
run_with_build_fixer(session, ["make", "dist"])
elif any(
[
re.match(
r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' "
r"Run \'./configure \[options\]\' and retry. Stop.\n",
line,
)
for line in e.lines
]
):
run_with_build_fixer(session, ["./configure"])
run_with_build_fixer(session, ["make", "dist"])
elif any(
[
re.match(
r"Problem opening MANIFEST: No such file or directory "
r"at .* line [0-9]+\.",
line,
)
for line in e.lines
]
):
run_with_build_fixer(session, ["make", "manifest"])
run_with_build_fixer(session, ["make", "dist"])
else:
raise
else:
@@ -393,46 +424,48 @@ class Make(BuildSystem):
def get_declared_dependencies(self):
# TODO(jelmer): Split out the perl-specific stuff?
if os.path.exists('META.yml'):
if os.path.exists("META.yml"):
# See http://module-build.sourceforge.net/META-spec-v1.4.html for
# the specification of the format.
import ruamel.yaml
import ruamel.yaml.reader
with open('META.yml', 'rb') as f:
with open("META.yml", "rb") as f:
try:
data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader)
except ruamel.yaml.reader.ReaderError as e:
warnings.warn('Unable to parse META.yml: %s' % e)
warnings.warn("Unable to parse META.yml: %s" % e)
return
for require in data.get('requires', []):
yield 'build', UpstreamRequirement('perl', require)
for require in data.get("requires", []):
yield "build", UpstreamRequirement("perl", require)
class Cargo(BuildSystem):
name = 'cargo'
name = "cargo"
def __init__(self, path):
from toml.decoder import load
with open(path, 'r') as f:
with open(path, "r") as f:
self.cargo = load(f)
def get_declared_dependencies(self):
if 'dependencies' in self.cargo:
for name, details in self.cargo['dependencies'].items():
if "dependencies" in self.cargo:
for name, details in self.cargo["dependencies"].items():
# TODO(jelmer): Look at details['features'], details['version']
yield 'build', UpstreamRequirement('cargo-crate', name)
yield "build", UpstreamRequirement("cargo-crate", name)
class Golang(BuildSystem):
"""Go builds."""
name = 'golang'
name = "golang"
class Maven(BuildSystem):
name = 'maven'
name = "maven"
def __init__(self, path):
self.path = path
@@ -440,83 +473,99 @@ class Maven(BuildSystem):
class Cabal(BuildSystem):
name = 'cabal'
name = "cabal"
def __init__(self, path):
self.path = path
def detect_buildsystems(path):
def detect_buildsystems(path, trust_package=False):
"""Detect build systems."""
if os.path.exists(os.path.join(path, 'package.xml')):
logging.info('Found package.xml, assuming pear package.')
yield Pear('package.xml')
if os.path.exists(os.path.join(path, "package.xml")):
logging.info("Found package.xml, assuming pear package.")
yield Pear("package.xml")
if os.path.exists(os.path.join(path, 'setup.py')):
logging.info('Found setup.py, assuming python project.')
yield SetupPy('setup.py')
elif os.path.exists(os.path.join(path, 'pyproject.toml')):
logging.info('Found pyproject.toml, assuming python project.')
yield PyProject()
elif os.path.exists(os.path.join(path, 'setup.cfg')):
logging.info('Found setup.cfg, assuming python project.')
yield SetupCfg('setup.cfg')
if os.path.exists(os.path.join(path, "setup.py")):
logging.info("Found setup.py, assuming python project.")
yield SetupPy("setup.py")
elif os.path.exists(os.path.join(path, "pyproject.toml")):
logging.info("Found pyproject.toml, assuming python project.")
yield PyProject("pyproject.toml")
elif os.path.exists(os.path.join(path, "setup.cfg")):
logging.info("Found setup.cfg, assuming python project.")
yield SetupCfg("setup.cfg")
if os.path.exists(os.path.join(path, 'package.json')):
logging.info('Found package.json, assuming node package.')
yield Npm('package.json')
if os.path.exists(os.path.join(path, "package.json")):
logging.info("Found package.json, assuming node package.")
yield Npm("package.json")
if os.path.exists(os.path.join(path, 'waf')):
logging.info('Found waf, assuming waf package.')
yield Waf('waf')
if os.path.exists(os.path.join(path, "waf")):
logging.info("Found waf, assuming waf package.")
yield Waf("waf")
if os.path.exists(os.path.join(path, 'Cargo.toml')):
logging.info('Found Cargo.toml, assuming rust cargo package.')
yield Cargo('Cargo.toml')
if os.path.exists(os.path.join(path, "Cargo.toml")):
logging.info("Found Cargo.toml, assuming rust cargo package.")
yield Cargo("Cargo.toml")
if os.path.exists(os.path.join(path, 'pom.xml')):
logging.info('Found pom.xml, assuming maven package.')
yield Maven('pom.xml')
if os.path.exists(os.path.join(path, "pom.xml")):
logging.info("Found pom.xml, assuming maven package.")
yield Maven("pom.xml")
if (os.path.exists(os.path.join(path, 'dist.ini')) and
not os.path.exists(os.path.join(path, 'Makefile.PL'))):
yield DistInkt('dist.ini')
if os.path.exists(os.path.join(path, "dist.ini")) and not os.path.exists(
os.path.join(path, "Makefile.PL")
):
yield DistInkt("dist.ini")
gemfiles = [
entry.name for entry in os.scandir(path)
if entry.name.endswith('.gem')]
gemfiles = [entry.name for entry in os.scandir(path) if entry.name.endswith(".gem")]
if gemfiles:
yield Gem(gemfiles[0])
if any([os.path.exists(os.path.join(path, p)) for p in [
'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac',
'configure.in']]):
if any(
[
os.path.exists(os.path.join(path, p))
for p in [
"Makefile",
"Makefile.PL",
"autogen.sh",
"configure.ac",
"configure.in",
]
]
):
yield Make()
cabal_filenames = [
entry.name for entry in os.scandir(path)
if entry.name.endswith('.cabal')]
entry.name for entry in os.scandir(path) if entry.name.endswith(".cabal")
]
if cabal_filenames:
if len(cabal_filenames) == 1:
yield Cabal(cabal_filenames[0])
else:
warnings.warn(
'More than one cabal filename, ignoring all: %r' %
cabal_filenames)
"More than one cabal filename, ignoring all: %r" % cabal_filenames
)
if os.path.exists(os.path.join(path, '.travis.yml')):
if os.path.exists(os.path.join(path, ".travis.yml")):
import ruamel.yaml.reader
with open('.travis.yml', 'rb') as f:
with open(".travis.yml", "rb") as f:
try:
data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader)
except ruamel.yaml.reader.ReaderError as e:
warnings.warn('Unable to parse .travis.yml: %s' % (e, ))
warnings.warn("Unable to parse .travis.yml: %s" % (e,))
else:
language = data.get('language')
if language == 'go':
language = data.get("language")
if language == "go":
yield Golang()
for entry in os.scandir(path):
if entry.name.endswith('.go'):
if entry.name.endswith(".go"):
yield Golang()
break
def get_buildsystem(path, trust_package=False):
for buildsystem in detect_buildsystems(path, trust_package=trust_package):
return buildsystem
raise NoBuildToolsFound()
@@ -35,13 +35,14 @@ from debmutate.changelog import get_maintainer, format_datetime
from breezy import osutils
from breezy.mutabletree import MutableTree
from breezy.plugins.debian.builder import BuildFailedError
from breezy.plugins.debian.util import BuildFailedError
from buildlog_consultant.sbuild import (
worker_failure_from_sbuild_log,
SbuildFailure,
)
DEFAULT_BUILDER = "sbuild --no-clean-source"
@@ -30,7 +30,7 @@ from debian.deb822 import (
Deb822,
PkgRelation,
Release,
)
)
from debian.changelog import Version
from breezy.commit import PointlessCommit
@@ -113,6 +113,7 @@ from buildlog_consultant.sbuild import (
SbuildFailure,
)
DEFAULT_MAX_ITERATIONS = 10
@@ -124,18 +125,21 @@ class CircularDependency(Exception):
class DependencyContext(object):
def __init__(self, tree: MutableTree,
subpath: str = '', committer: Optional[str] = None,
update_changelog: bool = True):
def __init__(
self,
tree: MutableTree,
subpath: str = "",
committer: Optional[str] = None,
update_changelog: bool = True,
):
self.tree = tree
self.subpath = subpath
self.committer = committer
self.update_changelog = update_changelog
def add_dependency(
self, package: str,
minimum_version: Optional[Version] = None) -> bool:
self, package: str, minimum_version: Optional[Version] = None
) -> bool:
raise NotImplementedError(self.add_dependency)
@@ -273,9 +277,13 @@ def add_test_dependency(
)
def commit_debian_changes(tree: MutableTree, subpath: str,
summary: str, committer: Optional[str] = None,
update_changelog: bool = True) -> bool:
def commit_debian_changes(
tree: MutableTree,
subpath: str,
summary: str,
committer: Optional[str] = None,
update_changelog: bool = True,
) -> bool:
with tree.lock_write():
try:
if update_changelog:
@@ -115,12 +115,16 @@ class DistCatcher(object):
def create_dist_schroot(
tree: Tree, target_dir: str,
chroot: str, packaging_tree: Optional[Tree] = None,
tree: Tree,
target_dir: str,
chroot: str,
packaging_tree: Optional[Tree] = None,
include_controldir: bool = True,
subdir: Optional[str] = None) -> str:
subdir: Optional[str] = None,
) -> str:
from .buildsystem import detect_buildsystems
from .resolver import AptResolver
if subdir is None:
subdir = "package"
with SchrootSession(chroot) as session:
@@ -17,13 +17,11 @@
class MissingDependencies(Exception):
def __init__(self, reqs):
self.requirements = reqs
class Resolver(object):
def install(self, requirements):
raise NotImplementedError(self.install)
@@ -32,21 +30,20 @@ class Resolver(object):
class AptResolver(Resolver):
def __init__(self, apt):
self.apt = apt
@classmethod
def from_session(cls, session):
from .apt import AptManager
return cls(AptManager(session))
def install(self, requirements):
missing = []
for req in requirements:
pps = list(self._possible_paths(req))
if (not pps or
not any(self.apt.session.exists(p) for p in pps)):
if not pps or not any(self.apt.session.exists(p) for p in pps):
missing.append(req)
if missing:
self.apt.install(list(self.resolve(missing)))
@@ -55,22 +52,21 @@ class AptResolver(Resolver):
raise NotImplementedError(self.explain)
def _possible_paths(self, req):
if req.family == 'binary':
yield '/usr/bin/%s' % req.name
if req.family == "binary":
yield "/usr/bin/%s" % req.name
else:
return
def resolve(self, requirements):
for req in requirements:
if req.family == 'python3':
yield 'python3-%s' % req.name
if req.family == "python3":
yield "python3-%s" % req.name
else:
list(self._possible_paths(req))
raise NotImplementedError
class NativeResolver(Resolver):
def __init__(self, session):
self.session = session
@@ -86,7 +82,6 @@ class NativeResolver(Resolver):
class ExplainResolver(Resolver):
def __init__(self, session):
self.session = session
@@ -99,8 +94,7 @@ class ExplainResolver(Resolver):
class AutoResolver(Resolver):
"""Automatically find out the most appropriate way to instal dependencies.
"""
"""Automatically find out the most appropriate way to instal dependencies."""
def __init__(self, session):
self.session = session