New upstream release.

commit fe047d1cf3
17 changed files with 274 additions and 72 deletions
PKG-INFO (2 changes)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ognibuild
-Version: 0.0.4
+Version: 0.0.5
 Summary: Detect and run any build system
 Home-page: https://jelmer.uk/code/ognibuild
 Maintainer: Jelmer Vernooij

@@ -38,6 +38,7 @@ issues (or lack of support for a particular ecosystem), please file a bug.

 ### Supported Build Systems

+- Bazel
 - Cabal
 - Cargo
 - Golang

debian/changelog (vendored, 9 changes)
@@ -1,4 +1,13 @@
+ognibuild (0.0.5-1) UNRELEASED; urgency=low
+
+  * New upstream release.
+    + Fixes cmake support. Closes: #988572
+    + Preserve environment when building Python packages. Closes: #988571
+
+ -- Jelmer Vernooij <jelmer@debian.org>  Sun, 16 May 2021 17:08:54 -0000
+
 ognibuild (0.0.4-1) unstable; urgency=low

   * New upstream release.

  -- Jelmer Vernooij <jelmer@debian.org>  Wed, 07 Apr 2021 00:11:09 +0100

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ognibuild
-Version: 0.0.4
+Version: 0.0.5
 Summary: Detect and run any build system
 Home-page: https://jelmer.uk/code/ognibuild
 Maintainer: Jelmer Vernooij

@@ -20,7 +20,7 @@ import os
 import stat


-__version__ = (0, 0, 4)
+__version__ = (0, 0, 5)


 USER_AGENT = "Ognibuild"

@@ -106,6 +106,12 @@ def main(): # noqa: C901
         default="auto",
         help="What to do about missing dependencies",
     )
+    parser.add_argument(
+        '--apt', help=argparse.SUPPRESS,
+        dest='resolve', action='store_const', const='apt')
+    parser.add_argument(
+        '--native', help=argparse.SUPPRESS,
+        dest='native', action='store_const', const='native')
     parser.add_argument(
         "--explain",
         action="store_true",

@@ -124,10 +130,14 @@ def main(): # noqa: C901
     subparsers.add_parser("clean")
     subparsers.add_parser("test")
     subparsers.add_parser("info")
+    exec_parser = subparsers.add_parser("exec")
+    exec_parser.add_argument('subargv', nargs=argparse.REMAINDER, help='Command to run.')
     install_parser = subparsers.add_parser("install")
     install_parser.add_argument(
         "--user", action="store_true", help="Install in local-user directories."
     )
+    install_parser.add_argument(
+        "--prefix", type=str, help='Prefix to install in')

     args = parser.parse_args()
     if not args.subcommand:

@@ -161,10 +171,14 @@ def main(): # noqa: C901
     elif args.resolve == "auto":
         resolver = auto_resolver(session, explain=args.explain)
     logging.info("Using requirement resolver: %s", resolver)
+    fixers = determine_fixers(session, resolver, explain=args.explain)
     try:
+        if args.subcommand == "exec":
+            from .fix_build import run_with_build_fixers
+            run_with_build_fixers(session, args.subargv, fixers)
+            return 0
         bss = list(detect_buildsystems(args.directory))
         logging.info("Detected buildsystems: %s", ", ".join(map(str, bss)))
-        fixers = determine_fixers(session, resolver, explain=args.explain)
         if not args.ignore_declared_dependencies:
             stages = STAGE_MAP[args.subcommand]
             if stages:

@@ -207,6 +221,7 @@ def main(): # noqa: C901
             resolver=resolver,
             fixers=fixers,
             user=args.user,
+            prefix=args.prefix,
         )
     if args.subcommand == "test":
         from .test import run_test

@@ -48,6 +48,7 @@ from buildlog_consultant.common import (
     MissingRubyFile,
     MissingAutoconfMacro,
     MissingValaPackage,
+    MissingBoostComponents,
     MissingXfceDependency,
     MissingHaskellDependencies,
     MissingVagueDependency,

@@ -64,6 +65,7 @@ from buildlog_consultant.common import (
     MissingPerlPredeclared,
     MissingLatexFile,
     MissingCargoCrate,
+    MissingStaticLibrary,
 )
 from buildlog_consultant.apt import UnsatisfiedAptDependencies

@@ -89,6 +91,7 @@ from .requirements import (
     CMakefileRequirement,
     HaskellPackageRequirement,
     MavenArtifactRequirement,
+    BoostComponentRequirement,
     GnomeCommonRequirement,
     JDKFileRequirement,
     JDKRequirement,

@@ -108,6 +111,7 @@ from .requirements import (
     PerlPreDeclaredRequirement,
     LatexPackageRequirement,
     CargoCrateRequirement,
+    StaticLibraryRequirement,
 )
 from .resolver import UnsatisfiedRequirements

@@ -131,6 +135,8 @@ def problem_to_upstream_requirement(problem): # noqa: C901
         return ValaPackageRequirement(problem.package)
     elif isinstance(problem, MissingGoPackage):
         return GoPackageRequirement(problem.package)
+    elif isinstance(problem, MissingBoostComponents):
+        return [BoostComponentRequirement(name) for name in problem.components]
     elif isinstance(problem, DhAddonLoadFailure):
         return DhAddonRequirement(problem.path)
     elif isinstance(problem, MissingPhpClass):

@@ -139,6 +145,8 @@ def problem_to_upstream_requirement(problem): # noqa: C901
         return RPackageRequirement(problem.package, problem.minimum_version)
     elif isinstance(problem, MissingNodeModule):
         return NodeModuleRequirement(problem.module)
+    elif isinstance(problem, MissingStaticLibrary):
+        return StaticLibraryRequirement(problem.library, problem.filename)
     elif isinstance(problem, MissingNodePackage):
         return NodePackageRequirement(problem.package)
     elif isinstance(problem, MissingLatexFile):

@@ -185,7 +193,11 @@ def problem_to_upstream_requirement(problem): # noqa: C901
     elif isinstance(problem, UnknownCertificateAuthority):
         return CertificateAuthorityRequirement(problem.url)
     elif isinstance(problem, MissingPerlPredeclared):
-        return PerlPreDeclaredRequirement(problem.name)
+        ret = PerlPreDeclaredRequirement(problem.name)
+        try:
+            return ret.lookup_module()
+        except KeyError:
+            return ret
     elif isinstance(problem, MissingCargoCrate):
         # TODO(jelmer): handle problem.requirements
         return CargoCrateRequirement(problem.crate)

@@ -64,6 +64,8 @@ class InstallTarget(object):
     # Whether to prefer user-specific installation
     user: Optional[bool]

+    prefix: Optional[str]
+
     # TODO(jelmer): Add information about target directory, layout, etc.


@@ -445,6 +447,8 @@ class SetupPy(BuildSystem):
             extra_args = []
             if install_target.user:
                 extra_args.append("--user")
+            if install_target.prefix:
+                extra_args.append("--prefix=%s" % install_target.prefix)
             self._run_setup(session, resolver, ["install"] + extra_args, fixers)
         else:
             raise NotImplementedError

@@ -459,7 +463,8 @@ class SetupPy(BuildSystem):
         if interpreter is None:
             interpreter = self.DEFAULT_PYTHON
         argv = [interpreter, "./setup.py"] + args
-        env = {}
+        # TODO(jelmer): Perhaps this should be additive?
+        env = dict(os.environ)
         # Inherit SETUPTOOLS_SCM_PRETEND_VERSION from the current environment
         if "SETUPTOOLS_SCM_PRETEND_VERSION" in os.environ:
             env["SETUPTOOLS_SCM_PRETEND_VERSION"] = os.environ[

@@ -544,6 +549,35 @@ class SetupPy(BuildSystem):
             return cls(path)


+class Bazel(BuildSystem):
+
+    name = "bazel"
+
+    def __init__(self, path):
+        self.path = path
+
+    def __repr__(self):
+        return "%s(%r)" % (type(self).__name__, self.path)
+
+    @classmethod
+    def exists(cls, path):
+        if not os.path.exists(os.path.join(path, "BUILD")):
+            return False
+        return True
+
+    @classmethod
+    def probe(cls, path):
+        if cls.exists(path):
+            logging.debug("Found BUILD, assuming bazel package.")
+            return cls(path)
+
+    def build(self, session, resolver, fixers):
+        run_with_build_fixers(session, ["bazel", "build", "//..."], fixers)
+
+    def test(self, session, resolver, fixers):
+        run_with_build_fixers(session, ["bazel", "test", "//..."], fixers)
+
+
 class Octave(BuildSystem):

     name = "octave"
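The new Bazel class follows the same probe/build/test shape the other build systems use. A minimal sketch of how it would be driven, assuming session, resolver and fixers objects from the usual ognibuild plumbing:

    # Illustrative only; not part of the commit.
    bs = Bazel.probe("/path/to/project")       # a Bazel instance iff ./BUILD exists
    if bs is not None:
        bs.build(session, resolver, fixers)    # runs: bazel build //...
        bs.test(session, resolver, fixers)     # runs: bazel test //...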
@@ -692,6 +726,7 @@ class R(BuildSystem):
         return dc.copy_single(target_directory)

     def install(self, session, resolver, fixers, install_target):
+        extra_args.append("--prefix=%s" % install_target.prefix)
         r_path = guaranteed_which(session, resolver, "R")
         run_with_build_fixers(session, [r_path, "CMD", "INSTALL", "."], fixers)


@@ -750,7 +785,7 @@ class Meson(BuildSystem):

     def _setup(self, session, fixers):
         if not session.exists("build"):
-            session.check_call(["mkdir", "build"])
+            session.mkdir("build")
         run_with_build_fixers(session, ["meson", "setup", "build"], fixers)

     def clean(self, session, resolver, fixers):

@@ -764,7 +799,12 @@ class Meson(BuildSystem):
     def dist(self, session, resolver, fixers, target_directory, quiet=False):
         self._setup(session, fixers)
         with DistCatcher([session.external_path("build/meson-dist")]) as dc:
+            try:
                 run_with_build_fixers(session, ["ninja", "-C", "build", "dist"], fixers)
+            except UnidentifiedError as e:
+                if "ninja: error: unknown target 'dist', did you mean 'dino'?" in e.lines:
+                    raise NotImplementedError
+                raise
         return dc.copy_single(target_directory)

     def test(self, session, resolver, fixers):

@@ -965,6 +1005,7 @@ class DistZilla(BuildSystem):
         return cls(os.path.join(path, "dist.ini"))

     def get_declared_dependencies(self, session, fixers=None):
         if os.path.exists(os.path.join(self.path, "dist.ini")):
             lines = run_with_build_fixers(session, ["dzil", "authordeps"], fixers)
             for entry in lines:
                 yield "build", PerlModuleRequirement(entry.strip())

@@ -1017,26 +1058,35 @@ def _declared_deps_from_meta_yml(f):
     except ruamel.yaml.reader.ReaderError as e:
         warnings.warn("Unable to parse META.yml: %s" % e)
         return
-    for require in data.get("requires", []):
+    for require in data.get("requires", None) or []:
         yield "core", PerlModuleRequirement(require)
-    for require in data.get("build_requires", []):
+    for require in data.get("build_requires", None) or []:
         yield "build", PerlModuleRequirement(require)
-    for require in data.get("configure_requires", []):
+    for require in data.get("configure_requires", None) or []:
         yield "build", PerlModuleRequirement(require)
     # TODO(jelmer): recommends


 class Make(BuildSystem):

     name = "make"

     def __init__(self, path):
         self.path = path
+        if os.path.exists(os.path.join(path, 'Makefile.PL')):
+            self.name = 'makefile.pl'
+        elif os.path.exists(os.path.join(path, 'Makefile.am')):
+            self.name = 'automake'
+        elif any([os.path.exists(os.path.join(path, n))
+                  for n in ['configure.ac', 'configure.in', 'autogen.sh']]):
+            self.name = 'autoconf'
+        elif os.path.exists(os.path.join(path, "CMakeLists.txt")):
+            self.name = 'cmake'
+        else:
+            self.name = "make"

     def __repr__(self):
         return "%s(%r)" % (type(self).__name__, self.path)

-    def setup(self, session, resolver, fixers):
+    def setup(self, session, resolver, fixers, prefix=None):
         def makefile_exists():
             return any(
                 [session.exists(p) for p in ["Makefile", "GNUmakefile", "makefile"]]
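The "or []" guard in _declared_deps_from_meta_yml matters because a META.yml key such as "requires:" can be present but empty; the YAML parser then returns None rather than a list. A small standalone illustration:

    # "requires:" with no value parses to None, not to an empty list.
    data = {"requires": None}
    # data.get("requires", []) would return None here, and iterating None
    # raises TypeError; the new "or []" fallback iterates nothing instead.
    for require in data.get("requires", None) or []:
        pass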
@@ -1065,54 +1115,87 @@ class Make(BuildSystem):
             run_with_build_fixers(session, ["autoreconf", "-i"], fixers)

         if not makefile_exists() and session.exists("configure"):
-            run_with_build_fixers(session, ["./configure"], fixers)
+            extra_args = []
+            if prefix is not None:
+                extra_args.append('--prefix=%s' % prefix)
+            run_with_build_fixers(session, ["./configure"] + extra_args, fixers)

         if not makefile_exists() and any(
             [n.name.endswith(".pro") for n in session.scandir(".")]
         ):
             run_with_build_fixers(session, ["qmake"], fixers)

+        if not makefile_exists() and session.exists('CMakeLists.txt'):
+            session.mkdir('build')
+            run_with_build_fixers(session, ["cmake", '..'], fixers, cwd='build')
+
     def build(self, session, resolver, fixers):
         self.setup(session, resolver, fixers)
-        run_with_build_fixers(session, ["make", "all"], fixers)
+        self._run_make(session, ["all"], fixers)

     def clean(self, session, resolver, fixers):
         self.setup(session, resolver, fixers)
-        run_with_build_fixers(session, ["make", "clean"], fixers)
+        self._run_make(session, ["clean"], fixers)

-    def test(self, session, resolver, fixers):
-        self.setup(session, resolver, fixers)
-        run_with_build_fixers(session, ["make", "check"], fixers)
-
-    def install(self, session, resolver, fixers, install_target):
-        self.setup(session, resolver, fixers)
-        run_with_build_fixers(session, ["make", "install"], fixers)
-
-    def dist(self, session, resolver, fixers, target_directory, quiet=False):
-        self.setup(session, resolver, fixers)
-        with DistCatcher.default(session.external_path(".")) as dc:
+    def _run_make(self, session, args, fixers, prefix=None):
+        def _wants_configure(line):
+            if line.startswith("Run ./configure"):
+                return True
+            if line == "Please run ./configure first":
+                return True
+            if line.startswith("Project not configured"):
+                return True
+            if line.startswith("The project was not configured"):
+                return True
+            return False
+        if session.exists('build'):
+            cwd = 'build'
+        else:
+            cwd = None
         try:
-            run_with_build_fixers(session, ["make", "dist"], fixers)
+            run_with_build_fixers(session, ["make"] + args, fixers, cwd=cwd)
         except UnidentifiedError as e:
-            if "make: *** No rule to make target 'dist'. Stop." in e.lines:
-                raise NotImplementedError
-            elif "make[1]: *** No rule to make target 'dist'. Stop." in e.lines:
-                raise NotImplementedError
+            if len(e.lines) < 5 and any([_wants_configure(line) for line in e.lines]):
+                extra_args = []
+                if prefix is not None:
+                    extra_args.append("--prefix=%s" % prefix)
+                run_with_build_fixers(session, ["./configure"] + extra_args, fixers)
+                run_with_build_fixers(session, ["make"] + args, fixers)
             elif (
                 "Reconfigure the source tree "
                 "(via './config' or 'perl Configure'), please."
             ) in e.lines:
                 run_with_build_fixers(session, ["./config"], fixers)
-                run_with_build_fixers(session, ["make", "dist"], fixers)
+                run_with_build_fixers(session, ["make"] + args, fixers)
             else:
                 raise

+    def test(self, session, resolver, fixers):
+        self.setup(session, resolver, fixers)
+        self._run_make(session, ["check"], fixers)
+
+    def install(self, session, resolver, fixers, install_target):
+        self.setup(session, resolver, fixers, prefix=install_target.prefix)
+        self._run_make(session, ["install"], fixers, prefix=install_target.prefix)
+
+    def dist(self, session, resolver, fixers, target_directory, quiet=False):
+        self.setup(session, resolver, fixers)
+        with DistCatcher.default(session.external_path(".")) as dc:
+            try:
+                self._run_make(session, ["dist"], fixers)
+            except UnidentifiedError as e:
+                if "make: *** No rule to make target 'dist'. Stop." in e.lines:
+                    raise NotImplementedError
+                elif "make[1]: *** No rule to make target 'dist'. Stop." in e.lines:
+                    raise NotImplementedError
+                elif "ninja: error: unknown target 'dist', did you mean 'dino'?" in e.lines:
+                    raise NotImplementedError
                 elif (
                     "Please try running 'make manifest' and then run "
                     "'make dist' again." in e.lines
                 ):
                     run_with_build_fixers(session, ["make", "manifest"], fixers)
                     run_with_build_fixers(session, ["make", "dist"], fixers)
                 elif "Please run ./configure first" in e.lines:
                     run_with_build_fixers(session, ["./configure"], fixers)
                     run_with_build_fixers(session, ["make", "dist"], fixers)
                 elif any(
                     [
                         re.match(
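Read together with the changelog's cmake fix, the intent is that setup() configures a CMake-only tree into a build/ directory and _run_make() then runs make inside it. A condensed trace of the calls the diff implies (a sketch, not a verbatim excerpt):

    # Make.setup(): no Makefile yet, but CMakeLists.txt is present
    session.mkdir('build')
    run_with_build_fixers(session, ["cmake", '..'], fixers, cwd='build')
    # Make.build() -> Make._run_make(session, ["all"], fixers):
    # a build/ directory now exists, so make is invoked there
    run_with_build_fixers(session, ["make", "all"], fixers, cwd='build')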
@@ -1349,10 +1432,11 @@ class Maven(BuildSystem):
         deps_tag = root.find("dependencies")
         if deps_tag:
             for dep in deps_tag.findall("dependency"):
+                version_tag = dep.find("version")
                 yield "core", MavenArtifactRequirement(
                     dep.find("groupId").text,
                     dep.find("artifactId").text,
-                    dep.find("version").text,
+                    version_tag.text if version_tag else None,
                 )


@@ -1450,7 +1534,10 @@ class PerlBuildTiny(BuildSystem):
         self.setup(session, fixers)
         with DistCatcher([session.external_path('.')]) as dc:
             if self.minilla:
-                run_with_build_fixers(session, ["minil", "dist"], fixers)
+                # minil seems to return 0 even if it didn't produce a tarball :(
+                run_with_build_fixers(
+                    session, ["minil", "dist"], fixers,
+                    check_success=lambda retcode, lines: bool(dc.find_files()))
             else:
                 try:
                     run_with_build_fixers(session, ["./Build", "dist"], fixers)

@@ -1473,11 +1560,16 @@ class PerlBuildTiny(BuildSystem):

     def get_declared_dependencies(self, session, fixers=None):
         self.setup(session, fixers)
+        if self.minilla:
+            pass  # Minilla doesn't seem to have a way to just regenerate the metadata :(
+        else:
             try:
                 run_with_build_fixers(session, ["./Build", "distmeta"], fixers)
             except UnidentifiedError as e:
                 if "No such action 'distmeta'" in e.lines:
                     pass
+                if "Do not run distmeta. Install Minilla and `minil install` instead." in e.lines:
+                    self.minilla = True
                 else:
                     raise
         try:

@@ -1498,8 +1590,8 @@ BUILDSYSTEM_CLSES = [
     SetupPy,
     Npm,
     Waf,
-    Cargo,
     Meson,
+    Cargo,
     Cabal,
     Gradle,
     Maven,

@@ -1509,6 +1601,7 @@ BUILDSYSTEM_CLSES = [
     Golang,
     R,
     Octave,
+    Bazel,
     # Make is intentionally at the end of the list.
     Make,
     Composer,

@@ -433,7 +433,7 @@ def fix_missing_makefile_pl(error, phase, context):
     return False


-def coerce_unaccpetable_predicate(error, phase, context):
+def coerce_unacceptable_predicate(error, phase, context):
     from debmutate.debcargo import DebcargoEditor
     with DebcargoEditor(context.abspath('debian/debcargo.toml')) as editor:
         editor['allow_prerelease_deps'] = True

@@ -492,7 +492,7 @@ def versioned_package_fixers(session, packaging_context, apt):
         packaging_context, MissingConfigStatusInput, fix_missing_config_status_input
     ),
     SimpleBuildFixer(packaging_context, MissingPerlFile, fix_missing_makefile_pl),
-    SimpleBuildFixer(packaging_context, DebcargoUnacceptablePredicate, coerce_unaccpetable_predicate),
+    SimpleBuildFixer(packaging_context, DebcargoUnacceptablePredicate, coerce_unacceptable_predicate),
 ]


@@ -44,14 +44,17 @@ class BuildFixer(object):
         return self._fix(problem, phase)


-def run_detecting_problems(session: Session, args: List[str], **kwargs):
+def run_detecting_problems(session: Session, args: List[str], check_success=None, **kwargs):
+    if check_success is None:
+        def check_success(retcode, contents):
+            return (retcode == 0)
     try:
         retcode, contents = run_with_tee(session, args, **kwargs)
     except FileNotFoundError:
         error = MissingCommand(args[0])
         retcode = 1
     else:
-        if retcode == 0:
+        if check_success(retcode, contents):
             return contents
     lines = "".join(contents).splitlines(False)
     match, error = find_build_failure_description(lines)

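The new check_success hook lets a caller declare a run failed even when the process exits 0, which is exactly what the Minilla dist case above needs. A hedged sketch of a possible caller (the artifact name is made up for illustration):

    # Treat exit status 0 as failure unless the expected tarball appeared.
    run_detecting_problems(
        session, ["make", "dist"],
        check_success=lambda retcode, lines: retcode == 0 and session.exists("dist.tar.gz"))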
@@ -16,15 +16,17 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 from .buildsystem import NoBuildToolsFound, InstallTarget
+from typing import Optional


-def run_install(session, buildsystems, resolver, fixers, user: bool = False):
+def run_install(session, buildsystems, resolver, fixers, user: bool = False, prefix: Optional[str] = None):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()

     install_target = InstallTarget()
     install_target.user = user
+    install_target.prefix = prefix

     for buildsystem in buildsystems:
         buildsystem.install(session, resolver, fixers, install_target)

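With the --prefix option, InstallTarget.prefix and this signature in place, prefix handling plugs together end to end. A rough sketch of the call shape (session, build system and resolver construction elided; the prefix value is only an example):

    run_install(session, buildsystems, resolver, fixers,
                user=False, prefix="/opt/stage")
    # Each build system translates install_target.prefix itself, e.g.
    # SetupPy appends --prefix=/opt/stage to "setup.py install" and
    # Make passes it to ./configure.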
@@ -177,7 +177,11 @@ class VagueDependencyRequirement(Requirement):
         from .resolver.apt import AptRequirement

         yield AptRequirement.simple(self.name.lower(), minimum_version=self.minimum_version)
-        yield AptRequirement.simple('lib%s-dev' % self.name.lower(), minimum_version=self.minimum_version)
+        if self.name.lower().startswith('lib'):
+            devname = '%s-dev' % self.name.lower()
+        else:
+            devname = 'lib%s-dev' % self.name.lower()
+        yield AptRequirement.simple(devname, minimum_version=self.minimum_version)

     def met(self, session):
         for x in self.expand():

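The branch above avoids doubling the lib prefix when guessing a -dev package. Purely illustrative, with made-up dependency names:

    name = "libxml2"
    if name.lower().startswith('lib'):
        devname = '%s-dev' % name.lower()      # "libxml2" -> "libxml2-dev"
    else:
        devname = 'lib%s-dev' % name.lower()   # "zlib"    -> "libzlib-dev"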
@@ -209,10 +213,18 @@ class PerlPreDeclaredRequirement(Requirement):
     KNOWN_MODULES = {
         'auto_set_repository': 'Module::Install::Repository',
         'author_tests': 'Module::Install::AuthorTests',
         'recursive_author_tests': 'Module::Install::AuthorTests',
         'author_requires': 'Module::Install::AuthorRequires',
         'readme_from': 'Module::Install::ReadmeFromPod',
         'catalyst': 'Module::Install::Catalyst',
         'githubmeta': 'Module::Install::GithubMeta',
         'use_ppport': 'Module::Install::XSUtil',
         'pod_from': 'Module::Install::PodFromEuclid',
         'write_doap_changes': 'Module::Install::DOAPChangeSets',
         'use_test_base': 'Module::Install::TestBase',
         'jsonmeta': 'Module::Install::JSONMETA',
         'extra_tests': 'Module::Install::ExtraTests',
         'auto_set_bugtracker': 'Module::Install::Bugtracker',
     }

     def __init__(self, name):
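problem_to_upstream_requirement (in the buildlog hunk earlier) consults this table through lookup_module(), falling back to the bare requirement when the name is unknown. A rough sketch of that behaviour, assuming lookup_module() raises KeyError for names missing from KNOWN_MODULES:

    req = PerlPreDeclaredRequirement("author_tests")
    try:
        # expected to yield a requirement on Module::Install::AuthorTests
        req = req.lookup_module()
    except KeyError:
        pass  # unknown pre-declared function: keep the original requirement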
@@ -460,6 +472,17 @@ class LibraryRequirement(Requirement):
         self.library = library


+class StaticLibraryRequirement(Requirement):
+
+    library: str
+    filename: str
+
+    def __init__(self, library: str, filename: str):
+        super(StaticLibraryRequirement, self).__init__("static-lib")
+        self.library = library
+        self.filename = filename
+
+
 class RubyFileRequirement(Requirement):

     filename: str

@@ -674,3 +697,12 @@ class PythonModuleRequirement(Requirement):
         return "%s(%r, python_version=%r, minimum_version=%r)" % (
             type(self).__name__, self.module, self.python_version,
             self.minimum_version)
+
+
+class BoostComponentRequirement(Requirement):
+
+    name: str
+
+    def __init__(self, name):
+        super(BoostComponentRequirement, self).__init__("boost-component")
+        self.name = name

@@ -47,6 +47,8 @@ from ..requirements import (
     NodeModuleRequirement,
     NodePackageRequirement,
     LibraryRequirement,
+    BoostComponentRequirement,
+    StaticLibraryRequirement,
     RubyFileRequirement,
     XmlEntityRequirement,
     SprocketsFileRequirement,

@@ -178,7 +180,6 @@ def python_spec_to_apt_rels(pkg_name, specs):
     else:
         rels = []
         for spec in specs:
-            deb_version = Version(spec[1])
             if spec[0] == "~=":
                 # PEP 440: For a given release identifier V.N , the compatible
                 # release clause is approximately equivalent to the pair of

@@ -187,32 +188,26 @@ def python_spec_to_apt_rels(pkg_name, specs):
                 parts.pop(-1)
                 parts[-1] = str(int(parts[-1]) + 1)
                 next_maj_deb_version = Version(".".join(parts))
+                deb_version = Version(spec[1])
                 rels.extend(
-                    [
-                        {"name": pkg_name, "version": (">=", deb_version)},
-                        {"name": pkg_name, "version": ("<<", next_maj_deb_version)},
-                    ]
-                )
+                    [[{"name": pkg_name, "version": (">=", deb_version)}],
+                     [{"name": pkg_name, "version": ("<<", next_maj_deb_version)}]])
             elif spec[0] == "!=":
-                rels.extend(
-                    [
-                        {"name": pkg_name, "version": (">>", deb_version)},
-                        {"name": pkg_name, "version": ("<<", deb_version)},
-                    ]
-                )
+                deb_version = Version(spec[1])
+                rels.extend([
+                    [{"name": pkg_name, "version": (">>", deb_version)}],
+                    [{"name": pkg_name, "version": ("<<", deb_version)}]])
             elif spec[1].endswith(".*") and spec[0] == "==":
                 s = spec[1].split(".")
                 s.pop(-1)
                 n = list(s)
                 n[-1] = str(int(n[-1]) + 1)
                 rels.extend(
-                    [
-                        {"name": pkg_name, "version": (">=", Version(".".join(s)))},
-                        {"name": pkg_name, "version": ("<<", Version(".".join(n)))},
-                    ]
-                )
+                    [[{"name": pkg_name, "version": (">=", Version(".".join(s)))}],
+                     [{"name": pkg_name, "version": ("<<", Version(".".join(n)))}]])
             else:
                 c = {">=": ">=", "<=": "<=", "<": "<<", ">": ">>", "==": "="}[spec[0]]
+                deb_version = Version(spec[1])
                 rels.append([{"name": pkg_name, "version": (c, deb_version)}])
         return rels

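The net effect is that every version constraint now expands to a list of single-relation groups, the same shape the final branch's rels.append([...]) already produced. A worked example with an invented package name, using the Version class already used in this function:

    # "~= 1.2.3" (PEP 440 compatible release) becomes ">= 1.2.3 AND << 1.3":
    python_spec_to_apt_rels("python3-foo", [("~=", "1.2.3")])
    # -> [[{'name': 'python3-foo', 'version': ('>=', Version('1.2.3'))}],
    #     [{'name': 'python3-foo', 'version': ('<<', Version('1.3'))}]]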
@@ -335,6 +330,19 @@ def resolve_vague_dep_req(apt_mgr, req):
         options.append(AptRequirement.simple(vague_map[name], minimum_version=req.minimum_version))
     for x in req.expand():
         options.extend(resolve_requirement_apt(apt_mgr, x))
+    # Try even harder
+    if not options:
+        options.extend(find_reqs_simple(
+            apt_mgr,
+            [
+                posixpath.join("/usr/lib", ".*", "pkgconfig", re.escape(req.name) + "-.*\\.pc"),
+                posixpath.join("/usr/lib/pkgconfig", re.escape(req.name) + "-.*\\.pc")
+            ],
+            regex=True,
+            case_insensitive=True,
+            minimum_version=req.minimum_version
+        ))
+
     return options


@@ -469,6 +477,14 @@ def resolve_library_req(apt_mgr, req):
     return find_reqs_simple(apt_mgr, paths, regex=True)


+def resolve_static_library_req(apt_mgr, req):
+    paths = [
+        posixpath.join("/usr/lib/%s$" % re.escape(req.filename)),
+        posixpath.join("/usr/lib/.*/%s$" % re.escape(req.filename)),
+    ]
+    return find_reqs_simple(apt_mgr, paths, regex=True)
+
+
 def resolve_ruby_file_req(apt_mgr, req):
     paths = [posixpath.join("/usr/lib/ruby/vendor_ruby/%s.rb" % req.filename)]
     reqs = find_reqs_simple(apt_mgr, paths, regex=False)

@@ -586,18 +602,22 @@ def resolve_libtool_req(apt_mgr, req):


 def resolve_perl_module_req(apt_mgr, req):
-    DEFAULT_PERL_PATHS = ["/usr/share/perl5"]
+    DEFAULT_PERL_PATHS = ["/usr/share/perl5", "/usr/lib/.*/perl5/.*", "/usr/lib/.*/perl-base"]

     if req.inc is None:
         if req.filename is None:
-            paths = [posixpath.join(inc, req.relfilename) for inc in DEFAULT_PERL_PATHS]
+            paths = [posixpath.join(inc, re.escape(req.module.replace('::', '/') + '.pm')) for inc in DEFAULT_PERL_PATHS]
+            regex = True
         elif not posixpath.isabs(req.filename):
-            return False
+            paths = [posixpath.join(inc, re.escape(req.filename)) for inc in DEFAULT_PERL_PATHS]
+            regex = True
         else:
             paths = [req.filename]
+            regex = False
     else:
+        regex = False
         paths = [posixpath.join(inc, req.filename) for inc in req.inc]
-    return find_reqs_simple(apt_mgr, paths, regex=False)
+    return find_reqs_simple(apt_mgr, paths, regex=regex)


 def resolve_perl_file_req(apt_mgr, req):
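With the wider DEFAULT_PERL_PATHS and the regex escaping, a bare module name is now matched against several vendor locations. A small illustration of the candidate patterns this generates (module name chosen arbitrarily):

    import posixpath
    import re

    module = "Module::Build::Tiny"
    relpath = re.escape(module.replace('::', '/') + '.pm')
    paths = [posixpath.join(inc, relpath)
             for inc in ["/usr/share/perl5", "/usr/lib/.*/perl5/.*", "/usr/lib/.*/perl-base"]]
    # e.g. '/usr/share/perl5/Module/Build/Tiny\\.pm', later matched as a regex
    # against apt's file lists by find_reqs_simple(..., regex=True)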
@@ -677,6 +697,12 @@ def resolve_apt_req(apt_mgr, req):
     return [req]


+def resolve_boost_component_req(apt_mgr, req):
+    return find_reqs_simple(
+        apt_mgr, ["/usr/lib/.*/libboost_%s" % re.escape(req.name)],
+        regex=True)
+
+
 APT_REQUIREMENT_RESOLVERS = [
     (AptRequirement, resolve_apt_req),
     (BinaryRequirement, resolve_binary_req),

@@ -697,6 +723,7 @@ APT_REQUIREMENT_RESOLVERS = [
     (NodeModuleRequirement, resolve_node_module_req),
     (NodePackageRequirement, resolve_node_package_req),
     (LibraryRequirement, resolve_library_req),
+    (StaticLibraryRequirement, resolve_static_library_req),
     (RubyFileRequirement, resolve_ruby_file_req),
     (XmlEntityRequirement, resolve_xml_entity_req),
     (SprocketsFileRequirement, resolve_sprockets_file_req),

@@ -719,6 +746,7 @@ APT_REQUIREMENT_RESOLVERS = [
     (CertificateAuthorityRequirement, resolve_ca_req),
     (CargoCrateRequirement, resolve_cargo_crate_req),
     (IntrospectionTypelibRequirement, resolve_introspection_typelib_req),
+    (BoostComponentRequirement, resolve_boost_component_req),
 ]


@@ -97,6 +97,9 @@ class PlainSession(Session):
     def chdir(self, path):
         os.chdir(path)

+    def mkdir(self, path):
+        os.mkdir(path)
+
     def external_path(self, path):
         return os.path.abspath(path)

@@ -199,6 +199,10 @@ class SchrootSession(Session):
         fullpath = self.external_path(path)
         return os.scandir(fullpath)

+    def mkdir(self, path: str):
+        fullpath = self.external_path(path)
+        return os.mkdir(fullpath)
+
     def setup_from_vcs(
         self, tree, include_controldir: Optional[bool] = None, subdir="package"
     ):

setup.py (2 changes)
@@ -6,7 +6,7 @@ from setuptools import setup

 setup(name="ognibuild",
       description="Detect and run any build system",
-      version="0.0.4",
+      version="0.0.5",
       maintainer="Jelmer Vernooij",
       maintainer_email="jelmer@jelmer.uk",
       license="GNU GPLv2 or later",