diff --git a/ognibuild/__init__.py b/ognibuild/__init__.py
index 9a0c5a8..45b2b82 100644
--- a/ognibuild/__init__.py
+++ b/ognibuild/__init__.py
@@ -42,3 +42,17 @@ def shebang_binary(p):
     if args[0] in (b"/usr/bin/env", b"env"):
         return os.path.basename(args[1].decode()).strip()
     return os.path.basename(args[0].decode()).strip()
+
+
+class UpstreamRequirement(object):
+
+    def __init__(self, family, name):
+        self.family = family
+        self.name = name
+
+
+class UpstreamOutput(object):
+
+    def __init__(self, family, name):
+        self.family = family
+        self.name = name
diff --git a/ognibuild/__main__.py b/ognibuild/__main__.py
index 7dff0e3..71e3f63 100644
--- a/ognibuild/__main__.py
+++ b/ognibuild/__main__.py
@@ -34,14 +34,31 @@ from .resolver import (
 from .test import run_test
 
 
-def install_declared_requirements(resolver, requirements, subcommand):
+def get_necessary_declared_requirements(resolver, requirements, stages):
     missing = []
-    for req in requirements:
-        # TODO(jelmer): Look at stage
-        missing.append(UpstreamPackage(req.package.family, req.package.name))
+    for stage, req in requirements:
+        if stage in stages:
+            missing.append(req)
+    return missing
+
+
+def install_necessary_declared_requirements(resolver, buildsystem, stages):
+    missing = []
+    missing.extend(get_necessary_declared_requirements(
+        resolver, buildsystem.get_declared_dependencies(),
+        stages))
     resolver.install(missing)
 
 
+STAGE_MAP = {
+    'dist': [],
+    'install': ['build'],
+    'test': ['test', 'dev'],
+    'build': ['build'],
+    'clean': []
+}
+
+
 def main():
     import argparse
 
@@ -81,26 +98,23 @@ def main():
     resolver = AutoResolver.from_session(session)
     os.chdir(args.directory)
     try:
+        bss = list(detect_buildsystems(args.directory))
         if not args.ignore_declared_dependencies:
-            from upstream_ontologist.guess import get_upstream_info
-            buildsystem, requirements, metadata = get_upstream_info(
-                path=args.directory,
-                trust_package=True,
-                net_access=True,
-                consult_external_directory=True,
-                check=True)
-            install_declared_requirements(
-                resolver, requirements, args.subcommand)
+            stages = STAGE_MAP[args.subcommand]
+            if stages:
+                for bs in bss:
+                    install_necessary_declared_requirements(
+                        resolver, bs, stages)
         if args.subcommand == 'dist':
-            run_dist(session=session, resolver=resolver)
+            run_dist(session=session, buildsystems=bss, resolver=resolver)
         if args.subcommand == 'build':
-            run_build(session, resolver=resolver)
+            run_build(session, buildsystems=bss, resolver=resolver)
        if args.subcommand == 'clean':
-            run_clean(session, resolver=resolver)
+            run_clean(session, buildsystems=bss, resolver=resolver)
        if args.subcommand == 'install':
-            run_install(session, resolver=resolver)
+            run_install(session, buildsystems=bss, resolver=resolver)
        if args.subcommand == 'test':
-            run_test(session, resolver=resolver)
+            run_test(session, buildsystems=bss, resolver=resolver)
     except NoBuildToolsFound:
         logging.info("No build tools found.")
         return 1
diff --git a/ognibuild/build.py b/ognibuild/build.py
index ea3fe03..b58db3a 100644
--- a/ognibuild/build.py
+++ b/ognibuild/build.py
@@ -15,16 +15,16 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
-from .buildsystem import detect_buildsystems, NoBuildToolsFound
+from .buildsystem import NoBuildToolsFound
 
 
-def run_build(session, resolver):
+def run_build(session, buildsystems, resolver):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()
 
-    for buildsystem in detect_buildsystems(session):
-        buildsystem.build(resolver)
+    for buildsystem in buildsystems:
+        buildsystem.build(session, resolver)
         return
 
     raise NoBuildToolsFound()
diff --git a/ognibuild/buildsystem.py b/ognibuild/buildsystem.py
index 77fc2dd..fa070e3 100644
--- a/ognibuild/buildsystem.py
+++ b/ognibuild/buildsystem.py
@@ -18,9 +18,11 @@
 
 import logging
+import os
 import re
+import warnings
 
-from . import shebang_binary, UpstreamPackage
+from . import shebang_binary, UpstreamRequirement, UpstreamOutput
 from .apt import UnidentifiedError
 from .fix_build import run_with_build_fixer
 
 
@@ -32,57 +34,73 @@ class NoBuildToolsFound(Exception):
 class BuildSystem(object):
     """A particular buildsystem."""
 
-    def __init__(self, session):
-        self.session = session
+    name: str
 
-    def dist(self, resolver):
+    def dist(self, session, resolver):
         raise NotImplementedError(self.dist)
 
-    def test(self, resolver):
+    def test(self, session, resolver):
         raise NotImplementedError(self.test)
 
-    def build(self, resolver):
+    def build(self, session, resolver):
         raise NotImplementedError(self.build)
 
-    def clean(self, resolver):
+    def clean(self, session, resolver):
         raise NotImplementedError(self.clean)
 
-    def install(self, resolver):
+    def install(self, session, resolver):
         raise NotImplementedError(self.install)
 
+    def get_declared_dependencies(self):
+        raise NotImplementedError(self.get_declared_dependencies)
+
+    def get_declared_outputs(self):
+        raise NotImplementedError(self.get_declared_outputs)
+
 
 class Pear(BuildSystem):
 
+    name = 'pear'
+
+    def __init__(self, path):
+        self.path = path
+
     def setup(self, resolver):
-        resolver.install([UpstreamPackage('binary', 'pear')])
+        resolver.install([UpstreamRequirement('binary', 'pear')])
 
-    def dist(self, resolver):
+    def dist(self, session, resolver):
         self.setup(resolver)
-        run_with_build_fixer(self.session, ['pear', 'package'])
+        run_with_build_fixer(session, ['pear', 'package'])
 
-    def test(self, resolver):
-        self.setup()
-        run_with_build_fixer(self.session, ["pear", "run-tests"])
-
-    def build(self, resolver):
+    def test(self, session, resolver):
         self.setup(resolver)
-        run_with_build_fixer(self.session, ['pear', 'build'])
+        run_with_build_fixer(session, ['pear', 'run-tests'])
 
-    def clean(self, resolver):
+    def build(self, session, resolver):
+        self.setup(resolver)
+        run_with_build_fixer(session, ['pear', 'build'])
+
+    def clean(self, session, resolver):
         self.setup(resolver)
         # TODO
 
-    def install(self, resolver):
+    def install(self, session, resolver):
         self.setup(resolver)
-        run_with_build_fixer(self.session, ['pear', 'install'])
+        run_with_build_fixer(session, ['pear', 'install'])
 
 
 class SetupPy(BuildSystem):
 
+    name = 'setup.py'
+
+    def __init__(self, path):
+        from distutils.core import run_setup
+        self.result = run_setup(os.path.abspath(path), stop_after="init")
+
     def setup(self, resolver):
         resolver.install([
-            UpstreamPackage('python3', 'pip'),
-            UpstreamPackage('binary', 'python3'),
+            UpstreamRequirement('python3', 'pip'),
+            UpstreamRequirement('binary', 'python3'),
         ])
         with open('setup.py', 'r') as f:
             setup_py_contents = f.read()
@@ -93,129 +111,175 @@ class SetupPy(BuildSystem):
             setup_cfg_contents = ''
         if 'setuptools' in setup_py_contents:
             logging.info('Reference to setuptools found, installing.')
-            resolver.install([UpstreamPackage('python3', 'setuptools')])
+            resolver.install([UpstreamRequirement('python3', 'setuptools')])
         if ('setuptools_scm' in setup_py_contents or
                 'setuptools_scm' in setup_cfg_contents):
             logging.info('Reference to setuptools-scm found, installing.')
             resolver.install([
-                UpstreamPackage('python3', 'setuptools-scm'),
-                UpstreamPackage('binary', 'git'),
-                UpstreamPackage('binary', 'mercurial'),
+                UpstreamRequirement('python3', 'setuptools-scm'),
+                UpstreamRequirement('binary', 'git'),
+                UpstreamRequirement('binary', 'mercurial'),
             ])
 
         # TODO(jelmer): Install setup_requires
 
-    def test(self, resolver):
+    def test(self, session, resolver):
         self.setup(resolver)
-        self._run_setup(resolver, ['test'])
+        self._run_setup(session, resolver, ['test'])
 
-    def dist(self, resolver):
+    def dist(self, session, resolver):
         self.setup(resolver)
-        self._run_setup(resolver, ['sdist'])
+        self._run_setup(session, resolver, ['sdist'])
 
-    def clean(self, resolver):
+    def clean(self, session, resolver):
         self.setup(resolver)
-        self._run_setup(resolver, ['clean'])
+        self._run_setup(session, resolver, ['clean'])
 
-    def install(self, resolver):
+    def install(self, session, resolver):
         self.setup(resolver)
-        self._run_setup(resolver, ['install'])
+        self._run_setup(session, resolver, ['install'])
 
-    def _run_setup(self, resolver, args):
+    def _run_setup(self, session, resolver, args):
         interpreter = shebang_binary('setup.py')
         if interpreter is not None:
             if interpreter in ('python3', 'python2', 'python'):
-                resolver.install([UpstreamPackage('binary', interpreter)])
+                resolver.install([UpstreamRequirement('binary', interpreter)])
             else:
                 raise ValueError('Unknown interpreter %r' % interpreter)
             run_with_build_fixer(
-                self.session, ['./setup.py'] + args)
+                session, ['./setup.py'] + args)
         else:
             # Just assume it's Python 3
-            resolver.install([UpstreamPackage('binary', 'python3')])
+            resolver.install([UpstreamRequirement('binary', 'python3')])
             run_with_build_fixer(
-                self.session, ['python3', './setup.py'] + args)
+                session, ['python3', './setup.py'] + args)
+
+    def get_declared_dependencies(self):
+        for require in self.result.get_requires():
+            yield 'build', UpstreamRequirement('python3', require)
+        for require in self.result.install_requires:
+            yield 'install', UpstreamRequirement('python3', require)
+        for require in self.result.tests_require:
+            yield 'test', UpstreamRequirement('python3', require)
+
+    def get_declared_outputs(self):
+        for script in (self.result.scripts or []):
+            yield UpstreamOutput('binary', os.path.basename(script))
+        entry_points = self.result.entry_points or {}
+        for script in entry_points.get('console_scripts', []):
+            yield UpstreamOutput('binary', script.split('=')[0])
+        for package in self.result.packages or []:
+            yield UpstreamOutput('python3', package)
 
 
 class PyProject(BuildSystem):
+
+    name = 'pyproject'
+
     def load_toml(self):
         import toml
         with open("pyproject.toml", "r") as pf:
             return toml.load(pf)
 
-    def dist(self, resolver):
+    def dist(self, session, resolver):
         pyproject = self.load_toml()
         if "poetry" in pyproject.get("tool", []):
             logging.info(
                 'Found pyproject.toml with poetry section, '
                 'assuming poetry project.')
             resolver.install([
-                UpstreamPackage('python3', 'venv'),
-                UpstreamPackage('python3', 'pip'),
+                UpstreamRequirement('python3', 'venv'),
+                UpstreamRequirement('python3', 'pip'),
             ])
-            self.session.check_call(['pip3', 'install', 'poetry'], user='root')
-            self.session.check_call(['poetry', 'build', '-f', 'sdist'])
+            session.check_call(['pip3', 'install', 'poetry'], user='root')
+            session.check_call(['poetry', 'build', '-f', 'sdist'])
             return
         raise AssertionError("no supported section in pyproject.toml")
 
 
 class SetupCfg(BuildSystem):
 
+    name = 'setup.cfg'
+
+    def __init__(self, path):
+        self.path = path
+
     def setup(self, resolver):
         resolver.install([
-            UpstreamPackage('python3', 'pep517'),
-            UpstreamPackage('python3', 'pip'),
+            UpstreamRequirement('python3', 'pep517'),
+            UpstreamRequirement('python3', 'pip'),
         ])
 
-    def dist(self, resolver):
+    def dist(self, session, resolver):
         self.setup(resolver)
-        self.session.check_call(['python3', '-m', 'pep517.build', '-s', '.'])
+        session.check_call(['python3', '-m', 'pep517.build', '-s', '.'])
 
 
-class NpmPackage(BuildSystem):
+class Npm(BuildSystem):
+
+    name = 'npm'
+
+    def __init__(self, path):
+        import json
+        with open(path, 'r') as f:
+            self.package = json.load(f)
+
+    def get_declared_dependencies(self):
+        if 'devDependencies' in self.package:
+            for name, unused_version in (
+                    self.package['devDependencies'].items()):
+                # TODO(jelmer): Look at version
+                yield 'dev', UpstreamRequirement('npm', name)
 
     def setup(self, resolver):
-        resolver.install([UpstreamPackage('binary', 'npm')])
+        resolver.install([UpstreamRequirement('binary', 'npm')])
 
-    def dist(self, resolver):
+    def dist(self, session, resolver):
         self.setup(resolver)
-        run_with_build_fixer(self.session, ['npm', 'pack'])
+        run_with_build_fixer(session, ['npm', 'pack'])
 
 
 class Waf(BuildSystem):
 
-    def setup(self, resolver):
-        resolver.install([UpstreamPackage('binary', 'python3')])
+    name = 'waf'
 
-    def dist(self, resolver):
+    def __init__(self, path):
+        self.path = path
+
+    def setup(self, resolver):
+        resolver.install([UpstreamRequirement('binary', 'python3')])
+
+    def dist(self, session, resolver):
         self.setup(resolver)
-        run_with_build_fixer(self.session, ['./waf', 'dist'])
+        run_with_build_fixer(session, ['./waf', 'dist'])
 
 
 class Gem(BuildSystem):
 
-    def setup(self, resolver):
-        resolver.install([UpstreamPackage('binary', 'gem2deb')])
+    name = 'gem'
 
-    def dist(self, resolver):
+    def __init__(self, path):
+        self.path = path
+
+    def setup(self, resolver):
+        resolver.install([UpstreamRequirement('binary', 'gem2deb')])
+
+    def dist(self, session, resolver):
         self.setup(resolver)
-        gemfiles = [entry.name for entry in self.session.scandir('.')
+        gemfiles = [entry.name for entry in session.scandir('.')
                     if entry.name.endswith('.gem')]
         if len(gemfiles) > 1:
-            logging.warning("More than one gemfile. Trying the first?")
-        run_with_build_fixer(self.session, ["gem2tgz", gemfiles[0]])
+            logging.warning('More than one gemfile. Trying the first?')
+        run_with_build_fixer(session, ['gem2tgz', gemfiles[0]])
 
 
 class DistInkt(BuildSystem):
 
-    def setup(self, resolver):
-        resolver.install([
-            UpstreamPackage('perl', 'Dist::Inkt'),
-        ])
-
-    def dist(self, resolver):
-        self.setup(resolver)
+    def __init__(self, path):
+        self.path = path
+        self.name = 'dist-zilla'
+        self.dist_inkt_class = None
         with open('dist.ini', 'rb') as f:
             for line in f:
                 if not line.startswith(b";;"):
                     continue
@@ -226,155 +290,234 @@ class DistInkt(BuildSystem):
                     continue
                 if key.strip() == b"class" and value.strip().startswith(b"'Dist::Inkt"):
                     logging.info(
-                        "Found Dist::Inkt section in dist.ini, " "assuming distinkt."
-                    )
-                    # TODO(jelmer): install via apt if possible
-                    self.session.check_call(
-                        ["cpan", "install", value.decode().strip("'")], user="root"
-                    )
-                    run_with_build_fixer(self.session, ["distinkt-dist"])
+                        'Found Dist::Inkt section in dist.ini, '
+                        'assuming distinkt.')
+                    self.name = 'dist-inkt'
+                    self.dist_inkt_class = value.decode().strip("'")
                     return
-        # Default to invoking Dist::Zilla
         logging.info('Found dist.ini, assuming dist-zilla.')
-        resolver.install([UpstreamPackage('perl', 'Dist::Zilla')])
-        run_with_build_fixer(self.session, ['dzil', 'build', '--in', '..'])
+
+    def setup(self, resolver):
+        resolver.install([
+            UpstreamRequirement('perl', 'Dist::Inkt'),
+        ])
+
+    def dist(self, session, resolver):
+        self.setup(resolver)
+        if self.name == 'dist-inkt':
+            resolver.install([
+                UpstreamRequirement('perl-module', self.dist_inkt_class)])
+            run_with_build_fixer(session, ['distinkt-dist'])
+        else:
+            # Default to invoking Dist::Zilla
+            resolver.install([UpstreamRequirement('perl', 'Dist::Zilla')])
+            run_with_build_fixer(session, ['dzil', 'build', '--in', '..'])
 
 
 class Make(BuildSystem):
 
-    def setup(self, resolver):
-        if self.session.exists('Makefile.PL') and not self.session.exists('Makefile'):
-            resolver.install([UpstreamPackage('binary', 'perl')])
-            run_with_build_fixer(self.session, ['perl', 'Makefile.PL'])
+    name = 'make'
 
-        if not self.session.exists('Makefile') and not self.session.exists('configure'):
-            if self.session.exists('autogen.sh'):
+    def setup(self, session, resolver):
+        if session.exists('Makefile.PL') and not session.exists('Makefile'):
+            resolver.install([UpstreamRequirement('binary', 'perl')])
+            run_with_build_fixer(session, ['perl', 'Makefile.PL'])
+
+        if not session.exists('Makefile') and not session.exists('configure'):
+            if session.exists('autogen.sh'):
                 if shebang_binary('autogen.sh') is None:
                     run_with_build_fixer(
-                        self.session, ['/bin/sh', './autogen.sh'])
+                        session, ['/bin/sh', './autogen.sh'])
                 try:
-                    run_with_build_fixer(self.session, ["./autogen.sh"])
+                    run_with_build_fixer(
+                        session, ['./autogen.sh'])
                 except UnidentifiedError as e:
-                    if (
-                        "Gnulib not yet bootstrapped; "
-                        "run ./bootstrap instead.\n" in e.lines
-                    ):
-                        run_with_build_fixer(self.session, ["./bootstrap"])
-                        run_with_build_fixer(self.session, ["./autogen.sh"])
+                    if ("Gnulib not yet bootstrapped; "
+                            "run ./bootstrap instead.\n" in e.lines):
+                        run_with_build_fixer(session, ["./bootstrap"])
+                        run_with_build_fixer(session, ['./autogen.sh'])
                     else:
                         raise
 
-            elif self.session.exists("configure.ac") or self.session.exists(
-                "configure.in"
-            ):
-                apt.install(
-                    ["autoconf", "automake", "gettext", "libtool", "gnu-standards"]
-                )
-                run_with_build_fixer(self.session, ["autoreconf", "-i"])
+            elif (session.exists('configure.ac') or
+                    session.exists('configure.in')):
+                resolver.install([
+                    UpstreamRequirement('binary', 'autoconf'),
+                    UpstreamRequirement('binary', 'automake'),
+                    UpstreamRequirement('binary', 'gettextize'),
+                    UpstreamRequirement('binary', 'libtoolize'),
+                ])
+                run_with_build_fixer(session, ['autoreconf', '-i'])
 
-        if not self.session.exists("Makefile") and self.session.exists("configure"):
-            self.session.check_call(["./configure"])
+        if not session.exists('Makefile') and session.exists('configure'):
+            session.check_call(['./configure'])
 
-    def dist(self, resolver):
-        self.setup(resolver)
-        resolver.install([UpstreamPackage('binary', 'make')])
+    def dist(self, session, resolver):
+        self.setup(session, resolver)
+        resolver.install([UpstreamRequirement('binary', 'make')])
         try:
-            run_with_build_fixer(self.session, ["make", "dist"])
+            run_with_build_fixer(session, ['make', 'dist'])
         except UnidentifiedError as e:
             if "make: *** No rule to make target 'dist'. Stop.\n" in e.lines:
                 pass
             elif "make[1]: *** No rule to make target 'dist'. Stop.\n" in e.lines:
                 pass
+            elif ("Reconfigure the source tree "
+                  "(via './config' or 'perl Configure'), please.\n"
+                  ) in e.lines:
+                run_with_build_fixer(session, ['./config'])
+                run_with_build_fixer(session, ['make', 'dist'])
             elif (
-                "Reconfigure the source tree "
-                "(via './config' or 'perl Configure'), please.\n"
-            ) in e.lines:
-                run_with_build_fixer(self.session, ["./config"])
-                run_with_build_fixer(self.session, ["make", "dist"])
-            elif (
-                "Please try running 'make manifest' and then run "
-                "'make dist' again.\n" in e.lines
-            ):
-                run_with_build_fixer(self.session, ["make", "manifest"])
-                run_with_build_fixer(self.session, ["make", "dist"])
+                    "Please try running 'make manifest' and then run "
+                    "'make dist' again.\n" in e.lines):
+                run_with_build_fixer(session, ['make', 'manifest'])
+                run_with_build_fixer(session, ['make', 'dist'])
             elif "Please run ./configure first\n" in e.lines:
-                run_with_build_fixer(self.session, ["./configure"])
-                run_with_build_fixer(self.session, ["make", "dist"])
-            elif any(
-                [
-                    re.match(
-                        r"Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' "
-                        r"Run \'./configure \[options\]\' and retry. Stop.\n",
-                        line,
-                    )
-                    for line in e.lines
-                ]
-            ):
-                run_with_build_fixer(self.session, ["./configure"])
-                run_with_build_fixer(self.session, ["make", "dist"])
-            elif any(
-                [
-                    re.match(
-                        r"Problem opening MANIFEST: No such file or directory "
-                        r"at .* line [0-9]+\.",
-                        line,
-                    )
-                    for line in e.lines
-                ]
-            ):
-                run_with_build_fixer(self.session, ["make", "manifest"])
-                run_with_build_fixer(self.session, ["make", "dist"])
+                run_with_build_fixer(session, ['./configure'])
+                run_with_build_fixer(session, ['make', 'dist'])
+            elif any([re.match(
+                    r'Makefile:[0-9]+: \*\*\* Missing \'Make.inc\' '
+                    r'Run \'./configure \[options\]\' and retry. Stop.\n',
+                    line) for line in e.lines]):
+                run_with_build_fixer(session, ['./configure'])
+                run_with_build_fixer(session, ['make', 'dist'])
+            elif any([re.match(
+                    r'Problem opening MANIFEST: No such file or directory '
+                    r'at .* line [0-9]+\.', line) for line in e.lines]):
+                run_with_build_fixer(session, ['make', 'manifest'])
+                run_with_build_fixer(session, ['make', 'dist'])
             else:
                 raise
         else:
             return
 
+    def get_declared_dependencies(self):
+        # TODO(jelmer): Split out the perl-specific stuff?
+        if os.path.exists('META.yml'):
+            # See http://module-build.sourceforge.net/META-spec-v1.4.html for
+            # the specification of the format.
+            import ruamel.yaml
+            import ruamel.yaml.reader
+            with open('META.yml', 'rb') as f:
+                try:
+                    data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader)
+                except ruamel.yaml.reader.ReaderError as e:
+                    warnings.warn('Unable to parse META.yml: %s' % e)
+                    return
+                for require in data.get('requires', []):
+                    yield 'build', UpstreamRequirement('perl', require)
 
-def detect_buildsystems(session):
+
+class Cargo(BuildSystem):
+
+    name = 'cargo'
+
+    def __init__(self, path):
+        from toml.decoder import load, TomlDecodeError
+        with open(path, 'r') as f:
+            self.cargo = load(f)
+
+    def get_declared_dependencies(self):
+        if 'dependencies' in self.cargo:
+            for name, details in self.cargo['dependencies'].items():
+                # TODO(jelmer): Look at details['features'], details['version']
+                yield 'build', UpstreamRequirement('cargo-crate', name)
+
+
+class Golang(BuildSystem):
+    """Go builds."""
+
+    name = 'golang'
+
+
+class Maven(BuildSystem):
+
+    name = 'maven'
+
+    def __init__(self, path):
+        self.path = path
+
+
+class Cabal(BuildSystem):
+
+    name = 'cabal'
+
+    def __init__(self, path):
+        self.path = path
+
+
+def detect_buildsystems(path):
     """Detect build systems."""
-    if session.exists("package.xml"):
-        logging.info("Found package.xml, assuming pear package.")
-        yield Pear(session)
+    if os.path.exists(os.path.join(path, 'package.xml')):
+        logging.info('Found package.xml, assuming pear package.')
+        yield Pear('package.xml')
 
-    if session.exists("setup.py"):
-        logging.info("Found setup.py, assuming python project.")
-        yield SetupPy(session)
+    if os.path.exists(os.path.join(path, 'setup.py')):
+        logging.info('Found setup.py, assuming python project.')
+        yield SetupPy('setup.py')
+    elif os.path.exists(os.path.join(path, 'pyproject.toml')):
+        logging.info('Found pyproject.toml, assuming python project.')
+        yield PyProject()
+    elif os.path.exists(os.path.join(path, 'setup.cfg')):
+        logging.info('Found setup.cfg, assuming python project.')
+        yield SetupCfg('setup.cfg')
 
-    if session.exists("pyproject.toml"):
-        logging.info("Found pyproject.toml, assuming python project.")
-        yield PyProject(session)
+    if os.path.exists(os.path.join(path, 'package.json')):
+        logging.info('Found package.json, assuming node package.')
+        yield Npm('package.json')
 
-    if session.exists("setup.cfg"):
-        logging.info("Found setup.cfg, assuming python project.")
-        yield SetupCfg(session)
+    if os.path.exists(os.path.join(path, 'waf')):
+        logging.info('Found waf, assuming waf package.')
+        yield Waf('waf')
 
-    if session.exists("package.json"):
-        logging.info("Found package.json, assuming node package.")
-        yield NpmPackage(session)
+    if os.path.exists(os.path.join(path, 'Cargo.toml')):
+        logging.info('Found Cargo.toml, assuming rust cargo package.')
+        yield Cargo('Cargo.toml')
 
-    if session.exists("waf"):
-        logging.info("Found waf, assuming waf package.")
-        yield Waf(session)
+    if os.path.exists(os.path.join(path, 'pom.xml')):
+        logging.info('Found pom.xml, assuming maven package.')
+        yield Maven('pom.xml')
+
+    if (os.path.exists(os.path.join(path, 'dist.ini')) and
+            not os.path.exists(os.path.join(path, 'Makefile.PL'))):
+        yield DistInkt('dist.ini')
 
     gemfiles = [
-        entry.name for entry in session.scandir(".") if entry.name.endswith(".gem")
-    ]
+        entry.name for entry in os.scandir(path)
+        if entry.name.endswith('.gem')]
     if gemfiles:
-        yield Gem(session)
+        yield Gem(gemfiles[0])
 
-    if session.exists("dist.ini") and not session.exists("Makefile.PL"):
-        yield DistInkt(session)
+    if any([os.path.exists(os.path.join(path, p)) for p in [
+            'Makefile', 'Makefile.PL', 'autogen.sh', 'configure.ac',
+            'configure.in']]):
+        yield Make()
 
-    if any(
-        [
-            session.exists(p)
-            for p in [
-                "Makefile",
-                "Makefile.PL",
-                "autogen.sh",
-                "configure.ac",
-                "configure.in",
-            ]
-        ]
-    ):
-        yield Make(session)
+    cabal_filenames = [
+        entry.name for entry in os.scandir(path)
+        if entry.name.endswith('.cabal')]
+    if cabal_filenames:
+        if len(cabal_filenames) == 1:
+            yield Cabal(cabal_filenames[0])
+        else:
+            warnings.warn(
+                'More than one cabal filename, ignoring all: %r' %
+                cabal_filenames)
+
+    if os.path.exists(os.path.join(path, '.travis.yml')):
+        import yaml
+        import ruamel.yaml.reader
+        with open('.travis.yml', 'rb') as f:
+            try:
+                data = ruamel.yaml.load(f, ruamel.yaml.SafeLoader)
+            except ruamel.yaml.reader.ReaderError as e:
+                warnings.warn('Unable to parse .travis.yml: %s' % (e, ))
+            else:
+                language = data.get('language')
+                if language == 'go':
+                    yield Golang()
+
+    for entry in os.scandir(path):
+        if entry.name.endswith('.go'):
+            yield Golang()
+            break
diff --git a/ognibuild/clean.py b/ognibuild/clean.py
index cabf76f..9f1c4d1 100644
--- a/ognibuild/clean.py
+++ b/ognibuild/clean.py
@@ -15,16 +15,16 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
-from .buildsystem import detect_buildsystems, NoBuildToolsFound
+from .buildsystem import NoBuildToolsFound
 
 
-def run_clean(session, resolver):
+def run_clean(session, buildsystems, resolver):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()
 
-    for buildsystem in detect_buildsystems(session):
-        buildsystem.clean(resolver)
+    for buildsystem in buildsystems:
+        buildsystem.clean(session, resolver)
         return
 
     raise NoBuildToolsFound()
diff --git a/ognibuild/dist.py b/ognibuild/dist.py
index 1fa77cd..79610d5 100644
--- a/ognibuild/dist.py
+++ b/ognibuild/dist.py
@@ -34,7 +34,7 @@ from buildlog_consultant.common import (
 
 from . import DetailedFailure
-from .buildsystem import detect_buildsystems, NoBuildToolsFound
+from .buildsystem import NoBuildToolsFound
 from .session.schroot import SchrootSession
 from .vcs import dupe_vcs_tree, export_vcs_tree
 
 
@@ -62,13 +62,13 @@ class DistNoTarball(Exception):
     """Dist operation did not create a tarball."""
 
 
-def run_dist(session, resolver):
+def run_dist(session, buildsystems, resolver):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()
 
-    for buildsystem in detect_buildsystems(session):
-        buildsystem.dist(resolver)
+    for buildsystem in buildsystems:
+        buildsystem.dist(session, resolver)
         return
 
     raise NoBuildToolsFound()
@@ -115,13 +115,12 @@ class DistCatcher(object):
 
 
 def create_dist_schroot(
-    tree: Tree,
-    target_dir: str,
-    chroot: str,
-    packaging_tree: Optional[Tree] = None,
-    include_controldir: bool = True,
-    subdir: Optional[str] = None,
-) -> str:
+        tree: Tree, target_dir: str,
+        chroot: str, packaging_tree: Optional[Tree] = None,
+        include_controldir: bool = True,
+        subdir: Optional[str] = None) -> str:
+    from .buildsystem import detect_buildsystems
+    from .apt import AptResolver
     if subdir is None:
         subdir = "package"
     with SchrootSession(chroot) as session:
@@ -144,12 +143,15 @@ def create_dist_schroot(
         else:
             dupe_vcs_tree(tree, export_directory)
 
+        buildsystems = list(detect_buildsystems(export_directory))
+        resolver = AptResolver.from_session(session)
+
         with DistCatcher(export_directory) as dc:
             oldcwd = os.getcwd()
             os.chdir(export_directory)
             try:
                 session.chdir(os.path.join(reldir, subdir))
-                run_dist(session)
+                run_dist(session, buildsystems, resolver)
             finally:
                 os.chdir(oldcwd)
 
diff --git a/ognibuild/install.py b/ognibuild/install.py
index 5d386c0..df0e61f 100644
--- a/ognibuild/install.py
+++ b/ognibuild/install.py
@@ -15,16 +15,16 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
-from .buildsystem import detect_buildsystems, NoBuildToolsFound
+from .buildsystem import NoBuildToolsFound
 
 
-def run_install(session, resolver):
+def run_install(session, buildsystems, resolver):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()
 
-    for buildsystem in detect_buildsystems(session):
-        buildsystem.install(resolver)
+    for buildsystem in buildsystems:
+        buildsystem.install(session, resolver)
         return
 
     raise NoBuildToolsFound()
diff --git a/ognibuild/test.py b/ognibuild/test.py
index 8f7ca08..8560347 100644
--- a/ognibuild/test.py
+++ b/ognibuild/test.py
@@ -15,16 +15,16 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
-from .buildsystem import detect_buildsystems, NoBuildToolsFound
+from .buildsystem import NoBuildToolsFound
 
 
-def run_test(session, resolver):
+def run_test(session, buildsystems, resolver):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()
 
-    for buildsystem in detect_buildsystems(session):
-        buildsystem.test(resolver)
+    for buildsystem in buildsystems:
+        buildsystem.test(session, resolver)
         return
 
     raise NoBuildToolsFound()