New upstream version 0.0.15

This commit is contained in:
Tianyu Chen 2022-11-22 11:19:55 +08:00
parent 3e1f11dd79
commit c286789e37
55 changed files with 3578 additions and 1371 deletions

24
.github/workflows/disperse.yml vendored Normal file
View file

@ -0,0 +1,24 @@
---
name: Disperse configuration
"on":
- push
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
- name: Install dependencies
run: |
sudo apt install protobuf-compiler
- name: Install disperse
run: |
pip install git+https://github.com/jelmer/disperse
- name: Validate disperse.conf
run: |
PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python disperse validate .

View file

@ -1,6 +1,11 @@
---
name: Python package
on: [push, pull_request]
"on":
push:
pull_request:
schedule:
- cron: '0 6 * * *' # Daily 6AM UTC build
jobs:
build:
@ -9,7 +14,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
python-version: [3.7, 3.8]
python-version: [3.7, 3.8, 3.9, '3.10']
fail-fast: false
steps:
@ -20,28 +25,28 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip flake8 cython
python -m pip install --upgrade pip
python -m pip install -e ".[remote,dep_server,dev]"
python setup.py develop
- name: Install Debian-specific dependencies
run: |
sudo apt install libapt-pkg-dev
python -m pip install wheel
python -m pip install git+https://salsa.debian.org/apt-team/python-apt
sudo apt update
sudo apt install python3-wheel libapt-pkg-dev
python -m pip install \
python_apt@git+https://salsa.debian.org/apt-team/python-apt.git
python -m pip install -e ".[debian]"
python -m pip install testtools
mkdir -p ~/.config/breezy/plugins
brz branch lp:brz-debian ~/.config/breezy/plugins/debian
if: "matrix.python-version != 'pypy3' && matrix.os == 'ubuntu-latest'"
- name: Style checks
run: |
pip install flake8
python -m flake8
- name: Typing checks
run: |
pip install -U mypy
pip install -U mypy types-toml
python -m mypy ognibuild
if: "matrix.python-version != 'pypy3'"
- name: Test suite run
run: |
python -m unittest ognibuild.tests.test_suite
python -m unittest tests.test_suite
env:
PYTHONHASHSEED: random

1
.gitignore vendored
View file

@ -1,3 +1,4 @@
.coverage
build
*~
ognibuild.egg-info

20
Makefile Normal file
View file

@ -0,0 +1,20 @@
check:: style
style:
flake8
check:: testsuite
testsuite:
python3 -m unittest tests.test_suite
check:: typing
typing:
mypy ognibuild tests
coverage:
python3 -m coverage run -m unittest tests.test_suite
coverage-html:
python3 -m coverage html

View file

@ -1,17 +0,0 @@
Metadata-Version: 2.1
Name: ognibuild
Version: 0.0.7
Summary: Detect and run any build system
Home-page: https://jelmer.uk/code/ognibuild
Maintainer: Jelmer Vernooij
Maintainer-email: jelmer@jelmer.uk
License: GNU GPLv2 or later
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Provides-Extra: debian

View file

@ -31,6 +31,12 @@ Ognibuild has a number of subcommands:
It also includes a subcommand that can fix up the build dependencies
for Debian packages, called deb-fix-build.
### Examples
```
ogni -d https://gitlab.gnome.org/GNOME/fractal install
```
## Status
Ognibuild is functional, but sometimes rough around the edges. If you run into

8
disperse.conf Normal file
View file

@ -0,0 +1,8 @@
# See https://github.com/jelmer/disperse
timeout_days: 5
tag_name: "v$VERSION"
verify_command: "python3 -m unittest tests.test_suite"
update_version {
path: "ognibuild/__init__.py"
new_line: "__version__ = $TUPLED_VERSION"
}

View file

@ -1,17 +0,0 @@
Metadata-Version: 2.1
Name: ognibuild
Version: 0.0.7
Summary: Detect and run any build system
Home-page: https://jelmer.uk/code/ognibuild
Maintainer: Jelmer Vernooij
Maintainer-email: jelmer@jelmer.uk
License: GNU GPLv2 or later
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Provides-Extra: debian

View file

@ -1,52 +0,0 @@
.flake8
.gitignore
AUTHORS
CODE_OF_CONDUCT.md
LICENSE
README.md
SECURITY.md
TODO
releaser.conf
setup.cfg
setup.py
.github/workflows/pythonpackage.yml
notes/architecture.md
notes/concepts.md
notes/roadmap.md
ognibuild/__init__.py
ognibuild/__main__.py
ognibuild/build.py
ognibuild/buildlog.py
ognibuild/buildsystem.py
ognibuild/clean.py
ognibuild/dist.py
ognibuild/dist_catcher.py
ognibuild/fix_build.py
ognibuild/fixers.py
ognibuild/info.py
ognibuild/install.py
ognibuild/outputs.py
ognibuild/requirements.py
ognibuild/test.py
ognibuild/vcs.py
ognibuild.egg-info/PKG-INFO
ognibuild.egg-info/SOURCES.txt
ognibuild.egg-info/dependency_links.txt
ognibuild.egg-info/entry_points.txt
ognibuild.egg-info/requires.txt
ognibuild.egg-info/top_level.txt
ognibuild/debian/__init__.py
ognibuild/debian/apt.py
ognibuild/debian/build.py
ognibuild/debian/build_deps.py
ognibuild/debian/file_search.py
ognibuild/debian/fix_build.py
ognibuild/debian/udd.py
ognibuild/resolver/__init__.py
ognibuild/resolver/apt.py
ognibuild/session/__init__.py
ognibuild/session/plain.py
ognibuild/session/schroot.py
ognibuild/tests/__init__.py
ognibuild/tests/test_debian_build.py
ognibuild/tests/test_debian_fix_build.py

View file

@ -1 +0,0 @@

View file

@ -1,4 +0,0 @@
[console_scripts]
deb-fix-build = ognibuild.debian.fix_build:main
ogni = ognibuild.__main__:main

View file

@ -1,8 +0,0 @@
breezy
buildlog-consultant>=0.0.10
requirements-parser
[debian]
debmutate
python_apt
python_debian

View file

@ -1 +0,0 @@
ognibuild

View file

@ -18,12 +18,14 @@
import os
import stat
from typing import List, Dict, Type
__version__ = (0, 0, 7)
__version__ = (0, 0, 15)
version_string = '.'.join(map(str, __version__))
USER_AGENT = "Ognibuild"
USER_AGENT = f"Ognibuild/{version_string}"
class DetailedFailure(Exception):
@ -32,6 +34,12 @@ class DetailedFailure(Exception):
self.argv = argv
self.error = error
def __eq__(self, other):
return (isinstance(other, type(self)) and
self.retcode == other.retcode and
self.argv == other.argv and
self.error == other.error)
class UnidentifiedError(Exception):
"""An unidentified error."""
@ -42,6 +50,13 @@ class UnidentifiedError(Exception):
self.lines = lines
self.secondary = secondary
def __eq__(self, other):
return (isinstance(other, type(self)) and
self.retcode == other.retcode and
self.argv == other.argv and
self.lines == other.lines and
self.secondary == other.secondary)
def __repr__(self):
return "<%s(%r, %r, ..., secondary=%r)>" % (
type(self).__name__,
@ -64,17 +79,64 @@ def shebang_binary(p):
return os.path.basename(args[0].decode()).strip()
class UnknownRequirementFamily(Exception):
"""Requirement family is unknown"""
def __init__(self, family):
self.family = family
class Requirement(object):
# Name of the family of requirements - e.g. "python-package"
family: str
def __init__(self, family):
self.family = family
_JSON_DESERIALIZERS: Dict[str, Type["Requirement"]] = {}
@classmethod
def _from_json(self, js):
raise NotImplementedError(self._from_json)
@classmethod
def from_json(self, js):
try:
family = Requirement._JSON_DESERIALIZERS[js[0]]
except KeyError:
raise UnknownRequirementFamily(js[0])
return family._from_json(js[1])
def met(self, session):
raise NotImplementedError(self)
def _json(self):
raise NotImplementedError(self._json)
def json(self):
return (type(self).family, self._json())
@classmethod
def register_json(cls, subcls):
Requirement._JSON_DESERIALIZERS[subcls.family] = subcls
class OneOfRequirement(Requirement):
elements: List[Requirement]
family = 'or'
def __init__(self, elements):
self.elements = elements
def met(self, session):
for req in self.elements:
if req.met(session):
return True
return False
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.elements)
class UpstreamOutput(object):
def __init__(self, family):

View file

@ -15,11 +15,13 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from contextlib import ExitStack
import logging
import os
import shlex
import sys
from . import UnidentifiedError, DetailedFailure
from urllib.parse import urlparse
from . import UnidentifiedError, DetailedFailure, version_string
from .buildlog import (
InstallFixer,
ExplainInstallFixer,
@ -29,9 +31,10 @@ from .buildlog import (
from .buildsystem import NoBuildToolsFound, detect_buildsystems
from .resolver import (
auto_resolver,
native_resolvers,
select_resolvers,
UnsatisfiedRequirements,
)
from .resolver.apt import AptResolver
from .session import SessionSetupFailure
def display_explain_commands(commands):
@ -39,34 +42,33 @@ def display_explain_commands(commands):
for command, reqs in commands:
if isinstance(command, list):
command = shlex.join(command)
logging.info(" %s (to install %s)", command, ", ".join(map(str, reqs)))
def get_necessary_declared_requirements(resolver, requirements, stages):
missing = []
for stage, req in requirements:
if stage in stages:
missing.append(req)
return missing
logging.info(
" %s (to install %s)", command, ", ".join(map(str, reqs)))
def install_necessary_declared_requirements(
session, resolver, fixers, buildsystems, stages, explain=False
):
relevant = []
declared_reqs = []
for buildsystem in buildsystems:
try:
declared_reqs.extend(buildsystem.get_declared_dependencies(session, fixers))
except NotImplementedError:
logging.warning(
"Unable to determine declared dependencies from %r", buildsystem
)
relevant.extend(
get_necessary_declared_requirements(resolver, declared_reqs, stages)
)
install_missing_reqs(session, resolver, relevant, explain=explain)
if explain:
relevant = []
for buildsystem in buildsystems:
declared_reqs = buildsystem.get_declared_dependencies(
session, fixers)
for stage, req in declared_reqs:
if stage in stages:
relevant.append(req)
install_missing_reqs(session, resolver, relevant, explain=True)
else:
for buildsystem in buildsystems:
try:
buildsystem.install_declared_requirements(
stages, session, resolver, fixers)
except NotImplementedError:
logging.warning(
"Unable to determine declared dependencies from %r",
buildsystem
)
# Types of dependencies:
@ -82,6 +84,7 @@ STAGE_MAP = {
"test": ["test", "build", "core"],
"build": ["build", "core"],
"clean": [],
"verify": ["build", "core", "test"],
}
@ -95,9 +98,13 @@ def determine_fixers(session, resolver, explain=False):
def main(): # noqa: C901
import argparse
parser = argparse.ArgumentParser()
parser = argparse.ArgumentParser(prog='ogni')
parser.add_argument(
"--directory", "-d", type=str, help="Directory for project.", default="."
"--version", action="version", version="%(prog)s " + version_string
)
parser.add_argument(
"--directory", "-d", type=str, help="Directory for project.",
default="."
)
parser.add_argument("--schroot", type=str, help="schroot to run in.")
parser.add_argument(
@ -123,6 +130,15 @@ def main(): # noqa: C901
action="store_true",
help="Ignore declared dependencies, follow build errors only",
)
parser.add_argument(
"--user", action="store_true",
help="Install in local-user directories."
)
parser.add_argument(
"--dep-server-url", type=str,
help="ognibuild dep server to use",
default=os.environ.get('OGNIBUILD_DEPS'))
parser.add_argument("--verbose", action="store_true", help="Be verbose")
subparsers = parser.add_subparsers(dest="subcommand")
subparsers.add_parser("dist")
@ -130,12 +146,11 @@ def main(): # noqa: C901
subparsers.add_parser("clean")
subparsers.add_parser("test")
subparsers.add_parser("info")
subparsers.add_parser("verify")
exec_parser = subparsers.add_parser("exec")
exec_parser.add_argument('subargv', nargs=argparse.REMAINDER, help='Command to run.')
exec_parser.add_argument(
'subargv', nargs=argparse.REMAINDER, help='Command to run.')
install_parser = subparsers.add_parser("install")
install_parser.add_argument(
"--user", action="store_true", help="Install in local-user directories."
)
install_parser.add_argument(
"--prefix", type=str, help='Prefix to install in')
@ -155,38 +170,72 @@ def main(): # noqa: C901
from .session.plain import PlainSession
session = PlainSession()
with session:
logging.info("Preparing directory %s", args.directory)
external_dir, internal_dir = session.setup_from_directory(args.directory)
with ExitStack() as es:
try:
es.enter_context(session)
except SessionSetupFailure as e:
logging.debug('Error lines: %r', e.errlines)
logging.fatal('Failed to set up session: %s', e.reason)
return 1
parsed_url = urlparse(args.directory)
# TODO(jelmer): Get a list of supported schemes from breezy?
if parsed_url.scheme in ('git', 'http', 'https', 'ssh'):
import breezy.git # noqa: F401
import breezy.bzr # noqa: F401
from breezy.branch import Branch
from silver_platter.utils import TemporarySprout
b = Branch.open(args.directory)
logging.info("Cloning %s", args.directory)
wt = es.enter_context(TemporarySprout(b))
external_dir, internal_dir = session.setup_from_vcs(wt)
else:
if parsed_url.scheme == 'file':
directory = parsed_url.path
else:
directory = args.directory
logging.info("Preparing directory %s", directory)
external_dir, internal_dir = session.setup_from_directory(
directory)
session.chdir(internal_dir)
os.chdir(external_dir)
if not session.is_temporary and args.subcommand == 'info':
args.explain = True
if args.resolve == "apt":
resolver = AptResolver.from_session(session)
elif args.resolve == "native":
resolver = native_resolvers(session, user_local=args.user)
elif args.resolve == "auto":
if args.resolve == "auto":
resolver = auto_resolver(session, explain=args.explain)
else:
resolver = select_resolvers(
session, user_local=args.user,
resolvers=args.resolve.split(','),
dep_server_url=args.dep_server_url)
logging.info("Using requirement resolver: %s", resolver)
fixers = determine_fixers(session, resolver, explain=args.explain)
try:
if args.subcommand == "exec":
from .fix_build import run_with_build_fixers
run_with_build_fixers(session, args.subargv, fixers)
run_with_build_fixers(fixers, session, args.subargv)
return 0
bss = list(detect_buildsystems(args.directory))
bss = list(detect_buildsystems(external_dir))
logging.info("Detected buildsystems: %s", ", ".join(map(str, bss)))
if not args.ignore_declared_dependencies:
stages = STAGE_MAP[args.subcommand]
if stages:
logging.info("Checking that declared requirements are present")
logging.info(
"Checking that declared requirements are present")
try:
install_necessary_declared_requirements(
session, resolver, fixers, bss, stages, explain=args.explain
session, resolver, fixers, bss, stages,
explain=args.explain
)
except UnsatisfiedRequirements as e:
logging.info(
'Unable to install declared dependencies:')
for req in e.requirements:
logging.info(' * %s', req)
return 1
except ExplainInstall as e:
display_explain_commands(e.commands)
return 1
@ -207,11 +256,15 @@ def main(): # noqa: C901
if args.subcommand == "build":
from .build import run_build
run_build(session, buildsystems=bss, resolver=resolver, fixers=fixers)
run_build(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
if args.subcommand == "clean":
from .clean import run_clean
run_clean(session, buildsystems=bss, resolver=resolver, fixers=fixers)
run_clean(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
if args.subcommand == "install":
from .install import run_install
@ -226,14 +279,42 @@ def main(): # noqa: C901
if args.subcommand == "test":
from .test import run_test
run_test(session, buildsystems=bss, resolver=resolver, fixers=fixers)
run_test(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
if args.subcommand == "info":
from .info import run_info
run_info(session, buildsystems=bss, fixers=fixers)
if args.subcommand == "verify":
from .build import run_build
from .test import run_test
run_build(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
run_test(
session, buildsystems=bss, resolver=resolver,
fixers=fixers)
except ExplainInstall as e:
display_explain_commands(e.commands)
except (UnidentifiedError, DetailedFailure):
except UnidentifiedError:
logging.info(
'If there is a clear indication of a problem in the build '
'log, please consider filing a request to update the patterns '
'in buildlog-consultant at '
'https://github.com/jelmer/buildlog-consultant/issues/new')
return 1
except DetailedFailure:
if not args.verbose:
logging.info(
'Run with --verbose to get more information '
'about steps taken to try to resolve error')
logging.info(
'Please consider filing a bug report at '
'https://github.com/jelmer/ognibuild/issues/new')
return 1
except NoBuildToolsFound:
logging.info("No build tools found.")

View file

@ -15,16 +15,28 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from functools import partial
from .buildsystem import NoBuildToolsFound
from .fix_build import iterate_with_build_fixers
from .logs import NoLogManager
def run_build(session, buildsystems, resolver, fixers):
BUILD_LOG_FILENAME = 'build.log'
def run_build(session, buildsystems, resolver, fixers, log_manager=None):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
if log_manager is None:
log_manager = NoLogManager()
for buildsystem in buildsystems:
buildsystem.build(session, resolver, fixers)
iterate_with_build_fixers(
fixers, log_manager.wrap(
partial(buildsystem.build, session, resolver)))
return
raise NoBuildToolsFound()

View file

@ -19,65 +19,32 @@
"""
import logging
from typing import Optional, List, Callable, Union, Tuple
from buildlog_consultant.common import (
MissingPythonModule,
MissingPythonDistribution,
MissingCHeader,
MissingPkgConfig,
MissingCommand,
MissingFile,
MissingJavaScriptRuntime,
MissingSprocketsFile,
MissingGoPackage,
Problem,
MissingPerlFile,
MissingPerlModule,
MissingXmlEntity,
MissingJDKFile,
MissingJDK,
MissingJRE,
MissingNodeModule,
MissingNodePackage,
MissingPhpClass,
MissingRubyGem,
MissingLibrary,
MissingSetupPyCommand,
MissingJavaClass,
MissingCSharpCompiler,
MissingRPackage,
MissingRubyFile,
MissingAutoconfMacro,
MissingValaPackage,
MissingBoostComponents,
MissingCMakeComponents,
MissingXfceDependency,
MissingHaskellDependencies,
MissingVagueDependency,
DhAddonLoadFailure,
MissingMavenArtifacts,
MissingIntrospectionTypelib,
GnomeCommonMissing,
MissingGnomeCommonDependency,
UnknownCertificateAuthority,
CMakeFilesMissing,
MissingLibtool,
MissingQt,
MissingX11,
MissingPerlPredeclared,
MissingLatexFile,
MissingCargoCrate,
MissingStaticLibrary,
)
from buildlog_consultant.apt import UnsatisfiedAptDependencies
from . import OneOfRequirement
from .fix_build import BuildFixer
from .requirements import (
Requirement,
BinaryRequirement,
PathRequirement,
PkgConfigRequirement,
CHeaderRequirement,
JavaScriptRuntimeRequirement,
ValaPackageRequirement,
RubyGemRequirement,
GoPackageRequirement,
DhAddonRequirement,
PhpClassRequirement,
@ -92,6 +59,7 @@ from .requirements import (
HaskellPackageRequirement,
MavenArtifactRequirement,
BoostComponentRequirement,
KF5ComponentRequirement,
GnomeCommonRequirement,
JDKFileRequirement,
JDKRequirement,
@ -112,86 +80,124 @@ from .requirements import (
LatexPackageRequirement,
CargoCrateRequirement,
StaticLibraryRequirement,
GnulibDirectoryRequirement,
LuaModuleRequirement,
PHPExtensionRequirement,
VcsControlDirectoryAccessRequirement,
RubyGemRequirement,
QtModuleRequirement,
)
from .resolver import UnsatisfiedRequirements
def problem_to_upstream_requirement(problem): # noqa: C901
if isinstance(problem, MissingFile):
return PathRequirement(problem.path)
elif isinstance(problem, MissingCommand):
return BinaryRequirement(problem.command)
elif isinstance(problem, MissingPkgConfig):
return PkgConfigRequirement(problem.module, problem.minimum_version)
elif isinstance(problem, MissingCHeader):
return CHeaderRequirement(problem.header)
elif isinstance(problem, MissingIntrospectionTypelib):
return IntrospectionTypelibRequirement(problem.library)
elif isinstance(problem, MissingJavaScriptRuntime):
return JavaScriptRuntimeRequirement()
elif isinstance(problem, MissingRubyGem):
return RubyGemRequirement(problem.gem, problem.version)
elif isinstance(problem, MissingValaPackage):
return ValaPackageRequirement(problem.package)
elif isinstance(problem, MissingGoPackage):
return GoPackageRequirement(problem.package)
elif isinstance(problem, MissingBoostComponents):
return [BoostComponentRequirement(name) for name in problem.components]
elif isinstance(problem, DhAddonLoadFailure):
return DhAddonRequirement(problem.path)
elif isinstance(problem, MissingPhpClass):
return PhpClassRequirement(problem.php_class)
elif isinstance(problem, MissingRPackage):
return RPackageRequirement(problem.package, problem.minimum_version)
elif isinstance(problem, MissingNodeModule):
return NodeModuleRequirement(problem.module)
elif isinstance(problem, MissingStaticLibrary):
return StaticLibraryRequirement(problem.library, problem.filename)
elif isinstance(problem, MissingNodePackage):
return NodePackageRequirement(problem.package)
def map_pytest_arguments_to_plugin(args):
# TODO(jelmer): Map argument to PytestPluginRequirement
return None
ProblemToRequirementConverter = Callable[[Problem], Optional[Requirement]]
PROBLEM_CONVERTERS: List[Union[
Tuple[str, ProblemToRequirementConverter],
Tuple[str, ProblemToRequirementConverter, str]]] = [
('missing-file', lambda p: PathRequirement(p.path)),
('command-missing', lambda p: BinaryRequirement(p.command)),
('valac-cannot-compile', lambda p: VagueDependencyRequirement('valac'),
'0.0.27'),
('missing-cmake-files', lambda p: OneOfRequirement(
[CMakefileRequirement(filename, p.version)
for filename in p.filenames])),
('missing-command-or-build-file', lambda p: BinaryRequirement(p.command)),
('missing-pkg-config-package',
lambda p: PkgConfigRequirement(p.module, p.minimum_version)),
('missing-c-header', lambda p: CHeaderRequirement(p.header)),
('missing-introspection-typelib',
lambda p: IntrospectionTypelibRequirement(p.library)),
('missing-python-module', lambda p: PythonModuleRequirement(
p.module, python_version=p.python_version,
minimum_version=p.minimum_version)),
('missing-python-distribution', lambda p: PythonPackageRequirement(
p.distribution, python_version=p.python_version,
minimum_version=p.minimum_version)),
('javascript-runtime-missing', lambda p: JavaScriptRuntimeRequirement()),
('missing-node-module', lambda p: NodeModuleRequirement(p.module)),
('missing-node-package', lambda p: NodePackageRequirement(p.package)),
('missing-ruby-gem', lambda p: RubyGemRequirement(p.gem, p.version)),
('missing-qt-modules', lambda p: QtModuleRequirement(p.modules[0]),
'0.0.27'),
('missing-php-class', lambda p: PhpClassRequirement(p.php_class)),
('missing-r-package', lambda p: RPackageRequirement(
p.package, p.minimum_version)),
('missing-vague-dependency',
lambda p: VagueDependencyRequirement(
p.name, minimum_version=p.minimum_version)),
('missing-c#-compiler', lambda p: BinaryRequirement("msc")),
('missing-gnome-common', lambda p: GnomeCommonRequirement()),
('missing-jdk', lambda p: JDKRequirement()),
('missing-jre', lambda p: JRERequirement()),
('missing-qt', lambda p: QTRequirement()),
('missing-x11', lambda p: X11Requirement()),
('missing-libtool', lambda p: LibtoolRequirement()),
('missing-php-extension',
lambda p: PHPExtensionRequirement(p.extension)),
('missing-rust-compiler', lambda p: BinaryRequirement("rustc")),
('missing-java-class', lambda p: JavaClassRequirement(p.classname)),
('missing-go-package', lambda p: GoPackageRequirement(p.package)),
('missing-autoconf-macro', lambda p: AutoconfMacroRequirement(p.macro)),
('missing-vala-package', lambda p: ValaPackageRequirement(p.package)),
('missing-lua-module', lambda p: LuaModuleRequirement(p.module)),
('missing-jdk-file', lambda p: JDKFileRequirement(p.jdk_path, p.filename)),
('missing-ruby-file', lambda p: RubyFileRequirement(p.filename)),
('missing-library', lambda p: LibraryRequirement(p.library)),
('missing-sprockets-file',
lambda p: SprocketsFileRequirement(p.content_type, p.name)),
('dh-addon-load-failure', lambda p: DhAddonRequirement(p.path)),
('missing-xml-entity', lambda p: XmlEntityRequirement(p.url)),
('missing-gnulib-directory',
lambda p: GnulibDirectoryRequirement(p.directory)),
('vcs-control-directory-needed',
lambda p: VcsControlDirectoryAccessRequirement(p.vcs)),
('missing-static-library',
lambda p: StaticLibraryRequirement(p.library, p.filename)),
('missing-perl-module',
lambda p: PerlModuleRequirement(
module=p.module, filename=p.filename, inc=p.inc)),
('unknown-certificate-authority',
lambda p: CertificateAuthorityRequirement(p.url)),
('unsupported-pytest-arguments',
lambda p: map_pytest_arguments_to_plugin(p.args), '0.0.27'),
]
def problem_to_upstream_requirement(
problem: Problem) -> Optional[Requirement]: # noqa: C901
for entry in PROBLEM_CONVERTERS:
kind, fn = entry[:2]
if kind == problem.kind:
return fn(problem)
if isinstance(problem, MissingCMakeComponents):
if problem.name.lower() == 'boost':
return OneOfRequirement(
[BoostComponentRequirement(name)
for name in problem.components])
elif problem.name.lower() == 'kf5':
return OneOfRequirement(
[KF5ComponentRequirement(name) for name in problem.components])
return None
elif isinstance(problem, MissingLatexFile):
if problem.filename.endswith('.sty'):
return LatexPackageRequirement(problem.filename[:-4])
return None
elif isinstance(problem, MissingVagueDependency):
return VagueDependencyRequirement(problem.name, minimum_version=problem.minimum_version)
elif isinstance(problem, MissingLibrary):
return LibraryRequirement(problem.library)
elif isinstance(problem, MissingRubyFile):
return RubyFileRequirement(problem.filename)
elif isinstance(problem, MissingXmlEntity):
return XmlEntityRequirement(problem.url)
elif isinstance(problem, MissingSprocketsFile):
return SprocketsFileRequirement(problem.content_type, problem.name)
elif isinstance(problem, MissingJavaClass):
return JavaClassRequirement(problem.classname)
elif isinstance(problem, CMakeFilesMissing):
return [CMakefileRequirement(filename) for filename in problem.filenames]
elif isinstance(problem, MissingHaskellDependencies):
return [HaskellPackageRequirement.from_string(dep) for dep in problem.deps]
return OneOfRequirement(
[HaskellPackageRequirement.from_string(dep)
for dep in problem.deps])
elif isinstance(problem, MissingMavenArtifacts):
return [
return OneOfRequirement([
MavenArtifactRequirement.from_str(artifact)
for artifact in problem.artifacts
]
elif isinstance(problem, MissingCSharpCompiler):
return BinaryRequirement("msc")
elif isinstance(problem, GnomeCommonMissing):
return GnomeCommonRequirement()
elif isinstance(problem, MissingJDKFile):
return JDKFileRequirement(problem.jdk_path, problem.filename)
elif isinstance(problem, MissingJDK):
return JDKRequirement()
elif isinstance(problem, MissingJRE):
return JRERequirement()
elif isinstance(problem, MissingQt):
return QTRequirement()
elif isinstance(problem, MissingX11):
return X11Requirement()
elif isinstance(problem, MissingLibtool):
return LibtoolRequirement()
elif isinstance(problem, UnknownCertificateAuthority):
return CertificateAuthorityRequirement(problem.url)
])
elif isinstance(problem, MissingPerlPredeclared):
ret = PerlPreDeclaredRequirement(problem.name)
try:
@ -210,36 +216,20 @@ def problem_to_upstream_requirement(problem): # noqa: C901
return BinaryRequirement("glib-gettextize")
else:
logging.warning(
"No known command for gnome-common dependency %s", problem.package
"No known command for gnome-common dependency %s",
problem.package
)
return None
elif isinstance(problem, MissingXfceDependency):
if problem.package == "gtk-doc":
return BinaryRequirement("gtkdocize")
else:
logging.warning("No known command for xfce dependency %s", problem.package)
logging.warning(
"No known command for xfce dependency %s", problem.package)
return None
elif isinstance(problem, MissingPerlModule):
return PerlModuleRequirement(
module=problem.module, filename=problem.filename, inc=problem.inc
)
elif isinstance(problem, MissingPerlFile):
return PerlFileRequirement(filename=problem.filename)
elif isinstance(problem, MissingAutoconfMacro):
return AutoconfMacroRequirement(problem.macro)
elif isinstance(problem, MissingPythonModule):
return PythonModuleRequirement(
problem.module,
python_version=problem.python_version,
minimum_version=problem.minimum_version,
)
elif isinstance(problem, MissingPythonDistribution):
return PythonPackageRequirement(
problem.distribution,
python_version=problem.python_version,
minimum_version=problem.minimum_version,
)
elif isinstance(problem, UnsatisfiedAptDependencies):
elif problem.kind == 'unsatisfied-apt-dependencies':
from .resolver.apt import AptRequirement
return AptRequirement(problem.relations)
else:

File diff suppressed because it is too large Load diff

View file

@ -15,16 +15,25 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from functools import partial
from .fix_build import iterate_with_build_fixers
from .buildsystem import NoBuildToolsFound
from .logs import NoLogManager
def run_clean(session, buildsystems, resolver, fixers):
def run_clean(session, buildsystems, resolver, fixers, log_manager=None):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
if log_manager is None:
log_manager = NoLogManager()
for buildsystem in buildsystems:
buildsystem.clean(session, resolver, fixers)
iterate_with_build_fixers(
fixers, log_manager.wrap(
partial(buildsystem.clean, session, resolver)))
return
raise NoBuildToolsFound()

View file

@ -29,7 +29,8 @@ def satisfy_build_deps(session: Session, tree, debian_path):
deps.append(source[name].strip().strip(","))
except KeyError:
pass
for name in ["Build-Conflicts", "Build-Conflicts-Indep", "Build-Conflicts-Arch"]:
for name in ["Build-Conflicts", "Build-Conflicts-Indep",
"Build-Conflicts-Arch"]:
try:
deps.append("Conflicts: " + source[name])
except KeyError:

View file

@ -16,8 +16,9 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from debian.changelog import Version
import logging
from typing import List, Optional
from typing import List, Optional, Iterable
import os
from buildlog_consultant.apt import (
@ -37,7 +38,12 @@ from .file_search import (
def run_apt(
session: Session, args: List[str], prefix: Optional[List[str]] = None
) -> None:
"""Run apt."""
"""Run apt.
Raises:
DetailedFailure: When a known error occurs
UnidentifiedError: If an unknown error occurs
"""
if prefix is None:
prefix = []
args = prefix = ["apt", "-y"] + args
@ -48,7 +54,7 @@ def run_apt(
match, error = find_apt_get_failure(lines)
if error is not None:
raise DetailedFailure(retcode, args, error)
while lines and lines[-1] == "":
while lines and lines[-1].rstrip('\n') == "":
lines.pop(-1)
raise UnidentifiedError(retcode, args, lines, secondary=match)
@ -93,13 +99,18 @@ class AptManager(object):
def package_exists(self, package):
return package in self.apt_cache
def package_versions(self, package):
return list(self.apt_cache[package].versions)
def package_versions(self, package: str) -> Optional[Iterable[Version]]:
try:
return list(self.apt_cache[package].versions)
except KeyError:
return None
def get_packages_for_paths(self, paths, regex=False, case_insensitive=False):
async def get_packages_for_paths(
self, paths, regex: bool = False, case_insensitive: bool = False):
logging.debug("Searching for packages containing %r", paths)
return get_packages_for_paths(
paths, self.searchers(), regex=regex, case_insensitive=case_insensitive
return await get_packages_for_paths(
paths, self.searchers(), regex=regex,
case_insensitive=case_insensitive
)
def missing(self, packages):

View file

@ -17,6 +17,7 @@
__all__ = [
"get_build_architecture",
"version_add_suffix",
"add_dummy_changelog_entry",
"build",
"DetailedDebianBuildFailure",
@ -24,20 +25,22 @@ __all__ = [
]
from datetime import datetime
from debmutate.changelog import ChangelogEditor
import logging
import os
import re
import shlex
import subprocess
import sys
from typing import Optional, List, Tuple
from debian.changelog import Changelog
from debmutate.changelog import get_maintainer
from debian.changelog import Changelog, Version, ChangeBlock
from debmutate.changelog import get_maintainer, ChangelogEditor
from debmutate.reformatting import GeneratedFile
from breezy.mutabletree import MutableTree
from breezy.plugins.debian.builder import BuildFailedError
from breezy.tree import Tree
from breezy.workingtree import WorkingTree
from buildlog_consultant.sbuild import (
worker_failure_from_sbuild_log,
@ -45,10 +48,18 @@ from buildlog_consultant.sbuild import (
from .. import DetailedFailure as DetailedFailure, UnidentifiedError
BUILD_LOG_FILENAME = 'build.log'
DEFAULT_BUILDER = "sbuild --no-clean-source"
class ChangelogNotEditable(Exception):
"""Changelog can not be edited."""
def __init__(self, path):
self.path = path
class DetailedDebianBuildFailure(DetailedFailure):
def __init__(self, stage, phase, retcode, argv, error, description):
@ -60,7 +71,8 @@ class DetailedDebianBuildFailure(DetailedFailure):
class UnidentifiedDebianBuildError(UnidentifiedError):
def __init__(self, stage, phase, retcode, argv, lines, description, secondary=None):
def __init__(self, stage, phase, retcode, argv, lines, description,
secondary=None):
super(UnidentifiedDebianBuildError, self).__init__(
retcode, argv, lines, secondary)
self.stage = stage
@ -75,11 +87,12 @@ class MissingChangesFile(Exception):
self.filename = filename
def find_changes_files(path, package, version):
non_epoch_version = version.upstream_version
def find_changes_files(path: str, package: str, version: Version):
non_epoch_version = version.upstream_version or ''
if version.debian_version is not None:
non_epoch_version += "-%s" % version.debian_version
c = re.compile('%s_%s_(.*).changes' % (re.escape(package), re.escape(non_epoch_version)))
c = re.compile('%s_%s_(.*).changes' % (
re.escape(package), re.escape(non_epoch_version)))
for entry in os.scandir(path):
m = c.match(entry.name)
if m:
@ -109,15 +122,32 @@ def control_files_in_root(tree: Tree, subpath: str) -> bool:
return False
def version_add_suffix(version: Version, suffix: str) -> Version:
version = Version(str(version))
def add_suffix(v):
m = re.fullmatch("(.*)(" + re.escape(suffix) + ")([0-9]+)", v)
if m:
return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1)
else:
return v + suffix + "1"
if version.debian_revision:
version.debian_revision = add_suffix(version.debian_revision)
else:
version.upstream_version = add_suffix(version.upstream_version)
return version
def add_dummy_changelog_entry(
tree: MutableTree,
subpath: str,
suffix: str,
suite: str,
message: str,
timestamp=None,
maintainer=None,
):
timestamp: Optional[datetime] = None,
maintainer: Tuple[Optional[str], Optional[str]] = None,
allow_reformatting: bool = True,
) -> Version:
"""Add a dummy changelog entry to a package.
Args:
@ -125,18 +155,10 @@ def add_dummy_changelog_entry(
suffix: Suffix for the version
suite: Debian suite
message: Changelog message
Returns:
version of the newly added entry
"""
def add_suffix(v, suffix):
m = re.fullmatch(
"(.*)(" + re.escape(suffix) + ")([0-9]+)",
v,
)
if m:
return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1)
else:
return v + suffix + "1"
if control_files_in_root(tree, subpath):
path = os.path.join(subpath, "changelog")
else:
@ -145,38 +167,38 @@ def add_dummy_changelog_entry(
maintainer = get_maintainer()
if timestamp is None:
timestamp = datetime.now()
with ChangelogEditor(tree.abspath(os.path.join(path))) as editor:
version = editor[0].version
if version.debian_revision:
version.debian_revision = add_suffix(version.debian_revision, suffix)
else:
version.upstream_version = add_suffix(version.upstream_version, suffix)
editor.auto_version(version, timestamp=timestamp)
editor.add_entry(
summary=[message], maintainer=maintainer, timestamp=timestamp, urgency='low')
editor[0].distributions = suite
try:
with ChangelogEditor(
tree.abspath(path), # type: ignore
allow_reformatting=allow_reformatting) as editor:
version = version_add_suffix(editor[0].version, suffix)
editor.auto_version(version, timestamp=timestamp)
editor.add_entry(
summary=[message], maintainer=maintainer, timestamp=timestamp,
urgency='low')
editor[0].distributions = suite
return version
except GeneratedFile as e:
raise ChangelogNotEditable(path) from e
def get_latest_changelog_entry(local_tree, subpath=""):
def get_latest_changelog_entry(
local_tree: WorkingTree, subpath: str = "") -> ChangeBlock:
if control_files_in_root(local_tree, subpath):
path = os.path.join(subpath, "changelog")
else:
path = os.path.join(subpath, "debian", "changelog")
with local_tree.get_file(path) as f:
cl = Changelog(f, max_blocks=1)
return cl.package, cl.version
return cl[0]
def build(
local_tree,
outf,
build_command=DEFAULT_BUILDER,
result_dir=None,
distribution=None,
subpath="",
source_date_epoch=None,
extra_repositories=None,
):
def _builddeb_command(
build_command: str = DEFAULT_BUILDER,
result_dir: Optional[str] = None,
apt_repository: Optional[str] = None,
apt_repository_key: Optional[str] = None,
extra_repositories: Optional[List[str]] = None):
for repo in extra_repositories or []:
build_command += " --extra-repository=" + shlex.quote(repo)
args = [
@ -187,8 +209,34 @@ def build(
"--guess-upstream-branch-url",
"--builder=%s" % build_command,
]
if apt_repository:
args.append("--apt-repository=%s" % apt_repository)
if apt_repository_key:
args.append("--apt-repository-key=%s" % apt_repository_key)
if result_dir:
args.append("--result-dir=%s" % result_dir)
return args
def build(
local_tree: WorkingTree,
outf,
build_command: str = DEFAULT_BUILDER,
result_dir: Optional[str] = None,
distribution: Optional[str] = None,
subpath: str = "",
source_date_epoch: Optional[int] = None,
apt_repository: Optional[str] = None,
apt_repository_key: Optional[str] = None,
extra_repositories: Optional[List[str]] = None,
):
args = _builddeb_command(
build_command=build_command,
result_dir=result_dir,
apt_repository=apt_repository,
apt_repository_key=apt_repository_key,
extra_repositories=extra_repositories)
outf.write("Running %r\n" % (build_command,))
outf.flush()
env = dict(os.environ.items())
@ -199,22 +247,25 @@ def build(
logging.info("Building debian packages, running %r.", build_command)
try:
subprocess.check_call(
args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, env=env
args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf,
env=env
)
except subprocess.CalledProcessError:
raise BuildFailedError()
def build_once(
local_tree,
build_suite,
output_directory,
build_command,
subpath="",
source_date_epoch=None,
extra_repositories=None
local_tree: WorkingTree,
build_suite: str,
output_directory: str,
build_command: str,
subpath: str = "",
source_date_epoch: Optional[int] = None,
apt_repository: Optional[str] = None,
apt_repository_key: Optional[str] = None,
extra_repositories: Optional[List[str]] = None
):
build_log_path = os.path.join(output_directory, "build.log")
build_log_path = os.path.join(output_directory, BUILD_LOG_FILENAME)
logging.debug("Writing build log to %s", build_log_path)
try:
with open(build_log_path, "w") as f:
@ -226,6 +277,8 @@ def build_once(
distribution=build_suite,
subpath=subpath,
source_date_epoch=source_date_epoch,
apt_repository=apt_repository,
apt_repository_key=apt_repository_key,
extra_repositories=extra_repositories,
)
except BuildFailedError as e:
@ -247,27 +300,39 @@ def build_once(
[], sbuild_failure.description)
cl_entry = get_latest_changelog_entry(local_tree, subpath)
if cl_entry.package is None:
raise Exception('missing package in changelog entry')
changes_names = []
for kind, entry in find_changes_files(output_directory, cl_entry.package, cl_entry.version):
for kind, entry in find_changes_files(
output_directory, cl_entry.package, cl_entry.version):
changes_names.append((entry.name))
return (changes_names, cl_entry)
class GitBuildpackageMissing(Exception):
"""git-buildpackage is not installed"""
def gbp_dch(path):
subprocess.check_call(["gbp", "dch", "--ignore-branch"], cwd=path)
try:
subprocess.check_call(["gbp", "dch", "--ignore-branch"], cwd=path)
except FileNotFoundError:
raise GitBuildpackageMissing()
def attempt_build(
local_tree,
suffix,
build_suite,
output_directory,
build_command,
build_changelog_entry=None,
subpath="",
source_date_epoch=None,
run_gbp_dch=False,
extra_repositories=None
local_tree: WorkingTree,
suffix: str,
build_suite: str,
output_directory: str,
build_command: str,
build_changelog_entry: Optional[str] = None,
subpath: str = "",
source_date_epoch: Optional[int] = None,
run_gbp_dch: bool = False,
apt_repository: Optional[str] = None,
apt_repository_key: Optional[str] = None,
extra_repositories: Optional[List[str]] = None
):
"""Attempt a build, with a custom distribution set.
@ -282,7 +347,7 @@ def attempt_build(
source_date_epoch: Source date epoch to set
Returns: Tuple with (changes_name, cl_version)
"""
if run_gbp_dch and not subpath:
if run_gbp_dch and not subpath and hasattr(local_tree.controldir, '_git'):
gbp_dch(local_tree.abspath(subpath))
if build_changelog_entry is not None:
add_dummy_changelog_entry(
@ -295,5 +360,7 @@ def attempt_build(
build_command,
subpath,
source_date_epoch=source_date_epoch,
apt_repository=apt_repository,
apt_repository_key=apt_repository_key,
extra_repositories=extra_repositories,
)

View file

@ -18,43 +18,46 @@
"""Tie breaking by build deps."""
from debian.deb822 import PkgRelation
import logging
from breezy.plugins.debian.apt_repo import LocalApt, NoAptSources
class BuildDependencyTieBreaker(object):
def __init__(self, rootdir):
self.rootdir = rootdir
def __init__(self, apt):
self.apt = apt
self._counts = None
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.rootdir)
return "%s(%r)" % (type(self).__name__, self.apt)
@classmethod
def from_session(cls, session):
return cls(session.location)
return cls(LocalApt(session.location))
def _count(self):
counts = {}
import apt_pkg
apt_pkg.init()
apt_pkg.config.set("Dir", self.rootdir)
apt_cache = apt_pkg.SourceRecords()
apt_cache.restart()
while apt_cache.step():
try:
for d in apt_cache.build_depends.values():
for o in d:
for p in o:
counts.setdefault(p[0], 0)
counts[p[0]] += 1
except AttributeError:
pass
with self.apt:
for source in self.apt.iter_sources():
for field in ['Build-Depends', 'Build-Depends-Indep',
'Build-Depends-Arch']:
for r in PkgRelation.parse_relations(
source.get(field, '')):
for p in r:
counts.setdefault(p['name'], 0)
counts[p['name']] += 1
return counts
def __call__(self, reqs):
if self._counts is None:
self._counts = self._count()
try:
self._counts = self._count()
except NoAptSources:
logging.warning(
"No 'deb-src' in sources.list, "
"unable to break build-depends")
return None
by_count = {}
for req in reqs:
try:
@ -80,5 +83,5 @@ if __name__ == "__main__":
parser.add_argument("req", nargs="+")
args = parser.parse_args()
reqs = [AptRequirement.from_str(req) for req in args.req]
tie_breaker = BuildDependencyTieBreaker("/")
tie_breaker = BuildDependencyTieBreaker(LocalApt())
print(tie_breaker(reqs))

View file

@ -17,12 +17,13 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import apt_pkg
import asyncio
from datetime import datetime
from debian.deb822 import Release
import os
import re
import subprocess
from typing import Iterator, List
from typing import List, AsyncIterator
import logging
@ -32,11 +33,15 @@ from ..session import Session
class FileSearcher(object):
def search_files(
self, path: str, regex: bool = False, case_insensitive: bool = False
) -> Iterator[str]:
self, path: str, regex: bool = False,
case_insensitive: bool = False) -> AsyncIterator[str]:
raise NotImplementedError(self.search_files)
class AptFileAccessError(Exception):
"""Apt file access error."""
class ContentsFileNotFound(Exception):
"""The contents file was not found."""
@ -71,7 +76,8 @@ def contents_urls_from_sources_entry(source, arches, load_url):
response = load_url(release_url)
except FileNotFoundError as e:
logging.warning(
"Unable to download %s or %s: %s", inrelease_url, release_url, e
"Unable to download %s or %s: %s", inrelease_url,
release_url, e
)
return
@ -118,7 +124,7 @@ def _unwrap(f, ext):
def load_direct_url(url):
from urllib.error import HTTPError
from urllib.error import HTTPError, URLError
from urllib.request import urlopen, Request
for ext in [".xz", ".gz", ""]:
@ -128,7 +134,11 @@ def load_direct_url(url):
except HTTPError as e:
if e.status == 404:
continue
raise
raise AptFileAccessError(
'Unable to access apt URL %s: %s' % (url + ext, e))
except URLError as e:
raise AptFileAccessError(
'Unable to access apt URL %s: %s' % (url + ext, e))
break
else:
raise FileNotFoundError(url)
@ -187,7 +197,7 @@ class AptFileFileSearcher(FileSearcher):
@classmethod
def from_session(cls, session):
logging.info('Using apt-file to search apt contents')
logging.debug('Using apt-file to search apt contents')
if not os.path.exists(session.external_path(cls.CACHE_IS_EMPTY_PATH)):
from .apt import AptManager
AptManager.from_session(session).install(['apt-file'])
@ -195,7 +205,7 @@ class AptFileFileSearcher(FileSearcher):
session.check_call(['apt-file', 'update'], user='root')
return cls(session)
def search_files(self, path, regex=False, case_insensitive=False):
async def search_files(self, path, regex=False, case_insensitive=False):
args = []
if regex:
args.append('-x')
@ -204,15 +214,17 @@ class AptFileFileSearcher(FileSearcher):
if case_insensitive:
args.append('-i')
args.append(path)
try:
output = self.session.check_output(['/usr/bin/apt-file', 'search'] + args)
except subprocess.CalledProcessError as e:
if e.returncode == 1:
# No results
return
if e.returncode == 3:
raise Exception('apt-file cache is empty')
raise
process = await asyncio.create_subprocess_exec(
'/usr/bin/apt-file', 'search', *args,
stdout=asyncio.subprocess.PIPE)
(output, error) = await process.communicate(input=None)
if process.returncode == 1:
# No results
return
elif process.returncode == 3:
raise Exception('apt-file cache is empty')
elif process.returncode != 0:
raise Exception("unexpected return code %d" % process.returncode)
for line in output.splitlines(False):
pkg, path = line.split(b': ')
@ -253,7 +265,8 @@ class RemoteContentsFileSearcher(FileSearcher):
return load_url_with_cache(url, cache_dirs)
urls = list(
contents_urls_from_sourceslist(sl, get_build_architecture(), load_url)
contents_urls_from_sourceslist(
sl, get_build_architecture(), load_url)
)
self._load_urls(urls, cache_dirs, load_url)
@ -277,8 +290,8 @@ class RemoteContentsFileSearcher(FileSearcher):
return load_url_with_cache(url, cache_dirs)
urls = list(
contents_urls_from_sourceslist(sl, get_build_architecture(), load_url)
)
contents_urls_from_sourceslist(
sl, get_build_architecture(), load_url))
self._load_urls(urls, cache_dirs, load_url)
def _load_urls(self, urls, cache_dirs, load_url):
@ -286,13 +299,16 @@ class RemoteContentsFileSearcher(FileSearcher):
try:
f = load_url(url)
self.load_file(f, url)
except ConnectionResetError:
logging.warning("Connection reset error retrieving %s", url)
# TODO(jelmer): Retry?
except ContentsFileNotFound:
logging.warning("Unable to fetch contents file %s", url)
def __setitem__(self, path, package):
self._db[path] = package
def search_files(self, path, regex=False, case_insensitive=False):
async def search_files(self, path, regex=False, case_insensitive=False):
path = path.lstrip("/").encode("utf-8", "surrogateescape")
if case_insensitive and not regex:
regex = True
@ -338,9 +354,9 @@ class GeneratedFileSearcher(FileSearcher):
(path, pkg) = line.strip().split(None, 1)
self._db.append(path, pkg)
def search_files(
self, path: str, regex: bool = False, case_insensitive: bool = False
) -> Iterator[str]:
async def search_files(
self, path: str, regex: bool = False,
case_insensitive: bool = False):
for p, pkg in self._db:
if regex:
flags = 0
@ -371,16 +387,17 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
)
def get_packages_for_paths(
async def get_packages_for_paths(
paths: List[str],
searchers: List[FileSearcher],
regex: bool = False,
case_insensitive: bool = False,
) -> List[str]:
candidates: List[str] = list()
# TODO(jelmer): Combine these, perhaps by creating one gigantic regex?
for path in paths:
for searcher in searchers:
for pkg in searcher.search_files(
async for pkg in searcher.search_files(
path, regex=regex, case_insensitive=case_insensitive
):
if pkg not in candidates:
@ -393,8 +410,10 @@ def main(argv):
from ..session.plain import PlainSession
parser = argparse.ArgumentParser()
parser.add_argument("path", help="Path to search for.", type=str, nargs="*")
parser.add_argument("--regex", "-x", help="Search for regex.", action="store_true")
parser.add_argument(
"path", help="Path to search for.", type=str, nargs="*")
parser.add_argument(
"--regex", "-x", help="Search for regex.", action="store_true")
parser.add_argument("--debug", action="store_true")
args = parser.parse_args()
@ -403,13 +422,14 @@ def main(argv):
else:
logging.basicConfig(level=logging.INFO)
main_searcher = get_apt_contents_file_searcher(PlainSession())
main_searcher.load_local()
searchers = [main_searcher, GENERATED_FILE_SEARCHER]
with PlainSession() as session:
main_searcher = get_apt_contents_file_searcher(session)
searchers = [main_searcher, GENERATED_FILE_SEARCHER]
packages = get_packages_for_paths(args.path, searchers=searchers, regex=args.regex)
for package in packages:
print(package)
packages = asyncio.run(get_packages_for_paths(
args.path, searchers=searchers, regex=args.regex))
for package in packages:
print(package)
if __name__ == "__main__":

View file

@ -22,10 +22,10 @@ __all__ = [
from functools import partial
import logging
import os
import re
import shutil
import sys
from typing import List, Set, Optional, Type
import time
from typing import List, Set, Optional, Type, Tuple
from debian.deb822 import (
Deb822,
@ -34,6 +34,8 @@ from debian.deb822 import (
from breezy.commit import PointlessCommit, NullCommitReporter
from breezy.tree import Tree
from breezy.workingtree import WorkingTree
from debmutate.changelog import ChangelogEditor
from debmutate.control import (
ensure_relation,
@ -50,49 +52,7 @@ from debmutate.reformatting import (
GeneratedFile,
)
try:
from breezy.workspace import reset_tree
except ImportError: # breezy < 3.2
def delete_items(deletables, dry_run=False):
"""Delete files in the deletables iterable"""
import errno
import shutil
def onerror(function, path, excinfo):
"""Show warning for errors seen by rmtree."""
# Handle only permission error while removing files.
# Other errors are re-raised.
if function is not os.remove or excinfo[1].errno != errno.EACCES:
raise
logging.warning("unable to remove %s" % path)
for path, subp in deletables:
if os.path.isdir(path):
shutil.rmtree(path, onerror=onerror)
else:
try:
os.unlink(path)
except OSError as e:
# We handle only permission error here
if e.errno != errno.EACCES:
raise e
logging.warning('unable to remove "%s": %s.', path, e.strerror)
def reset_tree(local_tree, subpath=""):
from breezy.transform import revert
from breezy.clean_tree import iter_deletables
revert(
local_tree,
local_tree.branch.basis_tree(),
[subpath] if subpath not in (".", "") else None,
)
deletables = list(
iter_deletables(local_tree, unknown=True, ignored=False, detritus=False)
)
delete_items(deletables)
from breezy.workspace import reset_tree
from debmutate._rules import (
dh_invoke_add_with,
@ -113,18 +73,21 @@ from buildlog_consultant.common import (
)
from buildlog_consultant.sbuild import (
DebcargoUnacceptablePredicate,
DebcargoUnacceptableComparator,
)
from .build import (
DetailedDebianBuildFailure,
UnidentifiedDebianBuildError,
)
from ..logs import rotate_logfile
from ..buildlog import problem_to_upstream_requirement
from ..fix_build import BuildFixer, resolve_error
from ..resolver.apt import (
AptRequirement,
)
from .build import attempt_build, DEFAULT_BUILDER
from .apt import AptManager
from .build import attempt_build, DEFAULT_BUILDER, BUILD_LOG_FILENAME
DEFAULT_MAX_ITERATIONS = 10
@ -150,7 +113,9 @@ class DebianPackagingContext(object):
def abspath(self, *parts):
return self.tree.abspath(os.path.join(self.subpath, *parts))
def commit(self, summary: str, update_changelog: Optional[bool] = None) -> bool:
def commit(
self, summary: str,
update_changelog: Optional[bool] = None) -> bool:
if update_changelog is None:
update_changelog = self.update_changelog
with self.tree.lock_write():
@ -214,6 +179,11 @@ def add_dependency(context, phase, requirement: AptRequirement):
return add_test_dependency(context, phase[1], requirement)
elif phase[0] == "build":
return add_build_dependency(context, requirement)
elif phase[0] == "buildenv":
# TODO(jelmer): Actually, we probably just want to install it on the
# host system?
logging.warning("Unknown phase %r", phase)
return False
else:
logging.warning("Unknown phase %r", phase)
return False
@ -231,16 +201,19 @@ def add_build_dependency(context, requirement: AptRequirement):
raise CircularDependency(binary["Package"])
for rel in requirement.relations:
updater.source["Build-Depends"] = ensure_relation(
updater.source.get("Build-Depends", ""), PkgRelation.str([rel])
updater.source.get("Build-Depends", ""),
PkgRelation.str([rel])
)
except FormattingUnpreservable as e:
logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
logging.info(
"Unable to edit %s in a way that preserves formatting.", e.path)
return False
desc = requirement.pkg_relation_str()
if not updater.changed:
logging.info("Giving up; dependency %s was already present.", desc)
logging.info(
"Giving up; build dependency %s was already present.", desc)
return False
logging.info("Adding build dependency: %s", desc)
@ -272,13 +245,18 @@ def add_test_dependency(context, testname, requirement):
control.get("Depends", ""), PkgRelation.str([rel])
)
except FormattingUnpreservable as e:
logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
return False
if not updater.changed:
logging.info(
"Unable to edit %s in a way that preserves formatting.", e.path)
return False
desc = requirement.pkg_relation_str()
if not updater.changed:
logging.info(
"Giving up; dependency %s for test %s was already present.",
desc, testname)
return False
logging.info("Adding dependency to test %s: %s", testname, desc)
return context.commit(
"Add missing dependency for test %s on %s." % (testname, desc),
@ -288,7 +266,8 @@ def add_test_dependency(context, testname, requirement):
def targeted_python_versions(tree: Tree, subpath: str) -> List[str]:
with tree.get_file(os.path.join(subpath, "debian/control")) as f:
control = Deb822(f)
build_depends = PkgRelation.parse_relations(control.get("Build-Depends", ""))
build_depends = PkgRelation.parse_relations(
control.get("Build-Depends", ""))
all_build_deps: Set[str] = set()
for or_deps in build_depends:
all_build_deps.update(or_dep["name"] for or_dep in or_deps)
@ -312,7 +291,7 @@ def python_tie_breaker(tree, subpath, reqs):
return True
if pkg.startswith("lib%s-" % python_version):
return True
if re.match(r'lib%s\.[0-9]-dev' % python_version, pkg):
if pkg == r'lib%s-dev' % python_version:
return True
return False
@ -337,7 +316,8 @@ def retry_apt_failure(error, phase, apt, context):
def enable_dh_autoreconf(context, phase):
# Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by
# default.
debhelper_compat_version = get_debhelper_compat_level(context.tree.abspath("."))
debhelper_compat_version = get_debhelper_compat_level(
context.tree.abspath("."))
if debhelper_compat_version is not None and debhelper_compat_version < 10:
def add_with_autoreconf(line, target):
@ -356,9 +336,8 @@ def enable_dh_autoreconf(context, phase):
def fix_missing_configure(error, phase, context):
if not context.tree.has_filename("configure.ac") and not context.tree.has_filename(
"configure.in"
):
if (not context.tree.has_filename("configure.ac")
and not context.tree.has_filename("configure.in")):
return False
return enable_dh_autoreconf(context, phase)
@ -433,7 +412,7 @@ def fix_missing_makefile_pl(error, phase, context):
return False
def coerce_unacceptable_predicate(error, phase, context):
def debcargo_coerce_unacceptable_prerelease(error, phase, context):
from debmutate.debcargo import DebcargoEditor
with DebcargoEditor(context.abspath('debian/debcargo.toml')) as editor:
editor['allow_prerelease_deps'] = True
@ -461,7 +440,8 @@ class SimpleBuildFixer(BuildFixer):
class DependencyBuildFixer(BuildFixer):
def __init__(self, packaging_context, apt_resolver, problem_cls: Type[Problem], fn):
def __init__(self, packaging_context, apt_resolver,
problem_cls: Type[Problem], fn):
self.context = packaging_context
self.apt_resolver = apt_resolver
self._problem_cls = problem_cls
@ -481,32 +461,47 @@ class DependencyBuildFixer(BuildFixer):
return self._fn(problem, phase, self.apt_resolver, self.context)
def versioned_package_fixers(session, packaging_context, apt):
def versioned_package_fixers(session, packaging_context, apt: AptManager):
return [
PgBuildExtOutOfDateControlFixer(packaging_context, session, apt),
SimpleBuildFixer(packaging_context, MissingConfigure, fix_missing_configure),
SimpleBuildFixer(
packaging_context, MissingConfigure, fix_missing_configure),
SimpleBuildFixer(
packaging_context, MissingAutomakeInput, fix_missing_automake_input
),
SimpleBuildFixer(
packaging_context, MissingConfigStatusInput, fix_missing_config_status_input
packaging_context, MissingConfigStatusInput,
fix_missing_config_status_input
),
SimpleBuildFixer(packaging_context, MissingPerlFile, fix_missing_makefile_pl),
SimpleBuildFixer(packaging_context, DebcargoUnacceptablePredicate, coerce_unacceptable_predicate),
SimpleBuildFixer(
packaging_context, MissingPerlFile, fix_missing_makefile_pl),
SimpleBuildFixer(
packaging_context, DebcargoUnacceptablePredicate,
debcargo_coerce_unacceptable_prerelease),
SimpleBuildFixer(
packaging_context, DebcargoUnacceptableComparator,
debcargo_coerce_unacceptable_prerelease),
]
def apt_fixers(apt, packaging_context) -> List[BuildFixer]:
def apt_fixers(apt: AptManager, packaging_context,
dep_server_url: Optional[str] = None) -> List[BuildFixer]:
from ..resolver.apt import AptResolver
from .udd import popcon_tie_breaker
from .build_deps import BuildDependencyTieBreaker
apt_tie_breakers = [
partial(python_tie_breaker, packaging_context.tree, packaging_context.subpath),
partial(python_tie_breaker, packaging_context.tree,
packaging_context.subpath),
BuildDependencyTieBreaker.from_session(apt.session),
popcon_tie_breaker,
]
resolver = AptResolver(apt, apt_tie_breakers)
resolver: AptResolver
if dep_server_url:
from ..resolver.dep_server import DepServerAptResolver
resolver = DepServerAptResolver(apt, dep_server_url, apt_tie_breakers)
else:
resolver = AptResolver(apt, apt_tie_breakers)
return [
DependencyBuildFixer(
packaging_context, apt, AptFetchFailure, retry_apt_failure
@ -515,38 +510,49 @@ def apt_fixers(apt, packaging_context) -> List[BuildFixer]:
]
def default_fixers(local_tree, subpath, apt, committer=None, update_changelog=None):
def default_fixers(
local_tree: WorkingTree,
subpath: str, apt: AptManager,
committer: Optional[str] = None,
update_changelog: Optional[bool] = None,
dep_server_url: Optional[str] = None):
packaging_context = DebianPackagingContext(
local_tree, subpath, committer, update_changelog,
commit_reporter=NullCommitReporter()
)
return versioned_package_fixers(apt.session, packaging_context, apt) + apt_fixers(
apt, packaging_context
)
return (versioned_package_fixers(apt.session, packaging_context, apt)
+ apt_fixers(apt, packaging_context, dep_server_url))
def build_incrementally(
local_tree,
apt,
suffix,
build_suite,
output_directory,
build_command,
local_tree: WorkingTree,
apt: AptManager,
suffix: str,
build_suite: str,
output_directory: str,
build_command: str,
build_changelog_entry,
committer=None,
max_iterations=DEFAULT_MAX_ITERATIONS,
subpath="",
committer: Optional[str] = None,
max_iterations: int = DEFAULT_MAX_ITERATIONS,
subpath: str = "",
source_date_epoch=None,
update_changelog=True,
extra_repositories=None,
fixers=None
update_changelog: bool = True,
apt_repository: Optional[str] = None,
apt_repository_key: Optional[str] = None,
extra_repositories: Optional[List[str]] = None,
fixers: Optional[List[BuildFixer]] = None,
run_gbp_dch: Optional[bool] = None,
dep_server_url: Optional[str] = None,
):
fixed_errors = []
fixed_errors: List[Tuple[Problem, str]] = []
if fixers is None:
fixers = default_fixers(
local_tree, subpath, apt, committer=committer,
update_changelog=update_changelog)
update_changelog=update_changelog,
dep_server_url=dep_server_url)
logging.info("Using fixers: %r", fixers)
if run_gbp_dch is None:
run_gbp_dch = (update_changelog is False)
while True:
try:
return attempt_build(
@ -558,7 +564,9 @@ def build_incrementally(
build_changelog_entry,
subpath=subpath,
source_date_epoch=source_date_epoch,
run_gbp_dch=(update_changelog is False),
run_gbp_dch=run_gbp_dch,
apt_repository=apt_repository,
apt_repository_key=apt_repository_key,
extra_repositories=extra_repositories,
)
except UnidentifiedDebianBuildError:
@ -569,15 +577,19 @@ def build_incrementally(
logging.info("No relevant context, not making any changes.")
raise
if (e.error, e.phase) in fixed_errors:
logging.warning("Error was still not fixed on second try. Giving up.")
logging.warning(
"Error was still not fixed on second try. Giving up.")
raise
if max_iterations is not None and len(fixed_errors) > max_iterations:
logging.warning("Last fix did not address the issue. Giving up.")
if (max_iterations is not None
and len(fixed_errors) > max_iterations):
logging.warning(
"Last fix did not address the issue. Giving up.")
raise
reset_tree(local_tree, subpath=subpath)
try:
if not resolve_error(e.error, e.phase, fixers):
logging.warning("Failed to resolve error %r. Giving up.", e.error)
logging.warning(
"Failed to resolve error %r. Giving up.", e.error)
raise
except GeneratedFile:
logging.warning(
@ -588,71 +600,71 @@ def build_incrementally(
raise e
except CircularDependency:
logging.warning(
"Unable to fix %r; it would introduce a circular " "dependency.",
"Unable to fix %r; it would introduce a circular "
"dependency.",
e.error,
)
raise e
fixed_errors.append((e.error, e.phase))
if os.path.exists(os.path.join(output_directory, "build.log")):
i = 1
while os.path.exists(
os.path.join(output_directory, "build.log.%d" % i)
):
i += 1
target_path = os.path.join(output_directory, "build.log.%d" % i)
os.rename(os.path.join(output_directory, "build.log"), target_path)
logging.debug("Storing build log at %s", target_path)
rotate_logfile(os.path.join(output_directory, BUILD_LOG_FILENAME))
def main(argv=None):
import argparse
parser = argparse.ArgumentParser("ognibuild.debian.fix_build")
parser.add_argument(
"--suffix", type=str, help="Suffix to use for test builds.", default="fixbuild1"
modifications = parser.add_argument_group('Modifications')
modifications.add_argument(
"--suffix", type=str, help="Suffix to use for test builds.",
default="fixbuild1"
)
parser.add_argument(
modifications.add_argument(
"--suite", type=str, help="Suite to target.", default="unstable"
)
parser.add_argument(
"--output-directory", type=str, help="Output directory.", default=None
modifications.add_argument(
"--committer", type=str, help="Committer string (name and email)",
default=None
)
parser.add_argument(
"--committer", type=str, help="Committer string (name and email)", default=None
)
parser.add_argument(
"--build-command",
type=str,
help="Build command",
default=(DEFAULT_BUILDER + " -A -s -v"),
)
parser.add_argument(
modifications.add_argument(
"--no-update-changelog",
action="store_false",
default=None,
dest="update_changelog",
help="do not update the changelog",
)
parser.add_argument(
'--max-iterations',
type=int,
default=DEFAULT_MAX_ITERATIONS,
help='Maximum number of issues to attempt to fix before giving up.')
parser.add_argument(
modifications.add_argument(
"--update-changelog",
action="store_true",
dest="update_changelog",
help="force updating of the changelog",
default=None,
)
parser.add_argument("--schroot", type=str, help="chroot to use.")
build_behaviour = parser.add_argument_group('Build Behaviour')
build_behaviour.add_argument(
"--output-directory", type=str, help="Output directory.", default=None
)
build_behaviour.add_argument(
"--build-command",
type=str,
help="Build command",
default=(DEFAULT_BUILDER + " -A -s -v"),
)
build_behaviour.add_argument(
'--max-iterations',
type=int,
default=DEFAULT_MAX_ITERATIONS,
help='Maximum number of issues to attempt to fix before giving up.')
build_behaviour.add_argument("--schroot", type=str, help="chroot to use.")
parser.add_argument(
"--dep-server-url", type=str,
help="ognibuild dep server to use",
default=os.environ.get('OGNIBUILD_DEPS'))
parser.add_argument("--verbose", action="store_true", help="Be verbose")
args = parser.parse_args()
from breezy.workingtree import WorkingTree
import breezy.git # noqa: F401
import breezy.bzr # noqa: F401
from .apt import AptManager
from ..session.plain import PlainSession
from ..session.schroot import SchrootSession
import tempfile
@ -669,6 +681,10 @@ def main(argv=None):
logging.info("Using output directory %s", output_directory)
else:
output_directory = args.output_directory
if not os.path.isdir(output_directory):
parser.error(
'output directory %s is not a directory'
% output_directory)
tree = WorkingTree.open(".")
if args.schroot:
@ -692,6 +708,7 @@ def main(argv=None):
committer=args.committer,
update_changelog=args.update_changelog,
max_iterations=args.max_iterations,
dep_server_url=args.dep_server_url,
)
except DetailedDebianBuildFailure as e:
if e.phase is None:
@ -701,6 +718,21 @@ def main(argv=None):
else:
phase = "%s (%s)" % (e.phase[0], e.phase[1])
logging.fatal("Error during %s: %s", phase, e.error)
if not args.output_directory:
xdg_cache_dir = os.environ.get(
'XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
buildlogs_dir = os.path.join(
xdg_cache_dir, 'ognibuild', 'buildlogs')
os.makedirs(buildlogs_dir, exist_ok=True)
target_log_file = os.path.join(
buildlogs_dir,
'%s-%s.log' % (
os.path.basename(getattr(tree, 'basedir', 'build')),
time.strftime('%Y-%m-%d_%H%M%s')))
shutil.copy(
os.path.join(output_directory, 'build.log'),
target_log_file)
logging.info('Build log available in %s', target_log_file)
return 1
except UnidentifiedDebianBuildError as e:
if e.phase is None:

View file

@ -35,7 +35,8 @@ class UDD(object):
def get_most_popular(self, packages):
cursor = self._conn.cursor()
cursor.execute(
"SELECT package FROM popcon WHERE package IN %s ORDER BY insts DESC LIMIT 1",
"SELECT package FROM popcon "
"WHERE package IN %s ORDER BY insts DESC LIMIT 1",
(tuple(packages),),
)
return cursor.fetchone()[0]
@ -54,7 +55,8 @@ def popcon_tie_breaker(candidates):
names = {list(c.package_names())[0]: c for c in candidates}
winner = udd.get_most_popular(list(names.keys()))
if winner is None:
logging.warning("No relevant popcon information found, not ranking by popcon")
logging.warning(
"No relevant popcon information found, not ranking by popcon")
return None
logging.info("Picked winner using popcon")
return names[winner]

126
ognibuild/dep_server.py Normal file
View file

@ -0,0 +1,126 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import sys
from aiohttp import web
from aiohttp_openmetrics import setup_metrics
from . import Requirement, UnknownRequirementFamily
from .debian.apt import AptManager
from .resolver.apt import resolve_requirement_apt
SUPPORTED_RELEASES = ['unstable', 'sid']
routes = web.RouteTableDef()
@routes.get('/health', name='health')
async def handle_health(request):
    """Liveness probe endpoint: always responds 200 with plain-text 'ok'."""
    return web.Response(text='ok')
@routes.get('/families', name='families')
async def handle_families(request):
    """List the requirement family names this server can deserialize.

    Reads the keys of Requirement's registered JSON deserializers and
    returns them as a JSON array.
    """
    return web.json_response(list(Requirement._JSON_DESERIALIZERS.keys()))
@routes.post('/resolve-apt', name='resolve-apt')
async def handle_apt(request):
    """Resolve a JSON-encoded requirement to apt package relations.

    Expects a JSON body with a 'requirement' entry and an optional
    'release'. Responds 400 when 'requirement' is absent, 404 for an
    unsupported release or an unknown requirement family, and otherwise
    a JSON list of apt package relation strings.
    """
    payload = await request.json()
    try:
        serialized = payload['requirement']
    except KeyError:
        raise web.HTTPBadRequest(text="json missing 'requirement' key")
    release = payload.get('release')
    if release and release not in SUPPORTED_RELEASES:
        return web.json_response(
            {"reason": "unsupported-release", "release": release},
            status=404)
    try:
        requirement = Requirement.from_json(serialized)
    except UnknownRequirementFamily as e:
        return web.json_response(
            {"reason": "family-unknown", "family": e.family}, status=404)
    resolved = await resolve_requirement_apt(
        request.app['apt_mgr'], requirement)
    return web.json_response([req.pkg_relation_str() for req in resolved])
@routes.get('/resolve-apt/{release}/{family}:{arg}', name='resolve-apt-simple')
async def handle_apt_simple(request):
    """Resolve a single requirement given as URL path components.

    The release, requirement family and argument come straight from the
    URL. Responds 404 for an unsupported release or an unknown family,
    otherwise a JSON list of apt package relation strings.
    """
    release = request.match_info['release']
    if release not in SUPPORTED_RELEASES:
        return web.json_response(
            {"reason": "unsupported-release",
             "release": release},
            status=404)
    family = request.match_info['family']
    arg = request.match_info['arg']
    try:
        requirement = Requirement.from_json((family, arg))
    except UnknownRequirementFamily as e:
        return web.json_response(
            {"reason": "family-unknown", "family": e.family}, status=404)
    resolved = await resolve_requirement_apt(
        request.app['apt_mgr'], requirement)
    return web.json_response([req.pkg_relation_str() for req in resolved])
def main():
    """Command-line entry point for the ognibuild dep server.

    Parses arguments, configures logging (optionally via Google Cloud
    Logging), opens a session (schroot or plain) and serves the aiohttp
    application until interrupted.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--listen-address', type=str, help='Listen address')
    parser.add_argument('--schroot', type=str, help='Schroot session to use')
    # Bug fix: the port was parsed with type=str ('--port 8080' -> '8080')
    # even though the default is the integer 9934; parse as int so
    # web.run_app always receives a numeric port.
    parser.add_argument('--port', type=int, help='Listen port', default=9934)
    parser.add_argument('--debug', action='store_true')
    parser.add_argument(
        "--gcp-logging", action='store_true', help='Use Google cloud logging.')
    args = parser.parse_args()

    if args.gcp_logging:
        import google.cloud.logging
        client = google.cloud.logging.Client()
        client.get_default_handler()
        client.setup_logging()
    else:
        log_level = logging.DEBUG if args.debug else logging.INFO
        logging.basicConfig(
            level=log_level,
            format="[%(asctime)s] %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S")

    if args.schroot:
        from .session.schroot import SchrootSession
        session = SchrootSession(args.schroot)
    else:
        from .session.plain import PlainSession
        session = PlainSession()
    with session:
        app = web.Application()
        app.router.add_routes(routes)
        # The apt manager queries package state inside the session's chroot.
        app['apt_mgr'] = AptManager.from_session(session)
        setup_metrics(app)
        web.run_app(app, host=args.listen_address, port=args.port)
if __name__ == '__main__':
sys.exit(main())

View file

@ -18,18 +18,19 @@
__all__ = [
"UnidentifiedError",
"DetailedFailure",
"create_dist",
"run_dist",
"create_dist_schroot",
"create_dist",
"dist",
]
import errno
from functools import partial
import logging
import os
import sys
from typing import Optional, List
from debian.deb822 import Deb822
from breezy.tree import Tree
from breezy.workingtree import WorkingTree
@ -37,16 +38,78 @@ from buildlog_consultant.common import (
NoSpaceOnDevice,
)
from debian.deb822 import Deb822
from . import DetailedFailure, UnidentifiedError
from .dist_catcher import DistNoTarball
from .fix_build import iterate_with_build_fixers
from .logs import LogManager, NoLogManager
from .buildsystem import NoBuildToolsFound
from .resolver import auto_resolver
from .session import Session
from .session.schroot import SchrootSession
def run_dist(session, buildsystems, resolver, fixers, target_directory, quiet=False):
DIST_LOG_FILENAME = 'dist.log'
def run_dist(session, buildsystems, resolver, fixers, target_directory,
quiet=False, log_manager=None):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
logging.info('Using dependency resolver: %s', resolver)
if log_manager is None:
log_manager = NoLogManager()
for buildsystem in buildsystems:
filename = iterate_with_build_fixers(fixers, log_manager.wrap(
partial(buildsystem.dist, session, resolver, target_directory,
quiet=quiet)))
return filename
raise NoBuildToolsFound()
def dist(session, export_directory, reldir, target_dir, log_manager, *,
version: Optional[str] = None, quiet=False):
from .fix_build import BuildFixer
from .buildsystem import detect_buildsystems
from .buildlog import InstallFixer
from .fixers import (
GitIdentityFixer,
MissingGoSumEntryFixer,
SecretGpgKeyFixer,
UnexpandedAutoconfMacroFixer,
GnulibDirectoryFixer,
)
if version:
# TODO(jelmer): Shouldn't include backend-specific code here
os.environ['SETUPTOOLS_SCM_PRETEND_VERSION'] = version
# TODO(jelmer): use scan_buildsystems to also look in subdirectories
buildsystems = list(detect_buildsystems(export_directory))
resolver = auto_resolver(session)
fixers: List[BuildFixer] = [
UnexpandedAutoconfMacroFixer(session, resolver),
GnulibDirectoryFixer(session),
MissingGoSumEntryFixer(session)]
fixers.append(InstallFixer(resolver))
if session.is_temporary:
# Only muck about with temporary sessions
fixers.extend([
GitIdentityFixer(session),
SecretGpgKeyFixer(session),
])
session.chdir(reldir)
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
@ -54,31 +117,34 @@ def run_dist(session, buildsystems, resolver, fixers, target_directory, quiet=Fa
logging.info('Using dependency resolver: %s', resolver)
for buildsystem in buildsystems:
filename = buildsystem.dist(
session, resolver, fixers, target_directory, quiet=quiet
)
filename = iterate_with_build_fixers(fixers, log_manager.wrap(
partial(
buildsystem.dist, session, resolver, target_dir,
quiet=quiet)))
return filename
raise NoBuildToolsFound()
# This is the function used by debianize()
def create_dist(
session: Session,
tree: Tree,
target_dir: str,
include_controldir: bool = True,
subdir: Optional[str] = None,
cleanup: bool = False,
log_manager: Optional[LogManager] = None,
version: Optional[str] = None,
) -> Optional[str]:
from .buildsystem import detect_buildsystems
from .buildlog import InstallFixer
from .fix_build import BuildFixer
from .fixers import (
GitIdentityFixer,
SecretGpgKeyFixer,
UnexpandedAutoconfMacroFixer,
)
"""Create a dist tarball for a tree.
Args:
session: session to run it
tree: Tree object to work in
target_dir: Directory to write tarball into
include_controldir: Whether to include the version control directory
subdir: subdirectory in the tree to operate in
"""
if subdir is None:
subdir = "package"
try:
@ -90,19 +156,11 @@ def create_dist(
raise DetailedFailure(1, ["mkdtemp"], NoSpaceOnDevice())
raise
# TODO(jelmer): use scan_buildsystems to also look in subdirectories
buildsystems = list(detect_buildsystems(export_directory))
resolver = auto_resolver(session)
fixers: List[BuildFixer] = [UnexpandedAutoconfMacroFixer(session, resolver)]
if log_manager is None:
log_manager = NoLogManager()
fixers.append(InstallFixer(resolver))
if session.is_temporary:
# Only muck about with temporary sessions
fixers.extend([GitIdentityFixer(session), SecretGpgKeyFixer(session)])
session.chdir(reldir)
return run_dist(session, buildsystems, resolver, fixers, target_dir)
return dist(session, export_directory, reldir, target_dir,
log_manager=log_manager, version=version)
def create_dist_schroot(
@ -113,30 +171,35 @@ def create_dist_schroot(
packaging_subpath: Optional[str] = None,
include_controldir: bool = True,
subdir: Optional[str] = None,
cleanup: bool = False,
log_manager: Optional[LogManager] = None,
) -> Optional[str]:
"""Create a dist tarball for a tree.
Args:
session: session to run it
tree: Tree object to work in
target_dir: Directory to write tarball into
include_controldir: Whether to include the version control directory
subdir: subdirectory in the tree to operate in
"""
with SchrootSession(chroot) as session:
if packaging_tree is not None:
from .debian import satisfy_build_deps
satisfy_build_deps(session, packaging_tree, packaging_subpath)
return create_dist(
session,
tree,
target_dir,
include_controldir=include_controldir,
subdir=subdir,
cleanup=cleanup,
)
session, tree, target_dir,
include_controldir=include_controldir, subdir=subdir,
log_manager=log_manager)
if __name__ == "__main__":
def main(argv=None):
import argparse
import breezy.bzr # noqa: F401
import breezy.git # noqa: F401
from breezy.export import export
parser = argparse.ArgumentParser()
parser = argparse.ArgumentParser(argv)
parser.add_argument(
"--chroot",
default="unstable-amd64-sbuild",
@ -157,8 +220,12 @@ if __name__ == "__main__":
"--target-directory", type=str, default="..", help="Target directory"
)
parser.add_argument("--verbose", action="store_true", help="Be verbose")
parser.add_argument("--mode", choices=["auto", "vcs", "buildsystem"],
type=str,
help="Mechanism to use to create buildsystem")
parser.add_argument(
"--include-controldir", action="store_true", help="Clone rather than export."
"--include-controldir", action="store_true",
help="Clone rather than export."
)
args = parser.parse_args()
@ -169,6 +236,10 @@ if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format="%(message)s")
tree = WorkingTree.open(args.directory)
packaging_tree: Optional[WorkingTree]
subdir: Optional[str]
if args.packaging_directory:
packaging_tree = WorkingTree.open(args.packaging_directory)
with packaging_tree.lock_read():
@ -179,30 +250,47 @@ if __name__ == "__main__":
packaging_tree = None
subdir = None
try:
ret = create_dist_schroot(
tree,
subdir=subdir,
target_dir=os.path.abspath(args.target_directory),
packaging_tree=packaging_tree,
chroot=args.chroot,
include_controldir=args.include_controldir,
)
except (NoBuildToolsFound, NotImplementedError):
logging.info("No build tools found, falling back to simple export.")
if args.mode == 'vcs':
export(tree, "dist.tar.gz", "tgz", None)
except NotImplementedError:
logging.info(
"Build system does not support dist tarball creation, "
"falling back to simple export."
)
export(tree, "dist.tar.gz", "tgz", None)
except UnidentifiedError as e:
logging.fatal("Unidentified error: %r", e.lines)
except DetailedFailure as e:
logging.fatal("Identified error during dist creation: %s", e.error)
except DistNoTarball:
logging.fatal("dist operation did not create a tarball")
else:
logging.info("Created %s", ret)
sys.exit(0)
elif args.mode in ('auto', 'buildsystem'):
try:
ret = create_dist_schroot(
tree,
subdir=subdir,
target_dir=os.path.abspath(args.target_directory),
packaging_tree=packaging_tree,
chroot=args.chroot,
include_controldir=args.include_controldir,
)
except NoBuildToolsFound:
if args.mode == 'buildsystem':
logging.fatal('No build tools found, unable to create tarball')
return 1
logging.info(
"No build tools found, falling back to simple export.")
export(tree, "dist.tar.gz", "tgz", None)
except NotImplementedError:
if args.mode == 'buildsystem':
logging.fatal('Unable to ask buildsystem for tarball')
return 1
logging.info(
"Build system does not support dist tarball creation, "
"falling back to simple export."
)
export(tree, "dist.tar.gz", "tgz", None)
except UnidentifiedError as e:
logging.fatal("Unidentified error: %r", e.lines)
return 1
except DetailedFailure as e:
logging.fatal("Identified error during dist creation: %s", e.error)
return 1
except DistNoTarball:
logging.fatal("dist operation did not create a tarball")
return 1
else:
logging.info("Created %s", ret)
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))

View file

@ -54,7 +54,8 @@ class DistCatcher(object):
@classmethod
def default(cls, directory):
return cls(
[os.path.join(directory, "dist"), directory, os.path.join(directory, "..")]
[os.path.join(directory, "dist"), directory,
os.path.join(directory, "..")]
)
def __enter__(self):
@ -87,19 +88,23 @@ class DistCatcher(object):
continue
if len(possible_new) == 1:
entry = possible_new[0]
logging.info("Found new tarball %s in %s.", entry.name, directory)
logging.info(
"Found new tarball %s in %s.", entry.name, directory)
self.files.append(entry.path)
return entry.name
elif len(possible_new) > 1:
logging.warning(
"Found multiple tarballs %r in %s.", possible_new, directory
"Found multiple tarballs %r in %s.", possible_new,
directory
)
self.files.extend([entry.path for entry in possible_new])
return possible_new[0].name
if len(possible_updated) == 1:
entry = possible_updated[0]
logging.info("Found updated tarball %s in %s.", entry.name, directory)
logging.info(
"Found updated tarball %s in %s.", entry.name,
directory)
self.files.append(entry.path)
return entry.name

View file

@ -17,7 +17,7 @@
from functools import partial
import logging
from typing import List, Tuple, Callable, Any, Optional
from typing import List, Tuple, Callable, Optional, TypeVar
from buildlog_consultant import Problem
from buildlog_consultant.common import (
@ -29,6 +29,14 @@ from . import DetailedFailure, UnidentifiedError
from .session import Session, run_with_tee
# Number of attempts to fix a build before giving up.
DEFAULT_LIMIT = 200
class FixerLimitReached(Exception):
"""The maximum number of fixes has been reached."""
class BuildFixer(object):
"""Build fixer."""
@ -44,7 +52,11 @@ class BuildFixer(object):
return self._fix(problem, phase)
def run_detecting_problems(session: Session, args: List[str], check_success=None, **kwargs):
def run_detecting_problems(
session: Session, args: List[str], check_success=None,
quiet=False, **kwargs) -> List[str]:
if not quiet:
logging.info('Running %r', args)
if check_success is None:
def check_success(retcode, contents):
return (retcode == 0)
@ -63,17 +75,26 @@ def run_detecting_problems(session: Session, args: List[str], check_success=None
logging.warning("Build failed with unidentified error:")
logging.warning("%s", match.line.rstrip("\n"))
else:
logging.warning("Build failed and unable to find cause. Giving up.")
logging.warning(
"Build failed and unable to find cause. Giving up.")
raise UnidentifiedError(retcode, args, lines, secondary=match)
raise DetailedFailure(retcode, args, error)
def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):
T = TypeVar('T')
def iterate_with_build_fixers(
fixers: List[BuildFixer],
cb: Callable[[], T], limit=DEFAULT_LIMIT) -> T:
"""Call cb() until there are no more DetailedFailures we can fix.
Args:
fixers: List of fixers to use to resolve issues
cb: Callable to run the build
limit: Maximum number of fixing attempts before giving up
"""
attempts = 0
fixed_errors = []
while True:
to_resolve = []
@ -86,9 +107,13 @@ def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):
logging.info("Identified error: %r", f.error)
if f.error in fixed_errors:
logging.warning(
"Failed to resolve error %r, it persisted. Giving up.", f.error
"Failed to resolve error %r, it persisted. Giving up.",
f.error
)
raise f
attempts += 1
if limit is not None and limit <= attempts:
raise FixerLimitReached(limit)
try:
resolved = resolve_error(f.error, None, fixers=fixers)
except DetailedFailure as n:
@ -100,23 +125,25 @@ def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):
else:
if not resolved:
logging.warning(
"Failed to find resolution for error %r. Giving up.", f.error
"Failed to find resolution for error %r. Giving up.",
f.error
)
raise f
fixed_errors.append(f.error)
def run_with_build_fixers(
session: Session, args: List[str], fixers: Optional[List[BuildFixer]], **kwargs
):
fixers: Optional[List[BuildFixer]], session: Session, args: List[str],
quiet: bool = False, **kwargs
) -> List[str]:
if fixers is None:
fixers = []
return iterate_with_build_fixers(
fixers, partial(run_detecting_problems, session, args, **kwargs)
)
fixers,
partial(run_detecting_problems, session, args, quiet=quiet, **kwargs))
def resolve_error(error, phase, fixers):
def resolve_error(error, phase, fixers) -> bool:
relevant_fixers = []
for fixer in fixers:
if fixer.can_fix(error):

View file

@ -21,8 +21,10 @@ from typing import Tuple
from buildlog_consultant import Problem
from buildlog_consultant.common import (
MissingGitIdentity,
MissingGoSumEntry,
MissingSecretGpgKey,
MissingAutoconfMacro,
MissingGnulibDirectory,
)
from ognibuild.requirements import AutoconfMacroRequirement
from ognibuild.resolver import UnsatisfiedRequirements
@ -30,6 +32,18 @@ from ognibuild.resolver import UnsatisfiedRequirements
from .fix_build import BuildFixer
class GnulibDirectoryFixer(BuildFixer):
    """Fixer for builds that report a missing gnulib directory.

    Runs the project's ./gnulib.sh helper inside the session; assumes
    such a script exists in the session's working directory — TODO
    confirm against the build systems that trigger this.
    """

    def __init__(self, session):
        self.session = session

    def can_fix(self, problem: Problem):
        # Only triggered by buildlog-consultant's MissingGnulibDirectory.
        return isinstance(problem, MissingGnulibDirectory)

    def _fix(self, problem: Problem, phase: Tuple[str, ...]):
        self.session.check_call(["./gnulib.sh"])
        return True
class GitIdentityFixer(BuildFixer):
def __init__(self, session):
self.session = session
@ -77,6 +91,26 @@ Passphrase: ""
return False
class MissingGoSumEntryFixer(BuildFixer):
    """Fixer for Go builds that fail on a missing go.sum entry.

    Downloads the offending module with ``go mod download``, which
    records the module checksum so the build can proceed.
    """

    def __init__(self, session):
        self.session = session

    def __repr__(self):
        return "%s()" % (type(self).__name__)

    def __str__(self):
        return "missing go.sum entry fixer"

    def can_fix(self, error):
        return isinstance(error, MissingGoSumEntry)

    def _fix(self, error, phase):
        from .fix_build import run_detecting_problems
        # Run through the problem detector so a failing download surfaces
        # a new, identifiable error instead of an opaque failure.
        run_detecting_problems(
            self.session, ["go", "mod", "download", error.package])
        return True
class UnexpandedAutoconfMacroFixer(BuildFixer):
def __init__(self, session, resolver):
self.session = session

View file

@ -21,11 +21,13 @@ def run_info(session, buildsystems, fixers=None):
print("%r:" % buildsystem)
deps = {}
try:
for kind, dep in buildsystem.get_declared_dependencies(session, fixers=fixers):
for kind, dep in buildsystem.get_declared_dependencies(
session, fixers=fixers):
deps.setdefault(kind, []).append(dep)
except NotImplementedError:
print(
"\tUnable to detect declared dependencies for this type of build system"
"\tUnable to detect declared dependencies for this type of "
"build system"
)
if deps:
print("\tDeclared dependencies:")
@ -35,9 +37,11 @@ def run_info(session, buildsystems, fixers=None):
print("\t\t\t%s" % dep)
print("")
try:
outputs = list(buildsystem.get_declared_outputs(session, fixers=fixers))
outputs = list(buildsystem.get_declared_outputs(
session, fixers=fixers))
except NotImplementedError:
print("\tUnable to detect declared outputs for this type of build system")
print("\tUnable to detect declared outputs for this type of "
"build system")
outputs = []
if outputs:
print("\tDeclared outputs:")

View file

@ -15,21 +15,34 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .buildsystem import NoBuildToolsFound, InstallTarget
from functools import partial
from typing import Optional
from .buildsystem import NoBuildToolsFound, InstallTarget
from .fix_build import iterate_with_build_fixers
from .logs import NoLogManager
def run_install(session, buildsystems, resolver, fixers, user: bool = False, prefix: Optional[str] = None):
def run_install(
session, buildsystems, resolver, fixers, *, user: bool = False,
prefix: Optional[str] = None, log_manager=None):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
if log_manager is None:
log_manager = NoLogManager()
install_target = InstallTarget()
install_target.user = user
install_target.prefix = prefix
for buildsystem in buildsystems:
buildsystem.install(session, resolver, fixers, install_target)
iterate_with_build_fixers(
fixers,
log_manager.wrap(
partial(buildsystem.install, session, resolver,
install_target)))
return
raise NoBuildToolsFound()

105
ognibuild/logs.py Normal file
View file

@ -0,0 +1,105 @@
#!/usr/bin/python
# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from contextlib import contextmanager
import subprocess
import logging
import os
import sys
@contextmanager
def copy_output(output_log: str, tee: bool = False):
    """Context manager that sends process stdout/stderr to ``output_log``.

    With ``tee=True`` output is piped through the external ``tee``
    command, so it reaches both the log file and the original console;
    with ``tee=False`` it is written to the file only.

    Redirection happens at the file-descriptor level (``os.dup2``), so
    output written by subprocesses is captured as well.
    """
    # Save duplicates of the original stdout/stderr descriptors so they
    # can be restored on exit.
    old_stdout = os.dup(sys.stdout.fileno())
    old_stderr = os.dup(sys.stderr.fileno())
    if tee:
        p = subprocess.Popen(["tee", output_log], stdin=subprocess.PIPE)
        newfd = p.stdin
    else:
        newfd = open(output_log, 'wb')
    os.dup2(newfd.fileno(), sys.stdout.fileno())  # type: ignore
    os.dup2(newfd.fileno(), sys.stderr.fileno())  # type: ignore
    try:
        yield
    finally:
        # Flush Python-level buffers before swapping descriptors back, so
        # buffered output lands in the log rather than the restored console.
        sys.stdout.flush()
        sys.stderr.flush()
        os.dup2(old_stdout, sys.stdout.fileno())
        os.dup2(old_stderr, sys.stderr.fileno())
        # NOTE(review): old_stdout/old_stderr duplicates are never closed,
        # and in tee mode the Popen is not waited on — possible fd leak /
        # zombie; confirm whether that is acceptable here.
        if newfd is not None:
            newfd.close()
@contextmanager
def redirect_output(to_file):
    """Temporarily point stdout/stderr at an already-open file object.

    Descriptor-level (``os.dup2``) redirection, restored on exit, so
    subprocess output is redirected too. ``to_file`` must have a real
    ``fileno()``.
    """
    # Flush first so pending console output is not written to the file.
    sys.stdout.flush()
    sys.stderr.flush()
    old_stdout = os.dup(sys.stdout.fileno())
    old_stderr = os.dup(sys.stderr.fileno())
    os.dup2(to_file.fileno(), sys.stdout.fileno())  # type: ignore
    os.dup2(to_file.fileno(), sys.stderr.fileno())  # type: ignore
    try:
        yield
    finally:
        # Flush so redirected output lands in to_file before restoring.
        sys.stdout.flush()
        sys.stderr.flush()
        os.dup2(old_stdout, sys.stdout.fileno())
        os.dup2(old_stderr, sys.stderr.fileno())
def rotate_logfile(source_path: str) -> None:
    """Move an existing file aside to the first free numbered slot.

    ``path/name`` becomes ``path/name.1`` (or ``.2``, ``.3``, … if lower
    numbers are taken). Does nothing when ``source_path`` does not exist.
    """
    if not os.path.exists(source_path):
        return
    directory_path, name = os.path.split(source_path)
    suffix = 1
    target_path = os.path.join(directory_path, "%s.%d" % (name, suffix))
    # Probe upward until we find an unused numbered name.
    while os.path.exists(target_path):
        suffix += 1
        target_path = os.path.join(directory_path, "%s.%d" % (name, suffix))
    os.rename(source_path, target_path)
    logging.debug("Storing previous build log at %s", target_path)
class LogManager(object):
    """Abstract interface for capturing the output of a build step."""

    def wrap(self, fn):
        """Return a callable that runs ``fn`` with log handling applied."""
        raise NotImplementedError(self.wrap)
class DirectoryLogManager(LogManager):
    """Log manager that writes output to a fixed log file path.

    ``mode`` selects how output is captured: 'copy' tees it to both the
    console and the file, 'redirect' sends it to the file only. Any
    other mode raises NotImplementedError when the wrapped callable is
    invoked. Existing logs at the path are rotated aside first.
    """

    def __init__(self, path, mode):
        self.path = path
        self.mode = mode

    def wrap(self, fn):
        def wrapped(*args, **kwargs):
            rotate_logfile(self.path)
            if self.mode not in ('copy', 'redirect'):
                raise NotImplementedError(self.mode)
            # 'copy' tees output to the console as well; 'redirect'
            # captures it into the file only.
            with copy_output(self.path, tee=(self.mode == 'copy')):
                return fn(*args, **kwargs)
        return wrapped
class NoLogManager(LogManager):
    """Log manager that captures nothing: callables pass through unchanged."""

    def wrap(self, fn):
        return fn

View file

@ -26,16 +26,19 @@ from . import Requirement
class PythonPackageRequirement(Requirement):
family = "python-package"
package: str
def __init__(self, package, python_version=None, specs=None, minimum_version=None):
super(PythonPackageRequirement, self).__init__("python-package")
def __init__(
self, package, python_version=None, specs=None,
minimum_version=None):
self.package = package
self.python_version = python_version
if minimum_version is not None:
specs = [(">=", minimum_version)]
if specs is None:
specs = []
if minimum_version is not None:
specs.append((">=", minimum_version))
self.specs = specs
def __repr__(self):
@ -53,11 +56,29 @@ class PythonPackageRequirement(Requirement):
return "python package: %s" % (self.package,)
@classmethod
def from_requirement_str(cls, text):
def from_requirement_str(cls, text, python_version=None):
from requirements.requirement import Requirement
req = Requirement.parse(text)
return cls(package=req.name, specs=req.specs)
return cls(
package=req.name, specs=req.specs, python_version=python_version)
def requirement_str(self):
if self.specs:
return '%s;%s' % (
self.package, ','.join([''.join(s) for s in self.specs]))
return self.package
@classmethod
def _from_json(cls, js):
if isinstance(js, str):
return cls.from_requirement_str(js)
return cls.from_requirement_str(js[0], python_version=js[1])
def _json(self):
if self.python_version:
return [self.requirement_str(), self.python_version]
return self.requirement_str()
def met(self, session):
if self.python_version == "cpython3":
@ -74,7 +95,8 @@ class PythonPackageRequirement(Requirement):
raise NotImplementedError
text = self.package + ",".join(["".join(spec) for spec in self.specs])
p = session.Popen(
[cmd, "-c", "import pkg_resources; pkg_resources.require(%r)" % text],
[cmd, "-c",
"import pkg_resources; pkg_resources.require(%r)" % text],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
@ -82,16 +104,33 @@ class PythonPackageRequirement(Requirement):
return p.returncode == 0
Requirement.register_json(PythonPackageRequirement)
class LatexPackageRequirement(Requirement):
    """Requirement for a LaTeX package, identified by package name.

    Serialized to/from JSON as the bare package name string.
    """

    family = "latex-package"

    def __init__(self, package: str):
        self.package = package

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.package)

    def _json(self):
        return self.package

    @classmethod
    def _from_json(cls, package):
        # Bug fix: @classmethod was missing, so Requirement.from_json()
        # would pass the JSON payload as `cls` and fail. Sibling
        # requirement classes in this module all declare it.
        return cls(package)
Requirement.register_json(LatexPackageRequirement)
class PhpPackageRequirement(Requirement):
family = "php-package"
def __init__(
self,
package: str,
@ -104,6 +143,13 @@ class PhpPackageRequirement(Requirement):
self.min_version = min_version
self.max_version = max_version
def _json(self):
return (self.package, self.channel, self.min_version, self.max_version)
@classmethod
def _from_json(cls, js):
return cls(*js)
def __repr__(self):
return "%s(%r, %r, %r, %r)" % (
type(self).__name__,
@ -114,14 +160,24 @@ class PhpPackageRequirement(Requirement):
)
Requirement.register_json(PhpPackageRequirement)
class BinaryRequirement(Requirement):
family = "binary"
binary_name: str
def __init__(self, binary_name):
super(BinaryRequirement, self).__init__("binary")
self.binary_name = binary_name
def _json(self):
return self.binary_name
@classmethod
def _from_json(cls, js):
return cls(js)
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.binary_name)
@ -135,14 +191,54 @@ class BinaryRequirement(Requirement):
return p.returncode == 0
Requirement.register_json(BinaryRequirement)
class PHPExtensionRequirement(Requirement):
    """Requirement for a PHP extension, identified by extension name."""

    family = "php-extension"
    # Name of the required PHP extension, e.g. "curl".
    extension: str

    def __init__(self, extension: str):
        self.extension = extension

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.extension)
class PytestPluginRequirement(Requirement):
    """Requirement for a pytest plugin, identified by plugin name."""

    family = "pytest-plugin"
    # Name of the required pytest plugin.
    plugin: str

    def __init__(self, plugin: str):
        self.plugin = plugin

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.plugin)
class VcsControlDirectoryAccessRequirement(Requirement):
vcs: List[str]
family = "vcs-access"
def __init__(self, vcs):
self.vcs = vcs
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.vcs)
class PerlModuleRequirement(Requirement):
module: str
filename: Optional[str]
inc: Optional[List[str]]
family = "perl-module"
def __init__(self, module, filename=None, inc=None):
super(PerlModuleRequirement, self).__init__("perl-module")
self.module = module
self.filename = filename
self.inc = inc
@ -158,10 +254,10 @@ class PerlModuleRequirement(Requirement):
class VagueDependencyRequirement(Requirement):
name: str
family = "vague"
minimum_version: Optional[str] = None
def __init__(self, name, minimum_version=None):
super(VagueDependencyRequirement, self).__init__("vague")
self.name = name
self.minimum_version = minimum_version
@ -169,19 +265,26 @@ class VagueDependencyRequirement(Requirement):
if " " not in self.name:
yield BinaryRequirement(self.name)
yield LibraryRequirement(self.name)
yield PkgConfigRequirement(self.name, minimum_version=self.minimum_version)
yield PkgConfigRequirement(
self.name, minimum_version=self.minimum_version)
if self.name.lower() != self.name:
yield BinaryRequirement(self.name.lower())
yield LibraryRequirement(self.name.lower())
yield PkgConfigRequirement(self.name.lower(), minimum_version=self.minimum_version)
from .resolver.apt import AptRequirement
yield AptRequirement.simple(self.name.lower(), minimum_version=self.minimum_version)
if self.name.lower().startswith('lib'):
devname = '%s-dev' % self.name.lower()
yield PkgConfigRequirement(
self.name.lower(), minimum_version=self.minimum_version)
try:
from .resolver.apt import AptRequirement
except ModuleNotFoundError:
pass
else:
devname = 'lib%s-dev' % self.name.lower()
yield AptRequirement.simple(devname, minimum_version=self.minimum_version)
yield AptRequirement.simple(
self.name.lower(), minimum_version=self.minimum_version)
if self.name.lower().startswith('lib'):
devname = '%s-dev' % self.name.lower()
else:
devname = 'lib%s-dev' % self.name.lower()
yield AptRequirement.simple(
devname, minimum_version=self.minimum_version)
def met(self, session):
for x in self.expand():
@ -192,19 +295,36 @@ class VagueDependencyRequirement(Requirement):
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.name)
def __str__(self):
if self.minimum_version:
return "%s >= %s" % (self.name, self.minimum_version)
return self.name
class NodePackageRequirement(Requirement):
package: str
family = "npm-package"
def __init__(self, package):
super(NodePackageRequirement, self).__init__("npm-package")
self.package = package
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.package)
class LuaModuleRequirement(Requirement):
module: str
family = "lua-module"
def __init__(self, module):
self.module = module
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.module)
class PerlPreDeclaredRequirement(Requirement):
name: str
@ -227,8 +347,9 @@ class PerlPreDeclaredRequirement(Requirement):
'auto_set_bugtracker': 'Module::Install::Bugtracker',
}
family = "perl-predeclared"
def __init__(self, name):
super(PerlPreDeclaredRequirement, self).__init__("perl-predeclared")
self.name = name
def lookup_module(self):
@ -242,9 +363,9 @@ class PerlPreDeclaredRequirement(Requirement):
class NodeModuleRequirement(Requirement):
module: str
family = "npm-module"
def __init__(self, module):
super(NodeModuleRequirement, self).__init__("npm-module")
self.module = module
def __repr__(self):
@ -255,41 +376,45 @@ class CargoCrateRequirement(Requirement):
crate: str
features: Set[str]
version: Optional[str]
api_version: Optional[str]
minimum_version: Optional[str]
family = "cargo-crate"
def __init__(self, crate, features=None, version=None):
super(CargoCrateRequirement, self).__init__("cargo-crate")
def __init__(self, crate, features=None, api_version=None,
minimum_version=None):
self.crate = crate
if features is None:
features = set()
self.features = features
self.version = version
self.api_version = api_version
self.minimum_version = minimum_version
def __repr__(self):
return "%s(%r, features=%r, version=%r)" % (
return "%s(%r, features=%r, api_version=%r, minimum_version=%r)" % (
type(self).__name__,
self.crate,
self.features,
self.version,
self.api_version,
self.minimum_version,
)
def __str__(self):
ret = "cargo crate: %s %s" % (
self.crate,
self.api_version or "")
if self.features:
return "cargo crate: %s %s (%s)" % (
self.crate,
self.version or "",
", ".join(sorted(self.features)),
)
else:
return "cargo crate: %s %s" % (self.crate, self.version or "")
ret += " (%s)" % (", ".join(sorted(self.features)))
if self.minimum_version:
ret += " (>= %s)" % self.minimum_version
return ret
class PkgConfigRequirement(Requirement):
module: str
family = "pkg-config"
def __init__(self, module, minimum_version=None):
super(PkgConfigRequirement, self).__init__("pkg-config")
self.module = module
self.minimum_version = minimum_version
@ -301,9 +426,9 @@ class PkgConfigRequirement(Requirement):
class PathRequirement(Requirement):
path: str
family = "path"
def __init__(self, path):
super(PathRequirement, self).__init__("path")
self.path = path
def __repr__(self):
@ -313,9 +438,9 @@ class PathRequirement(Requirement):
class CHeaderRequirement(Requirement):
header: str
family = "c-header"
def __init__(self, header):
super(CHeaderRequirement, self).__init__("c-header")
self.header = header
def __repr__(self):
@ -323,16 +448,15 @@ class CHeaderRequirement(Requirement):
class JavaScriptRuntimeRequirement(Requirement):
def __init__(self):
super(JavaScriptRuntimeRequirement, self).__init__("javascript-runtime")
family = "javascript-runtime"
class ValaPackageRequirement(Requirement):
package: str
family = "vala"
def __init__(self, package: str):
super(ValaPackageRequirement, self).__init__("vala")
self.package = package
@ -340,9 +464,9 @@ class RubyGemRequirement(Requirement):
gem: str
minimum_version: Optional[str]
family = "gem"
def __init__(self, gem: str, minimum_version: Optional[str]):
super(RubyGemRequirement, self).__init__("gem")
self.gem = gem
self.minimum_version = minimum_version
@ -351,12 +475,16 @@ class GoPackageRequirement(Requirement):
package: str
version: Optional[str]
family = "go-package"
def __init__(self, package: str, version: Optional[str] = None):
super(GoPackageRequirement, self).__init__("go-package")
self.package = package
self.version = version
def __repr__(self):
return "%s(%r, version=%r)" % (
type(self).__name__, self.package, self.version)
def __str__(self):
if self.version:
return "go package: %s (= %s)" % (self.package, self.version)
@ -366,9 +494,9 @@ class GoPackageRequirement(Requirement):
class GoRequirement(Requirement):
version: Optional[str]
family = "go"
def __init__(self, version: Optional[str] = None):
super(GoRequirement, self).__init__("go")
self.version = version
def __str__(self):
@ -378,18 +506,18 @@ class GoRequirement(Requirement):
class DhAddonRequirement(Requirement):
path: str
family = "dh-addon"
def __init__(self, path: str):
super(DhAddonRequirement, self).__init__("dh-addon")
self.path = path
class PhpClassRequirement(Requirement):
php_class: str
family = "php-class"
def __init__(self, php_class: str):
super(PhpClassRequirement, self).__init__("php-class")
self.php_class = php_class
@ -397,9 +525,9 @@ class RPackageRequirement(Requirement):
package: str
minimum_version: Optional[str]
family = "r-package"
def __init__(self, package: str, minimum_version: Optional[str] = None):
super(RPackageRequirement, self).__init__("r-package")
self.package = package
self.minimum_version = minimum_version
@ -412,7 +540,8 @@ class RPackageRequirement(Requirement):
def __str__(self):
if self.minimum_version:
return "R package: %s (>= %s)" % (self.package, self.minimum_version)
return "R package: %s (>= %s)" % (
self.package, self.minimum_version)
else:
return "R package: %s" % (self.package,)
@ -432,9 +561,9 @@ class OctavePackageRequirement(Requirement):
package: str
minimum_version: Optional[str]
family = "octave-package"
def __init__(self, package: str, minimum_version: Optional[str] = None):
super(OctavePackageRequirement, self).__init__("octave-package")
self.package = package
self.minimum_version = minimum_version
@ -447,7 +576,8 @@ class OctavePackageRequirement(Requirement):
def __str__(self):
if self.minimum_version:
return "Octave package: %s (>= %s)" % (self.package, self.minimum_version)
return "Octave package: %s (>= %s)" % (
self.package, self.minimum_version)
else:
return "Octave package: %s" % (self.package,)
@ -466,9 +596,9 @@ class OctavePackageRequirement(Requirement):
class LibraryRequirement(Requirement):
library: str
family = "lib"
def __init__(self, library: str):
super(LibraryRequirement, self).__init__("lib")
self.library = library
@ -476,9 +606,9 @@ class StaticLibraryRequirement(Requirement):
library: str
filename: str
family = "static-lib"
def __init__(self, library: str, filename: str):
super(StaticLibraryRequirement, self).__init__("static-lib")
self.library = library
self.filename = filename
@ -486,18 +616,18 @@ class StaticLibraryRequirement(Requirement):
class RubyFileRequirement(Requirement):
filename: str
family = "ruby-file"
def __init__(self, filename: str):
super(RubyFileRequirement, self).__init__("ruby-file")
self.filename = filename
class XmlEntityRequirement(Requirement):
url: str
family = "xml-entity"
def __init__(self, url: str):
super(XmlEntityRequirement, self).__init__("xml-entity")
self.url = url
@ -505,9 +635,9 @@ class SprocketsFileRequirement(Requirement):
content_type: str
name: str
family = "sprockets-file"
def __init__(self, content_type: str, name: str):
super(SprocketsFileRequirement, self).__init__("sprockets-file")
self.content_type = content_type
self.name = name
@ -515,27 +645,29 @@ class SprocketsFileRequirement(Requirement):
class JavaClassRequirement(Requirement):
classname: str
family = "java-class"
def __init__(self, classname: str):
super(JavaClassRequirement, self).__init__("java-class")
self.classname = classname
class CMakefileRequirement(Requirement):
filename: str
version: Optional[str]
family = "cmake-file"
def __init__(self, filename: str):
super(CMakefileRequirement, self).__init__("cmake-file")
def __init__(self, filename: str, version=None):
self.filename = filename
self.version = version
class HaskellPackageRequirement(Requirement):
package: str
family = "haskell-package"
def __init__(self, package: str, specs=None):
super(HaskellPackageRequirement, self).__init__("haskell-package")
self.package = package
self.specs = specs
@ -551,9 +683,9 @@ class MavenArtifactRequirement(Requirement):
artifact_id: str
version: Optional[str]
kind: Optional[str]
family = "maven-artifact"
def __init__(self, group_id, artifact_id, version=None, kind=None):
super(MavenArtifactRequirement, self).__init__("maven-artifact")
self.group_id = group_id
self.artifact_id = artifact_id
self.version = version
@ -566,6 +698,11 @@ class MavenArtifactRequirement(Requirement):
self.version,
)
def __repr__(self):
return "%s(group_id=%r, artifact_id=%r, version=%r, kind=%r)" % (
type(self).__name__, self.group_id, self.artifact_id,
self.version, self.kind)
@classmethod
def from_str(cls, text):
return cls.from_tuple(text.split(":"))
@ -587,17 +724,16 @@ class MavenArtifactRequirement(Requirement):
class GnomeCommonRequirement(Requirement):
def __init__(self):
super(GnomeCommonRequirement, self).__init__("gnome-common")
family = "gnome-common"
class JDKFileRequirement(Requirement):
jdk_path: str
filename: str
family = "jdk-file"
def __init__(self, jdk_path: str, filename: str):
super(JDKFileRequirement, self).__init__("jdk-file")
self.jdk_path = jdk_path
self.filename = filename
@ -607,55 +743,70 @@ class JDKFileRequirement(Requirement):
class JDKRequirement(Requirement):
def __init__(self):
super(JDKRequirement, self).__init__("jdk")
family = "jdk"
class JRERequirement(Requirement):
def __init__(self):
super(JRERequirement, self).__init__("jre")
family = "jre"
class QtModuleRequirement(Requirement):
family = "qt-module"
def __init__(self, module):
self.module = module
class QTRequirement(Requirement):
def __init__(self):
super(QTRequirement, self).__init__("qt")
family = "qt"
class X11Requirement(Requirement):
def __init__(self):
super(X11Requirement, self).__init__("x11")
family = "x11"
class CertificateAuthorityRequirement(Requirement):
family = "ca-cert"
def __init__(self, url):
super(CertificateAuthorityRequirement, self).__init__("ca-cert")
self.url = url
class PerlFileRequirement(Requirement):
filename: str
family = "perl-file"
def __init__(self, filename: str):
super(PerlFileRequirement, self).__init__("perl-file")
self.filename = filename
class AutoconfMacroRequirement(Requirement):
family = "autoconf-macro"
macro: str
def __init__(self, macro: str):
super(AutoconfMacroRequirement, self).__init__("autoconf-macro")
self.macro = macro
def _json(self):
return self.macro
@classmethod
def _from_json(cls, macro):
return cls(macro)
Requirement.register_json(AutoconfMacroRequirement)
class LibtoolRequirement(Requirement):
def __init__(self):
super(LibtoolRequirement, self).__init__("libtool")
family = "libtool"
class IntrospectionTypelibRequirement(Requirement):
family = "introspection-type-lib"
def __init__(self, library):
self.library = library
@ -665,9 +816,9 @@ class PythonModuleRequirement(Requirement):
module: str
python_version: Optional[str]
minimum_version: Optional[str]
family = "python-module"
def __init__(self, module, python_version=None, minimum_version=None):
super(PythonModuleRequirement, self).__init__("python-module")
self.module = module
self.python_version = python_version
self.minimum_version = minimum_version
@ -702,7 +853,25 @@ class PythonModuleRequirement(Requirement):
class BoostComponentRequirement(Requirement):
name: str
family = "boost-component"
def __init__(self, name):
super(BoostComponentRequirement, self).__init__("boost-component")
self.name = name
class KF5ComponentRequirement(Requirement):
name: str
family = "kf5-component"
def __init__(self, name):
self.name = name
class GnulibDirectoryRequirement(Requirement):
directory: str
family = "gnulib"
def __init__(self, directory):
self.directory = directory

View file

@ -18,8 +18,11 @@
import logging
import subprocess
from .. import UnidentifiedError
from typing import Optional, List, Type
from .. import UnidentifiedError, Requirement
from ..fix_build import run_detecting_problems
from ..session import Session
class UnsatisfiedRequirements(Exception):
@ -28,13 +31,22 @@ class UnsatisfiedRequirements(Exception):
class Resolver(object):
def install(self, requirements):
name: str
def __init__(self, session, user_local):
raise NotImplementedError(self.__init__)
def install(self, requirements: List[Requirement]):
raise NotImplementedError(self.install)
def resolve(self, requirement):
def resolve(self, requirement: Requirement) -> Optional[Requirement]:
raise NotImplementedError(self.resolve)
def explain(self, requirements):
def resolve_all(self, requirement: Requirement) -> List[Requirement]:
raise NotImplementedError(self.resolve_all)
def explain(self, requirements: List[Requirement]):
raise NotImplementedError(self.explain)
def env(self):
@ -42,13 +54,15 @@ class Resolver(object):
class CPANResolver(Resolver):
name = "cpan"
def __init__(self, session, user_local=False, skip_tests=True):
self.session = session
self.user_local = user_local
self.skip_tests = skip_tests
def __str__(self):
return "cpan"
return self.name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
@ -109,7 +123,8 @@ class TlmgrResolver(Resolver):
self.repository = repository
def __str__(self):
if self.repository.startswith('http://') or self.repository.startswith('https://'):
if (self.repository.startswith('http://')
or self.repository.startswith('https://')):
return 'tlmgr(%r)' % self.repository
else:
return self.repository
@ -154,7 +169,8 @@ class TlmgrResolver(Resolver):
try:
run_detecting_problems(self.session, cmd, user=user)
except UnidentifiedError as e:
if "tlmgr: user mode not initialized, please read the documentation!" in e.lines:
if ("tlmgr: user mode not initialized, "
"please read the documentation!") in e.lines:
self.session.check_call(['tlmgr', 'init-usertree'])
else:
raise
@ -163,6 +179,7 @@ class TlmgrResolver(Resolver):
class CTANResolver(TlmgrResolver):
name = "ctan"
def __init__(self, session, user_local=False):
super(CTANResolver, self).__init__(
@ -170,13 +187,16 @@ class CTANResolver(TlmgrResolver):
class RResolver(Resolver):
name: str
def __init__(self, session, repos, user_local=False):
self.session = session
self.repos = repos
self.user_local = user_local
def __str__(self):
return "cran"
return self.name
def __repr__(self):
return "%s(%r, %r)" % (type(self).__name__, self.session, self.repos)
@ -221,12 +241,14 @@ class RResolver(Resolver):
class OctaveForgeResolver(Resolver):
name = "octave-forge"
def __init__(self, session, user_local=False):
self.session = session
self.user_local = user_local
def __str__(self):
return "octave-forge"
return self.name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
@ -267,6 +289,8 @@ class OctaveForgeResolver(Resolver):
class CRANResolver(RResolver):
name = "cran"
def __init__(self, session, user_local=False):
super(CRANResolver, self).__init__(
session, "http://cran.r-project.org", user_local=user_local
@ -274,19 +298,25 @@ class CRANResolver(RResolver):
class BioconductorResolver(RResolver):
name = "bioconductor"
def __init__(self, session, user_local=False):
super(BioconductorResolver, self).__init__(
session, "https://hedgehog.fhcrc.org/bioconductor", user_local=user_local
session, "https://hedgehog.fhcrc.org/bioconductor",
user_local=user_local
)
class HackageResolver(Resolver):
name = "hackage"
def __init__(self, session, user_local=False):
self.session = session
self.user_local = user_local
def __str__(self):
return "hackage"
return self.name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
@ -295,7 +325,8 @@ class HackageResolver(Resolver):
extra_args = []
if self.user_local:
extra_args.append("--user")
return ["cabal", "install"] + extra_args + [req.package for req in reqs]
return (["cabal", "install"] + extra_args
+ [req.package for req in reqs])
def install(self, requirements):
from ..requirements import HaskellPackageRequirement
@ -329,12 +360,15 @@ class HackageResolver(Resolver):
class PypiResolver(Resolver):
name = "pypi"
def __init__(self, session, user_local=False):
self.session = session
self.user_local = user_local
def __str__(self):
return "pypi"
return self.name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
@ -380,12 +414,15 @@ class PypiResolver(Resolver):
class GoResolver(Resolver):
name = "go"
def __init__(self, session, user_local):
self.session = session
self.user_local = user_local
def __str__(self):
return "go"
return self.name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
@ -426,17 +463,26 @@ NPM_COMMAND_PACKAGES = {
"del-cli": "del-cli",
"husky": "husky",
"cross-env": "cross-env",
"xo": "xo",
"standard": "standard",
"jshint": "jshint",
"if-node-version": "if-node-version",
"babel-cli": "babel",
"c8": "c8",
"prettier-standard": "prettier-standard",
}
class NpmResolver(Resolver):
name = "npm"
def __init__(self, session, user_local=False):
self.session = session
self.user_local = user_local
# TODO(jelmer): Handle user_local
def __str__(self):
return "npm"
return self.name
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session)
@ -472,7 +518,10 @@ class NpmResolver(Resolver):
if not isinstance(requirement, NodePackageRequirement):
missing.append(requirement)
continue
cmd = ["npm", "-g", "install", requirement.package]
cmd = ["npm", "install"]
if not self.user_local:
cmd.append('-g')
cmd.append(requirement.package)
logging.info("npm: running %r", cmd)
run_detecting_problems(self.session, cmd, user=user)
if missing:
@ -529,7 +578,7 @@ class StackedResolver(Resolver):
raise UnsatisfiedRequirements(requirements)
NATIVE_RESOLVER_CLS = [
NATIVE_RESOLVER_CLS: List[Type[Resolver]] = [
CPANResolver,
CTANResolver,
PypiResolver,
@ -543,24 +592,70 @@ NATIVE_RESOLVER_CLS = [
def native_resolvers(session, user_local):
return StackedResolver([kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])
return StackedResolver(
[kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])
def auto_resolver(session, explain=False):
def select_resolvers(session, user_local, resolvers,
dep_server_url=None) -> Optional[Resolver]:
selected = []
for resolver in resolvers:
for kls in NATIVE_RESOLVER_CLS:
if kls.name == resolver:
selected.append(kls(session, user_local))
break
else:
if resolver == 'native':
selected.extend([
kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])
elif resolver == 'apt':
if user_local:
raise NotImplementedError(
'user local not supported for apt')
if dep_server_url:
from .dep_server import DepServerAptResolver
selected.append(DepServerAptResolver.from_session(
session, dep_server_url))
else:
from .apt import AptResolver
selected.append(AptResolver.from_session(session))
else:
raise KeyError(resolver)
if len(selected) == 0:
return None
if len(selected) == 1:
return selected[0]
return StackedResolver(selected)
def auto_resolver(session: Session, explain: bool = False,
system_wide: Optional[bool] = None,
dep_server_url: Optional[str] = None):
# if session is SchrootSession or if we're root, use apt
from .apt import AptResolver
from ..session.schroot import SchrootSession
from ..session import get_user
user = get_user(session)
resolvers = []
# TODO(jelmer): Check VIRTUAL_ENV, and prioritize PypiResolver if
# present?
if isinstance(session, SchrootSession) or user == "root" or explain:
user_local = False
else:
user_local = True
if not user_local:
resolvers.append(AptResolver.from_session(session))
resolvers.extend([kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])
if system_wide is None:
# TODO(jelmer): Check VIRTUAL_ENV, and prioritize PypiResolver if
# present?
if isinstance(session, SchrootSession) or user == "root" or explain:
system_wide = True
else:
system_wide = False
if system_wide:
try:
from .apt import AptResolver
except ModuleNotFoundError:
pass
else:
if dep_server_url:
from .dep_server import DepServerAptResolver
resolvers.append(
DepServerAptResolver.from_session(session, dep_server_url))
else:
resolvers.append(AptResolver.from_session(session))
resolvers.extend([kls(session, not system_wide)
for kls in NATIVE_RESOLVER_CLS])
return StackedResolver(resolvers)

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,88 @@
#!/usr/bin/python3
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import asyncio
import logging
from typing import List
from aiohttp import (
ClientSession,
ClientConnectorError,
ClientResponseError,
ServerDisconnectedError,
)
from yarl import URL
from .. import Requirement, USER_AGENT
from ..debian.apt import AptManager
from .apt import AptRequirement, AptResolver
class DepServerError(Exception):
def __init__(self, inner):
self.inner = inner
async def resolve_apt_requirement_dep_server(
url: str, req: Requirement) -> List[AptRequirement]:
"""Resolve a requirement to an APT requirement with a dep server.
Args:
url: Dep server URL
req: Requirement to resolve
Returns:
List of Apt requirements.
"""
async with ClientSession() as session:
try:
async with session.post(URL(url) / "resolve-apt", headers={
'User-Agent': USER_AGENT},
json={'requirement': req.json()},
raise_for_status=True) as resp:
return [
AptRequirement._from_json(e) for e in await resp.json()]
except (ClientConnectorError, ClientResponseError,
ServerDisconnectedError) as e:
logging.warning('Unable to contact dep server: %r', e)
raise DepServerError(e)
class DepServerAptResolver(AptResolver):
def __init__(self, apt, dep_server_url, tie_breakers=None):
super(DepServerAptResolver, self).__init__(
apt, tie_breakers=tie_breakers)
self.dep_server_url = dep_server_url
@classmethod
def from_session(cls, session, dep_server_url, tie_breakers=None):
return cls(
AptManager.from_session(session), dep_server_url,
tie_breakers=tie_breakers)
def resolve_all(self, req: Requirement):
try:
req.json()
except NotImplementedError:
return super(DepServerAptResolver, self).resolve_all(req)
try:
return asyncio.run(
resolve_apt_requirement_dep_server(self.dep_server_url, req))
except DepServerError:
logging.warning('Falling back to resolving error locally')
return super(DepServerAptResolver, self).resolve_all(req)

View file

@ -69,12 +69,14 @@ class Session(object):
raise NotImplementedError(self.check_output)
def Popen(
self, argv, cwd: Optional[str] = None, user: Optional[str] = None, **kwargs
self, argv, cwd: Optional[str] = None, user: Optional[str] = None,
**kwargs
):
raise NotImplementedError(self.Popen)
def call(
self, argv: List[str], cwd: Optional[str] = None, user: Optional[str] = None
self, argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None
):
raise NotImplementedError(self.call)
@ -100,17 +102,26 @@ class Session(object):
def external_path(self, path: str) -> str:
raise NotImplementedError
def rmtree(self, path: str) -> str:
raise NotImplementedError
is_temporary: bool
class SessionSetupFailure(Exception):
"""Session failed to be set up."""
def __init__(self, reason, errlines=None):
self.reason = reason
self.errlines = errlines
def run_with_tee(session: Session, args: List[str], **kwargs):
def run_with_tee(session: Session,
args: List[str], **kwargs) -> Tuple[int, List[str]]:
if "stdin" not in kwargs:
kwargs["stdin"] = subprocess.DEVNULL
p = session.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
p = session.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
contents = []
while p.poll() is None:
line = p.stdout.readline()
@ -121,7 +132,8 @@ def run_with_tee(session: Session, args: List[str], **kwargs):
def get_user(session):
return session.check_output(["echo", "$USER"], cwd="/").decode().strip()
return session.check_output(
["sh", "-c", "echo $USER"], cwd="/").decode().strip()
def which(session, name):

View file

@ -20,6 +20,7 @@ from . import Session, NoSessionOpen, SessionAlreadyOpen
import contextlib
import os
import shutil
import subprocess
import tempfile
from typing import Optional, Dict, List
@ -72,7 +73,8 @@ class PlainSession(Session):
close_fds: bool = True,
):
argv = self._prepend_user(user, argv)
return subprocess.check_call(argv, cwd=cwd, env=env, close_fds=close_fds)
return subprocess.check_call(
argv, cwd=cwd, env=env, close_fds=close_fds)
def check_output(
self,
@ -84,13 +86,19 @@ class PlainSession(Session):
argv = self._prepend_user(user, argv)
return subprocess.check_output(argv, cwd=cwd, env=env)
def Popen(self, args, stdout=None, stderr=None, stdin=None, user=None, cwd=None, env=None):
def Popen(
self, args, stdout=None, stderr=None, stdin=None, user=None,
cwd=None, env=None):
args = self._prepend_user(user, args)
return subprocess.Popen(args, stdout=stdout, stderr=stderr, stdin=stdin, cwd=cwd, env=env)
return subprocess.Popen(
args, stdout=stdout, stderr=stderr, stdin=stdin, cwd=cwd, env=env)
def exists(self, path):
return os.path.exists(path)
def rmtree(self, path):
return shutil.rmtree(path)
def scandir(self, path):
return os.scandir(path)

View file

@ -66,25 +66,38 @@ class SchrootSession(Session):
if line.startswith(b"E: "):
logging.error("%s", line[3:].decode(errors="replace"))
logging.warning(
"Failed to close schroot session %s, leaving stray.", self.session_id
"Failed to close schroot session %s, leaving stray.",
self.session_id
)
self.session_id = None
return False
self.session_id = None
self._location = None
return True
def __enter__(self) -> "Session":
if self.session_id is not None:
raise SessionAlreadyOpen(self)
stderr = tempfile.TemporaryFile()
try:
self.session_id = (
subprocess.check_output(["schroot", "-c", self.chroot, "-b"])
subprocess.check_output(
["schroot", "-c", self.chroot, "-b"], stderr=stderr)
.strip()
.decode()
)
except subprocess.CalledProcessError:
# TODO(jelmer): Capture stderr and forward in SessionSetupFailure
raise SessionSetupFailure()
stderr.seek(0)
errlines = stderr.readlines()
if len(errlines) == 1:
raise SessionSetupFailure(
errlines[0].rstrip().decode(), errlines=errlines)
elif len(errlines) == 0:
raise SessionSetupFailure(
"No output from schroot", errlines=errlines)
else:
raise SessionSetupFailure(
errlines[-1].decode(), errlines=errlines)
logging.info(
"Opened schroot session %s (from %s)", self.session_id, self.chroot
)
@ -156,24 +169,28 @@ class SchrootSession(Session):
env: Optional[Dict[str, str]] = None,
) -> bytes:
try:
return subprocess.check_output(self._run_argv(argv, cwd, user, env=env))
return subprocess.check_output(
self._run_argv(argv, cwd, user, env=env))
except subprocess.CalledProcessError as e:
raise subprocess.CalledProcessError(e.returncode, argv)
def Popen(
self, argv, cwd: Optional[str] = None, user: Optional[str] = None, **kwargs
self, argv, cwd: Optional[str] = None, user: Optional[str] = None,
**kwargs
):
return subprocess.Popen(self._run_argv(argv, cwd, user), **kwargs)
def call(
self, argv: List[str], cwd: Optional[str] = None, user: Optional[str] = None
self, argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None
):
return subprocess.call(self._run_argv(argv, cwd, user))
def create_home(self) -> None:
"""Create the user's home directory."""
home = (
self.check_output(["sh", "-c", "echo $HOME"], cwd="/").decode().rstrip("\n")
self.check_output(
["sh", "-c", "echo $HOME"], cwd="/").decode().rstrip("\n")
)
user = (
self.check_output(["sh", "-c", "echo $LOGNAME"], cwd="/")
@ -189,7 +206,8 @@ class SchrootSession(Session):
return os.path.join(self.location, path.lstrip("/"))
if self._cwd is None:
raise ValueError("no cwd set")
return os.path.join(self.location, os.path.join(self._cwd, path).lstrip("/"))
return os.path.join(
self.location, os.path.join(self._cwd, path).lstrip("/"))
def exists(self, path: str) -> bool:
fullpath = self.external_path(path)
@ -203,13 +221,17 @@ class SchrootSession(Session):
fullpath = self.external_path(path)
return os.mkdir(fullpath)
def rmtree(self, path: str):
import shutil
fullpath = self.external_path(path)
return shutil.rmtree(fullpath)
def setup_from_vcs(
self, tree, include_controldir: Optional[bool] = None, subdir="package"
):
from ..vcs import dupe_vcs_tree, export_vcs_tree
build_dir = os.path.join(self.location, "build")
directory = tempfile.mkdtemp(dir=build_dir)
reldir = "/" + os.path.relpath(directory, self.location)
@ -228,7 +250,7 @@ class SchrootSession(Session):
directory = tempfile.mkdtemp(dir=build_dir)
reldir = "/" + os.path.relpath(directory, self.location)
export_directory = os.path.join(directory, subdir)
shutil.copytree(path, export_directory, dirs_exist_ok=True)
shutil.copytree(path, export_directory, symlinks=True)
return export_directory, os.path.join(reldir, subdir)
is_temporary = True

View file

@ -15,16 +15,25 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from functools import partial
from .buildsystem import NoBuildToolsFound
from .fix_build import iterate_with_build_fixers
from .logs import NoLogManager
def run_test(session, buildsystems, resolver, fixers):
def run_test(session, buildsystems, resolver, fixers, log_manager=None):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
if log_manager is None:
log_manager = NoLogManager()
for buildsystem in buildsystems:
buildsystem.test(session, resolver, fixers)
iterate_with_build_fixers(
fixers, log_manager.wrap(
partial(buildsystem.test, session, resolver)))
return
raise NoBuildToolsFound()

253
ognibuild/upstream.py Normal file
View file

@ -0,0 +1,253 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from dataclasses import dataclass, field
from typing import Optional, Dict, Any
from debian.changelog import Version
import logging
import re
from . import Requirement
from .requirements import (
CargoCrateRequirement,
GoPackageRequirement,
PythonPackageRequirement,
)
from .resolver.apt import AptRequirement, OneOfRequirement
@dataclass
class UpstreamInfo:
    """Information about an upstream project.

    Carries what is needed to locate and package an upstream: a suggested
    package name, optional VCS branch location, tarball URL, version,
    build system name and free-form metadata.
    """

    # Suggested package name, if known.
    name: Optional[str]
    buildsystem: Optional[str] = None
    branch_url: Optional[str] = None
    branch_subpath: Optional[str] = None
    tarball_url: Optional[str] = None
    version: Optional[str] = None
    # Extra key/value metadata; NOTE: not included in json() output.
    metadata: Dict[str, Any] = field(default_factory=dict)

    def json(self):
        """Return a JSON-serializable dict of the core fields."""
        return {
            key: getattr(self, key)
            for key in ('name', 'buildsystem', 'branch_url',
                        'branch_subpath', 'tarball_url', 'version')
        }
def go_base_name(package):
    """Derive the Debian-style base name for a Go import path.

    E.g. "github.com/chzyer/readline" -> "github-chzyer-readline", as used
    in "golang-<base>-dev" binary package names (cf. the inverse mapping in
    apt_to_go_requirement, which splits the name on "-").

    :param package: Go import path, e.g. "github.com/foo/bar"
    :return: lowercase, dash-separated base name
    """
    (hostname, path) = package.split('/', 1)
    # Abbreviate well-known hosts the way Debian Go packages do.
    if hostname == "github.com":
        hostname = "github"
    if hostname == "gopkg.in":
        hostname = "gopkg"
    path = path.rstrip('/').replace("/", "-")
    if path.endswith('.git'):
        path = path[:-4]
    # Join host and path with "-" so apt_to_go_requirement can split the
    # name on "-" again; concatenating them directly (the previous
    # behavior) produced un-round-trippable names like
    # "githubchzyer-readline".
    return (hostname + '-' + path).replace("_", "-").lower()
def load_crate_info(crate):
    """Fetch metadata for a crate from the crates.io API.

    :param crate: crate name
    :return: parsed JSON dict, or None if the crate does not exist
    :raise urllib.error.HTTPError: for HTTP failures other than 404
    """
    import urllib.error
    from urllib.request import urlopen, Request
    import json
    http_url = 'https://crates.io/api/v1/crates/%s' % crate
    headers = {'User-Agent': 'debianize', 'Accept': 'application/json'}
    try:
        # HTTPError is raised by urlopen(), so the request must be inside
        # the try block; previously only json.loads() was wrapped, which
        # made the 404 handling below unreachable.
        http_contents = urlopen(Request(http_url, headers=headers)).read()
    except urllib.error.HTTPError as e:
        if e.code == 404:
            logging.warning('No crate %r', crate)
            return None
        raise
    return json.loads(http_contents)
def find_python_package_upstream(requirement):
    """Look up upstream project info for a Python package on PyPI.

    :param requirement: PythonPackageRequirement with a ``package`` name
    :return: UpstreamInfo, or None if the project does not exist on PyPI
    :raise urllib.error.HTTPError: for HTTP failures other than 404
    """
    import urllib.error
    from urllib.request import urlopen, Request
    import json
    http_url = 'https://pypi.org/pypi/%s/json' % requirement.package
    headers = {'User-Agent': 'ognibuild', 'Accept': 'application/json'}
    try:
        http_contents = urlopen(
            Request(http_url, headers=headers)).read()
    except urllib.error.HTTPError as e:
        if e.code == 404:
            logging.warning('No pypi project %r', requirement.package)
            return None
        raise
    pypi_data = json.loads(http_contents)
    upstream_branch = None
    # project_urls is null (not {}) in the PyPI JSON API response when a
    # project declares no URLs, so guard before iterating.
    for name, url in (pypi_data['info'].get('project_urls') or {}).items():
        if name.lower() in ('github', 'repository'):
            upstream_branch = url
    tarball_url = None
    for url_data in pypi_data['urls']:
        # The PyPI JSON API field is 'packagetype' (values "sdist",
        # "bdist_wheel"); the previous 'package_type' key never matched,
        # so tarball_url was always None.
        if url_data.get('packagetype') == 'sdist':
            tarball_url = url_data['url']
    return UpstreamInfo(
        branch_url=upstream_branch, branch_subpath='',
        name='python-%s' % pypi_data['info']['name'],
        tarball_url=tarball_url)
def find_go_package_upstream(requirement):
    """Map a Go package requirement to upstream project info.

    Only github.com-hosted packages are handled; any other host yields
    None.
    """
    package = requirement.package
    if not package.startswith('github.com/'):
        return None
    # The repository root is the first three path components:
    # github.com/<owner>/<repo>.
    repo_root = '/'.join(package.split('/')[:3])
    return UpstreamInfo(
        name='golang-%s' % go_base_name(package),
        branch_url='https://%s' % repo_root,
        branch_subpath='')
def find_cargo_crate_upstream(requirement):
    """Look up upstream info for a Cargo crate on crates.io.

    When the requirement pins an API version (e.g. "0.3"), the highest
    published release within that line is selected and the Debian-style
    package name gets a semver-pair suffix (e.g. "rust-foo-0.3").

    :param requirement: CargoCrateRequirement
    :return: UpstreamInfo, or None if the crate does not exist
    """
    import semver
    from debmutate.debcargo import semver_pair
    data = load_crate_info(requirement.crate)
    if data is None:
        return None
    upstream_branch = data['crate']['repository']
    # Debian rust package names use dashes where crate names use
    # underscores.
    name = 'rust-' + data['crate']['name'].replace('_', '-')
    version = None
    if requirement.api_version is not None:
        for version_info in data['versions']:
            # Keep only releases in the requested API line: either an
            # exact match or a "api_version." prefix (e.g. "0.3" matches
            # "0.3.1" but not "0.30.0").
            if (not version_info['num'].startswith(
                    requirement.api_version + '.')
                    and not version_info['num'] == requirement.api_version):
                continue
            if version is None:
                version = semver.VersionInfo.parse(version_info['num'])
            else:
                # NOTE(review): semver.max_ver is documented for version
                # strings; passing VersionInfo objects appears to work but
                # should be confirmed against the semver package docs.
                version = semver.max_ver(
                    version, semver.VersionInfo.parse(version_info['num']))
        if version is None:
            logging.warning(
                'Unable to find version of crate %s '
                'that matches API version %s',
                name, requirement.api_version)
        else:
            # Embed the semver pair in the package name, debcargo-style.
            name += '-' + semver_pair(str(version))
    return UpstreamInfo(
        branch_url=upstream_branch, branch_subpath=None,
        name=name, version=str(version) if version else None,
        metadata={'X-Cargo-Crate': data['crate']['name']},
        buildsystem='cargo')
def apt_to_cargo_requirement(m, rels):
    """Convert a matched librust-* binary package name into a
    CargoCrateRequirement.

    :param m: regex match over the binary package name
        (groups: crate, api version, optional "+feature" suffix)
    :param rels: list of (operator, version) Debian version relations
    """
    feature_suffix = m.group(3)
    features = set(feature_suffix[1:].split('-')) if feature_suffix else set()
    minimum_version = None
    if rels:
        # Only a single ">=" relation translates cleanly into a minimum
        # upstream version.
        if len(rels) == 1 and rels[0][0] == '>=':
            minimum_version = Version(rels[0][1]).upstream_version
        else:
            logging.warning('Unable to parse Debian version %r', rels)
    return CargoCrateRequirement(
        m.group(1), api_version=m.group(2),
        features=features, minimum_version=minimum_version)
def apt_to_python_requirement(m, rels):
    """Convert a matched python*-* binary package name into a
    PythonPackageRequirement.

    :param m: regex match over the binary package name
        (groups: python version prefix, package name)
    :param rels: list of (operator, version) Debian version relations
    """
    minimum_version = None
    if rels:
        # Only a single ">=" relation maps onto a minimum version.
        if len(rels) == 1 and rels[0][0] == '>=':
            minimum_version = Version(rels[0][1]).upstream_version
        else:
            logging.warning('Unable to parse Debian version %r', rels)
    return PythonPackageRequirement(
        m.group(2), python_version=(m.group(1) or None),
        minimum_version=minimum_version)
def apt_to_go_requirement(m, rels):
    """Convert a matched golang-*-dev binary package name into a
    GoPackageRequirement.

    :param m: regex match over the binary package name
    :param rels: list of (operator, version) Debian version relations
    """
    segments = m.group(1).split('-')
    # Undo the hostname abbreviation used in Debian Go package names.
    hostname_map = {'github': 'github.com', 'gopkg': 'gopkg.in'}
    segments[0] = hostname_map.get(segments[0], segments[0])
    version = None
    if rels:
        # Only a single "=" relation pins an upstream version.
        if len(rels) == 1 and rels[0][0] == '=':
            version = Version(rels[0][1]).upstream_version
        else:
            logging.warning('Unable to parse Debian version %r', rels)
    return GoPackageRequirement('/'.join(segments), version=version)
# (regex, converter) pairs mapping Debian binary package names back to
# upstream requirements.  Each converter receives the regex match and a
# list of (operator, version) Debian version relations.
BINARY_PACKAGE_UPSTREAM_MATCHERS = [
    (r'librust-(.*)-([^-+]+)(\+.*?)-dev', apt_to_cargo_requirement),
    (r'python([0-9.]*)-(.*)', apt_to_python_requirement),
    (r'golang-(.*)-dev', apt_to_go_requirement),
]

# Same table with the patterns pre-compiled for matching.
_BINARY_PACKAGE_UPSTREAM_MATCHERS = [
    (re.compile(r), fn) for (r, fn) in BINARY_PACKAGE_UPSTREAM_MATCHERS]
def find_apt_upstream(requirement: AptRequirement) -> Optional[UpstreamInfo]:
    """Resolve an apt requirement back to upstream project info.

    Walks every alternative in every relation; the first binary package
    name matching a known Debian naming convention is converted back into
    an upstream requirement and looked up recursively.  A warning is
    logged for each name that matches no convention.
    """
    for alternatives in requirement.relations:
        for relation in alternatives:
            for pattern, converter in _BINARY_PACKAGE_UPSTREAM_MATCHERS:
                match = pattern.fullmatch(relation['name'])
                if match:
                    rels = (
                        [relation['version']]
                        if relation['version'] else [])
                    return find_upstream(converter(match, rels))
            logging.warning(
                'Unable to map binary package name %s to upstream',
                relation['name'])
    return None
def find_or_upstream(requirement: OneOfRequirement) -> Optional[UpstreamInfo]:
    """Return upstream info for the first resolvable alternative.

    Alternatives are tried lazily in order; None if none resolves.
    """
    return next(
        (info
         for info in map(find_upstream, requirement.elements)
         if info is not None),
        None)
# Dispatch table: requirement family name -> finder function that knows
# how to locate upstream information for that kind of requirement.
UPSTREAM_FINDER = {
    'python-package': find_python_package_upstream,
    'go-package': find_go_package_upstream,
    'cargo-crate': find_cargo_crate_upstream,
    'apt': find_apt_upstream,
    'or': find_or_upstream,
}
def find_upstream(requirement: Requirement) -> Optional[UpstreamInfo]:
    """Find upstream project information for a requirement.

    :param requirement: requirement to look up
    :return: UpstreamInfo, or None if the requirement family is not
        supported or no upstream could be found
    """
    # Look the finder up with .get() instead of catching KeyError around
    # the whole call: the previous form also swallowed KeyErrors raised
    # *inside* the finder (e.g. a missing key in remote JSON data),
    # silently masking real failures.
    finder = UPSTREAM_FINDER.get(requirement.family)
    if finder is None:
        return None
    return finder(requirement)

View file

@ -43,7 +43,8 @@ def dupe_vcs_tree(tree, directory):
tree = tree.basis_tree()
try:
result = tree._repository.controldir.sprout(
directory, create_tree_if_local=True, revision_id=tree.get_revision_id()
directory, create_tree_if_local=True,
revision_id=tree.get_revision_id()
)
except OSError as e:
if e.errno == errno.ENOSPC:

3
pyproject.toml Normal file
View file

@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

View file

@ -1,14 +0,0 @@
name: "ognibuild"
timeout_days: 5
tag_name: "v$VERSION"
verify_command: "python3 setup.py test"
update_version {
path: "setup.py"
match: "^ version=\"(.*)\",$"
new_line: " version=\"$VERSION\","
}
update_version {
path: "ognibuild/__init__.py"
match: "^__version__ = \\((.*)\\)$"
new_line: "__version__ = $TUPLED_VERSION"
}

89
scripts/report-apt-deps-status Executable file
View file

@ -0,0 +1,89 @@
#!/usr/bin/python3
import argparse
from contextlib import ExitStack
import logging
import sys
from typing import Dict, List
from ognibuild.buildsystem import NoBuildToolsFound, detect_buildsystems
from ognibuild.requirements import Requirement
from ognibuild.resolver.apt import AptResolver
from ognibuild.session.plain import PlainSession
parser = argparse.ArgumentParser('report-apt-deps-status')
parser.add_argument('directory', type=str, default='.', nargs='?')
parser.add_argument(
    '--detailed', action='store_true', help='Show detailed analysis')
args = parser.parse_args()

logging.basicConfig(format='%(message)s', level=logging.INFO)

session = PlainSession()
with ExitStack() as es:
    es.enter_context(session)
    session.chdir(args.directory)
    resolver = AptResolver.from_session(session)

    # Detect which build systems are present in the target directory.
    try:
        bss = list(detect_buildsystems(args.directory))
    except NoBuildToolsFound:
        logging.fatal('No build tools found')
        sys.exit(1)

    logging.debug("Detected buildsystems: %s", ", ".join(map(str, bss)))

    # Collect declared dependencies from every build system, grouped by
    # stage ('core', 'build', 'test').
    deps: Dict[str, List[Requirement]] = {}
    for buildsystem in bss:
        try:
            declared_reqs = buildsystem.get_declared_dependencies(session, [])
            for stage, req in declared_reqs:
                deps.setdefault(stage, []).append(req)
        except NotImplementedError:
            logging.warning(
                'Unable to get dependencies from buildsystem %r, skipping',
                buildsystem)
            continue

    if args.detailed:
        # Per-stage listing of each requirement and its apt resolution.
        for stage, reqs in deps.items():
            logging.info("Stage: %s", stage)
            for req in reqs:
                apt_req = resolver.resolve(req)
                # resolve() can return None (see the summary branch
                # below); previously this crashed with AttributeError on
                # apt_req.pkg_relation_str().
                if apt_req is None:
                    logging.info("%s: (unresolved)", req)
                else:
                    logging.info("%s: %s", req, apt_req.pkg_relation_str())
            logging.info('')
    else:
        # Summary mode: aggregate into debian/control-style fields.
        build_depends = []
        test_depends = []
        run_depends = []
        unresolved = []
        for stage, reqs in deps.items():
            for req in reqs:
                apt_req = resolver.resolve(req)
                if apt_req is None:
                    unresolved.append(req)
                elif stage == 'core':
                    # Core requirements are needed both at build and at
                    # run time.
                    build_depends.append(apt_req)
                    run_depends.append(apt_req)
                elif stage == 'build':
                    build_depends.append(apt_req)
                elif stage == 'test':
                    test_depends.append(apt_req)
                else:
                    raise NotImplementedError(
                        'stage %s not supported' % stage)
        if build_depends:
            logging.info(
                'Build-Depends: %s',
                ', '.join([d.pkg_relation_str() for d in build_depends]))
        if test_depends:
            logging.info(
                'Test-Depends: %s',
                ', '.join([d.pkg_relation_str() for d in test_depends]))
        if run_depends:
            logging.info(
                'Depends: %s',
                ', '.join([d.pkg_relation_str() for d in run_depends]))
        if unresolved:
            sys.stdout.write('\n')
            logging.warning(
                'Unable to find apt packages for the following dependencies:')
            for req in unresolved:
                logging.warning('* %s', req)

View file

@ -1,13 +1,65 @@
[metadata]
name = ognibuild
description = Detect and run any build system
version = attr:ognibuild.__version__
maintainer = Jelmer Vernooij
maintainer_email = jelmer@jelmer.uk
license = GNU GPLv2 or later
url = https://jelmer.uk/code/ognibuild
classifiers =
Development Status :: 4 - Beta
License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: Implementation :: CPython
Operating System :: POSIX
[options]
packages =
ognibuild
ognibuild.debian
ognibuild.resolver
ognibuild.session
scripts = scripts/report-apt-deps-status
install_requires =
breezy>=3.2
buildlog-consultant>=0.0.21
requirements-parser
toml
setuptools
ruamel.yaml
tests_require =
testtools
types-toml
[options.entry_points]
console_scripts =
ogni=ognibuild.__main__:main
deb-fix-build=ognibuild.debian.fix_build:main
[options.extras_require]
dev =
testtools
debian =
debmutate
python_debian
python_apt
brz-debian
lz4
remote =
breezy
dulwich
dep_server =
aiohttp
aiohttp-openmetrics
gcp = google-cloud-logging
[flake8]
banned-modules = silver-platter = Should not use silver-platter
exclude = build,.eggs/
[mypy]
ignore_missing_imports = True
[bdist_wheel]
universal = 1
[egg_info]
tag_build =
tag_date = 0

View file

@ -1,40 +1,3 @@
#!/usr/bin/env python3
# encoding: utf-8
#!/usr/bin/python3
from setuptools import setup
setup(name="ognibuild",
description="Detect and run any build system",
version="0.0.7",
maintainer="Jelmer Vernooij",
maintainer_email="jelmer@jelmer.uk",
license="GNU GPLv2 or later",
url="https://jelmer.uk/code/ognibuild",
packages=['ognibuild', 'ognibuild.tests', 'ognibuild.debian', 'ognibuild.resolver', 'ognibuild.session'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: '
'GNU General Public License v2 or later (GPLv2+)',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: POSIX',
],
entry_points={
"console_scripts": [
"ogni=ognibuild.__main__:main",
"deb-fix-build=ognibuild.debian.fix_build:main",
]
},
install_requires=[
'breezy',
'buildlog-consultant>=0.0.10',
'requirements-parser',
],
extras_require={
'debian': ['debmutate', 'python_debian', 'python_apt'],
},
tests_require=['python_debian', 'buildlog-consultant', 'breezy', 'testtools'],
test_suite='ognibuild.tests.test_suite',
)
setup()

View file

@ -23,10 +23,13 @@ import unittest
def test_suite():
names = [
"debian_build",
'buildlog',
'logs',
]
if os.path.exists("/usr/bin/dpkg-architecture"):
names.append("debian_build")
names.append("debian_fix_build")
module_names = ["ognibuild.tests.test_" + name for name in names]
names.append("resolver_apt")
module_names = ["tests.test_" + name for name in names]
loader = unittest.TestLoader()
return loader.loadTestsFromNames(module_names)

47
tests/test_buildlog.py Normal file
View file

@ -0,0 +1,47 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from ognibuild.buildlog import PROBLEM_CONVERTERS
from buildlog_consultant import (
problem_clses,
__version__ as buildlog_consultant_version,
)
from unittest import TestCase
class TestProblemsExists(TestCase):
    """Check that every PROBLEM_CONVERTERS entry refers to a problem kind
    that buildlog-consultant actually knows about."""

    def test_exist(self):
        for entry in PROBLEM_CONVERTERS:
            # Entries are (kind, converter) or
            # (kind, converter, min_version).
            if len(entry) == 2:
                kind, _converter = entry
                required_version = None
            elif len(entry) == 3:
                kind, _converter, required_version = entry
            else:
                raise TypeError(entry)
            if required_version is not None:
                threshold = tuple(
                    int(part) for part in required_version.split('.'))
                # Skip kinds newer than the installed buildlog-consultant
                # (its __version__ is compared as a tuple).
                if buildlog_consultant_version < threshold:
                    continue
            self.assertTrue(
                kind in problem_clses,
                f"{kind} does not exist in known "
                "buildlog-consultant problem kinds")

View file

@ -17,8 +17,17 @@
import datetime
import os
import sys
from ..debian.build import add_dummy_changelog_entry, get_build_architecture
from debian.changelog import Version
from ognibuild.debian.build import (
add_dummy_changelog_entry,
get_build_architecture,
version_add_suffix,
_builddeb_command,
DEFAULT_BUILDER,
)
from breezy.tests import TestCaseWithTransport, TestCase
@ -150,3 +159,43 @@ class BuildArchitectureTests(TestCase):
def test_is_str(self):
self.assertIsInstance(get_build_architecture(), str)
class VersionAddSuffixTests(TestCase):
def test_native(self):
self.assertEqual(
Version('1.0~jan+lint4'),
version_add_suffix(Version('1.0~jan+lint3'), '~jan+lint'))
self.assertEqual(
Version('1.0~jan+lint1'),
version_add_suffix(Version('1.0'), '~jan+lint'))
def test_normal(self):
self.assertEqual(
Version('1.0-1~jan+lint4'),
version_add_suffix(Version('1.0-1~jan+lint3'), '~jan+lint'))
self.assertEqual(
Version('1.0-1~jan+lint1'),
version_add_suffix(Version('1.0-1'), '~jan+lint'))
self.assertEqual(
Version('0.0.12-1~jan+lint1'),
version_add_suffix(Version('0.0.12-1'), '~jan+lint'))
self.assertEqual(
Version('0.0.12-1~jan+unchanged1~jan+lint1'),
version_add_suffix(
Version('0.0.12-1~jan+unchanged1'), '~jan+lint'))
class BuilddebCommandTests(TestCase):
def test_simple(self):
self.assertEqual(
[sys.executable, "-m", "breezy", "builddeb",
"--guess-upstream-branch-url", "--builder=" + DEFAULT_BUILDER],
_builddeb_command())
self.assertEqual(
[sys.executable, "-m", "breezy", "builddeb",
"--guess-upstream-branch-url", "--builder=" + DEFAULT_BUILDER,
"--result-dir=/tmp/blah"],
_builddeb_command(result_dir="/tmp/blah"))

View file

@ -29,13 +29,15 @@ from buildlog_consultant.common import (
MissingRubyGem,
MissingValaPackage,
)
from ..debian.apt import AptManager, FileSearcher
from ..debian.fix_build import (
from ognibuild.debian.apt import AptManager, FileSearcher
from ognibuild.debian.fix_build import (
resolve_error,
versioned_package_fixers,
apt_fixers,
DebianPackagingContext,
add_build_dependency,
)
from ognibuild.resolver.apt import AptRequirement
from breezy.commit import NullCommitReporter
from breezy.tests import TestCaseWithTransport
@ -44,7 +46,7 @@ class DummyAptSearcher(FileSearcher):
def __init__(self, files):
self._apt_files = files
def search_files(self, path, regex=False, case_insensitive=False):
async def search_files(self, path, regex=False, case_insensitive=False):
for p, pkg in sorted(self._apt_files.items()):
if case_insensitive:
flags = re.I
@ -97,7 +99,7 @@ blah (0.1) UNRELEASED; urgency=medium
self._apt_files = {}
def resolve(self, error, context=("build",)):
from ..session.plain import PlainSession
from ognibuild.session.plain import PlainSession
session = PlainSession()
apt = AptManager(session)
@ -109,7 +111,8 @@ blah (0.1) UNRELEASED; urgency=medium
update_changelog=True,
commit_reporter=NullCommitReporter(),
)
fixers = versioned_package_fixers(session, context, apt) + apt_fixers(apt, context)
fixers = versioned_package_fixers(
session, context, apt) + apt_fixers(apt, context)
return resolve_error(error, ("build",), fixers)
def get_build_deps(self):
@ -118,7 +121,8 @@ blah (0.1) UNRELEASED; urgency=medium
def test_missing_command_unknown(self):
self._apt_files = {}
self.assertFalse(self.resolve(MissingCommand("acommandthatdoesnotexist")))
self.assertFalse(self.resolve(
MissingCommand("acommandthatdoesnotexist")))
def test_missing_command_brz(self):
self._apt_files = {
@ -130,7 +134,8 @@ blah (0.1) UNRELEASED; urgency=medium
self.overrideEnv("DEBFULLNAME", "Jelmer Vernooij")
self.assertTrue(self.resolve(MissingCommand("brz")))
self.assertEqual("libc6, brz", self.get_build_deps())
rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision())
rev = self.tree.branch.repository.get_revision(
self.tree.branch.last_revision())
self.assertEqual("Add missing build dependency on brz.\n", rev.message)
self.assertFalse(self.resolve(MissingCommand("brz")))
self.assertEqual("libc6, brz", self.get_build_deps())
@ -153,10 +158,12 @@ blah (0.1) UNRELEASED; urgency=medium
def test_missing_ruby_file_from_gem(self):
self._apt_files = {
"/usr/share/rubygems-integration/all/gems/activesupport-"
"5.2.3/lib/active_support/core_ext/string/strip.rb": "ruby-activesupport"
"5.2.3/lib/active_support/core_ext/string/strip.rb":
"ruby-activesupport"
}
self.assertTrue(
self.resolve(MissingRubyFile("active_support/core_ext/string/strip"))
self.resolve(MissingRubyFile(
"active_support/core_ext/string/strip"))
)
self.assertEqual("libc6, ruby-activesupport", self.get_build_deps())
@ -173,7 +180,8 @@ blah (0.1) UNRELEASED; urgency=medium
self.assertEqual("libc6, ruby-bio (>= 2.0.3)", self.get_build_deps())
def test_missing_perl_module(self):
self._apt_files = {"/usr/share/perl5/App/cpanminus/fatscript.pm": "cpanminus"}
self._apt_files = {
"/usr/share/perl5/App/cpanminus/fatscript.pm": "cpanminus"}
self.assertTrue(
self.resolve(
MissingPerlModule(
@ -200,28 +208,34 @@ blah (0.1) UNRELEASED; urgency=medium
def test_missing_pkg_config(self):
self._apt_files = {
"/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev"
"/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc":
"libxcb-xfixes0-dev"
}
self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes")))
self.assertEqual("libc6, libxcb-xfixes0-dev", self.get_build_deps())
def test_missing_pkg_config_versioned(self):
self._apt_files = {
"/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev"
"/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc":
"libxcb-xfixes0-dev"
}
self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes", "1.0")))
self.assertEqual("libc6, libxcb-xfixes0-dev (>= 1.0)", self.get_build_deps())
self.assertEqual(
"libc6, libxcb-xfixes0-dev (>= 1.0)", self.get_build_deps())
def test_missing_python_module(self):
self._apt_files = {"/usr/lib/python3/dist-packages/m2r.py": "python3-m2r"}
self._apt_files = {
"/usr/lib/python3/dist-packages/m2r.py": "python3-m2r"}
self.assertTrue(self.resolve(MissingPythonModule("m2r")))
self.assertEqual("libc6, python3-m2r", self.get_build_deps())
def test_missing_go_package(self):
self._apt_files = {
"/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go": "golang-github-chzyer-readline-dev",
"/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go":
"golang-github-chzyer-readline-dev",
}
self.assertTrue(self.resolve(MissingGoPackage("github.com/chzyer/readline")))
self.assertTrue(self.resolve(
MissingGoPackage("github.com/chzyer/readline")))
self.assertEqual(
"libc6, golang-github-chzyer-readline-dev", self.get_build_deps()
)
@ -232,3 +246,63 @@ blah (0.1) UNRELEASED; urgency=medium
}
self.assertTrue(self.resolve(MissingValaPackage("posix")))
self.assertEqual("libc6, valac-0.48-vapi", self.get_build_deps())
class AddBuildDependencyTests(TestCaseWithTransport):
def setUp(self):
super(AddBuildDependencyTests, self).setUp()
self.tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
("debian/",),
(
"debian/control",
"""\
Source: blah
Build-Depends: libc6
Package: python-blah
Depends: ${python3:Depends}
Description: A python package
Foo
""",
),
(
"debian/changelog",
"""\
blah (0.1) UNRELEASED; urgency=medium
* Initial release. (Closes: #XXXXXX)
-- Jelmer Vernooij <jelmer@debian.org> Sat, 04 Apr 2020 14:12:13 +0000
""",
),
]
)
self.tree.add(["debian", "debian/control", "debian/changelog"])
self.tree.commit("Initial commit")
self.context = DebianPackagingContext(
self.tree,
subpath="",
committer="ognibuild <ognibuild@jelmer.uk>",
update_changelog=True,
commit_reporter=NullCommitReporter(),
)
def test_already_present(self):
requirement = AptRequirement.simple('libc6')
self.assertFalse(add_build_dependency(self.context, requirement))
def test_basic(self):
requirement = AptRequirement.simple('foo')
self.assertTrue(add_build_dependency(self.context, requirement))
self.assertFileEqual("""\
Source: blah
Build-Depends: libc6, foo
Package: python-blah
Depends: ${python3:Depends}
Description: A python package
Foo
""", 'debian/control')

95
tests/test_logs.py Normal file
View file

@ -0,0 +1,95 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import tempfile
from unittest import TestCase
from ognibuild.logs import (
copy_output,
redirect_output,
rotate_logfile,
DirectoryLogManager,
)
class TestCopyOutput(TestCase):
    """Tests for ognibuild.logs.copy_output."""

    def _emit_line(self, logfile, tee):
        # Write a line to stdout while copy_output is capturing it.
        with copy_output(logfile, tee=tee):
            sys.stdout.write('lala\n')
            sys.stdout.flush()

    def test_no_tee(self):
        with tempfile.TemporaryDirectory() as workdir:
            logfile = os.path.join(workdir, 'foo.log')
            self._emit_line(logfile, tee=False)
            with open(logfile, 'r') as stream:
                self.assertEqual('lala\n', stream.read())

    def test_tee(self):
        with tempfile.TemporaryDirectory() as workdir:
            logfile = os.path.join(workdir, 'foo.log')
            self._emit_line(logfile, tee=True)
            with open(logfile, 'r') as stream:
                self.assertEqual('lala\n', stream.read())
class TestRedirectOutput(TestCase):
    """Tests for ognibuild.logs.redirect_output."""

    def test_simple(self):
        with tempfile.TemporaryDirectory() as workdir:
            logfile = os.path.join(workdir, 'foo.log')
            # Redirect stdout into the log file while writing a line.
            with open(logfile, 'w') as sink, redirect_output(sink):
                sys.stdout.write('lala\n')
                sys.stdout.flush()
            with open(logfile, 'r') as stream:
                self.assertEqual('lala\n', stream.read())
class TestRotateLogfile(TestCase):
    """Tests for ognibuild.logs.rotate_logfile."""

    def test_does_not_exist(self):
        # Rotating a nonexistent file is a no-op.
        with tempfile.TemporaryDirectory() as workdir:
            rotate_logfile(os.path.join(workdir, 'foo.log'))
            self.assertEqual([], os.listdir(workdir))

    def test_simple(self):
        # An existing file is renamed to <name>.1.
        with tempfile.TemporaryDirectory() as workdir:
            logfile = os.path.join(workdir, 'foo.log')
            with open(logfile, 'w') as sink:
                sink.write('contents\n')
            rotate_logfile(logfile)
            self.assertEqual(['foo.log.1'], os.listdir(workdir))
class TestLogManager(TestCase):
    """Tests for ognibuild.logs.DirectoryLogManager."""

    def test_simple(self):
        with tempfile.TemporaryDirectory() as workdir:
            logfile = os.path.join(workdir, 'foo.log')
            manager = DirectoryLogManager(logfile, mode='redirect')

            def emit():
                sys.stdout.write('foo\n')
                sys.stdout.flush()

            # wrap() returns a callable whose stdout goes to the log file.
            wrapped = manager.wrap(emit)
            wrapped()
            with open(logfile, 'r') as stream:
                self.assertEqual('foo\n', stream.read())

View file

@ -0,0 +1,47 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from unittest import TestCase
from ognibuild.resolver.apt import get_possible_python3_paths_for_python_object
class TestPython3Paths(TestCase):
    """Tests for get_possible_python3_paths_for_python_object.

    The returned entries are regular expression strings (hence the
    escaped dots and dashes) matching the filesystem locations where a
    Python object could live; for a dotted name, the parent package's
    candidate paths are appended after the leaf's.
    """

    def test_paths(self):
        # A top-level module: dist-packages, stdlib and extension-module
        # locations.
        self.assertEqual([
            '/usr/lib/python3/dist\\-packages/dulwich/__init__\\.py',
            '/usr/lib/python3/dist\\-packages/dulwich\\.py',
            '/usr/lib/python3\\.[0-9]+/'
            'lib\\-dynload/dulwich.cpython\\-.*\\.so',
            '/usr/lib/python3\\.[0-9]+/dulwich\\.py',
            '/usr/lib/python3\\.[0-9]+/dulwich/__init__\\.py'],
            get_possible_python3_paths_for_python_object('dulwich'))
        # A dotted name: paths for "cleo.foo" first, then for "cleo".
        self.assertEqual([
            '/usr/lib/python3/dist\\-packages/cleo/foo/__init__\\.py',
            '/usr/lib/python3/dist\\-packages/cleo/foo\\.py',
            '/usr/lib/python3\\.[0-9]+/'
            'lib\\-dynload/cleo/foo.cpython\\-.*\\.so',
            '/usr/lib/python3\\.[0-9]+/cleo/foo\\.py',
            '/usr/lib/python3\\.[0-9]+/cleo/foo/__init__\\.py',
            '/usr/lib/python3/dist\\-packages/cleo/__init__\\.py',
            '/usr/lib/python3/dist\\-packages/cleo\\.py',
            '/usr/lib/python3\\.[0-9]+/lib\\-dynload/cleo.cpython\\-.*\\.so',
            '/usr/lib/python3\\.[0-9]+/cleo\\.py',
            '/usr/lib/python3\\.[0-9]+/cleo/__init__\\.py'],
            get_possible_python3_paths_for_python_object('cleo.foo'))