New upstream version 0.0.15

Tianyu Chen 2022-11-22 11:19:55 +08:00
parent 3e1f11dd79
commit c286789e37
55 changed files with 3578 additions and 1371 deletions

.github/workflows/disperse.yml (new file)

@@ -0,0 +1,24 @@
---
name: Disperse configuration
"on":
- push
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
      - name: Install dependencies
        run: |
          sudo apt install protobuf-compiler
      - name: Install disperse
        run: |
          pip install git+https://github.com/jelmer/disperse
      - name: Validate disperse.conf
        run: |
          PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python disperse validate .

.github/workflows/pythonpackage.yml

@@ -1,6 +1,11 @@
+---
 name: Python package
-on: [push, pull_request]
+"on":
+  push:
+  pull_request:
+  schedule:
+  - cron: '0 6 * * *'  # Daily 6AM UTC build
 
 jobs:
   build:
@@ -9,7 +14,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
-        python-version: [3.7, 3.8]
+        python-version: [3.7, 3.8, 3.9, '3.10']
       fail-fast: false
     steps:
@@ -20,28 +25,28 @@ jobs:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
-          python -m pip install --upgrade pip flake8 cython
+          python -m pip install --upgrade pip
+          python -m pip install -e ".[remote,dep_server,dev]"
          python setup.py develop
      - name: Install Debian-specific dependencies
        run: |
-          sudo apt install libapt-pkg-dev
-          python -m pip install wheel
-          python -m pip install git+https://salsa.debian.org/apt-team/python-apt
+          sudo apt update
+          sudo apt install python3-wheel libapt-pkg-dev
+          python -m pip install \
+            python_apt@git+https://salsa.debian.org/apt-team/python-apt.git
          python -m pip install -e ".[debian]"
+          python -m pip install testtools
+          mkdir -p ~/.config/breezy/plugins
+          brz branch lp:brz-debian ~/.config/breezy/plugins/debian
        if: "matrix.python-version != 'pypy3' && matrix.os == 'ubuntu-latest'"
      - name: Style checks
        run: |
+          pip install flake8
          python -m flake8
      - name: Typing checks
        run: |
-          pip install -U mypy
+          pip install -U mypy types-toml
          python -m mypy ognibuild
        if: "matrix.python-version != 'pypy3'"
      - name: Test suite run
        run: |
-          python -m unittest ognibuild.tests.test_suite
+          python -m unittest tests.test_suite
        env:
          PYTHONHASHSEED: random

.gitignore

@@ -1,3 +1,4 @@
+.coverage
 build
 *~
 ognibuild.egg-info

Makefile (new file)

@@ -0,0 +1,20 @@
check:: style

style:
	flake8

check:: testsuite

testsuite:
	python3 -m unittest tests.test_suite

check:: typing

typing:
	mypy ognibuild tests

coverage:
	python3 -m coverage run -m unittest tests.test_suite

coverage-html:
	python3 -m coverage html

PKG-INFO (deleted)

@@ -1,17 +0,0 @@
Metadata-Version: 2.1
Name: ognibuild
Version: 0.0.7
Summary: Detect and run any build system
Home-page: https://jelmer.uk/code/ognibuild
Maintainer: Jelmer Vernooij
Maintainer-email: jelmer@jelmer.uk
License: GNU GPLv2 or later
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Provides-Extra: debian

README.md

@@ -31,6 +31,12 @@ Ognibuild has a number of subcommands:
 It also includes a subcommand that can fix up the build dependencies
 for Debian packages, called deb-fix-build.
 
+### Examples
+
+```
+ogni -d https://gitlab.gnome.org/GNOME/fractal install
+```
+
 ## Status
 
 Ognibuild is functional, but sometimes rough around the edges. If you run into

disperse.conf (new file)

@@ -0,0 +1,8 @@
# See https://github.com/jelmer/disperse
timeout_days: 5
tag_name: "v$VERSION"
verify_command: "python3 -m unittest tests.test_suite"
update_version {
path: "ognibuild/__init__.py"
new_line: "__version__ = $TUPLED_VERSION"
}

ognibuild.egg-info/PKG-INFO (deleted)

@@ -1,17 +0,0 @@
Metadata-Version: 2.1
Name: ognibuild
Version: 0.0.7
Summary: Detect and run any build system
Home-page: https://jelmer.uk/code/ognibuild
Maintainer: Jelmer Vernooij
Maintainer-email: jelmer@jelmer.uk
License: GNU GPLv2 or later
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Provides-Extra: debian

ognibuild.egg-info/SOURCES.txt (deleted)

@@ -1,52 +0,0 @@
.flake8
.gitignore
AUTHORS
CODE_OF_CONDUCT.md
LICENSE
README.md
SECURITY.md
TODO
releaser.conf
setup.cfg
setup.py
.github/workflows/pythonpackage.yml
notes/architecture.md
notes/concepts.md
notes/roadmap.md
ognibuild/__init__.py
ognibuild/__main__.py
ognibuild/build.py
ognibuild/buildlog.py
ognibuild/buildsystem.py
ognibuild/clean.py
ognibuild/dist.py
ognibuild/dist_catcher.py
ognibuild/fix_build.py
ognibuild/fixers.py
ognibuild/info.py
ognibuild/install.py
ognibuild/outputs.py
ognibuild/requirements.py
ognibuild/test.py
ognibuild/vcs.py
ognibuild.egg-info/PKG-INFO
ognibuild.egg-info/SOURCES.txt
ognibuild.egg-info/dependency_links.txt
ognibuild.egg-info/entry_points.txt
ognibuild.egg-info/requires.txt
ognibuild.egg-info/top_level.txt
ognibuild/debian/__init__.py
ognibuild/debian/apt.py
ognibuild/debian/build.py
ognibuild/debian/build_deps.py
ognibuild/debian/file_search.py
ognibuild/debian/fix_build.py
ognibuild/debian/udd.py
ognibuild/resolver/__init__.py
ognibuild/resolver/apt.py
ognibuild/session/__init__.py
ognibuild/session/plain.py
ognibuild/session/schroot.py
ognibuild/tests/__init__.py
ognibuild/tests/test_debian_build.py
ognibuild/tests/test_debian_fix_build.py

ognibuild.egg-info/dependency_links.txt (deleted)

@@ -1 +0,0 @@

ognibuild.egg-info/entry_points.txt (deleted)

@@ -1,4 +0,0 @@
[console_scripts]
deb-fix-build = ognibuild.debian.fix_build:main
ogni = ognibuild.__main__:main

ognibuild.egg-info/requires.txt (deleted)

@@ -1,8 +0,0 @@
breezy
buildlog-consultant>=0.0.10
requirements-parser
[debian]
debmutate
python_apt
python_debian

ognibuild.egg-info/top_level.txt (deleted)

@@ -1 +0,0 @@
ognibuild

ognibuild/__init__.py

@@ -18,12 +18,14 @@
 import os
 import stat
+from typing import List, Dict, Type
 
-__version__ = (0, 0, 7)
+__version__ = (0, 0, 15)
+version_string = '.'.join(map(str, __version__))
 
-USER_AGENT = "Ognibuild"
+USER_AGENT = f"Ognibuild/{version_string}"
 
 
 class DetailedFailure(Exception):
@@ -32,6 +34,12 @@ class DetailedFailure(Exception):
         self.argv = argv
         self.error = error
 
+    def __eq__(self, other):
+        return (isinstance(other, type(self)) and
+                self.retcode == other.retcode and
+                self.argv == other.argv and
+                self.error == other.error)
+
 
 class UnidentifiedError(Exception):
     """An unidentified error."""
@@ -42,6 +50,13 @@ class UnidentifiedError(Exception):
         self.lines = lines
         self.secondary = secondary
 
+    def __eq__(self, other):
+        return (isinstance(other, type(self)) and
+                self.retcode == other.retcode and
+                self.argv == other.argv and
+                self.lines == other.lines and
+                self.secondary == other.secondary)
+
     def __repr__(self):
         return "<%s(%r, %r, ..., secondary=%r)>" % (
             type(self).__name__,
@@ -64,17 +79,64 @@ def shebang_binary(p):
     return os.path.basename(args[0].decode()).strip()
 
 
+class UnknownRequirementFamily(Exception):
+    """Requirement family is unknown"""
+
+    def __init__(self, family):
+        self.family = family
+
+
 class Requirement(object):
 
     # Name of the family of requirements - e.g. "python-package"
     family: str
 
-    def __init__(self, family):
-        self.family = family
+    _JSON_DESERIALIZERS: Dict[str, Type["Requirement"]] = {}
+
+    @classmethod
+    def _from_json(self, js):
+        raise NotImplementedError(self._from_json)
+
+    @classmethod
+    def from_json(self, js):
+        try:
+            family = Requirement._JSON_DESERIALIZERS[js[0]]
+        except KeyError:
+            raise UnknownRequirementFamily(js[0])
+        return family._from_json(js[1])
 
     def met(self, session):
         raise NotImplementedError(self)
 
+    def _json(self):
+        raise NotImplementedError(self._json)
+
+    def json(self):
+        return (type(self).family, self._json())
+
+    @classmethod
+    def register_json(cls, subcls):
+        Requirement._JSON_DESERIALIZERS[subcls.family] = subcls
+
+
+class OneOfRequirement(Requirement):
+
+    elements: List[Requirement]
+
+    family = 'or'
+
+    def __init__(self, elements):
+        self.elements = elements
+
+    def met(self, session):
+        for req in self.elements:
+            if req.met(session):
+                return True
+        return False
+
+    def __repr__(self):
+        return "%s(%r)" % (type(self).__name__, self.elements)
+
 
 class UpstreamOutput(object):
     def __init__(self, family):

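The new `json()`/`from_json()`/`register_json()` hooks above give every requirement family a JSON round-trip keyed on its `family` string. A minimal sketch of how a subclass could plug into that registry; `DemoPackageRequirement` is a made-up example, the real subclasses live in `ognibuild/requirements.py`:

```python
# Sketch only: a hypothetical Requirement subclass using the JSON registry
# added in this release; not one of ognibuild's real requirement classes.
from ognibuild import Requirement


class DemoPackageRequirement(Requirement):

    family = 'demo-package'

    def __init__(self, package):
        self.package = package

    def _json(self):
        return {'package': self.package}

    @classmethod
    def _from_json(cls, js):
        return cls(js['package'])


Requirement.register_json(DemoPackageRequirement)

req = DemoPackageRequirement('example')
# json() yields ('demo-package', {...}); from_json() dispatches on the family.
assert Requirement.from_json(req.json()).package == 'example'
```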
ognibuild/__main__.py

@@ -15,11 +15,13 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+from contextlib import ExitStack
 import logging
 import os
 import shlex
 import sys
+from urllib.parse import urlparse
 
-from . import UnidentifiedError, DetailedFailure
+from . import UnidentifiedError, DetailedFailure, version_string
 from .buildlog import (
     InstallFixer,
     ExplainInstallFixer,
@@ -29,9 +31,10 @@ from .buildlog import (
 from .buildsystem import NoBuildToolsFound, detect_buildsystems
 from .resolver import (
     auto_resolver,
-    native_resolvers,
+    select_resolvers,
+    UnsatisfiedRequirements,
 )
-from .resolver.apt import AptResolver
+from .session import SessionSetupFailure
 
 
 def display_explain_commands(commands):
@@ -39,34 +42,33 @@ def display_explain_commands(commands):
     for command, reqs in commands:
         if isinstance(command, list):
             command = shlex.join(command)
-        logging.info(" %s (to install %s)", command, ", ".join(map(str, reqs)))
-
-
-def get_necessary_declared_requirements(resolver, requirements, stages):
-    missing = []
-    for stage, req in requirements:
-        if stage in stages:
-            missing.append(req)
-    return missing
+        logging.info(
+            " %s (to install %s)", command, ", ".join(map(str, reqs)))
 
 
 def install_necessary_declared_requirements(
     session, resolver, fixers, buildsystems, stages, explain=False
 ):
-    relevant = []
-    declared_reqs = []
-    for buildsystem in buildsystems:
-        try:
-            declared_reqs.extend(buildsystem.get_declared_dependencies(session, fixers))
-        except NotImplementedError:
-            logging.warning(
-                "Unable to determine declared dependencies from %r", buildsystem
-            )
-    relevant.extend(
-        get_necessary_declared_requirements(resolver, declared_reqs, stages)
-    )
-    install_missing_reqs(session, resolver, relevant, explain=explain)
+    if explain:
+        relevant = []
+        for buildsystem in buildsystems:
+            declared_reqs = buildsystem.get_declared_dependencies(
+                session, fixers)
+            for stage, req in declared_reqs:
+                if stage in stages:
+                    relevant.append(req)
+        install_missing_reqs(session, resolver, relevant, explain=True)
+    else:
+        for buildsystem in buildsystems:
+            try:
+                buildsystem.install_declared_requirements(
+                    stages, session, resolver, fixers)
+            except NotImplementedError:
+                logging.warning(
+                    "Unable to determine declared dependencies from %r",
+                    buildsystem
+                )
 
 
 # Types of dependencies:
@@ -82,6 +84,7 @@ STAGE_MAP = {
     "test": ["test", "build", "core"],
     "build": ["build", "core"],
     "clean": [],
+    "verify": ["build", "core", "test"],
 }
@@ -95,9 +98,13 @@ def determine_fixers(session, resolver, explain=False):
 def main():  # noqa: C901
     import argparse
 
-    parser = argparse.ArgumentParser()
+    parser = argparse.ArgumentParser(prog='ogni')
     parser.add_argument(
-        "--directory", "-d", type=str, help="Directory for project.", default="."
+        "--version", action="version", version="%(prog)s " + version_string
+    )
+    parser.add_argument(
+        "--directory", "-d", type=str, help="Directory for project.",
+        default="."
     )
     parser.add_argument("--schroot", type=str, help="schroot to run in.")
     parser.add_argument(
@@ -123,6 +130,15 @@ def main():  # noqa: C901
         action="store_true",
         help="Ignore declared dependencies, follow build errors only",
     )
+    parser.add_argument(
+        "--user", action="store_true",
+        help="Install in local-user directories."
+    )
+    parser.add_argument(
+        "--dep-server-url", type=str,
+        help="ognibuild dep server to use",
+        default=os.environ.get('OGNIBUILD_DEPS'))
     parser.add_argument("--verbose", action="store_true", help="Be verbose")
     subparsers = parser.add_subparsers(dest="subcommand")
     subparsers.add_parser("dist")
@@ -130,12 +146,11 @@ def main():  # noqa: C901
     subparsers.add_parser("clean")
     subparsers.add_parser("test")
     subparsers.add_parser("info")
+    subparsers.add_parser("verify")
     exec_parser = subparsers.add_parser("exec")
-    exec_parser.add_argument('subargv', nargs=argparse.REMAINDER, help='Command to run.')
+    exec_parser.add_argument(
+        'subargv', nargs=argparse.REMAINDER, help='Command to run.')
     install_parser = subparsers.add_parser("install")
-    install_parser.add_argument(
-        "--user", action="store_true", help="Install in local-user directories."
-    )
     install_parser.add_argument(
         "--prefix", type=str, help='Prefix to install in')
@@ -155,38 +170,72 @@ def main():  # noqa: C901
     from .session.plain import PlainSession
 
     session = PlainSession()
-    with session:
-        logging.info("Preparing directory %s", args.directory)
-        external_dir, internal_dir = session.setup_from_directory(args.directory)
+    with ExitStack() as es:
+        try:
+            es.enter_context(session)
+        except SessionSetupFailure as e:
+            logging.debug('Error lines: %r', e.errlines)
+            logging.fatal('Failed to set up session: %s', e.reason)
+            return 1
+        parsed_url = urlparse(args.directory)
+        # TODO(jelmer): Get a list of supported schemes from breezy?
+        if parsed_url.scheme in ('git', 'http', 'https', 'ssh'):
+            import breezy.git  # noqa: F401
+            import breezy.bzr  # noqa: F401
+            from breezy.branch import Branch
+            from silver_platter.utils import TemporarySprout
+            b = Branch.open(args.directory)
+            logging.info("Cloning %s", args.directory)
+            wt = es.enter_context(TemporarySprout(b))
+            external_dir, internal_dir = session.setup_from_vcs(wt)
+        else:
+            if parsed_url.scheme == 'file':
+                directory = parsed_url.path
+            else:
+                directory = args.directory
+            logging.info("Preparing directory %s", directory)
+            external_dir, internal_dir = session.setup_from_directory(
+                directory)
         session.chdir(internal_dir)
         os.chdir(external_dir)
 
         if not session.is_temporary and args.subcommand == 'info':
             args.explain = True
 
-        if args.resolve == "apt":
-            resolver = AptResolver.from_session(session)
-        elif args.resolve == "native":
-            resolver = native_resolvers(session, user_local=args.user)
-        elif args.resolve == "auto":
+        if args.resolve == "auto":
             resolver = auto_resolver(session, explain=args.explain)
+        else:
+            resolver = select_resolvers(
+                session, user_local=args.user,
+                resolvers=args.resolve.split(','),
+                dep_server_url=args.dep_server_url)
         logging.info("Using requirement resolver: %s", resolver)
         fixers = determine_fixers(session, resolver, explain=args.explain)
         try:
             if args.subcommand == "exec":
                 from .fix_build import run_with_build_fixers
-                run_with_build_fixers(session, args.subargv, fixers)
+                run_with_build_fixers(fixers, session, args.subargv)
                 return 0
-            bss = list(detect_buildsystems(args.directory))
+            bss = list(detect_buildsystems(external_dir))
             logging.info("Detected buildsystems: %s", ", ".join(map(str, bss)))
             if not args.ignore_declared_dependencies:
                 stages = STAGE_MAP[args.subcommand]
                 if stages:
-                    logging.info("Checking that declared requirements are present")
+                    logging.info(
+                        "Checking that declared requirements are present")
                    try:
                        install_necessary_declared_requirements(
-                            session, resolver, fixers, bss, stages, explain=args.explain
+                            session, resolver, fixers, bss, stages,
+                            explain=args.explain
                        )
+                    except UnsatisfiedRequirements as e:
+                        logging.info(
+                            'Unable to install declared dependencies:')
+                        for req in e.requirements:
+                            logging.info(' * %s', req)
+                        return 1
                    except ExplainInstall as e:
                        display_explain_commands(e.commands)
                        return 1
@@ -207,11 +256,15 @@ def main():  # noqa: C901
             if args.subcommand == "build":
                 from .build import run_build
 
-                run_build(session, buildsystems=bss, resolver=resolver, fixers=fixers)
+                run_build(
+                    session, buildsystems=bss, resolver=resolver,
+                    fixers=fixers)
             if args.subcommand == "clean":
                 from .clean import run_clean
 
-                run_clean(session, buildsystems=bss, resolver=resolver, fixers=fixers)
+                run_clean(
+                    session, buildsystems=bss, resolver=resolver,
+                    fixers=fixers)
             if args.subcommand == "install":
                 from .install import run_install
@@ -226,14 +279,42 @@ def main():  # noqa: C901
             if args.subcommand == "test":
                 from .test import run_test
 
-                run_test(session, buildsystems=bss, resolver=resolver, fixers=fixers)
+                run_test(
+                    session, buildsystems=bss, resolver=resolver,
+                    fixers=fixers)
             if args.subcommand == "info":
                 from .info import run_info
 
                 run_info(session, buildsystems=bss, fixers=fixers)
+            if args.subcommand == "verify":
+                from .build import run_build
+                from .test import run_test
+
+                run_build(
+                    session, buildsystems=bss, resolver=resolver,
+                    fixers=fixers)
+                run_test(
+                    session, buildsystems=bss, resolver=resolver,
+                    fixers=fixers)
         except ExplainInstall as e:
             display_explain_commands(e.commands)
-        except (UnidentifiedError, DetailedFailure):
+        except UnidentifiedError:
+            logging.info(
+                'If there is a clear indication of a problem in the build '
+                'log, please consider filing a request to update the patterns '
+                'in buildlog-consultant at '
+                'https://github.com/jelmer/buildlog-consultant/issues/new')
+            return 1
+        except DetailedFailure:
+            if not args.verbose:
+                logging.info(
+                    'Run with --verbose to get more information '
+                    'about steps taken to try to resolve error')
+            logging.info(
+                'Please consider filing a bug report at '
+                'https://github.com/jelmer/ognibuild/issues/new')
             return 1
         except NoBuildToolsFound:
             logging.info("No build tools found.")

ognibuild/build.py

@ -15,16 +15,28 @@
# along with this program; if not, write to the Free Software # along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from functools import partial
from .buildsystem import NoBuildToolsFound from .buildsystem import NoBuildToolsFound
from .fix_build import iterate_with_build_fixers
from .logs import NoLogManager
def run_build(session, buildsystems, resolver, fixers): BUILD_LOG_FILENAME = 'build.log'
def run_build(session, buildsystems, resolver, fixers, log_manager=None):
# Some things want to write to the user's home directory, # Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache # e.g. pip caches in ~/.cache
session.create_home() session.create_home()
if log_manager is None:
log_manager = NoLogManager()
for buildsystem in buildsystems: for buildsystem in buildsystems:
buildsystem.build(session, resolver, fixers) iterate_with_build_fixers(
fixers, log_manager.wrap(
partial(buildsystem.build, session, resolver)))
return return
raise NoBuildToolsFound() raise NoBuildToolsFound()

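`run_build` above (and `run_clean` in ognibuild/clean.py further down) now funnels the build-system call through `iterate_with_build_fixers` via `partial`, with an optional log manager wrapping the callable. A rough sketch of that call shape with stand-in objects; the real fixer and log-manager interfaces live in ognibuild/fix_build.py and ognibuild/logs.py and may differ:

```python
# Stand-in sketch of the wrap-then-fix pattern; none of these classes are
# ognibuild's real implementations.
from functools import partial


class StubLogManager:
    def wrap(self, fn):
        # A real log manager would redirect output to a log file here.
        def wrapped():
            return fn()
        return wrapped


def iterate_with_build_fixers_stub(fixers, fn):
    # The real function retries fn() while a fixer can resolve the error it
    # raised; simplified to a single call here.
    return fn()


def fake_build(session, resolver):
    print("building in", session, "resolving with", resolver)


iterate_with_build_fixers_stub(
    [], StubLogManager().wrap(partial(fake_build, "session", "resolver")))
```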
ognibuild/buildlog.py

@@ -19,65 +19,32 @@
 """
 
 import logging
+from typing import Optional, List, Callable, Union, Tuple
 
 from buildlog_consultant.common import (
-    MissingPythonModule,
-    MissingPythonDistribution,
-    MissingCHeader,
-    MissingPkgConfig,
-    MissingCommand,
-    MissingFile,
-    MissingJavaScriptRuntime,
-    MissingSprocketsFile,
-    MissingGoPackage,
+    Problem,
     MissingPerlFile,
-    MissingPerlModule,
-    MissingXmlEntity,
-    MissingJDKFile,
-    MissingJDK,
-    MissingJRE,
-    MissingNodeModule,
-    MissingNodePackage,
-    MissingPhpClass,
-    MissingRubyGem,
-    MissingLibrary,
     MissingSetupPyCommand,
-    MissingJavaClass,
-    MissingCSharpCompiler,
-    MissingRPackage,
-    MissingRubyFile,
-    MissingAutoconfMacro,
-    MissingValaPackage,
-    MissingBoostComponents,
+    MissingCMakeComponents,
     MissingXfceDependency,
     MissingHaskellDependencies,
-    MissingVagueDependency,
-    DhAddonLoadFailure,
     MissingMavenArtifacts,
-    MissingIntrospectionTypelib,
-    GnomeCommonMissing,
     MissingGnomeCommonDependency,
-    UnknownCertificateAuthority,
-    CMakeFilesMissing,
-    MissingLibtool,
-    MissingQt,
-    MissingX11,
     MissingPerlPredeclared,
     MissingLatexFile,
     MissingCargoCrate,
-    MissingStaticLibrary,
 )
-from buildlog_consultant.apt import UnsatisfiedAptDependencies
 
+from . import OneOfRequirement
 from .fix_build import BuildFixer
 from .requirements import (
+    Requirement,
     BinaryRequirement,
     PathRequirement,
     PkgConfigRequirement,
     CHeaderRequirement,
     JavaScriptRuntimeRequirement,
     ValaPackageRequirement,
-    RubyGemRequirement,
     GoPackageRequirement,
     DhAddonRequirement,
     PhpClassRequirement,
@@ -92,6 +59,7 @@ from .requirements import (
     HaskellPackageRequirement,
     MavenArtifactRequirement,
     BoostComponentRequirement,
+    KF5ComponentRequirement,
     GnomeCommonRequirement,
     JDKFileRequirement,
     JDKRequirement,
@@ -112,86 +80,124 @@ from .requirements import (
     LatexPackageRequirement,
     CargoCrateRequirement,
     StaticLibraryRequirement,
+    GnulibDirectoryRequirement,
+    LuaModuleRequirement,
+    PHPExtensionRequirement,
+    VcsControlDirectoryAccessRequirement,
+    RubyGemRequirement,
+    QtModuleRequirement,
 )
 from .resolver import UnsatisfiedRequirements
-def problem_to_upstream_requirement(problem):  # noqa: C901
-    if isinstance(problem, MissingFile):
-        return PathRequirement(problem.path)
-    elif isinstance(problem, MissingCommand):
-        return BinaryRequirement(problem.command)
-    elif isinstance(problem, MissingPkgConfig):
-        return PkgConfigRequirement(problem.module, problem.minimum_version)
-    elif isinstance(problem, MissingCHeader):
-        return CHeaderRequirement(problem.header)
-    elif isinstance(problem, MissingIntrospectionTypelib):
-        return IntrospectionTypelibRequirement(problem.library)
-    elif isinstance(problem, MissingJavaScriptRuntime):
-        return JavaScriptRuntimeRequirement()
-    elif isinstance(problem, MissingRubyGem):
-        return RubyGemRequirement(problem.gem, problem.version)
-    elif isinstance(problem, MissingValaPackage):
-        return ValaPackageRequirement(problem.package)
-    elif isinstance(problem, MissingGoPackage):
-        return GoPackageRequirement(problem.package)
-    elif isinstance(problem, MissingBoostComponents):
-        return [BoostComponentRequirement(name) for name in problem.components]
-    elif isinstance(problem, DhAddonLoadFailure):
-        return DhAddonRequirement(problem.path)
-    elif isinstance(problem, MissingPhpClass):
-        return PhpClassRequirement(problem.php_class)
-    elif isinstance(problem, MissingRPackage):
-        return RPackageRequirement(problem.package, problem.minimum_version)
-    elif isinstance(problem, MissingNodeModule):
-        return NodeModuleRequirement(problem.module)
-    elif isinstance(problem, MissingStaticLibrary):
-        return StaticLibraryRequirement(problem.library, problem.filename)
-    elif isinstance(problem, MissingNodePackage):
-        return NodePackageRequirement(problem.package)
+def map_pytest_arguments_to_plugin(args):
+    # TODO(jelmer): Map argument to PytestPluginRequirement
+    return None
+
+
+ProblemToRequirementConverter = Callable[[Problem], Optional[Requirement]]
+
+
+PROBLEM_CONVERTERS: List[Union[
+        Tuple[str, ProblemToRequirementConverter],
+        Tuple[str, ProblemToRequirementConverter, str]]] = [
+    ('missing-file', lambda p: PathRequirement(p.path)),
+    ('command-missing', lambda p: BinaryRequirement(p.command)),
+    ('valac-cannot-compile', lambda p: VagueDependencyRequirement('valac'),
+     '0.0.27'),
+    ('missing-cmake-files', lambda p: OneOfRequirement(
+        [CMakefileRequirement(filename, p.version)
+         for filename in p.filenames])),
+    ('missing-command-or-build-file', lambda p: BinaryRequirement(p.command)),
+    ('missing-pkg-config-package',
+     lambda p: PkgConfigRequirement(p.module, p.minimum_version)),
+    ('missing-c-header', lambda p: CHeaderRequirement(p.header)),
+    ('missing-introspection-typelib',
+     lambda p: IntrospectionTypelibRequirement(p.library)),
+    ('missing-python-module', lambda p: PythonModuleRequirement(
+        p.module, python_version=p.python_version,
+        minimum_version=p.minimum_version)),
+    ('missing-python-distribution', lambda p: PythonPackageRequirement(
+        p.distribution, python_version=p.python_version,
+        minimum_version=p.minimum_version)),
+    ('javascript-runtime-missing', lambda p: JavaScriptRuntimeRequirement()),
+    ('missing-node-module', lambda p: NodeModuleRequirement(p.module)),
+    ('missing-node-package', lambda p: NodePackageRequirement(p.package)),
+    ('missing-ruby-gem', lambda p: RubyGemRequirement(p.gem, p.version)),
+    ('missing-qt-modules', lambda p: QtModuleRequirement(p.modules[0]),
+     '0.0.27'),
+    ('missing-php-class', lambda p: PhpClassRequirement(p.php_class)),
+    ('missing-r-package', lambda p: RPackageRequirement(
+        p.package, p.minimum_version)),
+    ('missing-vague-dependency',
+     lambda p: VagueDependencyRequirement(
+         p.name, minimum_version=p.minimum_version)),
+    ('missing-c#-compiler', lambda p: BinaryRequirement("msc")),
+    ('missing-gnome-common', lambda p: GnomeCommonRequirement()),
+    ('missing-jdk', lambda p: JDKRequirement()),
+    ('missing-jre', lambda p: JRERequirement()),
+    ('missing-qt', lambda p: QTRequirement()),
+    ('missing-x11', lambda p: X11Requirement()),
+    ('missing-libtool', lambda p: LibtoolRequirement()),
+    ('missing-php-extension',
+     lambda p: PHPExtensionRequirement(p.extension)),
+    ('missing-rust-compiler', lambda p: BinaryRequirement("rustc")),
+    ('missing-java-class', lambda p: JavaClassRequirement(p.classname)),
+    ('missing-go-package', lambda p: GoPackageRequirement(p.package)),
+    ('missing-autoconf-macro', lambda p: AutoconfMacroRequirement(p.macro)),
+    ('missing-vala-package', lambda p: ValaPackageRequirement(p.package)),
+    ('missing-lua-module', lambda p: LuaModuleRequirement(p.module)),
+    ('missing-jdk-file', lambda p: JDKFileRequirement(p.jdk_path, p.filename)),
+    ('missing-ruby-file', lambda p: RubyFileRequirement(p.filename)),
+    ('missing-library', lambda p: LibraryRequirement(p.library)),
+    ('missing-sprockets-file',
+     lambda p: SprocketsFileRequirement(p.content_type, p.name)),
+    ('dh-addon-load-failure', lambda p: DhAddonRequirement(p.path)),
+    ('missing-xml-entity', lambda p: XmlEntityRequirement(p.url)),
+    ('missing-gnulib-directory',
+     lambda p: GnulibDirectoryRequirement(p.directory)),
+    ('vcs-control-directory-needed',
+     lambda p: VcsControlDirectoryAccessRequirement(p.vcs)),
+    ('missing-static-library',
+     lambda p: StaticLibraryRequirement(p.library, p.filename)),
+    ('missing-perl-module',
+     lambda p: PerlModuleRequirement(
+         module=p.module, filename=p.filename, inc=p.inc)),
+    ('unknown-certificate-authority',
+     lambda p: CertificateAuthorityRequirement(p.url)),
+    ('unsupported-pytest-arguments',
+     lambda p: map_pytest_arguments_to_plugin(p.args), '0.0.27'),
+]
+
+
+def problem_to_upstream_requirement(
+        problem: Problem) -> Optional[Requirement]:  # noqa: C901
+    for entry in PROBLEM_CONVERTERS:
+        kind, fn = entry[:2]
+        if kind == problem.kind:
+            return fn(problem)
+    if isinstance(problem, MissingCMakeComponents):
+        if problem.name.lower() == 'boost':
+            return OneOfRequirement(
+                [BoostComponentRequirement(name)
+                 for name in problem.components])
+        elif problem.name.lower() == 'kf5':
+            return OneOfRequirement(
+                [KF5ComponentRequirement(name) for name in problem.components])
+        return None
     elif isinstance(problem, MissingLatexFile):
         if problem.filename.endswith('.sty'):
             return LatexPackageRequirement(problem.filename[:-4])
         return None
-    elif isinstance(problem, MissingVagueDependency):
-        return VagueDependencyRequirement(problem.name, minimum_version=problem.minimum_version)
-    elif isinstance(problem, MissingLibrary):
-        return LibraryRequirement(problem.library)
-    elif isinstance(problem, MissingRubyFile):
-        return RubyFileRequirement(problem.filename)
-    elif isinstance(problem, MissingXmlEntity):
-        return XmlEntityRequirement(problem.url)
-    elif isinstance(problem, MissingSprocketsFile):
-        return SprocketsFileRequirement(problem.content_type, problem.name)
-    elif isinstance(problem, MissingJavaClass):
-        return JavaClassRequirement(problem.classname)
-    elif isinstance(problem, CMakeFilesMissing):
-        return [CMakefileRequirement(filename) for filename in problem.filenames]
     elif isinstance(problem, MissingHaskellDependencies):
-        return [HaskellPackageRequirement.from_string(dep) for dep in problem.deps]
+        return OneOfRequirement(
+            [HaskellPackageRequirement.from_string(dep)
+             for dep in problem.deps])
     elif isinstance(problem, MissingMavenArtifacts):
-        return [
+        return OneOfRequirement([
             MavenArtifactRequirement.from_str(artifact)
             for artifact in problem.artifacts
-        ]
-    elif isinstance(problem, MissingCSharpCompiler):
-        return BinaryRequirement("msc")
-    elif isinstance(problem, GnomeCommonMissing):
-        return GnomeCommonRequirement()
-    elif isinstance(problem, MissingJDKFile):
-        return JDKFileRequirement(problem.jdk_path, problem.filename)
-    elif isinstance(problem, MissingJDK):
-        return JDKRequirement()
-    elif isinstance(problem, MissingJRE):
-        return JRERequirement()
-    elif isinstance(problem, MissingQt):
-        return QTRequirement()
-    elif isinstance(problem, MissingX11):
-        return X11Requirement()
-    elif isinstance(problem, MissingLibtool):
-        return LibtoolRequirement()
-    elif isinstance(problem, UnknownCertificateAuthority):
-        return CertificateAuthorityRequirement(problem.url)
+        ])
     elif isinstance(problem, MissingPerlPredeclared):
         ret = PerlPreDeclaredRequirement(problem.name)
         try:
@@ -210,36 +216,20 @@ def problem_to_upstream_requirement(problem):  # noqa: C901
             return BinaryRequirement("glib-gettextize")
         else:
             logging.warning(
-                "No known command for gnome-common dependency %s", problem.package
+                "No known command for gnome-common dependency %s",
+                problem.package
             )
             return None
     elif isinstance(problem, MissingXfceDependency):
         if problem.package == "gtk-doc":
             return BinaryRequirement("gtkdocize")
         else:
-            logging.warning("No known command for xfce dependency %s", problem.package)
+            logging.warning(
+                "No known command for xfce dependency %s", problem.package)
             return None
-    elif isinstance(problem, MissingPerlModule):
-        return PerlModuleRequirement(
-            module=problem.module, filename=problem.filename, inc=problem.inc
-        )
     elif isinstance(problem, MissingPerlFile):
         return PerlFileRequirement(filename=problem.filename)
-    elif isinstance(problem, MissingAutoconfMacro):
-        return AutoconfMacroRequirement(problem.macro)
-    elif isinstance(problem, MissingPythonModule):
-        return PythonModuleRequirement(
-            problem.module,
-            python_version=problem.python_version,
-            minimum_version=problem.minimum_version,
-        )
-    elif isinstance(problem, MissingPythonDistribution):
-        return PythonPackageRequirement(
-            problem.distribution,
-            python_version=problem.python_version,
-            minimum_version=problem.minimum_version,
-        )
-    elif isinstance(problem, UnsatisfiedAptDependencies):
+    elif problem.kind == 'unsatisfied-apt-dependencies':
         from .resolver.apt import AptRequirement
 
         return AptRequirement(problem.relations)
     else:

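The buildlog.py rewrite above replaces most of the old isinstance chain with the `PROBLEM_CONVERTERS` table, dispatched on each problem's `kind` string. A small self-contained sketch of that table-driven idea; the classes here are stand-ins, not the real buildlog-consultant problems or ognibuild requirements:

```python
# Sketch of kind-keyed dispatch; FakeMissingCommand mimics a problem object
# with a `kind` attribute, and FakeRequirement stands in for a Requirement.
from typing import Callable, List, Optional, Tuple


class FakeRequirement:
    def __init__(self, name):
        self.name = name


class FakeMissingCommand:
    kind = 'command-missing'

    def __init__(self, command):
        self.command = command


# Real entries may also carry a third element (a minimum buildlog-consultant
# version); the dispatcher only looks at the first two.
CONVERTERS: List[Tuple[str, Callable]] = [
    ('command-missing', lambda p: FakeRequirement(p.command)),
]


def to_requirement(problem) -> Optional[FakeRequirement]:
    for kind, fn in CONVERTERS:
        if kind == problem.kind:
            return fn(problem)
    return None


print(to_requirement(FakeMissingCommand('make')).name)  # -> make
```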
(Diff for one file suppressed because it is too large.)

ognibuild/clean.py

@@ -15,16 +15,25 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+from functools import partial
+
+from .fix_build import iterate_with_build_fixers
 from .buildsystem import NoBuildToolsFound
+from .logs import NoLogManager
 
 
-def run_clean(session, buildsystems, resolver, fixers):
+def run_clean(session, buildsystems, resolver, fixers, log_manager=None):
     # Some things want to write to the user's home directory,
     # e.g. pip caches in ~/.cache
     session.create_home()
 
+    if log_manager is None:
+        log_manager = NoLogManager()
+
     for buildsystem in buildsystems:
-        buildsystem.clean(session, resolver, fixers)
+        iterate_with_build_fixers(
+            fixers, log_manager.wrap(
+                partial(buildsystem.clean, session, resolver)))
         return
 
     raise NoBuildToolsFound()

ognibuild/debian/__init__.py

@@ -29,7 +29,8 @@ def satisfy_build_deps(session: Session, tree, debian_path):
             deps.append(source[name].strip().strip(","))
         except KeyError:
             pass
-    for name in ["Build-Conflicts", "Build-Conflicts-Indep", "Build-Conflicts-Arch"]:
+    for name in ["Build-Conflicts", "Build-Conflicts-Indep",
+                 "Build-Conflicts-Arch"]:
         try:
             deps.append("Conflicts: " + source[name])
         except KeyError:

ognibuild/debian/apt.py

@@ -16,8 +16,9 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
+from debian.changelog import Version
 import logging
-from typing import List, Optional
+from typing import List, Optional, Iterable
 import os
 
 from buildlog_consultant.apt import (
@@ -37,7 +38,12 @@ from .file_search import (
 def run_apt(
     session: Session, args: List[str], prefix: Optional[List[str]] = None
 ) -> None:
-    """Run apt."""
+    """Run apt.
+
+    Raises:
+      DetailedFailure: When a known error occurs
+      UnidentifiedError: If an unknown error occurs
+    """
     if prefix is None:
         prefix = []
     args = prefix = ["apt", "-y"] + args
@@ -48,7 +54,7 @@ def run_apt(
     match, error = find_apt_get_failure(lines)
     if error is not None:
         raise DetailedFailure(retcode, args, error)
-    while lines and lines[-1] == "":
+    while lines and lines[-1].rstrip('\n') == "":
         lines.pop(-1)
     raise UnidentifiedError(retcode, args, lines, secondary=match)
@@ -93,13 +99,18 @@ class AptManager(object):
     def package_exists(self, package):
         return package in self.apt_cache
 
-    def package_versions(self, package):
-        return list(self.apt_cache[package].versions)
+    def package_versions(self, package: str) -> Optional[Iterable[Version]]:
+        try:
+            return list(self.apt_cache[package].versions)
+        except KeyError:
+            return None
 
-    def get_packages_for_paths(self, paths, regex=False, case_insensitive=False):
+    async def get_packages_for_paths(
+            self, paths, regex: bool = False, case_insensitive: bool = False):
         logging.debug("Searching for packages containing %r", paths)
-        return get_packages_for_paths(
-            paths, self.searchers(), regex=regex, case_insensitive=case_insensitive
+        return await get_packages_for_paths(
+            paths, self.searchers(), regex=regex,
+            case_insensitive=case_insensitive
         )
 
     def missing(self, packages):
ognibuild/debian/build.py

@@ -17,6 +17,7 @@
 __all__ = [
     "get_build_architecture",
+    "version_add_suffix",
     "add_dummy_changelog_entry",
     "build",
     "DetailedDebianBuildFailure",
@@ -24,20 +25,22 @@ __all__ = [
 ]
 
 from datetime import datetime
-from debmutate.changelog import ChangelogEditor
 import logging
 import os
 import re
 import shlex
 import subprocess
 import sys
+from typing import Optional, List, Tuple
 
-from debian.changelog import Changelog
-from debmutate.changelog import get_maintainer
+from debian.changelog import Changelog, Version, ChangeBlock
+from debmutate.changelog import get_maintainer, ChangelogEditor
+from debmutate.reformatting import GeneratedFile
 
 from breezy.mutabletree import MutableTree
 from breezy.plugins.debian.builder import BuildFailedError
 from breezy.tree import Tree
+from breezy.workingtree import WorkingTree
 
 from buildlog_consultant.sbuild import (
     worker_failure_from_sbuild_log,
@@ -45,10 +48,18 @@ from buildlog_consultant.sbuild import (
 from .. import DetailedFailure as DetailedFailure, UnidentifiedError
 
 
+BUILD_LOG_FILENAME = 'build.log'
+
 DEFAULT_BUILDER = "sbuild --no-clean-source"
 
 
+class ChangelogNotEditable(Exception):
+    """Changelog can not be edited."""
+
+    def __init__(self, path):
+        self.path = path
+
+
 class DetailedDebianBuildFailure(DetailedFailure):
 
     def __init__(self, stage, phase, retcode, argv, error, description):
@@ -60,7 +71,8 @@ class DetailedDebianBuildFailure(DetailedFailure):
 
 class UnidentifiedDebianBuildError(UnidentifiedError):
 
-    def __init__(self, stage, phase, retcode, argv, lines, description, secondary=None):
+    def __init__(self, stage, phase, retcode, argv, lines, description,
+                 secondary=None):
         super(UnidentifiedDebianBuildError, self).__init__(
             retcode, argv, lines, secondary)
         self.stage = stage
@@ -75,11 +87,12 @@ class MissingChangesFile(Exception):
         self.filename = filename
 
 
-def find_changes_files(path, package, version):
-    non_epoch_version = version.upstream_version
+def find_changes_files(path: str, package: str, version: Version):
+    non_epoch_version = version.upstream_version or ''
     if version.debian_version is not None:
         non_epoch_version += "-%s" % version.debian_version
-    c = re.compile('%s_%s_(.*).changes' % (re.escape(package), re.escape(non_epoch_version)))
+    c = re.compile('%s_%s_(.*).changes' % (
+        re.escape(package), re.escape(non_epoch_version)))
     for entry in os.scandir(path):
         m = c.match(entry.name)
         if m:
@@ -109,15 +122,32 @@ def control_files_in_root(tree: Tree, subpath: str) -> bool:
     return False
 
 
+def version_add_suffix(version: Version, suffix: str) -> Version:
+    version = Version(str(version))
+
+    def add_suffix(v):
+        m = re.fullmatch("(.*)(" + re.escape(suffix) + ")([0-9]+)", v)
+        if m:
+            return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1)
+        else:
+            return v + suffix + "1"
+
+    if version.debian_revision:
+        version.debian_revision = add_suffix(version.debian_revision)
+    else:
+        version.upstream_version = add_suffix(version.upstream_version)
+    return version
+
+
 def add_dummy_changelog_entry(
     tree: MutableTree,
     subpath: str,
     suffix: str,
     suite: str,
     message: str,
-    timestamp=None,
-    maintainer=None,
-):
+    timestamp: Optional[datetime] = None,
+    maintainer: Tuple[Optional[str], Optional[str]] = None,
+    allow_reformatting: bool = True,
+) -> Version:
     """Add a dummy changelog entry to a package.
 
     Args:
@@ -125,18 +155,10 @@ def add_dummy_changelog_entry(
         suffix: Suffix for the version
         suite: Debian suite
         message: Changelog message
+    Returns:
+      version of the newly added entry
     """
-
-    def add_suffix(v, suffix):
-        m = re.fullmatch(
-            "(.*)(" + re.escape(suffix) + ")([0-9]+)",
-            v,
-        )
-        if m:
-            return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1)
-        else:
-            return v + suffix + "1"
-
     if control_files_in_root(tree, subpath):
         path = os.path.join(subpath, "changelog")
     else:
@@ -145,38 +167,38 @@ def add_dummy_changelog_entry(
         maintainer = get_maintainer()
     if timestamp is None:
         timestamp = datetime.now()
-    with ChangelogEditor(tree.abspath(os.path.join(path))) as editor:
-        version = editor[0].version
-        if version.debian_revision:
-            version.debian_revision = add_suffix(version.debian_revision, suffix)
-        else:
-            version.upstream_version = add_suffix(version.upstream_version, suffix)
-        editor.auto_version(version, timestamp=timestamp)
-        editor.add_entry(
-            summary=[message], maintainer=maintainer, timestamp=timestamp, urgency='low')
-        editor[0].distributions = suite
+    try:
+        with ChangelogEditor(
+                tree.abspath(path),  # type: ignore
+                allow_reformatting=allow_reformatting) as editor:
+            version = version_add_suffix(editor[0].version, suffix)
+            editor.auto_version(version, timestamp=timestamp)
+            editor.add_entry(
+                summary=[message], maintainer=maintainer, timestamp=timestamp,
+                urgency='low')
+            editor[0].distributions = suite
+            return version
+    except GeneratedFile as e:
+        raise ChangelogNotEditable(path) from e
 
 
-def get_latest_changelog_entry(local_tree, subpath=""):
+def get_latest_changelog_entry(
+        local_tree: WorkingTree, subpath: str = "") -> ChangeBlock:
     if control_files_in_root(local_tree, subpath):
         path = os.path.join(subpath, "changelog")
     else:
         path = os.path.join(subpath, "debian", "changelog")
     with local_tree.get_file(path) as f:
         cl = Changelog(f, max_blocks=1)
-        return cl.package, cl.version
+        return cl[0]
 
 
-def build(
-    local_tree,
-    outf,
-    build_command=DEFAULT_BUILDER,
-    result_dir=None,
-    distribution=None,
-    subpath="",
-    source_date_epoch=None,
-    extra_repositories=None,
-):
+def _builddeb_command(
+        build_command: str = DEFAULT_BUILDER,
+        result_dir: Optional[str] = None,
+        apt_repository: Optional[str] = None,
+        apt_repository_key: Optional[str] = None,
+        extra_repositories: Optional[List[str]] = None):
     for repo in extra_repositories or []:
         build_command += " --extra-repository=" + shlex.quote(repo)
     args = [
@@ -187,8 +209,34 @@ def build(
         "--guess-upstream-branch-url",
         "--builder=%s" % build_command,
     ]
+    if apt_repository:
+        args.append("--apt-repository=%s" % apt_repository)
+    if apt_repository_key:
+        args.append("--apt-repository-key=%s" % apt_repository_key)
     if result_dir:
         args.append("--result-dir=%s" % result_dir)
+    return args
+
+
+def build(
+    local_tree: WorkingTree,
+    outf,
+    build_command: str = DEFAULT_BUILDER,
+    result_dir: Optional[str] = None,
+    distribution: Optional[str] = None,
+    subpath: str = "",
+    source_date_epoch: Optional[int] = None,
+    apt_repository: Optional[str] = None,
+    apt_repository_key: Optional[str] = None,
+    extra_repositories: Optional[List[str]] = None,
+):
+    args = _builddeb_command(
+        build_command=build_command,
+        result_dir=result_dir,
+        apt_repository=apt_repository,
+        apt_repository_key=apt_repository_key,
+        extra_repositories=extra_repositories)
+
     outf.write("Running %r\n" % (build_command,))
     outf.flush()
     env = dict(os.environ.items())
@@ -199,22 +247,25 @@ def build(
     logging.info("Building debian packages, running %r.", build_command)
     try:
         subprocess.check_call(
-            args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, env=env
+            args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf,
+            env=env
         )
     except subprocess.CalledProcessError:
         raise BuildFailedError()
 
 
 def build_once(
-    local_tree,
-    build_suite,
-    output_directory,
-    build_command,
-    subpath="",
-    source_date_epoch=None,
-    extra_repositories=None
+    local_tree: WorkingTree,
+    build_suite: str,
+    output_directory: str,
+    build_command: str,
+    subpath: str = "",
+    source_date_epoch: Optional[int] = None,
+    apt_repository: Optional[str] = None,
+    apt_repository_key: Optional[str] = None,
+    extra_repositories: Optional[List[str]] = None
 ):
-    build_log_path = os.path.join(output_directory, "build.log")
+    build_log_path = os.path.join(output_directory, BUILD_LOG_FILENAME)
     logging.debug("Writing build log to %s", build_log_path)
     try:
         with open(build_log_path, "w") as f:
@@ -226,6 +277,8 @@ def build_once(
                 distribution=build_suite,
                 subpath=subpath,
                 source_date_epoch=source_date_epoch,
+                apt_repository=apt_repository,
+                apt_repository_key=apt_repository_key,
                 extra_repositories=extra_repositories,
             )
     except BuildFailedError as e:
@@ -247,27 +300,39 @@ def build_once(
             [], sbuild_failure.description)
 
     cl_entry = get_latest_changelog_entry(local_tree, subpath)
+    if cl_entry.package is None:
+        raise Exception('missing package in changelog entry')
     changes_names = []
-    for kind, entry in find_changes_files(output_directory, cl_entry.package, cl_entry.version):
+    for kind, entry in find_changes_files(
+            output_directory, cl_entry.package, cl_entry.version):
         changes_names.append((entry.name))
     return (changes_names, cl_entry)
 
 
+class GitBuildpackageMissing(Exception):
+    """git-buildpackage is not installed"""
+
+
 def gbp_dch(path):
+    try:
         subprocess.check_call(["gbp", "dch", "--ignore-branch"], cwd=path)
+    except FileNotFoundError:
+        raise GitBuildpackageMissing()
 
 
 def attempt_build(
-    local_tree,
-    suffix,
-    build_suite,
-    output_directory,
-    build_command,
-    build_changelog_entry=None,
-    subpath="",
-    source_date_epoch=None,
-    run_gbp_dch=False,
-    extra_repositories=None
+    local_tree: WorkingTree,
+    suffix: str,
+    build_suite: str,
+    output_directory: str,
+    build_command: str,
+    build_changelog_entry: Optional[str] = None,
+    subpath: str = "",
+    source_date_epoch: Optional[int] = None,
+    run_gbp_dch: bool = False,
+    apt_repository: Optional[str] = None,
+    apt_repository_key: Optional[str] = None,
+    extra_repositories: Optional[List[str]] = None
 ):
     """Attempt a build, with a custom distribution set.
 
@@ -282,7 +347,7 @@ def attempt_build(
         source_date_epoch: Source date epoch to set
     Returns: Tuple with (changes_name, cl_version)
     """
-    if run_gbp_dch and not subpath:
+    if run_gbp_dch and not subpath and hasattr(local_tree.controldir, '_git'):
         gbp_dch(local_tree.abspath(subpath))
     if build_changelog_entry is not None:
         add_dummy_changelog_entry(
@@ -295,5 +360,7 @@ def attempt_build(
             build_command,
             subpath,
             source_date_epoch=source_date_epoch,
+            apt_repository=apt_repository,
+            apt_repository_key=apt_repository_key,
             extra_repositories=extra_repositories,
         )

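The `version_add_suffix` helper factored out above either appends `<suffix>1` to the Debian revision (or to the upstream version for native packages) or, if a trailing `<suffix><n>` counter is already present, bumps it. A short usage illustration, assuming this file is ognibuild/debian/build.py and that ognibuild's Debian extras (python3-debian, the Breezy Debian plugin) are installed; the version strings are made-up examples:

```python
# Illustration of version_add_suffix behaviour; outputs shown in comments.
from debian.changelog import Version
from ognibuild.debian.build import version_add_suffix

print(version_add_suffix(Version("1.0-1"), "~jan+lint"))
# -> 1.0-1~jan+lint1
print(version_add_suffix(Version("1.0-1~jan+lint1"), "~jan+lint"))
# -> 1.0-1~jan+lint2 (existing counter is incremented)
print(version_add_suffix(Version("2.3"), "~jan+lint"))
# -> 2.3~jan+lint1 (native package: upstream version gets the suffix)
```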
ognibuild/debian/build_deps.py

@@ -18,43 +18,46 @@
 
 """Tie breaking by build deps."""
 
+from debian.deb822 import PkgRelation
 import logging
 
+from breezy.plugins.debian.apt_repo import LocalApt, NoAptSources
+
 
 class BuildDependencyTieBreaker(object):
-    def __init__(self, rootdir):
-        self.rootdir = rootdir
+    def __init__(self, apt):
+        self.apt = apt
         self._counts = None
 
     def __repr__(self):
-        return "%s(%r)" % (type(self).__name__, self.rootdir)
+        return "%s(%r)" % (type(self).__name__, self.apt)
 
     @classmethod
     def from_session(cls, session):
-        return cls(session.location)
+        return cls(LocalApt(session.location))
 
     def _count(self):
         counts = {}
-        import apt_pkg
-
-        apt_pkg.init()
-        apt_pkg.config.set("Dir", self.rootdir)
-        apt_cache = apt_pkg.SourceRecords()
-        apt_cache.restart()
-        while apt_cache.step():
-            try:
-                for d in apt_cache.build_depends.values():
-                    for o in d:
-                        for p in o:
-                            counts.setdefault(p[0], 0)
-                            counts[p[0]] += 1
-            except AttributeError:
-                pass
+        with self.apt:
+            for source in self.apt.iter_sources():
+                for field in ['Build-Depends', 'Build-Depends-Indep',
+                              'Build-Depends-Arch']:
+                    for r in PkgRelation.parse_relations(
+                            source.get(field, '')):
+                        for p in r:
+                            counts.setdefault(p['name'], 0)
+                            counts[p['name']] += 1
         return counts
 
     def __call__(self, reqs):
         if self._counts is None:
+            try:
                 self._counts = self._count()
+            except NoAptSources:
+                logging.warning(
+                    "No 'deb-src' in sources.list, "
+                    "unable to break build-depends")
+                return None
         by_count = {}
         for req in reqs:
             try:
@@ -80,5 +83,5 @@ if __name__ == "__main__":
     parser.add_argument("req", nargs="+")
     args = parser.parse_args()
     reqs = [AptRequirement.from_str(req) for req in args.req]
-    tie_breaker = BuildDependencyTieBreaker("/")
+    tie_breaker = BuildDependencyTieBreaker(LocalApt())
     print(tie_breaker(reqs))

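The tie-breaker above now counts build-dependencies by parsing deb-src stanzas with `debian.deb822.PkgRelation` instead of walking `apt_pkg.SourceRecords`. The counting core looks roughly like this when exercised directly with python3-debian; the stanza is a made-up example, not real apt output:

```python
# Minimal illustration of the counting loop used in _count() above.
from debian.deb822 import PkgRelation

source = {
    'Build-Depends': 'debhelper-compat (= 13), python3-all, python3-setuptools',
    'Build-Depends-Indep': 'python3-sphinx',
}

counts = {}
for field in ['Build-Depends', 'Build-Depends-Indep', 'Build-Depends-Arch']:
    # Each relation is a list of alternatives; count every package name seen.
    for relation in PkgRelation.parse_relations(source.get(field, '')):
        for alternative in relation:
            counts.setdefault(alternative['name'], 0)
            counts[alternative['name']] += 1

print(counts)
# e.g. {'debhelper-compat': 1, 'python3-all': 1, 'python3-setuptools': 1,
#       'python3-sphinx': 1}
```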
ognibuild/debian/file_search.py

@ -17,12 +17,13 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import apt_pkg import apt_pkg
import asyncio
from datetime import datetime from datetime import datetime
from debian.deb822 import Release from debian.deb822 import Release
import os import os
import re import re
import subprocess import subprocess
from typing import Iterator, List from typing import List, AsyncIterator
import logging import logging
@ -32,11 +33,15 @@ from ..session import Session
class FileSearcher(object): class FileSearcher(object):
def search_files( def search_files(
self, path: str, regex: bool = False, case_insensitive: bool = False self, path: str, regex: bool = False,
) -> Iterator[str]: case_insensitive: bool = False) -> AsyncIterator[str]:
raise NotImplementedError(self.search_files) raise NotImplementedError(self.search_files)
class AptFileAccessError(Exception):
"""Apt file access error."""
class ContentsFileNotFound(Exception): class ContentsFileNotFound(Exception):
"""The contents file was not found.""" """The contents file was not found."""
@ -71,7 +76,8 @@ def contents_urls_from_sources_entry(source, arches, load_url):
response = load_url(release_url) response = load_url(release_url)
except FileNotFoundError as e: except FileNotFoundError as e:
logging.warning( logging.warning(
"Unable to download %s or %s: %s", inrelease_url, release_url, e "Unable to download %s or %s: %s", inrelease_url,
release_url, e
) )
return return
@ -118,7 +124,7 @@ def _unwrap(f, ext):
def load_direct_url(url): def load_direct_url(url):
from urllib.error import HTTPError from urllib.error import HTTPError, URLError
from urllib.request import urlopen, Request from urllib.request import urlopen, Request
for ext in [".xz", ".gz", ""]: for ext in [".xz", ".gz", ""]:
@ -128,7 +134,11 @@ def load_direct_url(url):
except HTTPError as e: except HTTPError as e:
if e.status == 404: if e.status == 404:
continue continue
raise raise AptFileAccessError(
'Unable to access apt URL %s: %s' % (url + ext, e))
except URLError as e:
raise AptFileAccessError(
'Unable to access apt URL %s: %s' % (url + ext, e))
break break
else: else:
raise FileNotFoundError(url) raise FileNotFoundError(url)
@ -187,7 +197,7 @@ class AptFileFileSearcher(FileSearcher):
    @classmethod
    def from_session(cls, session):
        logging.debug('Using apt-file to search apt contents')
        if not os.path.exists(session.external_path(cls.CACHE_IS_EMPTY_PATH)):
            from .apt import AptManager
            AptManager.from_session(session).install(['apt-file'])
@ -195,7 +205,7 @@ class AptFileFileSearcher(FileSearcher):
            session.check_call(['apt-file', 'update'], user='root')
        return cls(session)

    async def search_files(self, path, regex=False, case_insensitive=False):
        args = []
        if regex:
            args.append('-x')
@ -204,15 +214,17 @@ class AptFileFileSearcher(FileSearcher):
        if case_insensitive:
            args.append('-i')
        args.append(path)
        process = await asyncio.create_subprocess_exec(
            '/usr/bin/apt-file', 'search', *args,
            stdout=asyncio.subprocess.PIPE)
        (output, error) = await process.communicate(input=None)
        if process.returncode == 1:
            # No results
            return
        elif process.returncode == 3:
            raise Exception('apt-file cache is empty')
        elif process.returncode != 0:
            raise Exception("unexpected return code %d" % process.returncode)

        for line in output.splitlines(False):
            pkg, path = line.split(b': ')
@ -253,7 +265,8 @@ class RemoteContentsFileSearcher(FileSearcher):
            return load_url_with_cache(url, cache_dirs)

        urls = list(
            contents_urls_from_sourceslist(
                sl, get_build_architecture(), load_url)
        )
        self._load_urls(urls, cache_dirs, load_url)
@ -277,8 +290,8 @@ class RemoteContentsFileSearcher(FileSearcher):
            return load_url_with_cache(url, cache_dirs)

        urls = list(
            contents_urls_from_sourceslist(
                sl, get_build_architecture(), load_url))
        self._load_urls(urls, cache_dirs, load_url)

    def _load_urls(self, urls, cache_dirs, load_url):
@ -286,13 +299,16 @@ class RemoteContentsFileSearcher(FileSearcher):
            try:
                f = load_url(url)
                self.load_file(f, url)
            except ConnectionResetError:
                logging.warning("Connection reset error retrieving %s", url)
                # TODO(jelmer): Retry?
            except ContentsFileNotFound:
                logging.warning("Unable to fetch contents file %s", url)

    def __setitem__(self, path, package):
        self._db[path] = package

    async def search_files(self, path, regex=False, case_insensitive=False):
        path = path.lstrip("/").encode("utf-8", "surrogateescape")
        if case_insensitive and not regex:
            regex = True
@ -338,9 +354,9 @@ class GeneratedFileSearcher(FileSearcher):
            (path, pkg) = line.strip().split(None, 1)
            self._db.append(path, pkg)

    async def search_files(
            self, path: str, regex: bool = False,
            case_insensitive: bool = False):
        for p, pkg in self._db:
            if regex:
                flags = 0
@ -371,16 +387,17 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
)


async def get_packages_for_paths(
    paths: List[str],
    searchers: List[FileSearcher],
    regex: bool = False,
    case_insensitive: bool = False,
) -> List[str]:
    candidates: List[str] = list()
    # TODO(jelmer): Combine these, perhaps by creating one gigantic regex?
    for path in paths:
        for searcher in searchers:
            async for pkg in searcher.search_files(
                path, regex=regex, case_insensitive=case_insensitive
            ):
                if pkg not in candidates:
@ -393,8 +410,10 @@ def main(argv):
    from ..session.plain import PlainSession

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "path", help="Path to search for.", type=str, nargs="*")
    parser.add_argument(
        "--regex", "-x", help="Search for regex.", action="store_true")
    parser.add_argument("--debug", action="store_true")
    args = parser.parse_args()
@ -403,11 +422,12 @@ def main(argv):
    else:
        logging.basicConfig(level=logging.INFO)

    with PlainSession() as session:
        main_searcher = get_apt_contents_file_searcher(session)
        searchers = [main_searcher, GENERATED_FILE_SEARCHER]

        packages = asyncio.run(get_packages_for_paths(
            args.path, searchers=searchers, regex=args.regex))
        for package in packages:
            print(package)
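Since search_files() is now an async generator, callers have to drive it from an event loop, as main() above does. A minimal sketch (the module path for these helpers is an assumption; the rest mirrors the code above):

import asyncio

from ognibuild.session.plain import PlainSession
from ognibuild.debian.file_search import (  # assumed module path
    GENERATED_FILE_SEARCHER,
    get_apt_contents_file_searcher,
    get_packages_for_paths,
)

with PlainSession() as session:
    searcher = get_apt_contents_file_searcher(session)
    # get_packages_for_paths() is a coroutine; asyncio.run() drives it
    # and returns the list of candidate packages.
    packages = asyncio.run(get_packages_for_paths(
        ["/usr/bin/make"], searchers=[searcher, GENERATED_FILE_SEARCHER],
        regex=False))
    print(packages)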

@ -22,10 +22,10 @@ __all__ = [
from functools import partial
import logging
import os
import shutil
import sys
import time
from typing import List, Set, Optional, Type, Tuple

from debian.deb822 import (
    Deb822,
@ -34,6 +34,8 @@ from debian.deb822 import (

from breezy.commit import PointlessCommit, NullCommitReporter
from breezy.tree import Tree
from breezy.workingtree import WorkingTree
from debmutate.changelog import ChangelogEditor
from debmutate.control import (
    ensure_relation,
@ -50,49 +52,7 @@ from debmutate.reformatting import (
GeneratedFile, GeneratedFile,
) )
try:
from breezy.workspace import reset_tree from breezy.workspace import reset_tree
except ImportError: # breezy < 3.2
def delete_items(deletables, dry_run=False):
"""Delete files in the deletables iterable"""
import errno
import shutil
def onerror(function, path, excinfo):
"""Show warning for errors seen by rmtree."""
# Handle only permission error while removing files.
# Other errors are re-raised.
if function is not os.remove or excinfo[1].errno != errno.EACCES:
raise
logging.warning("unable to remove %s" % path)
for path, subp in deletables:
if os.path.isdir(path):
shutil.rmtree(path, onerror=onerror)
else:
try:
os.unlink(path)
except OSError as e:
# We handle only permission error here
if e.errno != errno.EACCES:
raise e
logging.warning('unable to remove "%s": %s.', path, e.strerror)
def reset_tree(local_tree, subpath=""):
from breezy.transform import revert
from breezy.clean_tree import iter_deletables
revert(
local_tree,
local_tree.branch.basis_tree(),
[subpath] if subpath not in (".", "") else None,
)
deletables = list(
iter_deletables(local_tree, unknown=True, ignored=False, detritus=False)
)
delete_items(deletables)
from debmutate._rules import ( from debmutate._rules import (
dh_invoke_add_with, dh_invoke_add_with,
@ -113,18 +73,21 @@ from buildlog_consultant.common import (
)
from buildlog_consultant.sbuild import (
    DebcargoUnacceptablePredicate,
    DebcargoUnacceptableComparator,
)

from .build import (
    DetailedDebianBuildFailure,
    UnidentifiedDebianBuildError,
)

from ..logs import rotate_logfile
from ..buildlog import problem_to_upstream_requirement
from ..fix_build import BuildFixer, resolve_error
from ..resolver.apt import (
    AptRequirement,
)
from .apt import AptManager
from .build import attempt_build, DEFAULT_BUILDER, BUILD_LOG_FILENAME


DEFAULT_MAX_ITERATIONS = 10
@ -150,7 +113,9 @@ class DebianPackagingContext(object):
def abspath(self, *parts): def abspath(self, *parts):
return self.tree.abspath(os.path.join(self.subpath, *parts)) return self.tree.abspath(os.path.join(self.subpath, *parts))
def commit(self, summary: str, update_changelog: Optional[bool] = None) -> bool: def commit(
self, summary: str,
update_changelog: Optional[bool] = None) -> bool:
if update_changelog is None: if update_changelog is None:
update_changelog = self.update_changelog update_changelog = self.update_changelog
with self.tree.lock_write(): with self.tree.lock_write():
@ -214,6 +179,11 @@ def add_dependency(context, phase, requirement: AptRequirement):
return add_test_dependency(context, phase[1], requirement) return add_test_dependency(context, phase[1], requirement)
elif phase[0] == "build": elif phase[0] == "build":
return add_build_dependency(context, requirement) return add_build_dependency(context, requirement)
elif phase[0] == "buildenv":
# TODO(jelmer): Actually, we probably just want to install it on the
# host system?
logging.warning("Unknown phase %r", phase)
return False
else: else:
logging.warning("Unknown phase %r", phase) logging.warning("Unknown phase %r", phase)
return False return False
@ -231,16 +201,19 @@ def add_build_dependency(context, requirement: AptRequirement):
raise CircularDependency(binary["Package"]) raise CircularDependency(binary["Package"])
for rel in requirement.relations: for rel in requirement.relations:
updater.source["Build-Depends"] = ensure_relation( updater.source["Build-Depends"] = ensure_relation(
updater.source.get("Build-Depends", ""), PkgRelation.str([rel]) updater.source.get("Build-Depends", ""),
PkgRelation.str([rel])
) )
except FormattingUnpreservable as e: except FormattingUnpreservable as e:
logging.info("Unable to edit %s in a way that preserves formatting.", e.path) logging.info(
"Unable to edit %s in a way that preserves formatting.", e.path)
return False return False
desc = requirement.pkg_relation_str() desc = requirement.pkg_relation_str()
if not updater.changed: if not updater.changed:
logging.info("Giving up; dependency %s was already present.", desc) logging.info(
"Giving up; build dependency %s was already present.", desc)
return False return False
logging.info("Adding build dependency: %s", desc) logging.info("Adding build dependency: %s", desc)
@ -272,13 +245,18 @@ def add_test_dependency(context, testname, requirement):
control.get("Depends", ""), PkgRelation.str([rel]) control.get("Depends", ""), PkgRelation.str([rel])
) )
except FormattingUnpreservable as e: except FormattingUnpreservable as e:
logging.info("Unable to edit %s in a way that preserves formatting.", e.path) logging.info(
return False "Unable to edit %s in a way that preserves formatting.", e.path)
if not updater.changed:
return False return False
desc = requirement.pkg_relation_str() desc = requirement.pkg_relation_str()
if not updater.changed:
logging.info(
"Giving up; dependency %s for test %s was already present.",
desc, testname)
return False
logging.info("Adding dependency to test %s: %s", testname, desc) logging.info("Adding dependency to test %s: %s", testname, desc)
return context.commit( return context.commit(
"Add missing dependency for test %s on %s." % (testname, desc), "Add missing dependency for test %s on %s." % (testname, desc),
@ -288,7 +266,8 @@ def add_test_dependency(context, testname, requirement):
def targeted_python_versions(tree: Tree, subpath: str) -> List[str]: def targeted_python_versions(tree: Tree, subpath: str) -> List[str]:
with tree.get_file(os.path.join(subpath, "debian/control")) as f: with tree.get_file(os.path.join(subpath, "debian/control")) as f:
control = Deb822(f) control = Deb822(f)
build_depends = PkgRelation.parse_relations(control.get("Build-Depends", "")) build_depends = PkgRelation.parse_relations(
control.get("Build-Depends", ""))
all_build_deps: Set[str] = set() all_build_deps: Set[str] = set()
for or_deps in build_depends: for or_deps in build_depends:
all_build_deps.update(or_dep["name"] for or_dep in or_deps) all_build_deps.update(or_dep["name"] for or_dep in or_deps)
@ -312,7 +291,7 @@ def python_tie_breaker(tree, subpath, reqs):
return True return True
if pkg.startswith("lib%s-" % python_version): if pkg.startswith("lib%s-" % python_version):
return True return True
if re.match(r'lib%s\.[0-9]-dev' % python_version, pkg): if pkg == r'lib%s-dev' % python_version:
return True return True
return False return False
@ -337,7 +316,8 @@ def retry_apt_failure(error, phase, apt, context):
def enable_dh_autoreconf(context, phase): def enable_dh_autoreconf(context, phase):
# Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by # Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by
# default. # default.
debhelper_compat_version = get_debhelper_compat_level(context.tree.abspath(".")) debhelper_compat_version = get_debhelper_compat_level(
context.tree.abspath("."))
if debhelper_compat_version is not None and debhelper_compat_version < 10: if debhelper_compat_version is not None and debhelper_compat_version < 10:
def add_with_autoreconf(line, target): def add_with_autoreconf(line, target):
@ -356,9 +336,8 @@ def enable_dh_autoreconf(context, phase):
def fix_missing_configure(error, phase, context): def fix_missing_configure(error, phase, context):
if not context.tree.has_filename("configure.ac") and not context.tree.has_filename( if (not context.tree.has_filename("configure.ac")
"configure.in" and not context.tree.has_filename("configure.in")):
):
return False return False
return enable_dh_autoreconf(context, phase) return enable_dh_autoreconf(context, phase)
@ -433,7 +412,7 @@ def fix_missing_makefile_pl(error, phase, context):
return False return False
def coerce_unacceptable_predicate(error, phase, context): def debcargo_coerce_unacceptable_prerelease(error, phase, context):
from debmutate.debcargo import DebcargoEditor from debmutate.debcargo import DebcargoEditor
with DebcargoEditor(context.abspath('debian/debcargo.toml')) as editor: with DebcargoEditor(context.abspath('debian/debcargo.toml')) as editor:
editor['allow_prerelease_deps'] = True editor['allow_prerelease_deps'] = True
@ -461,7 +440,8 @@ class SimpleBuildFixer(BuildFixer):
class DependencyBuildFixer(BuildFixer): class DependencyBuildFixer(BuildFixer):
def __init__(self, packaging_context, apt_resolver, problem_cls: Type[Problem], fn): def __init__(self, packaging_context, apt_resolver,
problem_cls: Type[Problem], fn):
self.context = packaging_context self.context = packaging_context
self.apt_resolver = apt_resolver self.apt_resolver = apt_resolver
self._problem_cls = problem_cls self._problem_cls = problem_cls
@ -481,31 +461,46 @@ class DependencyBuildFixer(BuildFixer):
        return self._fn(problem, phase, self.apt_resolver, self.context)


def versioned_package_fixers(session, packaging_context, apt: AptManager):
    return [
        PgBuildExtOutOfDateControlFixer(packaging_context, session, apt),
        SimpleBuildFixer(
            packaging_context, MissingConfigure, fix_missing_configure),
        SimpleBuildFixer(
            packaging_context, MissingAutomakeInput, fix_missing_automake_input
        ),
        SimpleBuildFixer(
            packaging_context, MissingConfigStatusInput,
            fix_missing_config_status_input
        ),
        SimpleBuildFixer(
            packaging_context, MissingPerlFile, fix_missing_makefile_pl),
        SimpleBuildFixer(
            packaging_context, DebcargoUnacceptablePredicate,
            debcargo_coerce_unacceptable_prerelease),
        SimpleBuildFixer(
            packaging_context, DebcargoUnacceptableComparator,
            debcargo_coerce_unacceptable_prerelease),
    ]


def apt_fixers(apt: AptManager, packaging_context,
               dep_server_url: Optional[str] = None) -> List[BuildFixer]:
    from ..resolver.apt import AptResolver
    from .udd import popcon_tie_breaker
    from .build_deps import BuildDependencyTieBreaker

    apt_tie_breakers = [
        partial(python_tie_breaker, packaging_context.tree,
                packaging_context.subpath),
        BuildDependencyTieBreaker.from_session(apt.session),
        popcon_tie_breaker,
    ]
    resolver: AptResolver
    if dep_server_url:
        from ..resolver.dep_server import DepServerAptResolver
        resolver = DepServerAptResolver(apt, dep_server_url, apt_tie_breakers)
    else:
        resolver = AptResolver(apt, apt_tie_breakers)
    return [
        DependencyBuildFixer(
@ -515,38 +510,49 @@ def apt_fixers(apt, packaging_context) -> List[BuildFixer]:
    ]


def default_fixers(
        local_tree: WorkingTree,
        subpath: str, apt: AptManager,
        committer: Optional[str] = None,
        update_changelog: Optional[bool] = None,
        dep_server_url: Optional[str] = None):
    packaging_context = DebianPackagingContext(
        local_tree, subpath, committer, update_changelog,
        commit_reporter=NullCommitReporter()
    )
    return (versioned_package_fixers(apt.session, packaging_context, apt)
            + apt_fixers(apt, packaging_context, dep_server_url))


def build_incrementally(
    local_tree: WorkingTree,
    apt: AptManager,
    suffix: str,
    build_suite: str,
    output_directory: str,
    build_command: str,
    build_changelog_entry,
    committer: Optional[str] = None,
    max_iterations: int = DEFAULT_MAX_ITERATIONS,
    subpath: str = "",
    source_date_epoch=None,
    update_changelog: bool = True,
    apt_repository: Optional[str] = None,
    apt_repository_key: Optional[str] = None,
    extra_repositories: Optional[List[str]] = None,
    fixers: Optional[List[BuildFixer]] = None,
    run_gbp_dch: Optional[bool] = None,
    dep_server_url: Optional[str] = None,
):
    fixed_errors: List[Tuple[Problem, str]] = []
    if fixers is None:
        fixers = default_fixers(
            local_tree, subpath, apt, committer=committer,
            update_changelog=update_changelog,
            dep_server_url=dep_server_url)
    logging.info("Using fixers: %r", fixers)
    if run_gbp_dch is None:
        run_gbp_dch = (update_changelog is False)
    while True:
        try:
            return attempt_build(
@ -558,7 +564,9 @@ def build_incrementally(
build_changelog_entry, build_changelog_entry,
subpath=subpath, subpath=subpath,
source_date_epoch=source_date_epoch, source_date_epoch=source_date_epoch,
run_gbp_dch=(update_changelog is False), run_gbp_dch=run_gbp_dch,
apt_repository=apt_repository,
apt_repository_key=apt_repository_key,
extra_repositories=extra_repositories, extra_repositories=extra_repositories,
) )
except UnidentifiedDebianBuildError: except UnidentifiedDebianBuildError:
@ -569,15 +577,19 @@ def build_incrementally(
logging.info("No relevant context, not making any changes.") logging.info("No relevant context, not making any changes.")
raise raise
if (e.error, e.phase) in fixed_errors: if (e.error, e.phase) in fixed_errors:
logging.warning("Error was still not fixed on second try. Giving up.") logging.warning(
"Error was still not fixed on second try. Giving up.")
raise raise
if max_iterations is not None and len(fixed_errors) > max_iterations: if (max_iterations is not None
logging.warning("Last fix did not address the issue. Giving up.") and len(fixed_errors) > max_iterations):
logging.warning(
"Last fix did not address the issue. Giving up.")
raise raise
reset_tree(local_tree, subpath=subpath) reset_tree(local_tree, subpath=subpath)
try: try:
if not resolve_error(e.error, e.phase, fixers): if not resolve_error(e.error, e.phase, fixers):
logging.warning("Failed to resolve error %r. Giving up.", e.error) logging.warning(
"Failed to resolve error %r. Giving up.", e.error)
raise raise
except GeneratedFile: except GeneratedFile:
logging.warning( logging.warning(
@ -588,71 +600,71 @@ def build_incrementally(
raise e raise e
except CircularDependency: except CircularDependency:
logging.warning( logging.warning(
"Unable to fix %r; it would introduce a circular " "dependency.", "Unable to fix %r; it would introduce a circular "
"dependency.",
e.error, e.error,
) )
raise e raise e
fixed_errors.append((e.error, e.phase)) fixed_errors.append((e.error, e.phase))
if os.path.exists(os.path.join(output_directory, "build.log")): rotate_logfile(os.path.join(output_directory, BUILD_LOG_FILENAME))
i = 1
while os.path.exists(
os.path.join(output_directory, "build.log.%d" % i)
):
i += 1
target_path = os.path.join(output_directory, "build.log.%d" % i)
os.rename(os.path.join(output_directory, "build.log"), target_path)
logging.debug("Storing build log at %s", target_path)
def main(argv=None): def main(argv=None):
import argparse import argparse
parser = argparse.ArgumentParser("ognibuild.debian.fix_build") parser = argparse.ArgumentParser("ognibuild.debian.fix_build")
parser.add_argument( modifications = parser.add_argument_group('Modifications')
"--suffix", type=str, help="Suffix to use for test builds.", default="fixbuild1" modifications.add_argument(
"--suffix", type=str, help="Suffix to use for test builds.",
default="fixbuild1"
) )
parser.add_argument( modifications.add_argument(
"--suite", type=str, help="Suite to target.", default="unstable" "--suite", type=str, help="Suite to target.", default="unstable"
) )
parser.add_argument( modifications.add_argument(
"--output-directory", type=str, help="Output directory.", default=None "--committer", type=str, help="Committer string (name and email)",
default=None
) )
parser.add_argument( modifications.add_argument(
"--committer", type=str, help="Committer string (name and email)", default=None
)
parser.add_argument(
"--build-command",
type=str,
help="Build command",
default=(DEFAULT_BUILDER + " -A -s -v"),
)
parser.add_argument(
"--no-update-changelog", "--no-update-changelog",
action="store_false", action="store_false",
default=None, default=None,
dest="update_changelog", dest="update_changelog",
help="do not update the changelog", help="do not update the changelog",
) )
parser.add_argument( modifications.add_argument(
'--max-iterations',
type=int,
default=DEFAULT_MAX_ITERATIONS,
help='Maximum number of issues to attempt to fix before giving up.')
parser.add_argument(
"--update-changelog", "--update-changelog",
action="store_true", action="store_true",
dest="update_changelog", dest="update_changelog",
help="force updating of the changelog", help="force updating of the changelog",
default=None, default=None,
) )
parser.add_argument("--schroot", type=str, help="chroot to use.") build_behaviour = parser.add_argument_group('Build Behaviour')
build_behaviour.add_argument(
"--output-directory", type=str, help="Output directory.", default=None
)
build_behaviour.add_argument(
"--build-command",
type=str,
help="Build command",
default=(DEFAULT_BUILDER + " -A -s -v"),
)
build_behaviour.add_argument(
'--max-iterations',
type=int,
default=DEFAULT_MAX_ITERATIONS,
help='Maximum number of issues to attempt to fix before giving up.')
build_behaviour.add_argument("--schroot", type=str, help="chroot to use.")
parser.add_argument(
"--dep-server-url", type=str,
help="ognibuild dep server to use",
default=os.environ.get('OGNIBUILD_DEPS'))
parser.add_argument("--verbose", action="store_true", help="Be verbose") parser.add_argument("--verbose", action="store_true", help="Be verbose")
args = parser.parse_args() args = parser.parse_args()
from breezy.workingtree import WorkingTree
import breezy.git # noqa: F401 import breezy.git # noqa: F401
import breezy.bzr # noqa: F401 import breezy.bzr # noqa: F401
from .apt import AptManager
from ..session.plain import PlainSession from ..session.plain import PlainSession
from ..session.schroot import SchrootSession from ..session.schroot import SchrootSession
import tempfile import tempfile
@ -669,6 +681,10 @@ def main(argv=None):
logging.info("Using output directory %s", output_directory) logging.info("Using output directory %s", output_directory)
else: else:
output_directory = args.output_directory output_directory = args.output_directory
if not os.path.isdir(output_directory):
parser.error(
'output directory %s is not a directory'
% output_directory)
tree = WorkingTree.open(".") tree = WorkingTree.open(".")
if args.schroot: if args.schroot:
@ -692,6 +708,7 @@ def main(argv=None):
committer=args.committer, committer=args.committer,
update_changelog=args.update_changelog, update_changelog=args.update_changelog,
max_iterations=args.max_iterations, max_iterations=args.max_iterations,
dep_server_url=args.dep_server_url,
) )
except DetailedDebianBuildFailure as e: except DetailedDebianBuildFailure as e:
if e.phase is None: if e.phase is None:
@ -701,6 +718,21 @@ def main(argv=None):
else: else:
phase = "%s (%s)" % (e.phase[0], e.phase[1]) phase = "%s (%s)" % (e.phase[0], e.phase[1])
logging.fatal("Error during %s: %s", phase, e.error) logging.fatal("Error during %s: %s", phase, e.error)
if not args.output_directory:
xdg_cache_dir = os.environ.get(
'XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
buildlogs_dir = os.path.join(
xdg_cache_dir, 'ognibuild', 'buildlogs')
os.makedirs(buildlogs_dir, exist_ok=True)
target_log_file = os.path.join(
buildlogs_dir,
'%s-%s.log' % (
os.path.basename(getattr(tree, 'basedir', 'build')),
time.strftime('%Y-%m-%d_%H%M%s')))
shutil.copy(
os.path.join(output_directory, 'build.log'),
target_log_file)
logging.info('Build log available in %s', target_log_file)
return 1 return 1
except UnidentifiedDebianBuildError as e: except UnidentifiedDebianBuildError as e:
if e.phase is None: if e.phase is None:

@ -35,7 +35,8 @@ class UDD(object):
    def get_most_popular(self, packages):
        cursor = self._conn.cursor()
        cursor.execute(
            "SELECT package FROM popcon "
            "WHERE package IN %s ORDER BY insts DESC LIMIT 1",
            (tuple(packages),),
        )
        return cursor.fetchone()[0]
@ -54,7 +55,8 @@ def popcon_tie_breaker(candidates):
    names = {list(c.package_names())[0]: c for c in candidates}
    winner = udd.get_most_popular(list(names.keys()))
    if winner is None:
        logging.warning(
            "No relevant popcon information found, not ranking by popcon")
        return None
    logging.info("Picked winner using popcon")
    return names[winner]

ognibuild/dep_server.py (new file)
@ -0,0 +1,126 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import sys
from aiohttp import web
from aiohttp_openmetrics import setup_metrics
from . import Requirement, UnknownRequirementFamily
from .debian.apt import AptManager
from .resolver.apt import resolve_requirement_apt
SUPPORTED_RELEASES = ['unstable', 'sid']
routes = web.RouteTableDef()
@routes.get('/health', name='health')
async def handle_health(request):
return web.Response(text='ok')
@routes.get('/families', name='families')
async def handle_families(request):
return web.json_response(list(Requirement._JSON_DESERIALIZERS.keys()))
@routes.post('/resolve-apt', name='resolve-apt')
async def handle_apt(request):
js = await request.json()
try:
req_js = js['requirement']
except KeyError:
raise web.HTTPBadRequest(text="json missing 'requirement' key")
release = js.get('release')
if release and release not in SUPPORTED_RELEASES:
return web.json_response(
{"reason": "unsupported-release", "release": release},
status=404)
try:
req = Requirement.from_json(req_js)
except UnknownRequirementFamily as e:
return web.json_response(
{"reason": "family-unknown", "family": e.family}, status=404)
apt_reqs = await resolve_requirement_apt(request.app['apt_mgr'], req)
return web.json_response([r.pkg_relation_str() for r in apt_reqs])
@routes.get('/resolve-apt/{release}/{family}:{arg}', name='resolve-apt-simple')
async def handle_apt_simple(request):
if request.match_info['release'] not in SUPPORTED_RELEASES:
return web.json_response(
{"reason": "unsupported-release",
"release": request.match_info['release']},
status=404)
try:
req = Requirement.from_json(
(request.match_info['family'], request.match_info['arg']))
except UnknownRequirementFamily as e:
return web.json_response(
{"reason": "family-unknown", "family": e.family}, status=404)
apt_reqs = await resolve_requirement_apt(request.app['apt_mgr'], req)
return web.json_response([r.pkg_relation_str() for r in apt_reqs])
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--listen-address', type=str, help='Listen address')
parser.add_argument('--schroot', type=str, help='Schroot session to use')
parser.add_argument('--port', type=str, help='Listen port', default=9934)
parser.add_argument('--debug', action='store_true')
parser.add_argument(
"--gcp-logging", action='store_true', help='Use Google cloud logging.')
args = parser.parse_args()
if args.gcp_logging:
import google.cloud.logging
client = google.cloud.logging.Client()
client.get_default_handler()
client.setup_logging()
else:
if args.debug:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig(
level=log_level,
format="[%(asctime)s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S")
if args.schroot:
from .session.schroot import SchrootSession
session = SchrootSession(args.schroot)
else:
from .session.plain import PlainSession
session = PlainSession()
with session:
app = web.Application()
app.router.add_routes(routes)
app['apt_mgr'] = AptManager.from_session(session)
setup_metrics(app)
web.run_app(app, host=args.listen_address, port=args.port)
if __name__ == '__main__':
sys.exit(main())
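For reference, a quick way to exercise the new service once it is running (host, port and the example requirement are illustrative; the routes are the ones defined above):

import json
from urllib.request import urlopen

# GET /resolve-apt/{release}/{family}:{arg} maps a requirement to apt
# package relations for the given release.
url = "http://localhost:9934/resolve-apt/unstable/python-package:flask"
with urlopen(url) as resp:
    # The response body is a JSON list of package relation strings.
    print(json.load(resp))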

@ -18,18 +18,19 @@
__all__ = [ __all__ = [
"UnidentifiedError", "UnidentifiedError",
"DetailedFailure", "DetailedFailure",
"create_dist", "run_dist",
"create_dist_schroot", "create_dist_schroot",
"create_dist",
"dist",
] ]
import errno import errno
from functools import partial
import logging import logging
import os import os
import sys import sys
from typing import Optional, List from typing import Optional, List
from debian.deb822 import Deb822
from breezy.tree import Tree from breezy.tree import Tree
from breezy.workingtree import WorkingTree from breezy.workingtree import WorkingTree
@ -37,16 +38,78 @@ from buildlog_consultant.common import (
NoSpaceOnDevice, NoSpaceOnDevice,
) )
from debian.deb822 import Deb822
from . import DetailedFailure, UnidentifiedError from . import DetailedFailure, UnidentifiedError
from .dist_catcher import DistNoTarball from .dist_catcher import DistNoTarball
from .fix_build import iterate_with_build_fixers
from .logs import LogManager, NoLogManager
from .buildsystem import NoBuildToolsFound from .buildsystem import NoBuildToolsFound
from .resolver import auto_resolver from .resolver import auto_resolver
from .session import Session from .session import Session
from .session.schroot import SchrootSession from .session.schroot import SchrootSession
def run_dist(session, buildsystems, resolver, fixers, target_directory, quiet=False): DIST_LOG_FILENAME = 'dist.log'
def run_dist(session, buildsystems, resolver, fixers, target_directory,
quiet=False, log_manager=None):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
logging.info('Using dependency resolver: %s', resolver)
if log_manager is None:
log_manager = NoLogManager()
for buildsystem in buildsystems:
filename = iterate_with_build_fixers(fixers, log_manager.wrap(
partial(buildsystem.dist, session, resolver, target_directory,
quiet=quiet)))
return filename
raise NoBuildToolsFound()
def dist(session, export_directory, reldir, target_dir, log_manager, *,
version: Optional[str] = None, quiet=False):
from .fix_build import BuildFixer
from .buildsystem import detect_buildsystems
from .buildlog import InstallFixer
from .fixers import (
GitIdentityFixer,
MissingGoSumEntryFixer,
SecretGpgKeyFixer,
UnexpandedAutoconfMacroFixer,
GnulibDirectoryFixer,
)
if version:
# TODO(jelmer): Shouldn't include backend-specific code here
os.environ['SETUPTOOLS_SCM_PRETEND_VERSION'] = version
# TODO(jelmer): use scan_buildsystems to also look in subdirectories
buildsystems = list(detect_buildsystems(export_directory))
resolver = auto_resolver(session)
fixers: List[BuildFixer] = [
UnexpandedAutoconfMacroFixer(session, resolver),
GnulibDirectoryFixer(session),
MissingGoSumEntryFixer(session)]
fixers.append(InstallFixer(resolver))
if session.is_temporary:
# Only muck about with temporary sessions
fixers.extend([
GitIdentityFixer(session),
SecretGpgKeyFixer(session),
])
session.chdir(reldir)
# Some things want to write to the user's home directory, # Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache # e.g. pip caches in ~/.cache
session.create_home() session.create_home()
@ -54,31 +117,34 @@ def run_dist(session, buildsystems, resolver, fixers, target_directory, quiet=Fa
logging.info('Using dependency resolver: %s', resolver) logging.info('Using dependency resolver: %s', resolver)
for buildsystem in buildsystems: for buildsystem in buildsystems:
filename = buildsystem.dist( filename = iterate_with_build_fixers(fixers, log_manager.wrap(
session, resolver, fixers, target_directory, quiet=quiet partial(
) buildsystem.dist, session, resolver, target_dir,
quiet=quiet)))
return filename return filename
raise NoBuildToolsFound() raise NoBuildToolsFound()
# This is the function used by debianize()
def create_dist( def create_dist(
session: Session, session: Session,
tree: Tree, tree: Tree,
target_dir: str, target_dir: str,
include_controldir: bool = True, include_controldir: bool = True,
subdir: Optional[str] = None, subdir: Optional[str] = None,
cleanup: bool = False, log_manager: Optional[LogManager] = None,
version: Optional[str] = None,
) -> Optional[str]: ) -> Optional[str]:
from .buildsystem import detect_buildsystems """Create a dist tarball for a tree.
from .buildlog import InstallFixer
from .fix_build import BuildFixer
from .fixers import (
GitIdentityFixer,
SecretGpgKeyFixer,
UnexpandedAutoconfMacroFixer,
)
Args:
session: session to run it
tree: Tree object to work in
target_dir: Directory to write tarball into
include_controldir: Whether to include the version control directory
subdir: subdirectory in the tree to operate in
"""
if subdir is None: if subdir is None:
subdir = "package" subdir = "package"
try: try:
@ -90,19 +156,11 @@ def create_dist(
raise DetailedFailure(1, ["mkdtemp"], NoSpaceOnDevice()) raise DetailedFailure(1, ["mkdtemp"], NoSpaceOnDevice())
raise raise
# TODO(jelmer): use scan_buildsystems to also look in subdirectories if log_manager is None:
buildsystems = list(detect_buildsystems(export_directory)) log_manager = NoLogManager()
resolver = auto_resolver(session)
fixers: List[BuildFixer] = [UnexpandedAutoconfMacroFixer(session, resolver)]
fixers.append(InstallFixer(resolver)) return dist(session, export_directory, reldir, target_dir,
log_manager=log_manager, version=version)
if session.is_temporary:
# Only muck about with temporary sessions
fixers.extend([GitIdentityFixer(session), SecretGpgKeyFixer(session)])
session.chdir(reldir)
return run_dist(session, buildsystems, resolver, fixers, target_dir)
def create_dist_schroot( def create_dist_schroot(
@ -113,30 +171,35 @@ def create_dist_schroot(
packaging_subpath: Optional[str] = None, packaging_subpath: Optional[str] = None,
include_controldir: bool = True, include_controldir: bool = True,
subdir: Optional[str] = None, subdir: Optional[str] = None,
cleanup: bool = False, log_manager: Optional[LogManager] = None,
) -> Optional[str]: ) -> Optional[str]:
"""Create a dist tarball for a tree.
Args:
session: session to run it
tree: Tree object to work in
target_dir: Directory to write tarball into
include_controldir: Whether to include the version control directory
subdir: subdirectory in the tree to operate in
"""
with SchrootSession(chroot) as session: with SchrootSession(chroot) as session:
if packaging_tree is not None: if packaging_tree is not None:
from .debian import satisfy_build_deps from .debian import satisfy_build_deps
satisfy_build_deps(session, packaging_tree, packaging_subpath) satisfy_build_deps(session, packaging_tree, packaging_subpath)
return create_dist( return create_dist(
session, session, tree, target_dir,
tree, include_controldir=include_controldir, subdir=subdir,
target_dir, log_manager=log_manager)
include_controldir=include_controldir,
subdir=subdir,
cleanup=cleanup,
)
if __name__ == "__main__": def main(argv=None):
import argparse import argparse
import breezy.bzr # noqa: F401 import breezy.bzr # noqa: F401
import breezy.git # noqa: F401 import breezy.git # noqa: F401
from breezy.export import export from breezy.export import export
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser(argv)
parser.add_argument( parser.add_argument(
"--chroot", "--chroot",
default="unstable-amd64-sbuild", default="unstable-amd64-sbuild",
@ -157,8 +220,12 @@ if __name__ == "__main__":
"--target-directory", type=str, default="..", help="Target directory" "--target-directory", type=str, default="..", help="Target directory"
) )
parser.add_argument("--verbose", action="store_true", help="Be verbose") parser.add_argument("--verbose", action="store_true", help="Be verbose")
parser.add_argument("--mode", choices=["auto", "vcs", "buildsystem"],
type=str,
help="Mechanism to use to create buildsystem")
parser.add_argument( parser.add_argument(
"--include-controldir", action="store_true", help="Clone rather than export." "--include-controldir", action="store_true",
help="Clone rather than export."
) )
args = parser.parse_args() args = parser.parse_args()
@ -169,6 +236,10 @@ if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format="%(message)s") logging.basicConfig(level=logging.INFO, format="%(message)s")
tree = WorkingTree.open(args.directory) tree = WorkingTree.open(args.directory)
packaging_tree: Optional[WorkingTree]
subdir: Optional[str]
if args.packaging_directory: if args.packaging_directory:
packaging_tree = WorkingTree.open(args.packaging_directory) packaging_tree = WorkingTree.open(args.packaging_directory)
with packaging_tree.lock_read(): with packaging_tree.lock_read():
@ -179,6 +250,9 @@ if __name__ == "__main__":
packaging_tree = None packaging_tree = None
subdir = None subdir = None
if args.mode == 'vcs':
export(tree, "dist.tar.gz", "tgz", None)
elif args.mode in ('auto', 'buildsystem'):
try: try:
ret = create_dist_schroot( ret = create_dist_schroot(
tree, tree,
@ -188,10 +262,17 @@ if __name__ == "__main__":
chroot=args.chroot, chroot=args.chroot,
include_controldir=args.include_controldir, include_controldir=args.include_controldir,
) )
except (NoBuildToolsFound, NotImplementedError): except NoBuildToolsFound:
logging.info("No build tools found, falling back to simple export.") if args.mode == 'buildsystem':
logging.fatal('No build tools found, unable to create tarball')
return 1
logging.info(
"No build tools found, falling back to simple export.")
export(tree, "dist.tar.gz", "tgz", None) export(tree, "dist.tar.gz", "tgz", None)
except NotImplementedError: except NotImplementedError:
if args.mode == 'buildsystem':
logging.fatal('Unable to ask buildsystem for tarball')
return 1
logging.info( logging.info(
"Build system does not support dist tarball creation, " "Build system does not support dist tarball creation, "
"falling back to simple export." "falling back to simple export."
@ -199,10 +280,17 @@ if __name__ == "__main__":
export(tree, "dist.tar.gz", "tgz", None) export(tree, "dist.tar.gz", "tgz", None)
except UnidentifiedError as e: except UnidentifiedError as e:
logging.fatal("Unidentified error: %r", e.lines) logging.fatal("Unidentified error: %r", e.lines)
return 1
except DetailedFailure as e: except DetailedFailure as e:
logging.fatal("Identified error during dist creation: %s", e.error) logging.fatal("Identified error during dist creation: %s", e.error)
return 1
except DistNoTarball: except DistNoTarball:
logging.fatal("dist operation did not create a tarball") logging.fatal("dist operation did not create a tarball")
return 1
else: else:
logging.info("Created %s", ret) logging.info("Created %s", ret)
sys.exit(0) return 0
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
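A hedged sketch of calling the refactored dist code programmatically, combining create_dist() with the new LogManager plumbing (import paths follow the package layout implied by the hunks above and may differ):

from breezy.workingtree import WorkingTree

from ognibuild.dist import create_dist
from ognibuild.logs import DirectoryLogManager
from ognibuild.session.plain import PlainSession

tree = WorkingTree.open(".")
with PlainSession() as session:
    # Redirect the build system's output into dist.log, rotating any
    # previous dist.log out of the way first.
    log_manager = DirectoryLogManager("dist.log", mode="redirect")
    tarball = create_dist(
        session, tree, target_dir="..", log_manager=log_manager)
    print("Created", tarball)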

@ -54,7 +54,8 @@ class DistCatcher(object):
    @classmethod
    def default(cls, directory):
        return cls(
            [os.path.join(directory, "dist"), directory,
             os.path.join(directory, "..")]
        )

    def __enter__(self):
@ -87,19 +88,23 @@ class DistCatcher(object):
                continue
            if len(possible_new) == 1:
                entry = possible_new[0]
                logging.info(
                    "Found new tarball %s in %s.", entry.name, directory)
                self.files.append(entry.path)
                return entry.name
            elif len(possible_new) > 1:
                logging.warning(
                    "Found multiple tarballs %r in %s.", possible_new,
                    directory
                )
                self.files.extend([entry.path for entry in possible_new])
                return possible_new[0].name

            if len(possible_updated) == 1:
                entry = possible_updated[0]
                logging.info(
                    "Found updated tarball %s in %s.", entry.name,
                    directory)
                self.files.append(entry.path)
                return entry.name

@ -17,7 +17,7 @@
from functools import partial
import logging
from typing import List, Tuple, Callable, Optional, TypeVar

from buildlog_consultant import Problem
from buildlog_consultant.common import (
@ -29,6 +29,14 @@ from . import DetailedFailure, UnidentifiedError
from .session import Session, run_with_tee


# Number of attempts to fix a build before giving up.
DEFAULT_LIMIT = 200


class FixerLimitReached(Exception):
    """The maximum number of fixes has been reached."""


class BuildFixer(object):
    """Build fixer."""
@ -44,7 +52,11 @@ class BuildFixer(object):
        return self._fix(problem, phase)


def run_detecting_problems(
        session: Session, args: List[str], check_success=None,
        quiet=False, **kwargs) -> List[str]:
    if not quiet:
        logging.info('Running %r', args)
    if check_success is None:
        def check_success(retcode, contents):
            return (retcode == 0)
@ -63,17 +75,26 @@ def run_detecting_problems(session: Session, args: List[str], check_success=None
            logging.warning("Build failed with unidentified error:")
            logging.warning("%s", match.line.rstrip("\n"))
        else:
            logging.warning(
                "Build failed and unable to find cause. Giving up.")
        raise UnidentifiedError(retcode, args, lines, secondary=match)
    raise DetailedFailure(retcode, args, error)
T = TypeVar('T')


def iterate_with_build_fixers(
        fixers: List[BuildFixer],
        cb: Callable[[], T], limit=DEFAULT_LIMIT) -> T:
    """Call cb() until there are no more DetailedFailures we can fix.

    Args:
      fixers: List of fixers to use to resolve issues
      cb: Callable to run the build
      limit: Maximum number of fixing attempts before giving up
    """
    attempts = 0
    fixed_errors = []
    while True:
        to_resolve = []
@ -86,9 +107,13 @@ def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):
            logging.info("Identified error: %r", f.error)
            if f.error in fixed_errors:
                logging.warning(
                    "Failed to resolve error %r, it persisted. Giving up.",
                    f.error
                )
                raise f
            attempts += 1
            if limit is not None and limit <= attempts:
                raise FixerLimitReached(limit)
            try:
                resolved = resolve_error(f.error, None, fixers=fixers)
            except DetailedFailure as n:
@ -100,23 +125,25 @@ def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):
            else:
                if not resolved:
                    logging.warning(
                        "Failed to find resolution for error %r. Giving up.",
                        f.error
                    )
                    raise f
                fixed_errors.append(f.error)


def run_with_build_fixers(
        fixers: Optional[List[BuildFixer]], session: Session, args: List[str],
        quiet: bool = False, **kwargs
) -> List[str]:
    if fixers is None:
        fixers = []
    return iterate_with_build_fixers(
        fixers,
        partial(run_detecting_problems, session, args, quiet=quiet, **kwargs))


def resolve_error(error, phase, fixers) -> bool:
    relevant_fixers = []
    for fixer in fixers:
        if fixer.can_fix(error):
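Note that run_with_build_fixers() now takes the fixer list as its first argument and grows a quiet flag. A minimal calling sketch (the fixer and resolver wiring is illustrative):

from ognibuild.buildlog import InstallFixer
from ognibuild.fix_build import run_with_build_fixers
from ognibuild.resolver import auto_resolver
from ognibuild.session.plain import PlainSession

with PlainSession() as session:
    fixers = [InstallFixer(auto_resolver(session))]
    # Runs `make`, feeding each identified failure through the fixers
    # until it succeeds, cannot be fixed, or DEFAULT_LIMIT attempts pass.
    lines = run_with_build_fixers(fixers, session, ["make"])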

@ -21,8 +21,10 @@ from typing import Tuple
from buildlog_consultant import Problem from buildlog_consultant import Problem
from buildlog_consultant.common import ( from buildlog_consultant.common import (
MissingGitIdentity, MissingGitIdentity,
MissingGoSumEntry,
MissingSecretGpgKey, MissingSecretGpgKey,
MissingAutoconfMacro, MissingAutoconfMacro,
MissingGnulibDirectory,
) )
from ognibuild.requirements import AutoconfMacroRequirement from ognibuild.requirements import AutoconfMacroRequirement
from ognibuild.resolver import UnsatisfiedRequirements from ognibuild.resolver import UnsatisfiedRequirements
@ -30,6 +32,18 @@ from ognibuild.resolver import UnsatisfiedRequirements
from .fix_build import BuildFixer from .fix_build import BuildFixer
class GnulibDirectoryFixer(BuildFixer):
def __init__(self, session):
self.session = session
def can_fix(self, problem: Problem):
return isinstance(problem, MissingGnulibDirectory)
def _fix(self, problem: Problem, phase: Tuple[str, ...]):
self.session.check_call(["./gnulib.sh"])
return True
class GitIdentityFixer(BuildFixer): class GitIdentityFixer(BuildFixer):
def __init__(self, session): def __init__(self, session):
self.session = session self.session = session
@ -77,6 +91,26 @@ Passphrase: ""
return False return False
class MissingGoSumEntryFixer(BuildFixer):
def __init__(self, session):
self.session = session
def __repr__(self):
return "%s()" % (type(self).__name__)
def __str__(self):
return "missing go.sum entry fixer"
def can_fix(self, error):
return isinstance(error, MissingGoSumEntry)
def _fix(self, error, phase):
from .fix_build import run_detecting_problems
run_detecting_problems(
self.session, ["go", "mod", "download", error.package])
return True
class UnexpandedAutoconfMacroFixer(BuildFixer): class UnexpandedAutoconfMacroFixer(BuildFixer):
def __init__(self, session, resolver): def __init__(self, session, resolver):
self.session = session self.session = session

@ -21,11 +21,13 @@ def run_info(session, buildsystems, fixers=None):
print("%r:" % buildsystem) print("%r:" % buildsystem)
deps = {} deps = {}
try: try:
for kind, dep in buildsystem.get_declared_dependencies(session, fixers=fixers): for kind, dep in buildsystem.get_declared_dependencies(
session, fixers=fixers):
deps.setdefault(kind, []).append(dep) deps.setdefault(kind, []).append(dep)
except NotImplementedError: except NotImplementedError:
print( print(
"\tUnable to detect declared dependencies for this type of build system" "\tUnable to detect declared dependencies for this type of "
"build system"
) )
if deps: if deps:
print("\tDeclared dependencies:") print("\tDeclared dependencies:")
@ -35,9 +37,11 @@ def run_info(session, buildsystems, fixers=None):
print("\t\t\t%s" % dep) print("\t\t\t%s" % dep)
print("") print("")
try: try:
outputs = list(buildsystem.get_declared_outputs(session, fixers=fixers)) outputs = list(buildsystem.get_declared_outputs(
session, fixers=fixers))
except NotImplementedError: except NotImplementedError:
print("\tUnable to detect declared outputs for this type of build system") print("\tUnable to detect declared outputs for this type of "
"build system")
outputs = [] outputs = []
if outputs: if outputs:
print("\tDeclared outputs:") print("\tDeclared outputs:")

@ -15,21 +15,34 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from functools import partial
from typing import Optional

from .buildsystem import NoBuildToolsFound, InstallTarget
from .fix_build import iterate_with_build_fixers
from .logs import NoLogManager


def run_install(
        session, buildsystems, resolver, fixers, *, user: bool = False,
        prefix: Optional[str] = None, log_manager=None):
    # Some things want to write to the user's home directory,
    # e.g. pip caches in ~/.cache
    session.create_home()

    if log_manager is None:
        log_manager = NoLogManager()

    install_target = InstallTarget()
    install_target.user = user
    install_target.prefix = prefix

    for buildsystem in buildsystems:
        iterate_with_build_fixers(
            fixers,
            log_manager.wrap(
                partial(buildsystem.install, session, resolver,
                        install_target)))
        return
    raise NoBuildToolsFound()
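A short sketch of the new run_install() calling convention with log capture (import paths, build system detection and resolver setup are illustrative assumptions):

from ognibuild.buildsystem import detect_buildsystems
from ognibuild.install import run_install
from ognibuild.logs import DirectoryLogManager
from ognibuild.resolver import auto_resolver
from ognibuild.session.plain import PlainSession

with PlainSession() as session:
    buildsystems = list(detect_buildsystems("."))
    resolver = auto_resolver(session)
    # user and prefix are now keyword-only; the log manager wraps the
    # actual install step so its output lands in install.log.
    run_install(
        session, buildsystems, resolver, fixers=[], user=True,
        log_manager=DirectoryLogManager("install.log", mode="copy"))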

ognibuild/logs.py (new file)
@ -0,0 +1,105 @@
#!/usr/bin/python
# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from contextlib import contextmanager
import subprocess
import logging
import os
import sys
@contextmanager
def copy_output(output_log: str, tee: bool = False):
old_stdout = os.dup(sys.stdout.fileno())
old_stderr = os.dup(sys.stderr.fileno())
if tee:
p = subprocess.Popen(["tee", output_log], stdin=subprocess.PIPE)
newfd = p.stdin
else:
newfd = open(output_log, 'wb')
os.dup2(newfd.fileno(), sys.stdout.fileno()) # type: ignore
os.dup2(newfd.fileno(), sys.stderr.fileno()) # type: ignore
try:
yield
finally:
sys.stdout.flush()
sys.stderr.flush()
os.dup2(old_stdout, sys.stdout.fileno())
os.dup2(old_stderr, sys.stderr.fileno())
if newfd is not None:
newfd.close()
@contextmanager
def redirect_output(to_file):
sys.stdout.flush()
sys.stderr.flush()
old_stdout = os.dup(sys.stdout.fileno())
old_stderr = os.dup(sys.stderr.fileno())
os.dup2(to_file.fileno(), sys.stdout.fileno()) # type: ignore
os.dup2(to_file.fileno(), sys.stderr.fileno()) # type: ignore
try:
yield
finally:
sys.stdout.flush()
sys.stderr.flush()
os.dup2(old_stdout, sys.stdout.fileno())
os.dup2(old_stderr, sys.stderr.fileno())
def rotate_logfile(source_path: str) -> None:
if os.path.exists(source_path):
(directory_path, name) = os.path.split(source_path)
i = 1
while os.path.exists(
os.path.join(directory_path, "%s.%d" % (name, i))):
i += 1
target_path = os.path.join(directory_path, "%s.%d" % (name, i))
os.rename(source_path, target_path)
logging.debug("Storing previous build log at %s", target_path)
class LogManager(object):
def wrap(self, fn):
raise NotImplementedError(self.wrap)
class DirectoryLogManager(LogManager):
def __init__(self, path, mode):
self.path = path
self.mode = mode
def wrap(self, fn):
def _run(*args, **kwargs):
rotate_logfile(self.path)
if self.mode == 'copy':
with copy_output(self.path, tee=True):
return fn(*args, **kwargs)
elif self.mode == 'redirect':
with copy_output(self.path, tee=False):
return fn(*args, **kwargs)
else:
raise NotImplementedError(self.mode)
return _run
class NoLogManager(LogManager):
def wrap(self, fn):
return fn

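The LogManager interface above is what run_install() earlier and run_test() further down use to capture build output. A minimal usage sketch (the /tmp/build.log path is just an example):

from ognibuild.logs import DirectoryLogManager

# 'redirect' sends stdout/stderr to the file; 'copy' tees, so the output
# also stays visible on the console.
log_manager = DirectoryLogManager('/tmp/build.log', mode='redirect')

def build_step():
    print("compiling...")

# wrap() returns a callable that first rotates any existing /tmp/build.log
# to /tmp/build.log.1, .2, ... and then runs build_step with its output
# captured to the fresh log file.
log_manager.wrap(build_step)()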
ognibuild/requirements.py

@ -26,16 +26,19 @@ from . import Requirement
class PythonPackageRequirement(Requirement):

    family = "python-package"

    package: str

    def __init__(
            self, package, python_version=None, specs=None,
            minimum_version=None):
        self.package = package
        self.python_version = python_version
        if specs is None:
            specs = []
        if minimum_version is not None:
            specs.append((">=", minimum_version))
        self.specs = specs

    def __repr__(self):
@ -53,11 +56,29 @@ class PythonPackageRequirement(Requirement):
return "python package: %s" % (self.package,) return "python package: %s" % (self.package,)
    @classmethod
    def from_requirement_str(cls, text, python_version=None):
        from requirements.requirement import Requirement

        req = Requirement.parse(text)
        return cls(
            package=req.name, specs=req.specs, python_version=python_version)

    def requirement_str(self):
        if self.specs:
            return '%s;%s' % (
                self.package, ','.join([''.join(s) for s in self.specs]))
        return self.package

    @classmethod
    def _from_json(cls, js):
        if isinstance(js, str):
            return cls.from_requirement_str(js)
        return cls.from_requirement_str(js[0], python_version=js[1])

    def _json(self):
        if self.python_version:
            return [self.requirement_str(), self.python_version]
        return self.requirement_str()
def met(self, session): def met(self, session):
if self.python_version == "cpython3": if self.python_version == "cpython3":
@ -74,7 +95,8 @@ class PythonPackageRequirement(Requirement):
raise NotImplementedError raise NotImplementedError
text = self.package + ",".join(["".join(spec) for spec in self.specs]) text = self.package + ",".join(["".join(spec) for spec in self.specs])
p = session.Popen( p = session.Popen(
[cmd, "-c", "import pkg_resources; pkg_resources.require(%r)" % text], [cmd, "-c",
"import pkg_resources; pkg_resources.require(%r)" % text],
stdout=subprocess.DEVNULL, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
) )
@ -82,16 +104,33 @@ class PythonPackageRequirement(Requirement):
return p.returncode == 0 return p.returncode == 0
Requirement.register_json(PythonPackageRequirement)
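A short sketch of the JSON round-trip these new _json()/_from_json() hooks enable for PythonPackageRequirement (requires the requirements-parser package that from_requirement_str() imports; 'requests>=2.0' and 'urllib3' are just illustrative inputs):

from ognibuild.requirements import PythonPackageRequirement

req = PythonPackageRequirement.from_requirement_str('requests>=2.0')
print(req.requirement_str())   # "requests;>=2.0"
print(req._json())             # "requests;>=2.0" (plain string: no python_version set)

req2 = PythonPackageRequirement._from_json(['urllib3', 'cpython3'])
print(req2.package, req2.python_version)   # urllib3 cpython3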
class LatexPackageRequirement(Requirement): class LatexPackageRequirement(Requirement):
family = "latex-package"
def __init__(self, package: str): def __init__(self, package: str):
self.package = package self.package = package
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.package) return "%s(%r)" % (type(self).__name__, self.package)
def _json(self):
return self.package
def _from_json(cls, package):
return cls(package)
Requirement.register_json(LatexPackageRequirement)
class PhpPackageRequirement(Requirement): class PhpPackageRequirement(Requirement):
family = "php-package"
def __init__( def __init__(
self, self,
package: str, package: str,
@ -104,6 +143,13 @@ class PhpPackageRequirement(Requirement):
self.min_version = min_version self.min_version = min_version
self.max_version = max_version self.max_version = max_version
def _json(self):
return (self.package, self.channel, self.min_version, self.max_version)
@classmethod
def _from_json(cls, js):
return cls(*js)
def __repr__(self): def __repr__(self):
return "%s(%r, %r, %r, %r)" % ( return "%s(%r, %r, %r, %r)" % (
type(self).__name__, type(self).__name__,
@ -114,14 +160,24 @@ class PhpPackageRequirement(Requirement):
) )
Requirement.register_json(PhpPackageRequirement)
class BinaryRequirement(Requirement): class BinaryRequirement(Requirement):
family = "binary"
binary_name: str binary_name: str
def __init__(self, binary_name): def __init__(self, binary_name):
super(BinaryRequirement, self).__init__("binary")
self.binary_name = binary_name self.binary_name = binary_name
def _json(self):
return self.binary_name
@classmethod
def _from_json(cls, js):
return cls(js)
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.binary_name) return "%s(%r)" % (type(self).__name__, self.binary_name)
@ -135,14 +191,54 @@ class BinaryRequirement(Requirement):
return p.returncode == 0 return p.returncode == 0
Requirement.register_json(BinaryRequirement)
class PHPExtensionRequirement(Requirement):
family = "php-extension"
extension: str
def __init__(self, extension: str):
self.extension = extension
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.extension)
class PytestPluginRequirement(Requirement):
family = "pytest-plugin"
plugin: str
def __init__(self, plugin: str):
self.plugin = plugin
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.plugin)
class VcsControlDirectoryAccessRequirement(Requirement):
vcs: List[str]
family = "vcs-access"
def __init__(self, vcs):
self.vcs = vcs
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.vcs)
class PerlModuleRequirement(Requirement): class PerlModuleRequirement(Requirement):
module: str module: str
filename: Optional[str] filename: Optional[str]
inc: Optional[List[str]] inc: Optional[List[str]]
family = "perl-module"
def __init__(self, module, filename=None, inc=None): def __init__(self, module, filename=None, inc=None):
super(PerlModuleRequirement, self).__init__("perl-module")
self.module = module self.module = module
self.filename = filename self.filename = filename
self.inc = inc self.inc = inc
@ -158,10 +254,10 @@ class PerlModuleRequirement(Requirement):
class VagueDependencyRequirement(Requirement): class VagueDependencyRequirement(Requirement):
name: str name: str
family = "vague"
minimum_version: Optional[str] = None minimum_version: Optional[str] = None
def __init__(self, name, minimum_version=None): def __init__(self, name, minimum_version=None):
super(VagueDependencyRequirement, self).__init__("vague")
self.name = name self.name = name
self.minimum_version = minimum_version self.minimum_version = minimum_version
@ -169,19 +265,26 @@ class VagueDependencyRequirement(Requirement):
if " " not in self.name: if " " not in self.name:
yield BinaryRequirement(self.name) yield BinaryRequirement(self.name)
yield LibraryRequirement(self.name) yield LibraryRequirement(self.name)
            yield PkgConfigRequirement(
                self.name, minimum_version=self.minimum_version)
            if self.name.lower() != self.name:
                yield BinaryRequirement(self.name.lower())
                yield LibraryRequirement(self.name.lower())
                yield PkgConfigRequirement(
                    self.name.lower(), minimum_version=self.minimum_version)
            try:
                from .resolver.apt import AptRequirement
            except ModuleNotFoundError:
                pass
            else:
                yield AptRequirement.simple(
                    self.name.lower(), minimum_version=self.minimum_version)
                if self.name.lower().startswith('lib'):
                    devname = '%s-dev' % self.name.lower()
                else:
                    devname = 'lib%s-dev' % self.name.lower()
                yield AptRequirement.simple(
                    devname, minimum_version=self.minimum_version)
def met(self, session): def met(self, session):
for x in self.expand(): for x in self.expand():
@ -192,19 +295,36 @@ class VagueDependencyRequirement(Requirement):
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.name) return "%s(%r)" % (type(self).__name__, self.name)
def __str__(self):
if self.minimum_version:
return "%s >= %s" % (self.name, self.minimum_version)
return self.name
class NodePackageRequirement(Requirement): class NodePackageRequirement(Requirement):
package: str package: str
family = "npm-package"
def __init__(self, package): def __init__(self, package):
super(NodePackageRequirement, self).__init__("npm-package")
self.package = package self.package = package
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.package) return "%s(%r)" % (type(self).__name__, self.package)
class LuaModuleRequirement(Requirement):
module: str
family = "lua-module"
def __init__(self, module):
self.module = module
def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.module)
class PerlPreDeclaredRequirement(Requirement): class PerlPreDeclaredRequirement(Requirement):
name: str name: str
@ -227,8 +347,9 @@ class PerlPreDeclaredRequirement(Requirement):
'auto_set_bugtracker': 'Module::Install::Bugtracker', 'auto_set_bugtracker': 'Module::Install::Bugtracker',
} }
family = "perl-predeclared"
def __init__(self, name): def __init__(self, name):
super(PerlPreDeclaredRequirement, self).__init__("perl-predeclared")
self.name = name self.name = name
def lookup_module(self): def lookup_module(self):
@ -242,9 +363,9 @@ class PerlPreDeclaredRequirement(Requirement):
class NodeModuleRequirement(Requirement): class NodeModuleRequirement(Requirement):
module: str module: str
family = "npm-module"
def __init__(self, module): def __init__(self, module):
super(NodeModuleRequirement, self).__init__("npm-module")
self.module = module self.module = module
def __repr__(self): def __repr__(self):
@ -255,41 +376,45 @@ class CargoCrateRequirement(Requirement):
    crate: str
    features: Set[str]
    api_version: Optional[str]
    minimum_version: Optional[str]
    family = "cargo-crate"

    def __init__(self, crate, features=None, api_version=None,
                 minimum_version=None):
        self.crate = crate
        if features is None:
            features = set()
        self.features = features
        self.api_version = api_version
        self.minimum_version = minimum_version

    def __repr__(self):
        return "%s(%r, features=%r, api_version=%r, minimum_version=%r)" % (
            type(self).__name__,
            self.crate,
            self.features,
            self.api_version,
            self.minimum_version,
        )

    def __str__(self):
        ret = "cargo crate: %s %s" % (
            self.crate,
            self.api_version or "")
        if self.features:
            ret += " (%s)" % (", ".join(sorted(self.features)))
        if self.minimum_version:
            ret += " (>= %s)" % self.minimum_version
        return ret
class PkgConfigRequirement(Requirement): class PkgConfigRequirement(Requirement):
module: str module: str
family = "pkg-config"
def __init__(self, module, minimum_version=None): def __init__(self, module, minimum_version=None):
super(PkgConfigRequirement, self).__init__("pkg-config")
self.module = module self.module = module
self.minimum_version = minimum_version self.minimum_version = minimum_version
@ -301,9 +426,9 @@ class PkgConfigRequirement(Requirement):
class PathRequirement(Requirement): class PathRequirement(Requirement):
path: str path: str
family = "path"
def __init__(self, path): def __init__(self, path):
super(PathRequirement, self).__init__("path")
self.path = path self.path = path
def __repr__(self): def __repr__(self):
@ -313,9 +438,9 @@ class PathRequirement(Requirement):
class CHeaderRequirement(Requirement): class CHeaderRequirement(Requirement):
header: str header: str
family = "c-header"
def __init__(self, header): def __init__(self, header):
super(CHeaderRequirement, self).__init__("c-header")
self.header = header self.header = header
def __repr__(self): def __repr__(self):
@ -323,16 +448,15 @@ class CHeaderRequirement(Requirement):
class JavaScriptRuntimeRequirement(Requirement):
    family = "javascript-runtime"


class ValaPackageRequirement(Requirement):

    package: str
    family = "vala"

    def __init__(self, package: str):
        self.package = package
@ -340,9 +464,9 @@ class RubyGemRequirement(Requirement):
gem: str gem: str
minimum_version: Optional[str] minimum_version: Optional[str]
family = "gem"
def __init__(self, gem: str, minimum_version: Optional[str]): def __init__(self, gem: str, minimum_version: Optional[str]):
super(RubyGemRequirement, self).__init__("gem")
self.gem = gem self.gem = gem
self.minimum_version = minimum_version self.minimum_version = minimum_version
@ -351,12 +475,16 @@ class GoPackageRequirement(Requirement):
package: str package: str
version: Optional[str] version: Optional[str]
family = "go-package"
def __init__(self, package: str, version: Optional[str] = None): def __init__(self, package: str, version: Optional[str] = None):
super(GoPackageRequirement, self).__init__("go-package")
self.package = package self.package = package
self.version = version self.version = version
def __repr__(self):
return "%s(%r, version=%r)" % (
type(self).__name__, self.package, self.version)
def __str__(self): def __str__(self):
if self.version: if self.version:
return "go package: %s (= %s)" % (self.package, self.version) return "go package: %s (= %s)" % (self.package, self.version)
@ -366,9 +494,9 @@ class GoPackageRequirement(Requirement):
class GoRequirement(Requirement): class GoRequirement(Requirement):
version: Optional[str] version: Optional[str]
family = "go"
def __init__(self, version: Optional[str] = None): def __init__(self, version: Optional[str] = None):
super(GoRequirement, self).__init__("go")
self.version = version self.version = version
def __str__(self): def __str__(self):
@ -378,18 +506,18 @@ class GoRequirement(Requirement):
class DhAddonRequirement(Requirement): class DhAddonRequirement(Requirement):
path: str path: str
family = "dh-addon"
def __init__(self, path: str): def __init__(self, path: str):
super(DhAddonRequirement, self).__init__("dh-addon")
self.path = path self.path = path
class PhpClassRequirement(Requirement): class PhpClassRequirement(Requirement):
php_class: str php_class: str
family = "php-class"
def __init__(self, php_class: str): def __init__(self, php_class: str):
super(PhpClassRequirement, self).__init__("php-class")
self.php_class = php_class self.php_class = php_class
@ -397,9 +525,9 @@ class RPackageRequirement(Requirement):
package: str package: str
minimum_version: Optional[str] minimum_version: Optional[str]
family = "r-package"
def __init__(self, package: str, minimum_version: Optional[str] = None): def __init__(self, package: str, minimum_version: Optional[str] = None):
super(RPackageRequirement, self).__init__("r-package")
self.package = package self.package = package
self.minimum_version = minimum_version self.minimum_version = minimum_version
@ -412,7 +540,8 @@ class RPackageRequirement(Requirement):
def __str__(self): def __str__(self):
if self.minimum_version: if self.minimum_version:
return "R package: %s (>= %s)" % (self.package, self.minimum_version) return "R package: %s (>= %s)" % (
self.package, self.minimum_version)
else: else:
return "R package: %s" % (self.package,) return "R package: %s" % (self.package,)
@ -432,9 +561,9 @@ class OctavePackageRequirement(Requirement):
package: str package: str
minimum_version: Optional[str] minimum_version: Optional[str]
family = "octave-package"
def __init__(self, package: str, minimum_version: Optional[str] = None): def __init__(self, package: str, minimum_version: Optional[str] = None):
super(OctavePackageRequirement, self).__init__("octave-package")
self.package = package self.package = package
self.minimum_version = minimum_version self.minimum_version = minimum_version
@ -447,7 +576,8 @@ class OctavePackageRequirement(Requirement):
def __str__(self): def __str__(self):
if self.minimum_version: if self.minimum_version:
return "Octave package: %s (>= %s)" % (self.package, self.minimum_version) return "Octave package: %s (>= %s)" % (
self.package, self.minimum_version)
else: else:
return "Octave package: %s" % (self.package,) return "Octave package: %s" % (self.package,)
@ -466,9 +596,9 @@ class OctavePackageRequirement(Requirement):
class LibraryRequirement(Requirement): class LibraryRequirement(Requirement):
library: str library: str
family = "lib"
def __init__(self, library: str): def __init__(self, library: str):
super(LibraryRequirement, self).__init__("lib")
self.library = library self.library = library
@ -476,9 +606,9 @@ class StaticLibraryRequirement(Requirement):
library: str library: str
filename: str filename: str
family = "static-lib"
def __init__(self, library: str, filename: str): def __init__(self, library: str, filename: str):
super(StaticLibraryRequirement, self).__init__("static-lib")
self.library = library self.library = library
self.filename = filename self.filename = filename
@ -486,18 +616,18 @@ class StaticLibraryRequirement(Requirement):
class RubyFileRequirement(Requirement): class RubyFileRequirement(Requirement):
filename: str filename: str
family = "ruby-file"
def __init__(self, filename: str): def __init__(self, filename: str):
super(RubyFileRequirement, self).__init__("ruby-file")
self.filename = filename self.filename = filename
class XmlEntityRequirement(Requirement): class XmlEntityRequirement(Requirement):
url: str url: str
family = "xml-entity"
def __init__(self, url: str): def __init__(self, url: str):
super(XmlEntityRequirement, self).__init__("xml-entity")
self.url = url self.url = url
@ -505,9 +635,9 @@ class SprocketsFileRequirement(Requirement):
content_type: str content_type: str
name: str name: str
family = "sprockets-file"
def __init__(self, content_type: str, name: str): def __init__(self, content_type: str, name: str):
super(SprocketsFileRequirement, self).__init__("sprockets-file")
self.content_type = content_type self.content_type = content_type
self.name = name self.name = name
@ -515,27 +645,29 @@ class SprocketsFileRequirement(Requirement):
class JavaClassRequirement(Requirement): class JavaClassRequirement(Requirement):
classname: str classname: str
family = "java-class"
def __init__(self, classname: str): def __init__(self, classname: str):
super(JavaClassRequirement, self).__init__("java-class")
self.classname = classname self.classname = classname
class CMakefileRequirement(Requirement):

    filename: str
    version: Optional[str]
    family = "cmake-file"

    def __init__(self, filename: str, version=None):
        self.filename = filename
        self.version = version
class HaskellPackageRequirement(Requirement): class HaskellPackageRequirement(Requirement):
package: str package: str
family = "haskell-package"
def __init__(self, package: str, specs=None): def __init__(self, package: str, specs=None):
super(HaskellPackageRequirement, self).__init__("haskell-package")
self.package = package self.package = package
self.specs = specs self.specs = specs
@ -551,9 +683,9 @@ class MavenArtifactRequirement(Requirement):
artifact_id: str artifact_id: str
version: Optional[str] version: Optional[str]
kind: Optional[str] kind: Optional[str]
family = "maven-artifact"
def __init__(self, group_id, artifact_id, version=None, kind=None): def __init__(self, group_id, artifact_id, version=None, kind=None):
super(MavenArtifactRequirement, self).__init__("maven-artifact")
self.group_id = group_id self.group_id = group_id
self.artifact_id = artifact_id self.artifact_id = artifact_id
self.version = version self.version = version
@ -566,6 +698,11 @@ class MavenArtifactRequirement(Requirement):
self.version, self.version,
) )
def __repr__(self):
return "%s(group_id=%r, artifact_id=%r, version=%r, kind=%r)" % (
type(self).__name__, self.group_id, self.artifact_id,
self.version, self.kind)
@classmethod @classmethod
def from_str(cls, text): def from_str(cls, text):
return cls.from_tuple(text.split(":")) return cls.from_tuple(text.split(":"))
@ -587,17 +724,16 @@ class MavenArtifactRequirement(Requirement):
class GnomeCommonRequirement(Requirement):
    family = "gnome-common"


class JDKFileRequirement(Requirement):

    jdk_path: str
    filename: str
    family = "jdk-file"

    def __init__(self, jdk_path: str, filename: str):
        self.jdk_path = jdk_path
        self.filename = filename
@ -607,55 +743,70 @@ class JDKFileRequirement(Requirement):
class JDKRequirement(Requirement):
    family = "jdk"


class JRERequirement(Requirement):
    family = "jre"


class QtModuleRequirement(Requirement):
    family = "qt-module"

    def __init__(self, module):
        self.module = module


class QTRequirement(Requirement):
    family = "qt"


class X11Requirement(Requirement):
    family = "x11"


class CertificateAuthorityRequirement(Requirement):
    family = "ca-cert"

    def __init__(self, url):
        self.url = url
class PerlFileRequirement(Requirement): class PerlFileRequirement(Requirement):
filename: str filename: str
family = "perl-file"
def __init__(self, filename: str): def __init__(self, filename: str):
super(PerlFileRequirement, self).__init__("perl-file")
self.filename = filename self.filename = filename
class AutoconfMacroRequirement(Requirement): class AutoconfMacroRequirement(Requirement):
family = "autoconf-macro"
macro: str macro: str
def __init__(self, macro: str): def __init__(self, macro: str):
super(AutoconfMacroRequirement, self).__init__("autoconf-macro")
self.macro = macro self.macro = macro
def _json(self):
return self.macro
@classmethod
def _from_json(cls, macro):
return cls(macro)
Requirement.register_json(AutoconfMacroRequirement)
class LibtoolRequirement(Requirement):
    family = "libtool"


class IntrospectionTypelibRequirement(Requirement):
    family = "introspection-type-lib"

    def __init__(self, library):
        self.library = library
@ -665,9 +816,9 @@ class PythonModuleRequirement(Requirement):
module: str module: str
python_version: Optional[str] python_version: Optional[str]
minimum_version: Optional[str] minimum_version: Optional[str]
family = "python-module"
def __init__(self, module, python_version=None, minimum_version=None): def __init__(self, module, python_version=None, minimum_version=None):
super(PythonModuleRequirement, self).__init__("python-module")
self.module = module self.module = module
self.python_version = python_version self.python_version = python_version
self.minimum_version = minimum_version self.minimum_version = minimum_version
@ -702,7 +853,25 @@ class PythonModuleRequirement(Requirement):
class BoostComponentRequirement(Requirement): class BoostComponentRequirement(Requirement):
name: str name: str
family = "boost-component"
def __init__(self, name): def __init__(self, name):
super(BoostComponentRequirement, self).__init__("boost-component")
self.name = name self.name = name
class KF5ComponentRequirement(Requirement):
name: str
family = "kf5-component"
def __init__(self, name):
self.name = name
class GnulibDirectoryRequirement(Requirement):
directory: str
family = "gnulib"
def __init__(self, directory):
self.directory = directory

ognibuild/resolver/__init__.py

@ -18,8 +18,11 @@
import logging
import subprocess

from typing import Optional, List, Type

from .. import UnidentifiedError, Requirement
from ..fix_build import run_detecting_problems
from ..session import Session
class UnsatisfiedRequirements(Exception): class UnsatisfiedRequirements(Exception):
@ -28,13 +31,22 @@ class UnsatisfiedRequirements(Exception):
class Resolver(object):

    name: str

    def __init__(self, session, user_local):
        raise NotImplementedError(self.__init__)

    def install(self, requirements: List[Requirement]):
        raise NotImplementedError(self.install)

    def resolve(self, requirement: Requirement) -> Optional[Requirement]:
        raise NotImplementedError(self.resolve)

    def resolve_all(self, requirement: Requirement) -> List[Requirement]:
        raise NotImplementedError(self.resolve_all)

    def explain(self, requirements: List[Requirement]):
        raise NotImplementedError(self.explain)
def env(self): def env(self):
@ -42,13 +54,15 @@ class Resolver(object):
class CPANResolver(Resolver): class CPANResolver(Resolver):
name = "cpan"
def __init__(self, session, user_local=False, skip_tests=True): def __init__(self, session, user_local=False, skip_tests=True):
self.session = session self.session = session
self.user_local = user_local self.user_local = user_local
self.skip_tests = skip_tests self.skip_tests = skip_tests
def __str__(self): def __str__(self):
return "cpan" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session) return "%s(%r)" % (type(self).__name__, self.session)
@ -109,7 +123,8 @@ class TlmgrResolver(Resolver):
self.repository = repository self.repository = repository
def __str__(self): def __str__(self):
if self.repository.startswith('http://') or self.repository.startswith('https://'): if (self.repository.startswith('http://')
or self.repository.startswith('https://')):
return 'tlmgr(%r)' % self.repository return 'tlmgr(%r)' % self.repository
else: else:
return self.repository return self.repository
@ -154,7 +169,8 @@ class TlmgrResolver(Resolver):
try: try:
run_detecting_problems(self.session, cmd, user=user) run_detecting_problems(self.session, cmd, user=user)
except UnidentifiedError as e: except UnidentifiedError as e:
if "tlmgr: user mode not initialized, please read the documentation!" in e.lines: if ("tlmgr: user mode not initialized, "
"please read the documentation!") in e.lines:
self.session.check_call(['tlmgr', 'init-usertree']) self.session.check_call(['tlmgr', 'init-usertree'])
else: else:
raise raise
@ -163,6 +179,7 @@ class TlmgrResolver(Resolver):
class CTANResolver(TlmgrResolver): class CTANResolver(TlmgrResolver):
name = "ctan"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
super(CTANResolver, self).__init__( super(CTANResolver, self).__init__(
@ -170,13 +187,16 @@ class CTANResolver(TlmgrResolver):
class RResolver(Resolver): class RResolver(Resolver):
name: str
def __init__(self, session, repos, user_local=False): def __init__(self, session, repos, user_local=False):
self.session = session self.session = session
self.repos = repos self.repos = repos
self.user_local = user_local self.user_local = user_local
def __str__(self): def __str__(self):
return "cran" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r, %r)" % (type(self).__name__, self.session, self.repos) return "%s(%r, %r)" % (type(self).__name__, self.session, self.repos)
@ -221,12 +241,14 @@ class RResolver(Resolver):
class OctaveForgeResolver(Resolver): class OctaveForgeResolver(Resolver):
name = "octave-forge"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
self.session = session self.session = session
self.user_local = user_local self.user_local = user_local
def __str__(self): def __str__(self):
return "octave-forge" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session) return "%s(%r)" % (type(self).__name__, self.session)
@ -267,6 +289,8 @@ class OctaveForgeResolver(Resolver):
class CRANResolver(RResolver): class CRANResolver(RResolver):
name = "cran"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
super(CRANResolver, self).__init__( super(CRANResolver, self).__init__(
session, "http://cran.r-project.org", user_local=user_local session, "http://cran.r-project.org", user_local=user_local
@ -274,19 +298,25 @@ class CRANResolver(RResolver):
class BioconductorResolver(RResolver): class BioconductorResolver(RResolver):
name = "bioconductor"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
super(BioconductorResolver, self).__init__( super(BioconductorResolver, self).__init__(
session, "https://hedgehog.fhcrc.org/bioconductor", user_local=user_local session, "https://hedgehog.fhcrc.org/bioconductor",
user_local=user_local
) )
class HackageResolver(Resolver): class HackageResolver(Resolver):
name = "hackage"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
self.session = session self.session = session
self.user_local = user_local self.user_local = user_local
def __str__(self): def __str__(self):
return "hackage" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session) return "%s(%r)" % (type(self).__name__, self.session)
@ -295,7 +325,8 @@ class HackageResolver(Resolver):
extra_args = [] extra_args = []
if self.user_local: if self.user_local:
extra_args.append("--user") extra_args.append("--user")
return ["cabal", "install"] + extra_args + [req.package for req in reqs] return (["cabal", "install"] + extra_args
+ [req.package for req in reqs])
def install(self, requirements): def install(self, requirements):
from ..requirements import HaskellPackageRequirement from ..requirements import HaskellPackageRequirement
@ -329,12 +360,15 @@ class HackageResolver(Resolver):
class PypiResolver(Resolver): class PypiResolver(Resolver):
name = "pypi"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
self.session = session self.session = session
self.user_local = user_local self.user_local = user_local
def __str__(self): def __str__(self):
return "pypi" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session) return "%s(%r)" % (type(self).__name__, self.session)
@ -380,12 +414,15 @@ class PypiResolver(Resolver):
class GoResolver(Resolver): class GoResolver(Resolver):
name = "go"
def __init__(self, session, user_local): def __init__(self, session, user_local):
self.session = session self.session = session
self.user_local = user_local self.user_local = user_local
def __str__(self): def __str__(self):
return "go" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session) return "%s(%r)" % (type(self).__name__, self.session)
@ -426,17 +463,26 @@ NPM_COMMAND_PACKAGES = {
"del-cli": "del-cli", "del-cli": "del-cli",
"husky": "husky", "husky": "husky",
"cross-env": "cross-env", "cross-env": "cross-env",
"xo": "xo",
"standard": "standard",
"jshint": "jshint",
"if-node-version": "if-node-version",
"babel-cli": "babel",
"c8": "c8",
"prettier-standard": "prettier-standard",
} }
class NpmResolver(Resolver): class NpmResolver(Resolver):
name = "npm"
def __init__(self, session, user_local=False): def __init__(self, session, user_local=False):
self.session = session self.session = session
self.user_local = user_local self.user_local = user_local
# TODO(jelmer): Handle user_local # TODO(jelmer): Handle user_local
def __str__(self): def __str__(self):
return "npm" return self.name
def __repr__(self): def __repr__(self):
return "%s(%r)" % (type(self).__name__, self.session) return "%s(%r)" % (type(self).__name__, self.session)
@ -472,7 +518,10 @@ class NpmResolver(Resolver):
if not isinstance(requirement, NodePackageRequirement): if not isinstance(requirement, NodePackageRequirement):
missing.append(requirement) missing.append(requirement)
continue continue
cmd = ["npm", "-g", "install", requirement.package] cmd = ["npm", "install"]
if not self.user_local:
cmd.append('-g')
cmd.append(requirement.package)
logging.info("npm: running %r", cmd) logging.info("npm: running %r", cmd)
run_detecting_problems(self.session, cmd, user=user) run_detecting_problems(self.session, cmd, user=user)
if missing: if missing:
@ -529,7 +578,7 @@ class StackedResolver(Resolver):
        raise UnsatisfiedRequirements(requirements)


NATIVE_RESOLVER_CLS: List[Type[Resolver]] = [
    CPANResolver,
    CTANResolver,
    PypiResolver,
@ -543,24 +592,70 @@ NATIVE_RESOLVER_CLS = [
def native_resolvers(session, user_local):
    return StackedResolver(
        [kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])


def select_resolvers(session, user_local, resolvers,
                     dep_server_url=None) -> Optional[Resolver]:
    selected = []
    for resolver in resolvers:
        for kls in NATIVE_RESOLVER_CLS:
            if kls.name == resolver:
                selected.append(kls(session, user_local))
                break
        else:
            if resolver == 'native':
                selected.extend([
                    kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])
            elif resolver == 'apt':
                if user_local:
                    raise NotImplementedError(
                        'user local not supported for apt')
                if dep_server_url:
                    from .dep_server import DepServerAptResolver
                    selected.append(DepServerAptResolver.from_session(
                        session, dep_server_url))
                else:
                    from .apt import AptResolver
                    selected.append(AptResolver.from_session(session))
            else:
                raise KeyError(resolver)
    if len(selected) == 0:
        return None
    if len(selected) == 1:
        return selected[0]
    return StackedResolver(selected)


def auto_resolver(session: Session, explain: bool = False,
                  system_wide: Optional[bool] = None,
                  dep_server_url: Optional[str] = None):
    # if session is SchrootSession or if we're root, use apt
    from ..session.schroot import SchrootSession
    from ..session import get_user
    user = get_user(session)
    resolvers = []
    if system_wide is None:
        # TODO(jelmer): Check VIRTUAL_ENV, and prioritize PypiResolver if
        # present?
        if isinstance(session, SchrootSession) or user == "root" or explain:
            system_wide = True
        else:
            system_wide = False
    if system_wide:
        try:
            from .apt import AptResolver
        except ModuleNotFoundError:
            pass
        else:
            if dep_server_url:
                from .dep_server import DepServerAptResolver
                resolvers.append(
                    DepServerAptResolver.from_session(session, dep_server_url))
            else:
                resolvers.append(AptResolver.from_session(session))
    resolvers.extend([kls(session, not system_wide)
                      for kls in NATIVE_RESOLVER_CLS])
    return StackedResolver(resolvers)

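A hedged sketch of the two entry points added above; PlainSession is just one possible session type, and the resolver names map to the new per-class name attributes:

from ognibuild.session.plain import PlainSession
from ognibuild.resolver import auto_resolver, select_resolvers

with PlainSession() as session:
    # Explicit selection by name; 'native' would expand to all of
    # NATIVE_RESOLVER_CLS, 'apt' to the (optionally dep-server backed)
    # apt resolver.
    resolver = select_resolvers(
        session, user_local=True, resolvers=['pypi', 'npm'])
    # Or let ognibuild pick: apt when root or in a schroot, the native
    # resolvers otherwise.
    resolver = auto_resolver(session)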
File diff suppressed because it is too large.

ognibuild/resolver/dep_server.py (new file)

@ -0,0 +1,88 @@
#!/usr/bin/python3
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import asyncio
import logging
from typing import List
from aiohttp import (
ClientSession,
ClientConnectorError,
ClientResponseError,
ServerDisconnectedError,
)
from yarl import URL
from .. import Requirement, USER_AGENT
from ..debian.apt import AptManager
from .apt import AptRequirement, AptResolver
class DepServerError(Exception):
def __init__(self, inner):
self.inner = inner
async def resolve_apt_requirement_dep_server(
url: str, req: Requirement) -> List[AptRequirement]:
"""Resolve a requirement to an APT requirement with a dep server.
Args:
url: Dep server URL
req: Requirement to resolve
Returns:
List of Apt requirements.
"""
async with ClientSession() as session:
try:
async with session.post(URL(url) / "resolve-apt", headers={
'User-Agent': USER_AGENT},
json={'requirement': req.json()},
raise_for_status=True) as resp:
return [
AptRequirement._from_json(e) for e in await resp.json()]
except (ClientConnectorError, ClientResponseError,
ServerDisconnectedError) as e:
logging.warning('Unable to contact dep server: %r', e)
raise DepServerError(e)
class DepServerAptResolver(AptResolver):
def __init__(self, apt, dep_server_url, tie_breakers=None):
super(DepServerAptResolver, self).__init__(
apt, tie_breakers=tie_breakers)
self.dep_server_url = dep_server_url
@classmethod
def from_session(cls, session, dep_server_url, tie_breakers=None):
return cls(
AptManager.from_session(session), dep_server_url,
tie_breakers=tie_breakers)
def resolve_all(self, req: Requirement):
try:
req.json()
except NotImplementedError:
return super(DepServerAptResolver, self).resolve_all(req)
try:
return asyncio.run(
resolve_apt_requirement_dep_server(self.dep_server_url, req))
except DepServerError:
logging.warning('Falling back to resolving error locally')
return super(DepServerAptResolver, self).resolve_all(req)

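A minimal sketch of the new dep-server path, assuming this module lands at ognibuild/resolver/dep_server.py (as its relative imports suggest) and that a dep server is actually listening on the URL used here; otherwise DepServerError is raised:

import asyncio

from ognibuild.requirements import BinaryRequirement
from ognibuild.resolver.dep_server import resolve_apt_requirement_dep_server

req = BinaryRequirement('make')
# POSTs {'requirement': req.json()} to <url>/resolve-apt and maps the reply
# back into AptRequirement objects.
apt_reqs = asyncio.run(
    resolve_apt_requirement_dep_server('http://localhost:8000', req))
print([str(r) for r in apt_reqs])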
ognibuild/session/__init__.py

@ -69,12 +69,14 @@ class Session(object):
        raise NotImplementedError(self.check_output)

    def Popen(
        self, argv, cwd: Optional[str] = None, user: Optional[str] = None,
        **kwargs
    ):
        raise NotImplementedError(self.Popen)

    def call(
        self, argv: List[str], cwd: Optional[str] = None,
        user: Optional[str] = None
    ):
        raise NotImplementedError(self.call)
@ -100,17 +102,26 @@ class Session(object):
    def external_path(self, path: str) -> str:
        raise NotImplementedError

    def rmtree(self, path: str) -> str:
        raise NotImplementedError

    is_temporary: bool


class SessionSetupFailure(Exception):
    """Session failed to be set up."""

    def __init__(self, reason, errlines=None):
        self.reason = reason
        self.errlines = errlines


def run_with_tee(session: Session,
                 args: List[str], **kwargs) -> Tuple[int, List[str]]:
    if "stdin" not in kwargs:
        kwargs["stdin"] = subprocess.DEVNULL
    p = session.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
    contents = []
    while p.poll() is None:
        line = p.stdout.readline()
@ -121,7 +132,8 @@ def run_with_tee(session: Session, args: List[str], **kwargs):
def get_user(session):
    return session.check_output(
        ["sh", "-c", "echo $USER"], cwd="/").decode().strip()


def which(session, name):

ognibuild/session/plain.py

@ -20,6 +20,7 @@ from . import Session, NoSessionOpen, SessionAlreadyOpen
import contextlib
import os
import shutil
import subprocess
import tempfile
from typing import Optional, Dict, List
@ -72,7 +73,8 @@ class PlainSession(Session):
        close_fds: bool = True,
    ):
        argv = self._prepend_user(user, argv)
        return subprocess.check_call(
            argv, cwd=cwd, env=env, close_fds=close_fds)

    def check_output(
        self,
@ -84,13 +86,19 @@ class PlainSession(Session):
        argv = self._prepend_user(user, argv)
        return subprocess.check_output(argv, cwd=cwd, env=env)

    def Popen(
            self, args, stdout=None, stderr=None, stdin=None, user=None,
            cwd=None, env=None):
        args = self._prepend_user(user, args)
        return subprocess.Popen(
            args, stdout=stdout, stderr=stderr, stdin=stdin, cwd=cwd, env=env)

    def exists(self, path):
        return os.path.exists(path)

    def rmtree(self, path):
        return shutil.rmtree(path)

    def scandir(self, path):
        return os.scandir(path)

ognibuild/session/schroot.py

@ -66,25 +66,38 @@ class SchrootSession(Session):
if line.startswith(b"E: "): if line.startswith(b"E: "):
logging.error("%s", line[3:].decode(errors="replace")) logging.error("%s", line[3:].decode(errors="replace"))
logging.warning( logging.warning(
"Failed to close schroot session %s, leaving stray.", self.session_id "Failed to close schroot session %s, leaving stray.",
self.session_id
) )
self.session_id = None self.session_id = None
return False return False
self.session_id = None self.session_id = None
self._location = None
return True return True
def __enter__(self) -> "Session": def __enter__(self) -> "Session":
if self.session_id is not None: if self.session_id is not None:
raise SessionAlreadyOpen(self) raise SessionAlreadyOpen(self)
stderr = tempfile.TemporaryFile()
try: try:
self.session_id = ( self.session_id = (
subprocess.check_output(["schroot", "-c", self.chroot, "-b"]) subprocess.check_output(
["schroot", "-c", self.chroot, "-b"], stderr=stderr)
.strip() .strip()
.decode() .decode()
) )
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
# TODO(jelmer): Capture stderr and forward in SessionSetupFailure stderr.seek(0)
raise SessionSetupFailure() errlines = stderr.readlines()
if len(errlines) == 1:
raise SessionSetupFailure(
errlines[0].rstrip().decode(), errlines=errlines)
elif len(errlines) == 0:
raise SessionSetupFailure(
"No output from schroot", errlines=errlines)
else:
raise SessionSetupFailure(
errlines[-1].decode(), errlines=errlines)
logging.info( logging.info(
"Opened schroot session %s (from %s)", self.session_id, self.chroot "Opened schroot session %s (from %s)", self.session_id, self.chroot
) )
@ -156,24 +169,28 @@ class SchrootSession(Session):
        env: Optional[Dict[str, str]] = None,
    ) -> bytes:
        try:
            return subprocess.check_output(
                self._run_argv(argv, cwd, user, env=env))
        except subprocess.CalledProcessError as e:
            raise subprocess.CalledProcessError(e.returncode, argv)

    def Popen(
        self, argv, cwd: Optional[str] = None, user: Optional[str] = None,
        **kwargs
    ):
        return subprocess.Popen(self._run_argv(argv, cwd, user), **kwargs)

    def call(
        self, argv: List[str], cwd: Optional[str] = None,
        user: Optional[str] = None
    ):
        return subprocess.call(self._run_argv(argv, cwd, user))

    def create_home(self) -> None:
        """Create the user's home directory."""
        home = (
            self.check_output(
                ["sh", "-c", "echo $HOME"], cwd="/").decode().rstrip("\n")
        )
        user = (
            self.check_output(["sh", "-c", "echo $LOGNAME"], cwd="/")
@ -189,7 +206,8 @@ class SchrootSession(Session):
            return os.path.join(self.location, path.lstrip("/"))
        if self._cwd is None:
            raise ValueError("no cwd set")
        return os.path.join(
            self.location, os.path.join(self._cwd, path).lstrip("/"))

    def exists(self, path: str) -> bool:
        fullpath = self.external_path(path)
@ -203,13 +221,17 @@ class SchrootSession(Session):
        fullpath = self.external_path(path)
        return os.mkdir(fullpath)

    def rmtree(self, path: str):
        import shutil
        fullpath = self.external_path(path)
        return shutil.rmtree(fullpath)

    def setup_from_vcs(
        self, tree, include_controldir: Optional[bool] = None, subdir="package"
    ):
        from ..vcs import dupe_vcs_tree, export_vcs_tree

        build_dir = os.path.join(self.location, "build")
        directory = tempfile.mkdtemp(dir=build_dir)
        reldir = "/" + os.path.relpath(directory, self.location)
@ -228,7 +250,7 @@ class SchrootSession(Session):
        directory = tempfile.mkdtemp(dir=build_dir)
        reldir = "/" + os.path.relpath(directory, self.location)
        export_directory = os.path.join(directory, subdir)
        shutil.copytree(path, export_directory, symlinks=True)
        return export_directory, os.path.join(reldir, subdir)

    is_temporary = True

ognibuild/test.py

@ -15,16 +15,25 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from functools import partial

from .buildsystem import NoBuildToolsFound
from .fix_build import iterate_with_build_fixers
from .logs import NoLogManager


def run_test(session, buildsystems, resolver, fixers, log_manager=None):
    # Some things want to write to the user's home directory,
    # e.g. pip caches in ~/.cache
    session.create_home()

    if log_manager is None:
        log_manager = NoLogManager()

    for buildsystem in buildsystems:
        iterate_with_build_fixers(
            fixers, log_manager.wrap(
                partial(buildsystem.test, session, resolver)))
        return

    raise NoBuildToolsFound()

ognibuild/upstream.py (new file)

@ -0,0 +1,253 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from dataclasses import dataclass, field
from typing import Optional, Dict, Any
from debian.changelog import Version
import logging
import re
from . import Requirement
from .requirements import (
CargoCrateRequirement,
GoPackageRequirement,
PythonPackageRequirement,
)
from .resolver.apt import AptRequirement, OneOfRequirement
@dataclass
class UpstreamInfo:
name: Optional[str]
buildsystem: Optional[str] = None
branch_url: Optional[str] = None
branch_subpath: Optional[str] = None
tarball_url: Optional[str] = None
version: Optional[str] = None
metadata: Dict[str, Any] = field(default_factory=dict)
def json(self):
return {
'name': self.name,
'buildsystem': self.buildsystem,
'branch_url': self.branch_url,
'branch_subpath': self.branch_subpath,
'tarball_url': self.tarball_url,
'version': self.version
}
def go_base_name(package):
(hostname, path) = package.split('/', 1)
if hostname == "github.com":
hostname = "github"
if hostname == "gopkg.in":
hostname = "gopkg"
path = path.rstrip('/').replace("/", "-")
if path.endswith('.git'):
path = path[:-4]
return (hostname + path).replace("_", "-").lower()
def load_crate_info(crate):
import urllib.error
from urllib.request import urlopen, Request
import json
http_url = 'https://crates.io/api/v1/crates/%s' % crate
headers = {'User-Agent': 'debianize', 'Accept': 'application/json'}
http_contents = urlopen(Request(http_url, headers=headers)).read()
try:
return json.loads(http_contents)
except urllib.error.HTTPError as e:
if e.code == 404:
logging.warning('No crate %r', crate)
return None
raise
def find_python_package_upstream(requirement):
import urllib.error
from urllib.request import urlopen, Request
import json
http_url = 'https://pypi.org/pypi/%s/json' % requirement.package
headers = {'User-Agent': 'ognibuild', 'Accept': 'application/json'}
try:
http_contents = urlopen(
Request(http_url, headers=headers)).read()
except urllib.error.HTTPError as e:
if e.code == 404:
logging.warning('No pypi project %r', requirement.package)
return None
raise
pypi_data = json.loads(http_contents)
upstream_branch = None
for name, url in pypi_data['info']['project_urls'].items():
if name.lower() in ('github', 'repository'):
upstream_branch = url
tarball_url = None
for url_data in pypi_data['urls']:
if url_data.get('package_type') == 'sdist':
tarball_url = url_data['url']
return UpstreamInfo(
branch_url=upstream_branch, branch_subpath='',
name='python-%s' % pypi_data['info']['name'],
tarball_url=tarball_url)
def find_go_package_upstream(requirement):
if requirement.package.startswith('github.com/'):
return UpstreamInfo(
name='golang-%s' % go_base_name(requirement.package),
branch_url='https://%s' % '/'.join(
requirement.package.split('/')[:3]),
branch_subpath='')
def find_cargo_crate_upstream(requirement):
import semver
from debmutate.debcargo import semver_pair
data = load_crate_info(requirement.crate)
if data is None:
return None
upstream_branch = data['crate']['repository']
name = 'rust-' + data['crate']['name'].replace('_', '-')
version = None
if requirement.api_version is not None:
for version_info in data['versions']:
if (not version_info['num'].startswith(
requirement.api_version + '.')
and not version_info['num'] == requirement.api_version):
continue
if version is None:
version = semver.VersionInfo.parse(version_info['num'])
else:
version = semver.max_ver(
version, semver.VersionInfo.parse(version_info['num']))
if version is None:
logging.warning(
'Unable to find version of crate %s '
'that matches API version %s',
name, requirement.api_version)
else:
name += '-' + semver_pair(str(version))
return UpstreamInfo(
branch_url=upstream_branch, branch_subpath=None,
name=name, version=str(version) if version else None,
metadata={'X-Cargo-Crate': data['crate']['name']},
buildsystem='cargo')
def apt_to_cargo_requirement(m, rels):
name = m.group(1)
api_version = m.group(2)
if m.group(3):
features = set(m.group(3)[1:].split('-'))
else:
features = set()
if not rels:
minimum_version = None
elif len(rels) == 1 and rels[0][0] == '>=':
minimum_version = Version(rels[0][1]).upstream_version
else:
logging.warning('Unable to parse Debian version %r', rels)
minimum_version = None
return CargoCrateRequirement(
name, api_version=api_version,
features=features, minimum_version=minimum_version)
def apt_to_python_requirement(m, rels):
name = m.group(2)
python_version = m.group(1)
if not rels:
minimum_version = None
elif len(rels) == 1 and rels[0][0] == '>=':
minimum_version = Version(rels[0][1]).upstream_version
else:
logging.warning('Unable to parse Debian version %r', rels)
minimum_version = None
return PythonPackageRequirement(
name, python_version=(python_version or None),
minimum_version=minimum_version)
def apt_to_go_requirement(m, rels):
parts = m.group(1).split('-')
if parts[0] == 'github':
parts[0] = 'github.com'
if parts[0] == 'gopkg':
parts[0] = 'gopkg.in'
if not rels:
version = None
elif len(rels) == 1 and rels[0][0] == '=':
version = Version(rels[0][1]).upstream_version
else:
logging.warning('Unable to parse Debian version %r', rels)
version = None
return GoPackageRequirement('/'.join(parts), version=version)
BINARY_PACKAGE_UPSTREAM_MATCHERS = [
(r'librust-(.*)-([^-+]+)(\+.*?)-dev', apt_to_cargo_requirement),
(r'python([0-9.]*)-(.*)', apt_to_python_requirement),
(r'golang-(.*)-dev', apt_to_go_requirement),
]
_BINARY_PACKAGE_UPSTREAM_MATCHERS = [
(re.compile(r), fn) for (r, fn) in BINARY_PACKAGE_UPSTREAM_MATCHERS]
def find_apt_upstream(requirement: AptRequirement) -> Optional[UpstreamInfo]:
for option in requirement.relations:
for rel in option:
for matcher, fn in _BINARY_PACKAGE_UPSTREAM_MATCHERS:
m = matcher.fullmatch(rel['name'])
if m:
upstream_requirement = fn(
m, [rel['version']] if rel['version'] else [])
return find_upstream(upstream_requirement)
logging.warning(
'Unable to map binary package name %s to upstream',
rel['name'])
return None
def find_or_upstream(requirement: OneOfRequirement) -> Optional[UpstreamInfo]:
for req in requirement.elements:
info = find_upstream(req)
if info is not None:
return info
return None
UPSTREAM_FINDER = {
'python-package': find_python_package_upstream,
'go-package': find_go_package_upstream,
'cargo-crate': find_cargo_crate_upstream,
'apt': find_apt_upstream,
'or': find_or_upstream,
}
def find_upstream(requirement: Requirement) -> Optional[UpstreamInfo]:
try:
return UPSTREAM_FINDER[requirement.family](requirement)
except KeyError:
return None
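
A minimal usage sketch of the dispatch above, assuming GoPackageRequirement can be imported from ognibuild.requirements (the requirement classes themselves are not part of this commit):

from ognibuild.requirements import GoPackageRequirement

# Look up upstream branch information for a Go module requirement.
info = find_upstream(GoPackageRequirement('github.com/chzyer/readline'))
if info is not None:
    # Something like 'golang-github-chzyer-readline' (the exact name depends
    # on go_base_name()) and 'https://github.com/chzyer/readline'.
    print(info.name, info.branch_url)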


@@ -43,7 +43,8 @@ def dupe_vcs_tree(tree, directory):
     tree = tree.basis_tree()
     try:
         result = tree._repository.controldir.sprout(
-            directory, create_tree_if_local=True, revision_id=tree.get_revision_id()
+            directory, create_tree_if_local=True,
+            revision_id=tree.get_revision_id()
         )
     except OSError as e:
         if e.errno == errno.ENOSPC:

pyproject.toml (new file, 3 lines)

@@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"


@@ -1,14 +0,0 @@
name: "ognibuild"
timeout_days: 5
tag_name: "v$VERSION"
verify_command: "python3 setup.py test"
update_version {
path: "setup.py"
match: "^ version=\"(.*)\",$"
new_line: " version=\"$VERSION\","
}
update_version {
path: "ognibuild/__init__.py"
match: "^__version__ = \\((.*)\\)$"
new_line: "__version__ = $TUPLED_VERSION"
}

scripts/report-apt-deps-status (new executable file, 89 lines)

@@ -0,0 +1,89 @@
#!/usr/bin/python3
import argparse
from contextlib import ExitStack
import logging
import sys
from typing import Dict, List
from ognibuild.buildsystem import NoBuildToolsFound, detect_buildsystems
from ognibuild.requirements import Requirement
from ognibuild.resolver.apt import AptResolver
from ognibuild.session.plain import PlainSession
parser = argparse.ArgumentParser('report-apt-deps-status')
parser.add_argument('directory', type=str, default='.', nargs='?')
parser.add_argument(
'--detailed', action='store_true', help='Show detailed analysis')
args = parser.parse_args()
logging.basicConfig(format='%(message)s', level=logging.INFO)
session = PlainSession()
with ExitStack() as es:
es.enter_context(session)
session.chdir(args.directory)
resolver = AptResolver.from_session(session)
try:
bss = list(detect_buildsystems(args.directory))
except NoBuildToolsFound:
logging.fatal('No build tools found')
sys.exit(1)
logging.debug("Detected buildsystems: %s", ", ".join(map(str, bss)))
deps: Dict[str, List[Requirement]] = {}
for buildsystem in bss:
try:
declared_reqs = buildsystem.get_declared_dependencies(session, [])
for stage, req in declared_reqs:
deps.setdefault(stage, []).append(req)
except NotImplementedError:
logging.warning(
'Unable to get dependencies from buildsystem %r, skipping',
buildsystem)
continue
if args.detailed:
for stage, reqs in deps.items():
logging.info("Stage: %s", stage)
for req in reqs:
apt_req = resolver.resolve(req)
logging.info("%s: %s", req, apt_req.pkg_relation_str())
logging.info('')
else:
build_depends = []
test_depends = []
run_depends = []
unresolved = []
for stage, reqs in deps.items():
for req in reqs:
apt_req = resolver.resolve(req)
if apt_req is None:
unresolved.append(req)
elif stage == 'core':
build_depends.append(apt_req)
run_depends.append(apt_req)
elif stage == 'build':
build_depends.append(apt_req)
elif stage == 'test':
test_depends.append(apt_req)
else:
raise NotImplementedError('stage %s not supported' % stage)
if build_depends:
logging.info(
'Build-Depends: %s',
', '.join([d.pkg_relation_str() for d in build_depends]))
if test_depends:
logging.info(
'Test-Depends: %s',
', '.join([d.pkg_relation_str() for d in test_depends]))
if run_depends:
logging.info(
'Depends: %s',
', '.join([d.pkg_relation_str() for d in run_depends]))
if unresolved:
sys.stdout.write('\n')
logging.warning(
'Unable to find apt packages for the following dependencies:')
for req in unresolved:
logging.warning('* %s', req)
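
For context, a minimal sketch of the resolver calls the script builds on; it assumes an apt-based host and that PythonPackageRequirement is importable from ognibuild.requirements (not shown in this commit):

from ognibuild.requirements import PythonPackageRequirement
from ognibuild.resolver.apt import AptResolver
from ognibuild.session.plain import PlainSession

with PlainSession() as session:
    resolver = AptResolver.from_session(session)
    # Map an upstream requirement to an apt dependency, if one can be found.
    apt_req = resolver.resolve(PythonPackageRequirement('toml'))
    if apt_req is not None:
        # A Debian relation string such as 'python3-toml'.
        print(apt_req.pkg_relation_str())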


@@ -1,13 +1,65 @@
[metadata]
name = ognibuild
description = Detect and run any build system
version = attr:ognibuild.__version__
maintainer = Jelmer Vernooij
maintainer_email = jelmer@jelmer.uk
license = GNU GPLv2 or later
url = https://jelmer.uk/code/ognibuild
classifiers =
Development Status :: 4 - Beta
License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: Implementation :: CPython
Operating System :: POSIX
[options]
packages =
ognibuild
ognibuild.debian
ognibuild.resolver
ognibuild.session
scripts = scripts/report-apt-deps-status
install_requires =
breezy>=3.2
buildlog-consultant>=0.0.21
requirements-parser
toml
setuptools
ruamel.yaml
tests_require =
testtools
types-toml
[options.entry_points]
console_scripts =
ogni=ognibuild.__main__:main
deb-fix-build=ognibuild.debian.fix_build:main
[options.extras_require]
dev =
testtools
debian =
debmutate
python_debian
python_apt
brz-debian
lz4
remote =
breezy
dulwich
dep_server =
aiohttp
aiohttp-openmetrics
gcp = google-cloud-logging
[flake8]
banned-modules = silver-platter = Should not use silver-platter
+exclude = build,.eggs/

[mypy]
ignore_missing_imports = True

[bdist_wheel]
universal = 1
-[egg_info]
-tag_build =
-tag_date = 0


@@ -1,40 +1,3 @@
-#!/usr/bin/env python3
-# encoding: utf-8
-
+#!/usr/bin/python3
 from setuptools import setup
-
-setup(name="ognibuild",
-      description="Detect and run any build system",
-      version="0.0.7",
-      maintainer="Jelmer Vernooij",
-      maintainer_email="jelmer@jelmer.uk",
-      license="GNU GPLv2 or later",
-      url="https://jelmer.uk/code/ognibuild",
-      packages=['ognibuild', 'ognibuild.tests', 'ognibuild.debian', 'ognibuild.resolver', 'ognibuild.session'],
-      classifiers=[
-          'Development Status :: 4 - Beta',
-          'License :: OSI Approved :: '
-          'GNU General Public License v2 or later (GPLv2+)',
-          'Programming Language :: Python :: 3.5',
-          'Programming Language :: Python :: 3.6',
-          'Programming Language :: Python :: Implementation :: CPython',
-          'Operating System :: POSIX',
-      ],
-      entry_points={
-          "console_scripts": [
-              "ogni=ognibuild.__main__:main",
-              "deb-fix-build=ognibuild.debian.fix_build:main",
-          ]
-      },
-      install_requires=[
-          'breezy',
-          'buildlog-consultant>=0.0.10',
-          'requirements-parser',
-      ],
-      extras_require={
-          'debian': ['debmutate', 'python_debian', 'python_apt'],
-      },
-      tests_require=['python_debian', 'buildlog-consultant', 'breezy', 'testtools'],
-      test_suite='ognibuild.tests.test_suite',
-      )
+setup()


@@ -23,10 +23,13 @@ import unittest
 def test_suite():
     names = [
-        "debian_build",
+        'buildlog',
+        'logs',
     ]
     if os.path.exists("/usr/bin/dpkg-architecture"):
+        names.append("debian_build")
         names.append("debian_fix_build")
-    module_names = ["ognibuild.tests.test_" + name for name in names]
+        names.append("resolver_apt")
+    module_names = ["tests.test_" + name for name in names]
     loader = unittest.TestLoader()
     return loader.loadTestsFromNames(module_names)

tests/test_buildlog.py (new file, 47 lines)

@@ -0,0 +1,47 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from ognibuild.buildlog import PROBLEM_CONVERTERS
from buildlog_consultant import (
problem_clses,
__version__ as buildlog_consultant_version,
)
from unittest import TestCase
class TestProblemsExists(TestCase):
def test_exist(self):
for entry in PROBLEM_CONVERTERS:
if len(entry) == 2:
problem_kind, fn = entry
min_version = None
elif len(entry) == 3:
problem_kind, fn, min_version = entry
else:
raise TypeError(entry)
if min_version is not None:
min_version_tuple = tuple(
[int(x) for x in min_version.split('.')])
if buildlog_consultant_version < min_version_tuple:
continue
self.assertTrue(
problem_kind in problem_clses,
f"{problem_kind} does not exist in known "
"buildlog-consultant problem kinds")


@@ -17,8 +17,17 @@
 import datetime
 import os
+import sys
 
-from ..debian.build import add_dummy_changelog_entry, get_build_architecture
+from debian.changelog import Version
+
+from ognibuild.debian.build import (
+    add_dummy_changelog_entry,
+    get_build_architecture,
+    version_add_suffix,
+    _builddeb_command,
+    DEFAULT_BUILDER,
+)
 
 from breezy.tests import TestCaseWithTransport, TestCase
@@ -150,3 +159,43 @@ class BuildArchitectureTests(TestCase):
    def test_is_str(self):
        self.assertIsInstance(get_build_architecture(), str)
class VersionAddSuffixTests(TestCase):
def test_native(self):
self.assertEqual(
Version('1.0~jan+lint4'),
version_add_suffix(Version('1.0~jan+lint3'), '~jan+lint'))
self.assertEqual(
Version('1.0~jan+lint1'),
version_add_suffix(Version('1.0'), '~jan+lint'))
def test_normal(self):
self.assertEqual(
Version('1.0-1~jan+lint4'),
version_add_suffix(Version('1.0-1~jan+lint3'), '~jan+lint'))
self.assertEqual(
Version('1.0-1~jan+lint1'),
version_add_suffix(Version('1.0-1'), '~jan+lint'))
self.assertEqual(
Version('0.0.12-1~jan+lint1'),
version_add_suffix(Version('0.0.12-1'), '~jan+lint'))
self.assertEqual(
Version('0.0.12-1~jan+unchanged1~jan+lint1'),
version_add_suffix(
Version('0.0.12-1~jan+unchanged1'), '~jan+lint'))
class BuilddebCommandTests(TestCase):
def test_simple(self):
self.assertEqual(
[sys.executable, "-m", "breezy", "builddeb",
"--guess-upstream-branch-url", "--builder=" + DEFAULT_BUILDER],
_builddeb_command())
self.assertEqual(
[sys.executable, "-m", "breezy", "builddeb",
"--guess-upstream-branch-url", "--builder=" + DEFAULT_BUILDER,
"--result-dir=/tmp/blah"],
_builddeb_command(result_dir="/tmp/blah"))


@@ -29,13 +29,15 @@ from buildlog_consultant.common import (
     MissingRubyGem,
     MissingValaPackage,
 )
-from ..debian.apt import AptManager, FileSearcher
-from ..debian.fix_build import (
+from ognibuild.debian.apt import AptManager, FileSearcher
+from ognibuild.debian.fix_build import (
     resolve_error,
     versioned_package_fixers,
     apt_fixers,
     DebianPackagingContext,
+    add_build_dependency,
 )
+from ognibuild.resolver.apt import AptRequirement
 from breezy.commit import NullCommitReporter
 from breezy.tests import TestCaseWithTransport
@@ -44,7 +46,7 @@ class DummyAptSearcher(FileSearcher):
     def __init__(self, files):
         self._apt_files = files
 
-    def search_files(self, path, regex=False, case_insensitive=False):
+    async def search_files(self, path, regex=False, case_insensitive=False):
         for p, pkg in sorted(self._apt_files.items()):
             if case_insensitive:
                 flags = re.I
@@ -97,7 +99,7 @@ blah (0.1) UNRELEASED; urgency=medium
         self._apt_files = {}
 
     def resolve(self, error, context=("build",)):
-        from ..session.plain import PlainSession
+        from ognibuild.session.plain import PlainSession
 
         session = PlainSession()
         apt = AptManager(session)
@@ -109,7 +111,8 @@ blah (0.1) UNRELEASED; urgency=medium
             update_changelog=True,
             commit_reporter=NullCommitReporter(),
         )
-        fixers = versioned_package_fixers(session, context, apt) + apt_fixers(apt, context)
+        fixers = versioned_package_fixers(
+            session, context, apt) + apt_fixers(apt, context)
         return resolve_error(error, ("build",), fixers)
 
     def get_build_deps(self):
@@ -118,7 +121,8 @@ blah (0.1) UNRELEASED; urgency=medium
     def test_missing_command_unknown(self):
         self._apt_files = {}
-        self.assertFalse(self.resolve(MissingCommand("acommandthatdoesnotexist")))
+        self.assertFalse(self.resolve(
+            MissingCommand("acommandthatdoesnotexist")))
 
     def test_missing_command_brz(self):
         self._apt_files = {
@@ -130,7 +134,8 @@ blah (0.1) UNRELEASED; urgency=medium
         self.overrideEnv("DEBFULLNAME", "Jelmer Vernooij")
         self.assertTrue(self.resolve(MissingCommand("brz")))
         self.assertEqual("libc6, brz", self.get_build_deps())
-        rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision())
+        rev = self.tree.branch.repository.get_revision(
+            self.tree.branch.last_revision())
         self.assertEqual("Add missing build dependency on brz.\n", rev.message)
         self.assertFalse(self.resolve(MissingCommand("brz")))
         self.assertEqual("libc6, brz", self.get_build_deps())
@@ -153,10 +158,12 @@ blah (0.1) UNRELEASED; urgency=medium
     def test_missing_ruby_file_from_gem(self):
         self._apt_files = {
             "/usr/share/rubygems-integration/all/gems/activesupport-"
-            "5.2.3/lib/active_support/core_ext/string/strip.rb": "ruby-activesupport"
+            "5.2.3/lib/active_support/core_ext/string/strip.rb":
+                "ruby-activesupport"
         }
         self.assertTrue(
-            self.resolve(MissingRubyFile("active_support/core_ext/string/strip"))
+            self.resolve(MissingRubyFile(
+                "active_support/core_ext/string/strip"))
         )
         self.assertEqual("libc6, ruby-activesupport", self.get_build_deps())
@@ -173,7 +180,8 @@ blah (0.1) UNRELEASED; urgency=medium
         self.assertEqual("libc6, ruby-bio (>= 2.0.3)", self.get_build_deps())
 
     def test_missing_perl_module(self):
-        self._apt_files = {"/usr/share/perl5/App/cpanminus/fatscript.pm": "cpanminus"}
+        self._apt_files = {
+            "/usr/share/perl5/App/cpanminus/fatscript.pm": "cpanminus"}
         self.assertTrue(
             self.resolve(
                 MissingPerlModule(
@@ -200,28 +208,34 @@ blah (0.1) UNRELEASED; urgency=medium
     def test_missing_pkg_config(self):
         self._apt_files = {
-            "/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev"
+            "/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc":
+                "libxcb-xfixes0-dev"
         }
         self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes")))
         self.assertEqual("libc6, libxcb-xfixes0-dev", self.get_build_deps())
 
     def test_missing_pkg_config_versioned(self):
         self._apt_files = {
-            "/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev"
+            "/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc":
+                "libxcb-xfixes0-dev"
         }
         self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes", "1.0")))
-        self.assertEqual("libc6, libxcb-xfixes0-dev (>= 1.0)", self.get_build_deps())
+        self.assertEqual(
+            "libc6, libxcb-xfixes0-dev (>= 1.0)", self.get_build_deps())
 
     def test_missing_python_module(self):
-        self._apt_files = {"/usr/lib/python3/dist-packages/m2r.py": "python3-m2r"}
+        self._apt_files = {
+            "/usr/lib/python3/dist-packages/m2r.py": "python3-m2r"}
         self.assertTrue(self.resolve(MissingPythonModule("m2r")))
         self.assertEqual("libc6, python3-m2r", self.get_build_deps())
 
     def test_missing_go_package(self):
         self._apt_files = {
-            "/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go": "golang-github-chzyer-readline-dev",
+            "/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go":
+                "golang-github-chzyer-readline-dev",
         }
-        self.assertTrue(self.resolve(MissingGoPackage("github.com/chzyer/readline")))
+        self.assertTrue(self.resolve(
+            MissingGoPackage("github.com/chzyer/readline")))
         self.assertEqual(
             "libc6, golang-github-chzyer-readline-dev", self.get_build_deps()
         )
@@ -232,3 +246,63 @@ blah (0.1) UNRELEASED; urgency=medium
        }
        self.assertTrue(self.resolve(MissingValaPackage("posix")))
        self.assertEqual("libc6, valac-0.48-vapi", self.get_build_deps())
class AddBuildDependencyTests(TestCaseWithTransport):
def setUp(self):
super(AddBuildDependencyTests, self).setUp()
self.tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
("debian/",),
(
"debian/control",
"""\
Source: blah
Build-Depends: libc6
Package: python-blah
Depends: ${python3:Depends}
Description: A python package
Foo
""",
),
(
"debian/changelog",
"""\
blah (0.1) UNRELEASED; urgency=medium
* Initial release. (Closes: #XXXXXX)
-- Jelmer Vernooij <jelmer@debian.org> Sat, 04 Apr 2020 14:12:13 +0000
""",
),
]
)
self.tree.add(["debian", "debian/control", "debian/changelog"])
self.tree.commit("Initial commit")
self.context = DebianPackagingContext(
self.tree,
subpath="",
committer="ognibuild <ognibuild@jelmer.uk>",
update_changelog=True,
commit_reporter=NullCommitReporter(),
)
def test_already_present(self):
requirement = AptRequirement.simple('libc6')
self.assertFalse(add_build_dependency(self.context, requirement))
def test_basic(self):
requirement = AptRequirement.simple('foo')
self.assertTrue(add_build_dependency(self.context, requirement))
self.assertFileEqual("""\
Source: blah
Build-Depends: libc6, foo
Package: python-blah
Depends: ${python3:Depends}
Description: A python package
Foo
""", 'debian/control')

tests/test_logs.py (new file, 95 lines)

@@ -0,0 +1,95 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import tempfile
from unittest import TestCase
from ognibuild.logs import (
copy_output,
redirect_output,
rotate_logfile,
DirectoryLogManager,
)
class TestCopyOutput(TestCase):
def test_no_tee(self):
with tempfile.TemporaryDirectory() as td:
p = os.path.join(td, 'foo.log')
with copy_output(p, tee=False):
sys.stdout.write('lala\n')
sys.stdout.flush()
with open(p, 'r') as f:
self.assertEqual('lala\n', f.read())
def test_tee(self):
with tempfile.TemporaryDirectory() as td:
p = os.path.join(td, 'foo.log')
with copy_output(p, tee=True):
sys.stdout.write('lala\n')
sys.stdout.flush()
with open(p, 'r') as f:
self.assertEqual('lala\n', f.read())
class TestRedirectOutput(TestCase):
def test_simple(self):
with tempfile.TemporaryDirectory() as td:
p = os.path.join(td, 'foo.log')
with open(p, 'w') as f:
with redirect_output(f):
sys.stdout.write('lala\n')
sys.stdout.flush()
with open(p, 'r') as f:
self.assertEqual('lala\n', f.read())
class TestRotateLogfile(TestCase):
def test_does_not_exist(self):
with tempfile.TemporaryDirectory() as td:
p = os.path.join(td, 'foo.log')
rotate_logfile(p)
self.assertEqual([], os.listdir(td))
def test_simple(self):
with tempfile.TemporaryDirectory() as td:
p = os.path.join(td, 'foo.log')
with open(p, 'w') as f:
f.write('contents\n')
rotate_logfile(p)
self.assertEqual(['foo.log.1'], os.listdir(td))
class TestLogManager(TestCase):
def test_simple(self):
with tempfile.TemporaryDirectory() as td:
p = os.path.join(td, 'foo.log')
lm = DirectoryLogManager(p, mode='redirect')
def writesomething():
sys.stdout.write('foo\n')
sys.stdout.flush()
fn = lm.wrap(writesomething)
fn()
with open(p, 'r') as f:
self.assertEqual('foo\n', f.read())


@@ -0,0 +1,47 @@
#!/usr/bin/python
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from unittest import TestCase
from ognibuild.resolver.apt import get_possible_python3_paths_for_python_object
class TestPython3Paths(TestCase):
def test_paths(self):
self.assertEqual([
'/usr/lib/python3/dist\\-packages/dulwich/__init__\\.py',
'/usr/lib/python3/dist\\-packages/dulwich\\.py',
'/usr/lib/python3\\.[0-9]+/'
'lib\\-dynload/dulwich.cpython\\-.*\\.so',
'/usr/lib/python3\\.[0-9]+/dulwich\\.py',
'/usr/lib/python3\\.[0-9]+/dulwich/__init__\\.py'],
get_possible_python3_paths_for_python_object('dulwich'))
self.assertEqual([
'/usr/lib/python3/dist\\-packages/cleo/foo/__init__\\.py',
'/usr/lib/python3/dist\\-packages/cleo/foo\\.py',
'/usr/lib/python3\\.[0-9]+/'
'lib\\-dynload/cleo/foo.cpython\\-.*\\.so',
'/usr/lib/python3\\.[0-9]+/cleo/foo\\.py',
'/usr/lib/python3\\.[0-9]+/cleo/foo/__init__\\.py',
'/usr/lib/python3/dist\\-packages/cleo/__init__\\.py',
'/usr/lib/python3/dist\\-packages/cleo\\.py',
'/usr/lib/python3\\.[0-9]+/lib\\-dynload/cleo.cpython\\-.*\\.so',
'/usr/lib/python3\\.[0-9]+/cleo\\.py',
'/usr/lib/python3\\.[0-9]+/cleo/__init__\\.py'],
get_possible_python3_paths_for_python_object('cleo.foo'))
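
The strings asserted above are regular expressions; a small sketch of matching them against a concrete installed path (plain re usage, independent of ognibuild internals):

import re

from ognibuild.resolver.apt import get_possible_python3_paths_for_python_object

patterns = get_possible_python3_paths_for_python_object('dulwich')
candidate = '/usr/lib/python3/dist-packages/dulwich/__init__.py'
# The first pattern in the list matches this path exactly.
print(any(re.fullmatch(p, candidate) for p in patterns))  # True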