Merge branch 'new-upstream/upstream' into 'upstream'

Import of new upstream version 0.0.7

See merge request jelmer/ognibuild!1
This commit is contained in:
Jelmer Vernooij 2021-06-03 11:51:38 +00:00
commit 3e1f11dd79
52 changed files with 8139 additions and 301 deletions

5
.flake8 Normal file
View file

@ -0,0 +1,5 @@
[flake8]
extend-ignore = E203, E266, E501, W293, W291
max-line-length = 88
max-complexity = 18
select = B,C,E,F,W,T4,B9

View file

@ -8,30 +8,40 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.7, 3.8, pypy3]
os: [ubuntu-latest, macos-latest]
python-version: [3.7, 3.8]
fail-fast: false
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip flake8
python setup.py develop
- name: Style checks
run: |
python -m flake8
- name: Typing checks
run: |
pip install -U mypy
python -m mypy ognibuild
if: "matrix.python-version != 'pypy3'"
- name: Test suite run
run: |
python -m unittest ognibuild.tests.test_suite
env:
PYTHONHASHSEED: random
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip flake8 cython
python setup.py develop
- name: Install Debian-specific dependencies
run: |
sudo apt install libapt-pkg-dev
python -m pip install wheel
python -m pip install git+https://salsa.debian.org/apt-team/python-apt
python -m pip install -e ".[debian]"
python -m pip install testtools
mkdir -p ~/.config/breezy/plugins
brz branch lp:brz-debian ~/.config/breezy/plugins/debian
if: "matrix.python-version != 'pypy3' && matrix.os == 'ubuntu-latest'"
- name: Style checks
run: |
python -m flake8
- name: Typing checks
run: |
pip install -U mypy
python -m mypy ognibuild
if: "matrix.python-version != 'pypy3'"
- name: Test suite run
run: |
python -m unittest ognibuild.tests.test_suite
env:
PYTHONHASHSEED: random

4
.gitignore vendored
View file

@ -3,3 +3,7 @@ build
ognibuild.egg-info
dist
__pycache__
.eggs
*.swp
*.swo
*.swn

1
AUTHORS Normal file
View file

@ -0,0 +1 @@
Jelmer Vernooij <jelmer@jelmer.uk>

76
CODE_OF_CONDUCT.md Normal file
View file

@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project lead at jelmer@jelmer.uk. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

17
PKG-INFO Normal file
View file

@ -0,0 +1,17 @@
Metadata-Version: 2.1
Name: ognibuild
Version: 0.0.7
Summary: Detect and run any build system
Home-page: https://jelmer.uk/code/ognibuild
Maintainer: Jelmer Vernooij
Maintainer-email: jelmer@jelmer.uk
License: GNU GPLv2 or later
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Provides-Extra: debian

View file

@ -1,5 +1,4 @@
ognibuild
=========
# ognibuild
Ognibuild is a simple wrapper with a common interface for invoking any kind of
build tool.
@ -10,8 +9,7 @@ parameters.
It can also detect and install missing dependencies.
Goals
-----
## Goals
The goal of ognibuild is to provide a consistent CLI that can be used for any
software package. It is mostly useful for automated building of
@ -20,8 +18,7 @@ large sets of diverse packages (e.g. different programming languages).
It is not meant to expose all functionality that is present in the underlying
build systems. To use that, invoke those build systems directly.
Usage
-----
## Usage
Ognibuild has a number of subcommands:
@ -31,7 +28,59 @@ Ognibuild has a number of subcommands:
* ``ogni install`` - install the package
* ``ogni test`` - run the testsuite in the source directory
License
-------
It also includes a subcommand that can fix up the build dependencies
for Debian packages, called deb-fix-build.
## Status
Ognibuild is functional, but sometimes rough around the edges. If you run into
issues (or lack of support for a particular ecosystem), please file a bug.
### Supported Build Systems
- Bazel
- Cabal
- Cargo
- Golang
- Gradle
- Make, including various makefile generators:
- autoconf/automake
- CMake
- Makefile.PL
- qmake
- Maven
- ninja, including ninja file generators:
- meson
- Node
- Octave
- Perl
- Module::Build::Tiny
- Dist::Zilla
- Minilla
- PHP Pear
- Python - setup.py/setup.cfg/pyproject.toml
- R
- Ruby gems
- Waf
### Supported package repositories
Package repositories are used to install missing dependencies.
The following "native" repositories are supported:
- pypi
- cpan
- hackage
- npm
- cargo
- cran
- golang\*
As well as one distribution repository:
- apt
## License
Ognibuild is licensed under the GNU GPL, v2 or later.

10
SECURITY.md Normal file
View file

@ -0,0 +1,10 @@
# Security Policy
## Supported Versions
ognibuild is still under heavy development. Only the latest version is security
supported.
## Reporting a Vulnerability
Please report security issues by e-mail to jelmer@jelmer.uk, ideally PGP encrypted to the key at https://jelmer.uk/D729A457.asc

2
TODO Normal file
View file

@ -0,0 +1,2 @@
- Need to be able to check up front whether a requirement is satisfied, before attempting to install it (which is more expensive)
- Cache parsed Contents files during test suite runs and/or speed up reading

51
notes/architecture.md Normal file
View file

@ -0,0 +1,51 @@
Upstream requirements are expressed as objects derived from Requirement.
They can either be:
* extracted from the build system
* extracted from errors in build logs
The details of Requirements are specific to the kind of requirement,
and otherwise opaque to ognibuild.
When building a package, we first make sure that all declared upstream
requirements are met.
Then we attempt to build.
If any Problems are found in the log, buildlog-consultant will report them.
ognibuild can then invoke "fixers" to address Problems. Fixers can do things
like e.g. upgrade configure.ac to a newer version, or invoke autoreconf.
A list of possible fixers can be provided. Each fixer will be called
(in order) until one of them claims to have fixed the issue.
Problems can be converted to Requirements by RequirementFixer
InstallFixer uses a Resolver object that
can translate Requirement objects into apt package names or
e.g. cpan commands.
ognibuild keeps finding problems, resolving them and rebuilding until it finds
a problem it can not resolve or that it thinks it has already resolved
(i.e. seen before).
Operations are run in a Session - this can represent a virtualized
environment of some sort (e.g. a chroot or virtualenv) or simply
on the host machine.
For e.g. PerlModuleRequirement, need to be able to:
* install from apt package
+ InstallFixer(AptResolver()).fix(problem)
* update debian package (source, runtime, test) deps to include apt package
+ DebianPackageDepFixer(AptResolver()).fix(problem, ('test', 'foo'))
* suggest command to run to install from apt package
+ InstallFixer(AptResolver()).command(problem)
* install from cpan
+ InstallFixer(CpanResolver()).fix(problem)
* suggest command to run to install from cpan package
+ InstallFixer(CpanResolver()).command(problem)
* update source package reqs to depend on perl module
+ PerlDepFixer().fix(problem)

49
notes/concepts.md Normal file
View file

@ -0,0 +1,49 @@
Requirement
===========
Some sort of constraint about the environment that can be specified and satisfied.
Examples:
* a dependency on version 1.3 of the python package "foo"
* a dependency on the apt package "blah"
Requirements can be discovered from build system metadata files and from build logs.
Different kinds of requirements are subclassed from the main Requirement class.
Output
======
A build artifact that can be produced by a build system, e.g. an
executable file or a Perl module.
Problem
=======
An issue found in a build log by buildlog-consultant.
BuildFixer
==========
Takes a build problem and tries to resolve it in some way.
This can mean changing the project that's being built
(by modifying the source tree), or changing the environment
(e.g. by install packages from apt).
Common fixers:
+ InstallFixer([(resolver, repository)])
+ DebianDependencyFixer(tree, resolver)
Repository
==========
Some sort of provider of external requirements. Can satisfy environment
requirements.
Resolver
========
Can take one kind of upstream requirement and turn it into another. E.g.
converting missing Python modules to apt or pypi packages.

44
notes/roadmap.md Normal file
View file

@ -0,0 +1,44 @@
class UpstreamRequirement(object):
family: str
class PythonPackageRequirement(UpstreamRequirement):
package: str
SetupPy.get_build_requirements() yields some PythonPackageRequirement objects
apt_resolver.install([PythonPackageRequirement(...)]) then:
* needs to translate to apt package name
Once we find errors during build, buildlog consultant extracts them ("MissingPythonPackage", "configure.ac needs updating").
fix_build then takes the problem found and converts it to an action:
* modifying some of the source files
* resolving requirements
Resolving requirements dependencies means creating e.g. a PythonPackageRequirement() object and feeding it to resolver.install()
we have specific handlers for each kind of thingy
resolver.install() needs to translate the upstream information to an apt name or a cpan name or update dependencies or raise an exception or..
MissingPythonPackage() -> PythonPackageRequirement()
PythonPackageRequirement() can either:
* directly provide apt names, if they are known
* look up apt names
We specifically want to support multiple resolvers. In some cases a resolver can't deal with a particular kind of requirement.
Who is responsible for taking a PythonPackageRequirement and translating it to an apt package name?
1) PythonPackageRequirement itself? That would mean knowledge about package naming etc, is with the requirement object, which seems wrong.
2) PythonPackageRequirement.apt_name(apt_archive) - i.e. find the package name given an archive object of some sort
3) The apt resolver has a list of callbacks to map requirements to apt package names

View file

@ -0,0 +1,17 @@
Metadata-Version: 2.1
Name: ognibuild
Version: 0.0.7
Summary: Detect and run any build system
Home-page: https://jelmer.uk/code/ognibuild
Maintainer: Jelmer Vernooij
Maintainer-email: jelmer@jelmer.uk
License: GNU GPLv2 or later
Description: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Provides-Extra: debian

View file

@ -0,0 +1,52 @@
.flake8
.gitignore
AUTHORS
CODE_OF_CONDUCT.md
LICENSE
README.md
SECURITY.md
TODO
releaser.conf
setup.cfg
setup.py
.github/workflows/pythonpackage.yml
notes/architecture.md
notes/concepts.md
notes/roadmap.md
ognibuild/__init__.py
ognibuild/__main__.py
ognibuild/build.py
ognibuild/buildlog.py
ognibuild/buildsystem.py
ognibuild/clean.py
ognibuild/dist.py
ognibuild/dist_catcher.py
ognibuild/fix_build.py
ognibuild/fixers.py
ognibuild/info.py
ognibuild/install.py
ognibuild/outputs.py
ognibuild/requirements.py
ognibuild/test.py
ognibuild/vcs.py
ognibuild.egg-info/PKG-INFO
ognibuild.egg-info/SOURCES.txt
ognibuild.egg-info/dependency_links.txt
ognibuild.egg-info/entry_points.txt
ognibuild.egg-info/requires.txt
ognibuild.egg-info/top_level.txt
ognibuild/debian/__init__.py
ognibuild/debian/apt.py
ognibuild/debian/build.py
ognibuild/debian/build_deps.py
ognibuild/debian/file_search.py
ognibuild/debian/fix_build.py
ognibuild/debian/udd.py
ognibuild/resolver/__init__.py
ognibuild/resolver/apt.py
ognibuild/session/__init__.py
ognibuild/session/plain.py
ognibuild/session/schroot.py
ognibuild/tests/__init__.py
ognibuild/tests/test_debian_build.py
ognibuild/tests/test_debian_fix_build.py

View file

@ -0,0 +1 @@

View file

@ -0,0 +1,4 @@
[console_scripts]
deb-fix-build = ognibuild.debian.fix_build:main
ogni = ognibuild.__main__:main

View file

@ -0,0 +1,8 @@
breezy
buildlog-consultant>=0.0.10
requirements-parser
[debian]
debmutate
python_apt
python_debian

View file

@ -0,0 +1 @@
ognibuild

View file

@ -18,210 +18,67 @@
import os
import stat
import subprocess
import sys
from typing import List
DEFAULT_PYTHON = 'python3'
__version__ = (0, 0, 7)
USER_AGENT = "Ognibuild"
class DetailedFailure(Exception):
def __init__(self, retcode, argv, error):
self.retcode = retcode
self.argv = argv
self.error = error
class UnidentifiedError(Exception):
"""An unidentified error."""
def __init__(self, retcode, argv, lines):
def __init__(self, retcode, argv, lines, secondary=None):
self.retcode = retcode
self.argv = argv
self.lines = lines
self.secondary = secondary
class NoBuildToolsFound(Exception):
"""No supported build tools were found."""
def __repr__(self):
return "<%s(%r, %r, ..., secondary=%r)>" % (
type(self).__name__,
self.retcode,
self.argv,
self.secondary,
)
def shebang_binary(p):
if not (os.stat(p).st_mode & stat.S_IEXEC):
return None
with open(p, 'rb') as f:
with open(p, "rb") as f:
firstline = f.readline()
if not firstline.startswith(b'#!'):
if not firstline.startswith(b"#!"):
return None
args = firstline[2:].split(b' ')
if args[0] in (b'/usr/bin/env', b'env'):
return os.path.basename(args[1].decode())
return os.path.basename(args[0].decode())
args = firstline[2:].strip().split(b" ")
if args[0] in (b"/usr/bin/env", b"env"):
return os.path.basename(args[1].decode()).strip()
return os.path.basename(args[0].decode()).strip()
def note(m):
sys.stdout.write('%s\n' % m)
class Requirement(object):
# Name of the family of requirements - e.g. "python-package"
family: str
def __init__(self, family):
self.family = family
def met(self, session):
raise NotImplementedError(self)
def warning(m):
sys.stderr.write('WARNING: %s\n' % m)
class UpstreamOutput(object):
def __init__(self, family):
self.family = family
def run_with_tee(session, args: List[str], **kwargs):
p = session.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
contents = []
while p.poll() is None:
line = p.stdout.readline()
sys.stdout.buffer.write(line)
sys.stdout.buffer.flush()
contents.append(line.decode('utf-8', 'surrogateescape'))
return p.returncode, contents
def run_apt(session, args: List[str]) -> None:
args = ['apt', '-y'] + args
retcode, lines = run_with_tee(session, args, cwd='/', user='root')
if retcode == 0:
return
raise UnidentifiedError(retcode, args, lines)
def apt_install(session, packages: List[str]) -> None:
run_apt(session, ['install'] + packages)
def run_with_build_fixer(session, args):
session.check_call(args)
def run_dist(session):
# TODO(jelmer): Check $PATH rather than hardcoding?
if not os.path.exists('/usr/bin/git'):
apt_install(session, ['git'])
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
if os.path.exists('package.xml'):
apt_install(session, ['php-pear', 'php-horde-core'])
note('Found package.xml, assuming pear package.')
session.check_call(['pear', 'package'])
return
if os.path.exists('pyproject.toml'):
import toml
with open('pyproject.toml', 'r') as pf:
pyproject = toml.load(pf)
if 'poetry' in pyproject.get('tool', []):
note('Found pyproject.toml with poetry section, '
'assuming poetry project.')
apt_install(session, ['python3-venv', 'python3-pip'])
session.check_call(['pip3', 'install', 'poetry'], user='root')
session.check_call(['poetry', 'build', '-f', 'sdist'])
return
if os.path.exists('setup.py'):
note('Found setup.py, assuming python project.')
apt_install(session, ['python3', 'python3-pip'])
with open('setup.py', 'r') as f:
setup_py_contents = f.read()
try:
with open('setup.cfg', 'r') as f:
setup_cfg_contents = f.read()
except FileNotFoundError:
setup_cfg_contents = ''
if 'setuptools' in setup_py_contents:
note('Reference to setuptools found, installing.')
apt_install(session, ['python3-setuptools'])
if ('setuptools_scm' in setup_py_contents or
'setuptools_scm' in setup_cfg_contents):
note('Reference to setuptools-scm found, installing.')
apt_install(
session, ['python3-setuptools-scm', 'git', 'mercurial'])
# TODO(jelmer): Install setup_requires
interpreter = shebang_binary('setup.py')
if interpreter is not None:
if interpreter == 'python2' or interpreter.startswith('python2.'):
apt_install(session, [interpreter])
elif (interpreter == 'python3' or
interpreter.startswith('python3.')):
apt_install(session, [interpreter])
else:
apt_install(session, [DEFAULT_PYTHON])
run_with_build_fixer(session, ['./setup.py', 'sdist'])
else:
# Just assume it's Python 3
apt_install(session, ['python3'])
run_with_build_fixer(session, ['python3', './setup.py', 'sdist'])
return
if os.path.exists('setup.cfg'):
note('Found setup.cfg, assuming python project.')
apt_install(session, ['python3-pep517', 'python3-pip'])
session.check_call(['python3', '-m', 'pep517.build', '-s', '.'])
return
if os.path.exists('dist.ini') and not os.path.exists('Makefile.PL'):
apt_install(session, ['libdist-inkt-perl'])
with open('dist.ini', 'rb') as f:
for line in f:
if not line.startswith(b';;'):
continue
try:
(key, value) = line[2:].split(b'=', 1)
except ValueError:
continue
if (key.strip() == b'class' and
value.strip().startswith(b"'Dist::Inkt")):
note('Found Dist::Inkt section in dist.ini, '
'assuming distinkt.')
# TODO(jelmer): install via apt if possible
session.check_call(
['cpan', 'install', value.decode().strip("'")],
user='root')
run_with_build_fixer(session, ['distinkt-dist'])
return
# Default to invoking Dist::Zilla
note('Found dist.ini, assuming dist-zilla.')
apt_install(session, ['libdist-zilla-perl'])
run_with_build_fixer(session, ['dzil', 'build', '--in', '..'])
return
if os.path.exists('package.json'):
apt_install(session, ['npm'])
run_with_build_fixer(session, ['npm', 'pack'])
return
gemfiles = [name for name in os.listdir('.') if name.endswith('.gem')]
if gemfiles:
apt_install(session, ['gem2deb'])
if len(gemfiles) > 1:
warning('More than one gemfile. Trying the first?')
run_with_build_fixer(session, ['gem2tgz', gemfiles[0]])
return
if os.path.exists('waf'):
apt_install(session, ['python3'])
run_with_build_fixer(session, ['./waf', 'dist'])
return
if os.path.exists('Makefile.PL') and not os.path.exists('Makefile'):
apt_install(session, ['perl'])
run_with_build_fixer(session, ['perl', 'Makefile.PL'])
if not os.path.exists('Makefile') and not os.path.exists('configure'):
if os.path.exists('autogen.sh'):
if shebang_binary('autogen.sh') is None:
run_with_build_fixer(session, ['/bin/sh', './autogen.sh'])
else:
run_with_build_fixer(session, ['./autogen.sh'])
elif os.path.exists('configure.ac') or os.path.exists('configure.in'):
apt_install(session, [
'autoconf', 'automake', 'gettext', 'libtool', 'gnu-standards'])
run_with_build_fixer(session, ['autoreconf', '-i'])
if not os.path.exists('Makefile') and os.path.exists('configure'):
session.check_call(['./configure'])
if os.path.exists('Makefile'):
apt_install(session, ['make'])
run_with_build_fixer(session, ['make', 'dist'])
raise NoBuildToolsFound()
def get_declared_dependencies(self):
raise NotImplementedError(self.get_declared_dependencies)

View file

@ -15,34 +15,228 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
import shlex
import sys
from . import run_dist, NoBuildToolsFound, note
from . import UnidentifiedError, DetailedFailure
from .buildlog import (
InstallFixer,
ExplainInstallFixer,
ExplainInstall,
install_missing_reqs,
)
from .buildsystem import NoBuildToolsFound, detect_buildsystems
from .resolver import (
auto_resolver,
native_resolvers,
)
from .resolver.apt import AptResolver
def main():
def display_explain_commands(commands):
logging.info("Run one or more of the following commands:")
for command, reqs in commands:
if isinstance(command, list):
command = shlex.join(command)
logging.info(" %s (to install %s)", command, ", ".join(map(str, reqs)))
def get_necessary_declared_requirements(resolver, requirements, stages):
missing = []
for stage, req in requirements:
if stage in stages:
missing.append(req)
return missing
def install_necessary_declared_requirements(
session, resolver, fixers, buildsystems, stages, explain=False
):
relevant = []
declared_reqs = []
for buildsystem in buildsystems:
try:
declared_reqs.extend(buildsystem.get_declared_dependencies(session, fixers))
except NotImplementedError:
logging.warning(
"Unable to determine declared dependencies from %r", buildsystem
)
relevant.extend(
get_necessary_declared_requirements(resolver, declared_reqs, stages)
)
install_missing_reqs(session, resolver, relevant, explain=explain)
# Types of dependencies:
# - core: necessary to do anything with the package
# - build: necessary to build the package
# - test: necessary to run the tests
# - dev: necessary for development (e.g. linters, yacc)
STAGE_MAP = {
"dist": [],
"info": [],
"install": ["core", "build"],
"test": ["test", "build", "core"],
"build": ["build", "core"],
"clean": [],
}
def determine_fixers(session, resolver, explain=False):
if explain:
return [ExplainInstallFixer(resolver)]
else:
return [InstallFixer(resolver)]
def main(): # noqa: C901
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('subcommand', type=str, choices=['dist'])
parser.add_argument(
'--directory', '-d', type=str, help='Directory for project.',
default='.')
"--directory", "-d", type=str, help="Directory for project.", default="."
)
parser.add_argument("--schroot", type=str, help="schroot to run in.")
parser.add_argument(
'--schroot', type=str, help='schroot to run in.')
"--resolve",
choices=["apt", "native", "auto"],
default="auto",
help="What to do about missing dependencies",
)
parser.add_argument(
'--apt', help=argparse.SUPPRESS,
dest='resolve', action='store_const', const='apt')
parser.add_argument(
'--native', help=argparse.SUPPRESS,
dest='native', action='store_const', const='native')
parser.add_argument(
"--explain",
action="store_true",
help="Explain what needs to be done rather than making changes",
)
parser.add_argument(
"--ignore-declared-dependencies",
"--optimistic",
action="store_true",
help="Ignore declared dependencies, follow build errors only",
)
parser.add_argument("--verbose", action="store_true", help="Be verbose")
subparsers = parser.add_subparsers(dest="subcommand")
subparsers.add_parser("dist")
subparsers.add_parser("build")
subparsers.add_parser("clean")
subparsers.add_parser("test")
subparsers.add_parser("info")
exec_parser = subparsers.add_parser("exec")
exec_parser.add_argument('subargv', nargs=argparse.REMAINDER, help='Command to run.')
install_parser = subparsers.add_parser("install")
install_parser.add_argument(
"--user", action="store_true", help="Install in local-user directories."
)
install_parser.add_argument(
"--prefix", type=str, help='Prefix to install in')
args = parser.parse_args()
if not args.subcommand:
parser.print_usage()
return 1
if args.verbose:
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
else:
logging.basicConfig(level=logging.INFO, format="%(message)s")
if args.schroot:
from .session.schroot import SchrootSession
session = SchrootSession(args.schroot)
else:
from .session.plain import PlainSession
session = PlainSession()
with session:
os.chdir(args.directory)
logging.info("Preparing directory %s", args.directory)
external_dir, internal_dir = session.setup_from_directory(args.directory)
session.chdir(internal_dir)
os.chdir(external_dir)
if not session.is_temporary and args.subcommand == 'info':
args.explain = True
if args.resolve == "apt":
resolver = AptResolver.from_session(session)
elif args.resolve == "native":
resolver = native_resolvers(session, user_local=args.user)
elif args.resolve == "auto":
resolver = auto_resolver(session, explain=args.explain)
logging.info("Using requirement resolver: %s", resolver)
fixers = determine_fixers(session, resolver, explain=args.explain)
try:
if args.subcommand == 'dist':
run_dist(session)
if args.subcommand == "exec":
from .fix_build import run_with_build_fixers
run_with_build_fixers(session, args.subargv, fixers)
return 0
bss = list(detect_buildsystems(args.directory))
logging.info("Detected buildsystems: %s", ", ".join(map(str, bss)))
if not args.ignore_declared_dependencies:
stages = STAGE_MAP[args.subcommand]
if stages:
logging.info("Checking that declared requirements are present")
try:
install_necessary_declared_requirements(
session, resolver, fixers, bss, stages, explain=args.explain
)
except ExplainInstall as e:
display_explain_commands(e.commands)
return 1
if args.subcommand == "dist":
from .dist import run_dist, DistNoTarball
try:
run_dist(
session=session,
buildsystems=bss,
resolver=resolver,
fixers=fixers,
target_directory=".",
)
except DistNoTarball:
logging.fatal('No tarball created.')
return 1
if args.subcommand == "build":
from .build import run_build
run_build(session, buildsystems=bss, resolver=resolver, fixers=fixers)
if args.subcommand == "clean":
from .clean import run_clean
run_clean(session, buildsystems=bss, resolver=resolver, fixers=fixers)
if args.subcommand == "install":
from .install import run_install
run_install(
session,
buildsystems=bss,
resolver=resolver,
fixers=fixers,
user=args.user,
prefix=args.prefix,
)
if args.subcommand == "test":
from .test import run_test
run_test(session, buildsystems=bss, resolver=resolver, fixers=fixers)
if args.subcommand == "info":
from .info import run_info
run_info(session, buildsystems=bss, fixers=fixers)
except ExplainInstall as e:
display_explain_commands(e.commands)
except (UnidentifiedError, DetailedFailure):
return 1
except NoBuildToolsFound:
note('No build tools found.')
logging.info("No build tools found.")
return 1
return 0

30
ognibuild/build.py Normal file
View file

@ -0,0 +1,30 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .buildsystem import NoBuildToolsFound
def run_build(session, buildsystems, resolver, fixers):
# Some things want to write to the user's home directory,
# e.g. pip caches in ~/.cache
session.create_home()
for buildsystem in buildsystems:
buildsystem.build(session, resolver, fixers)
return
raise NoBuildToolsFound()

328
ognibuild/buildlog.py Normal file
View file

@ -0,0 +1,328 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Convert problems found in the buildlog to upstream requirements.
"""
import logging
from buildlog_consultant.common import (
MissingPythonModule,
MissingPythonDistribution,
MissingCHeader,
MissingPkgConfig,
MissingCommand,
MissingFile,
MissingJavaScriptRuntime,
MissingSprocketsFile,
MissingGoPackage,
MissingPerlFile,
MissingPerlModule,
MissingXmlEntity,
MissingJDKFile,
MissingJDK,
MissingJRE,
MissingNodeModule,
MissingNodePackage,
MissingPhpClass,
MissingRubyGem,
MissingLibrary,
MissingSetupPyCommand,
MissingJavaClass,
MissingCSharpCompiler,
MissingRPackage,
MissingRubyFile,
MissingAutoconfMacro,
MissingValaPackage,
MissingBoostComponents,
MissingXfceDependency,
MissingHaskellDependencies,
MissingVagueDependency,
DhAddonLoadFailure,
MissingMavenArtifacts,
MissingIntrospectionTypelib,
GnomeCommonMissing,
MissingGnomeCommonDependency,
UnknownCertificateAuthority,
CMakeFilesMissing,
MissingLibtool,
MissingQt,
MissingX11,
MissingPerlPredeclared,
MissingLatexFile,
MissingCargoCrate,
MissingStaticLibrary,
)
from buildlog_consultant.apt import UnsatisfiedAptDependencies
from .fix_build import BuildFixer
from .requirements import (
BinaryRequirement,
PathRequirement,
PkgConfigRequirement,
CHeaderRequirement,
JavaScriptRuntimeRequirement,
ValaPackageRequirement,
RubyGemRequirement,
GoPackageRequirement,
DhAddonRequirement,
PhpClassRequirement,
RPackageRequirement,
NodePackageRequirement,
LibraryRequirement,
RubyFileRequirement,
XmlEntityRequirement,
SprocketsFileRequirement,
JavaClassRequirement,
CMakefileRequirement,
HaskellPackageRequirement,
MavenArtifactRequirement,
BoostComponentRequirement,
GnomeCommonRequirement,
JDKFileRequirement,
JDKRequirement,
JRERequirement,
PerlModuleRequirement,
PerlFileRequirement,
AutoconfMacroRequirement,
PythonModuleRequirement,
PythonPackageRequirement,
CertificateAuthorityRequirement,
NodeModuleRequirement,
QTRequirement,
X11Requirement,
LibtoolRequirement,
VagueDependencyRequirement,
IntrospectionTypelibRequirement,
PerlPreDeclaredRequirement,
LatexPackageRequirement,
CargoCrateRequirement,
StaticLibraryRequirement,
)
from .resolver import UnsatisfiedRequirements
def problem_to_upstream_requirement(problem):  # noqa: C901
    """Map a buildlog-consultant problem to the requirement(s) that fix it.

    Args:
      problem: A problem instance reported by buildlog_consultant
    Returns:
      A single Requirement, a list of Requirements, or None when the
      problem does not translate to an installable requirement.
    """
    # NOTE: this is an isinstance() dispatch chain; order only matters if
    # problem classes subclass each other.
    if isinstance(problem, MissingFile):
        return PathRequirement(problem.path)
    elif isinstance(problem, MissingCommand):
        return BinaryRequirement(problem.command)
    elif isinstance(problem, MissingPkgConfig):
        return PkgConfigRequirement(problem.module, problem.minimum_version)
    elif isinstance(problem, MissingCHeader):
        return CHeaderRequirement(problem.header)
    elif isinstance(problem, MissingIntrospectionTypelib):
        return IntrospectionTypelibRequirement(problem.library)
    elif isinstance(problem, MissingJavaScriptRuntime):
        return JavaScriptRuntimeRequirement()
    elif isinstance(problem, MissingRubyGem):
        return RubyGemRequirement(problem.gem, problem.version)
    elif isinstance(problem, MissingValaPackage):
        return ValaPackageRequirement(problem.package)
    elif isinstance(problem, MissingGoPackage):
        return GoPackageRequirement(problem.package)
    elif isinstance(problem, MissingBoostComponents):
        # One requirement per missing boost component.
        return [BoostComponentRequirement(name) for name in problem.components]
    elif isinstance(problem, DhAddonLoadFailure):
        return DhAddonRequirement(problem.path)
    elif isinstance(problem, MissingPhpClass):
        return PhpClassRequirement(problem.php_class)
    elif isinstance(problem, MissingRPackage):
        return RPackageRequirement(problem.package, problem.minimum_version)
    elif isinstance(problem, MissingNodeModule):
        return NodeModuleRequirement(problem.module)
    elif isinstance(problem, MissingStaticLibrary):
        return StaticLibraryRequirement(problem.library, problem.filename)
    elif isinstance(problem, MissingNodePackage):
        return NodePackageRequirement(problem.package)
    elif isinstance(problem, MissingLatexFile):
        # Only .sty files map cleanly to a latex package name.
        if problem.filename.endswith('.sty'):
            return LatexPackageRequirement(problem.filename[:-4])
        return None
    elif isinstance(problem, MissingVagueDependency):
        return VagueDependencyRequirement(problem.name, minimum_version=problem.minimum_version)
    elif isinstance(problem, MissingLibrary):
        return LibraryRequirement(problem.library)
    elif isinstance(problem, MissingRubyFile):
        return RubyFileRequirement(problem.filename)
    elif isinstance(problem, MissingXmlEntity):
        return XmlEntityRequirement(problem.url)
    elif isinstance(problem, MissingSprocketsFile):
        return SprocketsFileRequirement(problem.content_type, problem.name)
    elif isinstance(problem, MissingJavaClass):
        return JavaClassRequirement(problem.classname)
    elif isinstance(problem, CMakeFilesMissing):
        return [CMakefileRequirement(filename) for filename in problem.filenames]
    elif isinstance(problem, MissingHaskellDependencies):
        return [HaskellPackageRequirement.from_string(dep) for dep in problem.deps]
    elif isinstance(problem, MissingMavenArtifacts):
        return [
            MavenArtifactRequirement.from_str(artifact)
            for artifact in problem.artifacts
        ]
    elif isinstance(problem, MissingCSharpCompiler):
        # NOTE(review): mono's C# compiler binary is usually "mcs" —
        # confirm "msc" is intentional.
        return BinaryRequirement("msc")
    elif isinstance(problem, GnomeCommonMissing):
        return GnomeCommonRequirement()
    elif isinstance(problem, MissingJDKFile):
        return JDKFileRequirement(problem.jdk_path, problem.filename)
    elif isinstance(problem, MissingJDK):
        return JDKRequirement()
    elif isinstance(problem, MissingJRE):
        return JRERequirement()
    elif isinstance(problem, MissingQt):
        return QTRequirement()
    elif isinstance(problem, MissingX11):
        return X11Requirement()
    elif isinstance(problem, MissingLibtool):
        return LibtoolRequirement()
    elif isinstance(problem, UnknownCertificateAuthority):
        return CertificateAuthorityRequirement(problem.url)
    elif isinstance(problem, MissingPerlPredeclared):
        ret = PerlPreDeclaredRequirement(problem.name)
        try:
            # Prefer the concrete module requirement when the name is known.
            return ret.lookup_module()
        except KeyError:
            return ret
    elif isinstance(problem, MissingCargoCrate):
        # TODO(jelmer): handle problem.requirements
        return CargoCrateRequirement(problem.crate)
    elif isinstance(problem, MissingSetupPyCommand):
        # Only the "test" command has a known provider (setuptools).
        if problem.command == "test":
            return PythonPackageRequirement("setuptools")
        return None
    elif isinstance(problem, MissingGnomeCommonDependency):
        if problem.package == "glib-gettext":
            return BinaryRequirement("glib-gettextize")
        else:
            logging.warning(
                "No known command for gnome-common dependency %s", problem.package
            )
            return None
    elif isinstance(problem, MissingXfceDependency):
        if problem.package == "gtk-doc":
            return BinaryRequirement("gtkdocize")
        else:
            logging.warning("No known command for xfce dependency %s", problem.package)
            return None
    elif isinstance(problem, MissingPerlModule):
        return PerlModuleRequirement(
            module=problem.module, filename=problem.filename, inc=problem.inc
        )
    elif isinstance(problem, MissingPerlFile):
        return PerlFileRequirement(filename=problem.filename)
    elif isinstance(problem, MissingAutoconfMacro):
        return AutoconfMacroRequirement(problem.macro)
    elif isinstance(problem, MissingPythonModule):
        return PythonModuleRequirement(
            problem.module,
            python_version=problem.python_version,
            minimum_version=problem.minimum_version,
        )
    elif isinstance(problem, MissingPythonDistribution):
        return PythonPackageRequirement(
            problem.distribution,
            python_version=problem.python_version,
            minimum_version=problem.minimum_version,
        )
    elif isinstance(problem, UnsatisfiedAptDependencies):
        # Imported lazily to avoid a hard dependency on the apt resolver.
        from .resolver.apt import AptRequirement
        return AptRequirement(problem.relations)
    else:
        # Unknown problem type — caller treats None as "not fixable here".
        return None
class InstallFixer(BuildFixer):
    """Fix build problems by installing the corresponding upstream requirements."""

    def __init__(self, resolver):
        self.resolver = resolver

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.resolver)

    def __str__(self):
        return "upstream requirement fixer(%s)" % self.resolver

    def can_fix(self, error):
        # Fixable iff the problem maps to at least one known requirement.
        return problem_to_upstream_requirement(error) is not None

    def fix(self, error, phase):
        requirements = problem_to_upstream_requirement(error)
        if requirements is None:
            return False
        if not isinstance(requirements, list):
            requirements = [requirements]
        try:
            self.resolver.install(requirements)
        except UnsatisfiedRequirements:
            return False
        else:
            return True
class ExplainInstall(Exception):
    """Raised to report the commands that would install missing requirements."""

    def __init__(self, commands):
        # Commands as produced by a resolver's explain(); stored for the caller
        # to present to the user instead of executing them.
        self.commands = commands
class ExplainInstallFixer(BuildFixer):
    """Report how missing requirements could be installed, without installing."""

    def __init__(self, resolver):
        self.resolver = resolver

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.resolver)

    def __str__(self):
        return "upstream requirement install explainer(%s)" % self.resolver

    def can_fix(self, error):
        return problem_to_upstream_requirement(error) is not None

    def fix(self, error, phase):
        requirements = problem_to_upstream_requirement(error)
        if requirements is None:
            return False
        if not isinstance(requirements, list):
            requirements = [requirements]
        explanations = list(self.resolver.explain(requirements))
        if not explanations:
            return False
        # Abort the fix loop, surfacing the commands to the caller.
        raise ExplainInstall(explanations)
def install_missing_reqs(session, resolver, reqs, explain=False):
    """Install (or explain how to install) requirements that are not yet met.

    Args:
      session: Session used to check whether a requirement is met
      resolver: Resolver used to install or explain
      reqs: Requirements to check
      explain: If True, raise ExplainInstall with the commands instead of
        installing
    Raises:
      UnsatisfiedRequirements: in explain mode, when no commands are known
      ExplainInstall: in explain mode, with the commands to run
    """
    if not reqs:
        return

    def unmet(requirement):
        # A requirement that cannot check itself is treated as unmet.
        try:
            return not requirement.met(session)
        except NotImplementedError:
            return True

    missing = [requirement for requirement in reqs if unmet(requirement)]
    if not missing:
        return
    if explain:
        commands = resolver.explain(missing)
        if not commands:
            raise UnsatisfiedRequirements(missing)
        raise ExplainInstall(commands)
    resolver.install(missing)

1640
ognibuild/buildsystem.py Normal file

File diff suppressed because it is too large Load diff

30
ognibuild/clean.py Normal file
View file

@ -0,0 +1,30 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .buildsystem import NoBuildToolsFound
def run_clean(session, buildsystems, resolver, fixers):
    """Clean the tree using the first detected build system.

    Raises:
      NoBuildToolsFound: if no build system was detected
    """
    # Some tools write to the user's home directory (e.g. pip caches in
    # ~/.cache), so make sure it exists first.
    session.create_home()
    cleaned = False
    for buildsystem in buildsystems:
        buildsystem.clean(session, resolver, fixers)
        cleaned = True
        break
    if not cleaned:
        raise NoBuildToolsFound()

View file

@ -0,0 +1,41 @@
#!/usr/bin/python
# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
from debian.deb822 import Deb822
from ..session import Session
def satisfy_build_deps(session: Session, tree, debian_path):
    """Satisfy the build dependencies declared in debian/control.

    Collects Build-Depends* fields (to install) and Build-Conflicts* fields
    (to remove) and hands them to `apt satisfy`.

    Args:
      session: Session to run apt in
      tree: Tree containing the packaging
      debian_path: Path to the debian/ directory inside the tree
    """
    source = Deb822(tree.get_file(os.path.join(debian_path, "control")))
    deps = []
    for name in ["Build-Depends", "Build-Depends-Indep", "Build-Depends-Arch"]:
        try:
            deps.append(source[name].strip().strip(","))
        except KeyError:
            pass
    for name in ["Build-Conflicts", "Build-Conflicts-Indep", "Build-Conflicts-Arch"]:
        try:
            # Prefix with "Conflicts:" so `apt satisfy` treats these as
            # packages that must not be installed.
            deps.append("Conflicts: " + source[name])
        except KeyError:
            pass
    # Normalize stray whitespace and trailing commas.
    deps = [dep.strip().strip(",") for dep in deps]
    from .apt import AptManager
    apt = AptManager(session)
    apt.satisfy(deps)

131
ognibuild/debian/apt.py Normal file
View file

@ -0,0 +1,131 @@
#!/usr/bin/python
# Copyright (C) 2019-2020 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from typing import List, Optional
import os
from buildlog_consultant.apt import (
find_apt_get_failure,
)
from .. import DetailedFailure, UnidentifiedError
from ..session import Session, run_with_tee, get_user
from .file_search import (
FileSearcher,
get_apt_contents_file_searcher,
GENERATED_FILE_SEARCHER,
get_packages_for_paths,
)
def run_apt(
    session: Session, args: List[str], prefix: Optional[List[str]] = None
) -> None:
    """Run apt in the given session.

    Args:
      session: Session to run apt in
      args: Arguments to pass to apt (e.g. ["install", "foo"])
      prefix: Optional command prefix (e.g. ["sudo"])
    Raises:
      DetailedFailure: if apt failed with a recognized error
      UnidentifiedError: if apt failed with an unrecognized error
    """
    if prefix is None:
        prefix = []
    # BUG FIX: the original assigned `args = prefix = ["apt", "-y"] + args`,
    # which rebound `prefix` and silently discarded the caller-supplied
    # prefix (e.g. ["sudo"] from AptManager.from_session). Prepend it.
    args = prefix + ["apt", "-y"] + args
    logging.info('apt: running %r', args)
    retcode, lines = run_with_tee(session, args, cwd="/", user="root")
    if retcode == 0:
        return
    match, error = find_apt_get_failure(lines)
    if error is not None:
        raise DetailedFailure(retcode, args, error)
    # Strip trailing blank lines before reporting the unidentified output.
    while lines and lines[-1] == "":
        lines.pop(-1)
    raise UnidentifiedError(retcode, args, lines, secondary=match)
class AptManager(object):
    """High-level wrapper for apt operations inside a session."""

    session: Session
    _searchers: Optional[List[FileSearcher]]

    def __init__(self, session, prefix=None):
        # prefix: command prefix (e.g. ["sudo"]) prepended when running apt.
        self.session = session
        self._apt_cache = None
        self._searchers = None
        if prefix is None:
            prefix = []
        self.prefix = prefix

    @classmethod
    def from_session(cls, session):
        """Create an AptManager, using sudo when the session user is not root."""
        if get_user(session) != "root":
            prefix = ["sudo"]
        else:
            prefix = []
        return cls(session, prefix=prefix)

    def searchers(self):
        # Lazily construct the file searchers: apt contents data plus the
        # hand-maintained GENERATED_FILE_SEARCHER fallback.
        if self._searchers is None:
            self._searchers = [
                get_apt_contents_file_searcher(self.session),
                GENERATED_FILE_SEARCHER,
            ]
        return self._searchers

    @property
    def apt_cache(self):
        # Lazily open the apt cache rooted at the session location.
        if self._apt_cache is None:
            import apt
            self._apt_cache = apt.Cache(rootdir=self.session.location)
        return self._apt_cache

    def package_exists(self, package):
        """Return True if the named binary package exists in the apt cache."""
        return package in self.apt_cache

    def package_versions(self, package):
        """Return the available versions of the named package."""
        return list(self.apt_cache[package].versions)

    def get_packages_for_paths(self, paths, regex=False, case_insensitive=False):
        """Return packages that ship any of the given file paths."""
        logging.debug("Searching for packages containing %r", paths)
        return get_packages_for_paths(
            paths, self.searchers(), regex=regex, case_insensitive=case_insensitive
        )

    def missing(self, packages):
        """Return the subset of packages that is not currently installed.

        Reads the session's dpkg status file directly; stops early once
        every requested package has been accounted for.
        """
        root = getattr(self.session, "location", "/")
        status_path = os.path.join(root, "var/lib/dpkg/status")
        missing = set(packages)
        import apt_pkg
        with apt_pkg.TagFile(status_path) as tagf:
            while missing:
                tagf.step()
                if not tagf.section:
                    break
                if tagf.section["Package"] in missing:
                    if tagf.section["Status"] == "install ok installed":
                        missing.remove(tagf.section["Package"])
        return list(missing)

    def install(self, packages: List[str]) -> None:
        """Install the given packages, skipping those already installed."""
        logging.info("Installing using apt: %r", packages)
        packages = self.missing(packages)
        if packages:
            run_apt(self.session, ["install"] + packages, prefix=self.prefix)

    def satisfy(self, deps: List[str]) -> None:
        """Run `apt satisfy` with the given dependency strings."""
        run_apt(self.session, ["satisfy"] + deps, prefix=self.prefix)

    def satisfy_command(self, deps: List[str]) -> List[str]:
        """Return the command line that would satisfy deps, without running it."""
        return self.prefix + ["apt", "satisfy"] + deps

299
ognibuild/debian/build.py Normal file
View file

@ -0,0 +1,299 @@
#!/usr/bin/python
# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
__all__ = [
"get_build_architecture",
"add_dummy_changelog_entry",
"build",
"DetailedDebianBuildFailure",
"UnidentifiedDebianBuildError",
]
from datetime import datetime
from debmutate.changelog import ChangelogEditor
import logging
import os
import re
import shlex
import subprocess
import sys
from debian.changelog import Changelog
from debmutate.changelog import get_maintainer
from breezy.mutabletree import MutableTree
from breezy.plugins.debian.builder import BuildFailedError
from breezy.tree import Tree
from buildlog_consultant.sbuild import (
worker_failure_from_sbuild_log,
)
from .. import DetailedFailure as DetailedFailure, UnidentifiedError
# Default build command handed to `brz builddeb --builder` (see build()).
DEFAULT_BUILDER = "sbuild --no-clean-source"
class DetailedDebianBuildFailure(DetailedFailure):
    """A Debian build failure with a recognized cause.

    Extends DetailedFailure with the sbuild stage/phase and a description.
    """

    def __init__(self, stage, phase, retcode, argv, error, description):
        super().__init__(retcode, argv, error)
        self.stage = stage
        self.phase = phase
        self.description = description
class UnidentifiedDebianBuildError(UnidentifiedError):
    """A Debian build failure whose cause could not be identified."""

    def __init__(self, stage, phase, retcode, argv, lines, description, secondary=None):
        super().__init__(retcode, argv, lines, secondary)
        self.stage = stage
        self.phase = phase
        self.description = description
class MissingChangesFile(Exception):
    """Expected changes file was not written."""

    def __init__(self, filename):
        # Name of the .changes file that was expected in the output directory.
        self.filename = filename
def find_changes_files(path, package, version):
    """Yield (architecture, direntry) for matching .changes files in path.

    Changes file names never include the epoch, so only the upstream
    version and Debian revision are matched.
    """
    version_without_epoch = version.upstream_version
    if version.debian_version is not None:
        version_without_epoch += "-%s" % version.debian_version
    pattern = re.compile(
        '%s_%s_(.*).changes' % (re.escape(package), re.escape(version_without_epoch))
    )
    for entry in os.scandir(path):
        match = pattern.match(entry.name)
        if match is not None:
            yield match.group(1), entry
def get_build_architecture():
    """Return the Debian build architecture (e.g. "amd64").

    Raises:
      Exception: if dpkg-architecture exits with an error
    """
    try:
        output = subprocess.check_output(["dpkg-architecture", "-qDEB_BUILD_ARCH"])
    except subprocess.CalledProcessError as e:
        raise Exception("Could not find the build architecture: %s" % e)
    return output.strip().decode()
def control_files_in_root(tree: Tree, subpath: str) -> bool:
    """Return True if debian control files live in the package root.

    Some packaging branches keep control/changelog directly in the root
    instead of under debian/; detect that layout.
    """
    if tree.has_filename(os.path.join(subpath, "debian")):
        # A debian/ directory exists, so control files are not in the root.
        return False
    control_path = os.path.join(subpath, "control")
    if tree.has_filename(control_path):
        return True
    # A control.in template counts as control files in the root too.
    return tree.has_filename(control_path + ".in")
def add_dummy_changelog_entry(
    tree: MutableTree,
    subpath: str,
    suffix: str,
    suite: str,
    message: str,
    timestamp=None,
    maintainer=None,
):
    """Add a dummy changelog entry to a package.

    Args:
        tree: Tree containing the package
        subpath: Sub path in the tree where the package lives
        suffix: Suffix for the version
        suite: Debian suite to target
        message: Changelog message
        timestamp: Optional timestamp for the entry (defaults to now)
        maintainer: Optional maintainer identity (defaults to configured one)
    """
    def add_suffix(v, suffix):
        # If the version already ends in "<suffix><number>", bump the number;
        # otherwise append "<suffix>1".
        m = re.fullmatch(
            "(.*)(" + re.escape(suffix) + ")([0-9]+)",
            v,
        )
        if m:
            return m.group(1) + m.group(2) + "%d" % (int(m.group(3)) + 1)
        else:
            return v + suffix + "1"
    if control_files_in_root(tree, subpath):
        path = os.path.join(subpath, "changelog")
    else:
        path = os.path.join(subpath, "debian", "changelog")
    if maintainer is None:
        maintainer = get_maintainer()
    if timestamp is None:
        timestamp = datetime.now()
    with ChangelogEditor(tree.abspath(os.path.join(path))) as editor:
        version = editor[0].version
        # Suffix the Debian revision when present, the upstream version
        # otherwise (native packages).
        if version.debian_revision:
            version.debian_revision = add_suffix(version.debian_revision, suffix)
        else:
            version.upstream_version = add_suffix(version.upstream_version, suffix)
        editor.auto_version(version, timestamp=timestamp)
        editor.add_entry(
            summary=[message], maintainer=maintainer, timestamp=timestamp, urgency='low')
        editor[0].distributions = suite
def get_latest_changelog_entry(local_tree, subpath=""):
    """Return (package, version) from the newest debian/changelog entry.

    Note: the return value is a plain (name, Version) tuple, not a
    changelog block object.
    """
    if control_files_in_root(local_tree, subpath):
        path = os.path.join(subpath, "changelog")
    else:
        path = os.path.join(subpath, "debian", "changelog")
    with local_tree.get_file(path) as f:
        # Only the first (newest) block is needed.
        cl = Changelog(f, max_blocks=1)
        return cl.package, cl.version
def build(
    local_tree,
    outf,
    build_command=DEFAULT_BUILDER,
    result_dir=None,
    distribution=None,
    subpath="",
    source_date_epoch=None,
    extra_repositories=None,
):
    """Run a Debian package build via `brz builddeb`.

    Args:
      local_tree: Working tree containing the package
      outf: File object receiving build stdout/stderr
      build_command: Builder command passed to --builder
      result_dir: Optional directory for build results
      distribution: Optional distribution, exported as $DISTRIBUTION
      subpath: Sub path in the tree where the package lives
      source_date_epoch: Optional value for $SOURCE_DATE_EPOCH
      extra_repositories: Extra apt repositories appended to the builder command
    Raises:
      BuildFailedError: if the build subprocess exits non-zero
    """
    for repo in extra_repositories or []:
        build_command += " --extra-repository=" + shlex.quote(repo)
    # Invoke breezy as a module so the same interpreter is used.
    args = [
        sys.executable,
        "-m",
        "breezy",
        "builddeb",
        "--guess-upstream-branch-url",
        "--builder=%s" % build_command,
    ]
    if result_dir:
        args.append("--result-dir=%s" % result_dir)
    outf.write("Running %r\n" % (build_command,))
    outf.flush()
    env = dict(os.environ.items())
    if distribution is not None:
        env["DISTRIBUTION"] = distribution
    if source_date_epoch is not None:
        env["SOURCE_DATE_EPOCH"] = "%d" % source_date_epoch
    logging.info("Building debian packages, running %r.", build_command)
    try:
        subprocess.check_call(
            args, cwd=local_tree.abspath(subpath), stdout=outf, stderr=outf, env=env
        )
    except subprocess.CalledProcessError:
        raise BuildFailedError()
def build_once(
    local_tree,
    build_suite,
    output_directory,
    build_command,
    subpath="",
    source_date_epoch=None,
    extra_repositories=None
):
    """Build the package once, raising a descriptive error on failure.

    Args:
      local_tree: Tree to build
      build_suite: Suite/distribution to build for
      output_directory: Directory receiving build.log and build results
      build_command: Builder command (e.g. an sbuild invocation)
      subpath: Sub path in the tree where the package lives
      source_date_epoch: Optional SOURCE_DATE_EPOCH value
      extra_repositories: Optional extra apt repositories
    Returns:
      Tuple of (changes_names, (package, version))
    Raises:
      DetailedDebianBuildFailure: if the failure cause was recognized
      UnidentifiedDebianBuildError: otherwise
    """
    build_log_path = os.path.join(output_directory, "build.log")
    logging.debug("Writing build log to %s", build_log_path)
    try:
        with open(build_log_path, "w") as f:
            build(
                local_tree,
                outf=f,
                build_command=build_command,
                result_dir=output_directory,
                distribution=build_suite,
                subpath=subpath,
                source_date_epoch=source_date_epoch,
                extra_repositories=extra_repositories,
            )
    except BuildFailedError as e:
        # Analyze the log to attach a structured failure cause.
        with open(build_log_path, "rb") as f:
            sbuild_failure = worker_failure_from_sbuild_log(f)
        retcode = getattr(e, 'returncode', None)
        if sbuild_failure.error:
            raise DetailedDebianBuildFailure(
                sbuild_failure.stage,
                sbuild_failure.phase, retcode,
                shlex.split(build_command),
                sbuild_failure.error,
                sbuild_failure.description)
        else:
            raise UnidentifiedDebianBuildError(
                sbuild_failure.stage,
                sbuild_failure.phase,
                retcode, shlex.split(build_command),
                [], sbuild_failure.description)
    cl_entry = get_latest_changelog_entry(local_tree, subpath)
    # BUG FIX: get_latest_changelog_entry returns a (package, version) tuple,
    # so unpack it rather than reading .package/.version attributes (which
    # raised AttributeError on every successful build).
    package, version = cl_entry
    changes_names = []
    for _arch, entry in find_changes_files(output_directory, package, version):
        changes_names.append(entry.name)
    return (changes_names, cl_entry)
def gbp_dch(path):
    """Update debian/changelog from git history using `gbp dch`."""
    subprocess.check_call(["gbp", "dch", "--ignore-branch"], cwd=path)
def attempt_build(
    local_tree,
    suffix,
    build_suite,
    output_directory,
    build_command,
    build_changelog_entry=None,
    subpath="",
    source_date_epoch=None,
    run_gbp_dch=False,
    extra_repositories=None
):
    """Attempt a build, with a custom distribution set.

    Args:
        local_tree: Tree to build in
        suffix: Suffix to add to version string
        build_suite: Name of suite (i.e. distribution) to build for
        output_directory: Directory to write output to
        build_command: Build command to build package
        build_changelog_entry: Changelog entry to use
        subpath: Sub path in tree where package lives
        source_date_epoch: Source date epoch to set
        run_gbp_dch: Whether to run `gbp dch` before building
        extra_repositories: Extra apt repositories for the builder
    Returns: Tuple with (changes_names, cl_entry)
    """
    # NOTE(review): gbp dch is only run when the package lives at the tree
    # root (subpath empty) — confirm this restriction is intentional.
    if run_gbp_dch and not subpath:
        gbp_dch(local_tree.abspath(subpath))
    if build_changelog_entry is not None:
        add_dummy_changelog_entry(
            local_tree, subpath, suffix, build_suite, build_changelog_entry
        )
    return build_once(
        local_tree,
        build_suite,
        output_directory,
        build_command,
        subpath,
        source_date_epoch=source_date_epoch,
        extra_repositories=extra_repositories,
    )

View file

@ -0,0 +1,84 @@
#!/usr/bin/python3
# Copyright (C) 2021 Jelmer Vernooij
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tie breaking by build deps."""
import logging
class BuildDependencyTieBreaker(object):
    """Pick between candidate requirements by build-depends popularity.

    Counts how often each package appears as a build-dependency in the apt
    source records rooted at rootdir; the most frequently used candidate wins.
    """

    def __init__(self, rootdir):
        self.rootdir = rootdir
        # Lazily computed mapping of package name -> build-depends count.
        self._counts = None

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.rootdir)

    @classmethod
    def from_session(cls, session):
        return cls(session.location)

    def _count(self):
        # Walk all apt source records and tally build-dependency mentions.
        counts = {}
        import apt_pkg
        apt_pkg.init()
        apt_pkg.config.set("Dir", self.rootdir)
        apt_cache = apt_pkg.SourceRecords()
        apt_cache.restart()
        while apt_cache.step():
            try:
                for d in apt_cache.build_depends.values():
                    for o in d:
                        for p in o:
                            # p is a parsed dependency entry; p[0] is the
                            # package name.
                            counts.setdefault(p[0], 0)
                            counts[p[0]] += 1
            except AttributeError:
                # Source record without build_depends information; skip.
                pass
        return counts

    def __call__(self, reqs):
        """Return the most popular requirement from reqs, or None if unknown."""
        if self._counts is None:
            self._counts = self._count()
        by_count = {}
        for req in reqs:
            try:
                # Only the first package name of each requirement is counted.
                by_count[req] = self._counts[list(req.package_names())[0]]
            except KeyError:
                pass
        if not by_count:
            return None
        top = max(by_count.items(), key=lambda k: k[1])
        logging.info(
            "Breaking tie between [%s] to %s based on build-depends count",
            ', '.join([repr(r.pkg_relation_str()) for r in reqs]),
            repr(top[0].pkg_relation_str()),
        )
        return top[0]
if __name__ == "__main__":
    # Ad-hoc CLI: given one or more dependency strings, print which one
    # would win the tie based on local build-depends counts.
    # NOTE(review): the relative import below only works when run as a
    # module (python -m ...), not as a plain script — confirm intended.
    import argparse
    from ..resolver.apt import AptRequirement
    parser = argparse.ArgumentParser()
    parser.add_argument("req", nargs="+")
    args = parser.parse_args()
    reqs = [AptRequirement.from_str(req) for req in args.req]
    tie_breaker = BuildDependencyTieBreaker("/")
    print(tie_breaker(reqs))

View file

@ -0,0 +1,418 @@
#!/usr/bin/python
# Copyright (C) 2019-2020 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import apt_pkg
from datetime import datetime
from debian.deb822 import Release
import os
import re
import subprocess
from typing import Iterator, List
import logging
from .. import USER_AGENT
from ..session import Session
class FileSearcher(object):
    """Interface for finding which packages ship a given file path."""

    def search_files(
        self, path: str, regex: bool = False, case_insensitive: bool = False
    ) -> Iterator[str]:
        """Yield names of packages that contain path.

        Args:
          path: Path (or regex pattern when regex=True) to look up
          regex: Whether path is a regular expression
          case_insensitive: Whether to match case-insensitively
        """
        raise NotImplementedError(self.search_files)
class ContentsFileNotFound(Exception):
    """Raised when an apt Contents file could not be located or fetched."""
def read_contents_file(f):
    """Parse an apt Contents file into (path, package-list) pairs.

    Each line holds a path followed by whitespace and a package list; the
    path itself may contain spaces, so the split happens from the right.
    """
    for entry in f:
        location, packages = entry.rsplit(maxsplit=1)
        yield location, packages
def contents_urls_from_sources_entry(source, arches, load_url):
    """Yield URLs of apt Contents files for a single sources.list entry.

    Args:
      source: A sources.list entry (as produced by aptsources)
      arches: Architectures to include (e.g. ["amd64", "all"])
      load_url: Callable used to fetch the (In)Release file
    """
    if source.invalid or source.disabled:
        return
    if source.type == "deb-src":
        # Source package entries have no Contents files.
        return
    if source.type != "deb":
        logging.warning("Invalid line in sources: %r", source)
        return
    base_url = source.uri.rstrip("/")
    name = source.dist.rstrip("/")
    components = source.comps
    if components:
        dists_url = base_url + "/dists"
    else:
        # Flat repository layout (no components).
        dists_url = base_url
    inrelease_url = "%s/%s/InRelease" % (dists_url, name)
    try:
        response = load_url(inrelease_url)
    except FileNotFoundError:
        # Fall back to the legacy Release file.
        release_url = "%s/%s/Release" % (dists_url, name)
        try:
            response = load_url(release_url)
        except FileNotFoundError as e:
            logging.warning(
                "Unable to download %s or %s: %s", inrelease_url, release_url, e
            )
            return
    existing_names = {}
    release = Release(response.read())
    # NOTE(review): Release files conventionally use "MD5Sum"/"SHA1"/"SHA256"
    # as field names — confirm "SHA1Sum"/"SHA256Sum" ever match anything here.
    for hn in ["MD5Sum", "SHA1Sum", "SHA256Sum"]:
        for entry in release.get(hn, []):
            # Map the uncompressed name to the listed (possibly compressed) one.
            existing_names[os.path.splitext(entry["name"])[0]] = entry["name"]
    contents_files = set()
    if components:
        for component in components:
            for arch in arches:
                contents_files.add("%s/Contents-%s" % (component, arch))
    else:
        for arch in arches:
            contents_files.add("Contents-%s" % (arch,))
    # Only yield URLs for contents files the Release file actually lists.
    for fn in contents_files:
        if fn in existing_names:
            url = "%s/%s/%s" % (dists_url, name, fn)
            yield url
def contents_urls_from_sourceslist(sl, arch, load_url):
    """Yield Contents file URLs for every entry in an apt sources list."""
    # TODO(jelmer): Verify signatures, etc.
    # Architecture-independent packages live in Contents-all.
    relevant_arches = [arch, "all"]
    for source in sl.list:
        yield from contents_urls_from_sources_entry(source, relevant_arches, load_url)
def _unwrap(f, ext):
if ext == ".gz":
import gzip
return gzip.GzipFile(fileobj=f)
elif ext == ".xz":
import lzma
from io import BytesIO
f = BytesIO(lzma.decompress(f.read()))
else:
return f
def load_direct_url(url):
    """Fetch url, trying .xz and .gz compressed variants first.

    Returns:
      A file-like object yielding the decompressed content
    Raises:
      FileNotFoundError: if every variant returns HTTP 404
    """
    from urllib.error import HTTPError
    from urllib.request import urlopen, Request
    for ext in [".xz", ".gz", ""]:
        try:
            request = Request(url + ext, headers={"User-Agent": USER_AGENT})
            response = urlopen(request)
        except HTTPError as e:
            # COMPAT FIX: use e.code instead of e.status — HTTPError.status
            # is not available on the older Python versions (3.7/3.8) this
            # project's CI still targets; e.code is the documented attribute.
            if e.code == 404:
                continue
            raise
        break
    else:
        raise FileNotFoundError(url)
    return _unwrap(response, ext)
def load_url_with_cache(url, cache_dirs):
    """Load url, preferring a locally cached copy from cache_dirs."""
    for directory in cache_dirs:
        try:
            return load_apt_cache_file(url, directory)
        except FileNotFoundError:
            continue
    # Nothing cached anywhere; fall back to fetching over the network.
    return load_direct_url(url)
def load_apt_cache_file(url, cache_dir):
    """Open a cached copy of url from an apt lists directory.

    Tries compressed variants first and returns a decompressing file object.

    Raises:
      FileNotFoundError: if no readable cached copy exists
    """
    fn = apt_pkg.uri_to_filename(url)
    for ext in [".xz", ".gz", ".lz4", ""]:
        p = os.path.join(cache_dir, fn + ext)
        if not os.path.exists(p):
            continue
        # return os.popen('/usr/lib/apt/apt-helper cat-file %s' % p)
        logging.debug("Loading cached contents file %s", p)
        if ext == ".lz4":
            # lz4 is what apt itself uses for lists; imported lazily since
            # it is an optional dependency.
            import lz4.frame
            return lz4.frame.open(p, mode="rb")
        try:
            f = open(p, "rb")
        except PermissionError as e:
            # Treat unreadable cache entries as absent so callers fall back.
            logging.warning('Unable to open %s: %s', p, e)
            raise FileNotFoundError(url)
        return _unwrap(f, ext)
    raise FileNotFoundError(url)
class AptFileFileSearcher(FileSearcher):
    """Search apt contents using the apt-file tool inside the session."""

    # Helper shipped by apt-file; its exit status reports cache state.
    CACHE_IS_EMPTY_PATH = '/usr/share/apt-file/is-cache-empty'

    def __init__(self, session: Session):
        self.session = session

    @classmethod
    def has_cache(cls, session: Session) -> bool:
        """Return True if apt-file is installed and its cache is populated."""
        if not os.path.exists(session.external_path(cls.CACHE_IS_EMPTY_PATH)):
            return False
        try:
            session.check_call([cls.CACHE_IS_EMPTY_PATH])
        except subprocess.CalledProcessError as e:
            # Exit code 1 means "cache is not empty", i.e. usable.
            if e.returncode == 1:
                return True
            raise
        else:
            return False

    @classmethod
    def from_session(cls, session):
        """Install apt-file if needed, refresh its cache, and return a searcher."""
        logging.info('Using apt-file to search apt contents')
        if not os.path.exists(session.external_path(cls.CACHE_IS_EMPTY_PATH)):
            from .apt import AptManager
            AptManager.from_session(session).install(['apt-file'])
        if not cls.has_cache(session):
            session.check_call(['apt-file', 'update'], user='root')
        return cls(session)

    def search_files(self, path, regex=False, case_insensitive=False):
        """Yield package names shipping path, via `apt-file search`."""
        args = []
        if regex:
            args.append('-x')
        else:
            # Fixed-string match.
            args.append('-F')
        if case_insensitive:
            args.append('-i')
        args.append(path)
        try:
            output = self.session.check_output(['/usr/bin/apt-file', 'search'] + args)
        except subprocess.CalledProcessError as e:
            if e.returncode == 1:
                # No results
                return
            if e.returncode == 3:
                raise Exception('apt-file cache is empty')
            raise
        for line in output.splitlines(False):
            # Output lines look like b"package: /path"; note this rebinds
            # the `path` local (harmless — no later use of the argument).
            pkg, path = line.split(b': ')
            yield pkg.decode('utf-8')
def get_apt_contents_file_searcher(session):
    """Pick the best available apt contents searcher for this session.

    Prefers an existing apt-file cache; falls back to downloading the
    remote Contents files.
    """
    if AptFileFileSearcher.has_cache(session):
        searcher_cls = AptFileFileSearcher
    else:
        searcher_cls = RemoteContentsFileSearcher
    return searcher_cls.from_session(session)
class RemoteContentsFileSearcher(FileSearcher):
    """Search apt Contents files downloaded from the configured mirrors."""

    def __init__(self):
        # Maps path (bytes) -> Contents "rest" field (bytes); the final
        # /-separated component of the value is the package name.
        self._db = {}

    @classmethod
    def from_session(cls, session):
        logging.info("Loading apt contents information")
        self = cls()
        self.load_from_session(session)
        return self

    def load_local(self):
        """Load contents data using the host's /etc/apt/sources.list."""
        # TODO(jelmer): what about sources.list.d?
        from aptsources.sourceslist import SourcesList
        sl = SourcesList()
        sl.load("/etc/apt/sources.list")
        from .build import get_build_architecture
        cache_dirs = set(["/var/lib/apt/lists"])
        def load_url(url):
            return load_url_with_cache(url, cache_dirs)
        urls = list(
            contents_urls_from_sourceslist(sl, get_build_architecture(), load_url)
        )
        self._load_urls(urls, cache_dirs, load_url)

    def load_from_session(self, session):
        """Load contents data using the session's apt configuration."""
        # TODO(jelmer): what about sources.list.d?
        from aptsources.sourceslist import SourcesList
        sl = SourcesList()
        sl.load(os.path.join(session.location, "etc/apt/sources.list"))
        from .build import get_build_architecture
        # Check both the session's apt lists and the host's.
        cache_dirs = set(
            [
                os.path.join(session.location, "var/lib/apt/lists"),
                "/var/lib/apt/lists",
            ]
        )
        def load_url(url):
            return load_url_with_cache(url, cache_dirs)
        urls = list(
            contents_urls_from_sourceslist(sl, get_build_architecture(), load_url)
        )
        self._load_urls(urls, cache_dirs, load_url)

    def _load_urls(self, urls, cache_dirs, load_url):
        # Best-effort: a missing contents file is logged, not fatal.
        for url in urls:
            try:
                f = load_url(url)
                self.load_file(f, url)
            except ContentsFileNotFound:
                logging.warning("Unable to fetch contents file %s", url)

    def __setitem__(self, path, package):
        self._db[path] = package

    def search_files(self, path, regex=False, case_insensitive=False):
        """Yield package names for database entries matching path."""
        path = path.lstrip("/").encode("utf-8", "surrogateescape")
        if case_insensitive and not regex:
            # Case-insensitive exact match is implemented as an escaped regex.
            regex = True
            path = re.escape(path)
        if regex:
            flags = 0
            if case_insensitive:
                flags |= re.I
            c = re.compile(path, flags=flags)
            ret = []
            for p, rest in self._db.items():
                if c.match(p):
                    pkg = rest.split(b"/")[-1]
                    ret.append((p, pkg.decode("utf-8")))
            # Sorted by path for deterministic output.
            for p, pkg in sorted(ret):
                yield pkg
        else:
            try:
                yield self._db[path].split(b"/")[-1].decode("utf-8")
            except KeyError:
                pass

    def load_file(self, f, url):
        """Parse one contents file into the in-memory database."""
        start_time = datetime.now()
        for path, rest in read_contents_file(f.readlines()):
            self[path] = rest
        logging.debug("Read %s in %s", url, datetime.now() - start_time)
class GeneratedFileSearcher(FileSearcher):
    """File searcher backed by a static sequence of (path, package) pairs."""

    def __init__(self, db):
        # db: list of (path, package-name) tuples. search_files() iterates
        # pairs, so this must be a sequence of 2-tuples, not a dict.
        self._db = db

    @classmethod
    def from_path(cls, path):
        """Load a searcher from a whitespace-separated "<path> <package>" file."""
        # BUGFIX: was cls({}) — a dict has no .append(), so load_from_path
        # crashed, and iterating a dict yields keys rather than pairs.
        self = cls([])
        self.load_from_path(path)
        return self

    def load_from_path(self, path):
        """Append entries from *path*; each line is "<path> <package>"."""
        with open(path, "r") as f:
            for line in f:
                (path, pkg) = line.strip().split(None, 1)
                # BUGFIX: was self._db.append(path, pkg) — list.append takes
                # a single argument; store a tuple to match search_files().
                self._db.append((path, pkg))

    def search_files(
        self, path: str, regex: bool = False, case_insensitive: bool = False
    ) -> Iterator[str]:
        """Yield package names whose recorded path matches *path*."""
        for p, pkg in self._db:
            if regex:
                flags = 0
                if case_insensitive:
                    flags |= re.I
                if re.match(path, p, flags=flags):
                    yield pkg
            elif case_insensitive:
                if path.lower() == p.lower():
                    yield pkg
            else:
                if path == p:
                    yield pkg
# TODO(jelmer): read from a file
# Static fallback mapping for well-known paths that the apt Contents
# search does not resolve well (symlinks, alternatives, /usr/share).
GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
    [
        ("/etc/locale.gen", "locales"),
        # Alternative
        ("/usr/bin/rst2html", "python3-docutils"),
        # aclocal is a symlink to aclocal-1.XY
        ("/usr/bin/aclocal", "automake"),
        ("/usr/bin/automake", "automake"),
        # maven lives in /usr/share
        ("/usr/bin/mvn", "maven"),
    ]
)
def get_packages_for_paths(
    paths: List[str],
    searchers: List[FileSearcher],
    regex: bool = False,
    case_insensitive: bool = False,
) -> List[str]:
    """Return package names shipping any of *paths*.

    Every searcher is consulted for every path; results are deduplicated
    while preserving first-seen order.
    """
    found: List[str] = []
    seen = set()
    for path in paths:
        for searcher in searchers:
            for pkg in searcher.search_files(
                path, regex=regex, case_insensitive=case_insensitive
            ):
                if pkg in seen:
                    continue
                seen.add(pkg)
                found.append(pkg)
    return found
def main(argv):
    """Command-line entry point: print the packages shipping given paths."""
    import argparse

    from ..session.plain import PlainSession

    # NOTE(review): the argv parameter is unused; parse_args() reads
    # sys.argv directly — confirm whether argv should be forwarded.
    parser = argparse.ArgumentParser()
    parser.add_argument("path", help="Path to search for.", type=str, nargs="*")
    parser.add_argument("--regex", "-x", help="Search for regex.", action="store_true")
    parser.add_argument("--debug", action="store_true")
    args = parser.parse_args()
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    # Contents search against the host session, plus the static fallback map.
    main_searcher = get_apt_contents_file_searcher(PlainSession())
    main_searcher.load_local()
    searchers = [main_searcher, GENERATED_FILE_SEARCHER]
    packages = get_packages_for_paths(args.path, searchers=searchers, regex=args.regex)
    for package in packages:
        print(package)


if __name__ == "__main__":
    import sys

    sys.exit(main(sys.argv))

View file

@ -0,0 +1,721 @@
#!/usr/bin/python
# Copyright (C) 2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
__all__ = [
"build_incrementally",
]
from functools import partial
import logging
import os
import re
import shutil
import sys
from typing import List, Set, Optional, Type
from debian.deb822 import (
Deb822,
PkgRelation,
)
from breezy.commit import PointlessCommit, NullCommitReporter
from breezy.tree import Tree
from debmutate.changelog import ChangelogEditor
from debmutate.control import (
ensure_relation,
ControlEditor,
)
from debmutate.debhelper import (
get_debhelper_compat_level,
)
from debmutate.deb822 import (
Deb822Editor,
)
from debmutate.reformatting import (
FormattingUnpreservable,
GeneratedFile,
)
try:
    from breezy.workspace import reset_tree
except ImportError:  # breezy < 3.2 — provide a local fallback

    def delete_items(deletables, dry_run=False):
        """Delete files in the deletables iterable"""
        import errno
        import shutil

        def onerror(function, path, excinfo):
            """Show warning for errors seen by rmtree."""
            # Handle only permission error while removing files.
            # Other errors are re-raised.
            if function is not os.remove or excinfo[1].errno != errno.EACCES:
                raise
            logging.warning("unable to remove %s" % path)

        for path, subp in deletables:
            if os.path.isdir(path):
                shutil.rmtree(path, onerror=onerror)
            else:
                try:
                    os.unlink(path)
                except OSError as e:
                    # We handle only permission error here
                    if e.errno != errno.EACCES:
                        raise e
                    logging.warning('unable to remove "%s": %s.', path, e.strerror)

    def reset_tree(local_tree, subpath=""):
        """Revert *local_tree* to its basis tree and remove unknown files.

        Fallback implementation of breezy.workspace.reset_tree for
        breezy < 3.2; reverts tracked changes, then deletes untracked
        (unknown) files.
        """
        from breezy.transform import revert
        from breezy.clean_tree import iter_deletables

        revert(
            local_tree,
            local_tree.branch.basis_tree(),
            # A subpath of "." or "" means the whole tree.
            [subpath] if subpath not in (".", "") else None,
        )
        deletables = list(
            iter_deletables(local_tree, unknown=True, ignored=False, detritus=False)
        )
        delete_items(deletables)
from debmutate._rules import (
dh_invoke_add_with,
update_rules,
)
from breezy.plugins.debian.changelog import debcommit
from buildlog_consultant import Problem
from buildlog_consultant.apt import (
AptFetchFailure,
)
from buildlog_consultant.common import (
MissingConfigStatusInput,
MissingAutomakeInput,
MissingConfigure,
NeedPgBuildExtUpdateControl,
MissingPerlFile,
)
from buildlog_consultant.sbuild import (
DebcargoUnacceptablePredicate,
)
from .build import (
DetailedDebianBuildFailure,
UnidentifiedDebianBuildError,
)
from ..buildlog import problem_to_upstream_requirement
from ..fix_build import BuildFixer, resolve_error
from ..resolver.apt import (
AptRequirement,
)
from .build import attempt_build, DEFAULT_BUILDER
# Maximum number of fix-and-rebuild attempts before giving up.
DEFAULT_MAX_ITERATIONS = 10


class CircularDependency(Exception):
    """Adding dependency would introduce cycle."""

    def __init__(self, package):
        # Name of the binary package that would satisfy the requirement.
        self.package = package
class DebianPackagingContext(object):
    """Shared state for fixers operating on a Debian packaging tree."""

    def __init__(
        self, tree, subpath, committer, update_changelog, commit_reporter=None
    ):
        self.tree = tree
        self.subpath = subpath
        self.committer = committer
        self.update_changelog = update_changelog
        self.commit_reporter = commit_reporter

    def abspath(self, *parts):
        """Return the absolute path of *parts* inside the packaging subpath."""
        return self.tree.abspath(os.path.join(self.subpath, *parts))

    def commit(self, summary: str, update_changelog: Optional[bool] = None) -> bool:
        """Commit pending tree changes with message *summary*.

        Args:
          summary: commit message, also used as the changelog entry.
          update_changelog: override the context-wide changelog policy.

        Returns:
          True if a commit was created, False if there was nothing to commit.
        """
        if update_changelog is None:
            update_changelog = self.update_changelog
        with self.tree.lock_write():
            try:
                if update_changelog:
                    cl_path = self.abspath("debian/changelog")
                    with ChangelogEditor(cl_path) as editor:
                        editor.add_entry([summary])
                    # debcommit derives the commit message from the changelog.
                    debcommit(
                        self.tree, committer=self.committer,
                        subpath=self.subpath,
                        reporter=self.commit_reporter)
                else:
                    self.tree.commit(
                        message=summary,
                        committer=self.committer,
                        specific_files=[self.subpath],
                        reporter=self.commit_reporter,
                    )
            except PointlessCommit:
                return False
            else:
                return True
class PackageDependencyFixer(BuildFixer):
    """Fixer that turns build problems into new package dependencies.

    Maps a problem to upstream requirements via
    problem_to_upstream_requirement, resolves each to an apt package with
    *apt_resolver*, and adds it as a build or test dependency.
    """

    def __init__(self, context, apt_resolver):
        self.apt_resolver = apt_resolver
        self.context = context

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.apt_resolver)

    def __str__(self):
        return "upstream requirement fixer(%s)" % self.apt_resolver

    def can_fix(self, error):
        # Fixable iff the problem maps to at least one upstream requirement.
        req = problem_to_upstream_requirement(error)
        return req is not None

    def fix(self, error, phase):
        """Resolve and add all requirements implied by *error*.

        Returns True if at least one dependency was added; False if the
        problem maps to nothing or any requirement is unresolvable.
        """
        reqs = problem_to_upstream_requirement(error)
        if reqs is None:
            return False
        if not isinstance(reqs, list):
            reqs = [reqs]
        changed = False
        for req in reqs:
            apt_req = self.apt_resolver.resolve(req)
            if apt_req is None:
                # One unresolvable requirement aborts the whole fix.
                return False
            if add_dependency(self.context, phase, apt_req):
                changed = True
        return changed
def add_dependency(context, phase, requirement: AptRequirement):
    """Dispatch a dependency addition based on the build phase.

    "build" phases add a Build-Depends entry; "autopkgtest" phases add a
    test dependency for the named test. Unknown phases are logged and
    ignored.
    """
    kind = phase[0]
    if kind == "build":
        return add_build_dependency(context, requirement)
    if kind == "autopkgtest":
        return add_test_dependency(context, phase[1], requirement)
    logging.warning("Unknown phase %r", phase)
    return False
def add_build_dependency(context, requirement: AptRequirement):
    """Add *requirement* to Build-Depends in debian/control.

    Returns:
      True if the dependency was added and committed; False if it was
      already present or control could not be edited safely.

    Raises:
      CircularDependency: if the requirement would be satisfied by one of
        the package's own binary packages.
    """
    if not isinstance(requirement, AptRequirement):
        raise TypeError(requirement)
    control_path = context.abspath("debian/control")
    try:
        with ControlEditor(path=control_path) as updater:
            for binary in updater.binaries:
                # Depending on one of our own binaries would create a cycle.
                if requirement.touches_package(binary["Package"]):
                    raise CircularDependency(binary["Package"])
            for rel in requirement.relations:
                updater.source["Build-Depends"] = ensure_relation(
                    updater.source.get("Build-Depends", ""), PkgRelation.str([rel])
                )
    except FormattingUnpreservable as e:
        logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
        return False
    desc = requirement.pkg_relation_str()
    if not updater.changed:
        logging.info("Giving up; dependency %s was already present.", desc)
        return False
    logging.info("Adding build dependency: %s", desc)
    return context.commit("Add missing build dependency on %s." % desc)
def add_test_dependency(context, testname, requirement):
    """Add *requirement* to the Depends of test *testname* in
    debian/tests/control.

    Returns:
      True if the dependency was added and committed; False if nothing
      changed or the file could not be edited safely.
    """
    if not isinstance(requirement, AptRequirement):
        raise TypeError(requirement)
    tests_control_path = context.abspath("debian/tests/control")
    # TODO(jelmer): If requirement is for one of our binary packages
    # but "@" is already present then don't do anything.
    try:
        with Deb822Editor(path=tests_control_path) as updater:
            command_counter = 1
            for control in updater.paragraphs:
                try:
                    name = control["Tests"]
                except KeyError:
                    # Unnamed Test-Command paragraphs get synthetic names
                    # "command1", "command2", ... for matching purposes.
                    name = "command%d" % command_counter
                    command_counter += 1
                if name != testname:
                    continue
                for rel in requirement.relations:
                    control["Depends"] = ensure_relation(
                        control.get("Depends", ""), PkgRelation.str([rel])
                    )
    except FormattingUnpreservable as e:
        logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
        return False
    if not updater.changed:
        return False
    desc = requirement.pkg_relation_str()
    logging.info("Adding dependency to test %s: %s", testname, desc)
    return context.commit(
        "Add missing dependency for test %s on %s." % (testname, desc),
    )
def targeted_python_versions(tree: Tree, subpath: str) -> List[str]:
    """Determine which Python flavours debian/control build-depends target.

    Returns a subset of ["python3", "pypy", "python"], in that order,
    based on the prefixes of the Build-Depends package names.
    """
    with tree.get_file(os.path.join(subpath, "debian/control")) as f:
        control = Deb822(f)
    relations = PkgRelation.parse_relations(control.get("Build-Depends", ""))
    dep_names: Set[str] = set()
    for alternatives in relations:
        dep_names.update(alt["name"] for alt in alternatives)

    targeted = []
    for flavour, prefix in [
        ("python3", "python3-"),
        ("pypy", "pypy"),
        ("python", "python-"),
    ]:
        if any(name.startswith(prefix) for name in dep_names):
            targeted.append(flavour)
    return targeted
def python_tie_breaker(tree, subpath, reqs):
    """Tie-break between alternative requirements using the Python
    flavours the package already build-depends on.

    Returns the first requirement matching a targeted flavour, or None
    when no flavour is targeted or nothing matches.
    """
    targeted = targeted_python_versions(tree, subpath)
    if not targeted:
        return None

    def same(pkg, python_version):
        # Match "python3-foo", "libpython3-..." and "libpython3.X-dev"
        # naming styles.
        if pkg.startswith(python_version + "-"):
            return True
        if pkg.startswith("lib%s-" % python_version):
            return True
        if re.match(r'lib%s\.[0-9]-dev' % python_version, pkg):
            return True
        return False

    for python_version in targeted:
        for req in reqs:
            if any(same(name, python_version) for name in req.package_names()):
                logging.info(
                    "Breaking tie between %r to %r, since package already "
                    "has %r build-dependencies",
                    [str(req) for req in reqs],
                    str(req),
                    python_version,
                )
                return req
    return None
def retry_apt_failure(error, phase, apt, context):
    """Always retry after an apt fetch failure (assumed transient).

    Makes no tree changes; returning True simply reruns the build.
    """
    return True
def enable_dh_autoreconf(context, phase):
    """Enable dh-autoreconf in debian/rules for old debhelper compat levels.

    Returns True if debian/rules was modified and the dh-autoreconf
    dependency was added; False otherwise.
    """
    # Debhelper >= 10 depends on dh-autoreconf and enables autoreconf by
    # default.
    debhelper_compat_version = get_debhelper_compat_level(context.tree.abspath("."))
    if debhelper_compat_version is not None and debhelper_compat_version < 10:

        def add_with_autoreconf(line, target):
            # Only rewrite "dh ..." invocations under the catch-all "%" target.
            if target != b"%":
                return line
            if not line.startswith(b"dh "):
                return line
            return dh_invoke_add_with(line, b"autoreconf")

        # NOTE(review): update_rules is called without an explicit path here
        # (unlike fix_missing_config_status_input) — presumably it defaults
        # to the current working directory's debian/rules; confirm.
        if update_rules(command_line_cb=add_with_autoreconf):
            return add_dependency(
                context, phase, AptRequirement.simple("dh-autoreconf")
            )
    return False
def fix_missing_configure(error, phase, context):
    """Enable dh-autoreconf when ./configure is missing but autoconf
    input files exist in the tree."""
    has_autoconf_input = context.tree.has_filename(
        "configure.ac"
    ) or context.tree.has_filename("configure.in")
    if not has_autoconf_input:
        return False
    return enable_dh_autoreconf(context, phase)
def fix_missing_automake_input(error, phase, context):
    """Enable dh-autoreconf when automake input files are reported missing."""
    # TODO(jelmer): If it's ./NEWS, ./AUTHORS or ./README that's missing, then
    # try to set 'export AUTOMAKE = automake --foreign' in debian/rules.
    # https://salsa.debian.org/jelmer/debian-janitor/issues/88
    return enable_dh_autoreconf(context, phase)
def fix_missing_config_status_input(error, phase, context):
    """Run ./autogen.sh during the build when config.status input is missing.

    Adds an override_dh_autoreconf rule to debian/rules that invokes
    autogen.sh; returns True if rules changed and the change was committed.
    """
    autogen_path = "autogen.sh"
    rules_path = "debian/rules"
    if context.subpath not in (".", ""):
        autogen_path = os.path.join(context.subpath, autogen_path)
        rules_path = os.path.join(context.subpath, rules_path)
    if not context.tree.has_filename(autogen_path):
        return False

    def add_autogen(mf):
        # Add an override_dh_autoreconf rule unless one already exists.
        rule = any(mf.iter_rules(b"override_dh_autoreconf"))
        if rule:
            return
        rule = mf.add_rule(b"override_dh_autoreconf")
        rule.append_command(b"dh_autoreconf ./autogen.sh")

    if not update_rules(makefile_cb=add_autogen, path=rules_path):
        return False
    return context.commit("Run autogen.sh during build.")
class PgBuildExtOutOfDateControlFixer(BuildFixer):
    """Regenerate debian/control via 'pg_buildext updatecontrol'."""

    def __init__(self, packaging_context, session, apt):
        self.session = session
        self.context = packaging_context
        self.apt = apt

    def can_fix(self, problem):
        return isinstance(problem, NeedPgBuildExtUpdateControl)

    def __repr__(self):
        return "%s()" % (type(self).__name__,)

    def _fix(self, error, phase):
        """Run pg_buildext inside the session and copy the result back.

        Returns True if the regenerated file produced a commit.
        """
        logging.info("Running 'pg_buildext updatecontrol'")
        # pg_buildext is shipped by postgresql-common.
        self.apt.install(['postgresql-common'])
        external_dir, internal_dir = self.session.setup_from_vcs(
            self.context.tree, include_controldir=None,
            subdir=self.context.subpath)
        self.session.chdir(internal_dir)
        self.session.check_call(["pg_buildext", "updatecontrol"])
        # Copy the regenerated file from the session export back into the
        # packaging tree.
        shutil.copy(
            os.path.join(external_dir, error.generated_path),
            self.context.abspath(error.generated_path)
        )
        # BUGFIX: commit message previously misspelled the command as
        # 'pgbuildext'; the invoked command is 'pg_buildext'.
        return self.context.commit(
            "Run 'pg_buildext updatecontrol'.", update_changelog=False
        )
def fix_missing_makefile_pl(error, phase, context):
    """Handle a missing Makefile.PL for dist-zilla based Perl packages.

    Currently unimplemented: raises NotImplementedError for the dist.ini
    case, returns False for everything else.
    """
    is_dist_zilla_package = (
        error.filename == "Makefile.PL"
        and not context.tree.has_filename("Makefile.PL")
        and context.tree.has_filename("dist.ini")
    )
    if is_dist_zilla_package:
        # TODO(jelmer): add dist-zilla add-on to debhelper
        raise NotImplementedError
    return False
def coerce_unacceptable_predicate(error, phase, context):
    """Allow prerelease dependencies in debian/debcargo.toml.

    Returns True if the setting was changed and committed.
    """
    from debmutate.debcargo import DebcargoEditor

    with DebcargoEditor(context.abspath('debian/debcargo.toml')) as editor:
        editor['allow_prerelease_deps'] = True
    return context.commit('Enable allow_prerelease_deps.')
class SimpleBuildFixer(BuildFixer):
    """BuildFixer that delegates a single problem class to a callback.

    The callback receives (problem, phase, context).
    """

    def __init__(self, packaging_context, problem_cls: Type[Problem], fn):
        self.context = packaging_context
        self._problem_cls = problem_cls
        self._fn = fn

    def __repr__(self):
        return "{}({}, {})".format(
            type(self).__name__, self._problem_cls.__name__, self._fn.__name__
        )

    def can_fix(self, problem: Problem):
        """Fixable iff the problem is an instance of the handled class."""
        return isinstance(problem, self._problem_cls)

    def _fix(self, problem: Problem, phase):
        return self._fn(problem, phase, self.context)
class DependencyBuildFixer(BuildFixer):
    """BuildFixer that forwards one problem class to a callback, passing
    the apt resolver along.

    The callback receives (problem, phase, apt_resolver, context).
    """

    def __init__(self, packaging_context, apt_resolver, problem_cls: Type[Problem], fn):
        self.context = packaging_context
        self.apt_resolver = apt_resolver
        self._problem_cls = problem_cls
        self._fn = fn

    def __repr__(self):
        return "{}({}, {})".format(
            type(self).__name__, self._problem_cls.__name__, self._fn.__name__
        )

    def can_fix(self, problem: Problem):
        """Fixable iff the problem is an instance of the handled class."""
        return isinstance(problem, self._problem_cls)

    def _fix(self, problem: Problem, phase):
        return self._fn(problem, phase, self.apt_resolver, self.context)
def versioned_package_fixers(session, packaging_context, apt):
    """Fixers that address specific packaging problems (as opposed to
    generic missing-dependency problems)."""
    return [
        PgBuildExtOutOfDateControlFixer(packaging_context, session, apt),
        SimpleBuildFixer(packaging_context, MissingConfigure, fix_missing_configure),
        SimpleBuildFixer(
            packaging_context, MissingAutomakeInput, fix_missing_automake_input
        ),
        SimpleBuildFixer(
            packaging_context, MissingConfigStatusInput, fix_missing_config_status_input
        ),
        SimpleBuildFixer(packaging_context, MissingPerlFile, fix_missing_makefile_pl),
        SimpleBuildFixer(packaging_context, DebcargoUnacceptablePredicate, coerce_unacceptable_predicate),
    ]
def apt_fixers(apt, packaging_context) -> List[BuildFixer]:
    """Build the fixers that resolve missing dependencies via apt.

    Tie-breakers run in order: prefer the Python flavour the package
    already targets, then existing build-dependency frequency, then
    popcon install counts.
    """
    from ..resolver.apt import AptResolver
    from .udd import popcon_tie_breaker
    from .build_deps import BuildDependencyTieBreaker

    apt_tie_breakers = [
        partial(python_tie_breaker, packaging_context.tree, packaging_context.subpath),
        BuildDependencyTieBreaker.from_session(apt.session),
        popcon_tie_breaker,
    ]
    resolver = AptResolver(apt, apt_tie_breakers)
    return [
        # Transient apt fetch failures are simply retried.
        DependencyBuildFixer(
            packaging_context, apt, AptFetchFailure, retry_apt_failure
        ),
        PackageDependencyFixer(packaging_context, resolver),
    ]
def default_fixers(local_tree, subpath, apt, committer=None, update_changelog=None):
    """Assemble the standard fixer set for incremental Debian builds.

    Problem-specific fixers run before the generic apt dependency fixers.
    """
    packaging_context = DebianPackagingContext(
        local_tree, subpath, committer, update_changelog,
        commit_reporter=NullCommitReporter()
    )
    return versioned_package_fixers(apt.session, packaging_context, apt) + apt_fixers(
        apt, packaging_context
    )
def build_incrementally(
    local_tree,
    apt,
    suffix,
    build_suite,
    output_directory,
    build_command,
    build_changelog_entry,
    committer=None,
    max_iterations=DEFAULT_MAX_ITERATIONS,
    subpath="",
    source_date_epoch=None,
    update_changelog=True,
    extra_repositories=None,
    fixers=None
):
    """Build a package repeatedly, fixing detected problems between attempts.

    After each identified failure the tree is reset, a matching fixer is
    applied (committing its change) and the build retried — until the
    build succeeds, no fixer applies, the same error repeats, or
    *max_iterations* is exceeded.

    Returns:
      the result of attempt_build() on success.

    Raises:
      DetailedDebianBuildFailure: a recognised error that could not be fixed.
      UnidentifiedDebianBuildError: the failure was not recognised at all.
    """
    fixed_errors = []
    if fixers is None:
        fixers = default_fixers(
            local_tree, subpath, apt, committer=committer,
            update_changelog=update_changelog)
    logging.info("Using fixers: %r", fixers)
    while True:
        try:
            return attempt_build(
                local_tree,
                suffix,
                build_suite,
                output_directory,
                build_command,
                build_changelog_entry,
                subpath=subpath,
                source_date_epoch=source_date_epoch,
                run_gbp_dch=(update_changelog is False),
                extra_repositories=extra_repositories,
            )
        except UnidentifiedDebianBuildError:
            logging.warning("Build failed with unidentified error. Giving up.")
            raise
        except DetailedDebianBuildFailure as e:
            if e.phase is None:
                logging.info("No relevant context, not making any changes.")
                raise
            if (e.error, e.phase) in fixed_errors:
                # The same error came back after a supposed fix.
                logging.warning("Error was still not fixed on second try. Giving up.")
                raise
            if max_iterations is not None and len(fixed_errors) > max_iterations:
                logging.warning("Last fix did not address the issue. Giving up.")
                raise
            # Discard leftover uncommitted changes from the failed build
            # before applying the next fix.
            reset_tree(local_tree, subpath=subpath)
            try:
                if not resolve_error(e.error, e.phase, fixers):
                    logging.warning("Failed to resolve error %r. Giving up.", e.error)
                    raise
            except GeneratedFile:
                logging.warning(
                    "Control file is generated, unable to edit to "
                    "resolver error %r.",
                    e.error,
                )
                raise e
            except CircularDependency:
                logging.warning(
                    "Unable to fix %r; it would introduce a circular " "dependency.",
                    e.error,
                )
                raise e
            fixed_errors.append((e.error, e.phase))
            # Rotate build.log so each attempt's log is preserved as
            # build.log.1, build.log.2, ...
            if os.path.exists(os.path.join(output_directory, "build.log")):
                i = 1
                while os.path.exists(
                    os.path.join(output_directory, "build.log.%d" % i)
                ):
                    i += 1
                target_path = os.path.join(output_directory, "build.log.%d" % i)
                os.rename(os.path.join(output_directory, "build.log"), target_path)
                logging.debug("Storing build log at %s", target_path)
def main(argv=None):
    """Command-line entry point for ognibuild.debian.fix_build.

    Returns 1 on build failure, None on success.
    """
    import argparse

    parser = argparse.ArgumentParser("ognibuild.debian.fix_build")
    parser.add_argument(
        "--suffix", type=str, help="Suffix to use for test builds.", default="fixbuild1"
    )
    parser.add_argument(
        "--suite", type=str, help="Suite to target.", default="unstable"
    )
    parser.add_argument(
        "--output-directory", type=str, help="Output directory.", default=None
    )
    parser.add_argument(
        "--committer", type=str, help="Committer string (name and email)", default=None
    )
    parser.add_argument(
        "--build-command",
        type=str,
        help="Build command",
        default=(DEFAULT_BUILDER + " -A -s -v"),
    )
    # --no-update-changelog / --update-changelog both write to
    # update_changelog; the default None means "decide automatically".
    parser.add_argument(
        "--no-update-changelog",
        action="store_false",
        default=None,
        dest="update_changelog",
        help="do not update the changelog",
    )
    parser.add_argument(
        '--max-iterations',
        type=int,
        default=DEFAULT_MAX_ITERATIONS,
        help='Maximum number of issues to attempt to fix before giving up.')
    parser.add_argument(
        "--update-changelog",
        action="store_true",
        dest="update_changelog",
        help="force updating of the changelog",
        default=None,
    )
    parser.add_argument("--schroot", type=str, help="chroot to use.")
    parser.add_argument("--verbose", action="store_true", help="Be verbose")
    args = parser.parse_args()

    from breezy.workingtree import WorkingTree
    import breezy.git  # noqa: F401
    import breezy.bzr  # noqa: F401
    from .apt import AptManager
    from ..session.plain import PlainSession
    from ..session.schroot import SchrootSession
    import tempfile
    import contextlib

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format="%(message)s")
    else:
        logging.basicConfig(level=logging.INFO, format="%(message)s")

    with contextlib.ExitStack() as es:
        if args.output_directory is None:
            # Temporary directory is cleaned up when the ExitStack closes.
            output_directory = es.enter_context(tempfile.TemporaryDirectory())
            logging.info("Using output directory %s", output_directory)
        else:
            output_directory = args.output_directory

        tree = WorkingTree.open(".")
        # Build inside a schroot when requested, otherwise on the host.
        if args.schroot:
            session = SchrootSession(args.schroot)
        else:
            session = PlainSession()
        es.enter_context(session)
        apt = AptManager(session)

        try:
            (changes_filenames, cl_entry) = build_incrementally(
                tree,
                apt,
                args.suffix,
                args.suite,
                output_directory,
                args.build_command,
                None,
                committer=args.committer,
                update_changelog=args.update_changelog,
                max_iterations=args.max_iterations,
            )
        except DetailedDebianBuildFailure as e:
            # Render the failing phase for the error message.
            if e.phase is None:
                phase = "unknown phase"
            elif len(e.phase) == 1:
                phase = e.phase[0]
            else:
                phase = "%s (%s)" % (e.phase[0], e.phase[1])
            logging.fatal("Error during %s: %s", phase, e.error)
            return 1
        except UnidentifiedDebianBuildError as e:
            if e.phase is None:
                phase = "unknown phase"
            elif len(e.phase) == 1:
                phase = e.phase[0]
            else:
                phase = "%s (%s)" % (e.phase[0], e.phase[1])
            logging.fatal("Error during %s: %s", phase, e.description)
            return 1
        logging.info(
            'Built %s - changes file at %r.',
            cl_entry.version, changes_filenames)


if __name__ == "__main__":
    sys.exit(main(sys.argv))

60
ognibuild/debian/udd.py Normal file
View file

@ -0,0 +1,60 @@
#!/usr/bin/python3
# Copyright (C) 2021 Jelmer Vernooij
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Support for accessing UDD."""
import logging
class UDD(object):
    """Minimal client for the public UDD (Ultimate Debian Database) mirror."""

    def connect(self):
        """Open a connection to the public udd-mirror instance.

        psycopg2 is imported lazily so the module can be loaded without it.
        """
        import psycopg2

        self._conn = psycopg2.connect(
            database="udd",
            user="udd-mirror",
            password="udd-mirror",
            port=5432,
            host="udd-mirror.debian.net",
        )

    def get_most_popular(self, packages):
        """Return the name in *packages* with the highest popcon install
        count, or None if none of them appear in popcon.

        Args:
          packages: non-empty iterable of package names.
        """
        cursor = self._conn.cursor()
        cursor.execute(
            "SELECT package FROM popcon WHERE package IN %s ORDER BY insts DESC LIMIT 1",
            (tuple(packages),),
        )
        row = cursor.fetchone()
        # BUGFIX: fetchone() returns None when no row matched; the original
        # unconditionally indexed [0], raising TypeError even though the
        # caller (popcon_tie_breaker) explicitly handles a None result.
        if row is None:
            return None
        return row[0]
def popcon_tie_breaker(candidates):
    """Pick a candidate by popcon install count.

    Returns the candidate whose primary package name is most installed,
    or None when popcon has no data for any candidate.
    """
    # TODO(jelmer): Pick package based on what appears most commonly in
    # build-depends{-indep,-arch}
    try:
        from .udd import UDD
    except ModuleNotFoundError:
        # NOTE(review): psycopg2 is imported lazily inside UDD.connect(),
        # so this guard only fires if the udd module itself cannot be
        # imported — a missing psycopg2 would instead surface from
        # connect() below. Confirm the intended failure mode.
        logging.warning("Unable to import UDD, not ranking by popcon")
        # NOTE(review): key=len on the candidate objects — presumably they
        # define a meaningful __len__; verify against the caller.
        return sorted(candidates, key=len)[0]
    udd = UDD()
    udd.connect()
    # Map each candidate's first package name back to the candidate object.
    names = {list(c.package_names())[0]: c for c in candidates}
    winner = udd.get_most_popular(list(names.keys()))
    if winner is None:
        logging.warning("No relevant popcon information found, not ranking by popcon")
        return None
    logging.info("Picked winner using popcon")
    return names[winner]

208
ognibuild/dist.py Normal file
View file

@ -0,0 +1,208 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
__all__ = [
"UnidentifiedError",
"DetailedFailure",
"create_dist",
"create_dist_schroot",
]
import errno
import logging
import os
import sys
from typing import Optional, List
from debian.deb822 import Deb822
from breezy.tree import Tree
from breezy.workingtree import WorkingTree
from buildlog_consultant.common import (
NoSpaceOnDevice,
)
from . import DetailedFailure, UnidentifiedError
from .dist_catcher import DistNoTarball
from .buildsystem import NoBuildToolsFound
from .resolver import auto_resolver
from .session import Session
from .session.schroot import SchrootSession
def run_dist(session, buildsystems, resolver, fixers, target_directory, quiet=False):
    """Run the "dist" action for the first detected buildsystem.

    Returns the filename produced by the buildsystem's dist().

    Raises:
      NoBuildToolsFound: if *buildsystems* is empty.
    """
    # Some things want to write to the user's home directory,
    # e.g. pip caches in ~/.cache
    session.create_home()
    logging.info('Using dependency resolver: %s', resolver)
    for buildsystem in buildsystems:
        filename = buildsystem.dist(
            session, resolver, fixers, target_directory, quiet=quiet
        )
        # Only the first buildsystem is used; the loop exists so an empty
        # list falls through to NoBuildToolsFound.
        return filename
    raise NoBuildToolsFound()
def create_dist(
    session: Session,
    tree: Tree,
    target_dir: str,
    include_controldir: bool = True,
    subdir: Optional[str] = None,
    cleanup: bool = False,
) -> Optional[str]:
    """Export *tree* into *session* and build a dist tarball.

    Args:
      session: session (plain or schroot) in which to run the build.
      tree: source tree to export.
      target_dir: directory to place the resulting tarball in.
      include_controldir: whether to keep VCS control directories.
      subdir: name for the exported subdirectory (defaults to "package").
      cleanup: accepted for API compatibility; not referenced here.

    Returns:
      the tarball filename as returned by run_dist.
    """
    from .buildsystem import detect_buildsystems
    from .buildlog import InstallFixer
    from .fix_build import BuildFixer
    from .fixers import (
        GitIdentityFixer,
        SecretGpgKeyFixer,
        UnexpandedAutoconfMacroFixer,
    )

    if subdir is None:
        subdir = "package"
    try:
        export_directory, reldir = session.setup_from_vcs(
            tree, include_controldir=include_controldir, subdir=subdir
        )
    except OSError as e:
        if e.errno == errno.ENOSPC:
            # Surface out-of-space during export as a detailed failure.
            raise DetailedFailure(1, ["mkdtemp"], NoSpaceOnDevice())
        raise

    # TODO(jelmer): use scan_buildsystems to also look in subdirectories
    buildsystems = list(detect_buildsystems(export_directory))
    resolver = auto_resolver(session)
    fixers: List[BuildFixer] = [UnexpandedAutoconfMacroFixer(session, resolver)]
    fixers.append(InstallFixer(resolver))
    if session.is_temporary:
        # Only muck about with temporary sessions
        fixers.extend([GitIdentityFixer(session), SecretGpgKeyFixer(session)])
    session.chdir(reldir)
    return run_dist(session, buildsystems, resolver, fixers, target_dir)
def create_dist_schroot(
    tree: Tree,
    target_dir: str,
    chroot: str,
    packaging_tree: Optional[Tree] = None,
    packaging_subpath: Optional[str] = None,
    include_controldir: bool = True,
    subdir: Optional[str] = None,
    cleanup: bool = False,
) -> Optional[str]:
    """Create a dist tarball for *tree* inside a schroot session.

    When *packaging_tree* is given, its build dependencies are installed
    into the session before the dist run.
    """
    with SchrootSession(chroot) as session:
        if packaging_tree is not None:
            from .debian import satisfy_build_deps

            satisfy_build_deps(session, packaging_tree, packaging_subpath)
        return create_dist(
            session,
            tree,
            target_dir,
            include_controldir=include_controldir,
            subdir=subdir,
            cleanup=cleanup,
        )
if __name__ == "__main__":
    # Script entry point: create a dist tarball for a source directory,
    # optionally using an adjacent packaging directory for build deps.
    import argparse
    import breezy.bzr  # noqa: F401
    import breezy.git  # noqa: F401
    from breezy.export import export

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--chroot",
        default="unstable-amd64-sbuild",
        type=str,
        help="Name of chroot to use",
    )
    parser.add_argument(
        "directory",
        default=".",
        type=str,
        nargs="?",
        help="Directory with upstream source.",
    )
    parser.add_argument(
        "--packaging-directory", type=str, help="Path to packaging directory."
    )
    parser.add_argument(
        "--target-directory", type=str, default="..", help="Target directory"
    )
    parser.add_argument("--verbose", action="store_true", help="Be verbose")
    parser.add_argument(
        "--include-controldir", action="store_true", help="Clone rather than export."
    )

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format="%(message)s")
    else:
        logging.basicConfig(level=logging.INFO, format="%(message)s")

    tree = WorkingTree.open(args.directory)
    if args.packaging_directory:
        packaging_tree = WorkingTree.open(args.packaging_directory)
        with packaging_tree.lock_read():
            source = Deb822(packaging_tree.get_file("debian/control"))
            package = source["Source"]
            subdir = package
    else:
        packaging_tree = None
        subdir = None

    try:
        ret = create_dist_schroot(
            tree,
            subdir=subdir,
            target_dir=os.path.abspath(args.target_directory),
            packaging_tree=packaging_tree,
            chroot=args.chroot,
            include_controldir=args.include_controldir,
        )
    except NoBuildToolsFound:
        # BUGFIX: this handler previously caught
        # (NoBuildToolsFound, NotImplementedError), which made the dedicated
        # NotImplementedError handler below unreachable dead code. Catch only
        # NoBuildToolsFound so each case reports its own message.
        logging.info("No build tools found, falling back to simple export.")
        export(tree, "dist.tar.gz", "tgz", None)
    except NotImplementedError:
        logging.info(
            "Build system does not support dist tarball creation, "
            "falling back to simple export."
        )
        export(tree, "dist.tar.gz", "tgz", None)
    except UnidentifiedError as e:
        logging.fatal("Unidentified error: %r", e.lines)
    except DetailedFailure as e:
        logging.fatal("Identified error during dist creation: %s", e.error)
    except DistNoTarball:
        logging.fatal("dist operation did not create a tarball")
    else:
        logging.info("Created %s", ret)
    # NOTE(review): exit status is 0 even after the fatal branches above;
    # confirm whether failures should exit non-zero.
    sys.exit(0)

118
ognibuild/dist_catcher.py Normal file
View file

@ -0,0 +1,118 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import logging
import shutil
import time
class DistNoTarball(Exception):
    """Dist operation did not create a tarball."""
# Archive filename extensions recognised as dist (source tarball) output.
SUPPORTED_DIST_EXTENSIONS = [
    ".tar.gz",
    ".tgz",
    ".tar.bz2",
    ".tar.xz",
    ".tar.lzma",
    ".tbz2",
    ".tar",
    ".zip",
]


def is_dist_file(fn):
    """Return True if *fn* ends with a recognised dist archive extension."""
    return any(fn.endswith(ext) for ext in SUPPORTED_DIST_EXTENSIONS)
class DistCatcher(object):
    """Context manager that detects tarballs created during a dist run.

    Snapshots the given directories on entry; find_files() then reports
    archives that are new, or that were modified after the catcher was
    constructed.
    """

    def __init__(self, directories):
        self.directories = [os.path.abspath(d) for d in directories]
        # Paths of detected tarballs, filled in by find_files().
        self.files = []
        # Directory -> {name: DirEntry} snapshot taken in __enter__.
        self.existing_files = None
        # Reference time for the "updated file" check below.
        self.start_time = time.time()

    @classmethod
    def default(cls, directory):
        """Watch ./dist, the directory itself, and its parent."""
        return cls(
            [os.path.join(directory, "dist"), directory, os.path.join(directory, "..")]
        )

    def __enter__(self):
        self.existing_files = {}
        for directory in self.directories:
            try:
                self.existing_files[directory] = {
                    entry.name: entry for entry in os.scandir(directory)
                }
            except FileNotFoundError:
                # Directory may be created later (e.g. ./dist); treat as empty.
                self.existing_files[directory] = {}
        return self

    def find_files(self):
        """Scan watched directories and record new/updated dist archives.

        Returns the name of the first detected tarball, or None.
        """
        for directory in self.directories:
            old_files = self.existing_files[directory]
            possible_new = []
            possible_updated = []
            if not os.path.isdir(directory):
                continue
            for entry in os.scandir(directory):
                if not entry.is_file() or not is_dist_file(entry.name):
                    continue
                old_entry = old_files.get(entry.name)
                if not old_entry:
                    # Did not exist when the catcher was entered.
                    possible_new.append(entry)
                    continue
                if entry.stat().st_mtime > self.start_time:
                    # Existed before but was rewritten during the run.
                    possible_updated.append(entry)
                    continue
            if len(possible_new) == 1:
                entry = possible_new[0]
                logging.info("Found new tarball %s in %s.", entry.name, directory)
                self.files.append(entry.path)
                return entry.name
            elif len(possible_new) > 1:
                logging.warning(
                    "Found multiple tarballs %r in %s.", possible_new, directory
                )
                self.files.extend([entry.path for entry in possible_new])
                return possible_new[0].name
            if len(possible_updated) == 1:
                entry = possible_updated[0]
                logging.info("Found updated tarball %s in %s.", entry.name, directory)
                self.files.append(entry.path)
                return entry.name

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.find_files()
        # Never suppress exceptions.
        return False

    def copy_single(self, target_dir):
        """Copy the first detected tarball into *target_dir*.

        Returns the tarball's basename; any further detected files are
        intentionally ignored.

        Raises:
          DistNoTarball: if no tarball was detected.
        """
        for path in self.files:
            try:
                shutil.copy(path, target_dir)
            except shutil.SameFileError:
                # Already in place; still report success.
                pass
            return os.path.basename(path)
        logging.info("No tarball created :(")
        raise DistNoTarball()

132
ognibuild/fix_build.py Normal file
View file

@ -0,0 +1,132 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from functools import partial
import logging
from typing import List, Tuple, Callable, Any, Optional
from buildlog_consultant import Problem
from buildlog_consultant.common import (
find_build_failure_description,
MissingCommand,
)
from . import DetailedFailure, UnidentifiedError
from .session import Session, run_with_tee
class BuildFixer(object):
    """Build fixer.

    Base interface: subclasses declare which Problems they handle
    (can_fix) and how to address them (_fix).
    """

    def can_fix(self, problem: Problem):
        # Subclasses report whether they know how to address `problem`.
        raise NotImplementedError(self.can_fix)

    def _fix(self, problem: Problem, phase: Tuple[str, ...]):
        # Subclasses attempt the fix; the return value is treated as
        # "made changes" by resolve_error().
        raise NotImplementedError(self._fix)

    def fix(self, problem: Problem, phase: Tuple[str, ...]):
        """Fix `problem` if this fixer applies; returns None otherwise."""
        if not self.can_fix(problem):
            return None
        return self._fix(problem, phase)
def run_detecting_problems(session: Session, args: List[str], check_success=None, **kwargs):
    """Run a command in `session`, raising a descriptive error on failure.

    Returns the command's output lines on success.  On failure, raises
    DetailedFailure when buildlog-consultant identifies the problem,
    UnidentifiedError otherwise.
    """
    if check_success is None:
        # Default notion of success: exit code zero.
        def check_success(retcode, contents):
            return (retcode == 0)
    try:
        retcode, contents = run_with_tee(session, args, **kwargs)
    except FileNotFoundError:
        # The executable itself is missing: a known, identifiable problem.
        error = MissingCommand(args[0])
        retcode = 1
    else:
        if check_success(retcode, contents):
            return contents
        lines = "".join(contents).splitlines(False)
        match, error = find_build_failure_description(lines)
    # `error is None` only happens via the else-branch, so `lines` and
    # `match` are guaranteed to be bound below.
    if error is None:
        if match:
            logging.warning("Build failed with unidentified error:")
            logging.warning("%s", match.line.rstrip("\n"))
        else:
            logging.warning("Build failed and unable to find cause. Giving up.")
        raise UnidentifiedError(retcode, args, lines, secondary=match)
    raise DetailedFailure(retcode, args, error)
def iterate_with_build_fixers(fixers: List[BuildFixer], cb: Callable[[], Any]):
    """Call cb() until there are no more DetailedFailures we can fix.

    Args:
      fixers: List of fixers to use to resolve issues
      cb: Callable to retry after each successful fix
    Returns:
      the return value of the first successful cb() call
    Raises:
      DetailedFailure: if an error persists after being fixed once,
        recurs while fixing, or cannot be resolved by any fixer
    """
    fixed_errors = []
    while True:
        # Errors waiting to be resolved, most recent last (LIFO).
        to_resolve = []
        try:
            return cb()
        except DetailedFailure as e:
            to_resolve.append(e)
        while to_resolve:
            f = to_resolve.pop(-1)
            logging.info("Identified error: %r", f.error)
            if f.error in fixed_errors:
                # We already "fixed" this once; it came back — bail out.
                logging.warning(
                    "Failed to resolve error %r, it persisted. Giving up.", f.error
                )
                raise f
            try:
                resolved = resolve_error(f.error, None, fixers=fixers)
            except DetailedFailure as n:
                logging.info("New error %r while resolving %r", n, f)
                if n in to_resolve:
                    # Fixing this error just failed with itself queued:
                    # avoid an infinite loop.
                    raise
                # Re-queue the original and tackle the new error first.
                to_resolve.append(f)
                to_resolve.append(n)
            else:
                if not resolved:
                    logging.warning(
                        "Failed to find resolution for error %r. Giving up.", f.error
                    )
                    raise f
                fixed_errors.append(f.error)
def run_with_build_fixers(
    session: Session, args: List[str], fixers: Optional[List[BuildFixer]], **kwargs
):
    """Run `args` in `session`, repairing detected problems via `fixers`."""
    active_fixers = [] if fixers is None else fixers
    command = partial(run_detecting_problems, session, args, **kwargs)
    return iterate_with_build_fixers(active_fixers, command)
def resolve_error(error, phase, fixers):
    """Try each fixer that claims it can handle `error`.

    Returns True as soon as one fixer reports that it made changes,
    False when no fixer applies or none made changes.
    """
    relevant_fixers = [fixer for fixer in fixers if fixer.can_fix(error)]
    if not relevant_fixers:
        logging.warning("No fixer found for %r", error)
        return False
    for fixer in relevant_fixers:
        logging.info("Attempting to use fixer %s to address %r", fixer, error)
        if fixer.fix(error, phase):
            return True
    return False

103
ognibuild/fixers.py Normal file
View file

@ -0,0 +1,103 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import subprocess
from typing import Tuple
from buildlog_consultant import Problem
from buildlog_consultant.common import (
MissingGitIdentity,
MissingSecretGpgKey,
MissingAutoconfMacro,
)
from ognibuild.requirements import AutoconfMacroRequirement
from ognibuild.resolver import UnsatisfiedRequirements
from .fix_build import BuildFixer
class GitIdentityFixer(BuildFixer):
    """Copy the host's global git identity into the build session."""

    def __init__(self, session):
        self.session = session

    def can_fix(self, problem: Problem):
        return isinstance(problem, MissingGitIdentity)

    def _fix(self, problem: Problem, phase: Tuple[str, ...]):
        for name in ["user.email", "user.name"]:
            # Read the value from the host's global git config...
            # NOTE(review): raises CalledProcessError if the host has no
            # global identity configured either — confirm this is intended.
            value = (
                subprocess.check_output(["git", "config", "--global", name])
                .decode()
                .strip()
            )
            # ...and replicate it inside the session.
            self.session.check_call(["git", "config", "--global", name, value])
        return True
class SecretGpgKeyFixer(BuildFixer):
    """Generate a dummy GPG secret key when a build requires one."""

    def __init__(self, session):
        self.session = session

    def can_fix(self, problem: Problem):
        return isinstance(problem, MissingSecretGpgKey)

    def _fix(self, problem: Problem, phase: Tuple[str, ...]):
        # Batch-mode key parameters fed to `gpg --gen-key --batch`.
        SCRIPT = b"""\
Key-Type: 1
Key-Length: 4096
Subkey-Type: 1
Subkey-Length: 4096
Name-Real: Dummy Key for ognibuild
Name-Email: dummy@example.com
Expire-Date: 0
Passphrase: ""
"""
        p = self.session.Popen(
            ["gpg", "--gen-key", "--batch", "/dev/stdin"],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        p.communicate(SCRIPT)
        # Success only if gpg exited cleanly.
        if p.returncode == 0:
            return True
        return False
class UnexpandedAutoconfMacroFixer(BuildFixer):
    """Install the package providing an m4 macro, then re-run autoconf."""

    def __init__(self, session, resolver):
        self.session = session
        self.resolver = resolver

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.resolver)

    def __str__(self):
        return "unexpanded m4 macro fixer (%s)" % self.resolver

    def can_fix(self, error):
        return isinstance(error, MissingAutoconfMacro)

    def _fix(self, error, phase):
        try:
            self.resolver.install([AutoconfMacroRequirement(error.macro)])
        except UnsatisfiedRequirements:
            # Resolver could not provide the macro; report "not fixed".
            return False
        # Deferred import; only needed once a fix is actually attempted.
        from .fix_build import run_detecting_problems

        # Regenerate configure with the newly available macro.
        run_detecting_problems(self.session, ["autoconf", "-f"])

        return True

45
ognibuild/info.py Normal file
View file

@ -0,0 +1,45 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
def run_info(session, buildsystems, fixers=None):
    """Print declared dependencies and outputs for each build system."""
    for buildsystem in buildsystems:
        print("%r:" % buildsystem)

        # Group declared dependencies by kind, when detectable.
        deps = {}
        try:
            declared = buildsystem.get_declared_dependencies(session, fixers=fixers)
            for kind, dep in declared:
                deps.setdefault(kind, []).append(dep)
        except NotImplementedError:
            print(
                "\tUnable to detect declared dependencies for this type of build system"
            )
        if deps:
            print("\tDeclared dependencies:")
            for kind, kind_deps in deps.items():
                print("\t\t%s:" % kind)
                for dep in kind_deps:
                    print("\t\t\t%s" % dep)
            print("")

        # Then the declared outputs, when detectable.
        try:
            outputs = list(buildsystem.get_declared_outputs(session, fixers=fixers))
        except NotImplementedError:
            print("\tUnable to detect declared outputs for this type of build system")
            outputs = []
        if outputs:
            print("\tDeclared outputs:")
            for output in outputs:
                print("\t\t%s" % output)

35
ognibuild/install.py Normal file
View file

@ -0,0 +1,35 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .buildsystem import NoBuildToolsFound, InstallTarget
from typing import Optional
def run_install(session, buildsystems, resolver, fixers, user: bool = False, prefix: Optional[str] = None):
    """Install the first build system's output into the session.

    Args:
      user: install for the current user rather than system-wide
      prefix: optional installation prefix
    Raises:
      NoBuildToolsFound: if `buildsystems` is empty
    """
    # Some things want to write to the user's home directory,
    # e.g. pip caches in ~/.cache
    session.create_home()
    install_target = InstallTarget()
    install_target.user = user
    install_target.prefix = prefix
    for buildsystem in buildsystems:
        buildsystem.install(session, resolver, fixers, install_target)
        # Deliberate: only the first build system is installed.
        return
    raise NoBuildToolsFound()

60
ognibuild/outputs.py Normal file
View file

@ -0,0 +1,60 @@
#!/usr/bin/python
# Copyright (C) 2019-2020 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from . import UpstreamOutput
class BinaryOutput(UpstreamOutput):
    """An executable binary produced by the upstream build."""

    def __init__(self, name):
        super().__init__("binary")
        self.name = name

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.name)

    def __str__(self):
        return "binary: %s" % self.name
class PythonPackageOutput(UpstreamOutput):
    """A Python package produced by the upstream build."""

    def __init__(self, name, python_version=None):
        super().__init__("python-package")
        self.name = name
        self.python_version = python_version

    def __repr__(self):
        return "{}({!r}, python_version={!r})".format(
            type(self).__name__, self.name, self.python_version
        )

    def __str__(self):
        return "python package: %s" % self.name
class RPackageOutput(UpstreamOutput):
    """An R package produced by the upstream build."""

    def __init__(self, name):
        super().__init__("r-package")
        self.name = name

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.name)

    def __str__(self):
        return "R package: %s" % self.name

708
ognibuild/requirements.py Normal file
View file

@ -0,0 +1,708 @@
#!/usr/bin/python
# Copyright (C) 2019-2020 Jelmer Vernooij <jelmer@jelmer.uk>
# encoding: utf-8
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import posixpath
import re
import subprocess
from typing import Optional, List, Set
from . import Requirement
class PythonPackageRequirement(Requirement):
    """Requirement on a Python distribution package."""

    # Distribution name of the required package.
    package: str

    def __init__(self, package, python_version=None, specs=None, minimum_version=None):
        """Create the requirement.

        Args:
          package: distribution name
          python_version: interpreter family ("cpython3", "cpython2",
            "pypy", "pypy3") or None for the default (python3)
          specs: list of (operator, version) tuples
          minimum_version: shorthand for specs=[(">=", minimum_version)];
            overrides `specs` when given
        """
        super(PythonPackageRequirement, self).__init__("python-package")
        self.package = package
        self.python_version = python_version
        if minimum_version is not None:
            specs = [(">=", minimum_version)]
        if specs is None:
            specs = []
        self.specs = specs

    def __repr__(self):
        return "%s(%r, python_version=%r, specs=%r)" % (
            type(self).__name__,
            self.package,
            self.python_version,
            self.specs,
        )

    def __str__(self):
        if self.specs:
            return "python package: %s (%r)" % (self.package, self.specs)
        else:
            return "python package: %s" % (self.package,)

    @classmethod
    def from_requirement_str(cls, text):
        """Parse a requirement string such as "foo>=1.0"."""
        from requirements.requirement import Requirement

        req = Requirement.parse(text)
        return cls(package=req.name, specs=req.specs)

    def met(self, session):
        """Check in `session` whether pkg_resources can satisfy the
        requirement under the selected interpreter."""
        if self.python_version == "cpython3":
            cmd = "python3"
        elif self.python_version == "cpython2":
            cmd = "python2"
        elif self.python_version == "pypy":
            cmd = "pypy"
        elif self.python_version == "pypy3":
            cmd = "pypy3"
        elif self.python_version is None:
            cmd = "python3"
        else:
            raise NotImplementedError
        # Builds e.g. "foo>=1.0,<2.0" — the string pkg_resources expects.
        text = self.package + ",".join(["".join(spec) for spec in self.specs])
        p = session.Popen(
            [cmd, "-c", "import pkg_resources; pkg_resources.require(%r)" % text],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        p.communicate()
        return p.returncode == 0
class LatexPackageRequirement(Requirement):
    """Requirement for a LaTeX package (installable e.g. via tlmgr)."""

    # Name of the LaTeX package.
    package: str

    def __init__(self, package: str):
        # Bug fix: the Requirement base initializer was never invoked,
        # unlike every sibling class in this module, leaving the
        # requirement family unset.
        super(LatexPackageRequirement, self).__init__("latex-package")
        self.package = package

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.package)
class PhpPackageRequirement(Requirement):
    """Requirement for a PHP (PEAR/composer-style) package."""

    def __init__(
        self,
        package: str,
        channel: Optional[str] = None,
        min_version: Optional[str] = None,
        max_version: Optional[str] = None,
    ):
        # Bug fix: the Requirement base initializer was never invoked,
        # unlike every sibling class in this module, leaving the
        # requirement family unset.
        super(PhpPackageRequirement, self).__init__("php-package")
        self.package = package
        self.channel = channel
        self.min_version = min_version
        self.max_version = max_version

    def __repr__(self):
        return "%s(%r, %r, %r, %r)" % (
            type(self).__name__,
            self.package,
            self.channel,
            self.min_version,
            self.max_version,
        )
class BinaryRequirement(Requirement):
    """Requirement for an executable to be available on PATH."""

    # Name of the executable.
    binary_name: str

    def __init__(self, binary_name):
        super(BinaryRequirement, self).__init__("binary")
        self.binary_name = binary_name

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.binary_name)

    def met(self, session):
        # Met when `which` can locate the binary inside the session.
        p = session.Popen(
            ["which", self.binary_name],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        p.communicate()
        return p.returncode == 0
class PerlModuleRequirement(Requirement):
    """Requirement for a Perl module, optionally with a file and @INC paths."""

    module: str
    filename: Optional[str]
    inc: Optional[List[str]]

    def __init__(self, module, filename=None, inc=None):
        super().__init__("perl-module")
        self.module = module
        self.filename = filename
        self.inc = inc

    @property
    def relfilename(self):
        # Foo::Bar -> Foo/Bar.pm
        return self.module.replace("::", "/") + ".pm"

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.module)
class VagueDependencyRequirement(Requirement):
    """A loosely specified dependency known only by name.

    expand() enumerates the concrete requirements the name might
    correspond to (binary, library, pkg-config module, apt packages).
    """

    name: str
    minimum_version: Optional[str] = None

    def __init__(self, name, minimum_version=None):
        super(VagueDependencyRequirement, self).__init__("vague")
        self.name = name
        self.minimum_version = minimum_version

    def expand(self):
        # Names containing spaces are descriptions, not identifiers;
        # nothing sensible can be generated for them.
        if " " not in self.name:
            yield BinaryRequirement(self.name)
            yield LibraryRequirement(self.name)
            yield PkgConfigRequirement(self.name, minimum_version=self.minimum_version)
            if self.name.lower() != self.name:
                # Also try lowercased variants.
                yield BinaryRequirement(self.name.lower())
                yield LibraryRequirement(self.name.lower())
                yield PkgConfigRequirement(self.name.lower(), minimum_version=self.minimum_version)
            # Deferred import; avoids importing the resolver at module load.
            from .resolver.apt import AptRequirement

            yield AptRequirement.simple(self.name.lower(), minimum_version=self.minimum_version)
            # Guess the Debian -dev package name.
            if self.name.lower().startswith('lib'):
                devname = '%s-dev' % self.name.lower()
            else:
                devname = 'lib%s-dev' % self.name.lower()
            yield AptRequirement.simple(devname, minimum_version=self.minimum_version)

    def met(self, session):
        # Met if any concrete expansion is met.
        for x in self.expand():
            if x.met(session):
                return True
        return False

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.name)
class NodePackageRequirement(Requirement):
    """Requirement for an npm package."""

    package: str

    def __init__(self, package):
        super().__init__("npm-package")
        self.package = package

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.package)
class PerlPreDeclaredRequirement(Requirement):
    """Requirement for a name pre-declared by a Module::Install add-on.

    lookup_module() maps the declared name (e.g. "author_tests") to the
    Perl module that provides it.
    """

    name: str

    # TODO(jelmer): Can we obtain this information elsewhere?
    KNOWN_MODULES = {
        'auto_set_repository': 'Module::Install::Repository',
        'author_tests': 'Module::Install::AuthorTests',
        'recursive_author_tests': 'Module::Install::AuthorTests',
        'author_requires': 'Module::Install::AuthorRequires',
        'readme_from': 'Module::Install::ReadmeFromPod',
        'catalyst': 'Module::Install::Catalyst',
        'githubmeta': 'Module::Install::GithubMeta',
        'use_ppport': 'Module::Install::XSUtil',
        'pod_from': 'Module::Install::PodFromEuclid',
        'write_doap_changes': 'Module::Install::DOAPChangeSets',
        'use_test_base': 'Module::Install::TestBase',
        'jsonmeta': 'Module::Install::JSONMETA',
        'extra_tests': 'Module::Install::ExtraTests',
        'auto_set_bugtracker': 'Module::Install::Bugtracker',
    }

    def __init__(self, name):
        super(PerlPreDeclaredRequirement, self).__init__("perl-predeclared")
        self.name = name

    def lookup_module(self):
        """Return the PerlModuleRequirement providing this name.

        Raises:
          KeyError: if the name is not in KNOWN_MODULES
        """
        module = self.KNOWN_MODULES[self.name]
        return PerlModuleRequirement(module=module)

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.name)
class NodeModuleRequirement(Requirement):
    """Requirement for a Node.js module."""

    module: str

    def __init__(self, module):
        super().__init__("npm-module")
        self.module = module

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.module)
class CargoCrateRequirement(Requirement):
    """Requirement for a Rust crate, optionally with features and version."""

    crate: str
    features: Set[str]
    version: Optional[str]

    def __init__(self, crate, features=None, version=None):
        super().__init__("cargo-crate")
        self.crate = crate
        self.features = set() if features is None else features
        self.version = version

    def __repr__(self):
        return "{}({!r}, features={!r}, version={!r})".format(
            type(self).__name__, self.crate, self.features, self.version
        )

    def __str__(self):
        if not self.features:
            return "cargo crate: %s %s" % (self.crate, self.version or "")
        return "cargo crate: %s %s (%s)" % (
            self.crate,
            self.version or "",
            ", ".join(sorted(self.features)),
        )
class PkgConfigRequirement(Requirement):
    """Requirement for a pkg-config module, optionally with a minimum version."""

    module: str

    def __init__(self, module, minimum_version=None):
        super().__init__("pkg-config")
        self.module = module
        self.minimum_version = minimum_version

    def __repr__(self):
        return "{}({!r}, minimum_version={!r})".format(
            type(self).__name__, self.module, self.minimum_version
        )
class PathRequirement(Requirement):
    """Requirement for a specific filesystem path to exist."""

    path: str

    def __init__(self, path):
        super().__init__("path")
        self.path = path

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.path)
class CHeaderRequirement(Requirement):
    """Requirement for a C header file."""

    header: str

    def __init__(self, header):
        super().__init__("c-header")
        self.header = header

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self.header)
class JavaScriptRuntimeRequirement(Requirement):
    """Requirement for a JavaScript runtime."""

    def __init__(self):
        super().__init__("javascript-runtime")
class ValaPackageRequirement(Requirement):
    """Requirement for a Vala package."""

    package: str

    def __init__(self, package: str):
        super().__init__("vala")
        self.package = package
class RubyGemRequirement(Requirement):
    """Requirement for a Ruby gem, optionally with a minimum version."""

    gem: str
    minimum_version: Optional[str]

    # minimum_version now defaults to None: the annotation already declared
    # it Optional, and every caller passing it explicitly is unaffected.
    def __init__(self, gem: str, minimum_version: Optional[str] = None):
        super(RubyGemRequirement, self).__init__("gem")
        self.gem = gem
        self.minimum_version = minimum_version
class GoPackageRequirement(Requirement):
    """Requirement for a Go package, optionally pinned to a version."""

    package: str
    version: Optional[str]

    def __init__(self, package: str, version: Optional[str] = None):
        super().__init__("go-package")
        self.package = package
        self.version = version

    def __str__(self):
        base = "go package: %s" % self.package
        if self.version:
            return base + " (= %s)" % self.version
        return base
class GoRequirement(Requirement):
    """Requirement for the Go toolchain, optionally at a given version."""

    version: Optional[str]

    def __init__(self, version: Optional[str] = None):
        super().__init__("go")
        self.version = version

    def __str__(self):
        return "go %s" % self.version
class DhAddonRequirement(Requirement):
    """Requirement for a debhelper add-on file."""

    path: str

    def __init__(self, path: str):
        super().__init__("dh-addon")
        self.path = path
class PhpClassRequirement(Requirement):
    """Requirement for a PHP class to be available."""

    php_class: str

    def __init__(self, php_class: str):
        super().__init__("php-class")
        self.php_class = php_class
class RPackageRequirement(Requirement):
    """Requirement for an R package, optionally with a minimum version."""

    package: str
    minimum_version: Optional[str]

    def __init__(self, package: str, minimum_version: Optional[str] = None):
        super(RPackageRequirement, self).__init__("r-package")
        self.package = package
        self.minimum_version = minimum_version

    def __repr__(self):
        return "%s(%r, minimum_version=%r)" % (
            type(self).__name__,
            self.package,
            self.minimum_version,
        )

    def __str__(self):
        if self.minimum_version:
            return "R package: %s (>= %s)" % (self.package, self.minimum_version)
        else:
            return "R package: %s" % (self.package,)

    @classmethod
    def from_str(cls, text):
        """Parse "name" or "name (>= version)".

        Raises:
          ValueError: if `text` matches neither form
        """
        # TODO(jelmer): More complex parser
        m = re.fullmatch(r"(.*)\s+\(>=\s+(.*)\)", text)
        if m:
            return cls(m.group(1), m.group(2))
        m = re.fullmatch(r"([^ ]+)", text)
        if m:
            return cls(m.group(1))
        raise ValueError(text)
class OctavePackageRequirement(Requirement):
    """Requirement for an Octave package, optionally with a minimum version."""

    package: str
    minimum_version: Optional[str]

    def __init__(self, package: str, minimum_version: Optional[str] = None):
        super(OctavePackageRequirement, self).__init__("octave-package")
        self.package = package
        self.minimum_version = minimum_version

    def __repr__(self):
        return "%s(%r, minimum_version=%r)" % (
            type(self).__name__,
            self.package,
            self.minimum_version,
        )

    def __str__(self):
        if self.minimum_version:
            return "Octave package: %s (>= %s)" % (self.package, self.minimum_version)
        else:
            return "Octave package: %s" % (self.package,)

    @classmethod
    def from_str(cls, text):
        """Parse "name" or "name (>= version)".

        Raises:
          ValueError: if `text` matches neither form
        """
        # TODO(jelmer): More complex parser
        m = re.fullmatch(r"(.*)\s+\(>=\s+(.*)\)", text)
        if m:
            return cls(m.group(1), m.group(2))
        m = re.fullmatch(r"([^ ]+)", text)
        if m:
            return cls(m.group(1))
        raise ValueError(text)
class LibraryRequirement(Requirement):
    """Requirement for a (shared) library."""

    library: str

    def __init__(self, library: str):
        super().__init__("lib")
        self.library = library
class StaticLibraryRequirement(Requirement):
    """Requirement for a static library and its archive filename."""

    library: str
    filename: str

    def __init__(self, library: str, filename: str):
        super().__init__("static-lib")
        self.library = library
        self.filename = filename
class RubyFileRequirement(Requirement):
    """Requirement for a Ruby source file."""

    filename: str

    def __init__(self, filename: str):
        super().__init__("ruby-file")
        self.filename = filename
class XmlEntityRequirement(Requirement):
    """Requirement for an XML entity identified by URL."""

    url: str

    def __init__(self, url: str):
        super().__init__("xml-entity")
        self.url = url
class SprocketsFileRequirement(Requirement):
    """Requirement for a Sprockets asset file of a given content type."""

    content_type: str
    name: str

    def __init__(self, content_type: str, name: str):
        super().__init__("sprockets-file")
        self.content_type = content_type
        self.name = name
class JavaClassRequirement(Requirement):
    """Requirement for a Java class to be on the classpath."""

    classname: str

    def __init__(self, classname: str):
        super().__init__("java-class")
        self.classname = classname
class CMakefileRequirement(Requirement):
    """Requirement for a CMake module/config file."""

    filename: str

    def __init__(self, filename: str):
        super().__init__("cmake-file")
        self.filename = filename
class HaskellPackageRequirement(Requirement):
    """Requirement for a Haskell package."""

    package: str

    def __init__(self, package: str, specs=None):
        super(HaskellPackageRequirement, self).__init__("haskell-package")
        self.package = package
        # Raw version-constraint tokens, if any.
        self.specs = specs

    @classmethod
    def from_string(cls, text):
        # NOTE(review): sibling classes name their parser `from_str`;
        # renaming would break callers, so the inconsistency is only noted.
        parts = text.split()
        return cls(parts[0], specs=parts[1:])
class MavenArtifactRequirement(Requirement):
    """Requirement for a Maven artifact (group:artifact[:version[:kind]])."""

    group_id: str
    artifact_id: str
    version: Optional[str]
    kind: Optional[str]

    def __init__(self, group_id, artifact_id, version=None, kind=None):
        super(MavenArtifactRequirement, self).__init__("maven-artifact")
        self.group_id = group_id
        self.artifact_id = artifact_id
        self.version = version
        self.kind = kind

    def __str__(self):
        # Note: `kind` is not included in the string form.
        return "maven requirement: %s:%s:%s" % (
            self.group_id,
            self.artifact_id,
            self.version,
        )

    @classmethod
    def from_str(cls, text):
        """Parse colon-separated Maven coordinates (2-4 parts)."""
        return cls.from_tuple(text.split(":"))

    @classmethod
    def from_tuple(cls, parts):
        """Build from (group, artifact), (group, artifact, version) or
        (group, artifact, kind, version); kind defaults to "jar".

        Raises:
          ValueError: for any other number of parts
        """
        if len(parts) == 4:
            (group_id, artifact_id, kind, version) = parts
        elif len(parts) == 3:
            (group_id, artifact_id, version) = parts
            kind = "jar"
        elif len(parts) == 2:
            version = None
            (group_id, artifact_id) = parts
            kind = "jar"
        else:
            raise ValueError("invalid number of parts to artifact %r" % parts)
        return cls(group_id, artifact_id, version, kind)
class GnomeCommonRequirement(Requirement):
    """Requirement for gnome-common."""

    def __init__(self):
        super().__init__("gnome-common")
class JDKFileRequirement(Requirement):
    """Requirement for a specific file inside a JDK installation."""

    jdk_path: str
    filename: str

    def __init__(self, jdk_path: str, filename: str):
        super().__init__("jdk-file")
        self.jdk_path = jdk_path
        self.filename = filename

    @property
    def path(self):
        # Joined with posixpath (forward slashes), matching the original.
        return posixpath.join(self.jdk_path, self.filename)
class JDKRequirement(Requirement):
    """Requirement for a JDK."""

    def __init__(self):
        super().__init__("jdk")
class JRERequirement(Requirement):
    """Requirement for a JRE."""

    def __init__(self):
        super().__init__("jre")
class QTRequirement(Requirement):
    """Requirement for Qt."""

    def __init__(self):
        super().__init__("qt")
class X11Requirement(Requirement):
    """Requirement for X11."""

    def __init__(self):
        super().__init__("x11")
class CertificateAuthorityRequirement(Requirement):
    """Requirement for a CA certificate identified by URL."""

    def __init__(self, url):
        super().__init__("ca-cert")
        self.url = url
class PerlFileRequirement(Requirement):
    """Requirement for a Perl source file."""

    filename: str

    def __init__(self, filename: str):
        super().__init__("perl-file")
        self.filename = filename
class AutoconfMacroRequirement(Requirement):
    """Requirement for an autoconf (m4) macro."""

    macro: str

    def __init__(self, macro: str):
        super().__init__("autoconf-macro")
        self.macro = macro
class LibtoolRequirement(Requirement):
    """Requirement for libtool."""

    def __init__(self):
        super().__init__("libtool")
class IntrospectionTypelibRequirement(Requirement):
    """Requirement for a GObject Introspection typelib for `library`."""

    def __init__(self, library):
        # Bug fix: the Requirement base initializer was never invoked,
        # unlike every other Requirement subclass in this module, so the
        # requirement family was left unset.
        super(IntrospectionTypelibRequirement, self).__init__(
            "introspection-typelib"
        )
        self.library = library
class PythonModuleRequirement(Requirement):
    """Requirement that a Python module be importable."""

    # Importable module name (dotted path).
    module: str
    # Interpreter family, as in PythonPackageRequirement.
    python_version: Optional[str]
    # NOTE(review): stored but not checked by met().
    minimum_version: Optional[str]

    def __init__(self, module, python_version=None, minimum_version=None):
        super(PythonModuleRequirement, self).__init__("python-module")
        self.module = module
        self.python_version = python_version
        self.minimum_version = minimum_version

    def met(self, session):
        """Check in `session` whether `import <module>` succeeds under the
        selected interpreter."""
        if self.python_version == "cpython3":
            cmd = "python3"
        elif self.python_version == "cpython2":
            cmd = "python2"
        elif self.python_version == "pypy":
            cmd = "pypy"
        elif self.python_version == "pypy3":
            cmd = "pypy3"
        elif self.python_version is None:
            cmd = "python3"
        else:
            raise NotImplementedError
        p = session.Popen(
            [cmd, "-c", "import %s" % self.module],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        p.communicate()
        return p.returncode == 0

    def __repr__(self):
        return "%s(%r, python_version=%r, minimum_version=%r)" % (
            type(self).__name__, self.module, self.python_version,
            self.minimum_version)
class BoostComponentRequirement(Requirement):
    """Requirement for a Boost component."""

    name: str

    def __init__(self, name):
        super().__init__("boost-component")
        self.name = name

View file

@ -0,0 +1,566 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import subprocess
from .. import UnidentifiedError
from ..fix_build import run_detecting_problems
class UnsatisfiedRequirements(Exception):
    """Raised when a resolver cannot satisfy some requirements."""

    def __init__(self, reqs):
        # Keep the unsatisfied requirements for callers to inspect.
        self.requirements = reqs
class Resolver(object):
    """Interface for installing and explaining requirements."""

    def install(self, requirements):
        # Subclasses install `requirements`, raising UnsatisfiedRequirements
        # for those they cannot handle.
        raise NotImplementedError(self.install)

    def resolve(self, requirement):
        raise NotImplementedError(self.resolve)

    def explain(self, requirements):
        # Subclasses yield the commands they would run for `requirements`.
        raise NotImplementedError(self.explain)

    def env(self):
        """Extra environment variables to set when using this resolver."""
        return {}
class CPANResolver(Resolver):
    """Resolver that installs Perl modules with the cpan client.

    Args:
      session: session to run commands in
      user_local: install into the user's library rather than as root
      skip_tests: pass -T to cpan to skip running module tests
    """

    def __init__(self, session, user_local=False, skip_tests=True):
        self.session = session
        self.user_local = user_local
        self.skip_tests = skip_tests

    def __str__(self):
        return "cpan"

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.session)

    def _cmd(self, reqs):
        # Build the `cpan -i [-T] <modules...>` command line.
        ret = ["cpan", "-i"]
        if self.skip_tests:
            ret.append("-T")
        ret.extend([req.module for req in reqs])
        return ret

    def explain(self, requirements):
        from ..requirements import PerlModuleRequirement

        perlreqs = []
        for requirement in requirements:
            if not isinstance(requirement, PerlModuleRequirement):
                continue
            perlreqs.append(requirement)
        if perlreqs:
            yield (self._cmd(perlreqs), perlreqs)

    def install(self, requirements):
        from ..requirements import PerlModuleRequirement

        # Run cpan non-interactively and neutralize local::lib-style options.
        env = {
            "PERL_MM_USE_DEFAULT": "1",
            "PERL_MM_OPT": "",
            "PERL_MB_OPT": "",
        }
        if not self.user_local:
            user = "root"
        else:
            user = None
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, PerlModuleRequirement):
                # Not a Perl module; report as unsatisfied at the end.
                missing.append(requirement)
                continue
            cmd = self._cmd([requirement])
            logging.info("CPAN: running %r", cmd)
            run_detecting_problems(
                self.session,
                cmd,
                env=env,
                user=user,
            )
        if missing:
            raise UnsatisfiedRequirements(missing)
class TlmgrResolver(Resolver):
    """Resolver that installs LaTeX packages with tlmgr.

    Args:
      session: session to run commands in
      repository: tlmgr repository (name or http(s) URL)
      user_local: install into the user tree (--usermode) instead of
        system-wide
    """

    def __init__(self, session, repository: str, user_local=False):
        self.session = session
        self.user_local = user_local
        self.repository = repository

    def __str__(self):
        if self.repository.startswith('http://') or self.repository.startswith('https://'):
            return 'tlmgr(%r)' % self.repository
        else:
            return self.repository

    def __repr__(self):
        return "%s(%r, %r)" % (
            type(self).__name__, self.session, self.repository)

    def _cmd(self, reqs):
        # Build `tlmgr --repository=<repo> install [--usermode] <pkgs...>`.
        ret = ["tlmgr", "--repository=%s" % self.repository, "install"]
        if self.user_local:
            ret.append("--usermode")
        ret.extend([req.package for req in reqs])
        return ret

    def explain(self, requirements):
        from ..requirements import LatexPackageRequirement

        latexreqs = []
        for requirement in requirements:
            if not isinstance(requirement, LatexPackageRequirement):
                continue
            latexreqs.append(requirement)
        if latexreqs:
            yield (self._cmd(latexreqs), latexreqs)

    def install(self, requirements):
        from ..requirements import LatexPackageRequirement

        if not self.user_local:
            user = "root"
        else:
            user = None
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, LatexPackageRequirement):
                # Not a LaTeX package; report as unsatisfied at the end.
                missing.append(requirement)
                continue
            cmd = self._cmd([requirement])
            logging.info("tlmgr: running %r", cmd)
            try:
                run_detecting_problems(self.session, cmd, user=user)
            except UnidentifiedError as e:
                if "tlmgr: user mode not initialized, please read the documentation!" in e.lines:
                    self.session.check_call(['tlmgr', 'init-usertree'])
                    # Bug fix: after initializing the user tree, retry the
                    # install command; previously the failed install was
                    # never re-run, so the package stayed uninstalled.
                    run_detecting_problems(self.session, cmd, user=user)
                else:
                    raise
        if missing:
            raise UnsatisfiedRequirements(missing)
class CTANResolver(TlmgrResolver):
    """TlmgrResolver preconfigured for the "ctan" repository."""

    def __init__(self, session, user_local=False):
        super().__init__(session, "ctan", user_local=user_local)
class RResolver(Resolver):
    """Resolver that installs R packages with ``install.packages()``.

    ``repos`` is the CRAN-style repository URL passed to R; subclasses pin
    a concrete repository (CRAN, Bioconductor).
    """
    def __init__(self, session, repos, user_local=False):
        self.session = session
        self.repos = repos
        # When True, run as the invoking user instead of root.
        self.user_local = user_local
    def __str__(self):
        return "cran"
    def __repr__(self):
        return "%s(%r, %r)" % (type(self).__name__, self.session, self.repos)
    def _cmd(self, req):
        # TODO(jelmer): Handle self.user_local
        # %r wraps the repository URL in quotes so it forms a valid R
        # string literal inside the -e expression.
        return [
            "R",
            "-e",
            "install.packages('%s', repos=%r)" % (req.package, self.repos),
        ]
    def explain(self, requirements):
        from ..requirements import RPackageRequirement
        rreqs = []
        for requirement in requirements:
            if not isinstance(requirement, RPackageRequirement):
                continue
            rreqs.append(requirement)
        if rreqs:
            # NOTE(review): unlike the other resolvers, this yields only a
            # list of commands rather than a (commands, requirements)
            # tuple — confirm callers of explain() accept this shape.
            yield ([self._cmd(req) for req in rreqs])
    def install(self, requirements):
        """Install R package requirements; everything else is collected
        into an UnsatisfiedRequirements error."""
        from ..requirements import RPackageRequirement
        if self.user_local:
            user = None
        else:
            user = "root"
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, RPackageRequirement):
                missing.append(requirement)
                continue
            cmd = self._cmd(requirement)
            logging.info("RResolver(%r): running %r", self.repos, cmd)
            run_detecting_problems(self.session, cmd, user=user)
        if missing:
            raise UnsatisfiedRequirements(missing)
class OctaveForgeResolver(Resolver):
    """Resolver that installs Octave packages from Octave Forge."""
    def __init__(self, session, user_local=False):
        self.session = session
        # When True, run as the invoking user instead of root.
        self.user_local = user_local
    def __str__(self):
        return "octave-forge"
    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.session)
    def _cmd(self, req):
        # TODO(jelmer): Handle self.user_local
        return ["octave-cli", "--eval", "pkg install -forge %s" % req.package]
    def explain(self, requirements):
        from ..requirements import OctavePackageRequirement
        rreqs = []
        for requirement in requirements:
            if not isinstance(requirement, OctavePackageRequirement):
                continue
            rreqs.append(requirement)
        if rreqs:
            # NOTE(review): yields only a list of commands, not the
            # (commands, requirements) tuple other resolvers produce —
            # confirm callers accept this shape.
            yield ([self._cmd(req) for req in rreqs])
    def install(self, requirements):
        """Install Octave package requirements; everything else is
        collected into an UnsatisfiedRequirements error."""
        from ..requirements import OctavePackageRequirement
        if self.user_local:
            user = None
        else:
            user = "root"
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, OctavePackageRequirement):
                missing.append(requirement)
                continue
            cmd = self._cmd(requirement)
            logging.info("Octave: running %r", cmd)
            run_detecting_problems(self.session, cmd, user=user)
        if missing:
            raise UnsatisfiedRequirements(missing)
class CRANResolver(RResolver):
    """RResolver preconfigured for the main CRAN repository."""

    def __init__(self, session, user_local=False):
        super().__init__(session, "http://cran.r-project.org", user_local=user_local)
class BioconductorResolver(RResolver):
    """RResolver preconfigured for the Bioconductor repository."""

    def __init__(self, session, user_local=False):
        super().__init__(
            session, "https://hedgehog.fhcrc.org/bioconductor", user_local=user_local)
class HackageResolver(Resolver):
    """Resolver that installs Haskell packages with ``cabal install``."""

    def __init__(self, session, user_local=False):
        self.session = session
        self.user_local = user_local

    def __str__(self):
        return "hackage"

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.session)

    def _cmd(self, reqs):
        # --user installs into the per-user package database.
        extra_args = ["--user"] if self.user_local else []
        return ["cabal", "install"] + extra_args + [req.package for req in reqs]

    def install(self, requirements):
        """Install Haskell package requirements one at a time; anything
        else is raised as UnsatisfiedRequirements."""
        from ..requirements import HaskellPackageRequirement
        user = None if self.user_local else "root"
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, HaskellPackageRequirement):
                missing.append(requirement)
                continue
            cmd = self._cmd([requirement])
            logging.info("Hackage: running %r", cmd)
            run_detecting_problems(self.session, cmd, user=user)
        if missing:
            raise UnsatisfiedRequirements(missing)

    def explain(self, requirements):
        """Yield one (command, requirements) pair covering every Haskell
        package requirement."""
        from ..requirements import HaskellPackageRequirement
        haskellreqs = [
            requirement
            for requirement in requirements
            if isinstance(requirement, HaskellPackageRequirement)
        ]
        if haskellreqs:
            yield (self._cmd(haskellreqs), haskellreqs)
class PypiResolver(Resolver):
    """Resolver that installs Python packages from PyPI via pip."""

    def __init__(self, session, user_local=False):
        self.session = session
        self.user_local = user_local

    def __str__(self):
        return "pypi"

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.session)

    def _cmd(self, reqs):
        # --user targets the per-user site-packages directory.
        extra_args = ["--user"] if self.user_local else []
        return ["pip", "install"] + extra_args + [req.package for req in reqs]

    def install(self, requirements):
        """Install python package requirements one at a time.

        Failed installs and non-python requirements are both reported via
        UnsatisfiedRequirements.
        """
        from ..requirements import PythonPackageRequirement
        user = None if self.user_local else "root"
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, PythonPackageRequirement):
                missing.append(requirement)
                continue
            cmd = self._cmd([requirement])
            logging.info("pip: running %r", cmd)
            try:
                run_detecting_problems(self.session, cmd, user=user)
            except subprocess.CalledProcessError:
                missing.append(requirement)
        if missing:
            raise UnsatisfiedRequirements(missing)

    def explain(self, requirements):
        """Yield one (command, requirements) pair covering every python
        package requirement."""
        from ..requirements import PythonPackageRequirement
        pyreqs = [
            requirement
            for requirement in requirements
            if isinstance(requirement, PythonPackageRequirement)
        ]
        if pyreqs:
            yield (self._cmd(pyreqs), pyreqs)
class GoResolver(Resolver):
    """Resolver that fetches Go packages with ``go get``."""

    def __init__(self, session, user_local):
        self.session = session
        self.user_local = user_local

    def __str__(self):
        return "go"

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.session)

    def install(self, requirements):
        """Run ``go get`` for each GoPackageRequirement; anything else is
        raised as UnsatisfiedRequirements."""
        from ..requirements import GoPackageRequirement
        if self.user_local:
            env = {}
        else:
            # TODO(jelmer): Isn't this Debian-specific?
            env = {"GOPATH": "/usr/share/gocode"}
        missing = []
        for requirement in requirements:
            if not isinstance(requirement, GoPackageRequirement):
                missing.append(requirement)
                continue
            cmd = ["go", "get", requirement.package]
            logging.info("go: running %r", cmd)
            run_detecting_problems(self.session, cmd, env=env)
        if missing:
            raise UnsatisfiedRequirements(missing)

    def explain(self, requirements):
        """Yield one ``go get`` command covering every Go package
        requirement."""
        from ..requirements import GoPackageRequirement
        goreqs = [
            requirement
            for requirement in requirements
            if isinstance(requirement, GoPackageRequirement)
        ]
        if goreqs:
            yield (["go", "get"] + [req.package for req in goreqs], goreqs)
# Maps command/binary names to the npm package that provides them; used by
# NpmResolver to translate BinaryRequirements into NodePackageRequirements.
NPM_COMMAND_PACKAGES = {
    "del-cli": "del-cli",
    "husky": "husky",
    "cross-env": "cross-env",
}
class NpmResolver(Resolver):
    """Resolver that installs Node packages globally with ``npm -g``."""

    def __init__(self, session, user_local=False):
        self.session = session
        self.user_local = user_local
        # TODO(jelmer): Handle user_local

    def __str__(self):
        return "npm"

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.session)

    def install(self, requirements):
        """Install node package requirements.

        BinaryRequirements known from NPM_COMMAND_PACKAGES and
        NodeModuleRequirements are first translated into
        NodePackageRequirements; anything left untranslated is reported
        via UnsatisfiedRequirements.
        """
        from ..requirements import (
            NodePackageRequirement,
            NodeModuleRequirement,
            BinaryRequirement,
        )
        if self.user_local:
            user = None
        else:
            user = "root"
        missing = []
        for requirement in requirements:
            if isinstance(requirement, BinaryRequirement):
                try:
                    package = NPM_COMMAND_PACKAGES[requirement.binary_name]
                except KeyError:
                    pass
                else:
                    requirement = NodePackageRequirement(package)
            if isinstance(requirement, NodeModuleRequirement):
                # TODO: Is this legit?
                # Map a module path to its providing package: scoped
                # modules ("@scope/pkg/...") keep the first two components.
                parts = requirement.module.split("/")
                if parts[0].startswith('@'):
                    requirement = NodePackageRequirement('/'.join(parts[:2]))
                else:
                    requirement = NodePackageRequirement(parts[0])
            if not isinstance(requirement, NodePackageRequirement):
                missing.append(requirement)
                continue
            cmd = ["npm", "-g", "install", requirement.package]
            logging.info("npm: running %r", cmd)
            run_detecting_problems(self.session, cmd, user=user)
        if missing:
            raise UnsatisfiedRequirements(missing)

    def explain(self, requirements):
        """Yield one ``npm -g install`` command covering every node
        package requirement.

        Fixed: this previously looked up ``requirement.command`` in
        NPM_COMMAND_PACKAGES, but NodePackageRequirement carries a
        ``package`` attribute (as install() relies on), so explain()
        raised AttributeError — and the command→package map is the wrong
        direction for package requirements anyway.
        """
        from ..requirements import NodePackageRequirement
        nodereqs = []
        packages = []
        for requirement in requirements:
            if not isinstance(requirement, NodePackageRequirement):
                continue
            nodereqs.append(requirement)
            packages.append(requirement.package)
        if nodereqs:
            yield (["npm", "-g", "install"] + packages, nodereqs)
class StackedResolver(Resolver):
    """Compose several resolvers, trying each in turn."""

    def __init__(self, subs):
        self.subs = subs

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.subs)

    def __str__(self):
        return "[" + ", ".join(map(str, self.subs)) + "]"

    def env(self):
        merged = {}
        # Reversed so earlier resolvers override later ones
        for resolver in reversed(self.subs):
            merged.update(resolver.env())
        return merged

    def explain(self, requirements):
        """Chain the explanations of every stacked resolver."""
        for resolver in self.subs:
            yield from resolver.explain(requirements)

    def install(self, requirements):
        """Let each resolver install what it can; whatever the last one
        left unsatisfied is re-raised."""
        remaining = requirements
        for resolver in self.subs:
            try:
                resolver.install(remaining)
            except UnsatisfiedRequirements as err:
                remaining = err.requirements
            else:
                return
        if remaining:
            raise UnsatisfiedRequirements(remaining)
# Resolvers that install dependencies using each language ecosystem's own
# tooling (as opposed to the distribution package manager).
NATIVE_RESOLVER_CLS = [
    CPANResolver,
    CTANResolver,
    PypiResolver,
    NpmResolver,
    GoResolver,
    HackageResolver,
    CRANResolver,
    BioconductorResolver,
    OctaveForgeResolver,
]
def native_resolvers(session, user_local):
    """Stack every language-native resolver for *session*."""
    return StackedResolver(
        [resolver_cls(session, user_local) for resolver_cls in NATIVE_RESOLVER_CLS]
    )
def auto_resolver(session, explain=False):
    """Pick a sensible resolver stack for *session*.

    Apt is included when system-wide installation is possible (schroot
    session, running as root) or when merely explaining; otherwise only
    per-user native resolvers are stacked.
    """
    # if session is SchrootSession or if we're root, use apt
    from .apt import AptResolver
    from ..session.schroot import SchrootSession
    from ..session import get_user
    user = get_user(session)
    # TODO(jelmer): Check VIRTUAL_ENV, and prioritize PypiResolver if
    # present?
    user_local = not (
        isinstance(session, SchrootSession) or user == "root" or explain
    )
    resolvers = []
    if not user_local:
        resolvers.append(AptResolver.from_session(session))
    resolvers.extend([kls(session, user_local) for kls in NATIVE_RESOLVER_CLS])
    return StackedResolver(resolvers)

848
ognibuild/resolver/apt.py Normal file
View file

@ -0,0 +1,848 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from itertools import chain
import logging
import os
import posixpath
import re
from typing import Optional, List
from debian.changelog import Version
from debian.deb822 import PkgRelation
from ..debian.apt import AptManager
from . import Resolver, UnsatisfiedRequirements
from ..requirements import (
Requirement,
CargoCrateRequirement,
BinaryRequirement,
CHeaderRequirement,
PkgConfigRequirement,
PathRequirement,
JavaScriptRuntimeRequirement,
ValaPackageRequirement,
RubyGemRequirement,
GoPackageRequirement,
GoRequirement,
DhAddonRequirement,
PhpClassRequirement,
PhpPackageRequirement,
RPackageRequirement,
NodeModuleRequirement,
NodePackageRequirement,
LibraryRequirement,
BoostComponentRequirement,
StaticLibraryRequirement,
RubyFileRequirement,
XmlEntityRequirement,
SprocketsFileRequirement,
JavaClassRequirement,
CMakefileRequirement,
HaskellPackageRequirement,
MavenArtifactRequirement,
GnomeCommonRequirement,
JDKFileRequirement,
JDKRequirement,
JRERequirement,
QTRequirement,
X11Requirement,
PerlModuleRequirement,
PerlFileRequirement,
AutoconfMacroRequirement,
PythonModuleRequirement,
PythonPackageRequirement,
CertificateAuthorityRequirement,
LibtoolRequirement,
VagueDependencyRequirement,
PerlPreDeclaredRequirement,
IntrospectionTypelibRequirement,
)
class AptRequirement(Requirement):
    """A requirement expressed as a Debian package relation.

    ``relations`` follows python-debian's PkgRelation structure: a list
    of AND-ed groups, each group a list of OR-ed alternative dicts with
    at least a "name" key and optionally a ("op", version) "version".
    """
    def __init__(self, relations):
        super(AptRequirement, self).__init__("apt")
        if not isinstance(relations, list):
            raise TypeError(relations)
        self.relations = relations
    @classmethod
    def simple(cls, package, minimum_version=None):
        # Single package, optionally constrained to ">= minimum_version".
        rel = {"name": package}
        if minimum_version is not None:
            rel["version"] = (">=", minimum_version)
        return cls([[rel]])
    @classmethod
    def from_str(cls, text):
        # Parse a dependency string such as "foo (>= 1.0) | bar".
        return cls(PkgRelation.parse_relations(text))
    def pkg_relation_str(self):
        """Render the relations back to Debian dependency syntax."""
        return PkgRelation.str(self.relations)
    def __hash__(self):
        # Hash on the canonical string form, consistent with __eq__ below.
        return hash((type(self), self.pkg_relation_str()))
    def __eq__(self, other):
        # NOTE(review): isinstance(self, type(other)) makes this comparison
        # asymmetric with respect to subclasses — confirm that is intended.
        return isinstance(self, type(other)) and self.relations == other.relations
    def __str__(self):
        return "apt requirement: %s" % self.pkg_relation_str()
    def __repr__(self):
        return "%s.from_str(%r)" % (type(self).__name__, self.pkg_relation_str())
    def package_names(self):
        """Yield every package name mentioned in any alternative."""
        for rel in self.relations:
            for entry in rel:
                yield entry["name"]
    def touches_package(self, package):
        """True if *package* appears anywhere in the relations."""
        for name in self.package_names():
            if name == package:
                return True
        return False
    def satisfied_by(self, binaries, version):
        """Check whether *binaries* (apt stanza dicts with "Package" and
        optional "Provides") satisfy every AND-ed group.

        Versions are not checked yet (see TODO below); *version* is
        currently unused by the matching.
        """
        def binary_pkg_matches(entry, binary):
            # TODO(jelmer): check versions
            if entry['name'] == binary['Package']:
                return True
            # A virtual package satisfied via Provides also counts.
            for provides_top in PkgRelation.parse_relations(
                    binary.get('Provides', '')):
                for provides in provides_top:
                    if entry['name'] == provides['name']:
                        return True
            return False
        # Every AND group must have at least one satisfied alternative.
        for rel in self.relations:
            for entry in rel:
                if any(binary_pkg_matches(entry, binary) for binary in binaries):
                    break
            else:
                return False
        return True
def resolve_perl_predeclared_req(apt_mgr, req):
    """Resolve a predeclared perl function by first mapping it to the
    module that declares it; returns None when the mapping is unknown."""
    try:
        req = req.lookup_module()
    except KeyError:
        logging.warning(
            'Unable to map predeclared function %s to a perl module', req.name)
        return None
    return resolve_perl_module_req(apt_mgr, req)
def find_package_names(
    apt_mgr: AptManager, paths: List[str], regex: bool = False, case_insensitive=False
) -> List[str]:
    """Return names of packages shipping any of *paths*.

    *paths* are literal file paths, or regexes when *regex* is True.
    """
    if not isinstance(paths, list):
        raise TypeError(paths)
    return apt_mgr.get_packages_for_paths(paths, regex, case_insensitive)
def find_reqs_simple(
    apt_mgr: AptManager,
    paths: List[str],
    regex: bool = False,
    minimum_version=None,
    case_insensitive=False,
) -> List[str]:
    """Like find_package_names, but wrap each found package as a simple
    AptRequirement (optionally with a minimum version constraint)."""
    if not isinstance(paths, list):
        raise TypeError(paths)
    return [
        AptRequirement.simple(package, minimum_version=minimum_version)
        for package in find_package_names(apt_mgr, paths, regex, case_insensitive)
    ]
def python_spec_to_apt_rels(pkg_name, specs):
    """Translate PEP 440 version specifiers into Debian package relations.

    *specs* is a sequence of (operator, version) pairs.  The result uses
    PkgRelation structure: the outer list is AND-ed, each inner list is a
    group of OR-ed alternatives.
    """
    # TODO(jelmer): Dealing with epoch, etc?
    if not specs:
        return [[{"name": pkg_name}]]
    rels = []
    for spec in specs:
        if spec[0] == "~=":
            # PEP 440: For a given release identifier V.N , the compatible
            # release clause is approximately equivalent to the pair of
            # comparison clauses: >= V.N, == V.*
            parts = spec[1].split(".")
            parts.pop(-1)
            parts[-1] = str(int(parts[-1]) + 1)
            next_maj_deb_version = Version(".".join(parts))
            deb_version = Version(spec[1])
            rels.extend(
                [[{"name": pkg_name, "version": (">=", deb_version)}],
                 [{"name": pkg_name, "version": ("<<", next_maj_deb_version)}]])
        elif spec[0] == "!=":
            deb_version = Version(spec[1])
            # Fixed: "!=" means strictly-greater OR strictly-less.  These
            # were previously emitted as two AND-ed groups ((>> v) AND
            # (<< v)), which is unsatisfiable; they must be OR-ed
            # alternatives within a single group.
            rels.append(
                [{"name": pkg_name, "version": (">>", deb_version)},
                 {"name": pkg_name, "version": ("<<", deb_version)}])
        elif spec[1].endswith(".*") and spec[0] == "==":
            # Wildcard equality: bound between V and the next release of
            # the preceding component.
            s = spec[1].split(".")
            s.pop(-1)
            n = list(s)
            n[-1] = str(int(n[-1]) + 1)
            rels.extend(
                [[{"name": pkg_name, "version": (">=", Version(".".join(s)))}],
                 [{"name": pkg_name, "version": ("<<", Version(".".join(n)))}]])
        else:
            # Direct operator translation; Debian uses << / >> for strict
            # comparisons and "=" for equality.
            c = {">=": ">=", "<=": "<=", "<": "<<", ">": ">>", "==": "="}[spec[0]]
            deb_version = Version(spec[1])
            rels.append([{"name": pkg_name, "version": (c, deb_version)}])
    return rels
def get_package_for_python_package(
    apt_mgr, package, python_version: Optional[str], specs=None
):
    """Find apt requirements providing the Python distribution *package*.

    Matches the .egg-info entries shipped by Debian python packages;
    *python_version* selects the interpreter ("pypy", "cpython2",
    "cpython3") or None for all of them.  *specs* are PEP 440
    (operator, version) pairs applied to each found package.
    """
    # Debian installs egg-info with underscores in place of dashes.
    escaped = re.escape(package.replace("-", "_"))
    pypy_regex = "/usr/lib/pypy/dist-packages/%s-.*.egg-info" % escaped
    cpython2_regex = "/usr/lib/python2\\.[0-9]/dist-packages/%s-.*.egg-info" % escaped
    cpython3_regex = "/usr/lib/python3/dist-packages/%s-.*.egg-info" % escaped
    if python_version == "pypy":
        paths = [pypy_regex]
    elif python_version == "cpython2":
        paths = [cpython2_regex]
    elif python_version == "cpython3":
        paths = [cpython3_regex]
    elif python_version is None:
        paths = [cpython3_regex, cpython2_regex, pypy_regex]
    else:
        raise NotImplementedError("unsupported python version %s" % python_version)
    names = find_package_names(apt_mgr, paths, regex=True, case_insensitive=True)
    return [AptRequirement(python_spec_to_apt_rels(name, specs)) for name in names]
def get_package_for_python_module(apt_mgr, module, python_version, specs):
    """Find apt requirements providing an importable Python *module*.

    Searches dist-packages, lib-dynload and the interpreter's own library
    directories for the module's .py / __init__.py / extension file.
    *python_version* is "cpython3", "cpython2", "pypy" or None (all).
    """
    # Module "a.b.c" lives at path "a/b/c"; escape it for regex use.
    escaped_path = re.escape(module.replace(".", "/"))
    cpython3_regexes = [
        posixpath.join(
            "/usr/lib/python3/dist-packages", escaped_path, "__init__.py"
        ),
        posixpath.join("/usr/lib/python3/dist-packages", escaped_path + ".py"),
        posixpath.join(
            "/usr/lib/python3\\.[0-9]+/lib-dynload",
            escaped_path + "\\.cpython-.*\\.so",
        ),
        posixpath.join("/usr/lib/python3\\.[0-9]+/", escaped_path + ".py"),
        posixpath.join("/usr/lib/python3\\.[0-9]+/", escaped_path, "__init__.py"),
    ]
    cpython2_regexes = [
        posixpath.join(
            "/usr/lib/python2\\.[0-9]/dist-packages", escaped_path, "__init__.py"
        ),
        posixpath.join(
            "/usr/lib/python2\\.[0-9]/dist-packages", escaped_path + ".py"
        ),
        # Fixed: this pattern previously read "python2.\\.[0-9]" (a stray
        # "." before the escaped dot), which can never match paths like
        # "/usr/lib/python2.7/lib-dynload".
        posixpath.join(
            "/usr/lib/python2\\.[0-9]/lib-dynload", escaped_path + ".so"
        ),
    ]
    pypy_regexes = [
        posixpath.join("/usr/lib/pypy/dist-packages", escaped_path, "__init__.py"),
        posixpath.join("/usr/lib/pypy/dist-packages", escaped_path + ".py"),
        posixpath.join(
            "/usr/lib/pypy/dist-packages", escaped_path + "\\.pypy-.*\\.so"
        ),
    ]
    if python_version == "cpython3":
        paths = cpython3_regexes
    elif python_version == "cpython2":
        paths = cpython2_regexes
    elif python_version == "pypy":
        paths = pypy_regexes
    elif python_version is None:
        paths = cpython3_regexes + cpython2_regexes + pypy_regexes
    else:
        raise AssertionError("unknown python version %r" % python_version)
    names = find_package_names(apt_mgr, paths, regex=True)
    return [AptRequirement(python_spec_to_apt_rels(name, specs)) for name in names]
# Hand-maintained mapping from vague, free-form dependency names (as they
# appear in e.g. configure or CMake error output) to the Debian dev
# package that provides them.
vague_map = {
    "the Gnu Scientific Library": "libgsl-dev",
    "the required FreeType library": "libfreetype-dev",
    "the Boost C++ libraries": "libboost-dev",
    # TODO(jelmer): Support resolving virtual packages
    "PythonLibs": "libpython3-dev",
    # Fixed: ZLIB (the zlib compression library, e.g. CMake's FindZLIB)
    # was mapped to libz3-dev, which is the Z3 theorem prover's dev
    # package; zlib's Debian dev package is zlib1g-dev.
    "ZLIB": "zlib1g-dev",
    "Osmium": "libosmium2-dev",
    "glib": "libglib2.0-dev",
    # TODO(jelmer): For Python, check minimum_version and map to python 2 or python 3
    "Python": "libpython3-dev",
    "Lua": "liblua5.4-dev",
}
def resolve_vague_dep_req(apt_mgr, req):
    """Resolve a vaguely-described dependency.

    Tries, in order: the hand-maintained vague_map, the requirement's own
    expansions, and — if nothing matched — a case-insensitive scan for a
    matching versioned pkg-config file.
    """
    name = req.name
    options = []
    if name in vague_map:
        options.append(AptRequirement.simple(vague_map[name], minimum_version=req.minimum_version))
    for x in req.expand():
        options.extend(resolve_requirement_apt(apt_mgr, x))
    # Try even harder
    if not options:
        options.extend(find_reqs_simple(
            apt_mgr,
            [
                posixpath.join("/usr/lib", ".*", "pkgconfig", re.escape(req.name) + "-.*\\.pc"),
                posixpath.join("/usr/lib/pkgconfig", re.escape(req.name) + "-.*\\.pc")
            ],
            regex=True,
            case_insensitive=True,
            minimum_version=req.minimum_version
        ))
    return options
def resolve_binary_req(apt_mgr, req):
    """Map a BinaryRequirement to the packages shipping that executable."""
    if posixpath.isabs(req.binary_name):
        paths = [req.binary_name]
    else:
        # Search the conventional executable directories.
        paths = [
            posixpath.join(directory, req.binary_name)
            for directory in ["/usr/bin", "/bin"]
        ]
    return find_reqs_simple(apt_mgr, paths)
def resolve_pkg_config_req(apt_mgr, req):
    """Find packages shipping the requirement's pkg-config .pc file.

    Tries the multiarch directories first, then falls back to the legacy
    /usr/lib/pkgconfig location.
    """
    names = find_package_names(
        apt_mgr,
        [
            posixpath.join(
                "/usr/lib", ".*", "pkgconfig", re.escape(req.module) + "\\.pc"
            )
        ],
        regex=True,
    )
    if not names:
        names = find_package_names(
            apt_mgr, [posixpath.join("/usr/lib/pkgconfig", req.module + ".pc")]
        )
    return [
        AptRequirement.simple(name, minimum_version=req.minimum_version)
        for name in names
    ]
def resolve_path_req(apt_mgr, req):
    """Find packages shipping the literal path the requirement names."""
    return find_reqs_simple(apt_mgr, [req.path])
def resolve_c_header_req(apt_mgr, req):
    """Find packages shipping a C header: first directly under
    /usr/include, then in any subdirectory."""
    reqs = find_reqs_simple(
        apt_mgr, [posixpath.join("/usr/include", req.header)], regex=False
    )
    if not reqs:
        reqs = find_reqs_simple(
            apt_mgr,
            [posixpath.join("/usr/include", ".*", re.escape(req.header))],
            regex=True,
        )
    return reqs
def resolve_js_runtime_req(apt_mgr, req):
    """Any JavaScript runtime will do: node or duktape."""
    return find_reqs_simple(apt_mgr, ["/usr/bin/node", "/usr/bin/duk"])
def resolve_vala_package_req(apt_mgr, req):
    """Find packages shipping the .vapi file for a vala package, in any
    installed vala version's vapi directory."""
    path = "/usr/share/vala-[0-9.]+/vapi/%s\\.vapi" % re.escape(req.package)
    return find_reqs_simple(apt_mgr, [path], regex=True)
def resolve_ruby_gem_req(apt_mgr, req):
    """Find packages shipping the rubygems gemspec for *req.gem*."""
    paths = [
        posixpath.join(
            "/usr/share/rubygems-integration/all/"
            "specifications/%s-.*\\.gemspec" % re.escape(req.gem)
        )
    ]
    return find_reqs_simple(
        apt_mgr, paths, regex=True, minimum_version=req.minimum_version
    )
def resolve_go_package_req(apt_mgr, req):
    """Find packages shipping Go sources under /usr/share/gocode/src."""
    return find_reqs_simple(
        apt_mgr,
        [posixpath.join("/usr/share/gocode/src", re.escape(req.package), ".*")],
        regex=True,
    )
def resolve_go_req(apt_mgr, req):
    """Require the golang-go toolchain; "2:" is the epoch used by the
    Debian golang-go package's version numbers."""
    return [AptRequirement.simple("golang-go", minimum_version="2:%s" % req.version)]
def resolve_dh_addon_req(apt_mgr, req):
    """Find packages shipping a debhelper addon under /usr/share/perl5."""
    paths = [posixpath.join("/usr/share/perl5", req.path)]
    return find_reqs_simple(apt_mgr, paths)
def resolve_php_class_req(apt_mgr, req):
    """Find packages shipping a PHP class file; namespace separators map
    to directory separators under /usr/share/php."""
    path = "/usr/share/php/%s.php" % req.php_class.replace("\\", "/")
    return find_reqs_simple(apt_mgr, [path])
def resolve_php_package_req(apt_mgr, req):
    """PHP libraries are conventionally packaged as php-<name> in Debian."""
    return [
        AptRequirement.simple("php-%s" % req.package, minimum_version=req.min_version)
    ]
def resolve_r_package_req(apt_mgr, req):
    """Find packages shipping an R library's DESCRIPTION file."""
    paths = [
        posixpath.join("/usr/lib/R/site-library", req.package, "DESCRIPTION")
    ]
    return find_reqs_simple(apt_mgr, paths, minimum_version=req.minimum_version)
def resolve_node_module_req(apt_mgr, req):
    """Find packages shipping a node module's index.js.

    NOTE(review): the "." in "index.js" is left unescaped in these
    regexes, so it matches any character — loose, but harmless here.
    """
    paths = [
        "/usr/share/nodejs/.*/node_modules/%s/index.js" % re.escape(req.module),
        "/usr/lib/nodejs/%s/index.js" % re.escape(req.module),
        "/usr/share/nodejs/%s/index.js" % re.escape(req.module),
    ]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_node_package_req(apt_mgr, req):
    """Find packages shipping a node package's package.json, in either
    bundled node_modules trees or the top-level nodejs directories."""
    paths = [
        "/usr/share/nodejs/.*/node_modules/%s/package\\.json" % re.escape(req.package),
        "/usr/lib/nodejs/%s/package\\.json" % re.escape(req.package),
        "/usr/share/nodejs/%s/package\\.json" % re.escape(req.package),
    ]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_library_req(apt_mgr, req):
    """Find packages shipping lib<name>.so or lib<name>.a, either directly
    in /usr/lib or in any multiarch subdirectory."""
    paths = [
        posixpath.join("/usr/lib/lib%s.so$" % re.escape(req.library)),
        posixpath.join("/usr/lib/.*/lib%s.so$" % re.escape(req.library)),
        posixpath.join("/usr/lib/lib%s.a$" % re.escape(req.library)),
        posixpath.join("/usr/lib/.*/lib%s.a$" % re.escape(req.library)),
    ]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_static_library_req(apt_mgr, req):
    """Find packages shipping the named static library file, in /usr/lib
    or any multiarch subdirectory."""
    paths = [
        posixpath.join("/usr/lib/%s$" % re.escape(req.filename)),
        posixpath.join("/usr/lib/.*/%s$" % re.escape(req.filename)),
    ]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_ruby_file_req(apt_mgr, req):
    """Find packages shipping a ruby file: first in vendor_ruby, then in
    any installed gem's lib directory."""
    paths = [posixpath.join("/usr/lib/ruby/vendor_ruby/%s.rb" % req.filename)]
    reqs = find_reqs_simple(apt_mgr, paths, regex=False)
    if reqs:
        return reqs
    paths = [
        posixpath.join(
            r"/usr/share/rubygems-integration/all/gems/([^/]+)/"
            "lib/%s\\.rb" % re.escape(req.filename)
        )
    ]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_xml_entity_req(apt_mgr, req):
    """Map a known XML entity URL prefix to its on-disk catalog location
    and find the shipping package; returns None for unknown prefixes."""
    # Ideally we should be using the XML catalog for this, but hardcoding
    # a few URLs will do for now..
    URL_MAP = {
        "http://www.oasis-open.org/docbook/xml/": "/usr/share/xml/docbook/schema/dtd/"
    }
    for url, path in URL_MAP.items():
        if req.url.startswith(url):
            search_path = posixpath.join(path, req.url[len(url) :])
            break
    else:
        return None
    return find_reqs_simple(apt_mgr, [search_path], regex=False)
def resolve_sprockets_file_req(apt_mgr, req):
    """Find packages shipping a sprockets asset; only JavaScript assets
    are currently supported (returns None otherwise)."""
    if req.content_type == "application/javascript":
        path = "/usr/share/.*/app/assets/javascripts/%s\\.js$" % re.escape(req.name)
    else:
        logging.warning("unable to handle content type %s", req.content_type)
        return None
    return find_reqs_simple(apt_mgr, [path], regex=True)
def resolve_java_class_req(apt_mgr, req):
    """Find packages providing a Java class via java-propose-classpath.

    Returns None when no classpath can be determined.  Unfortunately this
    only finds classes in jars installed on the host system.
    """
    output = apt_mgr.session.check_output(
        ["java-propose-classpath", "-c" + req.classname]
    )
    classpath = [p for p in output.decode().strip(":").strip().split(":") if p]
    if not classpath:
        logging.warning("unable to find classpath for %s", req.classname)
        # Fixed: previously returned False here; None is what the other
        # resolver functions return on failure.
        return None
    logging.info("Classpath for %s: %r", req.classname, classpath)
    # Fixed: classpath is already a list of paths; the previous code
    # wrapped it in another list ([classpath]), handing a nested list to
    # find_reqs_simple.
    return find_reqs_simple(apt_mgr, classpath)
def resolve_cmake_file_req(apt_mgr, req):
    """Find packages shipping a CMake module/config file."""
    paths = ['/usr/lib/.*/cmake/.*/%s' % re.escape(req.filename)]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_haskell_package_req(apt_mgr, req):
    """Find packages registering the Haskell package in GHC's package db."""
    path = "/var/lib/ghc/package\\.conf\\.d/%s-.*\\.conf" % re.escape(req.package)
    return find_reqs_simple(apt_mgr, [path], regex=True)
def resolve_maven_artifact_req(apt_mgr, req):
    """Find packages shipping a maven artifact under /usr/share/maven-repo.

    With an explicit version the path is matched literally; without one, a
    regex with ".*" in the version components is used.
    """
    if req.version is None:
        version = ".*"
        regex = True
        escape = re.escape
    else:
        version = req.version
        regex = False

        def escape(x):
            # Literal matching: no escaping needed.
            return x

    kind = req.kind or "jar"
    path = posixpath.join(
        escape("/usr/share/maven-repo"),
        escape(req.group_id.replace(".", "/")),
        escape(req.artifact_id),
        version,
        # Fixed: the filename component previously used escape("%s-")
        # without ever substituting the artifact id, so the pattern
        # searched for a file literally named "%s-<version>.<kind>" and
        # could never match "<artifact>-<version>.<kind>".
        escape(req.artifact_id + "-") + version + escape("." + kind),
    )
    return find_reqs_simple(apt_mgr, [path], regex=regex)
def resolve_gnome_common_req(apt_mgr, req):
    """gnome-common ships the GNOME autotools macros."""
    return [AptRequirement.simple("gnome-common")]
def resolve_jdk_file_req(apt_mgr, req):
    """Find packages shipping *filename* anywhere under the JDK path."""
    path = re.escape(req.jdk_path) + ".*/" + re.escape(req.filename)
    return find_reqs_simple(apt_mgr, [path], regex=True)
def resolve_jdk_req(apt_mgr, req):
    """Any JDK will do; default-jdk tracks the distribution default."""
    return [AptRequirement.simple("default-jdk")]
def resolve_jre_req(apt_mgr, req):
    """Any JRE will do; default-jre tracks the distribution default."""
    return [AptRequirement.simple("default-jre")]
def resolve_x11_req(apt_mgr, req):
    """X11 development headers come from libx11-dev."""
    return [AptRequirement.simple("libx11-dev")]
def resolve_qt_req(apt_mgr, req):
    """Any Qt version providing qmake satisfies the requirement."""
    return find_reqs_simple(apt_mgr, ["/usr/lib/.*/qt[0-9]+/bin/qmake"], regex=True)
def resolve_libtool_req(apt_mgr, req):
    """libtool is packaged under its own name."""
    return [AptRequirement.simple("libtool")]
def resolve_perl_module_req(apt_mgr, req):
    """Find packages shipping a perl module.

    Without an explicit @INC, searches the standard perl directories for
    either the module's Foo/Bar.pm path or its given filename; with an
    explicit @INC, only those directories are tried literally.
    """
    DEFAULT_PERL_PATHS = ["/usr/share/perl5", "/usr/lib/.*/perl5/.*", "/usr/lib/.*/perl-base"]
    if req.inc is None:
        if req.filename is None:
            # Foo::Bar -> Foo/Bar.pm under any default perl path.
            paths = [posixpath.join(inc, re.escape(req.module.replace('::', '/') + '.pm')) for inc in DEFAULT_PERL_PATHS]
            regex = True
        elif not posixpath.isabs(req.filename):
            paths = [posixpath.join(inc, re.escape(req.filename)) for inc in DEFAULT_PERL_PATHS]
            regex = True
        else:
            # Absolute filename: match it literally.
            paths = [req.filename]
            regex = False
    else:
        regex = False
        paths = [posixpath.join(inc, req.filename) for inc in req.inc]
    return find_reqs_simple(apt_mgr, paths, regex=regex)
def resolve_perl_file_req(apt_mgr, req):
    """Find packages shipping the literal perl file path."""
    return find_reqs_simple(apt_mgr, [req.filename], regex=False)
def _find_aclocal_fun(macro):
    """Locate the installed m4 file under /usr/share/aclocal that defines
    *macro* (via AC_DEFUN or AU_ALIAS); raises KeyError if none does."""
    # TODO(jelmer): Use the API for codesearch.debian.net instead?
    prefixes = [
        b"AC_DEFUN([%s]," % macro.encode("ascii"),
        b"AU_ALIAS([%s]," % macro.encode("ascii"),
    ]
    for entry in os.scandir("/usr/share/aclocal"):
        if not entry.is_file():
            continue
        with open(entry.path, "rb") as f:
            if any(
                line.startswith(prefix)
                for line in f
                for prefix in prefixes
            ):
                return entry.path
    raise KeyError
def resolve_autoconf_macro_req(apt_mgr, req):
    """Find packages shipping the aclocal file defining the macro, by
    scanning the locally installed m4 files; None if not found."""
    try:
        path = _find_aclocal_fun(req.macro)
    except KeyError:
        logging.info("No local m4 file found defining %s", req.macro)
        return None
    return find_reqs_simple(apt_mgr, [path])
def resolve_python_module_req(apt_mgr, req):
    """Resolve a PythonModuleRequirement via the matching interpreter
    tree; returns None for unsupported interpreter versions."""
    specs = [(">=", req.minimum_version)] if req.minimum_version else []
    if req.python_version == 2:
        return get_package_for_python_module(apt_mgr, req.module, "cpython2", specs)
    if req.python_version in (None, 3):
        return get_package_for_python_module(apt_mgr, req.module, "cpython3", specs)
    return None
def resolve_python_package_req(apt_mgr, req):
    """Resolve a PythonPackageRequirement via the matching interpreter
    tree; returns None for unsupported interpreter versions."""
    if req.python_version == 2:
        python_version = "cpython2"
    elif req.python_version in (None, 3):
        python_version = "cpython3"
    else:
        return None
    return get_package_for_python_package(
        apt_mgr, req.package, python_version, req.specs
    )
def resolve_cargo_crate_req(apt_mgr, req):
    """Find packages vendoring the crate into /usr/share/cargo/registry."""
    paths = ["/usr/share/cargo/registry/%s-[0-9]+.*/Cargo.toml" % re.escape(req.crate)]
    return find_reqs_simple(apt_mgr, paths, regex=True)
def resolve_ca_req(apt_mgr, req):
    """CA certificates are provided by the ca-certificates package."""
    return [AptRequirement.simple("ca-certificates")]
def resolve_introspection_typelib_req(apt_mgr, req):
    """Find packages shipping the GObject introspection typelib."""
    return find_reqs_simple(
        apt_mgr, [r'/usr/lib/.*/girepository-.*/%s-.*\.typelib' % re.escape(req.library)],
        regex=True)
def resolve_apt_req(apt_mgr, req):
    """An AptRequirement resolves to itself, provided every package it
    mentions exists in the archive (otherwise the empty list)."""
    # TODO(jelmer): This should be checking whether versions match as well.
    for package_name in req.package_names():
        if not apt_mgr.package_exists(package_name):
            return []
    return [req]
def resolve_boost_component_req(apt_mgr, req):
    """Find packages shipping the libboost_<component> library."""
    return find_reqs_simple(
        apt_mgr, ["/usr/lib/.*/libboost_%s" % re.escape(req.name)],
        regex=True)
# Dispatch table mapping requirement types to their apt resolver
# functions; consulted in order by resolve_requirement_apt(), which uses
# the first entry whose type matches.
APT_REQUIREMENT_RESOLVERS = [
    (AptRequirement, resolve_apt_req),
    (BinaryRequirement, resolve_binary_req),
    (VagueDependencyRequirement, resolve_vague_dep_req),
    (PerlPreDeclaredRequirement, resolve_perl_predeclared_req),
    (PkgConfigRequirement, resolve_pkg_config_req),
    (PathRequirement, resolve_path_req),
    (CHeaderRequirement, resolve_c_header_req),
    (JavaScriptRuntimeRequirement, resolve_js_runtime_req),
    (ValaPackageRequirement, resolve_vala_package_req),
    (RubyGemRequirement, resolve_ruby_gem_req),
    (GoPackageRequirement, resolve_go_package_req),
    (GoRequirement, resolve_go_req),
    (DhAddonRequirement, resolve_dh_addon_req),
    (PhpClassRequirement, resolve_php_class_req),
    (PhpPackageRequirement, resolve_php_package_req),
    (RPackageRequirement, resolve_r_package_req),
    (NodeModuleRequirement, resolve_node_module_req),
    (NodePackageRequirement, resolve_node_package_req),
    (LibraryRequirement, resolve_library_req),
    (StaticLibraryRequirement, resolve_static_library_req),
    (RubyFileRequirement, resolve_ruby_file_req),
    (XmlEntityRequirement, resolve_xml_entity_req),
    (SprocketsFileRequirement, resolve_sprockets_file_req),
    (JavaClassRequirement, resolve_java_class_req),
    (CMakefileRequirement, resolve_cmake_file_req),
    (HaskellPackageRequirement, resolve_haskell_package_req),
    (MavenArtifactRequirement, resolve_maven_artifact_req),
    (GnomeCommonRequirement, resolve_gnome_common_req),
    (JDKFileRequirement, resolve_jdk_file_req),
    (JDKRequirement, resolve_jdk_req),
    (JRERequirement, resolve_jre_req),
    (QTRequirement, resolve_qt_req),
    (X11Requirement, resolve_x11_req),
    (LibtoolRequirement, resolve_libtool_req),
    (PerlModuleRequirement, resolve_perl_module_req),
    (PerlFileRequirement, resolve_perl_file_req),
    (AutoconfMacroRequirement, resolve_autoconf_macro_req),
    (PythonModuleRequirement, resolve_python_module_req),
    (PythonPackageRequirement, resolve_python_package_req),
    (CertificateAuthorityRequirement, resolve_ca_req),
    (CargoCrateRequirement, resolve_cargo_crate_req),
    (IntrospectionTypelibRequirement, resolve_introspection_typelib_req),
    (BoostComponentRequirement, resolve_boost_component_req),
]
def resolve_requirement_apt(apt_mgr, req: Requirement) -> List[AptRequirement]:
    """Translate *req* into candidate AptRequirements.

    Uses the first matching entry in APT_REQUIREMENT_RESOLVERS; an empty
    list means the requirement could not be resolved.  Raises
    NotImplementedError for requirement types without a resolver.
    """
    for rr_class, rr_fn in APT_REQUIREMENT_RESOLVERS:
        if not isinstance(req, rr_class):
            continue
        ret = rr_fn(apt_mgr, req)
        if not ret:
            return []
        if not isinstance(ret, list):
            raise TypeError(ret)
        return ret
    raise NotImplementedError(type(req))
def default_tie_breakers(session):
    """Tie breakers used when several packages could satisfy a
    requirement: prefer packages already common as build dependencies,
    then fall back to popularity (popcon)."""
    from ..debian.udd import popcon_tie_breaker
    from ..debian.build_deps import BuildDependencyTieBreaker
    return [
        BuildDependencyTieBreaker.from_session(session),
        popcon_tie_breaker,
    ]
class AptResolver(Resolver):
    """Resolver that satisfies requirements with apt packages.

    Requirements are first translated to AptRequirements; when several
    candidate packages could provide one, *tie_breakers* pick a winner.
    """
    def __init__(self, apt, tie_breakers=None):
        self.apt = apt
        if tie_breakers is None:
            tie_breakers = default_tie_breakers(apt.session)
        self.tie_breakers = tie_breakers
    def __str__(self):
        return "apt"
    def __repr__(self):
        return "%s(%r, %r)" % (type(self).__name__, self.apt, self.tie_breakers)
    @classmethod
    def from_session(cls, session, tie_breakers=None):
        return cls(AptManager.from_session(session), tie_breakers=tie_breakers)
    def install(self, requirements):
        """Install all not-yet-met requirements with a single apt satisfy
        call; unresolvable ones are raised as UnsatisfiedRequirements."""
        missing = []
        for req in requirements:
            try:
                if not req.met(self.apt.session):
                    missing.append(req)
            except NotImplementedError:
                # No way to check whether it is met; assume it needs
                # installing.
                missing.append(req)
        if not missing:
            return
        still_missing = []
        apt_requirements = []
        for m in missing:
            apt_req = self.resolve(m)
            if apt_req is None:
                still_missing.append(m)
            else:
                apt_requirements.append(apt_req)
        if apt_requirements:
            # All resolved requirements are satisfied in one go, rendered
            # as a single comma-joined Debian relation string.
            self.apt.satisfy(
                [PkgRelation.str(chain(*[r.relations for r in apt_requirements]))]
            )
        if still_missing:
            raise UnsatisfiedRequirements(still_missing)
    def explain(self, requirements):
        """Yield one (satisfy-command, requirements) pair covering the
        resolvable subset of *requirements*."""
        apt_requirements = []
        for r in requirements:
            apt_req = self.resolve(r)
            if apt_req is not None:
                apt_requirements.append((r, apt_req))
        if apt_requirements:
            yield (
                self.apt.satisfy_command(
                    [
                        PkgRelation.str(
                            chain(*[r.relations for o, r in apt_requirements])
                        )
                    ]
                ),
                [o for o, r in apt_requirements],
            )
    def resolve(self, req: Requirement):
        """Translate *req* into a single AptRequirement, or None.

        With multiple candidates, tie breakers are consulted in order;
        the first candidate wins as a last resort.
        """
        ret = resolve_requirement_apt(self.apt, req)
        if not ret:
            return None
        if len(ret) == 1:
            return ret[0]
        logging.info("Need to break tie between %r with %r", ret, self.tie_breakers)
        for tie_breaker in self.tie_breakers:
            winner = tie_breaker(ret)
            if winner is not None:
                if not isinstance(winner, AptRequirement):
                    raise TypeError(winner)
                return winner
        logging.info("Unable to break tie over %r, picking first: %r", ret, ret[0])
        return ret[0]

View file

@ -16,12 +16,27 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from typing import Optional, List, Dict
from typing import Optional, List, Dict, Tuple
import sys
import subprocess
class NoSessionOpen(Exception):
    """Raised when an operation needs an open session but none is open."""

    def __init__(self, session):
        # The Session object on which the operation was attempted.
        self.session = session
class SessionAlreadyOpen(Exception):
    """Raised when opening a session that is already open."""

    def __init__(self, session):
        # The Session object that was already open.
        self.session = session
class Session(object):
def __enter__(self) -> 'Session':
def __enter__(self) -> "Session":
return self
def __exit__(self, exc_type, exc_val, exc_tb):
@ -32,35 +47,90 @@ class Session(object):
@property
def location(self) -> str:
raise NotImplementedError(self.location)
raise NotImplementedError
def check_call(
self,
argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None):
self,
argv: List[str],
cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
close_fds: bool = True,
):
raise NotImplementedError(self.check_call)
def check_output(
self,
argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None) -> bytes:
self,
argv: List[str],
cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
) -> bytes:
raise NotImplementedError(self.check_output)
def Popen(self, argv, cwd: Optional[str] = None,
user: Optional[str] = None, **kwargs):
def Popen(
self, argv, cwd: Optional[str] = None, user: Optional[str] = None, **kwargs
):
raise NotImplementedError(self.Popen)
def call(
self, argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None):
self, argv: List[str], cwd: Optional[str] = None, user: Optional[str] = None
):
raise NotImplementedError(self.call)
def create_home(self) -> None:
"""Create the user's home directory."""
raise NotImplementedError(self.create_home)
def exists(self, path: str) -> bool:
"""Check whether a path exists in the chroot."""
raise NotImplementedError(self.exists)
def scandir(self, path: str):
raise NotImplementedError(self.scandir)
def setup_from_vcs(
self, tree, include_controldir: Optional[bool] = None, subdir="package"
) -> Tuple[str, str]:
raise NotImplementedError(self.setup_from_vcs)
def setup_from_directory(self, path, subdir="package") -> Tuple[str, str]:
raise NotImplementedError(self.setup_from_directory)
def external_path(self, path: str) -> str:
raise NotImplementedError
is_temporary: bool
class SessionSetupFailure(Exception):
    """Raised when a session could not be set up (e.g. schroot begin failed)."""
def run_with_tee(session: "Session", args: List[str], **kwargs):
    """Run ``args`` in the session, echoing combined stdout/stderr to our
    stdout while also capturing it.

    Returns a tuple (returncode, lines), where lines is the decoded
    ("utf-8" with surrogateescape) output split by readline().
    """
    if "stdin" not in kwargs:
        kwargs["stdin"] = subprocess.DEVNULL
    p = session.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
    contents = []
    # Read until EOF rather than until the process exits: the pipe can
    # still hold buffered output after poll() turns non-None, so the
    # previous ``while p.poll() is None`` loop dropped trailing lines
    # (and could capture nothing at all for a fast-exiting process).
    while True:
        line = p.stdout.readline()
        if not line:
            break
        sys.stdout.buffer.write(line)
        sys.stdout.buffer.flush()
        contents.append(line.decode("utf-8", "surrogateescape"))
    p.wait()  # ensure returncode is populated
    return p.returncode, contents
def get_user(session):
    """Return the name of the user inside the session.

    Session implementations execute argv directly (no shell), so the
    previous ``["echo", "$USER"]`` form printed the literal string
    "$USER"; the variable must be expanded by an explicit ``sh -c``,
    as create_home() in this module already does for $HOME/$LOGNAME.
    """
    return session.check_output(["sh", "-c", "echo $USER"], cwd="/").decode().strip()
def which(session, name):
    """Locate an executable on the session's PATH via ``which``.

    Returns the full path, or None when the command is not found
    (``which`` exits 1). Other failures propagate.
    """
    try:
        output = session.check_output(["which", name], cwd="/")
    except subprocess.CalledProcessError as e:
        if e.returncode == 1:
            return None
        raise
    path = output.decode().strip()
    return path or None

View file

@ -16,20 +16,110 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from . import Session
from . import Session, NoSessionOpen, SessionAlreadyOpen
import contextlib
import os
import subprocess
import tempfile
from typing import Optional, Dict, List
class PlainSession(Session):
    """Session that runs commands directly on the host, ignoring chroots.

    Commands for another user are prefixed with ``sudo -u``; otherwise
    argv is executed as-is. The session must be entered (``with``)
    before running commands.
    """

    location = "/"

    def __init__(self):
        # ExitStack holding resources (e.g. temporary directories) that
        # live for the duration of the session; None while closed.
        self.es = None

    def _prepend_user(self, user, args):
        """Prefix ``sudo -u user`` when running as a different user."""
        if self.es is None:
            raise NoSessionOpen(self)
        if user is not None:
            import getpass

            if user != getpass.getuser():
                args = ["sudo", "-u", user] + args
        return args

    def __repr__(self):
        return "%s()" % (type(self).__name__,)

    def __enter__(self) -> "Session":
        if self.es is not None:
            raise SessionAlreadyOpen(self)
        self.es = contextlib.ExitStack()
        self.es.__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.es is None:
            raise NoSessionOpen(self)
        self.es.__exit__(exc_type, exc_val, exc_tb)
        self.es = None
        return False

    def create_home(self):
        # The host user's home directory already exists.
        pass

    def check_call(
        self,
        argv: List[str],
        cwd: Optional[str] = None,
        user: Optional[str] = None,
        env: Optional[Dict[str, str]] = None,
        close_fds: bool = True,
    ):
        argv = self._prepend_user(user, argv)
        return subprocess.check_call(argv, cwd=cwd, env=env, close_fds=close_fds)

    def check_output(
        self,
        argv: List[str],
        cwd: Optional[str] = None,
        user: Optional[str] = None,
        env: Optional[Dict[str, str]] = None,
    ) -> bytes:
        argv = self._prepend_user(user, argv)
        return subprocess.check_output(argv, cwd=cwd, env=env)

    def Popen(
        self, args, stdout=None, stderr=None, stdin=None, user=None, cwd=None, env=None
    ):
        args = self._prepend_user(user, args)
        return subprocess.Popen(
            args, stdout=stdout, stderr=stderr, stdin=stdin, cwd=cwd, env=env
        )

    def exists(self, path):
        return os.path.exists(path)

    def scandir(self, path):
        return os.scandir(path)

    def chdir(self, path):
        os.chdir(path)

    def mkdir(self, path):
        os.mkdir(path)

    def external_path(self, path):
        # The session *is* the host, so the external path is just the
        # absolute form of the path.
        return os.path.abspath(path)

    def setup_from_vcs(self, tree, include_controldir=None, subdir="package"):
        """Materialise a VCS tree for building.

        Returns (external_dir, internal_dir); both are the same for a
        plain session. A temporary copy/export is only made when the
        control directory must be excluded or the tree has no on-disk
        base; otherwise the tree's own base directory is used directly.
        """
        from ..vcs import dupe_vcs_tree, export_vcs_tree

        if include_controldir is False or (
            not hasattr(tree, "base") and include_controldir is None
        ):
            td = self.es.enter_context(tempfile.TemporaryDirectory())
            export_vcs_tree(tree, td)
            return td, td
        elif not hasattr(tree, "base"):
            td = self.es.enter_context(tempfile.TemporaryDirectory())
            dupe_vcs_tree(tree, td)
            return td, td
        else:
            return tree.base, tree.base

    def setup_from_directory(self, path, subdir="package"):
        # Accept ``subdir`` for interface compatibility with
        # Session.setup_from_directory / SchrootSession (previously this
        # override lacked the parameter, so callers passing subdir= got a
        # TypeError). No copy is needed on the host, so it is unused.
        return path, path

    is_temporary = False

View file

@ -15,13 +15,16 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import os
import shlex
import subprocess
import tempfile
from typing import Optional, List, Dict
from . import Session, SessionSetupFailure
from . import Session, SessionSetupFailure, NoSessionOpen, SessionAlreadyOpen
class SchrootSession(Session):
@ -29,30 +32,62 @@ class SchrootSession(Session):
_cwd: Optional[str]
_location: Optional[str]
chroot: str
session_id: Optional[str]
def __init__(self, chroot: str):
if not isinstance(chroot, str):
raise TypeError('not a valid chroot: %r' % chroot)
raise TypeError("not a valid chroot: %r" % chroot)
self.chroot = chroot
self._location = None
self._cwd = None
self.session_id = None
def _get_location(self) -> str:
return subprocess.check_output(
['schroot', '--location', '-c', 'session:' + self.session_id
]).strip().decode()
if self.session_id is None:
raise NoSessionOpen(self)
return (
subprocess.check_output(
["schroot", "--location", "-c", "session:" + self.session_id]
)
.strip()
.decode()
)
def _end_session(self) -> None:
subprocess.check_output(
['schroot', '-c', 'session:' + self.session_id, '-e'])
def __enter__(self) -> 'Session':
def _end_session(self) -> bool:
if self.session_id is None:
raise NoSessionOpen(self)
try:
self.session_id = subprocess.check_output(
['schroot', '-c', self.chroot, '-b']).strip().decode()
subprocess.check_output(
["schroot", "-c", "session:" + self.session_id, "-e"],
stderr=subprocess.PIPE,
)
except subprocess.CalledProcessError as e:
for line in e.stderr.splitlines(False):
if line.startswith(b"E: "):
logging.error("%s", line[3:].decode(errors="replace"))
logging.warning(
"Failed to close schroot session %s, leaving stray.", self.session_id
)
self.session_id = None
return False
self.session_id = None
return True
def __enter__(self) -> "Session":
if self.session_id is not None:
raise SessionAlreadyOpen(self)
try:
self.session_id = (
subprocess.check_output(["schroot", "-c", self.chroot, "-b"])
.strip()
.decode()
)
except subprocess.CalledProcessError:
# TODO(jelmer): Capture stderr and forward in SessionSetupFailure
raise SessionSetupFailure()
logging.info(
"Opened schroot session %s (from %s)", self.session_id, self.chroot
)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
@ -68,60 +103,132 @@ class SchrootSession(Session):
self._location = self._get_location()
return self._location
def _run_argv(self, argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None):
base_argv = ['schroot', '-r', '-c', 'session:' + self.session_id]
def _run_argv(
self,
argv: List[str],
cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
):
if self.session_id is None:
raise NoSessionOpen(self)
base_argv = ["schroot", "-r", "-c", "session:" + self.session_id]
if cwd is None:
cwd = self._cwd
if cwd is not None:
base_argv.extend(['-d', cwd])
base_argv.extend(["-d", cwd])
if user is not None:
base_argv.extend(['-u', user])
base_argv.extend(["-u", user])
if env:
argv = [
'sh', '-c',
' '.join(
['%s=%s ' % (key, shlex.quote(value))
for (key, value) in env.items()] +
[shlex.quote(arg) for arg in argv])]
return base_argv + ['--'] + argv
"sh",
"-c",
" ".join(
[
"%s=%s " % (key, shlex.quote(value))
for (key, value) in env.items()
]
+ [shlex.quote(arg) for arg in argv]
),
]
return base_argv + ["--"] + argv
def check_call(
self,
argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None):
self,
argv: List[str],
cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
close_fds: bool = True,
):
try:
subprocess.check_call(self._run_argv(argv, cwd, user, env=env))
subprocess.check_call(
self._run_argv(argv, cwd, user, env=env), close_fds=close_fds
)
except subprocess.CalledProcessError as e:
raise subprocess.CalledProcessError(e.returncode, argv)
def check_output(
self,
argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None) -> bytes:
self,
argv: List[str],
cwd: Optional[str] = None,
user: Optional[str] = None,
env: Optional[Dict[str, str]] = None,
) -> bytes:
try:
return subprocess.check_output(
self._run_argv(argv, cwd, user, env=env))
return subprocess.check_output(self._run_argv(argv, cwd, user, env=env))
except subprocess.CalledProcessError as e:
raise subprocess.CalledProcessError(e.returncode, argv)
def Popen(self, argv, cwd: Optional[str] = None,
user: Optional[str] = None, **kwargs):
def Popen(
self, argv, cwd: Optional[str] = None, user: Optional[str] = None, **kwargs
):
return subprocess.Popen(self._run_argv(argv, cwd, user), **kwargs)
def call(
self, argv: List[str], cwd: Optional[str] = None,
user: Optional[str] = None):
self, argv: List[str], cwd: Optional[str] = None, user: Optional[str] = None
):
return subprocess.call(self._run_argv(argv, cwd, user))
def create_home(self) -> None:
"""Create the user's home directory."""
home = self.check_output(
['sh', '-c', 'echo $HOME']).decode().rstrip('\n')
user = self.check_output(
['sh', '-c', 'echo $LOGNAME']).decode().rstrip('\n')
self.check_call(['mkdir', '-p', home], user='root')
self.check_call(['chown', user, home], user='root')
home = (
self.check_output(["sh", "-c", "echo $HOME"], cwd="/").decode().rstrip("\n")
)
user = (
self.check_output(["sh", "-c", "echo $LOGNAME"], cwd="/")
.decode()
.rstrip("\n")
)
logging.info("Creating directory %s in schroot session.", home)
self.check_call(["mkdir", "-p", home], cwd="/", user="root")
self.check_call(["chown", user, home], cwd="/", user="root")
def external_path(self, path: str) -> str:
    """Map a path as seen inside the chroot to the host-side path.

    Relative paths are resolved against the session's current working
    directory; raises ValueError when no cwd has been set.
    """
    if os.path.isabs(path):
        return os.path.join(self.location, path.lstrip("/"))
    if self._cwd is None:
        raise ValueError("no cwd set")
    inside = os.path.join(self._cwd, path)
    return os.path.join(self.location, inside.lstrip("/"))
def exists(self, path: str) -> bool:
    """Check whether a path exists inside the chroot."""
    return os.path.exists(self.external_path(path))
def scandir(self, path: str):
    """Scan a directory inside the chroot; yields os.DirEntry objects."""
    return os.scandir(self.external_path(path))
def mkdir(self, path: str):
    """Create a directory inside the chroot."""
    return os.mkdir(self.external_path(path))
def setup_from_vcs(
    self, tree, include_controldir: Optional[bool] = None, subdir="package"
):
    """Materialise a VCS tree inside the chroot's build area.

    Returns (external_dir, internal_dir): the host-side path of the
    exported tree and the same location as seen from inside the chroot.
    """
    from ..vcs import dupe_vcs_tree, export_vcs_tree

    build_dir = os.path.join(self.location, "build")
    directory = tempfile.mkdtemp(dir=build_dir)
    # Path of the fresh temp directory as seen from inside the chroot.
    reldir = "/" + os.path.relpath(directory, self.location)
    export_directory = os.path.join(directory, subdir)
    if include_controldir:
        # Full clone, including VCS metadata.
        dupe_vcs_tree(tree, export_directory)
    else:
        export_vcs_tree(tree, export_directory)
    return export_directory, os.path.join(reldir, subdir)
def setup_from_directory(self, path, subdir="package"):
    """Copy a directory into the chroot's build area.

    Returns (external_dir, internal_dir), like setup_from_vcs.
    """
    import shutil

    build_dir = os.path.join(self.location, "build")
    directory = tempfile.mkdtemp(dir=build_dir)
    reldir = "/" + os.path.relpath(directory, self.location)
    export_directory = os.path.join(directory, subdir)
    # NOTE(review): dirs_exist_ok requires Python >= 3.8, but the CI
    # matrix still lists 3.7 — confirm the supported minimum version.
    shutil.copytree(path, export_directory, dirs_exist_ok=True)
    return export_directory, os.path.join(reldir, subdir)

is_temporary = True

30
ognibuild/test.py Normal file
View file

@ -0,0 +1,30 @@
#!/usr/bin/python3
# Copyright (C) 2020-2021 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .buildsystem import NoBuildToolsFound
def run_test(session, buildsystems, resolver, fixers):
    """Run the test target of the first detected build system.

    Raises NoBuildToolsFound when no build system was detected.
    """
    # Some things want to write to the user's home directory,
    # e.g. pip caches in ~/.cache
    session.create_home()
    for buildsystem in buildsystems:
        # Only the first build system's tests are run.
        buildsystem.test(session, resolver, fixers)
        break
    else:
        raise NoBuildToolsFound()

View file

@ -17,12 +17,16 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import os
import unittest
def test_suite():
    """Assemble the ognibuild test suite.

    The Debian fix-build tests need a Debian-ish environment, so they
    are only included when dpkg-architecture is present on the system.
    """
    names = ["debian_build"]
    if os.path.exists("/usr/bin/dpkg-architecture"):
        names.append("debian_fix_build")
    loader = unittest.TestLoader()
    return loader.loadTestsFromNames(
        ["ognibuild.tests.test_" + name for name in names]
    )

View file

@ -0,0 +1,152 @@
#!/usr/bin/python
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import datetime
import os
from ..debian.build import add_dummy_changelog_entry, get_build_architecture
from breezy.tests import TestCaseWithTransport, TestCase
class AddDummyChangelogEntryTests(TestCaseWithTransport):
def test_simple(self):
tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
("debian/",),
(
"debian/changelog",
"""\
janitor (0.1-1) UNRELEASED; urgency=medium
* Initial release. (Closes: #XXXXXX)
-- Jelmer Vernooij <jelmer@debian.org> Sat, 04 Apr 2020 14:12:13 +0000
""",
),
]
)
tree.add(["debian", "debian/changelog"])
add_dummy_changelog_entry(
tree,
"",
"jan+some",
"some-fixes",
"Dummy build.",
timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654),
maintainer=("Jelmer Vernooij", "jelmer@debian.org"),
)
self.assertFileEqual(
"""\
janitor (0.1-1jan+some1) some-fixes; urgency=medium
* Initial release. (Closes: #XXXXXX)
* Dummy build.
-- Jelmer Vernooij <jelmer@debian.org> Sat, 05 Sep 2020 12:35:04 -0000
""",
"debian/changelog",
)
def test_native(self):
tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
("debian/",),
(
"debian/changelog",
"""\
janitor (0.1) UNRELEASED; urgency=medium
* Initial release. (Closes: #XXXXXX)
-- Jelmer Vernooij <jelmer@debian.org> Sat, 04 Apr 2020 14:12:13 +0000
""",
),
]
)
tree.add(["debian", "debian/changelog"])
add_dummy_changelog_entry(
tree,
"",
"jan+some",
"some-fixes",
"Dummy build.",
timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654),
maintainer=("Jelmer Vernooij", "jelmer@debian.org"),
)
self.assertFileEqual(
"""\
janitor (0.1jan+some1) some-fixes; urgency=medium
* Initial release. (Closes: #XXXXXX)
* Dummy build.
-- Jelmer Vernooij <jelmer@debian.org> Sat, 05 Sep 2020 12:35:04 -0000
""",
"debian/changelog",
)
def test_exists(self):
tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
("debian/",),
(
"debian/changelog",
"""\
janitor (0.1-1jan+some1) UNRELEASED; urgency=medium
* Initial release. (Closes: #XXXXXX)
-- Jelmer Vernooij <jelmer@debian.org> Sat, 04 Apr 2020 14:12:13 +0000
""",
),
]
)
tree.add(["debian", "debian/changelog"])
add_dummy_changelog_entry(
tree,
"",
"jan+some",
"some-fixes",
"Dummy build.",
timestamp=datetime.datetime(2020, 9, 5, 12, 35, 4, 899654),
maintainer=("Jelmer Vernooij", "jelmer@debian.org"),
)
self.assertFileEqual(
"""\
janitor (0.1-1jan+some2) some-fixes; urgency=medium
* Initial release. (Closes: #XXXXXX)
* Dummy build.
-- Jelmer Vernooij <jelmer@debian.org> Sat, 05 Sep 2020 12:35:04 -0000
""",
"debian/changelog",
)
class BuildArchitectureTests(TestCase):
def setUp(self):
super(BuildArchitectureTests, self).setUp()
if not os.path.exists("/usr/bin/dpkg-architecture"):
self.skipTest("not a debian system")
def test_is_str(self):
self.assertIsInstance(get_build_architecture(), str)

View file

@ -0,0 +1,234 @@
#!/usr/bin/python
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import re
from debian.deb822 import Deb822
from buildlog_consultant.common import (
MissingCommand,
MissingGoPackage,
MissingPerlModule,
MissingPkgConfig,
MissingPythonModule,
MissingRubyFile,
MissingRubyGem,
MissingValaPackage,
)
from ..debian.apt import AptManager, FileSearcher
from ..debian.fix_build import (
resolve_error,
versioned_package_fixers,
apt_fixers,
DebianPackagingContext,
)
from breezy.commit import NullCommitReporter
from breezy.tests import TestCaseWithTransport
class DummyAptSearcher(FileSearcher):
    """In-memory FileSearcher backed by a {path: package} dict, for tests."""

    def __init__(self, files):
        self._apt_files = files

    def search_files(self, path, regex=False, case_insensitive=False):
        """Yield package names whose path matches ``path``.

        ``path`` is treated as a regex when ``regex`` is set, otherwise
        compared literally; ``case_insensitive`` applies to both modes.
        Results are yielded in sorted path order for determinism.
        """
        flags = re.I if case_insensitive else 0
        for candidate, pkg in sorted(self._apt_files.items()):
            if regex:
                if re.match(path, candidate, flags):
                    yield pkg
            elif case_insensitive:
                if path.lower() == candidate.lower():
                    yield pkg
            elif path == candidate:
                yield pkg
class ResolveErrorTests(TestCaseWithTransport):
def setUp(self):
super(ResolveErrorTests, self).setUp()
self.tree = self.make_branch_and_tree(".")
self.build_tree_contents(
[
("debian/",),
(
"debian/control",
"""\
Source: blah
Build-Depends: libc6
Package: python-blah
Depends: ${python3:Depends}
Description: A python package
Foo
""",
),
(
"debian/changelog",
"""\
blah (0.1) UNRELEASED; urgency=medium
* Initial release. (Closes: #XXXXXX)
-- Jelmer Vernooij <jelmer@debian.org> Sat, 04 Apr 2020 14:12:13 +0000
""",
),
]
)
self.tree.add(["debian", "debian/control", "debian/changelog"])
self.tree.commit("Initial commit")
self._apt_files = {}
def resolve(self, error, context=("build",)):
from ..session.plain import PlainSession
session = PlainSession()
apt = AptManager(session)
apt._searchers = [DummyAptSearcher(self._apt_files)]
context = DebianPackagingContext(
self.tree,
subpath="",
committer="ognibuild <ognibuild@jelmer.uk>",
update_changelog=True,
commit_reporter=NullCommitReporter(),
)
fixers = versioned_package_fixers(session, context, apt) + apt_fixers(apt, context)
return resolve_error(error, ("build",), fixers)
def get_build_deps(self):
with open(self.tree.abspath("debian/control"), "r") as f:
return next(Deb822.iter_paragraphs(f)).get("Build-Depends", "")
def test_missing_command_unknown(self):
self._apt_files = {}
self.assertFalse(self.resolve(MissingCommand("acommandthatdoesnotexist")))
def test_missing_command_brz(self):
self._apt_files = {
"/usr/bin/b": "bash",
"/usr/bin/brz": "brz",
"/usr/bin/brzier": "bash",
}
self.overrideEnv("DEBEMAIL", "jelmer@debian.org")
self.overrideEnv("DEBFULLNAME", "Jelmer Vernooij")
self.assertTrue(self.resolve(MissingCommand("brz")))
self.assertEqual("libc6, brz", self.get_build_deps())
rev = self.tree.branch.repository.get_revision(self.tree.branch.last_revision())
self.assertEqual("Add missing build dependency on brz.\n", rev.message)
self.assertFalse(self.resolve(MissingCommand("brz")))
self.assertEqual("libc6, brz", self.get_build_deps())
def test_missing_command_ps(self):
self._apt_files = {
"/bin/ps": "procps",
"/usr/bin/pscal": "xcal",
}
self.assertTrue(self.resolve(MissingCommand("ps")))
self.assertEqual("libc6, procps", self.get_build_deps())
def test_missing_ruby_file(self):
self._apt_files = {
"/usr/lib/ruby/vendor_ruby/rake/testtask.rb": "rake",
}
self.assertTrue(self.resolve(MissingRubyFile("rake/testtask")))
self.assertEqual("libc6, rake", self.get_build_deps())
def test_missing_ruby_file_from_gem(self):
self._apt_files = {
"/usr/share/rubygems-integration/all/gems/activesupport-"
"5.2.3/lib/active_support/core_ext/string/strip.rb": "ruby-activesupport"
}
self.assertTrue(
self.resolve(MissingRubyFile("active_support/core_ext/string/strip"))
)
self.assertEqual("libc6, ruby-activesupport", self.get_build_deps())
def test_missing_ruby_gem(self):
self._apt_files = {
"/usr/share/rubygems-integration/all/specifications/"
"bio-1.5.2.gemspec": "ruby-bio",
"/usr/share/rubygems-integration/all/specifications/"
"bio-2.0.2.gemspec": "ruby-bio",
}
self.assertTrue(self.resolve(MissingRubyGem("bio", None)))
self.assertEqual("libc6, ruby-bio", self.get_build_deps())
self.assertTrue(self.resolve(MissingRubyGem("bio", "2.0.3")))
self.assertEqual("libc6, ruby-bio (>= 2.0.3)", self.get_build_deps())
def test_missing_perl_module(self):
self._apt_files = {"/usr/share/perl5/App/cpanminus/fatscript.pm": "cpanminus"}
self.assertTrue(
self.resolve(
MissingPerlModule(
"App/cpanminus/fatscript.pm",
"App::cpanminus::fatscript",
[
"/<<PKGBUILDDIR>>/blib/lib",
"/<<PKGBUILDDIR>>/blib/arch",
"/etc/perl",
"/usr/local/lib/x86_64-linux-gnu/perl/5.30.0",
"/usr/local/share/perl/5.30.0",
"/usr/lib/x86_64-linux-gnu/perl5/5.30",
"/usr/share/perl5",
"/usr/lib/x86_64-linux-gnu/perl/5.30",
"/usr/share/perl/5.30",
"/usr/local/lib/site_perl",
"/usr/lib/x86_64-linux-gnu/perl-base",
".",
],
)
)
)
self.assertEqual("libc6, cpanminus", self.get_build_deps())
def test_missing_pkg_config(self):
self._apt_files = {
"/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev"
}
self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes")))
self.assertEqual("libc6, libxcb-xfixes0-dev", self.get_build_deps())
def test_missing_pkg_config_versioned(self):
self._apt_files = {
"/usr/lib/x86_64-linux-gnu/pkgconfig/xcb-xfixes.pc": "libxcb-xfixes0-dev"
}
self.assertTrue(self.resolve(MissingPkgConfig("xcb-xfixes", "1.0")))
self.assertEqual("libc6, libxcb-xfixes0-dev (>= 1.0)", self.get_build_deps())
def test_missing_python_module(self):
self._apt_files = {"/usr/lib/python3/dist-packages/m2r.py": "python3-m2r"}
self.assertTrue(self.resolve(MissingPythonModule("m2r")))
self.assertEqual("libc6, python3-m2r", self.get_build_deps())
def test_missing_go_package(self):
self._apt_files = {
"/usr/share/gocode/src/github.com/chzyer/readline/utils_test.go": "golang-github-chzyer-readline-dev",
}
self.assertTrue(self.resolve(MissingGoPackage("github.com/chzyer/readline")))
self.assertEqual(
"libc6, golang-github-chzyer-readline-dev", self.get_build_deps()
)
def test_missing_vala_package(self):
self._apt_files = {
"/usr/share/vala-0.48/vapi/posix.vapi": "valac-0.48-vapi",
}
self.assertTrue(self.resolve(MissingValaPackage("posix")))
self.assertEqual("libc6, valac-0.48-vapi", self.get_build_deps())

63
ognibuild/vcs.py Normal file
View file

@ -0,0 +1,63 @@
#!/usr/bin/python3
# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import errno
from breezy.errors import NotBranchError
from breezy.export import export
from breezy.workingtree import WorkingTree
from buildlog_consultant.sbuild import (
NoSpaceOnDevice,
)
from . import DetailedFailure
def export_vcs_tree(tree, directory, subpath=""):
    """Export the contents of a VCS tree (without VCS metadata) to a directory.

    Translates an out-of-disk-space OSError into DetailedFailure with a
    NoSpaceOnDevice problem so callers can report it uniformly.
    """
    try:
        export(tree, directory, "dir", None, subdir=(subpath or None))
    except OSError as e:
        if e.errno != errno.ENOSPC:
            raise
        raise DetailedFailure(1, ["export"], NoSpaceOnDevice())
def dupe_vcs_tree(tree, directory):
    """Clone a VCS tree, including history, into ``directory``.

    A WorkingTree is first mapped to its basis tree, so the duplicate
    reflects the last committed revision rather than uncommitted
    changes. Raises DetailedFailure(NoSpaceOnDevice) when the disk
    fills up.
    """
    # NOTE(review): the nesting below (everything under lock_read)
    # was reconstructed from a whitespace-mangled source — confirm
    # against the original file.
    with tree.lock_read():
        if isinstance(tree, WorkingTree):
            tree = tree.basis_tree()
        try:
            result = tree._repository.controldir.sprout(
                directory,
                create_tree_if_local=True,
                revision_id=tree.get_revision_id(),
            )
        except OSError as e:
            if e.errno == errno.ENOSPC:
                raise DetailedFailure(1, ["sprout"], NoSpaceOnDevice())
            raise
        if not result.has_workingtree():
            raise AssertionError
        # Copy parent location - some scripts need this
        # (after the reassignment above, ``tree`` is no longer a
        # WorkingTree for the working-tree case, so the branch parent is
        # read via the repository's controldir).
        if isinstance(tree, WorkingTree):
            parent = tree.branch.get_parent()
        else:
            try:
                parent = tree._repository.controldir.open_branch().get_parent()
            except NotBranchError:
                parent = None
        if parent:
            result.open_branch().set_parent(parent)

14
releaser.conf Normal file
View file

@ -0,0 +1,14 @@
name: "ognibuild"
timeout_days: 5
tag_name: "v$VERSION"
verify_command: "python3 setup.py test"
update_version {
path: "setup.py"
match: "^ version=\"(.*)\",$"
new_line: " version=\"$VERSION\","
}
update_version {
path: "ognibuild/__init__.py"
match: "^__version__ = \\((.*)\\)$"
new_line: "__version__ = $TUPLED_VERSION"
}

View file

@ -1,9 +1,13 @@
[flake8]
application-package-names = ognibuild
banned-modules = silver-platter = Should not use silver-platter
[mypy]
# A number of ognibuilds' dependencies don't have type hints yet
ignore_missing_imports = True
[bdist_wheel]
universal = 1
[egg_info]
tag_build =
tag_date = 0

View file

@ -6,12 +6,12 @@ from setuptools import setup
setup(name="ognibuild",
description="Detect and run any build system",
version="0.0.1",
version="0.0.7",
maintainer="Jelmer Vernooij",
maintainer_email="jelmer@jelmer.uk",
license="GNU GPLv2 or later",
url="https://jelmer.uk/code/ognibuild",
packages=['ognibuild'],
packages=['ognibuild', 'ognibuild.tests', 'ognibuild.debian', 'ognibuild.resolver', 'ognibuild.session'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: '
@ -23,5 +23,18 @@ setup(name="ognibuild",
],
entry_points={
"console_scripts": [
"ogni=ognibuild.__main__:main"]
})
"ogni=ognibuild.__main__:main",
"deb-fix-build=ognibuild.debian.fix_build:main",
]
},
install_requires=[
'breezy',
'buildlog-consultant>=0.0.10',
'requirements-parser',
],
extras_require={
'debian': ['debmutate', 'python_debian', 'python_apt'],
},
tests_require=['python_debian', 'buildlog-consultant', 'breezy', 'testtools'],
test_suite='ognibuild.tests.test_suite',
)