Fix style.
commit f8d269b6e5
parent 1741622d85
18 changed files with 337 additions and 317 deletions
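The changes below (single quotes normalised to double quotes, call arguments re-wrapped, closing brackets dedented) are consistent with running an automatic formatter over the whole tree. A minimal sketch of reproducing such a pass, assuming the formatter is black (the commit message does not name the tool):

    # Hypothetical reproduction of a repository-wide style pass.
    # Assumes the black formatter; the commit itself does not confirm which tool was used.
    import subprocess

    # Reformat every Python file in place, starting from the repository root.
    subprocess.run(["black", "."], check=True)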
@@ -36,5 +36,6 @@ def satisfy_build_deps(session: Session, tree):
             pass
     deps = [dep.strip().strip(",") for dep in deps]
     from .apt import AptManager
+
     apt = AptManager(session)
     apt.satisfy(deps)
@@ -24,7 +24,6 @@ import os
 from buildlog_consultant.apt import (
     find_apt_get_failure,
 )
-from debian.deb822 import Release
 
 from .. import DetailedFailure, UnidentifiedError
 from ..session import Session, run_with_tee
@@ -63,17 +62,19 @@ class AptManager(object):
         if self._searchers is None:
             self._searchers = [
                 AptContentsFileSearcher.from_session(self.session),
-                GENERATED_FILE_SEARCHER]
+                GENERATED_FILE_SEARCHER,
+            ]
         return self._searchers
 
     def package_exists(self, package):
         if self._apt_cache is None:
             import apt
+
             self._apt_cache = apt.Cache(rootdir=self.session.location)
         return package in self._apt_cache
 
     def get_package_for_paths(self, paths, regex=False):
-        logging.debug('Searching for packages containing %r', paths)
+        logging.debug("Searching for packages containing %r", paths)
         # TODO(jelmer): Make sure we use whatever is configured in self.session
         return get_package_for_paths(paths, self.searchers(), regex=regex)
 
@@ -82,6 +83,7 @@ class AptManager(object):
         status_path = os.path.join(root, "var/lib/dpkg/status")
         missing = set(packages)
         import apt_pkg
+
         with apt_pkg.TagFile(status_path) as tagf:
             while missing:
                 tagf.step()
@@ -93,7 +95,7 @@ class AptManager(object):
         return list(missing)
 
     def install(self, packages: List[str]) -> None:
-        logging.info('Installing using apt: %r', packages)
+        logging.info("Installing using apt: %r", packages)
         packages = self.missing(packages)
         if packages:
             run_apt(self.session, ["install"] + packages)
@@ -112,16 +114,19 @@ class AptContentsFileSearcher(FileSearcher):
 
     @classmethod
     def from_session(cls, session):
-        logging.info('Loading apt contents information')
+        logging.info("Loading apt contents information")
         # TODO(jelmer): what about sources.list.d?
         from aptsources.sourceslist import SourcesList
+
         sl = SourcesList()
-        sl.load(os.path.join(session.location, 'etc/apt/sources.list'))
+        sl.load(os.path.join(session.location, "etc/apt/sources.list"))
         return cls.from_sources_list(
             sl,
             cache_dirs=[
-                os.path.join(session.location, 'var/lib/apt/lists'),
-                '/var/lib/apt/lists'])
+                os.path.join(session.location, "var/lib/apt/lists"),
+                "/var/lib/apt/lists",
+            ],
+        )
 
     def __setitem__(self, path, package):
         self._db[path] = package
@@ -146,15 +151,17 @@ class AptContentsFileSearcher(FileSearcher):
     @classmethod
     def _load_cache_file(cls, url, cache_dir):
         from urllib.parse import urlparse
+
         parsed = urlparse(url)
         p = os.path.join(
-            cache_dir,
-            parsed.hostname + parsed.path.replace('/', '_') + '.lz4')
+            cache_dir, parsed.hostname + parsed.path.replace("/", "_") + ".lz4"
+        )
         if not os.path.exists(p):
             return None
-        logging.debug('Loading cached contents file %s', p)
+        logging.debug("Loading cached contents file %s", p)
         import lz4.frame
-        return lz4.frame.open(p, mode='rb')
+
+        return lz4.frame.open(p, mode="rb")
 
     @classmethod
     def from_urls(cls, urls, cache_dirs=None):
@@ -168,39 +175,39 @@ class AptContentsFileSearcher(FileSearcher):
             else:
                 if not mandatory and self._db:
                     logging.debug(
-                        'Not attempting to fetch optional contents '
-                        'file %s', url)
+                        "Not attempting to fetch optional contents " "file %s", url
+                    )
                 else:
-                    logging.debug('Fetching contents file %s', url)
+                    logging.debug("Fetching contents file %s", url)
                     try:
                         self.load_url(url)
                     except ContentsFileNotFound:
                         if mandatory:
-                            logging.warning(
-                                'Unable to fetch contents file %s', url)
+                            logging.warning("Unable to fetch contents file %s", url)
                         else:
                             logging.debug(
-                                'Unable to fetch optional contents file %s',
-                                url)
+                                "Unable to fetch optional contents file %s", url
+                            )
         return self
 
     @classmethod
     def from_sources_list(cls, sl, cache_dirs=None):
         # TODO(jelmer): Use aptsources.sourceslist.SourcesList
         from .build import get_build_architecture
+
         # TODO(jelmer): Verify signatures, etc.
         urls = []
         arches = [(get_build_architecture(), True), ("all", False)]
         for source in sl.list:
             if source.invalid or source.disabled:
                 continue
-            if source.type == 'deb-src':
+            if source.type == "deb-src":
                 continue
-            if source.type != 'deb':
+            if source.type != "deb":
                 logging.warning("Invalid line in sources: %r", source)
                 continue
-            base_url = source.uri.rstrip('/')
-            name = source.dist.rstrip('/')
+            base_url = source.uri.rstrip("/")
+            name = source.dist.rstrip("/")
             components = source.comps
             if components:
                 dists_url = base_url + "/dists"
@@ -210,12 +217,20 @@ class AptContentsFileSearcher(FileSearcher):
                 for component in components:
                     for arch, mandatory in arches:
                         urls.append(
-                            ("%s/%s/%s/Contents-%s" % (
-                                dists_url, name, component, arch), mandatory))
+                            (
+                                "%s/%s/%s/Contents-%s"
+                                % (dists_url, name, component, arch),
+                                mandatory,
+                            )
+                        )
             else:
                 for arch, mandatory in arches:
                     urls.append(
-                        ("%s/%s/Contents-%s" % (dists_url, name.rstrip('/'), arch), mandatory))
+                        (
+                            "%s/%s/Contents-%s" % (dists_url, name.rstrip("/"), arch),
+                            mandatory,
+                        )
+                    )
         return cls.from_urls(urls, cache_dirs=cache_dirs)
 
     @staticmethod
@@ -228,7 +243,7 @@ class AptContentsFileSearcher(FileSearcher):
     def load_url(self, url, allow_cache=True):
         from urllib.error import HTTPError
 
-        for ext in ['.xz', '.gz', '']:
+        for ext in [".xz", ".gz", ""]:
             try:
                 response = self._get(url + ext)
             except HTTPError as e:
@@ -238,13 +253,14 @@ class AptContentsFileSearcher(FileSearcher):
                     break
         else:
             raise ContentsFileNotFound(url)
-        if ext == '.gz':
+        if ext == ".gz":
            import gzip
+
            f = gzip.GzipFile(fileobj=response)
-        elif ext == '.xz':
+        elif ext == ".xz":
            import lzma
            from io import BytesIO
 
            f = BytesIO(lzma.decompress(response.read()))
        elif response.headers.get_content_type() == "text/plain":
            f = response
@@ -280,7 +296,8 @@ GENERATED_FILE_SEARCHER = GeneratedFileSearcher(
 
 
 def get_package_for_paths(
-        paths: List[str], searchers: List[FileSearcher], regex: bool = False) -> Optional[str]:
+    paths: List[str], searchers: List[FileSearcher], regex: bool = False
+) -> Optional[str]:
     candidates: Set[str] = set()
     for path in paths:
         for searcher in searchers:
@@ -62,11 +62,13 @@ def changes_filename(package, version, arch):
 
 def get_build_architecture():
     try:
-        return subprocess.check_output(
-            ['dpkg-architecture', '-qDEB_BUILD_ARCH']).strip().decode()
+        return (
+            subprocess.check_output(["dpkg-architecture", "-qDEB_BUILD_ARCH"])
+            .strip()
+            .decode()
+        )
     except subprocess.CalledProcessError as e:
-        raise Exception(
-            "Could not find the build architecture: %s" % e)
+        raise Exception("Could not find the build architecture: %s" % e)
 
 
 def add_dummy_changelog_entry(
@@ -22,13 +22,12 @@ __all__ = [
 import logging
 import os
 import sys
-from typing import List, Set, Optional
+from typing import List, Set, Optional, Type
 
 from debian.deb822 import (
     Deb822,
     PkgRelation,
 )
 from debian.changelog import Version
 
 from breezy.commit import PointlessCommit
 from breezy.mutabletree import MutableTree
@@ -37,7 +36,6 @@ from debmutate.control import (
     ensure_relation,
     ControlEditor,
 )
-from debian.deb822 import PkgRelation
 from debmutate.debhelper import (
     get_debhelper_compat_level,
 )
@@ -48,6 +46,7 @@ from debmutate.reformatting import (
     FormattingUnpreservable,
     GeneratedFile,
 )
+
 try:
     from breezy.workspace import reset_tree
 except ImportError:
@@ -75,7 +74,7 @@ from buildlog_consultant.common import (
     MissingPythonModule,
     MissingPythonDistribution,
     MissingPerlFile,
-    )
+)
 from buildlog_consultant.sbuild import (
     SbuildFailure,
 )
@@ -85,7 +84,7 @@ from ..buildlog import RequirementFixer
 from ..resolver.apt import (
     AptRequirement,
     get_package_for_python_module,
-    )
+)
 from .build import attempt_build, DEFAULT_BUILDER
 
 
@@ -100,7 +99,6 @@ class CircularDependency(Exception):
 
 
 class BuildDependencyContext(DependencyContext):
-
     def add_dependency(self, requirement: AptRequirement):
         return add_build_dependency(
             self.tree,
@@ -149,8 +147,8 @@ def add_build_dependency(
                     raise CircularDependency(binary["Package"])
             for rel in requirement.relations:
                 updater.source["Build-Depends"] = ensure_relation(
-                    updater.source.get("Build-Depends", ""),
-                    PkgRelation.str([rel]))
+                    updater.source.get("Build-Depends", ""), PkgRelation.str([rel])
+                )
     except FormattingUnpreservable as e:
         logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
         return False
@@ -197,8 +195,8 @@ def add_test_dependency(
                     continue
                 for rel in requirement.relations:
                     control["Depends"] = ensure_relation(
-                        control.get("Depends", ""),
-                        PkgRelation.str([rel]))
+                        control.get("Depends", ""), PkgRelation.str([rel])
+                    )
     except FormattingUnpreservable as e:
         logging.info("Unable to edit %s in a way that preserves formatting.", e.path)
         return False
@@ -330,7 +328,7 @@ def fix_missing_python_module(error, context):
     default = not targeted
 
     if error.minimum_version:
-        specs = [('>=', error.minimum_version)]
+        specs = [(">=", error.minimum_version)]
     else:
         specs = []
 
@@ -397,8 +395,9 @@ def enable_dh_autoreconf(context):
 
 
 def fix_missing_configure(error, context):
-    if (not context.tree.has_filename("configure.ac") and
-            not context.tree.has_filename("configure.in")):
+    if not context.tree.has_filename("configure.ac") and not context.tree.has_filename(
+        "configure.in"
+    ):
         return False
 
     return enable_dh_autoreconf(context)
@@ -443,16 +442,12 @@ def fix_missing_config_status_input(error, context):
 
 
 class PgBuildExtOutOfDateControlFixer(BuildFixer):
-
     def __init__(self, session):
         self.session = session
 
     def can_fix(self, problem):
         return isinstance(problem, NeedPgBuildExtUpdateControl)
 
-    def _fix(self, problem, context):
-        return self._fn(problem, context)
-
     def _fix(self, error, context):
         logging.info("Running 'pg_buildext updatecontrol'")
         self.session.check_call(["pg_buildext", "updatecontrol"])
@@ -477,18 +472,17 @@ def fix_missing_makefile_pl(error, context):
 
 
 class SimpleBuildFixer(BuildFixer):
-
-    def __init__(self, problem_cls, fn):
+    def __init__(self, problem_cls: Type[Problem], fn):
         self._problem_cls = problem_cls
         self._fn = fn
 
     def __repr__(self):
         return "%s(%r, %r)" % (type(self).__name__, self._problem_cls, self._fn)
 
-    def can_fix(self, problem):
+    def can_fix(self, problem: Problem):
         return isinstance(problem, self._problem_cls)
 
-    def _fix(self, problem, context):
+    def _fix(self, problem: Problem, context):
         return self._fn(problem, context)
 
 
@@ -504,6 +498,7 @@ def versioned_package_fixers(session):
 
 def apt_fixers(apt) -> List[BuildFixer]:
     from ..resolver.apt import AptResolver
+
     resolver = AptResolver(apt)
     return [
         SimpleBuildFixer(MissingPythonModule, fix_missing_python_module),
@@ -529,7 +524,7 @@ def build_incrementally(
 ):
     fixed_errors = []
     fixers = versioned_package_fixers(apt.session) + apt_fixers(apt)
-    logging.info('Using fixers: %r', fixers)
+    logging.info("Using fixers: %r", fixers)
     while True:
         try:
             return attempt_build(
@@ -583,7 +578,9 @@ def build_incrementally(
             except GeneratedFile:
                 logging.warning(
                     "Control file is generated, unable to edit to "
-                    "resolver error %r.", e.error)
+                    "resolver error %r.",
+                    e.error,
+                )
                 raise e
             except CircularDependency:
                 logging.warning(
@@ -647,14 +644,15 @@ def main(argv=None):
     from ..session.plain import PlainSession
     import tempfile
     import contextlib
+
     apt = AptManager(PlainSession())
 
-    logging.basicConfig(level=logging.INFO, format='%(message)s')
+    logging.basicConfig(level=logging.INFO, format="%(message)s")
 
     with contextlib.ExitStack() as es:
         if args.output_directory is None:
             output_directory = es.enter_context(tempfile.TemporaryDirectory())
-            logging.info('Using output directory %s', output_directory)
+            logging.info("Using output directory %s", output_directory)
         else:
             output_directory = args.output_directory
 