Fix schroot operation.

Jelmer Vernooij 2021-02-27 16:35:32 +00:00
parent 7c61fa0e43
commit a963db22be
No known key found for this signature in database
GPG key ID: 579C160D4C9E23E8
2 changed files with 33 additions and 25 deletions


@@ -121,15 +121,18 @@ class RemoteAptContentsFileSearcher(FileSearcher):
         sl.load(os.path.join(session.location, 'etc/apt/sources.list'))
         return cls.from_sources_list(
             sl,
-            cache_dir=os.path.join(session.location, 'var/lib/apt/lists'))
+            cache_dirs=[
+                os.path.join(session.location, 'var/lib/apt/lists'),
+                '/var/lib/apt/lists'])
 
     def __setitem__(self, path, package):
         self._db[path] = package
 
     def search_files(self, path, regex=False):
+        c = re.compile(path)
         for p, pkg in sorted(self._db.items()):
             if regex:
-                if re.match(path, p):
+                if c.match(p):
                     yield pkg
             else:
                 if path == p:
@@ -149,22 +152,26 @@ class RemoteAptContentsFileSearcher(FileSearcher):
         p = os.path.join(
             cache_dir,
             parsed.hostname + parsed.path.replace('/', '_') + '.lz4')
+        logging.debug('Loading cached contents file %s', p)
         if not os.path.exists(p):
             return None
-        logging.debug('Loading cached contents file %s', p)
         import lz4.frame
         return lz4.frame.open(p, mode='rb')
 
     @classmethod
-    def from_urls(cls, urls, cache_dir=None):
+    def from_urls(cls, urls, cache_dirs=None):
         self = cls()
         for url, mandatory in urls:
+            for cache_dir in cache_dirs or []:
                 f = cls._load_cache_file(url, cache_dir)
                 if f is not None:
                     self.load_file(f)
-            elif not mandatory and self._db:
+                    break
+            else:
+                if not mandatory and self._db:
                     logging.debug(
-                    'Not attempting to fetch optional contents file %s', url)
+                        'Not attempting to fetch optional contents '
+                        'file %s', url)
                 else:
                     logging.debug('Fetching contents file %s', url)
                     try:
@@ -175,11 +182,12 @@ class RemoteAptContentsFileSearcher(FileSearcher):
                                 'Unable to fetch contents file %s', url)
                         else:
                             logging.debug(
-                            'Unable to fetch optional contents file %s', url)
+                                'Unable to fetch optional contents file %s',
+                                url)
         return self
 
     @classmethod
-    def from_sources_list(cls, sl, cache_dir=None):
+    def from_sources_list(cls, sl, cache_dirs=None):
         # TODO(jelmer): Use aptsources.sourceslist.SourcesList
         from .build import get_build_architecture
         # TODO(jelmer): Verify signatures, etc.
@@ -210,7 +218,7 @@ class RemoteAptContentsFileSearcher(FileSearcher):
         for arch, mandatory in arches:
             urls.append(
                 ("%s/%s/Contents-%s" % (dists_url, name.rstrip('/'), arch), mandatory))
-        return cls.from_urls(urls, cache_dir=cache_dir)
+        return cls.from_urls(urls, cache_dirs=cache_dirs)
 
     @staticmethod
     def _get(url):
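
The hunks above teach the contents-file search to look in more than one apt lists directory (the schroot session's own var/lib/apt/lists as well as the host's /var/lib/apt/lists), and to fall back to downloading a Contents file only when no cached copy is found in any of them. A minimal sketch of that for/else lookup pattern, assuming a hypothetical load_cached() helper rather than the real ognibuild API:

import os
import urllib.request


def load_cached(url, cache_dir):
    # Hypothetical helper: return a cached copy of the contents file, or None.
    path = os.path.join(cache_dir, url.replace('/', '_'))
    return open(path, 'rb') if os.path.exists(path) else None


def load_contents(url, cache_dirs):
    # The else branch of the for loop runs only when the loop finishes
    # without break, i.e. when no cache directory held the file.
    for cache_dir in cache_dirs or []:
        f = load_cached(url, cache_dir)
        if f is not None:
            break
    else:
        f = urllib.request.urlopen(url)
    return f

With cache_dirs=[] the loop body never runs and the else branch fetches straight away, which in this sketch mirrors the old single-cache_dir behaviour when no cache was available.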


@@ -529,9 +529,9 @@ class AptResolver(Resolver):
             if apt_req is None:
                 still_missing.append(m)
             else:
-                apt_requirements.append(m)
-        self.apt.install(
-            [req.package for req in apt_requirements])
+                apt_requirements.append(apt_req)
+        if apt_requirements:
+            self.apt.install([r.package for r in apt_requirements])
         if still_missing:
             raise UnsatisfiedRequirements(still_missing)
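
The resolver hunk records the mapped apt requirement instead of the original missing item and skips apt entirely when nothing could be mapped. A small sketch of that guard, with a hypothetical Requirement dataclass standing in for ognibuild's requirement classes:

from dataclasses import dataclass
from typing import Callable, List


@dataclass
class Requirement:
    package: str


def install_requirements(apt_install: Callable[[List[str]], None],
                         requirements: List[Requirement]) -> None:
    # Only invoke the installer when at least one package was resolved,
    # so an empty resolution does not trigger a pointless apt run.
    if requirements:
        apt_install([r.package for r in requirements])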