229ce3b18982eb22e5f95000f7c0f95a0b9fc62a
[proj/portage.git] / pym / portage / dbapi / bintree.py
1 # Copyright 1998-2014 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 from __future__ import unicode_literals
5
6 __all__ = ["bindbapi", "binarytree"]
7
8 import portage
9 portage.proxy.lazyimport.lazyimport(globals(),
10 'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
11 'verify_all,_apply_hash_filter,_hash_filter',
12 'portage.dbapi.dep_expand:dep_expand',
13 'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list',
14 'portage.output:EOutput,colorize',
15 'portage.locks:lockfile,unlockfile',
16 'portage.package.ebuild.fetch:_check_distfile,_hide_url_passwd',
17 'portage.update:update_dbentries',
18 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \
19 'writemsg,writemsg_stdout',
20 'portage.util.listdir:listdir',
21 'portage.util._urlopen:urlopen@_urlopen',
22 'portage.versions:best,catpkgsplit,catsplit,_pkg_str',
23 )
24
25 from portage.cache.mappings import slot_dict_class
26 from portage.const import CACHE_PATH
27 from portage.dbapi.virtual import fakedbapi
28 from portage.dep import Atom, use_reduce, paren_enclose
29 from portage.exception import AlarmSignal, InvalidData, InvalidPackageName, \
30 ParseError, PermissionDenied, PortageException
31 from portage.localization import _
32 from portage import _movefile
33 from portage import os
34 from portage import _encodings
35 from portage import _unicode_decode
36 from portage import _unicode_encode
37
38 import codecs
39 import errno
40 import io
41 import stat
42 import subprocess
43 import sys
44 import tempfile
45 import textwrap
46 import traceback
47 import warnings
48 from gzip import GzipFile
49 from itertools import chain
50 try:
51 from urllib.parse import urlparse
52 except ImportError:
53 from urlparse import urlparse
54
55 if sys.hexversion >= 0x3000000:
56 # pylint: disable=W0622
57 _unicode = str
58 basestring = str
59 long = int
60 else:
61 _unicode = unicode
62
class UseCachedCopyOfRemoteIndex(Exception):
	"""
	Raised to signal that the local copy of the remote index is
	recent enough, so fetching the remote index can be skipped.
	"""
67
class bindbapi(fakedbapi):
	"""
	dbapi interface for binary packages managed by a binarytree.
	Metadata is read from either the local tbz2 xpak segment or the
	remote package index, and selectively cached for dep matching.
	"""
	_known_keys = frozenset(list(fakedbapi._known_keys) + \
		["CHOST", "repository", "USE"])

	def __init__(self, mybintree=None, **kwargs):
		fakedbapi.__init__(self, **kwargs)
		self.bintree = mybintree
		# NOTE(review): this attribute access raises AttributeError if
		# mybintree is None, despite the None default -- callers always
		# pass a binarytree in practice.
		self.move_ent = mybintree.move_ent
		self.cpvdict = {}
		self.cpdict = {}
		# Selectively cache metadata in order to optimize dep matching.
		self._aux_cache_keys = set(
			["BUILD_TIME", "CHOST", "DEPEND", "EAPI",
			"HDEPEND", "IUSE", "KEYWORDS",
			"LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
			"RDEPEND", "repository", "RESTRICT", "SLOT", "USE",
			"DEFINED_PHASES"
			])
		self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
		self._aux_cache = {}

	def match(self, *pargs, **kwargs):
		# Lazily populate the tree before delegating to fakedbapi.
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.match(self, *pargs, **kwargs)

	def cpv_exists(self, cpv, myrepo=None):
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.cpv_exists(self, cpv)

	def cpv_inject(self, cpv, **kwargs):
		# Drop any stale cached metadata for this cpv.
		self._aux_cache.pop(cpv, None)
		fakedbapi.cpv_inject(self, cpv, **kwargs)

	def cpv_remove(self, cpv):
		self._aux_cache.pop(cpv, None)
		fakedbapi.cpv_remove(self, cpv)

	def aux_get(self, mycpv, wants, myrepo=None):
		"""
		Return the metadata values named by wants for mycpv, reading
		from the selective cache, the remote index, or the local tbz2
		xpak data.
		@raises KeyError: if the local package file does not exist
		"""
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		# Cache the result only when every requested known key belongs
		# to the selective cache key set.
		cache_me = False
		if not self._known_keys.intersection(
			wants).difference(self._aux_cache_keys):
			aux_cache = self._aux_cache.get(mycpv)
			if aux_cache is not None:
				return [aux_cache.get(x, "") for x in wants]
			cache_me = True
		# NOTE: unused locals (mysplit, tbz2name) removed here.
		if not self.bintree._remotepkgs or \
			not self.bintree.isremote(mycpv):
			# Local package: read metadata from the tbz2's xpak segment.
			tbz2_path = self.bintree.getname(mycpv)
			if not os.path.exists(tbz2_path):
				raise KeyError(mycpv)
			metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
			def getitem(k):
				v = metadata_bytes.get(_unicode_encode(k,
					encoding=_encodings['repo.content'],
					errors='backslashreplace'))
				if v is not None:
					v = _unicode_decode(v,
						encoding=_encodings['repo.content'], errors='replace')
				return v
		else:
			getitem = self.bintree._remotepkgs[mycpv].get
		mydata = {}
		mykeys = wants
		if cache_me:
			mykeys = self._aux_cache_keys.union(wants)
		for x in mykeys:
			myval = getitem(x)
			# myval is None if the key doesn't exist
			# or the tbz2 is corrupt.
			if myval:
				mydata[x] = " ".join(myval.split())

		# A missing or empty EAPI is treated as the implicit EAPI 0.
		if not mydata.setdefault('EAPI', '0'):
			mydata['EAPI'] = '0'

		if cache_me:
			aux_cache = self._aux_cache_slot_dict()
			for x in self._aux_cache_keys:
				aux_cache[x] = mydata.get(x, '')
			self._aux_cache[mycpv] = aux_cache
		return [mydata.get(x, '') for x in wants]

	def aux_update(self, cpv, values):
		"""
		Write the given metadata values into the package's xpak segment
		(empty values delete the key) and re-inject the package.
		@raises KeyError: if the package file does not exist
		"""
		if not self.bintree.populated:
			self.bintree.populate()
		tbz2path = self.bintree.getname(cpv)
		if not os.path.exists(tbz2path):
			raise KeyError(cpv)
		mytbz2 = portage.xpak.tbz2(tbz2path)
		mydata = mytbz2.get_data()

		for k, v in values.items():
			k = _unicode_encode(k,
				encoding=_encodings['repo.content'], errors='backslashreplace')
			v = _unicode_encode(v,
				encoding=_encodings['repo.content'], errors='backslashreplace')
			mydata[k] = v

		# Empty values serve as deletion requests.
		for k, v in list(mydata.items()):
			if not v:
				del mydata[k]
		mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
		# inject will clear stale caches via cpv_inject.
		self.bintree.inject(cpv)

	def cp_list(self, *pargs, **kwargs):
		if not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.cp_list(self, *pargs, **kwargs)

	def cp_all(self):
		if not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.cp_all(self)

	def cpv_all(self):
		if not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.cpv_all(self)

	def getfetchsizes(self, pkg):
		"""
		This will raise MissingSignature if SIZE signature is not available,
		or InvalidSignature if SIZE signature is invalid.
		"""

		if not self.bintree.populated:
			self.bintree.populate()

		pkg = getattr(pkg, 'cpv', pkg)

		# Local packages need nothing fetched; only remote packages
		# contribute an entry. (Original used an inverted
		# "if not ...: pass / else:" structure.)
		filesdict = {}
		if self.bintree.isremote(pkg):
			metadata = self.bintree._remotepkgs[pkg]
			try:
				size = int(metadata["SIZE"])
			except KeyError:
				raise portage.exception.MissingSignature("SIZE")
			except ValueError:
				raise portage.exception.InvalidSignature(
					"SIZE: %s" % metadata["SIZE"])
			else:
				filesdict[os.path.basename(self.bintree.getname(pkg))] = size

		return filesdict
220
def _pkgindex_cpv_map_latest_build(pkgindex):
	"""
	Given a PackageIndex instance, create a dict of cpv -> metadata map.
	If multiple packages have identical CPV values, prefer the package
	with latest BUILD_TIME value.
	@param pkgindex: A PackageIndex instance.
	@type pkgindex: PackageIndex
	@rtype: dict
	@return: a dict containing entry for the given cpv.
	"""
	cpv_map = {}

	for d in pkgindex.packages:
		cpv = d["CPV"]

		try:
			cpv = _pkg_str(cpv)
		except InvalidData:
			writemsg(_("!!! Invalid remote binary package: %s\n") % cpv,
				noiselevel=-1)
			continue

		btime = d.get('BUILD_TIME', '')
		try:
			btime = int(btime)
		except ValueError:
			btime = None

		other_d = cpv_map.get(cpv)
		if other_d is not None:
			other_btime = other_d.get('BUILD_TIME', '')
			try:
				other_btime = int(other_btime)
			except ValueError:
				other_btime = None
			# Keep the existing entry when it is newer, or when the
			# candidate lacks a valid BUILD_TIME.
			if other_btime and (not btime or other_btime > btime):
				continue

		# cpv is already a _pkg_str instance here, so use it directly
		# instead of constructing a redundant second instance.
		cpv_map[cpv] = d

	return cpv_map
262
263 class binarytree(object):
264 "this tree scans for a list of all packages available in PKGDIR"
265 def __init__(self, _unused=DeprecationWarning, pkgdir=None,
266 virtual=DeprecationWarning, settings=None):
267
268 if pkgdir is None:
269 raise TypeError("pkgdir parameter is required")
270
271 if settings is None:
272 raise TypeError("settings parameter is required")
273
274 if _unused is not DeprecationWarning:
275 warnings.warn("The first parameter of the "
276 "portage.dbapi.bintree.binarytree"
277 " constructor is now unused. Instead "
278 "settings['ROOT'] is used.",
279 DeprecationWarning, stacklevel=2)
280
281 if virtual is not DeprecationWarning:
282 warnings.warn("The 'virtual' parameter of the "
283 "portage.dbapi.bintree.binarytree"
284 " constructor is unused",
285 DeprecationWarning, stacklevel=2)
286
287 if True:
288 self.pkgdir = normalize_path(pkgdir)
289 self.dbapi = bindbapi(self, settings=settings)
290 self.update_ents = self.dbapi.update_ents
291 self.move_slot_ent = self.dbapi.move_slot_ent
292 self.populated = 0
293 self.tree = {}
294 self._remote_has_index = False
295 self._remotepkgs = None # remote metadata indexed by cpv
296 self.invalids = []
297 self.settings = settings
298 self._pkg_paths = {}
299 self._pkgindex_uri = {}
300 self._populating = False
301 self._all_directory = os.path.isdir(
302 os.path.join(self.pkgdir, "All"))
303 self._pkgindex_version = 0
304 self._pkgindex_hashes = ["MD5","SHA1"]
305 self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
306 self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
307 self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
308 self._pkgindex_aux_keys = \
309 ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
310 "HDEPEND", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
311 "PROVIDE", "RESTRICT", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
312 "BASE_URI"]
313 self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
314 self._pkgindex_use_evaluated_keys = \
315 ("DEPEND", "HDEPEND", "LICENSE", "RDEPEND",
316 "PDEPEND", "PROPERTIES", "PROVIDE", "RESTRICT")
317 self._pkgindex_header_keys = set([
318 "ACCEPT_KEYWORDS", "ACCEPT_LICENSE",
319 "ACCEPT_PROPERTIES", "ACCEPT_RESTRICT", "CBUILD",
320 "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES",
321 "GENTOO_MIRRORS", "INSTALL_MASK", "IUSE_IMPLICIT", "USE",
322 "USE_EXPAND", "USE_EXPAND_HIDDEN", "USE_EXPAND_IMPLICIT",
323 "USE_EXPAND_UNPREFIXED"])
324 self._pkgindex_default_pkg_data = {
325 "BUILD_TIME" : "",
326 "DEFINED_PHASES" : "",
327 "DEPEND" : "",
328 "EAPI" : "0",
329 "HDEPEND" : "",
330 "IUSE" : "",
331 "KEYWORDS": "",
332 "LICENSE" : "",
333 "PATH" : "",
334 "PDEPEND" : "",
335 "PROPERTIES" : "",
336 "PROVIDE" : "",
337 "RDEPEND" : "",
338 "RESTRICT": "",
339 "SLOT" : "0",
340 "USE" : "",
341 }
342 self._pkgindex_inherited_keys = ["CHOST", "repository"]
343
344 # Populate the header with appropriate defaults.
345 self._pkgindex_default_header_data = {
346 "CHOST" : self.settings.get("CHOST", ""),
347 "repository" : "",
348 }
349
350 # It is especially important to populate keys like
351 # "repository" that save space when entries can
352 # inherit them from the header. If an existing
353 # pkgindex header already defines these keys, then
354 # they will appropriately override our defaults.
355 main_repo = self.settings.repositories.mainRepo()
356 if main_repo is not None and not main_repo.missing_repo_name:
357 self._pkgindex_default_header_data["repository"] = \
358 main_repo.name
359
360 self._pkgindex_translated_keys = (
361 ("DESCRIPTION" , "DESC"),
362 ("repository" , "REPO"),
363 )
364
365 self._pkgindex_allowed_pkg_keys = set(chain(
366 self._pkgindex_keys,
367 self._pkgindex_aux_keys,
368 self._pkgindex_hashes,
369 self._pkgindex_default_pkg_data,
370 self._pkgindex_inherited_keys,
371 chain(*self._pkgindex_translated_keys)
372 ))
373
374 @property
375 def root(self):
376 warnings.warn("The root attribute of "
377 "portage.dbapi.bintree.binarytree"
378 " is deprecated. Use "
379 "settings['ROOT'] instead.",
380 DeprecationWarning, stacklevel=3)
381 return self.settings['ROOT']
382
383 def move_ent(self, mylist, repo_match=None):
384 if not self.populated:
385 self.populate()
386 origcp = mylist[1]
387 newcp = mylist[2]
388 # sanity check
389 for atom in (origcp, newcp):
390 if not isjustname(atom):
391 raise InvalidPackageName(str(atom))
392 mynewcat = catsplit(newcp)[0]
393 origmatches=self.dbapi.cp_list(origcp)
394 moves = 0
395 if not origmatches:
396 return moves
397 for mycpv in origmatches:
398 try:
399 mycpv = self.dbapi._pkg_str(mycpv, None)
400 except (KeyError, InvalidData):
401 continue
402 mycpv_cp = portage.cpv_getkey(mycpv)
403 if mycpv_cp != origcp:
404 # Ignore PROVIDE virtual match.
405 continue
406 if repo_match is not None \
407 and not repo_match(mycpv.repo):
408 continue
409
410 # Use isvalidatom() to check if this move is valid for the
411 # EAPI (characters allowed in package names may vary).
412 if not isvalidatom(newcp, eapi=mycpv.eapi):
413 continue
414
415 mynewcpv = mycpv.replace(mycpv_cp, _unicode(newcp), 1)
416 myoldpkg = catsplit(mycpv)[1]
417 mynewpkg = catsplit(mynewcpv)[1]
418
419 if (mynewpkg != myoldpkg) and os.path.exists(self.getname(mynewcpv)):
420 writemsg(_("!!! Cannot update binary: Destination exists.\n"),
421 noiselevel=-1)
422 writemsg("!!! "+mycpv+" -> "+mynewcpv+"\n", noiselevel=-1)
423 continue
424
425 tbz2path = self.getname(mycpv)
426 if os.path.exists(tbz2path) and not os.access(tbz2path,os.W_OK):
427 writemsg(_("!!! Cannot update readonly binary: %s\n") % mycpv,
428 noiselevel=-1)
429 continue
430
431 moves += 1
432 mytbz2 = portage.xpak.tbz2(tbz2path)
433 mydata = mytbz2.get_data()
434 updated_items = update_dbentries([mylist], mydata, parent=mycpv)
435 mydata.update(updated_items)
436 mydata[b'PF'] = \
437 _unicode_encode(mynewpkg + "\n",
438 encoding=_encodings['repo.content'])
439 mydata[b'CATEGORY'] = \
440 _unicode_encode(mynewcat + "\n",
441 encoding=_encodings['repo.content'])
442 if mynewpkg != myoldpkg:
443 ebuild_data = mydata.pop(_unicode_encode(myoldpkg + '.ebuild',
444 encoding=_encodings['repo.content']), None)
445 if ebuild_data is not None:
446 mydata[_unicode_encode(mynewpkg + '.ebuild',
447 encoding=_encodings['repo.content'])] = ebuild_data
448
449 mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
450
451 self.dbapi.cpv_remove(mycpv)
452 del self._pkg_paths[mycpv]
453 new_path = self.getname(mynewcpv)
454 self._pkg_paths[mynewcpv] = os.path.join(
455 *new_path.split(os.path.sep)[-2:])
456 if new_path != mytbz2:
457 self._ensure_dir(os.path.dirname(new_path))
458 _movefile(tbz2path, new_path, mysettings=self.settings)
459 self._remove_symlink(mycpv)
460 if new_path.split(os.path.sep)[-2] == "All":
461 self._create_symlink(mynewcpv)
462 self.inject(mynewcpv)
463
464 return moves
465
466 def _remove_symlink(self, cpv):
467 """Remove a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink and also remove
468 the ${PKGDIR}/${CATEGORY} directory if empty. The file will not be
469 removed if os.path.islink() returns False."""
470 mycat, mypkg = catsplit(cpv)
471 mylink = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
472 if os.path.islink(mylink):
473 """Only remove it if it's really a link so that this method never
474 removes a real package that was placed here to avoid a collision."""
475 os.unlink(mylink)
476 try:
477 os.rmdir(os.path.join(self.pkgdir, mycat))
478 except OSError as e:
479 if e.errno not in (errno.ENOENT,
480 errno.ENOTEMPTY, errno.EEXIST):
481 raise
482 del e
483
484 def _create_symlink(self, cpv):
485 """Create a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink (and
486 ${PKGDIR}/${CATEGORY} directory, if necessary). Any file that may
487 exist in the location of the symlink will first be removed."""
488 mycat, mypkg = catsplit(cpv)
489 full_path = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
490 self._ensure_dir(os.path.dirname(full_path))
491 try:
492 os.unlink(full_path)
493 except OSError as e:
494 if e.errno != errno.ENOENT:
495 raise
496 del e
497 os.symlink(os.path.join("..", "All", mypkg + ".tbz2"), full_path)
498
499 def prevent_collision(self, cpv):
500 """Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
501 use for a given cpv. If a collision will occur with an existing
502 package from another category, the existing package will be bumped to
503 ${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
504 if not self._all_directory:
505 return
506
507 # Copy group permissions for new directories that
508 # may have been created.
509 for path in ("All", catsplit(cpv)[0]):
510 path = os.path.join(self.pkgdir, path)
511 self._ensure_dir(path)
512 if not os.access(path, os.W_OK):
513 raise PermissionDenied("access('%s', W_OK)" % path)
514
515 full_path = self.getname(cpv)
516 if "All" == full_path.split(os.path.sep)[-2]:
517 return
518 """Move a colliding package if it exists. Code below this point only
519 executes in rare cases."""
520 mycat, mypkg = catsplit(cpv)
521 myfile = mypkg + ".tbz2"
522 mypath = os.path.join("All", myfile)
523 dest_path = os.path.join(self.pkgdir, mypath)
524
525 try:
526 st = os.lstat(dest_path)
527 except OSError:
528 st = None
529 else:
530 if stat.S_ISLNK(st.st_mode):
531 st = None
532 try:
533 os.unlink(dest_path)
534 except OSError:
535 if os.path.exists(dest_path):
536 raise
537
538 if st is not None:
539 # For invalid packages, other_cat could be None.
540 other_cat = portage.xpak.tbz2(dest_path).getfile(b"CATEGORY")
541 if other_cat:
542 other_cat = _unicode_decode(other_cat,
543 encoding=_encodings['repo.content'], errors='replace')
544 other_cat = other_cat.strip()
545 other_cpv = other_cat + "/" + mypkg
546 self._move_from_all(other_cpv)
547 self.inject(other_cpv)
548 self._move_to_all(cpv)
549
550 def _ensure_dir(self, path):
551 """
552 Create the specified directory. Also, copy gid and group mode
553 bits from self.pkgdir if possible.
554 @param cat_dir: Absolute path of the directory to be created.
555 @type cat_dir: String
556 """
557 try:
558 pkgdir_st = os.stat(self.pkgdir)
559 except OSError:
560 ensure_dirs(path)
561 return
562 pkgdir_gid = pkgdir_st.st_gid
563 pkgdir_grp_mode = 0o2070 & pkgdir_st.st_mode
564 try:
565 ensure_dirs(path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0)
566 except PortageException:
567 if not os.path.isdir(path):
568 raise
569
570 def _file_permissions(self, path):
571 try:
572 pkgdir_st = os.stat(self.pkgdir)
573 except OSError:
574 pass
575 else:
576 pkgdir_gid = pkgdir_st.st_gid
577 pkgdir_grp_mode = 0o0060 & pkgdir_st.st_mode
578 try:
579 portage.util.apply_permissions(path, gid=pkgdir_gid,
580 mode=pkgdir_grp_mode, mask=0)
581 except PortageException:
582 pass
583
584 def _move_to_all(self, cpv):
585 """If the file exists, move it. Whether or not it exists, update state
586 for future getname() calls."""
587 mycat, mypkg = catsplit(cpv)
588 myfile = mypkg + ".tbz2"
589 self._pkg_paths[cpv] = os.path.join("All", myfile)
590 src_path = os.path.join(self.pkgdir, mycat, myfile)
591 try:
592 mystat = os.lstat(src_path)
593 except OSError as e:
594 mystat = None
595 if mystat and stat.S_ISREG(mystat.st_mode):
596 self._ensure_dir(os.path.join(self.pkgdir, "All"))
597 dest_path = os.path.join(self.pkgdir, "All", myfile)
598 _movefile(src_path, dest_path, mysettings=self.settings)
599 self._create_symlink(cpv)
600 self.inject(cpv)
601
602 def _move_from_all(self, cpv):
603 """Move a package from ${PKGDIR}/All/${PF}.tbz2 to
604 ${PKGDIR}/${CATEGORY}/${PF}.tbz2 and update state from getname calls."""
605 self._remove_symlink(cpv)
606 mycat, mypkg = catsplit(cpv)
607 myfile = mypkg + ".tbz2"
608 mypath = os.path.join(mycat, myfile)
609 dest_path = os.path.join(self.pkgdir, mypath)
610 self._ensure_dir(os.path.dirname(dest_path))
611 src_path = os.path.join(self.pkgdir, "All", myfile)
612 _movefile(src_path, dest_path, mysettings=self.settings)
613 self._pkg_paths[cpv] = mypath
614
615 def populate(self, getbinpkgs=0):
616 "populates the binarytree"
617
618 if self._populating:
619 return
620
621 pkgindex_lock = None
622 try:
623 if os.access(self.pkgdir, os.W_OK):
624 pkgindex_lock = lockfile(self._pkgindex_file,
625 wantnewlockfile=1)
626 self._populating = True
627 self._populate(getbinpkgs)
628 finally:
629 if pkgindex_lock:
630 unlockfile(pkgindex_lock)
631 self._populating = False
632
633 def _populate(self, getbinpkgs=0):
634 if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
635 return 0
636
637 # Clear all caches in case populate is called multiple times
638 # as may be the case when _global_updates calls populate()
639 # prior to performing package moves since it only wants to
640 # operate on local packages (getbinpkgs=0).
641 self._remotepkgs = None
642 self.dbapi._clear_cache()
643 self.dbapi._aux_cache.clear()
644 if True:
645 pkg_paths = {}
646 self._pkg_paths = pkg_paths
647 dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
648 if "All" in dirs:
649 dirs.remove("All")
650 dirs.sort()
651 dirs.insert(0, "All")
652 pkgindex = self._load_pkgindex()
653 pf_index = None
654 if not self._pkgindex_version_supported(pkgindex):
655 pkgindex = self._new_pkgindex()
656 header = pkgindex.header
657 metadata = {}
658 for d in pkgindex.packages:
659 metadata[d["CPV"]] = d
660 update_pkgindex = False
661 for mydir in dirs:
662 for myfile in listdir(os.path.join(self.pkgdir, mydir)):
663 if not myfile.endswith(".tbz2"):
664 continue
665 mypath = os.path.join(mydir, myfile)
666 full_path = os.path.join(self.pkgdir, mypath)
667 s = os.lstat(full_path)
668 if stat.S_ISLNK(s.st_mode):
669 continue
670
671 # Validate data from the package index and try to avoid
672 # reading the xpak if possible.
673 if mydir != "All":
674 possibilities = None
675 d = metadata.get(mydir+"/"+myfile[:-5])
676 if d:
677 possibilities = [d]
678 else:
679 if pf_index is None:
680 pf_index = {}
681 for mycpv in metadata:
682 mycat, mypf = catsplit(mycpv)
683 pf_index.setdefault(
684 mypf, []).append(metadata[mycpv])
685 possibilities = pf_index.get(myfile[:-5])
686 if possibilities:
687 match = None
688 for d in possibilities:
689 try:
690 if long(d["MTIME"]) != s[stat.ST_MTIME]:
691 continue
692 except (KeyError, ValueError):
693 continue
694 try:
695 if long(d["SIZE"]) != long(s.st_size):
696 continue
697 except (KeyError, ValueError):
698 continue
699 if not self._pkgindex_keys.difference(d):
700 match = d
701 break
702 if match:
703 mycpv = match["CPV"]
704 if mycpv in pkg_paths:
705 # discard duplicates (All/ is preferred)
706 continue
707 mycpv = _pkg_str(mycpv)
708 pkg_paths[mycpv] = mypath
709 # update the path if the package has been moved
710 oldpath = d.get("PATH")
711 if oldpath and oldpath != mypath:
712 update_pkgindex = True
713 if mypath != mycpv + ".tbz2":
714 d["PATH"] = mypath
715 if not oldpath:
716 update_pkgindex = True
717 else:
718 d.pop("PATH", None)
719 if oldpath:
720 update_pkgindex = True
721 self.dbapi.cpv_inject(mycpv)
722 if not self.dbapi._aux_cache_keys.difference(d):
723 aux_cache = self.dbapi._aux_cache_slot_dict()
724 for k in self.dbapi._aux_cache_keys:
725 aux_cache[k] = d[k]
726 self.dbapi._aux_cache[mycpv] = aux_cache
727 continue
728 if not os.access(full_path, os.R_OK):
729 writemsg(_("!!! Permission denied to read " \
730 "binary package: '%s'\n") % full_path,
731 noiselevel=-1)
732 self.invalids.append(myfile[:-5])
733 continue
734 metadata_bytes = portage.xpak.tbz2(full_path).get_data()
735 mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
736 encoding=_encodings['repo.content'], errors='replace')
737 mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
738 encoding=_encodings['repo.content'], errors='replace')
739 slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
740 encoding=_encodings['repo.content'], errors='replace')
741 mypkg = myfile[:-5]
742 if not mycat or not mypf or not slot:
743 #old-style or corrupt package
744 writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
745 noiselevel=-1)
746 missing_keys = []
747 if not mycat:
748 missing_keys.append("CATEGORY")
749 if not mypf:
750 missing_keys.append("PF")
751 if not slot:
752 missing_keys.append("SLOT")
753 msg = []
754 if missing_keys:
755 missing_keys.sort()
756 msg.append(_("Missing metadata key(s): %s.") % \
757 ", ".join(missing_keys))
758 msg.append(_(" This binary package is not " \
759 "recoverable and should be deleted."))
760 for line in textwrap.wrap("".join(msg), 72):
761 writemsg("!!! %s\n" % line, noiselevel=-1)
762 self.invalids.append(mypkg)
763 continue
764 mycat = mycat.strip()
765 slot = slot.strip()
766 if mycat != mydir and mydir != "All":
767 continue
768 if mypkg != mypf.strip():
769 continue
770 mycpv = mycat + "/" + mypkg
771 if mycpv in pkg_paths:
772 # All is first, so it's preferred.
773 continue
774 if not self.dbapi._category_re.match(mycat):
775 writemsg(_("!!! Binary package has an " \
776 "unrecognized category: '%s'\n") % full_path,
777 noiselevel=-1)
778 writemsg(_("!!! '%s' has a category that is not" \
779 " listed in %setc/portage/categories\n") % \
780 (mycpv, self.settings["PORTAGE_CONFIGROOT"]),
781 noiselevel=-1)
782 continue
783 mycpv = _pkg_str(mycpv)
784 pkg_paths[mycpv] = mypath
785 self.dbapi.cpv_inject(mycpv)
786 update_pkgindex = True
787 d = metadata.get(mycpv, {})
788 if d:
789 try:
790 if long(d["MTIME"]) != s[stat.ST_MTIME]:
791 d.clear()
792 except (KeyError, ValueError):
793 d.clear()
794 if d:
795 try:
796 if long(d["SIZE"]) != long(s.st_size):
797 d.clear()
798 except (KeyError, ValueError):
799 d.clear()
800
801 d["CPV"] = mycpv
802 d["SLOT"] = slot
803 d["MTIME"] = str(s[stat.ST_MTIME])
804 d["SIZE"] = str(s.st_size)
805
806 d.update(zip(self._pkgindex_aux_keys,
807 self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
808 try:
809 self._eval_use_flags(mycpv, d)
810 except portage.exception.InvalidDependString:
811 writemsg(_("!!! Invalid binary package: '%s'\n") % \
812 self.getname(mycpv), noiselevel=-1)
813 self.dbapi.cpv_remove(mycpv)
814 del pkg_paths[mycpv]
815
816 # record location if it's non-default
817 if mypath != mycpv + ".tbz2":
818 d["PATH"] = mypath
819 else:
820 d.pop("PATH", None)
821 metadata[mycpv] = d
822 if not self.dbapi._aux_cache_keys.difference(d):
823 aux_cache = self.dbapi._aux_cache_slot_dict()
824 for k in self.dbapi._aux_cache_keys:
825 aux_cache[k] = d[k]
826 self.dbapi._aux_cache[mycpv] = aux_cache
827
828 for cpv in list(metadata):
829 if cpv not in pkg_paths:
830 del metadata[cpv]
831
832 # Do not bother to write the Packages index if $PKGDIR/All/ exists
833 # since it will provide no benefit due to the need to read CATEGORY
834 # from xpak.
835 if update_pkgindex and os.access(self.pkgdir, os.W_OK):
836 del pkgindex.packages[:]
837 pkgindex.packages.extend(iter(metadata.values()))
838 self._update_pkgindex_header(pkgindex.header)
839 self._pkgindex_write(pkgindex)
840
841 if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
842 writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
843 noiselevel=-1)
844
845 if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
846 self.populated=1
847 return
848 self._remotepkgs = {}
849 for base_url in self.settings["PORTAGE_BINHOST"].split():
850 parsed_url = urlparse(base_url)
851 host = parsed_url.netloc
852 port = parsed_url.port
853 user = None
854 passwd = None
855 user_passwd = ""
856 if "@" in host:
857 user, host = host.split("@", 1)
858 user_passwd = user + "@"
859 if ":" in user:
860 user, passwd = user.split(":", 1)
861 port_args = []
862 if port is not None:
863 port_str = ":%s" % (port,)
864 if host.endswith(port_str):
865 host = host[:-len(port_str)]
866 pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
867 host, parsed_url.path.lstrip("/"), "Packages")
868 pkgindex = self._new_pkgindex()
869 try:
870 f = io.open(_unicode_encode(pkgindex_file,
871 encoding=_encodings['fs'], errors='strict'),
872 mode='r', encoding=_encodings['repo.content'],
873 errors='replace')
874 try:
875 pkgindex.read(f)
876 finally:
877 f.close()
878 except EnvironmentError as e:
879 if e.errno != errno.ENOENT:
880 raise
881 local_timestamp = pkgindex.header.get("TIMESTAMP", None)
882 remote_timestamp = None
883 rmt_idx = self._new_pkgindex()
884 proc = None
885 tmp_filename = None
886 try:
887 # urlparse.urljoin() only works correctly with recognized
888 # protocols and requires the base url to have a trailing
889 # slash, so join manually...
890 url = base_url.rstrip("/") + "/Packages"
891 f = None
892
893 # Don't use urlopen for https, since it doesn't support
894 # certificate/hostname verification (bug #469888).
895 if parsed_url.scheme not in ('https',):
896 try:
897 f = _urlopen(url, if_modified_since=local_timestamp)
898 if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
899 remote_timestamp = f.headers.get('timestamp')
900 except IOError as err:
901 if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
902 raise UseCachedCopyOfRemoteIndex()
903
904 if parsed_url.scheme in ('ftp', 'http', 'https'):
905 # This protocol is supposedly supported by urlopen,
906 # so apparently there's a problem with the url
907 # or a bug in urlopen.
908 if self.settings.get("PORTAGE_DEBUG", "0") != "0":
909 traceback.print_exc()
910
911 raise
912 except ValueError:
913 raise ParseError("Invalid Portage BINHOST value '%s'"
914 % url.lstrip())
915
916 if f is None:
917
918 path = parsed_url.path.rstrip("/") + "/Packages"
919
920 if parsed_url.scheme == 'ssh':
921 # Use a pipe so that we can terminate the download
922 # early if we detect that the TIMESTAMP header
923 # matches that of the cached Packages file.
924 ssh_args = ['ssh']
925 if port is not None:
926 ssh_args.append("-p%s" % (port,))
927 # NOTE: shlex evaluates embedded quotes
928 ssh_args.extend(portage.util.shlex_split(
929 self.settings.get("PORTAGE_SSH_OPTS", "")))
930 ssh_args.append(user_passwd + host)
931 ssh_args.append('--')
932 ssh_args.append('cat')
933 ssh_args.append(path)
934
935 proc = subprocess.Popen(ssh_args,
936 stdout=subprocess.PIPE)
937 f = proc.stdout
938 else:
939 setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
940 fcmd = self.settings.get(setting)
941 if not fcmd:
942 fcmd = self.settings.get('FETCHCOMMAND')
943 if not fcmd:
944 raise EnvironmentError("FETCHCOMMAND is unset")
945
946 fd, tmp_filename = tempfile.mkstemp()
947 tmp_dirname, tmp_basename = os.path.split(tmp_filename)
948 os.close(fd)
949
950 fcmd_vars = {
951 "DISTDIR": tmp_dirname,
952 "FILE": tmp_basename,
953 "URI": url
954 }
955
956 for k in ("PORTAGE_SSH_OPTS",):
957 try:
958 fcmd_vars[k] = self.settings[k]
959 except KeyError:
960 pass
961
962 success = portage.getbinpkg.file_get(
963 fcmd=fcmd, fcmd_vars=fcmd_vars)
964 if not success:
965 raise EnvironmentError("%s failed" % (setting,))
966 f = open(tmp_filename, 'rb')
967
968 f_dec = codecs.iterdecode(f,
969 _encodings['repo.content'], errors='replace')
970 try:
971 rmt_idx.readHeader(f_dec)
972 if not remote_timestamp: # in case it had not been read from HTTP header
973 remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
974 if not remote_timestamp:
975 # no timestamp in the header, something's wrong
976 pkgindex = None
977 writemsg(_("\n\n!!! Binhost package index " \
978 " has no TIMESTAMP field.\n"), noiselevel=-1)
979 else:
980 if not self._pkgindex_version_supported(rmt_idx):
981 writemsg(_("\n\n!!! Binhost package index version" \
982 " is not supported: '%s'\n") % \
983 rmt_idx.header.get("VERSION"), noiselevel=-1)
984 pkgindex = None
985 elif local_timestamp != remote_timestamp:
986 rmt_idx.readBody(f_dec)
987 pkgindex = rmt_idx
988 finally:
989 # Timeout after 5 seconds, in case close() blocks
990 # indefinitely (see bug #350139).
991 try:
992 try:
993 AlarmSignal.register(5)
994 f.close()
995 finally:
996 AlarmSignal.unregister()
997 except AlarmSignal:
998 writemsg("\n\n!!! %s\n" % \
999 _("Timed out while closing connection to binhost"),
1000 noiselevel=-1)
1001 except UseCachedCopyOfRemoteIndex:
1002 writemsg_stdout("\n")
1003 writemsg_stdout(
1004 colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
1005 "\n")
1006 rmt_idx = pkgindex
1007 except EnvironmentError as e:
1008 writemsg(_("\n\n!!! Error fetching binhost package" \
1009 " info from '%s'\n") % _hide_url_passwd(base_url))
1010 writemsg("!!! %s\n\n" % str(e))
1011 del e
1012 pkgindex = None
1013 if proc is not None:
1014 if proc.poll() is None:
1015 proc.kill()
1016 proc.wait()
1017 proc = None
1018 if tmp_filename is not None:
1019 try:
1020 os.unlink(tmp_filename)
1021 except OSError:
1022 pass
1023 if pkgindex is rmt_idx:
1024 pkgindex.modified = False # don't update the header
1025 try:
1026 ensure_dirs(os.path.dirname(pkgindex_file))
1027 f = atomic_ofstream(pkgindex_file)
1028 pkgindex.write(f)
1029 f.close()
1030 except (IOError, PortageException):
1031 if os.access(os.path.dirname(pkgindex_file), os.W_OK):
1032 raise
1033 # The current user doesn't have permission to cache the
1034 # file, but that's alright.
1035 if pkgindex:
1036 # Organize remote package list as a cpv -> metadata map.
1037 remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
1038 remote_base_uri = pkgindex.header.get("URI", base_url)
1039 for cpv, remote_metadata in remotepkgs.items():
1040 remote_metadata["BASE_URI"] = remote_base_uri
1041 self._pkgindex_uri[cpv] = url
1042 self._remotepkgs.update(remotepkgs)
1043 self._remote_has_index = True
1044 for cpv in remotepkgs:
1045 self.dbapi.cpv_inject(cpv)
1046 if True:
1047 # Remote package instances override local package
1048 # if they are not identical.
1049 hash_names = ["SIZE"] + self._pkgindex_hashes
1050 for cpv, local_metadata in metadata.items():
1051 remote_metadata = self._remotepkgs.get(cpv)
1052 if remote_metadata is None:
1053 continue
1054 # Use digests to compare identity.
1055 identical = True
1056 for hash_name in hash_names:
1057 local_value = local_metadata.get(hash_name)
1058 if local_value is None:
1059 continue
1060 remote_value = remote_metadata.get(hash_name)
1061 if remote_value is None:
1062 continue
1063 if local_value != remote_value:
1064 identical = False
1065 break
1066 if identical:
1067 del self._remotepkgs[cpv]
1068 else:
1069 # Override the local package in the aux_get cache.
1070 self.dbapi._aux_cache[cpv] = remote_metadata
1071 else:
1072 # Local package instances override remote instances.
1073 for cpv in metadata:
1074 self._remotepkgs.pop(cpv, None)
1075
1076 self.populated=1
1077
	def inject(self, cpv, filename=None):
		"""Add a freshly built package to the database. This updates
		$PKGDIR/Packages with the new package metadata (including MD5).
		@param cpv: The cpv of the new package to inject
		@type cpv: string
		@param filename: File path of the package to inject, or None if it's
			already in the location returned by getname()
		@type filename: string
		@rtype: None
		"""
		mycat, mypkg = catsplit(cpv)
		if not self.populated:
			self.populate()
		if filename is None:
			full_path = self.getname(cpv)
		else:
			full_path = filename
		try:
			# Stat only to verify that the package file actually exists.
			s = os.stat(full_path)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			writemsg(_("!!! Binary package does not exist: '%s'\n") % full_path,
				noiselevel=-1)
			return
		mytbz2 = portage.xpak.tbz2(full_path)
		slot = mytbz2.getfile("SLOT")
		if slot is None:
			# A binary package without SLOT metadata is considered invalid.
			writemsg(_("!!! Invalid binary package: '%s'\n") % full_path,
				noiselevel=-1)
			return
		slot = slot.strip()
		self.dbapi.cpv_inject(cpv)

		# Reread the Packages index (in case it's been changed by another
		# process) and then update it, all while holding a lock.
		pkgindex_lock = None
		created_symlink = False
		try:
			pkgindex_lock = lockfile(self._pkgindex_file,
				wantnewlockfile=1)
			if filename is not None:
				# Move the file to its canonical location unless it is
				# already the same file (e.g. a hard link).
				new_filename = self.getname(cpv)
				try:
					samefile = os.path.samefile(filename, new_filename)
				except OSError:
					samefile = False
				if not samefile:
					self._ensure_dir(os.path.dirname(new_filename))
					_movefile(filename, new_filename, mysettings=self.settings)
				full_path = new_filename

			self._file_permissions(full_path)

			if self._all_directory and \
				self.getname(cpv).split(os.path.sep)[-2] == "All":
				self._create_symlink(cpv)
				created_symlink = True
			pkgindex = self._load_pkgindex()

			# Start over with a fresh index if the on-disk one uses an
			# unsupported format version.
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()

			# Discard remote metadata to ensure that _pkgindex_entry
			# gets the local metadata. This also updates state for future
			# isremote calls.
			if self._remotepkgs is not None:
				self._remotepkgs.pop(cpv, None)

			# Discard cached metadata to ensure that _pkgindex_entry
			# doesn't return stale metadata.
			self.dbapi._aux_cache.pop(cpv, None)

			try:
				d = self._pkgindex_entry(cpv)
			except portage.exception.InvalidDependString:
				writemsg(_("!!! Invalid binary package: '%s'\n") % \
					self.getname(cpv), noiselevel=-1)
				self.dbapi.cpv_remove(cpv)
				del self._pkg_paths[cpv]
				return

			# If found, remove package(s) with duplicate path.
			# Iterate in reverse so deletions don't shift unseen indices.
			path = d.get("PATH", "")
			for i in range(len(pkgindex.packages) - 1, -1, -1):
				d2 = pkgindex.packages[i]
				if path and path == d2.get("PATH"):
					# Handle path collisions in $PKGDIR/All
					# when CPV is not identical.
					del pkgindex.packages[i]
				elif cpv == d2.get("CPV"):
					if path == d2.get("PATH", ""):
						del pkgindex.packages[i]
					elif created_symlink and not d2.get("PATH", ""):
						# Delete entry for the package that was just
						# overwritten by a symlink to this package.
						del pkgindex.packages[i]

			pkgindex.packages.append(d)

			self._update_pkgindex_header(pkgindex.header)
			self._pkgindex_write(pkgindex)

		finally:
			if pkgindex_lock:
				unlockfile(pkgindex_lock)
1185
1186 def _pkgindex_write(self, pkgindex):
1187 contents = codecs.getwriter(_encodings['repo.content'])(io.BytesIO())
1188 pkgindex.write(contents)
1189 contents = contents.getvalue()
1190 atime = mtime = long(pkgindex.header["TIMESTAMP"])
1191 output_files = [(atomic_ofstream(self._pkgindex_file, mode="wb"),
1192 self._pkgindex_file, None)]
1193
1194 if "compress-index" in self.settings.features:
1195 gz_fname = self._pkgindex_file + ".gz"
1196 fileobj = atomic_ofstream(gz_fname, mode="wb")
1197 output_files.append((GzipFile(filename='', mode="wb",
1198 fileobj=fileobj, mtime=mtime), gz_fname, fileobj))
1199
1200 for f, fname, f_close in output_files:
1201 f.write(contents)
1202 f.close()
1203 if f_close is not None:
1204 f_close.close()
1205 self._file_permissions(fname)
1206 # some seconds might have elapsed since TIMESTAMP
1207 os.utime(fname, (atime, mtime))
1208
1209 def _pkgindex_entry(self, cpv):
1210 """
1211 Performs checksums and evaluates USE flag conditionals.
1212 Raises InvalidDependString if necessary.
1213 @rtype: dict
1214 @return: a dict containing entry for the give cpv.
1215 """
1216
1217 pkg_path = self.getname(cpv)
1218
1219 d = dict(zip(self._pkgindex_aux_keys,
1220 self.dbapi.aux_get(cpv, self._pkgindex_aux_keys)))
1221
1222 d.update(perform_multiple_checksums(
1223 pkg_path, hashes=self._pkgindex_hashes))
1224
1225 d["CPV"] = cpv
1226 st = os.stat(pkg_path)
1227 d["MTIME"] = str(st[stat.ST_MTIME])
1228 d["SIZE"] = str(st.st_size)
1229
1230 rel_path = self._pkg_paths[cpv]
1231 # record location if it's non-default
1232 if rel_path != cpv + ".tbz2":
1233 d["PATH"] = rel_path
1234
1235 self._eval_use_flags(cpv, d)
1236 return d
1237
1238 def _new_pkgindex(self):
1239 return portage.getbinpkg.PackageIndex(
1240 allowed_pkg_keys=self._pkgindex_allowed_pkg_keys,
1241 default_header_data=self._pkgindex_default_header_data,
1242 default_pkg_data=self._pkgindex_default_pkg_data,
1243 inherited_keys=self._pkgindex_inherited_keys,
1244 translated_keys=self._pkgindex_translated_keys)
1245
1246 def _update_pkgindex_header(self, header):
1247 portdir = normalize_path(os.path.realpath(self.settings["PORTDIR"]))
1248 profiles_base = os.path.join(portdir, "profiles") + os.path.sep
1249 if self.settings.profile_path:
1250 profile_path = normalize_path(
1251 os.path.realpath(self.settings.profile_path))
1252 if profile_path.startswith(profiles_base):
1253 profile_path = profile_path[len(profiles_base):]
1254 header["PROFILE"] = profile_path
1255 header["VERSION"] = str(self._pkgindex_version)
1256 base_uri = self.settings.get("PORTAGE_BINHOST_HEADER_URI")
1257 if base_uri:
1258 header["URI"] = base_uri
1259 else:
1260 header.pop("URI", None)
1261 for k in self._pkgindex_header_keys:
1262 v = self.settings.get(k, None)
1263 if v:
1264 header[k] = v
1265 else:
1266 header.pop(k, None)
1267
1268 # These values may be useful for using a binhost without
1269 # having a local copy of the profile (bug #470006).
1270 for k in self.settings.get("USE_EXPAND_IMPLICIT", "").split():
1271 k = "USE_EXPAND_VALUES_" + k
1272 v = self.settings.get(k)
1273 if v:
1274 header[k] = v
1275 else:
1276 header.pop(k, None)
1277
1278 def _pkgindex_version_supported(self, pkgindex):
1279 version = pkgindex.header.get("VERSION")
1280 if version:
1281 try:
1282 if int(version) <= self._pkgindex_version:
1283 return True
1284 except ValueError:
1285 pass
1286 return False
1287
1288 def _eval_use_flags(self, cpv, metadata):
1289 use = frozenset(metadata["USE"].split())
1290 for k in self._pkgindex_use_evaluated_keys:
1291 if k.endswith('DEPEND'):
1292 token_class = Atom
1293 else:
1294 token_class = None
1295
1296 try:
1297 deps = metadata[k]
1298 deps = use_reduce(deps, uselist=use, token_class=token_class)
1299 deps = paren_enclose(deps)
1300 except portage.exception.InvalidDependString as e:
1301 writemsg("%s: %s\n" % (k, str(e)),
1302 noiselevel=-1)
1303 raise
1304 metadata[k] = deps
1305
1306 def exists_specific(self, cpv):
1307 if not self.populated:
1308 self.populate()
1309 return self.dbapi.match(
1310 dep_expand("="+cpv, mydb=self.dbapi, settings=self.settings))
1311
1312 def dep_bestmatch(self, mydep):
1313 "compatibility method -- all matches, not just visible ones"
1314 if not self.populated:
1315 self.populate()
1316 writemsg("\n\n", 1)
1317 writemsg("mydep: %s\n" % mydep, 1)
1318 mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
1319 writemsg("mydep: %s\n" % mydep, 1)
1320 mykey = dep_getkey(mydep)
1321 writemsg("mykey: %s\n" % mykey, 1)
1322 mymatch = best(match_from_list(mydep,self.dbapi.cp_list(mykey)))
1323 writemsg("mymatch: %s\n" % mymatch, 1)
1324 if mymatch is None:
1325 return ""
1326 return mymatch
1327
1328 def getname(self, pkgname):
1329 """Returns a file location for this package. The default location is
1330 ${PKGDIR}/All/${PF}.tbz2, but will be ${PKGDIR}/${CATEGORY}/${PF}.tbz2
1331 in the rare event of a collision. The prevent_collision() method can
1332 be called to ensure that ${PKGDIR}/All/${PF}.tbz2 is available for a
1333 specific cpv."""
1334 if not self.populated:
1335 self.populate()
1336 mycpv = pkgname
1337 mypath = self._pkg_paths.get(mycpv, None)
1338 if mypath:
1339 return os.path.join(self.pkgdir, mypath)
1340 mycat, mypkg = catsplit(mycpv)
1341 if self._all_directory:
1342 mypath = os.path.join("All", mypkg + ".tbz2")
1343 if mypath in self._pkg_paths.values():
1344 mypath = os.path.join(mycat, mypkg + ".tbz2")
1345 else:
1346 mypath = os.path.join(mycat, mypkg + ".tbz2")
1347 self._pkg_paths[mycpv] = mypath # cache for future lookups
1348 return os.path.join(self.pkgdir, mypath)
1349
1350 def isremote(self, pkgname):
1351 """Returns true if the package is kept remotely and it has not been
1352 downloaded (or it is only partially downloaded)."""
1353 if self._remotepkgs is None or pkgname not in self._remotepkgs:
1354 return False
1355 # Presence in self._remotepkgs implies that it's remote. When a
1356 # package is downloaded, state is updated by self.inject().
1357 return True
1358
1359 def get_pkgindex_uri(self, pkgname):
1360 """Returns the URI to the Packages file for a given package."""
1361 return self._pkgindex_uri.get(pkgname)
1362
1363
1364
	def gettbz2(self, pkgname):
		"""Fetches the package from a remote site, if necessary.  Attempts to
		resume if the file appears to be partially downloaded."""
		tbz2_path = self.getname(pkgname)
		tbz2name = os.path.basename(tbz2_path)
		resume = False
		if os.path.exists(tbz2_path):
			# tbz2name[:-5] strips the ".tbz2" suffix.
			if tbz2name[:-5] not in self.invalids:
				# Already present and not known-invalid; nothing to fetch.
				return
			else:
				resume = True
				writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"),
					noiselevel=-1)

		mydest = os.path.dirname(self.getname(pkgname))
		self._ensure_dir(mydest)
		# urljoin doesn't work correctly with unrecognized protocols like sftp
		if self._remote_has_index:
			rel_url = self._remotepkgs[pkgname].get("PATH")
			if not rel_url:
				rel_url = pkgname+".tbz2"
			remote_base_uri = self._remotepkgs[pkgname]["BASE_URI"]
			url = remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/")
		else:
			url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name
		protocol = urlparse(url)[0]
		fcmd_prefix = "FETCHCOMMAND"
		if resume:
			fcmd_prefix = "RESUMECOMMAND"
		# Prefer a protocol-specific command (e.g. FETCHCOMMAND_HTTP),
		# falling back to the generic setting.
		fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper())
		if not fcmd:
			fcmd = self.settings.get(fcmd_prefix)
		success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd)
		if not success:
			# Remove any partial download so a later attempt starts clean.
			try:
				os.unlink(self.getname(pkgname))
			except OSError:
				pass
			raise portage.exception.FileNotFound(mydest)
		# Register the newly downloaded package in the local index.
		self.inject(pkgname)
1405
1406 def _load_pkgindex(self):
1407 pkgindex = self._new_pkgindex()
1408 try:
1409 f = io.open(_unicode_encode(self._pkgindex_file,
1410 encoding=_encodings['fs'], errors='strict'),
1411 mode='r', encoding=_encodings['repo.content'],
1412 errors='replace')
1413 except EnvironmentError:
1414 pass
1415 else:
1416 try:
1417 pkgindex.read(f)
1418 finally:
1419 f.close()
1420 return pkgindex
1421
1422 def _get_digests(self, pkg):
1423
1424 try:
1425 cpv = pkg.cpv
1426 except AttributeError:
1427 cpv = pkg
1428
1429 digests = {}
1430 metadata = None
1431 if self._remotepkgs is None or cpv not in self._remotepkgs:
1432 for d in self._load_pkgindex().packages:
1433 if d["CPV"] == cpv:
1434 metadata = d
1435 break
1436 else:
1437 metadata = self._remotepkgs[cpv]
1438 if metadata is None:
1439 return digests
1440
1441 for k in hashfunc_map:
1442 v = metadata.get(k)
1443 if not v:
1444 continue
1445 digests[k] = v
1446
1447 if "SIZE" in metadata:
1448 try:
1449 digests["size"] = int(metadata["SIZE"])
1450 except ValueError:
1451 writemsg(_("!!! Malformed SIZE attribute in remote " \
1452 "metadata for '%s'\n") % cpv)
1453
1454 return digests
1455
1456 def digestCheck(self, pkg):
1457 """
1458 Verify digests for the given package and raise DigestException
1459 if verification fails.
1460 @rtype: bool
1461 @return: True if digests could be located, False otherwise.
1462 """
1463
1464 digests = self._get_digests(pkg)
1465
1466 if not digests:
1467 return False
1468
1469 try:
1470 cpv = pkg.cpv
1471 except AttributeError:
1472 cpv = pkg
1473
1474 pkg_path = self.getname(cpv)
1475 hash_filter = _hash_filter(
1476 self.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
1477 if not hash_filter.transparent:
1478 digests = _apply_hash_filter(digests, hash_filter)
1479 eout = EOutput()
1480 eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
1481 ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
1482 if not ok:
1483 ok, reason = verify_all(pkg_path, digests)
1484 if not ok:
1485 raise portage.exception.DigestException(
1486 (pkg_path,) + tuple(reason))
1487
1488 return True
1489
1490 def getslot(self, mycatpkg):
1491 "Get a slot for a catpkg; assume it exists."
1492 myslot = ""
1493 try:
1494 myslot = self.dbapi._pkg_str(mycatpkg, None).slot
1495 except KeyError:
1496 pass
1497 return myslot