portdbapi: fix volatile cache handling
author Zac Medico <zmedico@gentoo.org>
Mon, 17 Oct 2011 23:08:44 +0000 (16:08 -0700)
committer Zac Medico <zmedico@gentoo.org>
Mon, 17 Oct 2011 23:08:44 +0000 (16:08 -0700)
The metadata_overlay usage, that was triggered automatically for users
without depcachedir write access, has been broken since commit
2ed1cb53cc4158af08c22d466b15b9a9a7767212. The metadata_overlay class
is not compatible with the new validation code, so remove it. If users
have metadata_overlay configured as their cache module, fall back to
flat_hash.

pym/portage/cache/metadata_overlay.py [deleted file]
pym/portage/cache/volatile.py
pym/portage/dbapi/porttree.py
pym/portage/package/ebuild/config.py

diff --git a/pym/portage/cache/metadata_overlay.py b/pym/portage/cache/metadata_overlay.py
deleted file mode 100644 (file)
index cfa0051..0000000
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 1999-2010 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-
-from portage.cache import template
-from portage.cache.cache_errors import CacheCorruption
-from portage.cache.flat_hash import database as db_rw
-from portage.cache.metadata import database as db_ro
-
-class database(template.database):
-
-       serialize_eclasses = False
-
-       def __init__(self, location, label, auxdbkeys, db_rw=db_rw, db_ro=db_ro,
-               *args, **config):
-               super_config = config.copy()
-               super_config.pop("gid", None)
-               super_config.pop("perms", None)
-               super(database, self).__init__(location, label, auxdbkeys,
-                       *args, **super_config)
-               self.db_rw = db_rw(location, label, auxdbkeys, **config)
-               self.commit = self.db_rw.commit
-               self.autocommits = self.db_rw.autocommits
-               if isinstance(db_ro, type):
-                       ro_config = config.copy()
-                       ro_config["readonly"] = True
-                       self.db_ro = db_ro(label, "metadata/cache", auxdbkeys, **ro_config)
-               else:
-                       self.db_ro = db_ro
-
-       def __getitem__(self, cpv):
-               """funnel whiteout validation through here, since value needs to be fetched"""
-               try:
-                       value = self.db_rw[cpv]
-               except KeyError:
-                       return self.db_ro[cpv] # raises a KeyError when necessary
-               except CacheCorruption:
-                       del self.db_rw[cpv]
-                       return self.db_ro[cpv] # raises a KeyError when necessary
-               if self._is_whiteout(value):
-                       if self._is_whiteout_valid(cpv, value):
-                               raise KeyError(cpv)
-                       else:
-                               del self.db_rw[cpv]
-                               return self.db_ro[cpv] # raises a KeyError when necessary
-               else:
-                       return value
-
-       def _setitem(self, name, values):
-               try:
-                       value_ro = self.db_ro.get(name)
-               except CacheCorruption:
-                       value_ro = None
-               if value_ro is not None and \
-                       self._are_values_identical(value_ro, values):
-                       # we have matching values in the underlying db_ro
-                       # so it is unnecessary to store data in db_rw
-                       try:
-                               del self.db_rw[name] # delete unwanted whiteout when necessary
-                       except KeyError:
-                               pass
-                       return
-               self.db_rw[name] = values
-
-       def _delitem(self, cpv):
-               value = self[cpv] # validates whiteout and/or raises a KeyError when necessary
-               if cpv in self.db_ro:
-                       self.db_rw[cpv] = self._create_whiteout(value)
-               else:
-                       del self.db_rw[cpv]
-
-       def __contains__(self, cpv):
-               try:
-                       self[cpv] # validates whiteout when necessary
-               except KeyError:
-                       return False
-               return True
-
-       def __iter__(self):
-               s = set()
-               for cpv in self.db_rw:
-                       if cpv in self: # validates whiteout when necessary
-                               yield cpv
-                       # set includes whiteouts so they won't be yielded later
-                       s.add(cpv)
-               for cpv in self.db_ro:
-                       if cpv not in s:
-                               yield cpv
-
-       def _is_whiteout(self, value):
-               return value["EAPI"] == "whiteout"
-
-       def _create_whiteout(self, value):
-               return {"EAPI":"whiteout","_eclasses_":value["_eclasses_"],"_mtime_":value["_mtime_"]}
-
-       def _is_whiteout_valid(self, name, value_rw):
-               try:
-                       value_ro = self.db_ro[name]
-                       return self._are_values_identical(value_rw,value_ro)
-               except KeyError:
-                       return False
-
-       def _are_values_identical(self, value1, value2):
-               if value1['_mtime_'] != value2['_mtime_']:
-                       return False
-               return value1["_eclasses_"] == value2["_eclasses_"]
index 18049dddb3f1e34a7602c2ab038031186de4fa4d..a3c57f55f1fcff33d31d63b3e2fdd6df04d258cb 100644 (file)
@@ -8,6 +8,7 @@ class database(template.database):
 
        autocommits = True
        serialize_eclasses = False
+       store_eclass_paths = False
 
        def __init__(self, *args, **config):
                config.pop("gid", None)
@@ -16,10 +17,10 @@ class database(template.database):
                self._data = {}
                self._delitem = self._data.__delitem__
 
-       def _setitem(self, name, values):
+       def __setitem__(self, name, values):
                self._data[name] = copy.deepcopy(values)
 
-       def _getitem(self, cpv):
+       def __getitem__(self, cpv):
                return copy.deepcopy(self._data[cpv])
 
        def __iter__(self):
index 6ad7a6b376e39af29e1039c7c2899d9f62c12af3..aad72e14439135922c8403ce8bfddc24011be9b0 100644 (file)
@@ -17,7 +17,7 @@ portage.proxy.lazyimport.lazyimport(globals(),
        'portage.versions:best,catpkgsplit,_pkgsplit@pkgsplit,ver_regexp',
 )
 
-from portage.cache import metadata_overlay, volatile
+from portage.cache import volatile
 from portage.cache.cache_errors import CacheError
 from portage.cache.mappings import Mapping
 from portage.dbapi import dbapi
@@ -150,6 +150,9 @@ class portdbapi(dbapi):
                self.auxdbmodule = self.settings.load_best_module("portdbapi.auxdbmodule")
                self.auxdb = {}
                self._pregen_auxdb = {}
+               # If the current user doesn't have depcachedir write permission,
+               # then the depcachedir cache is kept here read-only access.
+               self._ro_auxdb = {}
                self._init_cache_dirs()
                try:
                        depcachedir_st = os.stat(self.depcachedir)
@@ -189,18 +192,14 @@ class portdbapi(dbapi):
                # to the cache entries/directories.
                if (secpass < 1 and not depcachedir_unshared) or not depcachedir_w_ok:
                        for x in self.porttrees:
+                               self.auxdb[x] = volatile.database(
+                                       self.depcachedir, x, filtered_auxdbkeys,
+                                       **cache_kwargs)
                                try:
-                                       db_ro = self.auxdbmodule(self.depcachedir, x,
+                                       self._ro_auxdb[x] = self.auxdbmodule(self.depcachedir, x,
                                                filtered_auxdbkeys, readonly=True, **cache_kwargs)
                                except CacheError:
-                                       self.auxdb[x] = volatile.database(
-                                               self.depcachedir, x, filtered_auxdbkeys,
-                                               **cache_kwargs)
-                               else:
-                                       self.auxdb[x] = metadata_overlay.database(
-                                               self.depcachedir, x, filtered_auxdbkeys,
-                                               db_rw=volatile.database, db_ro=db_ro,
-                                               **cache_kwargs)
+                                       pass
                else:
                        for x in self.porttrees:
                                if x in self.auxdb:
@@ -208,8 +207,6 @@ class portdbapi(dbapi):
                                # location, label, auxdbkeys
                                self.auxdb[x] = self.auxdbmodule(
                                        self.depcachedir, x, filtered_auxdbkeys, **cache_kwargs)
-                               if self.auxdbmodule is metadata_overlay.database:
-                                       self.auxdb[x].db_ro.ec = self._repo_info[x].eclass_db
                if "metadata-transfer" not in self.settings.features:
                        for x in self.porttrees:
                                if x in self._pregen_auxdb:
@@ -440,6 +437,9 @@ class portdbapi(dbapi):
                pregen_auxdb = self._pregen_auxdb.get(repo_path)
                if pregen_auxdb is not None:
                        auxdbs.append(pregen_auxdb)
+               ro_auxdb = self._ro_auxdb.get(repo_path)
+               if ro_auxdb is not None:
+                       auxdbs.append(ro_auxdb)
                auxdbs.append(self.auxdb[repo_path])
                eclass_db = self._repo_info[repo_path].eclass_db
 
index a80c82dd53d26a2db119e7747d85d69fd28b8be1..2739584c59d9a995eceb8ef1b7fd28c931daf9ee 100644 (file)
@@ -887,7 +887,11 @@ class config(object):
                                try:
                                        mod = load_mod(best_mod)
                                except ImportError:
-                                       raise
+                                       if best_mod == "portage.cache.metadata_overlay.database":
+                                               best_mod = "portage.cache.flat_hash.database"
+                                               mod = load_mod(best_mod)
+                                       else:
+                                               raise
                return mod
 
        def lock(self):