From: Sascha Silbe <sascha@silbe.org>
Subject: [PATCH] simplify jarabe.journal.model._Cache (#1651)
cache.remove_all(cache) doesn't work as expected because we're iterating
over the object we're modifying. The cache won't be empty after this call,
which breaks the caching algorithm.
Signed-off-by: Sascha Silbe <sascha@silbe.org>
---
src/jarabe/journal/model.py | 30 ++++++++++--------------------
1 files changed, 10 insertions(+), 20 deletions(-)
diff --git a/src/jarabe/journal/model.py b/src/jarabe/journal/model.py
index 85b4f46..4183deb 100644
a
|
b
|
class _Cache(object): |
49 | 49 | |
50 | 50 | def __init__(self, entries=None): |
51 | 51 | self._array = [] |
52 | | self._dict = {} |
53 | 52 | if entries is not None: |
54 | 53 | self.append_all(entries) |
55 | 54 | |
56 | 55 | def prepend_all(self, entries): |
57 | | for entry in entries[::-1]: |
58 | | self._array.insert(0, entry) |
59 | | self._dict[entry['uid']] = entry |
| 56 | self._array[0:0] = entries |
60 | 57 | |
61 | 58 | def append_all(self, entries): |
62 | | for entry in entries: |
63 | | self._array.append(entry) |
64 | | self._dict[entry['uid']] = entry |
65 | | |
66 | | def remove_all(self, entries): |
67 | | for uid in [entry['uid'] for entry in entries]: |
68 | | obj = self._dict[uid] |
69 | | self._array.remove(obj) |
70 | | del self._dict[uid] |
| 59 | self._array += entries |
71 | 60 | |
72 | 61 | def __len__(self): |
73 | 62 | return len(self._array) |
74 | 63 | |
75 | 64 | def __getitem__(self, key): |
76 | | if isinstance(key, basestring): |
77 | | return self._dict[key] |
78 | | else: |
79 | | return self._array[key] |
| 65 | return self._array[key] |
| 66 | |
| 67 | def __delitem__(self, key): |
| 68 | del self._array[key] |
| 69 | |
80 | 70 | |
81 | 71 | class BaseResultSet(object): |
82 | 72 | """Encapsulates the result of a query |
… |
… |
class BaseResultSet(object): |
148 | 138 | query['offset'] = offset |
149 | 139 | entries, self._total_count = self.find(query) |
150 | 140 | |
151 | | self._cache.remove_all(self._cache) |
| 141 | del self._cache[:] |
152 | 142 | self._cache.append_all(entries) |
153 | 143 | self._offset = offset |
154 | 144 | |
… |
… |
class BaseResultSet(object): |
170 | 160 | objects_excess = len(self._cache) - cache_limit |
171 | 161 | if objects_excess > 0: |
172 | 162 | self._offset += objects_excess |
173 | | self._cache.remove_all(self._cache[:objects_excess]) |
| 163 | del self._cache[:objects_excess] |
174 | 164 | |
175 | 165 | elif remaining_forward_entries > 0 and \ |
176 | 166 | remaining_backwards_entries <= 0 and self._offset > 0: |
… |
… |
class BaseResultSet(object): |
193 | 183 | cache_limit = self._page_size * MAX_PAGES_TO_CACHE |
194 | 184 | objects_excess = len(self._cache) - cache_limit |
195 | 185 | if objects_excess > 0: |
196 | | self._cache.remove_all(self._cache[-objects_excess:]) |
| 186 | del self._cache[-objects_excess:] |
197 | 187 | else: |
198 | 188 | logging.debug('cache hit and no need to grow the cache') |
199 | 189 | |