From 12400ec12eb32be37c41a4940b545797aae036a6 Mon Sep 17 00:00:00 2001 From: Abhilash Raj Date: Fri, 21 May 2021 14:44:18 -0700 Subject: [PATCH 01/74] Fix the URL to Django documentation for cache. --- docs/tutorial.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 8f8dc0f..58220b2 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -535,7 +535,7 @@ they are guaranteed to be stored in files. The full path is available on the file handle in the `name` attribute. Remember to also include the `Content-Type` header if known. -.. _`Django documentation on caching`: https://docs.djangoproject.com/en/1.9/topics/cache/#the-low-level-cache-api +.. _`Django documentation on caching`: https://docs.djangoproject.com/en/3.2/topics/cache/#the-low-level-cache-api Deque ----- From b460e7409d8d299e208500d640d69e55c1faccec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrgen=20Gmach?= Date: Mon, 7 Jun 2021 15:11:34 +0200 Subject: [PATCH 02/74] remove leftovers from Travis and AppVeyor Both were removed in favor of GitHub actions. 
--- docs/conf.py | 1 - tests/stress_test_deque_mp.py | 6 ------ tests/stress_test_index_mp.py | 6 ------ 3 files changed, 13 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 402d3ad..d725198 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,7 +59,6 @@ 'logo': 'gj-logo.png', 'logo_name': True, 'logo_text_align': 'center', - 'travis_button': True, 'analytics_id': 'UA-19364636-2', 'show_powered_by': False, 'show_related': True, diff --git a/tests/stress_test_deque_mp.py b/tests/stress_test_deque_mp.py index 091ff0e..d6bb56a 100644 --- a/tests/stress_test_deque_mp.py +++ b/tests/stress_test_deque_mp.py @@ -107,12 +107,6 @@ def stress(seed, deque): def test(status=False): - if os.environ.get('TRAVIS') == 'true': - return - - if os.environ.get('APPVEYOR') == 'True': - return - random.seed(SEED) deque = dc.Deque(range(SIZE)) processes = [] diff --git a/tests/stress_test_index_mp.py b/tests/stress_test_index_mp.py index f8718f0..2d290ec 100644 --- a/tests/stress_test_index_mp.py +++ b/tests/stress_test_index_mp.py @@ -94,12 +94,6 @@ def stress(seed, index): def test(status=False): - if os.environ.get('TRAVIS') == 'true': - return - - if os.environ.get('APPVEYOR') == 'True': - return - random.seed(SEED) index = dc.Index(enumerate(range(KEYS))) processes = [] From d20ddb3b2273dee41e73aa72ba182f4331ccba83 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BCrgen=20Gmach?= Date: Tue, 8 Jun 2021 11:05:03 +0200 Subject: [PATCH 03/74] remove unused imports --- tests/stress_test_deque_mp.py | 1 - tests/stress_test_index_mp.py | 1 - 2 files changed, 2 deletions(-) diff --git a/tests/stress_test_deque_mp.py b/tests/stress_test_deque_mp.py index d6bb56a..f3b8a48 100644 --- a/tests/stress_test_deque_mp.py +++ b/tests/stress_test_deque_mp.py @@ -2,7 +2,6 @@ import itertools as it import multiprocessing as mp -import os import random import time diff --git a/tests/stress_test_index_mp.py b/tests/stress_test_index_mp.py index 2d290ec..06ed102 100644 --- 
a/tests/stress_test_index_mp.py +++ b/tests/stress_test_index_mp.py @@ -2,7 +2,6 @@ import itertools as it import multiprocessing as mp -import os import random import time From d9b9d06614a41beff9ebce9b1bc8038d5540394e Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Tue, 8 Jun 2021 21:42:54 -0700 Subject: [PATCH 04/74] Ignore pylint's consider-using-with in Disk.fetch --- diskcache/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/diskcache/core.py b/diskcache/core.py index da4d884..4d0ae05 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -261,7 +261,7 @@ def fetch(self, mode, filename, value, read): :return: corresponding Python value """ - # pylint: disable=no-self-use,unidiomatic-typecheck + # pylint: disable=no-self-use,unidiomatic-typecheck,consider-using-with if mode == MODE_RAW: return bytes(value) if type(value) is sqlite3.Binary else value elif mode == MODE_BINARY: From fac9ad3ffdc289336b7c280e50ca26712f65f8ea Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 30 Aug 2021 22:07:25 -0700 Subject: [PATCH 05/74] Simplify ENOENT handling around fetch() and remove() --- diskcache/core.py | 53 ++++++++++++++--------------------------------- 1 file changed, 15 insertions(+), 38 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 4d0ae05..1c915b1 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -26,14 +26,6 @@ def full_name(func): return func.__module__ + '.' + func.__qualname__ -try: - WindowsError -except NameError: - - class WindowsError(Exception): - "Windows error place-holder on platforms without support." - - class Constant(tuple): "Pretty display of immutable constant." @@ -328,13 +320,10 @@ def remove(self, filename): try: os.remove(full_path) - except WindowsError: + except OSError: + # OSError may occur if two caches attempt to delete the same + # file at the same time. 
pass - except OSError as error: - if error.errno != errno.ENOENT: - # ENOENT may occur if two caches attempt to delete the same - # file at the same time. - raise class JSONDisk(Disk): @@ -1201,13 +1190,10 @@ def get( try: value = self._disk.fetch(mode, filename, db_value, read) except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - if self.statistics: - sql(cache_miss) - return default - else: - raise + # Key was deleted before we could retrieve result. + if self.statistics: + sql(cache_miss) + return default if self.statistics: sql(cache_hit) @@ -1324,11 +1310,8 @@ def pop( try: value = self._disk.fetch(mode, filename, db_value, False) except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - return default - else: - raise + # Key was deleted before we could retrieve result. + return default finally: if filename is not None: self._disk.remove(filename) @@ -1595,10 +1578,8 @@ def pull( try: value = self._disk.fetch(mode, name, db_value, False) except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - raise + # Key was deleted before we could retrieve result. + continue finally: if name is not None: self._disk.remove(name) @@ -1711,10 +1692,8 @@ def peek( try: value = self._disk.fetch(mode, name, db_value, False) except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - raise + # Key was deleted before we could retrieve result. + continue finally: if name is not None: self._disk.remove(name) @@ -1794,10 +1773,8 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): try: value = self._disk.fetch(mode, name, db_value, False) except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. 
- continue - raise + # Key was deleted before we could retrieve result. + continue break if expire_time and tag: From ceff81cde5ecacaf7ff79b50bd90250e988a5db0 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 30 Aug 2021 22:11:33 -0700 Subject: [PATCH 06/74] Add doc about IOError --- diskcache/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/diskcache/core.py b/diskcache/core.py index 1c915b1..efa175b 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -251,6 +251,7 @@ def fetch(self, mode, filename, value, read): :param value: database value :param bool read: when True, return an open file handle :return: corresponding Python value + :raises: IOError if the value cannot be read """ # pylint: disable=no-self-use,unidiomatic-typecheck,consider-using-with From aefb2feda735b1f602eee290cac3a1baa95c8c8c Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 30 Aug 2021 22:12:36 -0700 Subject: [PATCH 07/74] Add notes about changes to store() and remove() --- diskcache/core.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/diskcache/core.py b/diskcache/core.py index efa175b..2cbcdad 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -187,6 +187,7 @@ def store(self, value, read, key=UNKNOWN): :return: (size, mode, filename, value) tuple for Cache table """ + # TODO: Retry mkdirs!!! # pylint: disable=unidiomatic-typecheck type_value = type(value) min_file_size = self.min_file_size @@ -317,6 +318,7 @@ def remove(self, filename): :param str filename: relative path to file """ + # TODO: Delete dir if empty!!! 
full_path = op.join(self._directory, filename) try: From 49c5979190bd33b134ab8acfe5b5c95823d567e2 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Tue, 31 Aug 2021 22:33:14 -0700 Subject: [PATCH 08/74] Update remove to cleanup parent dirs --- diskcache/core.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 2cbcdad..13a1b01 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -309,24 +309,26 @@ def filename(self, key=UNKNOWN, value=UNKNOWN): full_path = op.join(self._directory, filename) return filename, full_path - def remove(self, filename): - """Remove a file given by `filename`. + def remove(self, file_path): + """Remove a file given by `file_path`. - This method is cross-thread and cross-process safe. If an "error no - entry" occurs, it is suppressed. + This method is cross-thread and cross-process safe. If an OSError + occurs, it is suppressed. - :param str filename: relative path to file + :param str file_path: relative path to file """ - # TODO: Delete dir if empty!!! - full_path = op.join(self._directory, filename) + full_path = op.join(self._directory, file_path) + full_dir, _ = op.split(full_path) - try: + # Suppress OSError that may occur if two caches attempt to delete the + # same file or directory at the same time. + + with cl.suppress(OSError): os.remove(full_path) - except OSError: - # OSError may occur if two caches attempt to delete the same - # file at the same time. 
- pass + + with cl.suppress(OSError): + os.removedirs(full_dir) class JSONDisk(Disk): From 1acab3b7b0809180b7c5b0863751e18662662cc3 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Tue, 31 Aug 2021 22:33:38 -0700 Subject: [PATCH 09/74] Remove logic from filename() for creating directories --- diskcache/core.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 13a1b01..ac266e3 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -297,14 +297,6 @@ def filename(self, key=UNKNOWN, value=UNKNOWN): hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') sub_dir = op.join(hex_name[:2], hex_name[2:4]) name = hex_name[4:] + '.val' - directory = op.join(self._directory, sub_dir) - - try: - os.makedirs(directory) - except OSError as error: - if error.errno != errno.EEXIST: - raise - filename = op.join(sub_dir, name) full_path = op.join(self._directory, filename) return filename, full_path From b86aa9e0ba840821f2b5bcbc064a1283fd4d3e1e Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Tue, 31 Aug 2021 22:34:03 -0700 Subject: [PATCH 10/74] Modify store() to create the subdirs when writing the file (1 of 4) --- diskcache/core.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index ac266e3..9040fb4 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -206,9 +206,26 @@ def store(self, value, read, key=UNKNOWN): return 0, MODE_RAW, None, sqlite3.Binary(value) else: filename, full_path = self.filename(key, value) + full_dir, _ = op.split(full_path) - with open(full_path, 'xb') as writer: - writer.write(value) + for count in range(11): + with cl.suppress(OSError): + os.makedirs(full_dir) + + try: + # Another cache may have deleted the directory before + # the file could be opened. + writer = open(full_path, 'xb') + except OSError: + if count == 10: + # Give up after 10 tries to open the file. 
+ raise + continue + + with writer: + writer.write(value) + + break return len(value), MODE_BINARY, filename, None elif type_value is str: From 879a65a1932377d017cc187661fd880346459b0d Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:02:21 -0700 Subject: [PATCH 11/74] Refactor file writing logic to retry makedirs --- diskcache/core.py | 66 +++++++++++++++++++++-------------------------- 1 file changed, 30 insertions(+), 36 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 9040fb4..332276a 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -187,7 +187,6 @@ def store(self, value, read, key=UNKNOWN): :return: (size, mode, filename, value) tuple for Cache table """ - # TODO: Retry mkdirs!!! # pylint: disable=unidiomatic-typecheck type_value = type(value) min_file_size = self.min_file_size @@ -206,46 +205,18 @@ def store(self, value, read, key=UNKNOWN): return 0, MODE_RAW, None, sqlite3.Binary(value) else: filename, full_path = self.filename(key, value) - full_dir, _ = op.split(full_path) - - for count in range(11): - with cl.suppress(OSError): - os.makedirs(full_dir) - - try: - # Another cache may have deleted the directory before - # the file could be opened. - writer = open(full_path, 'xb') - except OSError: - if count == 10: - # Give up after 10 tries to open the file. 
- raise - continue - - with writer: - writer.write(value) - - break - + self._write(full_path, io.BytesIO(value), 'xb') return len(value), MODE_BINARY, filename, None elif type_value is str: filename, full_path = self.filename(key, value) - - with open(full_path, 'x', encoding='UTF-8') as writer: - writer.write(value) - + self._write(full_path, io.StringIO(value), 'x', 'UTF-8') size = op.getsize(full_path) return size, MODE_TEXT, filename, None elif read: - size = 0 reader = ft.partial(value.read, 2 ** 22) filename, full_path = self.filename(key, value) - - with open(full_path, 'xb') as writer: - for chunk in iter(reader, b''): - size += len(chunk) - writer.write(chunk) - + iterator = iter(reader, b'') + size = self._write(full_path, iterator, 'xb') return size, MODE_BINARY, filename, None else: result = pickle.dumps(value, protocol=self.pickle_protocol) @@ -254,11 +225,34 @@ def store(self, value, read, key=UNKNOWN): return 0, MODE_PICKLE, None, sqlite3.Binary(result) else: filename, full_path = self.filename(key, value) + self._write(full_path, io.BytesIO(result), 'xb') + return len(result), MODE_PICKLE, filename, None + + def _write(self, full_path, iterator, mode, encoding=None): + full_dir, _ = op.split(full_path) - with open(full_path, 'xb') as writer: - writer.write(result) + for count in range(1, 11): + with cl.suppress(OSError): + os.makedirs(full_dir) - return len(result), MODE_PICKLE, filename, None + try: + # Another cache may have deleted the directory before + # the file could be opened. + writer = open(full_path, mode, encoding=encoding) + except OSError: + if count == 10: + # Give up after 10 tries to open the file. 
+ raise + continue + + with writer: + size = 0 + for chunk in iterator: + size += len(chunk) + writer.write(chunk) + return size + + break def fetch(self, mode, filename, value, read): """Convert fields `mode`, `filename`, and `value` from Cache table to From 226a5cfd2ec68a726a1f6d535d6bc5c055cf226f Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:39:29 -0700 Subject: [PATCH 12/74] Add test for Lock.locked() --- tests/test_recipes.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_recipes.py b/tests/test_recipes.py index 8d13f2b..88612cf 100644 --- a/tests/test_recipes.py +++ b/tests/test_recipes.py @@ -28,6 +28,26 @@ def test_averager(cache): assert nums.pop() == 9.5 +def test_lock(cache): + state = {'num': 0} + lock = dc.Lock(cache, 'demo') + + def worker(): + state['num'] += 1 + with lock: + assert lock.locked() + state['num'] += 1 + time.sleep(0.1) + + with lock: + thread = threading.Thread(target=worker) + thread.start() + time.sleep(0.1) + assert state['num'] == 1 + thread.join() + assert state['num'] == 2 + + def test_rlock(cache): state = {'num': 0} rlock = dc.RLock(cache, 'demo') From 9d5c0b7ebf43bea02eae968f75a9104a699fba46 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:39:47 -0700 Subject: [PATCH 13/74] Test re-entrancy of "rlock" --- tests/test_recipes.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_recipes.py b/tests/test_recipes.py index 88612cf..3f330a6 100644 --- a/tests/test_recipes.py +++ b/tests/test_recipes.py @@ -55,8 +55,9 @@ def test_rlock(cache): def worker(): state['num'] += 1 with rlock: - state['num'] += 1 - time.sleep(0.1) + with rlock: + state['num'] += 1 + time.sleep(0.1) with rlock: thread = threading.Thread(target=worker) From 14094b31006d2951b64a8b87f2a07bd57efa6525 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:40:23 -0700 Subject: [PATCH 14/74] Delete EACCESS error tests --- tests/test_core.py | 121 
--------------------------------------------- 1 file changed, 121 deletions(-) diff --git a/tests/test_core.py b/tests/test_core.py index bcc79e1..a113443 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -333,26 +333,6 @@ def test_get_expired_slow_path(cache): assert cache.get(0) is None -def test_get_ioerror_slow_path(cache): - cache.reset('eviction_policy', 'least-recently-used') - cache.set(0, 0) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.get(0) - - def test_pop(cache): assert cache.incr('alpha') == 1 assert cache.pop('alpha') == 1 @@ -396,25 +376,6 @@ def test_pop_ioerror(cache): assert cache.pop(0) is None -def test_pop_ioerror_eacces(cache): - assert cache.set(0, 0) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.pop(0) - - def test_delete(cache): cache[0] = 0 assert cache.delete(0) @@ -591,29 +552,6 @@ def test_least_frequently_used(cache): assert len(cache.check()) == 0 -def test_filename_error(cache): - func = mock.Mock(side_effect=OSError(errno.EACCES)) - - with mock.patch('os.makedirs', func): - with pytest.raises(OSError): - cache._disk.filename() - - -def test_remove_error(cache): - func = mock.Mock(side_effect=OSError(errno.EACCES)) - - try: - with mock.patch('os.remove', func): - cache._disk.remove('ab/cd/efg.val') - except OSError: - pass - else: - if os.name == 'nt': - pass # File delete errors ignored on Windows. 
- else: - raise Exception('test_remove_error failed') - - def test_check(cache): blob = b'a' * 2 ** 20 keys = (0, 1, 1234, 56.78, u'hello', b'world', None) @@ -1028,44 +966,6 @@ def test_peek_ioerror(cache): assert value == 0 -def test_pull_ioerror_eacces(cache): - assert cache.push(0) == 500000000000000 - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.pull() - - -def test_peek_ioerror_eacces(cache): - assert cache.push(0) == 500000000000000 - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.peek() - - def test_peekitem_extras(cache): with pytest.raises(KeyError): cache.peekitem() @@ -1117,27 +1017,6 @@ def test_peekitem_ioerror(cache): assert value == 2 -def test_peekitem_ioerror_eacces(cache): - assert cache.set('a', 0) - assert cache.set('b', 1) - assert cache.set('c', 2) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.peekitem() - - def test_iterkeys(cache): assert list(cache.iterkeys()) == [] From 2f18867705a536d8a6f54404856f029923204f08 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:40:36 -0700 Subject: [PATCH 15/74] Test Cache.memoize() with typed kwargs --- tests/test_core.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_core.py 
b/tests/test_core.py index a113443..518f99e 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1338,3 +1338,10 @@ def fibrec(num): assert hits2 == (hits1 + count) assert misses2 == misses1 + + +def test_memoize_kwargs(cache): + @cache.memoize(typed=True) + def foo(*args, **kwargs): + return args, kwargs + assert foo(1, 2, 3, a=4, b=5) == ((1, 2, 3), {'a': 4, 'b': 5}) From 094b873e1a780004d6b07dd3ebbb216898d803e4 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:40:53 -0700 Subject: [PATCH 16/74] Test JSONDisk.get by iterating cache --- tests/test_core.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_core.py b/tests/test_core.py index 518f99e..efe3e24 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -89,6 +89,9 @@ def test_custom_disk(): for value in values: assert cache[value] == value + for key, value in zip(cache, values): + assert key == value + shutil.rmtree(cache.directory, ignore_errors=True) From b92f2fd3abaf9c30a921ee7d287f573f2d6546a6 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:41:07 -0700 Subject: [PATCH 17/74] Increase coverage to 97% --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 04b7382..926a2b3 100644 --- a/tox.ini +++ b/tox.ini @@ -79,7 +79,7 @@ line_length = 79 addopts= -n auto --cov-branch - --cov-fail-under=96 + --cov-fail-under=97 --cov-report=term-missing --cov=diskcache --doctest-glob="*.rst" From f9503321a78ecffabac670bdd147bb6026af78fa Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:46:16 -0700 Subject: [PATCH 18/74] Add test for cleaning up dirs --- tests/test_core.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/test_core.py b/tests/test_core.py index efe3e24..cb44da6 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1348,3 +1348,16 @@ def test_memoize_kwargs(cache): def foo(*args, **kwargs): return args, kwargs assert foo(1, 2, 3, a=4, b=5) == ((1, 2, 
3), {'a': 4, 'b': 5}) + + +def test_cleanup_dirs(cache): + value = b'\0' * 2**20 + start_count = len(os.listdir(cache.directory)) + for i in range(10): + cache[i] = value + set_count = len(os.listdir(cache.directory)) + assert set_count > start_count + for i in range(10): + del cache[i] + del_count = len(os.listdir(cache.directory)) + assert start_count == del_count From 587f00d97724ef9bd2f66fcfa4935fecfc1f86a3 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 6 Sep 2021 22:47:21 -0700 Subject: [PATCH 19/74] Add TODO for testing Disk._write --- tests/test_core.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_core.py b/tests/test_core.py index cb44da6..2d09f94 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1361,3 +1361,11 @@ def test_cleanup_dirs(cache): del cache[i] del_count = len(os.listdir(cache.directory)) assert start_count == del_count + + +# TODO: Add tests for Disk._write +# diskcache/core.py +## Disk._write +# - 234->exit +# - 242-246 +# - 255 From 28aa595a662754e88a0d2665b4a9c5eaeadd93fd Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 20:25:34 -0700 Subject: [PATCH 20/74] Add tests for Disk._write --- diskcache/core.py | 2 -- tests/test_core.py | 11 +++++------ 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 332276a..b836e84 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -252,8 +252,6 @@ def _write(self, full_path, iterator, mode, encoding=None): writer.write(chunk) return size - break - def fetch(self, mode, filename, value, read): """Convert fields `mode`, `filename`, and `value` from Cache table to value. 
diff --git a/tests/test_core.py b/tests/test_core.py index 2d09f94..41cfe51 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1363,9 +1363,8 @@ def test_cleanup_dirs(cache): assert start_count == del_count -# TODO: Add tests for Disk._write -# diskcache/core.py -## Disk._write -# - 234->exit -# - 242-246 -# - 255 +def test_disk_write_os_error(cache): + func = mock.Mock(side_effect=[OSError] * 10) + with mock.patch('diskcache.core.open', func): + with pytest.raises(OSError): + cache[0] = '\0' * 2**20 From a2e461ad93f7fa99a0d861db614cea2f7e521a6c Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 20:38:35 -0700 Subject: [PATCH 21/74] Add a pragma "no cover" statements and increase threshold to 98 --- diskcache/__init__.py | 2 +- diskcache/djangocache.py | 2 +- tox.ini | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 428eb30..b5a6218 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -57,7 +57,7 @@ from .djangocache import DjangoCache # noqa __all__.append('DjangoCache') -except Exception: # pylint: disable=broad-except +except Exception: # pylint: disable=broad-except # pragma: no cover # Django not installed or not setup so ignore. pass diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index 44f673d..bf9f4d4 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -6,7 +6,7 @@ try: from django.core.cache.backends.base import DEFAULT_TIMEOUT -except ImportError: +except ImportError: # pragma: no cover # For older versions of Django simply use 300 seconds. 
DEFAULT_TIMEOUT = 300 diff --git a/tox.ini b/tox.ini index 926a2b3..104a084 100644 --- a/tox.ini +++ b/tox.ini @@ -79,7 +79,7 @@ line_length = 79 addopts= -n auto --cov-branch - --cov-fail-under=97 + --cov-fail-under=98 --cov-report=term-missing --cov=diskcache --doctest-glob="*.rst" From ab1484daf149039a468ed087b5073198bc4f21c1 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 20:42:06 -0700 Subject: [PATCH 22/74] Blue fixes (mostly docstring triple quotes) --- diskcache/__init__.py | 1 - diskcache/cli.py | 2 +- diskcache/core.py | 29 ++++++++++++++--------------- diskcache/djangocache.py | 14 +++++++------- diskcache/fanout.py | 12 ++++++------ diskcache/persistent.py | 13 ++++++------- diskcache/recipes.py | 29 ++++++++++++++--------------- tests/benchmark_core.py | 1 - tests/benchmark_djangocache.py | 2 -- tests/benchmark_glob.py | 2 +- tests/benchmark_incr.py | 5 ++--- tests/benchmark_kv_store.py | 1 - tests/issue_109.py | 1 - tests/issue_85.py | 1 - tests/plot.py | 9 ++++----- tests/plot_early_recompute.py | 5 ++--- tests/stress_test_core.py | 10 +++++----- tests/stress_test_fanout.py | 10 +++++----- tests/test_core.py | 7 ++++--- tests/test_deque.py | 2 +- tests/test_djangocache.py | 4 ++-- tests/test_fanout.py | 2 +- tests/test_index.py | 2 +- tests/test_recipes.py | 2 +- tests/utils.py | 6 +++--- 25 files changed, 80 insertions(+), 92 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index b5a6218..934a3a7 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -3,7 +3,6 @@ ======================= The :doc:`tutorial` provides a helpful walkthrough of most methods. - """ from .core import ( diff --git a/diskcache/cli.py b/diskcache/cli.py index 44bffeb..6a39f60 100644 --- a/diskcache/cli.py +++ b/diskcache/cli.py @@ -1 +1 @@ -"Command line interface to disk cache." 
+"""Command line interface to disk cache.""" diff --git a/diskcache/core.py b/diskcache/core.py index b836e84..4cbf67f 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -1,5 +1,4 @@ """Core disk and file backed cache API. - """ import codecs @@ -22,12 +21,12 @@ def full_name(func): - "Return full name of `func` by adding the module and function name." + """Return full name of `func` by adding the module and function name.""" return func.__module__ + '.' + func.__qualname__ class Constant(tuple): - "Pretty display of immutable constant." + """Pretty display of immutable constant.""" def __new__(cls, name): return tuple.__new__(cls, (name,)) @@ -102,7 +101,7 @@ def __repr__(self): class Disk: - "Cache key and value serialization for SQLite database and files." + """Cache key and value serialization for SQLite database and files.""" def __init__(self, directory, min_file_size=0, pickle_protocol=0): """Initialize disk instance. @@ -333,7 +332,7 @@ def remove(self, file_path): class JSONDisk(Disk): - "Cache key and value using JSON serialization with zlib compression." + """Cache key and value using JSON serialization with zlib compression.""" def __init__(self, directory, compress_level=1, **kwargs): """Initialize JSON disk instance. @@ -374,15 +373,15 @@ def fetch(self, mode, filename, value, read): class Timeout(Exception): - "Database timeout expired." + """Database timeout expired.""" class UnknownFileWarning(UserWarning): - "Warning used by Cache.check for unknown files." + """Warning used by Cache.check for unknown files.""" class EmptyDirWarning(UserWarning): - "Warning used by Cache.check for empty directories." + """Warning used by Cache.check for empty directories.""" def args_to_key(base, args, kwargs, typed): @@ -414,7 +413,7 @@ def args_to_key(base, args, kwargs, typed): class Cache: - "Disk and file backed cache." 
+ """Disk and file backed cache.""" def __init__(self, directory=None, timeout=60, disk=Disk, **settings): """Initialize cache instance. @@ -1859,12 +1858,12 @@ def memoize(self, name=None, typed=False, expire=None, tag=None): raise TypeError('name cannot be callable') def decorator(func): - "Decorator created by memoize() for callable `func`." + """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @ft.wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, default=ENOVAL, retry=True) @@ -1876,7 +1875,7 @@ def wrapper(*args, **kwargs): return result def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." + """Make key for cache given function arguments.""" return args_to_key(base, args, kwargs, typed) wrapper.__cache_key__ = __cache_key__ @@ -2291,13 +2290,13 @@ def _iter(self, ascending=True): yield _disk_get(key, raw) def __iter__(self): - "Iterate keys in cache including expired items." + """Iterate keys in cache including expired items.""" iterator = self._iter() next(iterator) return iterator def __reversed__(self): - "Reverse iterate keys in cache including expired items." + """Reverse iterate keys in cache including expired items.""" iterator = self._iter(ascending=False) next(iterator) return iterator @@ -2355,7 +2354,7 @@ def __exit__(self, *exception): self.close() def __len__(self): - "Count of items in cache including expired items." + """Count of items in cache including expired items.""" return self.reset('count') def __getstate__(self): diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index bf9f4d4..449f3a0 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -1,4 +1,4 @@ -"Django-compatible disk and file backed cache." 
+"""Django-compatible disk and file backed cache.""" from functools import wraps @@ -15,7 +15,7 @@ class DjangoCache(BaseCache): - "Django-compatible disk and file backed cache." + """Django-compatible disk and file backed cache.""" def __init__(self, directory, params): """Initialize DjangoCache instance. @@ -344,11 +344,11 @@ def cull(self): return self._cache.cull() def clear(self): - "Remove *all* values from the cache at once." + """Remove *all* values from the cache at once.""" return self._cache.clear() def close(self, **kwargs): - "Close the cache connection." + """Close the cache connection.""" # pylint: disable=unused-argument self._cache.close() @@ -415,12 +415,12 @@ def memoize( raise TypeError('name cannot be callable') def decorator(func): - "Decorator created by memoize() for callable `func`." + """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, ENOVAL, version, retry=True) @@ -444,7 +444,7 @@ def wrapper(*args, **kwargs): return result def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." + """Make key for cache given function arguments.""" return args_to_key(base, args, kwargs, typed) wrapper.__cache_key__ = __cache_key__ diff --git a/diskcache/fanout.py b/diskcache/fanout.py index 99384a0..dc5240c 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -1,4 +1,4 @@ -"Fanout cache automatically shards keys and values." +"""Fanout cache automatically shards keys and values.""" import contextlib as cl import functools @@ -14,7 +14,7 @@ class FanoutCache: - "Cache that shards keys and values." 
+ """Cache that shards keys and values.""" def __init__( self, directory=None, shards=8, timeout=0.010, disk=Disk, **settings @@ -512,7 +512,7 @@ def volume(self): return sum(shard.volume() for shard in self._shards) def close(self): - "Close database connection." + """Close database connection.""" for shard in self._shards: shard.close() self._caches.clear() @@ -532,17 +532,17 @@ def __setstate__(self, state): self.__init__(*state) def __iter__(self): - "Iterate keys in cache including expired items." + """Iterate keys in cache including expired items.""" iterators = (iter(shard) for shard in self._shards) return it.chain.from_iterable(iterators) def __reversed__(self): - "Reverse iterate keys in cache including expired items." + """Reverse iterate keys in cache including expired items.""" iterators = (reversed(shard) for shard in reversed(self._shards)) return it.chain.from_iterable(iterators) def __len__(self): - "Count of items in cache including expired items." + """Count of items in cache including expired items.""" return sum(len(shard) for shard in self._shards) def reset(self, key, value=ENOVAL): diff --git a/diskcache/persistent.py b/diskcache/persistent.py index 44f9cc7..89c3899 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -1,5 +1,4 @@ """Persistent Data Types - """ import operator as op @@ -18,10 +17,10 @@ def _make_compare(seq_op, doc): - "Make compare method with Sequence semantics." + """Make compare method with Sequence semantics.""" def compare(self, that): - "Compare method for deque and sequence." + """Compare method for deque and sequence.""" if not isinstance(that, Sequence): return NotImplemented @@ -117,12 +116,12 @@ def fromcache(cls, cache, iterable=()): @property def cache(self): - "Cache used by deque." + """Cache used by deque.""" return self._cache @property def directory(self): - "Directory path where deque is stored." 
+ """Directory path where deque is stored.""" return self._cache.directory def _index(self, index, func): @@ -699,12 +698,12 @@ def fromcache(cls, cache, *args, **kwargs): @property def cache(self): - "Cache used by index." + """Cache used by index.""" return self._cache @property def directory(self): - "Directory path where items are stored." + """Directory path where items are stored.""" return self._cache.directory def __getitem__(self, key): diff --git a/diskcache/recipes.py b/diskcache/recipes.py index 8f7bd32..0c02dd7 100644 --- a/diskcache/recipes.py +++ b/diskcache/recipes.py @@ -1,5 +1,4 @@ """Disk Cache Recipes - """ import functools @@ -40,7 +39,7 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def add(self, value): - "Add `value` to average." + """Add `value` to average.""" with self._cache.transact(retry=True): total, count = self._cache.get(self._key, default=(0.0, 0)) total += value @@ -53,12 +52,12 @@ def add(self, value): ) def get(self): - "Get current average or return `None` if count equals zero." + """Get current average or return `None` if count equals zero.""" total, count = self._cache.get(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count def pop(self): - "Return current average and delete key." + """Return current average and delete key.""" total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count @@ -83,7 +82,7 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire lock using spin-lock algorithm." + """Acquire lock using spin-lock algorithm.""" while True: added = self._cache.add( self._key, @@ -97,11 +96,11 @@ def acquire(self): time.sleep(0.001) def release(self): - "Release lock by deleting key." + """Release lock by deleting key.""" self._cache.delete(self._key, retry=True) def locked(self): - "Return true if the lock is acquired." 
+ """Return true if the lock is acquired.""" return self._key in self._cache def __enter__(self): @@ -137,7 +136,7 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire lock by incrementing count using spin-lock algorithm." + """Acquire lock by incrementing count using spin-lock algorithm.""" pid = os.getpid() tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) @@ -156,7 +155,7 @@ def acquire(self): time.sleep(0.001) def release(self): - "Release lock by decrementing count." + """Release lock by decrementing count.""" pid = os.getpid() tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) @@ -206,7 +205,7 @@ def __init__(self, cache, key, value=1, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire semaphore by decrementing value using spin-lock algorithm." + """Acquire semaphore by decrementing value using spin-lock algorithm.""" while True: with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) @@ -221,7 +220,7 @@ def acquire(self): time.sleep(0.001) def release(self): - "Release semaphore by incrementing value." + """Release semaphore by incrementing value.""" with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) assert self._value > value, 'cannot release un-acquired semaphore' @@ -396,11 +395,11 @@ def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): """ # Caution: Nearly identical code exists in Cache.memoize def decorator(func): - "Decorator created by memoize call for callable." + """Decorator created by memoize call for callable.""" base = (full_name(func),) if name is None else (name,) def timer(*args, **kwargs): - "Time execution of `func` and return result and time delta." 
+ """Time execution of `func` and return result and time delta.""" start = time.time() result = func(*args, **kwargs) delta = time.time() - start @@ -408,7 +407,7 @@ def timer(*args, **kwargs): @functools.wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) pair, expire_time = cache.get( key, @@ -459,7 +458,7 @@ def recompute(): return pair[0] def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." + """Make key for cache given function arguments.""" return args_to_key(base, args, kwargs, typed) wrapper.__cache_key__ = __cache_key__ diff --git a/tests/benchmark_core.py b/tests/benchmark_core.py index 282ce2a..7d64595 100644 --- a/tests/benchmark_core.py +++ b/tests/benchmark_core.py @@ -3,7 +3,6 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_core.py -p 1 > tests/timings_core_p1.txt $ python tests/benchmark_core.py -p 8 > tests/timings_core_p8.txt - """ import collections as co diff --git a/tests/benchmark_djangocache.py b/tests/benchmark_djangocache.py index 9dbbcd7..61a80bf 100644 --- a/tests/benchmark_djangocache.py +++ b/tests/benchmark_djangocache.py @@ -2,8 +2,6 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_djangocache.py > tests/timings_djangocache.txt - - """ import collections as co diff --git a/tests/benchmark_glob.py b/tests/benchmark_glob.py index 9c23104..7f0bf7c 100644 --- a/tests/benchmark_glob.py +++ b/tests/benchmark_glob.py @@ -1,4 +1,4 @@ -"Benchmark glob.glob1 as used by django.core.cache.backends.filebased." 
+"""Benchmark glob.glob1 as used by django.core.cache.backends.filebased.""" import os import os.path as op diff --git a/tests/benchmark_incr.py b/tests/benchmark_incr.py index 9c8e2fa..4f758aa 100644 --- a/tests/benchmark_incr.py +++ b/tests/benchmark_incr.py @@ -1,5 +1,4 @@ """Benchmark cache.incr method. - """ import json @@ -16,7 +15,7 @@ def worker(num): - "Rapidly increment key and time operation." + """Rapidly increment key and time operation.""" time.sleep(0.1) # Let other workers start. cache = dc.Cache('tmp') @@ -33,7 +32,7 @@ def worker(num): def main(): - "Run workers and print percentile results." + """Run workers and print percentile results.""" shutil.rmtree('tmp', ignore_errors=True) processes = [ diff --git a/tests/benchmark_kv_store.py b/tests/benchmark_kv_store.py index e141a36..7015470 100644 --- a/tests/benchmark_kv_store.py +++ b/tests/benchmark_kv_store.py @@ -1,7 +1,6 @@ """Benchmarking Key-Value Stores $ python -m IPython tests/benchmark_kv_store.py - """ from IPython import get_ipython diff --git a/tests/issue_109.py b/tests/issue_109.py index c10a81f..a649c58 100644 --- a/tests/issue_109.py +++ b/tests/issue_109.py @@ -1,5 +1,4 @@ """Benchmark for Issue #109 - """ import time diff --git a/tests/issue_85.py b/tests/issue_85.py index 723406b..cb8789b 100644 --- a/tests/issue_85.py +++ b/tests/issue_85.py @@ -2,7 +2,6 @@ $ export PYTHONPATH=`pwd` $ python tests/issue_85.py - """ import collections diff --git a/tests/plot.py b/tests/plot.py index 2138659..fcac0bc 100644 --- a/tests/plot.py +++ b/tests/plot.py @@ -2,7 +2,6 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/plot.py --show tests/timings_core_p1.txt - """ import argparse @@ -14,7 +13,7 @@ def parse_timing(timing, limit): - "Parse timing." + """Parse timing.""" if timing.endswith('ms'): value = float(timing[:-2]) * 1e-3 elif timing.endswith('us'): @@ -26,12 +25,12 @@ def parse_timing(timing, limit): def parse_row(row, line): - "Parse row." 
+ """Parse row.""" return [val.strip() for val in row.match(line).groups()] def parse_data(infile): - "Parse data from `infile`." + """Parse data from `infile`.""" blocks = re.compile(' '.join(['=' * 9] * 8)) dashes = re.compile('^-{79}$') title = re.compile('^Timings for (.*)$') @@ -83,7 +82,7 @@ def parse_data(infile): def make_plot(data, action, save=False, show=False, limit=0.005): - "Make plot." + """Make plot.""" fig, ax = plt.subplots(figsize=(8, 10)) colors = ['#ff7f00', '#377eb8', '#4daf4a', '#984ea3', '#e41a1c'] width = 0.15 diff --git a/tests/plot_early_recompute.py b/tests/plot_early_recompute.py index e58f580..1508c45 100644 --- a/tests/plot_early_recompute.py +++ b/tests/plot_early_recompute.py @@ -1,5 +1,4 @@ """Early Recomputation Measurements - """ import functools as ft @@ -61,14 +60,14 @@ def repeat(num): def frange(start, stop, step=1e-3): - "Generator for floating point values from `start` to `stop` by `step`." + """Generator for floating point values from `start` to `stop` by `step`.""" while start < stop: yield start start += step def plot(option, filename, cache_times, worker_times): - "Plot concurrent workers and latency." + """Plot concurrent workers and latency.""" import matplotlib.pyplot as plt fig, (workers, latency) = plt.subplots(2, sharex=True) diff --git a/tests/stress_test_core.py b/tests/stress_test_core.py index 6fd0991..c30fa3f 100644 --- a/tests/stress_test_core.py +++ b/tests/stress_test_core.py @@ -1,4 +1,4 @@ -"Stress test diskcache.core.Cache." +"""Stress test diskcache.core.Cache.""" import collections as co import multiprocessing as mp @@ -292,22 +292,22 @@ def stress_test( def stress_test_lru(): - "Stress test least-recently-used eviction policy." + """Stress test least-recently-used eviction policy.""" stress_test(eviction_policy=u'least-recently-used') def stress_test_lfu(): - "Stress test least-frequently-used eviction policy." 
+ """Stress test least-frequently-used eviction policy.""" stress_test(eviction_policy=u'least-frequently-used') def stress_test_none(): - "Stress test 'none' eviction policy." + """Stress test 'none' eviction policy.""" stress_test(eviction_policy=u'none') def stress_test_mp(): - "Stress test multiple threads and processes." + """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) diff --git a/tests/stress_test_fanout.py b/tests/stress_test_fanout.py index 58708c9..d3b67e3 100644 --- a/tests/stress_test_fanout.py +++ b/tests/stress_test_fanout.py @@ -1,4 +1,4 @@ -"Stress test diskcache.core.Cache." +"""Stress test diskcache.core.Cache.""" import multiprocessing as mp import os @@ -283,22 +283,22 @@ def stress_test( def stress_test_lru(): - "Stress test least-recently-used eviction policy." + """Stress test least-recently-used eviction policy.""" stress_test(eviction_policy=u'least-recently-used') def stress_test_lfu(): - "Stress test least-frequently-used eviction policy." + """Stress test least-frequently-used eviction policy.""" stress_test(eviction_policy=u'least-frequently-used') def stress_test_none(): - "Stress test 'none' eviction policy." + """Stress test 'none' eviction policy.""" stress_test(eviction_policy=u'none') def stress_test_mp(): - "Stress test multiple threads and processes." + """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) diff --git a/tests/test_core.py b/tests/test_core.py index 41cfe51..c1e7a4a 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,4 +1,4 @@ -"Test diskcache.core.Cache." 
+"""Test diskcache.core.Cache.""" import errno import hashlib @@ -1347,11 +1347,12 @@ def test_memoize_kwargs(cache): @cache.memoize(typed=True) def foo(*args, **kwargs): return args, kwargs + assert foo(1, 2, 3, a=4, b=5) == ((1, 2, 3), {'a': 4, 'b': 5}) def test_cleanup_dirs(cache): - value = b'\0' * 2**20 + value = b'\0' * 2 ** 20 start_count = len(os.listdir(cache.directory)) for i in range(10): cache[i] = value @@ -1367,4 +1368,4 @@ def test_disk_write_os_error(cache): func = mock.Mock(side_effect=[OSError] * 10) with mock.patch('diskcache.core.open', func): with pytest.raises(OSError): - cache[0] = '\0' * 2**20 + cache[0] = '\0' * 2 ** 20 diff --git a/tests/test_deque.py b/tests/test_deque.py index 8113dfe..add7714 100644 --- a/tests/test_deque.py +++ b/tests/test_deque.py @@ -1,4 +1,4 @@ -"Test diskcache.persistent.Deque." +"""Test diskcache.persistent.Deque.""" import pickle import shutil diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index 2c216fe..cdaf101 100644 --- a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -100,7 +100,7 @@ class UnpicklableType(object): def custom_key_func(key, key_prefix, version): - "A customized cache key function" + """A customized cache key function""" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) @@ -921,7 +921,7 @@ def __getstate__(self): ) ) class DiskCacheTests(BaseCacheTests, TestCase): - "Specific test cases for diskcache.DjangoCache." + """Specific test cases for diskcache.DjangoCache.""" def setUp(self): super().setUp() diff --git a/tests/test_fanout.py b/tests/test_fanout.py index 8918af3..f212fac 100644 --- a/tests/test_fanout.py +++ b/tests/test_fanout.py @@ -1,4 +1,4 @@ -"Test diskcache.fanout.FanoutCache." 
+"""Test diskcache.fanout.FanoutCache.""" import collections as co import hashlib diff --git a/tests/test_index.py b/tests/test_index.py index 27639f7..742daf3 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -1,4 +1,4 @@ -"Test diskcache.persistent.Index." +"""Test diskcache.persistent.Index.""" import pickle import shutil diff --git a/tests/test_recipes.py b/tests/test_recipes.py index 3f330a6..ae74459 100644 --- a/tests/test_recipes.py +++ b/tests/test_recipes.py @@ -1,4 +1,4 @@ -"Test diskcache.recipes." +"""Test diskcache.recipes.""" import shutil import threading diff --git a/tests/utils.py b/tests/utils.py index 5b41ce9..38e5d33 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -32,7 +32,7 @@ def secs(value): def run(*args): - "Run command, print output, and return output." + """Run command, print output, and return output.""" print('utils$', *args) result = sp.check_output(args) print(result) @@ -40,7 +40,7 @@ def run(*args): def mount_ramdisk(size, path): - "Mount RAM disk at `path` with `size` in bytes." + """Mount RAM disk at `path` with `size` in bytes.""" sectors = size / 512 os.makedirs(path) @@ -53,7 +53,7 @@ def mount_ramdisk(size, path): def unmount_ramdisk(dev_path, path): - "Unmount RAM disk with `dev_path` and `path`." 
+ """Unmount RAM disk with `dev_path` and `path`.""" run('umount', path) run('diskutil', 'eject', dev_path) run('rm', '-r', path) From 72bbd73d29184e095df248efd48c31eeeb8e992c Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 20:58:43 -0700 Subject: [PATCH 23/74] Pylint fixes --- diskcache/core.py | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 4cbf67f..2e946ad 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -46,25 +46,25 @@ def __repr__(self): MODE_PICKLE = 4 DEFAULT_SETTINGS = { - u'statistics': 0, # False - u'tag_index': 0, # False - u'eviction_policy': u'least-recently-stored', - u'size_limit': 2 ** 30, # 1gb - u'cull_limit': 10, - u'sqlite_auto_vacuum': 1, # FULL - u'sqlite_cache_size': 2 ** 13, # 8,192 pages - u'sqlite_journal_mode': u'wal', - u'sqlite_mmap_size': 2 ** 26, # 64mb - u'sqlite_synchronous': 1, # NORMAL - u'disk_min_file_size': 2 ** 15, # 32kb - u'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, + 'statistics': 0, # False + 'tag_index': 0, # False + 'eviction_policy': 'least-recently-stored', + 'size_limit': 2 ** 30, # 1gb + 'cull_limit': 10, + 'sqlite_auto_vacuum': 1, # FULL + 'sqlite_cache_size': 2 ** 13, # 8,192 pages + 'sqlite_journal_mode': 'wal', + 'sqlite_mmap_size': 2 ** 26, # 64mb + 'sqlite_synchronous': 1, # NORMAL + 'disk_min_file_size': 2 ** 15, # 32kb + 'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, } METADATA = { - u'count': 0, - u'size': 0, - u'hits': 0, - u'misses': 0, + 'count': 0, + 'size': 0, + 'hits': 0, + 'misses': 0, } EVICTION_POLICY = { @@ -1194,7 +1194,7 @@ def get( try: value = self._disk.fetch(mode, filename, db_value, read) - except IOError as error: + except IOError: # Key was deleted before we could retrieve result. 
if self.statistics: sql(cache_miss) @@ -1314,7 +1314,7 @@ def pop( try: value = self._disk.fetch(mode, filename, db_value, False) - except IOError as error: + except IOError: # Key was deleted before we could retrieve result. return default finally: @@ -1582,7 +1582,7 @@ def pull( try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: + except IOError: # Key was deleted before we could retrieve result. continue finally: @@ -1696,7 +1696,7 @@ def peek( try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: + except IOError: # Key was deleted before we could retrieve result. continue finally: @@ -1777,7 +1777,7 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: + except IOError: # Key was deleted before we could retrieve result. continue break @@ -1911,7 +1911,7 @@ def check(self, fix=False, retry=False): rows = sql('PRAGMA integrity_check').fetchall() - if len(rows) != 1 or rows[0][0] != u'ok': + if len(rows) != 1 or rows[0][0] != 'ok': for (message,) in rows: warnings.warn(message) From 3e87128d4154acf90bd64c6630b1200108dc1ed2 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 21:04:34 -0700 Subject: [PATCH 24/74] Disable no-self-use in Disk._write --- diskcache/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/diskcache/core.py b/diskcache/core.py index 2e946ad..251c5a2 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -228,6 +228,7 @@ def store(self, value, read, key=UNKNOWN): return len(result), MODE_PICKLE, filename, None def _write(self, full_path, iterator, mode, encoding=None): + # pylint: disable=no-self-use full_dir, _ = op.split(full_path) for count in range(1, 11): From fbc537a7138330652d0d42aa09caa5531746b302 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 21:51:30 -0700 Subject: [PATCH 25/74] Add `ignore` to memoize() --- 
diskcache/core.py | 10 +++++++--- diskcache/djangocache.py | 4 +++- diskcache/persistent.py | 5 +++-- diskcache/recipes.py | 5 +++-- tests/test_core.py | 13 +++++++++++++ 5 files changed, 29 insertions(+), 8 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 251c5a2..4035293 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -385,19 +385,22 @@ class EmptyDirWarning(UserWarning): """Warning used by Cache.check for empty directories.""" -def args_to_key(base, args, kwargs, typed): +def args_to_key(base, args, kwargs, typed, ignore): """Create cache key out of function arguments. :param tuple base: base of key :param tuple args: function arguments :param dict kwargs: function keyword arguments :param bool typed: include types in cache key + :param set ignore: positional or keyword args to ignore :return: cache key tuple """ + args = tuple(arg for index, arg in enumerate(args) if index not in ignore) key = base + args if kwargs: + kwargs = {key: val for key, val in kwargs.items() if key not in ignore} key += (ENOVAL,) sorted_items = sorted(kwargs.items()) @@ -1792,7 +1795,7 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): else: return key, value - def memoize(self, name=None, typed=False, expire=None, tag=None): + def memoize(self, name=None, typed=False, expire=None, tag=None, ignore=()): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. 
@@ -1851,6 +1854,7 @@ def memoize(self, name=None, typed=False, expire=None, tag=None): :param float expire: seconds until arguments expire (default None, no expiry) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -1877,7 +1881,7 @@ def wrapper(*args, **kwargs): def __cache_key__(*args, **kwargs): """Make key for cache given function arguments.""" - return args_to_key(base, args, kwargs, typed) + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index 449f3a0..347a613 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -373,6 +373,7 @@ def memoize( version=None, typed=False, tag=None, + ignore=(), ): """Memoizing cache decorator. @@ -407,6 +408,7 @@ def memoize( :param int version: key version number (default None, cache parameter) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -445,7 +447,7 @@ def wrapper(*args, **kwargs): def __cache_key__(*args, **kwargs): """Make key for cache given function arguments.""" - return args_to_key(base, args, kwargs, typed) + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/diskcache/persistent.py b/diskcache/persistent.py index 89c3899..9b5939b 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -1096,7 +1096,7 @@ def __ne__(self, other): """ return not self == other - def memoize(self, name=None, typed=False): + def memoize(self, name=None, typed=False, ignore=()): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. 
@@ -1147,10 +1147,11 @@ def memoize(self, name=None, typed=False): :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ - return self._cache.memoize(name, typed) + return self._cache.memoize(name, typed, ignore) @contextmanager def transact(self): diff --git a/diskcache/recipes.py b/diskcache/recipes.py index 0c02dd7..b345560 100644 --- a/diskcache/recipes.py +++ b/diskcache/recipes.py @@ -337,7 +337,7 @@ def wrapper(*args, **kwargs): return decorator -def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): +def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1, ignore=()): """Memoizing cache decorator with cache stampede protection. Cache stampedes are a type of system overload that can occur when parallel @@ -390,6 +390,7 @@ def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -459,7 +460,7 @@ def recompute(): def __cache_key__(*args, **kwargs): """Make key for cache given function arguments.""" - return args_to_key(base, args, kwargs, typed) + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/tests/test_core.py b/tests/test_core.py index c1e7a4a..0a1fca6 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1369,3 +1369,16 @@ def test_disk_write_os_error(cache): with mock.patch('diskcache.core.open', func): with pytest.raises(OSError): cache[0] = '\0' * 2 ** 20 + + +def test_memoize_ignore(cache): + + @cache.memoize(ignore={1, 'arg1'}) 
+ def test(*args, **kwargs): + return args, kwargs + + cache.stats(enable=True) + assert test('a', 'b', 'c', arg0='d', arg1='e', arg2='f') + assert test('a', 'w', 'c', arg0='d', arg1='x', arg2='f') + assert test('a', 'y', 'c', arg0='d', arg1='z', arg2='f') + assert cache.stats() == (2, 1) From bd800aa069ad6bc15aa3f6fec84ae92d2f644de5 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 21:52:05 -0700 Subject: [PATCH 26/74] Fixes for blue --- diskcache/core.py | 4 +++- diskcache/recipes.py | 4 +++- tests/test_core.py | 1 - 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index 4035293..d7f707b 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -1795,7 +1795,9 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): else: return key, value - def memoize(self, name=None, typed=False, expire=None, tag=None, ignore=()): + def memoize( + self, name=None, typed=False, expire=None, tag=None, ignore=() + ): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. diff --git a/diskcache/recipes.py b/diskcache/recipes.py index b345560..b5af6dd 100644 --- a/diskcache/recipes.py +++ b/diskcache/recipes.py @@ -337,7 +337,9 @@ def wrapper(*args, **kwargs): return decorator -def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1, ignore=()): +def memoize_stampede( + cache, expire, name=None, typed=False, tag=None, beta=1, ignore=() +): """Memoizing cache decorator with cache stampede protection. 
Cache stampedes are a type of system overload that can occur when parallel diff --git a/tests/test_core.py b/tests/test_core.py index 0a1fca6..b3d3d66 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1372,7 +1372,6 @@ def test_disk_write_os_error(cache): def test_memoize_ignore(cache): - @cache.memoize(ignore={1, 'arg1'}) def test(*args, **kwargs): return args, kwargs From 606d8f2b12e8022e6d5b509d7c41439c4242043d Mon Sep 17 00:00:00 2001 From: Abhinav Omprakash <55880260+AbhinavOmprakash@users.noreply.github.com> Date: Fri, 11 Jun 2021 09:54:54 +0530 Subject: [PATCH 27/74] Fixes #201 added github repo to project_urls --- setup.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7f0561c..b29a463 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,12 @@ def run_tests(self): long_description=readme, author='Grant Jenks', author_email='contact@grantjenks.com', - url='http://www.grantjenks.com/docs/diskcache/', + url='http://www.grantjenks.com/docs/diskcache/', + project_urls = { + 'Documentation':'http://www.grantjenks.com/docs/diskcache/', + 'Source':'https://github.com/grantjenks/python-diskcache', + 'Tracker':'https://github.com/grantjenks/python-diskcache/issues', + 'Funding':'https://gumroad.com/l/diskcache',} license='Apache 2.0', packages=['diskcache'], tests_require=['tox'], From c22d3ee59ee28bd58aec59182d375b1eccd191f2 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 22:03:57 -0700 Subject: [PATCH 28/74] Fixup formatting for project urls --- setup.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index b29a463..b49d9c8 100644 --- a/setup.py +++ b/setup.py @@ -29,12 +29,13 @@ def run_tests(self): long_description=readme, author='Grant Jenks', author_email='contact@grantjenks.com', - url='http://www.grantjenks.com/docs/diskcache/', - project_urls = { - 'Documentation':'http://www.grantjenks.com/docs/diskcache/', - 
'Source':'https://github.com/grantjenks/python-diskcache', - 'Tracker':'https://github.com/grantjenks/python-diskcache/issues', - 'Funding':'https://gumroad.com/l/diskcache',} + url='http://www.grantjenks.com/docs/diskcache/', + project_urls={ + 'Documentation': 'http://www.grantjenks.com/docs/diskcache/', + 'Funding': 'https://gum.co/diskcache', + 'Source': 'https://github.com/grantjenks/python-diskcache', + 'Tracker': 'https://github.com/grantjenks/python-diskcache/issues', + }, license='Apache 2.0', packages=['diskcache'], tests_require=['tox'], From d55a50ee083784afa9c85e14e41c4a2d132f3111 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 22:15:50 -0700 Subject: [PATCH 29/74] Stop using ENOVAL in args_to_key() --- diskcache/core.py | 3 +-- tests/test_core.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index d7f707b..fb343be 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -397,11 +397,10 @@ def args_to_key(base, args, kwargs, typed, ignore): """ args = tuple(arg for index, arg in enumerate(args) if index not in ignore) - key = base + args + key = base + args + (None,) if kwargs: kwargs = {key: val for key, val in kwargs.items() if key not in ignore} - key += (ENOVAL,) sorted_items = sorted(kwargs.items()) for item in sorted_items: diff --git a/tests/test_core.py b/tests/test_core.py index b3d3d66..55ca962 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -92,6 +92,8 @@ def test_custom_disk(): for key, value in zip(cache, values): assert key == value + test_memoize_iter(cache) + shutil.rmtree(cache.directory, ignore_errors=True) @@ -1381,3 +1383,17 @@ def test(*args, **kwargs): assert test('a', 'w', 'c', arg0='d', arg1='x', arg2='f') assert test('a', 'y', 'c', arg0='d', arg1='z', arg2='f') assert cache.stats() == (2, 1) + + +def test_memoize_iter(cache): + @cache.memoize() + def test(*args, **kwargs): + return sum(args) + sum(kwargs.values()) + + 
cache.clear() + assert test(1, 2, 3) + assert test(a=1, b=2, c=3) + assert test(-1, 0, 1, a=1, b=2, c=3) + assert len(cache) == 3 + for key in cache: + assert cache[key] == 6 From b10b3866f4d64ea197e3fb40c0ebe25aa32900d6 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 22:29:21 -0700 Subject: [PATCH 30/74] Add caveat about inconsistent pickles --- docs/tutorial.rst | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 58220b2..3191af6 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -863,8 +863,17 @@ protocol`_ is not used. Neither the `__hash__` nor `__eq__` methods are used for lookups. Instead lookups depend on the serialization method defined by :class:`Disk ` objects. For strings, bytes, integers, and floats, equality matches Python's definition. But large integers and all other -types will be converted to bytes using pickling and the bytes representation -will define equality. +types will be converted to bytes and the bytes representation will define +equality. + +The default :class:`diskcache.Disk` serialization uses pickling for both keys +and values. Unfortunately, pickling produces inconsistencies sometimes when +applied to container data types like tuples. Two equal tuples may serialize to +different bytes objects using pickle. The likelihood of differences is reduced +by using `pickletools.optimize` but still inconsistencies occur (`#54`_). The +inconsistent serialized pickle values is particularly problematic when applied +to the key in the cache. Consider using an alternative Disk type, like +:class:`JSONDisk `, for consistent serialization of keys. SQLite is used to synchronize database access between threads and processes and as such inherits all SQLite caveats. Most notably SQLite is `not recommended`_ @@ -898,6 +907,7 @@ does not account the size of directories themselves or other filesystem metadata. 
If directory count or size is a concern then consider implementing an alternative :class:`Disk `. +.. _`#54`: https://github.com/grantjenks/python-diskcache/issues/54 .. _`hash protocol`: https://docs.python.org/library/functions.html#hash .. _`not recommended`: https://www.sqlite.org/faq.html#q5 .. _`performs poorly`: https://www.pythonanywhere.com/forums/topic/1847/ From 3ad6c0e4365ef93e60a75aee75740e6551e279f9 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 13 Sep 2021 22:40:26 -0700 Subject: [PATCH 31/74] Bug Fix: Use "ignore" keyword argument with Index.memoize() --- diskcache/persistent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/diskcache/persistent.py b/diskcache/persistent.py index 9b5939b..c3d570b 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -1151,7 +1151,7 @@ def memoize(self, name=None, typed=False, ignore=()): :return: callable decorator """ - return self._cache.memoize(name, typed, ignore) + return self._cache.memoize(name, typed, ignore=ignore) @contextmanager def transact(self): From 4de3c0ed99dd2bcebffad8e6e0cb7a2885210a97 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Tue, 14 Sep 2021 14:07:00 -0700 Subject: [PATCH 32/74] Drop old Ubuntu from integration testing --- .github/workflows/integration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 0a44f89..b1143a4 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -31,7 +31,7 @@ jobs: strategy: max-parallel: 8 matrix: - os: [ubuntu-latest, macos-latest, windows-latest, ubuntu-16.04] + os: [ubuntu-latest, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9] steps: From 6ed012a655673c54468beb7ff00038443ed2595e Mon Sep 17 00:00:00 2001 From: artiom Date: Wed, 29 Sep 2021 10:57:36 +0100 Subject: [PATCH 33/74] docs: fix typo --- docs/tutorial.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) 
diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 3191af6..1963635 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -570,7 +570,7 @@ access and editing at both front and back sides. :class:`Deque cross-thread and cross-process communication. :class:`Deque ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The deque -uses a fixed amout of memory regardless of the size or number of items stored +uses a fixed amount of memory regardless of the size or number of items stored inside it. Index @@ -603,7 +603,7 @@ interface. :class:`Index ` objects inherit all the benefits of cross-thread and cross-process communication. :class:`Index ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The index -uses a fixed amout of memory regardless of the size or number of items stored +uses a fixed amount of memory regardless of the size or number of items stored inside it. .. _tutorial-transactions: From 20f1d93a6e3852bac4289cb94e27585d9c23330c Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Wed, 29 Sep 2021 08:11:04 -0700 Subject: [PATCH 34/74] Disable consider-using-f-string --- .pylintrc | 1 + 1 file changed, 1 insertion(+) diff --git a/.pylintrc b/.pylintrc index 158fe34..6baa978 100644 --- a/.pylintrc +++ b/.pylintrc @@ -143,6 +143,7 @@ disable=print-statement, no-else-return, duplicate-code, inconsistent-return-statements, + consider-using-f-string, # Enable the message, report, category or checker with the given id(s). 
You can # either give multiple identifier separated by comma (,) or put this option From 7bfbce63ba127d601ddfbf2838539e34cad87616 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 29 Nov 2021 22:58:48 -0800 Subject: [PATCH 35/74] Support for Python 3.10 in testing (#238) * Add support for Python 3.10 * Update copyright to 2022 * Bump version to 5.3.0 * Add Python 3.10 to the README --- .github/workflows/integration.yml | 4 ++-- .github/workflows/release.yml | 2 +- LICENSE | 2 +- README.rst | 6 +++--- diskcache/__init__.py | 6 +++--- docs/conf.py | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index b1143a4..07a5650 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -16,7 +16,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | pip install --upgrade pip @@ -32,7 +32,7 @@ jobs: max-parallel: 8 matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: [3.6, 3.7, 3.8, 3.9, '3.10'] steps: - name: Set up Python ${{ matrix.python-version }} x64 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2a07787..1f89c14 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: '3.10' - name: Install dependencies run: | diff --git a/LICENSE b/LICENSE index ca80a22..bb4cfb7 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2016-2021 Grant Jenks +Copyright 2016-2022 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the diff --git a/README.rst b/README.rst index c4aa8e9..eb06a6a 100644 --- a/README.rst +++ b/README.rst @@ -77,8 +77,8 @@ Features - Thread-safe and process-safe - Supports multiple eviction policies (LRU and LFU included) - Keys support "tag" metadata and eviction -- Developed on Python 3.9 -- Tested on CPython 3.6, 3.7, 3.8, 3.9 +- Developed on Python 3.10 +- Tested on CPython 3.6, 3.7, 3.8, 3.9, 3.10 - Tested on Linux, Mac OS X, and Windows - Tested using GitHub Actions @@ -387,7 +387,7 @@ Reference License ------- -Copyright 2016-2021 Grant Jenks +Copyright 2016-2022 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 934a3a7..c361ca9 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.2.1' -__build__ = 0x050201 +__version__ = '5.3.0' +__build__ = 0x050300 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2016-2021 Grant Jenks' +__copyright__ = 'Copyright 2016-2022 Grant Jenks' diff --git a/docs/conf.py b/docs/conf.py index d725198..92ce1b9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,7 +20,7 @@ # -- Project information ----------------------------------------------------- project = 'DiskCache' -copyright = '2021, Grant Jenks' +copyright = '2022, Grant Jenks' author = 'Grant Jenks' # The full version, including alpha/beta/rc tags From 78a5cc690e0aa53ad3537a72b2f52f3c161da1ae Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 30 Dec 2021 17:35:44 -0800 Subject: [PATCH 36/74] Update tests for Django 3.2 --- tests/test_djangocache.py | 266 +++++++++++++++++++++++++------------- 1 file changed, 176 insertions(+), 90 deletions(-) diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index cdaf101..36e1b01 100644 --- 
a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -1,5 +1,5 @@ # Most of this file was copied from: -# https://raw.githubusercontent.com/django/django/stable/2.2.x/tests/cache/tests.py +# https://raw.githubusercontent.com/django/django/stable/3.2.x/tests/cache/tests.py # Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. @@ -9,12 +9,14 @@ import pickle import re import shutil +import sys import tempfile import threading import time import unittest import warnings -from unittest import mock +from pathlib import Path +from unittest import mock, skipIf from django.conf import settings from django.core import management, signals @@ -88,6 +90,10 @@ def __getstate__(self): raise pickle.PickleError() +def empty_response(request): + return HttpResponse() + + KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' @@ -138,6 +144,14 @@ class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() + # RemovedInDjango41Warning: python-memcached doesn't support .get() with + # default. + supports_get_with_default = True + + # Some clients raise custom exceptions when .incr() or .decr() are called + # with a non-integer value. 
+ incr_decr_type_error = TypeError + def tearDown(self): cache.clear() @@ -146,11 +160,15 @@ def test_simple(self): cache.set('key', 'value') self.assertEqual(cache.get('key'), 'value') + def test_default_used_when_none_is_set(self): + """If None is cached, get() returns it instead of the default.""" + cache.set('key_default_none', None) + self.assertIsNone(cache.get('key_default_none', default='default')) + def test_add(self): # A key can be added to a cache - cache.add('addkey1', 'value') - result = cache.add('addkey1', 'newvalue') - self.assertFalse(result) + self.assertIs(cache.add('addkey1', 'value'), True) + self.assertIs(cache.add('addkey1', 'newvalue'), False) self.assertEqual(cache.get('addkey1'), 'value') def test_prefix(self): @@ -158,7 +176,7 @@ def test_prefix(self): cache.set('somekey', 'value') # should not be set in the prefixed cache - self.assertFalse(caches['prefix'].has_key('somekey')) + self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') @@ -180,28 +198,43 @@ def test_get_many(self): self.assertEqual( cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'} ) + cache.set_many({'x': None, 'y': 1}) + self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get('key1'), 'spam') - cache.delete('key1') + self.assertIs(cache.delete('key1'), True) self.assertIsNone(cache.get('key1')) self.assertEqual(cache.get('key2'), 'eggs') + def test_delete_nonexistent(self): + self.assertIs(cache.delete('nonexistent_key'), False) + def test_has_key(self): # The cache can be inspected for cache keys cache.set('hello1', 'goodbye1') - self.assertTrue(cache.has_key('hello1')) - self.assertFalse(cache.has_key('goodbye1')) + self.assertIs(cache.has_key('hello1'), True) + self.assertIs(cache.has_key('goodbye1'), False) cache.set('no_expiry', 'here', None) - 
self.assertTrue(cache.has_key('no_expiry')) + self.assertIs(cache.has_key('no_expiry'), True) + cache.set('null', None) + self.assertIs( + cache.has_key('null'), + True if self.supports_get_with_default else False, + ) def test_in(self): # The in operator can be used to inspect cache contents cache.set('hello2', 'goodbye2') self.assertIn('hello2', cache) self.assertNotIn('goodbye2', cache) + cache.set('null', None) + if self.supports_get_with_default: + self.assertIn('null', cache) + else: + self.assertNotIn('null', cache) def test_incr(self): # Cache values can be incremented @@ -213,6 +246,9 @@ def test_incr(self): self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') + cache.set('null', None) + with self.assertRaises(self.incr_decr_type_error): + cache.incr('null') def test_decr(self): # Cache values can be decremented @@ -224,6 +260,9 @@ def test_decr(self): self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') + cache.set('null', None) + with self.assertRaises(self.incr_decr_type_error): + cache.decr('null') def test_close(self): self.assertTrue(hasattr(cache, 'close')) @@ -295,24 +334,23 @@ def test_expiration(self): time.sleep(2) self.assertIsNone(cache.get('expire1')) - cache.add('expire2', 'newvalue') + self.assertIs(cache.add('expire2', 'newvalue'), True) self.assertEqual(cache.get('expire2'), 'newvalue') - self.assertFalse(cache.has_key('expire3')) + self.assertIs(cache.has_key('expire3'), False) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) - self.assertTrue(cache.has_key('expire1')) + self.assertIs(cache.has_key('expire1'), True) time.sleep(3) - self.assertFalse(cache.has_key('expire1')) - + self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. 
cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) - self.assertTrue(cache.has_key('expire1')) + self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) @@ -333,13 +371,13 @@ def test_unicode(self): # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): - cache.delete(key) - cache.add(key, value) + self.assertIs(cache.delete(key), True) + self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): - cache.delete(key) + self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): @@ -359,7 +397,7 @@ def test_binary_string(self): self.assertEqual(value, decompress(compressed_result).decode()) # Test add - cache.add('binary1-add', compressed_value) + self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) @@ -405,14 +443,14 @@ def test_clear(self): def test_long_timeout(self): """ - Followe memcached's convention where a timeout greater than 30 days is + Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). 
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') - cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) + self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') cache.set_many( @@ -429,10 +467,9 @@ def test_forever_timeout(self): cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') - cache.add('key2', 'ham', None) + self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') - added = cache.add('key1', 'new eggs', None) - self.assertIs(added, False) + self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) @@ -440,7 +477,7 @@ def test_forever_timeout(self): self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) - cache.touch('key5', timeout=None) + self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') @@ -451,7 +488,7 @@ def test_zero_timeout(self): cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) - cache.add('key2', 'ham', 0) + self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) @@ -459,7 +496,7 @@ def test_zero_timeout(self): self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) - cache.touch('key5', timeout=0) + self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): @@ -467,7 +504,12 @@ def test_float_timeout(self): cache.set('key1', 'spam', 100.2) self.assertEqual(cache.get('key1'), 'spam') - def _perform_cull_test(self, cull_cache, initial_count, final_count): + def _perform_cull_test(self, cull_cache_name, initial_count, final_count): + try: + cull_cache = 
caches[cull_cache_name] + except InvalidCacheBackendError: + self.skipTest("Culling isn't implemented.") + # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): @@ -480,10 +522,24 @@ def _perform_cull_test(self, cull_cache, initial_count, final_count): self.assertEqual(count, final_count) def test_cull(self): - self._perform_cull_test(caches['cull'], 50, 29) + self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): - self._perform_cull_test(caches['zero_cull'], 50, 19) + self._perform_cull_test('zero_cull', 50, 19) + + def test_cull_delete_when_store_empty(self): + try: + cull_cache = caches['cull'] + except InvalidCacheBackendError: + self.skipTest("Culling isn't implemented.") + old_max_entries = cull_cache._max_entries + # Force _cull to delete on first cached record. + cull_cache._max_entries = -1 + try: + cull_cache.set('force_cull_delete', 'value', 1000) + self.assertIs(cull_cache.has_key('force_cull_delete'), True) + finally: + cull_cache._max_entries = old_max_entries def _perform_invalid_key_test(self, key, expected_warning): """ @@ -500,10 +556,24 @@ def func(key, *args): old_func = cache.key_func cache.key_func = func + tests = [ + ('add', [key, 1]), + ('get', [key]), + ('set', [key, 1]), + ('incr', [key]), + ('decr', [key]), + ('touch', [key]), + ('delete', [key]), + ('get_many', [[key, 'b']]), + ('set_many', [{key: 1, 'b': 2}]), + ('delete_many', [{key: 1, 'b': 2}]), + ] try: - with self.assertWarns(CacheKeyWarning) as cm: - cache.set(key, 'value') - self.assertEqual(str(cm.warning), expected_warning) + for operation, args in tests: + with self.subTest(operation=operation): + with self.assertWarns(CacheKeyWarning) as cm: + getattr(cache, operation)(*args) + self.assertEqual(str(cm.warning), expected_warning) finally: cache.key_func = old_func @@ -567,41 +637,41 @@ def test_cache_versioning_get_set(self): def test_cache_versioning_add(self): # add, default version = 1, but 
manually override version = 2 - cache.add('answer1', 42, version=2) + self.assertIs(cache.add('answer1', 42, version=2), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) - cache.add('answer1', 37, version=2) + self.assertIs(cache.add('answer1', 37, version=2), False) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) - cache.add('answer1', 37, version=1) + self.assertIs(cache.add('answer1', 37, version=1), True) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 - caches['v2'].add('answer2', 42) + self.assertIs(caches['v2'].add('answer2', 42), True) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) - caches['v2'].add('answer2', 37) + self.assertIs(caches['v2'].add('answer2', 37), False) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) - caches['v2'].add('answer2', 37, version=1) + self.assertIs(caches['v2'].add('answer2', 37, version=1), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 - caches['v2'].add('answer3', 42, version=1) + self.assertIs(caches['v2'].add('answer3', 42, version=1), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) - caches['v2'].add('answer3', 37, version=1) + self.assertIs(caches['v2'].add('answer3', 37, version=1), False) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) - caches['v2'].add('answer3', 37) + self.assertIs(caches['v2'].add('answer3', 37), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) @@ -609,73 +679,73 
@@ def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key - self.assertTrue(cache.has_key('answer1')) - self.assertTrue(cache.has_key('answer1', version=1)) - self.assertFalse(cache.has_key('answer1', version=2)) + self.assertIs(cache.has_key('answer1'), True) + self.assertIs(cache.has_key('answer1', version=1), True) + self.assertIs(cache.has_key('answer1', version=2), False) - self.assertFalse(caches['v2'].has_key('answer1')) - self.assertTrue(caches['v2'].has_key('answer1', version=1)) - self.assertFalse(caches['v2'].has_key('answer1', version=2)) + self.assertIs(caches['v2'].has_key('answer1'), False) + self.assertIs(caches['v2'].has_key('answer1', version=1), True) + self.assertIs(caches['v2'].has_key('answer1', version=2), False) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) - cache.delete('answer1') + self.assertIs(cache.delete('answer1'), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) - cache.delete('answer2', version=2) + self.assertIs(cache.delete('answer2', version=2), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) - caches['v2'].delete('answer3') + self.assertIs(caches['v2'].delete('answer3'), True) self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) - caches['v2'].delete('answer4', version=1) + self.assertIs(caches['v2'].delete('answer4', version=1), True) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) - 
cache.incr('answer1') + self.assertEqual(cache.incr('answer1'), 38) self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) - cache.decr('answer1') + self.assertEqual(cache.decr('answer1'), 37) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) - cache.incr('answer2', version=2) + self.assertEqual(cache.incr('answer2', version=2), 43) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) - cache.decr('answer2', version=2) + self.assertEqual(cache.decr('answer2', version=2), 42) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) - caches['v2'].incr('answer3') + self.assertEqual(caches['v2'].incr('answer3'), 43) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) - caches['v2'].decr('answer3') + self.assertEqual(caches['v2'].decr('answer3'), 42) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) - caches['v2'].incr('answer4', version=1) + self.assertEqual(caches['v2'].incr('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) - caches['v2'].decr('answer4', version=1) + self.assertEqual(caches['v2'].decr('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) @@ -790,6 +860,13 @@ def test_incr_version(self): with self.assertRaises(ValueError): cache.incr_version('does_not_exist') + cache.set('null', None) + if self.supports_get_with_default: + 
self.assertEqual(cache.incr_version('null'), 2) + else: + with self.assertRaises(self.incr_decr_type_error): + cache.incr_version('null') + def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) @@ -814,6 +891,13 @@ def test_decr_version(self): with self.assertRaises(ValueError): cache.decr_version('does_not_exist', version=2) + cache.set('null', None, version=2) + if self.supports_get_with_default: + self.assertEqual(cache.decr_version('null', version=2), 1) + else: + with self.assertRaises(self.incr_decr_type_error): + cache.decr_version('null', version=2) + def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) @@ -827,30 +911,33 @@ def test_custom_key_func(self): self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpicklable_object(self): - update_middleware = UpdateCacheMiddleware() - update_middleware.cache = cache - - fetch_middleware = FetchFromCacheMiddleware() + fetch_middleware = FetchFromCacheMiddleware(empty_response) fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True - get_cache_data = FetchFromCacheMiddleware().process_request(request) + get_cache_data = FetchFromCacheMiddleware( + empty_response + ).process_request(request) self.assertIsNone(get_cache_data) - response = HttpResponse() content = 'Testing cookie serialization.' 
- response.content = content - response.set_cookie('foo', 'bar') - update_middleware.process_response(request, response) + def get_response(req): + response = HttpResponse(content) + response.set_cookie('foo', 'bar') + return response + + update_middleware = UpdateCacheMiddleware(get_response) + update_middleware.cache = cache + response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) - update_middleware.process_response(request, get_cache_data) + UpdateCacheMiddleware(lambda req: get_cache_data)(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) @@ -869,7 +956,12 @@ def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) - self.assertEqual(cache.get_or_set('null', None), None) + self.assertIsNone(cache.get_or_set('null', None)) + if self.supports_get_with_default: + # Previous get_or_set() stores None in the cache. + self.assertIsNone(cache.get('null', 'default')) + else: + self.assertEqual(cache.get('null', 'default'), 'default') def test_get_or_set_callable(self): def my_callable(): @@ -878,14 +970,16 @@ def my_callable(): self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') - def test_get_or_set_callable_returning_none(self): - self.assertIsNone(cache.get_or_set('mykey', lambda: None)) - # Previous get_or_set() doesn't store None in the cache. - self.assertEqual(cache.get('mykey', 'default'), 'default') + self.assertIsNone(cache.get_or_set('null', lambda: None)) + if self.supports_get_with_default: + # Previous get_or_set() stores None in the cache. 
+ self.assertIsNone(cache.get('null', 'default')) + else: + self.assertEqual(cache.get('null', 'default'), 'default') def test_get_or_set_version(self): msg = "get_or_set() missing 1 required positional argument: 'default'" - cache.get_or_set('brian', 1979, version=2) + self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian') with self.assertRaisesMessage(TypeError, msg): @@ -949,6 +1043,11 @@ def test_ignores_non_cache_files(self): ) os.remove(fname) + def test_creates_cache_dir_if_nonexistent(self): + os.rmdir(self.dirname) + cache.set('foo', 'bar') + self.assertTrue(os.path.exists(self.dirname)) + def test_clear_does_not_remove_cache_dir(self): cache.clear() self.assertTrue( @@ -1026,19 +1125,6 @@ def test_pop(self): ) self.assertEqual(cache.pop(4, retry=False), 4) - def test_pickle(self): - letters = 'abcde' - cache.clear() - - for num, val in enumerate(letters): - cache.set(val, num) - - data = pickle.dumps(cache) - other = pickle.loads(data) - - for key in letters: - self.assertEqual(other.get(key), cache.get(key)) - def test_cache(self): subcache = cache.cache('test') directory = os.path.join(cache.directory, 'cache', 'test') From f3836f9f1938ede1eaf32d474be0311d0c7a3183 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 30 Dec 2021 17:35:55 -0800 Subject: [PATCH 37/74] Fix DjangoCache.delete to return True/False --- diskcache/djangocache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index 347a613..8bf85ce 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -220,7 +220,7 @@ def delete(self, key, version=None, retry=True): """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) - self._cache.delete(key, retry) + return self._cache.delete(key, retry) def incr(self, key, delta=1, version=None, default=None, retry=True): """Increment value by delta for item 
with key. From 8da6634877178ff3704d8468aa5a95b9baf4f9ac Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 30 Dec 2021 17:37:38 -0800 Subject: [PATCH 38/74] Bump Django testing to 3.2 --- requirements.txt | 2 +- tox.ini | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 2ab91c7..efb2160 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ -e . blue coverage -django==2.2.* +django==3.2.* django_redis doc8 flake8 diff --git a/tox.ini b/tox.ini index 104a084..650c8f0 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ skip_missing_interpreters=True [testenv] commands=pytest deps= - django==2.2.* + django==3.2.* pytest pytest-cov pytest-django @@ -31,7 +31,7 @@ allowlist_externals=make changedir=docs commands=make html deps= - django==2.2.* + django==3.2.* sphinx [testenv:flake8] @@ -53,7 +53,7 @@ deps=mypy [testenv:pylint] commands=pylint {toxinidir}/diskcache deps= - django==2.2.* + django==3.2.* pylint [testenv:rstcheck] From 221f3d38cea69e33ba9a83cde0e2e202f3d3d70e Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 30 Dec 2021 17:45:26 -0800 Subject: [PATCH 39/74] Remove unused imports --- tests/test_djangocache.py | 38 ++------------------------------------ 1 file changed, 2 insertions(+), 36 deletions(-) diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index 36e1b01..6fae5be 100644 --- a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -3,64 +3,30 @@ # Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. 
-import copy -import io import os import pickle -import re import shutil -import sys import tempfile -import threading import time -import unittest -import warnings -from pathlib import Path -from unittest import mock, skipIf +from unittest import mock from django.conf import settings -from django.core import management, signals from django.core.cache import ( - DEFAULT_CACHE_ALIAS, CacheKeyWarning, - InvalidCacheKey, cache, caches, ) -from django.core.cache.utils import make_template_fragment_key -from django.db import close_old_connections, connection, connections -from django.http import ( - HttpRequest, - HttpResponse, - HttpResponseNotModified, - StreamingHttpResponse, -) +from django.http import HttpResponse from django.middleware.cache import ( - CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) -from django.middleware.csrf import CsrfViewMiddleware -from django.template import engines -from django.template.context_processors import csrf -from django.template.response import TemplateResponse from django.test import ( RequestFactory, - SimpleTestCase, TestCase, - TransactionTestCase, override_settings, ) from django.test.signals import setting_changed -from django.utils import timezone, translation -from django.utils.cache import ( - get_cache_key, - learn_cache_key, - patch_cache_control, - patch_vary_headers, -) -from django.utils.encoding import force_text -from django.views.decorators.cache import cache_page ################################################################################ # Setup Django for models import. 
From 5ac77969c88df42eece83be2e4ce85d2e277109d Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 30 Dec 2021 17:48:08 -0800 Subject: [PATCH 40/74] Run isort --- tests/test_djangocache.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index 6fae5be..5f83b81 100644 --- a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -11,21 +11,13 @@ from unittest import mock from django.conf import settings -from django.core.cache import ( - CacheKeyWarning, - cache, - caches, -) +from django.core.cache import CacheKeyWarning, cache, caches from django.http import HttpResponse from django.middleware.cache import ( FetchFromCacheMiddleware, UpdateCacheMiddleware, ) -from django.test import ( - RequestFactory, - TestCase, - override_settings, -) +from django.test import RequestFactory, TestCase, override_settings from django.test.signals import setting_changed ################################################################################ From 1cb1425b1ba24f26fb1e37349c4c2658c2a46d8f Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 30 Dec 2021 17:58:24 -0800 Subject: [PATCH 41/74] Bump version to 5.4.0 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index c361ca9..2355128 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.3.0' -__build__ = 0x050300 +__version__ = '5.4.0' +__build__ = 0x050400 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2022 Grant Jenks' From c9844bba9de039ab64858073d8864f7e40172c9e Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Sun, 20 Feb 2022 12:47:41 -0800 Subject: [PATCH 42/74] Put commands above deps for doc8 testenv --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 650c8f0..65da17c 100644 --- 
a/tox.ini +++ b/tox.ini @@ -23,8 +23,8 @@ commands=blue --check {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tes deps=blue [testenv:doc8] -deps=doc8 commands=doc8 docs --ignore-path docs/_build +deps=doc8 [testenv:docs] allowlist_externals=make From 2d2cc8b39db3e0c0785dd304bde7902219d85ffe Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Sun, 20 Feb 2022 13:42:14 -0800 Subject: [PATCH 43/74] Update rsync command for uploading docs --- tox.ini | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 65da17c..36ed2b8 100644 --- a/tox.ini +++ b/tox.ini @@ -64,8 +64,9 @@ deps=rstcheck allowlist_externals=rsync changedir=docs commands= - rsync -azP --stats --delete _build/html/ \ - grantjenks.com:/srv/www/www.grantjenks.com/public/docs/diskcache/ + rsync --rsync-path 'sudo -u herokuish rsync' -azP --stats --delete \ + _build/html/ \ + grantjenks:/srv/www/grantjenks.com/public/docs/diskcache/ [isort] multi_line_output = 3 From c1774469b8d4c4906fe24f7b5afd637795af48d9 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 22:12:28 -0700 Subject: [PATCH 44/74] Remove unused import --- setup.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.py b/setup.py index b49d9c8..841dfb9 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,3 @@ -from io import open - from setuptools import setup from setuptools.command.test import test as TestCommand From f3fcdffee88af7923740b9864d533524a79d1222 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 22:27:48 -0700 Subject: [PATCH 45/74] Update Cache(...) 
params when allocating --- diskcache/fanout.py | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/diskcache/fanout.py b/diskcache/fanout.py index dc5240c..8fe51d9 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -573,9 +573,11 @@ def reset(self, key, value=ENOVAL): break return result - def cache(self, name): + def cache(self, name, timeout=60, disk=None, **settings): """Return Cache with given `name` in subdirectory. + If disk is None (default), uses the fanout cache disk. + >>> fanout_cache = FanoutCache() >>> cache = fanout_cache.cache('test') >>> cache.set('abc', 123) @@ -588,6 +590,9 @@ def cache(self, name): True :param str name: subdirectory name for Cache + :param float timeout: SQLite connection timeout + :param disk: Disk type or subclass for serialization + :param settings: any of DEFAULT_SETTINGS :return: Cache with given name """ @@ -598,7 +603,12 @@ def cache(self, name): except KeyError: parts = name.split('/') directory = op.join(self._directory, 'cache', *parts) - temp = Cache(directory=directory, disk=self._disk) + temp = Cache( + directory=directory, + timeout=timeout, + disk=self._disk if disk is None else disk, + **settings, + ) _caches[name] = temp return temp @@ -626,7 +636,11 @@ def deque(self, name): except KeyError: parts = name.split('/') directory = op.join(self._directory, 'deque', *parts) - cache = Cache(directory=directory, disk=self._disk) + cache = Cache( + directory=directory, + disk=self._disk, + eviction_policy='none', + ) deque = Deque.fromcache(cache) _deques[name] = deque return deque @@ -658,7 +672,11 @@ def index(self, name): except KeyError: parts = name.split('/') directory = op.join(self._directory, 'index', *parts) - cache = Cache( + directory=directory, + disk=self._disk, + eviction_policy='none', + ) index = Index.fromcache(cache) _indexes[name] = index return index From ee7a248e5c09e6fb9145b2e4a1777a345114b71d
Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 22:37:39 -0700 Subject: [PATCH 46/74] Add docs about the eviction policy to recipes --- diskcache/recipes.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/diskcache/recipes.py b/diskcache/recipes.py index b5af6dd..babb68f 100644 --- a/diskcache/recipes.py +++ b/diskcache/recipes.py @@ -17,6 +17,9 @@ class Averager: Sometimes known as "online statistics," the running average maintains the total and count. The average can then be calculated at any time. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.FanoutCache() >>> ave = Averager(cache, 'latency') @@ -65,6 +68,9 @@ def pop(self): class Lock: """Recipe for cross-process and cross-thread lock. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> lock = Lock(cache, 'report-123') @@ -113,6 +119,9 @@ def __exit__(self, *exc_info): class RLock: """Recipe for cross-process and cross-thread re-entrant lock. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> rlock = RLock(cache, 'user-123') @@ -181,6 +190,9 @@ def __exit__(self, *exc_info): class BoundedSemaphore: """Recipe for cross-process and cross-thread bounded semaphore. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2) @@ -251,6 +263,9 @@ def throttle( ): """Decorator to throttle calls to function. + Assumes keys will not be evicted. 
Set the eviction policy to 'none' on the + cache to guarantee the keys are not evicted. + >>> import diskcache, time >>> cache = diskcache.Cache() >>> count = 0 @@ -305,6 +320,9 @@ def barrier(cache, lock_factory, name=None, expire=None, tag=None): Supports different kinds of locks: Lock, RLock, BoundedSemaphore. + Assumes keys will not be evicted. Set the eviction policy to 'none' on the + cache to guarantee the keys are not evicted. + >>> import diskcache, time >>> cache = diskcache.Cache() >>> @barrier(cache, Lock) From fb2fa2c401bb88ecbbc58009d02cbe65f2fc594a Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 22:52:52 -0700 Subject: [PATCH 47/74] Test on Django 4.2 LTS --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index 36ed2b8..0ddcc8f 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ skip_missing_interpreters=True [testenv] commands=pytest deps= - django==3.2.* + django==4.2.* pytest pytest-cov pytest-django @@ -31,7 +31,7 @@ allowlist_externals=make changedir=docs commands=make html deps= - django==3.2.* + django==4.2.* sphinx [testenv:flake8] @@ -53,7 +53,7 @@ deps=mypy [testenv:pylint] commands=pylint {toxinidir}/diskcache deps= - django==3.2.* + django==4.2.* pylint [testenv:rstcheck] From 0a9783353ff7dc9f874154ef98b23de27e4aba8d Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 22:53:17 -0700 Subject: [PATCH 48/74] Update year to 2023 --- README.rst | 4 ++-- diskcache/__init__.py | 2 +- docs/conf.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index eb06a6a..04abdc0 100644 --- a/README.rst +++ b/README.rst @@ -4,7 +4,7 @@ DiskCache: Disk Backed Cache `DiskCache`_ is an Apache2 licensed disk and file backed cache library, written in pure-Python, and compatible with Django. -The cloud-based computing of 2021 puts a premium on memory. Gigabytes of empty +The cloud-based computing of 2023 puts a premium on memory. 
Gigabytes of empty space is left on disks as processes vie for memory. Among these processes is Memcached (and sometimes Redis) which is used as a cache. Wouldn't it be nice to leverage empty disk space for caching? @@ -387,7 +387,7 @@ Reference License ------- -Copyright 2016-2022 Grant Jenks +Copyright 2016-2023 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 2355128..f7aa771 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -65,4 +65,4 @@ __build__ = 0x050400 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2016-2022 Grant Jenks' +__copyright__ = 'Copyright 2016-2023 Grant Jenks' diff --git a/docs/conf.py b/docs/conf.py index 92ce1b9..92bf3ec 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,7 +20,7 @@ # -- Project information ----------------------------------------------------- project = 'DiskCache' -copyright = '2022, Grant Jenks' +copyright = '2023, Grant Jenks' author = 'Grant Jenks' # The full version, including alpha/beta/rc tags From 712cc1827b29fb8f6a76803d32f87a5fb57f3c6e Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 22:56:16 -0700 Subject: [PATCH 49/74] Bump python testing to 3.11 --- .github/workflows/integration.yml | 4 ++-- .github/workflows/release.yml | 2 +- tox.ini | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 07a5650..2d83ad3 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -16,7 +16,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.10' + python-version: '3.11' - name: Install dependencies run: | pip install --upgrade pip @@ -32,7 +32,7 @@ jobs: max-parallel: 8 matrix: os: [ubuntu-latest, macos-latest, 
windows-latest] - python-version: [3.6, 3.7, 3.8, 3.9, '3.10'] + python-version: [3.7, 3.8, 3.9, '3.10', 3.11] steps: - name: Set up Python ${{ matrix.python-version }} x64 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1f89c14..676593c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.10' + python-version: '3.11' - name: Install dependencies run: | diff --git a/tox.ini b/tox.ini index 0ddcc8f..3735ebc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py36,py37,py38,py39 +envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py37,py38,py39,py310,py311 skip_missing_interpreters=True [testenv] From d7ae0990b240e8ea22c1a4060f58900edd339a18 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:07:29 -0700 Subject: [PATCH 50/74] i blue it --- diskcache/core.py | 10 ++--- diskcache/fanout.py | 2 +- tests/benchmark_glob.py | 2 +- tests/settings.py | 2 +- tests/stress_test_core.py | 30 ++++++------- tests/stress_test_fanout.py | 30 ++++++------- tests/test_core.py | 84 ++++++++++++++++++------------------- tests/test_djangocache.py | 2 +- tests/test_fanout.py | 16 +++---- 9 files changed, 85 insertions(+), 93 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index fb343be..05a0854 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -49,14 +49,14 @@ def __repr__(self): 'statistics': 0, # False 'tag_index': 0, # False 'eviction_policy': 'least-recently-stored', - 'size_limit': 2 ** 30, # 1gb + 'size_limit': 2**30, # 1gb 'cull_limit': 10, 'sqlite_auto_vacuum': 1, # FULL - 'sqlite_cache_size': 2 ** 13, # 8,192 pages + 'sqlite_cache_size': 2**13, # 8,192 pages 'sqlite_journal_mode': 'wal', - 'sqlite_mmap_size': 2 ** 26, # 64mb + 'sqlite_mmap_size': 2**26, # 64mb 'sqlite_synchronous': 1, # 
NORMAL - 'disk_min_file_size': 2 ** 15, # 32kb + 'disk_min_file_size': 2**15, # 32kb 'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, } @@ -212,7 +212,7 @@ def store(self, value, read, key=UNKNOWN): size = op.getsize(full_path) return size, MODE_TEXT, filename, None elif read: - reader = ft.partial(value.read, 2 ** 22) + reader = ft.partial(value.read, 2**22) filename, full_path = self.filename(key, value) iterator = iter(reader, b'') size = self._write(full_path, iterator, 'xb') diff --git a/diskcache/fanout.py b/diskcache/fanout.py index 8fe51d9..5283490 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -45,7 +45,7 @@ def __init__( timeout=timeout, disk=disk, size_limit=size_limit, - **settings + **settings, ) for num in range(shards) ) diff --git a/tests/benchmark_glob.py b/tests/benchmark_glob.py index 7f0bf7c..7da5fd3 100644 --- a/tests/benchmark_glob.py +++ b/tests/benchmark_glob.py @@ -22,7 +22,7 @@ print(template % ('Count', 'Time')) print(' '.join(['=' * size] * len(cols))) -for count in [10 ** exp for exp in range(6)]: +for count in [10**exp for exp in range(6)]: for value in range(count): with open(op.join('tmp', '%s.tmp' % value), 'wb') as writer: pass diff --git a/tests/settings.py b/tests/settings.py index 1a2f569..04aee85 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -25,7 +25,7 @@ # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True -ALLOWED_HOSTS = [u'testserver'] +ALLOWED_HOSTS = ['testserver'] # Application definition diff --git a/tests/stress_test_core.py b/tests/stress_test_core.py index c30fa3f..2b2578b 100644 --- a/tests/stress_test_core.py +++ b/tests/stress_test_core.py @@ -33,16 +33,14 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size @@ -77,18 +75,16 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') - size = random.randint(1, int(2 ** 16 / 13)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_float(): @@ -233,7 +229,7 @@ def percentile(sequence, percent): def stress_test( create=True, delete=True, - eviction_policy=u'least-recently-stored', + eviction_policy='least-recently-stored', processes=1, threads=1, ): @@ -293,17 +289,17 @@ def stress_test( def stress_test_lru(): """Stress test least-recently-used eviction policy.""" - stress_test(eviction_policy=u'least-recently-used') + stress_test(eviction_policy='least-recently-used') def 
stress_test_lfu(): """Stress test least-frequently-used eviction policy.""" - stress_test(eviction_policy=u'least-frequently-used') + stress_test(eviction_policy='least-frequently-used') def stress_test_none(): """Stress test 'none' eviction policy.""" - stress_test(eviction_policy=u'none') + stress_test(eviction_policy='none') def stress_test_mp(): @@ -396,7 +392,7 @@ def stress_test_mp(): '-v', '--eviction-policy', type=str, - default=u'least-recently-stored', + default='least-recently-stored', ) args = parser.parse_args() diff --git a/tests/stress_test_fanout.py b/tests/stress_test_fanout.py index d3b67e3..e78dda5 100644 --- a/tests/stress_test_fanout.py +++ b/tests/stress_test_fanout.py @@ -32,16 +32,14 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size @@ -76,18 +74,16 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') - size = random.randint(1, int(2 ** 16 / 13)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_float(): @@ -224,7 
+220,7 @@ def percentile(sequence, percent): def stress_test( create=True, delete=True, - eviction_policy=u'least-recently-stored', + eviction_policy='least-recently-stored', processes=1, threads=1, ): @@ -284,17 +280,17 @@ def stress_test( def stress_test_lru(): """Stress test least-recently-used eviction policy.""" - stress_test(eviction_policy=u'least-recently-used') + stress_test(eviction_policy='least-recently-used') def stress_test_lfu(): """Stress test least-frequently-used eviction policy.""" - stress_test(eviction_policy=u'least-frequently-used') + stress_test(eviction_policy='least-frequently-used') def stress_test_none(): """Stress test 'none' eviction policy.""" - stress_test(eviction_policy=u'none') + stress_test(eviction_policy='none') def stress_test_mp(): @@ -387,7 +383,7 @@ def stress_test_mp(): '-v', '--eviction-policy', type=str, - default=u'least-recently-stored', + default='least-recently-stored', ) args = parser.parse_args() diff --git a/tests/test_core.py b/tests/test_core.py index 55ca962..356d104 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -38,11 +38,11 @@ def test_init(cache): def test_init_disk(): - with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2 ** 20) as cache: + with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2**20) as cache: key = (None, 0, 'abc') cache[key] = 0 cache.check() - assert cache.disk_min_file_size == 2 ** 20 + assert cache.disk_min_file_size == 2**20 assert cache.disk_pickle_protocol == 1 shutil.rmtree(cache.directory, ignore_errors=True) @@ -59,15 +59,15 @@ def test_disk_reset(): assert cache._disk.min_file_size == 0 assert cache._disk.pickle_protocol == 0 - cache.reset('disk_min_file_size', 2 ** 10) + cache.reset('disk_min_file_size', 2**10) cache.reset('disk_pickle_protocol', 2) cache[1] = value cache.check() - assert cache.disk_min_file_size == 2 ** 10 + assert cache.disk_min_file_size == 2**10 assert cache.disk_pickle_protocol == 2 - assert cache._disk.min_file_size == 2 ** 10 + 
assert cache._disk.min_file_size == 2**10 assert cache._disk.pickle_protocol == 2 shutil.rmtree(cache.directory, ignore_errors=True) @@ -150,7 +150,7 @@ def test_pragma_error(cache): cursor.fetchall = fetchall fetchall.side_effect = [sqlite3.OperationalError] * 60000 - size = 2 ** 28 + size = 2**28 with mock.patch('time.sleep', lambda num: 0): with mock.patch.object(cache, '_local', local): @@ -177,15 +177,15 @@ def __getattr__(self, name): def test_getsetdel(cache): values = [ (None, False), - ((None,) * 2 ** 20, False), + ((None,) * 2**20, False), (1234, False), - (2 ** 512, False), + (2**512, False), (56.78, False), - (u'hello', False), - (u'hello' * 2 ** 20, False), + ('hello', False), + ('hello' * 2**20, False), (b'world', False), - (b'world' * 2 ** 20, False), - (io.BytesIO(b'world' * 2 ** 20), True), + (b'world' * 2**20, False), + (io.BytesIO(b'world' * 2**20), True), ] for key, (value, file_like) in enumerate(values): @@ -229,7 +229,7 @@ def test_get_keyerror4(cache): func = mock.Mock(side_effect=IOError(errno.ENOENT, '')) cache.reset('statistics', True) - cache[0] = b'abcd' * 2 ** 20 + cache[0] = b'abcd' * 2**20 with mock.patch('diskcache.core.open', func): with pytest.raises((IOError, KeyError, OSError)): @@ -237,7 +237,7 @@ def test_get_keyerror4(cache): def test_read(cache): - cache.set(0, b'abcd' * 2 ** 20) + cache.set(0, b'abcd' * 2**20) with cache.read(0) as reader: assert reader is not None @@ -249,7 +249,7 @@ def test_read_keyerror(cache): def test_set_twice(cache): - large_value = b'abcd' * 2 ** 20 + large_value = b'abcd' * 2**20 cache[0] = 0 cache[0] = 1 @@ -283,7 +283,7 @@ def test_set_timeout(cache): with pytest.raises(dc.Timeout): try: with mock.patch.object(cache, '_local', local): - cache.set('a', 'b' * 2 ** 20) + cache.set('a', 'b' * 2**20) finally: cache.check() @@ -299,11 +299,11 @@ def test_get(cache): assert cache.get(2, {}) == {} assert cache.get(0, expire_time=True, tag=True) == (None, None, None) - assert cache.set(0, 0, expire=None, 
tag=u'number') + assert cache.set(0, 0, expire=None, tag='number') assert cache.get(0, expire_time=True) == (0, None) - assert cache.get(0, tag=True) == (0, u'number') - assert cache.get(0, expire_time=True, tag=True) == (0, None, u'number') + assert cache.get(0, tag=True) == (0, 'number') + assert cache.get(0, expire_time=True, tag=True) == (0, None, 'number') def test_get_expired_fast_path(cache): @@ -359,8 +359,8 @@ def test_pop(cache): assert cache.set('delta', 210) assert cache.pop('delta', expire_time=True) == (210, None) - assert cache.set('epsilon', '0' * 2 ** 20) - assert cache.pop('epsilon') == '0' * 2 ** 20 + assert cache.set('epsilon', '0' * 2**20) + assert cache.pop('epsilon') == '0' * 2**20 def test_pop_ioerror(cache): @@ -426,11 +426,11 @@ def test_stats(cache): def test_path(cache): - cache[0] = u'abc' - large_value = b'abc' * 2 ** 20 + cache[0] = 'abc' + large_value = b'abc' * 2**20 cache[1] = large_value - assert cache.get(0, read=True) == u'abc' + assert cache.get(0, read=True) == 'abc' with cache.get(1, read=True) as reader: assert reader.name is not None @@ -465,7 +465,7 @@ def test_expire_rows(cache): def test_least_recently_stored(cache): - cache.reset('eviction_policy', u'least-recently-stored') + cache.reset('eviction_policy', 'least-recently-stored') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 2) @@ -500,7 +500,7 @@ def test_least_recently_stored(cache): def test_least_recently_used(cache): - cache.reset('eviction_policy', u'least-recently-used') + cache.reset('eviction_policy', 'least-recently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) @@ -530,7 +530,7 @@ def test_least_recently_used(cache): def test_least_frequently_used(cache): - cache.reset('eviction_policy', u'least-frequently-used') + cache.reset('eviction_policy', 'least-frequently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) @@ -558,8 +558,8 @@ def test_least_frequently_used(cache): def 
test_check(cache): - blob = b'a' * 2 ** 20 - keys = (0, 1, 1234, 56.78, u'hello', b'world', None) + blob = b'a' * 2**20 + keys = (0, 1, 1234, 56.78, 'hello', b'world', None) for key in keys: cache[key] = blob @@ -662,12 +662,12 @@ def test_clear_timeout(cache): def test_tag(cache): - assert cache.set(0, None, tag=u'zero') + assert cache.set(0, None, tag='zero') assert cache.set(1, None, tag=1234) assert cache.set(2, None, tag=5.67) assert cache.set(3, None, tag=b'three') - assert cache.get(0, tag=True) == (None, u'zero') + assert cache.get(0, tag=True) == (None, 'zero') assert cache.get(1, tag=True) == (None, 1234) assert cache.get(2, tag=True) == (None, 5.67) assert cache.get(3, tag=True) == (None, b'three') @@ -675,11 +675,11 @@ def test_tag(cache): def test_with(cache): with dc.Cache(cache.directory) as tmp: - tmp[u'a'] = 0 - tmp[u'b'] = 1 + tmp['a'] = 0 + tmp['b'] = 1 - assert cache[u'a'] == 0 - assert cache[u'b'] == 1 + assert cache['a'] == 0 + assert cache['b'] == 1 def test_contains(cache): @@ -708,7 +708,7 @@ def test_add(cache): def test_add_large_value(cache): - value = b'abcd' * 2 ** 20 + value = b'abcd' * 2**20 assert cache.add(b'test-key', value) assert cache.get(b'test-key') == value assert not cache.add(b'test-key', value * 2) @@ -919,7 +919,7 @@ def test_push_peek_expire(cache): def test_push_pull_large_value(cache): - value = b'test' * (2 ** 20) + value = b'test' * (2**20) cache.push(value) assert cache.pull() == (500000000000000, value) assert len(cache) == 0 @@ -927,7 +927,7 @@ def test_push_pull_large_value(cache): def test_push_peek_large_value(cache): - value = b'test' * (2 ** 20) + value = b'test' * (2**20) cache.push(value) assert cache.peek() == (500000000000000, value) assert len(cache) == 1 @@ -1144,8 +1144,8 @@ def test_cull_timeout(cache): def test_key_roundtrip(cache): - key_part_0 = u'part0' - key_part_1 = u'part1' + key_part_0 = 'part0' + key_part_1 = 'part1' to_test = [ (key_part_0, key_part_1), [key_part_0, key_part_1], @@ -1354,7 
+1354,7 @@ def foo(*args, **kwargs): def test_cleanup_dirs(cache): - value = b'\0' * 2 ** 20 + value = b'\0' * 2**20 start_count = len(os.listdir(cache.directory)) for i in range(10): cache[i] = value @@ -1370,7 +1370,7 @@ def test_disk_write_os_error(cache): func = mock.Mock(side_effect=[OSError] * 10) with mock.patch('diskcache.core.open', func): with pytest.raises(OSError): - cache[0] = '\0' * 2 ** 20 + cache[0] = '\0' * 2**20 def test_memoize_ignore(cache): diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index 5f83b81..b5cc2a8 100644 --- a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -1033,7 +1033,7 @@ def test_directory(self): self.assertTrue('tmp' in cache.directory) def test_read(self): - value = b'abcd' * 2 ** 20 + value = b'abcd' * 2**20 result = cache.set(b'test-key', value) self.assertTrue(result) diff --git a/tests/test_fanout.py b/tests/test_fanout.py index f212fac..deea03f 100644 --- a/tests/test_fanout.py +++ b/tests/test_fanout.py @@ -34,7 +34,7 @@ def test_init(cache): del default_settings['size_limit'] for key, value in default_settings.items(): assert getattr(cache, key) == value - assert cache.size_limit == 2 ** 27 + assert cache.size_limit == 2**27 cache.check() @@ -229,15 +229,15 @@ def test_incr_concurrent(): def test_getsetdel(cache): values = [ (None, False), - ((None,) * 2 ** 10, False), + ((None,) * 2**10, False), (1234, False), - (2 ** 512, False), + (2**512, False), (56.78, False), - (u'hello', False), - (u'hello' * 2 ** 10, False), + ('hello', False), + ('hello' * 2**10, False), (b'world', False), - (b'world' * 2 ** 10, False), - (io.BytesIO(b'world' * 2 ** 10), True), + (b'world' * 2**10, False), + (io.BytesIO(b'world' * 2**10), True), ] for key, (value, file_like) in enumerate(values): @@ -341,7 +341,7 @@ def test_tag_index(cache): def test_read(cache): - cache.set(0, b'abcd' * 2 ** 20) + cache.set(0, b'abcd' * 2**20) with cache.read(0) as reader: assert reader is not None From 
a53283d554fbe7bd678067f24d6f45d57de3f83b Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:25:16 -0700 Subject: [PATCH 51/74] Update requirements --- requirements-dev.txt | 23 +++++++++++++++++++++++ requirements.txt | 22 ---------------------- 2 files changed, 23 insertions(+), 22 deletions(-) create mode 100644 requirements-dev.txt diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..6149361 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,23 @@ +-e . +blue +coverage +django==4.2.* +django_redis +doc8 +flake8 +ipython +jedi +pickleDB +pylibmc +pylint +pytest +pytest-cov +pytest-django +pytest-env +pytest-xdist +rstcheck +sphinx +sqlitedict +tox +twine +wheel diff --git a/requirements.txt b/requirements.txt index efb2160..d6e1198 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,23 +1 @@ -e . -blue -coverage -django==3.2.* -django_redis -doc8 -flake8 -ipython -jedi==0.17.* # Remove after IPython bug fixed. -pickleDB -pylibmc -pylint -pytest -pytest-cov -pytest-django -pytest-env -pytest-xdist -rstcheck -sphinx -sqlitedict -tox -twine -wheel From b22a7d58c3dbf3f71fa4f9156ccbd4892ebf3fe2 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:25:35 -0700 Subject: [PATCH 52/74] Update pylint --- .pylintrc | 829 ++++++++++++++++++++++++---------------------- diskcache/core.py | 6 +- 2 files changed, 439 insertions(+), 396 deletions(-) diff --git a/.pylintrc b/.pylintrc index 6baa978..dc1490a 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,27 +1,77 @@ -[MASTER] +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. 
+clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) extension-pkg-whitelist= -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= -# Add files or directories to the blacklist. They should be base names, not -# paths. +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. ignore=CVS -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= +# Add files or directories matching the regular expressions patterns to the +# ignore-list. 
The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single @@ -36,6 +86,19 @@ load-plugins= # Pickle collected data for later comparisons. persistent=yes +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.11 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes @@ -44,321 +107,8 @@ suggestion-mode=yes # active Python interpreter and may run arbitrary code. 
unsafe-load-any-extension=no - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - 
using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, - no-member, - no-else-return, - duplicate-code, - inconsistent-return-statements, - consider-using-f-string, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. 
-score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. 
-generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. 
Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=3000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. 
-ignore-imports=yes - -# Minimum lines number of a similarity. -min-similarity-lines=4 +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= [BASIC] @@ -367,13 +117,15 @@ min-similarity-lines=4 argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- -# naming-style. +# naming-style. If left empty, argument names will be checked with the set +# naming style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= @@ -393,20 +145,30 @@ bad-names-rgxs= class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. #class-attribute-rgx= +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- -# style. +# style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming # style. #const-rgx= @@ -418,7 +180,8 @@ docstring-min-length=-1 function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- -# naming-style. 
+# naming-style. If left empty, function names will be checked with the set +# naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. @@ -440,21 +203,22 @@ include-naming-hint=no inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- -# style. +# style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- -# style. +# style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when @@ -470,90 +234,56 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- -# naming-style. +# naming-style. If left empty, variable names will be checked with the set +# naming style. #variable-rgx= -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. 
-check-quote-consistency=no +[CLASSES] -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp, + asyncSetUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. 
-exclude-protected=_asdict, - _fields, - _replace, - _source, - _make +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls +valid-metaclass-classmethod-first-arg=mcs [DESIGN] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + # Maximum number of arguments for function / method. max-args=8 @@ -587,7 +317,320 @@ min-public-methods=2 [EXCEPTIONS] -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=2500 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. 
+allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). 
You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + consider-using-f-string, + no-member, + no-else-return, + no-else-raise, + inconsistent-return-statements + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. 
+never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=20 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work.. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. 
+spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. 
+ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. 
+init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/diskcache/core.py b/diskcache/core.py index 05a0854..9f3a597 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -170,7 +170,7 @@ def get(self, key, raw): :return: corresponding Python key """ - # pylint: disable=no-self-use,unidiomatic-typecheck + # pylint: disable=unidiomatic-typecheck if raw: return bytes(key) if type(key) is sqlite3.Binary else key else: @@ -228,7 +228,6 @@ def store(self, value, read, key=UNKNOWN): return len(result), MODE_PICKLE, filename, None def _write(self, full_path, iterator, mode, encoding=None): - # pylint: disable=no-self-use full_dir, _ = op.split(full_path) for count in range(1, 11): @@ -264,7 +263,7 @@ def fetch(self, mode, filename, value, read): :raises: IOError if the value cannot be read """ - # pylint: disable=no-self-use,unidiomatic-typecheck,consider-using-with + # pylint: disable=unidiomatic-typecheck,consider-using-with if mode == MODE_RAW: return bytes(value) if type(value) is sqlite3.Binary else value elif mode == MODE_BINARY: @@ -1378,6 +1377,7 @@ def delete(self, key, retry=False): :raises Timeout: if database timeout occurs """ + # pylint: disable=unnecessary-dunder-call try: return self.__delitem__(key, retry=retry) except KeyError: From 471aa5e551aed186dd34d5bc7d345be835efd6f3 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:26:38 -0700 Subject: [PATCH 53/74] Drop Python 3.7 from testing --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 3735ebc..e7217a7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py37,py38,py39,py310,py311 +envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py38,py39,py310,py311 skip_missing_interpreters=True [testenv] From 
c14345f105f14eba45986b09ec96d87b8997c9cc Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:42:38 -0700 Subject: [PATCH 54/74] Update tests for Django 4.2 --- tests/test_djangocache.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index b5cc2a8..734ba1b 100644 --- a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -870,7 +870,6 @@ def test_custom_key_func(self): def test_cache_write_unpicklable_object(self): fetch_middleware = FetchFromCacheMiddleware(empty_response) - fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True @@ -887,7 +886,6 @@ def get_response(req): return response update_middleware = UpdateCacheMiddleware(get_response) - update_middleware.cache = cache response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) From 0294d58cd0cb72dd6affbbc46b3ff05d96d015cc Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:43:15 -0700 Subject: [PATCH 55/74] Bump version to v5.5.0 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index f7aa771..8d7e28c 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.4.0' -__build__ = 0x050400 +__version__ = '5.5.0' +__build__ = 0x050500 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From 6cd6888a16be1d531a19ca91d5e0daa6edac9718 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:45:14 -0700 Subject: [PATCH 56/74] Drop 3.7 from CI --- .github/workflows/integration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 2d83ad3..07aceec 100644 --- a/.github/workflows/integration.yml +++ 
b/.github/workflows/integration.yml @@ -32,7 +32,7 @@ jobs: max-parallel: 8 matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.7, 3.8, 3.9, '3.10', 3.11] + python-version: [3.8, 3.9, '3.10', 3.11] steps: - name: Set up Python ${{ matrix.python-version }} x64 From bbac13b54f9ac0a5c45b7cfbd242869b41042cac Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:52:57 -0700 Subject: [PATCH 57/74] Install dev requirements for wheel package --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 676593c..21b6e5b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -26,7 +26,7 @@ jobs: - name: Install dependencies run: | pip install --upgrade pip - pip install -r requirements.txt + pip install -r requirements-dev.txt - name: Create source dist run: python setup.py sdist From 0f5d8ed63406e26de96701f98aa585b4fb26f6dd Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 10 Apr 2023 23:53:20 -0700 Subject: [PATCH 58/74] Bump version to 5.5.1 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 8d7e28c..95dafb3 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.5.0' -__build__ = 0x050500 +__version__ = '5.5.1' +__build__ = 0x050501 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From fe5ee43ac5df1847556f280c696ab921f0910be2 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Sun, 16 Apr 2023 21:42:13 -0700 Subject: [PATCH 59/74] Close the cache explicitly before deleting the reference --- diskcache/persistent.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/diskcache/persistent.py b/diskcache/persistent.py index c3d570b..cce3736 100644 --- 
a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -533,12 +533,13 @@ def reverse(self): # GrantJ 2019-03-22 Consider using an algorithm that swaps the values # at two keys. Like self._cache.swap(key1, key2, retry=True) The swap # method would exchange the values at two given keys. Then, using a - # forward iterator and a reverse iterator, the reversis method could + # forward iterator and a reverse iterator, the reverse method could # avoid making copies of the values. temp = Deque(iterable=reversed(self)) self.clear() self.extend(temp) directory = temp.directory + temp.close() del temp rmtree(directory) From 9380c784d9e2954611b2ea309f1c657602085f25 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Sun, 16 Apr 2023 23:02:24 -0700 Subject: [PATCH 60/74] Oops, close the cache, not the deque --- diskcache/persistent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/diskcache/persistent.py b/diskcache/persistent.py index cce3736..01cf4e3 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -539,7 +539,7 @@ def reverse(self): self.clear() self.extend(temp) directory = temp.directory - temp.close() + temp._cache.close() del temp rmtree(directory) From f5a17ff0959a4cc7147d45a4cc8d8eef8d7416b0 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 19:00:34 -0700 Subject: [PATCH 61/74] Shutup pylint --- diskcache/persistent.py | 1 + 1 file changed, 1 insertion(+) diff --git a/diskcache/persistent.py b/diskcache/persistent.py index 01cf4e3..c3f22b5 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -530,6 +530,7 @@ def reverse(self): ['c', 'b', 'a'] """ + # pylint: disable=protected-access # GrantJ 2019-03-22 Consider using an algorithm that swaps the values # at two keys. Like self._cache.swap(key1, key2, retry=True) The swap # method would exchange the values at two given keys. 
Then, using a From ef94856d2447fa9662bc62557f9b96ba6f131e15 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 19:01:25 -0700 Subject: [PATCH 62/74] Bump version to 5.5.2 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 95dafb3..134c88a 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.5.1' -__build__ = 0x050501 +__version__ = '5.5.2' +__build__ = 0x050502 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From 74e554c5d9340765f6fd6f7891da49a420e92b70 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 19:05:01 -0700 Subject: [PATCH 63/74] Bump versions of checkout and setup-python --- .github/workflows/integration.yml | 8 ++++---- .github/workflows/release.yml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 07aceec..b596fc6 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -12,9 +12,9 @@ jobs: check: [bluecheck, doc8, docs, flake8, isortcheck, mypy, pylint, rstcheck] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: '3.11' - name: Install dependencies @@ -36,12 +36,12 @@ jobs: steps: - name: Set up Python ${{ matrix.python-version }} x64 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} architecture: x64 - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Install tox run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 21b6e5b..efe73c6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,7 +11,7 @@ jobs: 
runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Install libmemcached-dev run: | @@ -19,7 +19,7 @@ jobs: sudo apt-get install libmemcached-dev - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: '3.11' From 35dbeabd283b242e9afd33713a5cea5cd260f51d Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 20:38:04 -0700 Subject: [PATCH 64/74] Add maxlen parameter to diskcache.Deque (#191) * Add maxlen parameter to diskcache.Deque --- diskcache/djangocache.py | 5 ++- diskcache/fanout.py | 5 ++- diskcache/persistent.py | 91 +++++++++++++++++++++++++++++++--------- docs/tutorial.rst | 3 ++ tests/test_deque.py | 27 +++++++++++- 5 files changed, 107 insertions(+), 24 deletions(-) diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index 8bf85ce..5dc8ce2 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -44,14 +44,15 @@ def cache(self, name): """ return self._cache.cache(name) - def deque(self, name): + def deque(self, name, maxlen=None): """Return Deque with given `name` in subdirectory. :param str name: subdirectory name for Deque + :param maxlen: max length (default None, no max) :return: Deque with given name """ - return self._cache.deque(name) + return self._cache.deque(name, maxlen=maxlen) def index(self, name): """Return Index with given `name` in subdirectory. diff --git a/diskcache/fanout.py b/diskcache/fanout.py index 5283490..50005fc 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -612,7 +612,7 @@ def cache(self, name, timeout=60, disk=None, **settings): _caches[name] = temp return temp - def deque(self, name): + def deque(self, name, maxlen=None): """Return Deque with given `name` in subdirectory. 
>>> cache = FanoutCache() @@ -626,6 +626,7 @@ def deque(self, name): 1 :param str name: subdirectory name for Deque + :param maxlen: max length (default None, no max) :return: Deque with given name """ @@ -641,7 +642,7 @@ def deque(self, name): disk=self._disk, eviction_policy='none', ) - deque = Deque.fromcache(cache) + deque = Deque.fromcache(cache, maxlen=maxlen) _deques[name] = deque return deque diff --git a/diskcache/persistent.py b/diskcache/persistent.py index c3f22b5..522bb74 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -75,7 +75,7 @@ class Deque(Sequence): """ - def __init__(self, iterable=(), directory=None): + def __init__(self, iterable=(), directory=None, maxlen=None): """Initialize deque instance. If directory is None then temporary directory created. The directory @@ -86,10 +86,11 @@ def __init__(self, iterable=(), directory=None): """ self._cache = Cache(directory, eviction_policy='none') - self.extend(iterable) + self._maxlen = float('inf') if maxlen is None else maxlen + self._extend(iterable) @classmethod - def fromcache(cls, cache, iterable=()): + def fromcache(cls, cache, iterable=(), maxlen=None): """Initialize deque using `cache`. >>> cache = Cache() @@ -111,7 +112,8 @@ def fromcache(cls, cache, iterable=()): # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache - self.extend(iterable) + self._maxlen = float('inf') if maxlen is None else maxlen + self._extend(iterable) return self @property @@ -124,6 +126,31 @@ def directory(self): """Directory path where deque is stored.""" return self._cache.directory + @property + def maxlen(self): + """Max length of the deque.""" + return self._maxlen + + @maxlen.setter + def maxlen(self, value): + """Set max length of the deque. + + Pops items from left while length greater than max. 
+ + >>> deque = Deque() + >>> deque.extendleft('abcde') + >>> deque.maxlen = 3 + >>> list(deque) + ['c', 'd', 'e'] + + :param value: max length + + """ + self._maxlen = value + with self._cache.transact(retry=True): + while len(self._cache) > self._maxlen: + self._popleft() + def _index(self, index, func): len_self = len(self) @@ -244,7 +271,7 @@ def __iadd__(self, iterable): :return: deque with added items """ - self.extend(iterable) + self._extend(iterable) return self def __iter__(self): @@ -292,10 +319,11 @@ def __reversed__(self): pass def __getstate__(self): - return self.directory + return self.directory, self.maxlen def __setstate__(self, state): - self.__init__(directory=state) + directory, maxlen = state + self.__init__(directory=directory, maxlen=maxlen) def append(self, value): """Add `value` to back of deque. @@ -310,7 +338,12 @@ def append(self, value): :param value: value to add to back of deque """ - self._cache.push(value, retry=True) + with self._cache.transact(retry=True): + self._cache.push(value, retry=True) + if len(self._cache) > self._maxlen: + self._popleft() + + _append = append def appendleft(self, value): """Add `value` to front of deque. @@ -325,7 +358,12 @@ def appendleft(self, value): :param value: value to add to front of deque """ - self._cache.push(value, side='front', retry=True) + with self._cache.transact(retry=True): + self._cache.push(value, side='front', retry=True) + if len(self._cache) > self._maxlen: + self._pop() + + _appendleft = appendleft def clear(self): """Remove all elements from deque. @@ -340,6 +378,13 @@ def clear(self): """ self._cache.clear(retry=True) + _clear = clear + + def copy(self): + """Copy deque with same directory and max length.""" + TypeSelf = type(self) + return TypeSelf(directory=self.directory, maxlen=self.maxlen) + def count(self, value): """Return number of occurrences of `value` in deque. 
@@ -365,7 +410,9 @@ def extend(self, iterable): """ for value in iterable: - self.append(value) + self._append(value) + + _extend = extend def extendleft(self, iterable): """Extend front side of deque with value from `iterable`. @@ -379,7 +426,7 @@ def extendleft(self, iterable): """ for value in iterable: - self.appendleft(value) + self._appendleft(value) def peek(self): """Peek at value at back of deque. @@ -459,6 +506,8 @@ def pop(self): raise IndexError('pop from an empty deque') return value + _pop = pop + def popleft(self): """Remove and return value at front of deque. @@ -483,6 +532,8 @@ def popleft(self): raise IndexError('pop from an empty deque') return value + _popleft = popleft + def remove(self, value): """Remove first occurrence of `value` in deque. @@ -537,8 +588,8 @@ def reverse(self): # forward iterator and a reverse iterator, the reverse method could # avoid making copies of the values. temp = Deque(iterable=reversed(self)) - self.clear() - self.extend(temp) + self._clear() + self._extend(temp) directory = temp.directory temp._cache.close() del temp @@ -575,22 +626,22 @@ def rotate(self, steps=1): for _ in range(steps): try: - value = self.pop() + value = self._pop() except IndexError: return else: - self.appendleft(value) + self._appendleft(value) else: steps *= -1 steps %= len_self for _ in range(steps): try: - value = self.popleft() + value = self._popleft() except IndexError: return else: - self.append(value) + self._append(value) __hash__ = None # type: ignore @@ -669,7 +720,9 @@ def __init__(self, *args, **kwargs): args = args[1:] directory = None self._cache = Cache(directory, eviction_policy='none') - self.update(*args, **kwargs) + self._update(*args, **kwargs) + + _update = MutableMapping.update @classmethod def fromcache(cls, cache, *args, **kwargs): @@ -695,7 +748,7 @@ def fromcache(cls, cache, *args, **kwargs): # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache - self.update(*args, **kwargs) + 
self._update(*args, **kwargs) return self @property diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 1963635..69277d3 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -565,6 +565,9 @@ access and editing at both front and back sides. :class:`Deque 4 >>> other.popleft() 'foo' + >>> thing = Deque('abcde', maxlen=3) + >>> list(thing) + ['c', 'd', 'e'] :class:`Deque ` objects provide an efficient and safe means of cross-thread and cross-process communication. :class:`Deque ` diff --git a/tests/test_deque.py b/tests/test_deque.py index add7714..71c69e2 100644 --- a/tests/test_deque.py +++ b/tests/test_deque.py @@ -77,6 +77,20 @@ def test_getsetdel(deque): assert len(deque) == 0 +def test_append(deque): + deque.maxlen = 3 + for item in 'abcde': + deque.append(item) + assert deque == 'cde' + + +def test_appendleft(deque): + deque.maxlen = 3 + for item in 'abcde': + deque.appendleft(item) + assert deque == 'edc' + + def test_index_positive(deque): cache = mock.MagicMock() cache.__len__.return_value = 3 @@ -131,9 +145,12 @@ def test_state(deque): sequence = list('abcde') deque.extend(sequence) assert deque == sequence + deque.maxlen = 3 + assert list(deque) == sequence[-3:] state = pickle.dumps(deque) values = pickle.loads(state) - assert values == sequence + assert values == sequence[-3:] + assert values.maxlen == 3 def test_compare(deque): @@ -161,6 +178,14 @@ def test_repr(): assert repr(deque) == 'Deque(directory=%r)' % directory +def test_copy(deque): + sequence = list('abcde') + deque.extend(sequence) + temp = deque.copy() + assert deque == sequence + assert temp == sequence + + def test_count(deque): deque += 'abbcccddddeeeee' From 4beffe892a6c4352098a79614de40649d7e9f88e Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 20:40:55 -0700 Subject: [PATCH 65/74] Bump version to 5.6.0 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 
134c88a..8647b9a 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.5.2' -__build__ = 0x050502 +__version__ = '5.6.0' +__build__ = 0x050600 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From cffbcec2b198e3a296ec294bd43da37fc559645b Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 22:30:26 -0700 Subject: [PATCH 66/74] Fix docs re: JSONDisk --- docs/tutorial.rst | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 69277d3..2eb454d 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -821,30 +821,28 @@ example below uses compressed JSON, available for convenience as .. code-block:: python - import json, zlib - class JSONDisk(diskcache.Disk): def __init__(self, directory, compress_level=1, **kwargs): self.compress_level = compress_level - super(JSONDisk, self).__init__(directory, **kwargs) + super().__init__(directory, **kwargs) def put(self, key): json_bytes = json.dumps(key).encode('utf-8') data = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).put(data) + return super().put(data) def get(self, key, raw): - data = super(JSONDisk, self).get(key, raw) + data = super().get(key, raw) return json.loads(zlib.decompress(data).decode('utf-8')) - def store(self, value, read): + def store(self, value, read, key=UNKNOWN): if not read: json_bytes = json.dumps(value).encode('utf-8') value = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).store(value, read) + return super().store(value, read, key=key) def fetch(self, mode, filename, value, read): - data = super(JSONDisk, self).fetch(mode, filename, value, read) + data = super().fetch(mode, filename, value, read) if not read: data = json.loads(zlib.decompress(data).decode('utf-8')) return data From f81160f22af9e8af0e07e179808280188146a020 Mon Sep 
17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 22:46:44 -0700 Subject: [PATCH 67/74] Support pathlib.Path as directory argument --- diskcache/core.py | 1 + diskcache/fanout.py | 1 + tests/test_core.py | 8 ++++++++ tests/test_fanout.py | 8 ++++++++ 4 files changed, 18 insertions(+) diff --git a/diskcache/core.py b/diskcache/core.py index 9f3a597..af65454 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -433,6 +433,7 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') + directory = str(directory) directory = op.expanduser(directory) directory = op.expandvars(directory) diff --git a/diskcache/fanout.py b/diskcache/fanout.py index 50005fc..9822ee4 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -30,6 +30,7 @@ def __init__( """ if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') + directory = str(directory) directory = op.expanduser(directory) directory = op.expandvars(directory) diff --git a/tests/test_core.py b/tests/test_core.py index 356d104..788afef 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -5,6 +5,7 @@ import io import os import os.path as op +import pathlib import pickle import shutil import sqlite3 @@ -37,6 +38,13 @@ def test_init(cache): cache.close() +def test_init_path(cache): + path = pathlib.Path(cache.directory) + other = dc.Cache(path) + other.close() + assert cache.directory == other.directory + + def test_init_disk(): with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2**20) as cache: key = (None, 0, 'abc') diff --git a/tests/test_fanout.py b/tests/test_fanout.py index deea03f..af221b6 100644 --- a/tests/test_fanout.py +++ b/tests/test_fanout.py @@ -5,6 +5,7 @@ import io import os import os.path as op +import pathlib import pickle import shutil import subprocess as sp @@ -44,6 +45,13 @@ def test_init(cache): cache.check() +def test_init_path(cache): + path = 
pathlib.Path(cache.directory) + other = dc.FanoutCache(path) + other.close() + assert cache.directory == other.directory + + def test_set_get_delete(cache): for value in range(100): cache.set(value, value) From 4d3068625a3edcd2f5a1f6f104ef621f1f7ea395 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 17 Apr 2023 22:47:22 -0700 Subject: [PATCH 68/74] Bump version to 5.6.1 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 8647b9a..1931a0d 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.6.0' -__build__ = 0x050600 +__version__ = '5.6.1' +__build__ = 0x050601 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From 17a5f42facc312dae6e98b7b53345e2ed02be21d Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Wed, 30 Aug 2023 22:56:38 -0700 Subject: [PATCH 69/74] Bug fix: Fix peek when value is so large that a file is used (#288) Error caused by copy/paste from pull(). --- diskcache/core.py | 3 --- tests/test_deque.py | 10 ++++++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/diskcache/core.py b/diskcache/core.py index af65454..c7c8486 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -1703,9 +1703,6 @@ def peek( except IOError: # Key was deleted before we could retrieve result. 
continue - finally: - if name is not None: - self._disk.remove(name) break if expire_time and tag: diff --git a/tests/test_deque.py b/tests/test_deque.py index 71c69e2..f997a86 100644 --- a/tests/test_deque.py +++ b/tests/test_deque.py @@ -302,3 +302,13 @@ def test_rotate_indexerror_negative(deque): with mock.patch.object(deque, '_cache', cache): deque.rotate(-1) + + +def test_peek(deque): + value = b'x' * 100_000 + deque.append(value) + assert len(deque) == 1 + assert deque.peek() == value + assert len(deque) == 1 + assert deque.peek() == value + assert len(deque) == 1 From 63a5f6068b77fe9c02c8f310758fa1f05ae1ae04 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Wed, 30 Aug 2023 22:58:11 -0700 Subject: [PATCH 70/74] Bump version to 5.6.2 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 1931a0d..719640f 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.6.1' -__build__ = 0x050601 +__version__ = '5.6.2' +__build__ = 0x050602 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From 23d10dce8f4be9c00df4786d508964b3b7d72b27 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Wed, 30 Aug 2023 23:09:54 -0700 Subject: [PATCH 71/74] Update release.yml to use pypa/gh-action-pypi-publish --- .github/workflows/release.yml | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index efe73c6..33b3a8f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,35 +9,22 @@ jobs: upload: runs-on: ubuntu-latest + permissions: + id-token: write steps: - uses: actions/checkout@v3 - - name: Install libmemcached-dev - run: | - sudo apt-get update - sudo apt-get install libmemcached-dev - - name: Set up Python uses: actions/setup-python@v4 
with: python-version: '3.11' - - name: Install dependencies - run: | - pip install --upgrade pip - pip install -r requirements-dev.txt - - - name: Create source dist - run: python setup.py sdist + - name: Install build + run: pip install build - - name: Create wheel dist - run: python setup.py bdist_wheel + - name: Create build + run: python -m build - - name: Upload with twine - env: - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} - TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} - run: | - ls -l dist/* - twine upload dist/* + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 From 323787f507a6456c56cce213156a78b17073fe00 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Wed, 30 Aug 2023 23:10:27 -0700 Subject: [PATCH 72/74] Bump version to 5.6.3 --- diskcache/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 719640f..7757d66 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -61,8 +61,8 @@ pass __title__ = 'diskcache' -__version__ = '5.6.2' -__build__ = 0x050602 +__version__ = '5.6.3' +__build__ = 0x050603 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' From 9cd3816333fa34cb30d6cc2a7f227b6b1cdb793c Mon Sep 17 00:00:00 2001 From: ddorian Date: Tue, 27 Feb 2024 00:30:46 +0100 Subject: [PATCH 73/74] Change `Cache_expire_time` to a partial index because we don't need to query rows efficiently `where expire_time IS NULL` (#305) --- diskcache/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/diskcache/core.py b/diskcache/core.py index c7c8486..ad9ad4c 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -531,7 +531,7 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): sql( 'CREATE INDEX IF NOT EXISTS Cache_expire_time ON' - ' Cache (expire_time)' + ' Cache (expire_time) WHERE expire_time IS NOT NULL' ) query = 
EVICTION_POLICY[self.eviction_policy]['init'] From ebfa37cd99d7ef716ec452ad8af4b4276a8e2233 Mon Sep 17 00:00:00 2001 From: ddorian Date: Sun, 3 Mar 2024 02:19:29 +0100 Subject: [PATCH 74/74] Change `Cache_tag_rowid` to a partial index because we don't need to query rows efficiently `where tag IS NULL` (#307) * Change `Cache_tag_rowid` to a partial index because we don't need to query rows efficiently `where tag IS NULL` * Fix formatting --- diskcache/core.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/diskcache/core.py b/diskcache/core.py index ad9ad4c..7a3d23b 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -2028,7 +2028,10 @@ def create_tag_index(self): """ sql = self._sql - sql('CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)') + sql( + 'CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid) ' + 'WHERE tag IS NOT NULL' + ) self.reset('tag_index', 1) def drop_tag_index(self):