diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 0a44f89..b596fc6 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -12,11 +12,11 @@ jobs: check: [bluecheck, doc8, docs, flake8, isortcheck, mypy, pylint, rstcheck] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: '3.11' - name: Install dependencies run: | pip install --upgrade pip @@ -31,17 +31,17 @@ jobs: strategy: max-parallel: 8 matrix: - os: [ubuntu-latest, macos-latest, windows-latest, ubuntu-16.04] - python-version: [3.6, 3.7, 3.8, 3.9] + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.8, 3.9, '3.10', 3.11] steps: - name: Set up Python ${{ matrix.python-version }} x64 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} architecture: x64 - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Install tox run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2a07787..33b3a8f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,35 +9,22 @@ jobs: upload: runs-on: ubuntu-latest + permissions: + id-token: write steps: - - uses: actions/checkout@v2 - - - name: Install libmemcached-dev - run: | - sudo apt-get update - sudo apt-get install libmemcached-dev + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: 3.9 - - - name: Install dependencies - run: | - pip install --upgrade pip - pip install -r requirements.txt - - - name: Create source dist - run: python setup.py sdist - - - name: Create wheel dist - run: python setup.py bdist_wheel - - - name: Upload with twine - env: - TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} - TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} - run: | - ls -l dist/* - twine upload dist/* + python-version: '3.11' + + - name: Install build + run: pip install build + + - name: Create build + run: python -m build + + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.pylintrc b/.pylintrc index 158fe34..dc1490a 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,27 +1,77 @@ -[MASTER] +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. 
+extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) extension-pkg-whitelist= -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= -# Add files or directories to the blacklist. They should be base names, not -# paths. +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. ignore=CVS -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single @@ -36,6 +86,19 @@ load-plugins= # Pickle collected data for later comparisons. persistent=yes +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.11 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes @@ -44,320 +107,8 @@ suggestion-mode=yes # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). 
You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, - no-member, - no-else-return, - duplicate-code, - inconsistent-return-statements, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. 
-output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. 
-missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=3000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=yes - -# Minimum lines number of a similarity. -min-similarity-lines=4 +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= [BASIC] @@ -366,13 +117,15 @@ min-similarity-lines=4 argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- -# naming-style. +# naming-style. If left empty, argument names will be checked with the set +# naming style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= @@ -392,20 +145,30 @@ bad-names-rgxs= class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. 
#class-attribute-rgx= +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- -# style. +# style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming # style. #const-rgx= @@ -417,7 +180,8 @@ docstring-min-length=-1 function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- -# naming-style. +# naming-style. If left empty, function names will be checked with the set +# naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. @@ -439,21 +203,22 @@ include-naming-hint=no inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- -# style. +# style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- -# style. +# style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when @@ -469,90 +234,56 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- -# naming-style. +# naming-style. If left empty, variable names will be checked with the set +# naming style. #variable-rgx= -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no +[CLASSES] - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. 
This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Deprecated modules which should not be used, separated by a comma.
-deprecated-modules=optparse,tkinter.tix
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled).
-ext-import-graph=
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled).
-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled).
-int-import-graph=
-
-# Force import order to recognize a module as part of the standard
-# compatibility libraries.
-known-standard-library=
-
-# Force import order to recognize a module as part of a third party library.
-known-third-party=enchant
-
-# Couples of modules and preferred modules, separated by a comma.
-preferred-modules=
-
-
-[CLASSES]
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no

 # List of method names used to declare (i.e. assign) instance attributes.
 defining-attr-methods=__init__,
                       __new__,
                       setUp,
+                      asyncSetUp,
                       __post_init__

 # List of member names, which should be excluded from the protected access
 # warning.
-exclude-protected=_asdict,
-                  _fields,
-                  _replace,
-                  _source,
-                  _make
+exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit

 # List of valid names for the first argument in a class method.
 valid-classmethod-first-arg=cls

 # List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=cls
+valid-metaclass-classmethod-first-arg=mcs


 [DESIGN]

+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
 # Maximum number of arguments for function / method.
 max-args=8

@@ -586,7 +317,320 @@ min-public-methods=2

 [EXCEPTIONS]

-# Exceptions that will emit a warning when being caught. Defaults to
-# "BaseException, Exception".
-overgeneral-exceptions=BaseException,
-                       Exception
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions=builtins.BaseException,builtins.Exception
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=2500
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow explicit reexports by alias from a package __init__.
+allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + consider-using-f-string, + no-member, + no-else-return, + no-else-raise, + inconsistent-return-statements + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. 
When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=20 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work.. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. 
+ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/LICENSE b/LICENSE index ca80a22..bb4cfb7 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2016-2021 Grant Jenks +Copyright 2016-2022 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the diff --git a/README.rst b/README.rst index c4aa8e9..04abdc0 100644 --- a/README.rst +++ b/README.rst @@ -4,7 +4,7 @@ DiskCache: Disk Backed Cache `DiskCache`_ is an Apache2 licensed disk and file backed cache library, written in pure-Python, and compatible with Django. -The cloud-based computing of 2021 puts a premium on memory. Gigabytes of empty +The cloud-based computing of 2023 puts a premium on memory. Gigabytes of empty space is left on disks as processes vie for memory. Among these processes is Memcached (and sometimes Redis) which is used as a cache. 
Wouldn't it be nice to leverage empty disk space for caching? @@ -77,8 +77,8 @@ Features - Thread-safe and process-safe - Supports multiple eviction policies (LRU and LFU included) - Keys support "tag" metadata and eviction -- Developed on Python 3.9 -- Tested on CPython 3.6, 3.7, 3.8, 3.9 +- Developed on Python 3.10 +- Tested on CPython 3.6, 3.7, 3.8, 3.9, 3.10 - Tested on Linux, Mac OS X, and Windows - Tested using GitHub Actions @@ -387,7 +387,7 @@ Reference License ------- -Copyright 2016-2021 Grant Jenks +Copyright 2016-2023 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the diff --git a/diskcache/__init__.py b/diskcache/__init__.py index 428eb30..7757d66 100644 --- a/diskcache/__init__.py +++ b/diskcache/__init__.py @@ -3,7 +3,6 @@ ======================= The :doc:`tutorial` provides a helpful walkthrough of most methods. - """ from .core import ( @@ -57,13 +56,13 @@ from .djangocache import DjangoCache # noqa __all__.append('DjangoCache') -except Exception: # pylint: disable=broad-except +except Exception: # pylint: disable=broad-except # pragma: no cover # Django not installed or not setup so ignore. pass __title__ = 'diskcache' -__version__ = '5.2.1' -__build__ = 0x050201 +__version__ = '5.6.3' +__build__ = 0x050603 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2016-2021 Grant Jenks' +__copyright__ = 'Copyright 2016-2023 Grant Jenks' diff --git a/diskcache/cli.py b/diskcache/cli.py index 44bffeb..6a39f60 100644 --- a/diskcache/cli.py +++ b/diskcache/cli.py @@ -1 +1 @@ -"Command line interface to disk cache." +"""Command line interface to disk cache.""" diff --git a/diskcache/core.py b/diskcache/core.py index da4d884..7a3d23b 100644 --- a/diskcache/core.py +++ b/diskcache/core.py @@ -1,5 +1,4 @@ """Core disk and file backed cache API. - """ import codecs @@ -22,20 +21,12 @@ def full_name(func): - "Return full name of `func` by adding the module and function name." + """Return full name of `func` by adding the module and function name.""" return func.__module__ + '.' + func.__qualname__ -try: - WindowsError -except NameError: - - class WindowsError(Exception): - "Windows error place-holder on platforms without support." - - class Constant(tuple): - "Pretty display of immutable constant." 
+ """Pretty display of immutable constant.""" def __new__(cls, name): return tuple.__new__(cls, (name,)) @@ -55,25 +46,25 @@ def __repr__(self): MODE_PICKLE = 4 DEFAULT_SETTINGS = { - u'statistics': 0, # False - u'tag_index': 0, # False - u'eviction_policy': u'least-recently-stored', - u'size_limit': 2 ** 30, # 1gb - u'cull_limit': 10, - u'sqlite_auto_vacuum': 1, # FULL - u'sqlite_cache_size': 2 ** 13, # 8,192 pages - u'sqlite_journal_mode': u'wal', - u'sqlite_mmap_size': 2 ** 26, # 64mb - u'sqlite_synchronous': 1, # NORMAL - u'disk_min_file_size': 2 ** 15, # 32kb - u'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, + 'statistics': 0, # False + 'tag_index': 0, # False + 'eviction_policy': 'least-recently-stored', + 'size_limit': 2**30, # 1gb + 'cull_limit': 10, + 'sqlite_auto_vacuum': 1, # FULL + 'sqlite_cache_size': 2**13, # 8,192 pages + 'sqlite_journal_mode': 'wal', + 'sqlite_mmap_size': 2**26, # 64mb + 'sqlite_synchronous': 1, # NORMAL + 'disk_min_file_size': 2**15, # 32kb + 'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, } METADATA = { - u'count': 0, - u'size': 0, - u'hits': 0, - u'misses': 0, + 'count': 0, + 'size': 0, + 'hits': 0, + 'misses': 0, } EVICTION_POLICY = { @@ -110,7 +101,7 @@ def __repr__(self): class Disk: - "Cache key and value serialization for SQLite database and files." + """Cache key and value serialization for SQLite database and files.""" def __init__(self, directory, min_file_size=0, pickle_protocol=0): """Initialize disk instance. @@ -179,7 +170,7 @@ def get(self, key, raw): :return: corresponding Python key """ - # pylint: disable=no-self-use,unidiomatic-typecheck + # pylint: disable=unidiomatic-typecheck if raw: return bytes(key) if type(key) is sqlite3.Binary else key else: @@ -213,29 +204,18 @@ def store(self, value, read, key=UNKNOWN): return 0, MODE_RAW, None, sqlite3.Binary(value) else: filename, full_path = self.filename(key, value) - - with open(full_path, 'xb') as writer: - writer.write(value) - + self._write(full_path, io.BytesIO(value), 'xb') return len(value), MODE_BINARY, filename, None elif type_value is str: filename, full_path = self.filename(key, value) - - with open(full_path, 'x', encoding='UTF-8') as writer: - writer.write(value) - + self._write(full_path, io.StringIO(value), 'x', 'UTF-8') size = op.getsize(full_path) return size, MODE_TEXT, filename, None elif read: - size = 0 - reader = ft.partial(value.read, 2 ** 22) + reader = ft.partial(value.read, 2**22) filename, full_path = self.filename(key, value) - - with open(full_path, 'xb') as writer: - for chunk in iter(reader, b''): - size += len(chunk) - writer.write(chunk) - + iterator = iter(reader, b'') + size = self._write(full_path, iterator, 'xb') return size, MODE_BINARY, filename, None else: result = pickle.dumps(value, protocol=self.pickle_protocol) @@ -244,11 +224,32 @@ def store(self, value, read, key=UNKNOWN): return 0, MODE_PICKLE, None, sqlite3.Binary(result) else: filename, full_path = self.filename(key, value) + self._write(full_path, io.BytesIO(result), 'xb') + return len(result), MODE_PICKLE, filename, None - with open(full_path, 'xb') as writer: - writer.write(result) + def _write(self, full_path, iterator, mode, encoding=None): + full_dir, _ = op.split(full_path) - return len(result), MODE_PICKLE, filename, None + for count in range(1, 11): + with cl.suppress(OSError): + os.makedirs(full_dir) + + try: + # Another cache may have deleted the directory before + # the file could be opened. 
+ writer = open(full_path, mode, encoding=encoding) + except OSError: + if count == 10: + # Give up after 10 tries to open the file. + raise + continue + + with writer: + size = 0 + for chunk in iterator: + size += len(chunk) + writer.write(chunk) + return size def fetch(self, mode, filename, value, read): """Convert fields `mode`, `filename`, and `value` from Cache table to @@ -259,9 +260,10 @@ def fetch(self, mode, filename, value, read): :param value: database value :param bool read: when True, return an open file handle :return: corresponding Python value + :raises: IOError if the value cannot be read """ - # pylint: disable=no-self-use,unidiomatic-typecheck + # pylint: disable=unidiomatic-typecheck,consider-using-with if mode == MODE_RAW: return bytes(value) if type(value) is sqlite3.Binary else value elif mode == MODE_BINARY: @@ -303,42 +305,34 @@ def filename(self, key=UNKNOWN, value=UNKNOWN): hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') sub_dir = op.join(hex_name[:2], hex_name[2:4]) name = hex_name[4:] + '.val' - directory = op.join(self._directory, sub_dir) - - try: - os.makedirs(directory) - except OSError as error: - if error.errno != errno.EEXIST: - raise - filename = op.join(sub_dir, name) full_path = op.join(self._directory, filename) return filename, full_path - def remove(self, filename): - """Remove a file given by `filename`. + def remove(self, file_path): + """Remove a file given by `file_path`. - This method is cross-thread and cross-process safe. If an "error no - entry" occurs, it is suppressed. + This method is cross-thread and cross-process safe. If an OSError + occurs, it is suppressed. - :param str filename: relative path to file + :param str file_path: relative path to file """ - full_path = op.join(self._directory, filename) + full_path = op.join(self._directory, file_path) + full_dir, _ = op.split(full_path) - try: + # Suppress OSError that may occur if two caches attempt to delete the + # same file or directory at the same time. + + with cl.suppress(OSError): os.remove(full_path) - except WindowsError: - pass - except OSError as error: - if error.errno != errno.ENOENT: - # ENOENT may occur if two caches attempt to delete the same - # file at the same time. - raise + + with cl.suppress(OSError): + os.removedirs(full_dir) class JSONDisk(Disk): - "Cache key and value using JSON serialization with zlib compression." + """Cache key and value using JSON serialization with zlib compression.""" def __init__(self, directory, compress_level=1, **kwargs): """Initialize JSON disk instance. @@ -379,31 +373,33 @@ def fetch(self, mode, filename, value, read): class Timeout(Exception): - "Database timeout expired." + """Database timeout expired.""" class UnknownFileWarning(UserWarning): - "Warning used by Cache.check for unknown files." + """Warning used by Cache.check for unknown files.""" class EmptyDirWarning(UserWarning): - "Warning used by Cache.check for empty directories." + """Warning used by Cache.check for empty directories.""" -def args_to_key(base, args, kwargs, typed): +def args_to_key(base, args, kwargs, typed, ignore): """Create cache key out of function arguments. 
:param tuple base: base of key :param tuple args: function arguments :param dict kwargs: function keyword arguments :param bool typed: include types in cache key + :param set ignore: positional or keyword args to ignore :return: cache key tuple """ - key = base + args + args = tuple(arg for index, arg in enumerate(args) if index not in ignore) + key = base + args + (None,) if kwargs: - key += (ENOVAL,) + kwargs = {key: val for key, val in kwargs.items() if key not in ignore} sorted_items = sorted(kwargs.items()) for item in sorted_items: @@ -419,7 +415,7 @@ def args_to_key(base, args, kwargs, typed): class Cache: - "Disk and file backed cache." + """Disk and file backed cache.""" def __init__(self, directory=None, timeout=60, disk=Disk, **settings): """Initialize cache instance. @@ -437,6 +433,7 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') + directory = str(directory) directory = op.expanduser(directory) directory = op.expandvars(directory) @@ -534,7 +531,7 @@ def __init__(self, directory=None, timeout=60, disk=Disk, **settings): sql( 'CREATE INDEX IF NOT EXISTS Cache_expire_time ON' - ' Cache (expire_time)' + ' Cache (expire_time) WHERE expire_time IS NOT NULL' ) query = EVICTION_POLICY[self.eviction_policy]['init'] @@ -1200,14 +1197,11 @@ def get( try: value = self._disk.fetch(mode, filename, db_value, read) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - if self.statistics: - sql(cache_miss) - return default - else: - raise + except IOError: + # Key was deleted before we could retrieve result. + if self.statistics: + sql(cache_miss) + return default if self.statistics: sql(cache_hit) @@ -1323,12 +1317,9 @@ def pop( try: value = self._disk.fetch(mode, filename, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - return default - else: - raise + except IOError: + # Key was deleted before we could retrieve result. + return default finally: if filename is not None: self._disk.remove(filename) @@ -1387,6 +1378,7 @@ def delete(self, key, retry=False): :raises Timeout: if database timeout occurs """ + # pylint: disable=unnecessary-dunder-call try: return self.__delitem__(key, retry=retry) except KeyError: @@ -1594,11 +1586,9 @@ def pull( try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - raise + except IOError: + # Key was deleted before we could retrieve result. + continue finally: if name is not None: self._disk.remove(name) @@ -1710,14 +1700,9 @@ def peek( try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - raise - finally: - if name is not None: - self._disk.remove(name) + except IOError: + # Key was deleted before we could retrieve result. + continue break if expire_time and tag: @@ -1793,11 +1778,9 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): try: value = self._disk.fetch(mode, name, db_value, False) - except IOError as error: - if error.errno == errno.ENOENT: - # Key was deleted before we could retrieve result. - continue - raise + except IOError: + # Key was deleted before we could retrieve result. 
+ continue break if expire_time and tag: @@ -1809,7 +1792,9 @@ def peekitem(self, last=True, expire_time=False, tag=False, retry=False): else: return key, value - def memoize(self, name=None, typed=False, expire=None, tag=None): + def memoize( + self, name=None, typed=False, expire=None, tag=None, ignore=() + ): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. @@ -1868,6 +1853,7 @@ def memoize(self, name=None, typed=False, expire=None, tag=None): :param float expire: seconds until arguments expire (default None, no expiry) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -1876,12 +1862,12 @@ def memoize(self, name=None, typed=False, expire=None, tag=None): raise TypeError('name cannot be callable') def decorator(func): - "Decorator created by memoize() for callable `func`." + """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @ft.wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, default=ENOVAL, retry=True) @@ -1893,8 +1879,8 @@ def wrapper(*args, **kwargs): return result def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." - return args_to_key(base, args, kwargs, typed) + """Make key for cache given function arguments.""" + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper @@ -1929,7 +1915,7 @@ def check(self, fix=False, retry=False): rows = sql('PRAGMA integrity_check').fetchall() - if len(rows) != 1 or rows[0][0] != u'ok': + if len(rows) != 1 or rows[0][0] != 'ok': for (message,) in rows: warnings.warn(message) @@ -2042,7 +2028,10 @@ def create_tag_index(self): """ sql = self._sql - sql('CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)') + sql( + 'CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid) ' + 'WHERE tag IS NOT NULL' + ) self.reset('tag_index', 1) def drop_tag_index(self): @@ -2308,13 +2297,13 @@ def _iter(self, ascending=True): yield _disk_get(key, raw) def __iter__(self): - "Iterate keys in cache including expired items." + """Iterate keys in cache including expired items.""" iterator = self._iter() next(iterator) return iterator def __reversed__(self): - "Reverse iterate keys in cache including expired items." + """Reverse iterate keys in cache including expired items.""" iterator = self._iter(ascending=False) next(iterator) return iterator @@ -2372,7 +2361,7 @@ def __exit__(self, *exception): self.close() def __len__(self): - "Count of items in cache including expired items." + """Count of items in cache including expired items.""" return self.reset('count') def __getstate__(self): diff --git a/diskcache/djangocache.py b/diskcache/djangocache.py index 44f673d..5dc8ce2 100644 --- a/diskcache/djangocache.py +++ b/diskcache/djangocache.py @@ -1,4 +1,4 @@ -"Django-compatible disk and file backed cache." +"""Django-compatible disk and file backed cache.""" from functools import wraps @@ -6,7 +6,7 @@ try: from django.core.cache.backends.base import DEFAULT_TIMEOUT -except ImportError: +except ImportError: # pragma: no cover # For older versions of Django simply use 300 seconds. 
DEFAULT_TIMEOUT = 300 @@ -15,7 +15,7 @@ class DjangoCache(BaseCache): - "Django-compatible disk and file backed cache." + """Django-compatible disk and file backed cache.""" def __init__(self, directory, params): """Initialize DjangoCache instance. @@ -44,14 +44,15 @@ def cache(self, name): """ return self._cache.cache(name) - def deque(self, name): + def deque(self, name, maxlen=None): """Return Deque with given `name` in subdirectory. :param str name: subdirectory name for Deque + :param maxlen: max length (default None, no max) :return: Deque with given name """ - return self._cache.deque(name) + return self._cache.deque(name, maxlen=maxlen) def index(self, name): """Return Index with given `name` in subdirectory. @@ -220,7 +221,7 @@ def delete(self, key, version=None, retry=True): """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) - self._cache.delete(key, retry) + return self._cache.delete(key, retry) def incr(self, key, delta=1, version=None, default=None, retry=True): """Increment value by delta for item with key. @@ -344,11 +345,11 @@ def cull(self): return self._cache.cull() def clear(self): - "Remove *all* values from the cache at once." + """Remove *all* values from the cache at once.""" return self._cache.clear() def close(self, **kwargs): - "Close the cache connection." + """Close the cache connection.""" # pylint: disable=unused-argument self._cache.close() @@ -373,6 +374,7 @@ def memoize( version=None, typed=False, tag=None, + ignore=(), ): """Memoizing cache decorator. @@ -407,6 +409,7 @@ def memoize( :param int version: key version number (default None, cache parameter) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ @@ -415,12 +418,12 @@ def memoize( raise TypeError('name cannot be callable') def decorator(func): - "Decorator created by memoize() for callable `func`." + """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, ENOVAL, version, retry=True) @@ -444,8 +447,8 @@ def wrapper(*args, **kwargs): return result def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." - return args_to_key(base, args, kwargs, typed) + """Make key for cache given function arguments.""" + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/diskcache/fanout.py b/diskcache/fanout.py index 99384a0..9822ee4 100644 --- a/diskcache/fanout.py +++ b/diskcache/fanout.py @@ -1,4 +1,4 @@ -"Fanout cache automatically shards keys and values." +"""Fanout cache automatically shards keys and values.""" import contextlib as cl import functools @@ -14,7 +14,7 @@ class FanoutCache: - "Cache that shards keys and values." 
+    """Cache that shards keys and values."""

     def __init__(
         self, directory=None, shards=8, timeout=0.010, disk=Disk, **settings
@@ -30,6 +30,7 @@ def __init__(
         """
         if directory is None:
             directory = tempfile.mkdtemp(prefix='diskcache-')
+        directory = str(directory)
         directory = op.expanduser(directory)
         directory = op.expandvars(directory)

@@ -45,7 +46,7 @@ def __init__(
                 timeout=timeout,
                 disk=disk,
                 size_limit=size_limit,
-                **settings
+                **settings,
             )
             for num in range(shards)
         )
@@ -512,7 +513,7 @@ def volume(self):
         return sum(shard.volume() for shard in self._shards)

     def close(self):
-        "Close database connection."
+        """Close database connection."""
         for shard in self._shards:
             shard.close()
         self._caches.clear()
@@ -532,17 +533,17 @@ def __setstate__(self, state):
         self.__init__(*state)

     def __iter__(self):
-        "Iterate keys in cache including expired items."
+        """Iterate keys in cache including expired items."""
         iterators = (iter(shard) for shard in self._shards)
         return it.chain.from_iterable(iterators)

     def __reversed__(self):
-        "Reverse iterate keys in cache including expired items."
+        """Reverse iterate keys in cache including expired items."""
         iterators = (reversed(shard) for shard in reversed(self._shards))
         return it.chain.from_iterable(iterators)

     def __len__(self):
-        "Count of items in cache including expired items."
+        """Count of items in cache including expired items."""
         return sum(len(shard) for shard in self._shards)

     def reset(self, key, value=ENOVAL):
@@ -573,9 +574,11 @@ def reset(self, key, value=ENOVAL):
                 break
         return result

-    def cache(self, name):
+    def cache(self, name, timeout=60, disk=None, **settings):
         """Return Cache with given `name` in subdirectory.

+        If disk is none (default), uses the fanout cache disk.
+
         >>> fanout_cache = FanoutCache()
         >>> cache = fanout_cache.cache('test')
         >>> cache.set('abc', 123)
@@ -588,6 +591,9 @@ def cache(self, name):
         True

         :param str name: subdirectory name for Cache
+        :param float timeout: SQLite connection timeout
+        :param disk: Disk type or subclass for serialization
+        :param settings: any of DEFAULT_SETTINGS
         :return: Cache with given name

         """
@@ -598,11 +604,16 @@ def cache(self, name):
         except KeyError:
             parts = name.split('/')
             directory = op.join(self._directory, 'cache', *parts)
-            temp = Cache(directory=directory, disk=self._disk)
+            temp = Cache(
+                directory=directory,
+                timeout=timeout,
+                disk=self._disk if disk is None else disk,
+                **settings,
+            )
             _caches[name] = temp
             return temp

-    def deque(self, name):
+    def deque(self, name, maxlen=None):
         """Return Deque with given `name` in subdirectory.
>>> cache = FanoutCache() @@ -616,6 +627,7 @@ def deque(self, name): 1 :param str name: subdirectory name for Deque + :param maxlen: max length (default None, no max) :return: Deque with given name """ @@ -626,8 +638,12 @@ def deque(self, name): except KeyError: parts = name.split('/') directory = op.join(self._directory, 'deque', *parts) - cache = Cache(directory=directory, disk=self._disk) - deque = Deque.fromcache(cache) + cache = Cache( + directory=directory, + disk=self._disk, + eviction_policy='none', + ) + deque = Deque.fromcache(cache, maxlen=maxlen) _deques[name] = deque return deque @@ -658,7 +674,11 @@ def index(self, name): except KeyError: parts = name.split('/') directory = op.join(self._directory, 'index', *parts) - cache = Cache(directory=directory, disk=self._disk) + cache = Cache( + directory=directory, + disk=self._disk, + eviction_policy='none', + ) index = Index.fromcache(cache) _indexes[name] = index return index diff --git a/diskcache/persistent.py b/diskcache/persistent.py index 44f9cc7..522bb74 100644 --- a/diskcache/persistent.py +++ b/diskcache/persistent.py @@ -1,5 +1,4 @@ """Persistent Data Types - """ import operator as op @@ -18,10 +17,10 @@ def _make_compare(seq_op, doc): - "Make compare method with Sequence semantics." + """Make compare method with Sequence semantics.""" def compare(self, that): - "Compare method for deque and sequence." + """Compare method for deque and sequence.""" if not isinstance(that, Sequence): return NotImplemented @@ -76,7 +75,7 @@ class Deque(Sequence): """ - def __init__(self, iterable=(), directory=None): + def __init__(self, iterable=(), directory=None, maxlen=None): """Initialize deque instance. If directory is None then temporary directory created. The directory @@ -87,10 +86,11 @@ def __init__(self, iterable=(), directory=None): """ self._cache = Cache(directory, eviction_policy='none') - self.extend(iterable) + self._maxlen = float('inf') if maxlen is None else maxlen + self._extend(iterable) @classmethod - def fromcache(cls, cache, iterable=()): + def fromcache(cls, cache, iterable=(), maxlen=None): """Initialize deque using `cache`. >>> cache = Cache() @@ -112,19 +112,45 @@ def fromcache(cls, cache, iterable=()): # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache - self.extend(iterable) + self._maxlen = float('inf') if maxlen is None else maxlen + self._extend(iterable) return self @property def cache(self): - "Cache used by deque." + """Cache used by deque.""" return self._cache @property def directory(self): - "Directory path where deque is stored." + """Directory path where deque is stored.""" return self._cache.directory + @property + def maxlen(self): + """Max length of the deque.""" + return self._maxlen + + @maxlen.setter + def maxlen(self, value): + """Set max length of the deque. + + Pops items from left while length greater than max. 
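The trim runs inside a single cache transaction, so a concurrent reader should not observe a partially trimmed deque.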
+ + >>> deque = Deque() + >>> deque.extendleft('abcde') + >>> deque.maxlen = 3 + >>> list(deque) + ['c', 'd', 'e'] + + :param value: max length + + """ + self._maxlen = value + with self._cache.transact(retry=True): + while len(self._cache) > self._maxlen: + self._popleft() + def _index(self, index, func): len_self = len(self) @@ -245,7 +271,7 @@ def __iadd__(self, iterable): :return: deque with added items """ - self.extend(iterable) + self._extend(iterable) return self def __iter__(self): @@ -293,10 +319,11 @@ def __reversed__(self): pass def __getstate__(self): - return self.directory + return self.directory, self.maxlen def __setstate__(self, state): - self.__init__(directory=state) + directory, maxlen = state + self.__init__(directory=directory, maxlen=maxlen) def append(self, value): """Add `value` to back of deque. @@ -311,7 +338,12 @@ def append(self, value): :param value: value to add to back of deque """ - self._cache.push(value, retry=True) + with self._cache.transact(retry=True): + self._cache.push(value, retry=True) + if len(self._cache) > self._maxlen: + self._popleft() + + _append = append def appendleft(self, value): """Add `value` to front of deque. @@ -326,7 +358,12 @@ def appendleft(self, value): :param value: value to add to front of deque """ - self._cache.push(value, side='front', retry=True) + with self._cache.transact(retry=True): + self._cache.push(value, side='front', retry=True) + if len(self._cache) > self._maxlen: + self._pop() + + _appendleft = appendleft def clear(self): """Remove all elements from deque. @@ -341,6 +378,13 @@ def clear(self): """ self._cache.clear(retry=True) + _clear = clear + + def copy(self): + """Copy deque with same directory and max length.""" + TypeSelf = type(self) + return TypeSelf(directory=self.directory, maxlen=self.maxlen) + def count(self, value): """Return number of occurrences of `value` in deque. @@ -366,7 +410,9 @@ def extend(self, iterable): """ for value in iterable: - self.append(value) + self._append(value) + + _extend = extend def extendleft(self, iterable): """Extend front side of deque with value from `iterable`. @@ -380,7 +426,7 @@ def extendleft(self, iterable): """ for value in iterable: - self.appendleft(value) + self._appendleft(value) def peek(self): """Peek at value at back of deque. @@ -460,6 +506,8 @@ def pop(self): raise IndexError('pop from an empty deque') return value + _pop = pop + def popleft(self): """Remove and return value at front of deque. @@ -484,6 +532,8 @@ def popleft(self): raise IndexError('pop from an empty deque') return value + _popleft = popleft + def remove(self, value): """Remove first occurrence of `value` in deque. @@ -531,15 +581,17 @@ def reverse(self): ['c', 'b', 'a'] """ + # pylint: disable=protected-access # GrantJ 2019-03-22 Consider using an algorithm that swaps the values # at two keys. Like self._cache.swap(key1, key2, retry=True) The swap # method would exchange the values at two given keys. Then, using a - # forward iterator and a reverse iterator, the reversis method could + # forward iterator and a reverse iterator, the reverse method could # avoid making copies of the values. 
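        # Until such a swap method exists, reverse() materializes a reversed
        # copy in a temporary on-disk deque below, then clears and refills
        # this deque from it.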
temp = Deque(iterable=reversed(self)) - self.clear() - self.extend(temp) + self._clear() + self._extend(temp) directory = temp.directory + temp._cache.close() del temp rmtree(directory) @@ -574,22 +626,22 @@ def rotate(self, steps=1): for _ in range(steps): try: - value = self.pop() + value = self._pop() except IndexError: return else: - self.appendleft(value) + self._appendleft(value) else: steps *= -1 steps %= len_self for _ in range(steps): try: - value = self.popleft() + value = self._popleft() except IndexError: return else: - self.append(value) + self._append(value) __hash__ = None # type: ignore @@ -668,7 +720,9 @@ def __init__(self, *args, **kwargs): args = args[1:] directory = None self._cache = Cache(directory, eviction_policy='none') - self.update(*args, **kwargs) + self._update(*args, **kwargs) + + _update = MutableMapping.update @classmethod def fromcache(cls, cache, *args, **kwargs): @@ -694,17 +748,17 @@ def fromcache(cls, cache, *args, **kwargs): # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache - self.update(*args, **kwargs) + self._update(*args, **kwargs) return self @property def cache(self): - "Cache used by index." + """Cache used by index.""" return self._cache @property def directory(self): - "Directory path where items are stored." + """Directory path where items are stored.""" return self._cache.directory def __getitem__(self, key): @@ -1097,7 +1151,7 @@ def __ne__(self, other): """ return not self == other - def memoize(self, name=None, typed=False): + def memoize(self, name=None, typed=False, ignore=()): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. @@ -1148,10 +1202,11 @@ def memoize(self, name=None, typed=False): :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ - return self._cache.memoize(name, typed) + return self._cache.memoize(name, typed, ignore=ignore) @contextmanager def transact(self): diff --git a/diskcache/recipes.py b/diskcache/recipes.py index 8f7bd32..babb68f 100644 --- a/diskcache/recipes.py +++ b/diskcache/recipes.py @@ -1,5 +1,4 @@ """Disk Cache Recipes - """ import functools @@ -18,6 +17,9 @@ class Averager: Sometimes known as "online statistics," the running average maintains the total and count. The average can then be calculated at any time. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.FanoutCache() >>> ave = Averager(cache, 'latency') @@ -40,7 +42,7 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def add(self, value): - "Add `value` to average." + """Add `value` to average.""" with self._cache.transact(retry=True): total, count = self._cache.get(self._key, default=(0.0, 0)) total += value @@ -53,12 +55,12 @@ def add(self, value): ) def get(self): - "Get current average or return `None` if count equals zero." + """Get current average or return `None` if count equals zero.""" total, count = self._cache.get(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count def pop(self): - "Return current average and delete key." 
+ """Return current average and delete key.""" total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count @@ -66,6 +68,9 @@ def pop(self): class Lock: """Recipe for cross-process and cross-thread lock. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> lock = Lock(cache, 'report-123') @@ -83,7 +88,7 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire lock using spin-lock algorithm." + """Acquire lock using spin-lock algorithm.""" while True: added = self._cache.add( self._key, @@ -97,11 +102,11 @@ def acquire(self): time.sleep(0.001) def release(self): - "Release lock by deleting key." + """Release lock by deleting key.""" self._cache.delete(self._key, retry=True) def locked(self): - "Return true if the lock is acquired." + """Return true if the lock is acquired.""" return self._key in self._cache def __enter__(self): @@ -114,6 +119,9 @@ def __exit__(self, *exc_info): class RLock: """Recipe for cross-process and cross-thread re-entrant lock. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> rlock = RLock(cache, 'user-123') @@ -137,7 +145,7 @@ def __init__(self, cache, key, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire lock by incrementing count using spin-lock algorithm." + """Acquire lock by incrementing count using spin-lock algorithm.""" pid = os.getpid() tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) @@ -156,7 +164,7 @@ def acquire(self): time.sleep(0.001) def release(self): - "Release lock by decrementing count." + """Release lock by decrementing count.""" pid = os.getpid() tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) @@ -182,6 +190,9 @@ def __exit__(self, *exc_info): class BoundedSemaphore: """Recipe for cross-process and cross-thread bounded semaphore. + Assumes the key will not be evicted. Set the eviction policy to 'none' on + the cache to guarantee the key is not evicted. + >>> import diskcache >>> cache = diskcache.Cache() >>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2) @@ -206,7 +217,7 @@ def __init__(self, cache, key, value=1, expire=None, tag=None): self._tag = tag def acquire(self): - "Acquire semaphore by decrementing value using spin-lock algorithm." + """Acquire semaphore by decrementing value using spin-lock algorithm.""" while True: with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) @@ -221,7 +232,7 @@ def acquire(self): time.sleep(0.001) def release(self): - "Release semaphore by incrementing value." + """Release semaphore by incrementing value.""" with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) assert self._value > value, 'cannot release un-acquired semaphore' @@ -252,6 +263,9 @@ def throttle( ): """Decorator to throttle calls to function. + Assumes keys will not be evicted. Set the eviction policy to 'none' on the + cache to guarantee the keys are not evicted. + >>> import diskcache, time >>> cache = diskcache.Cache() >>> count = 0 @@ -306,6 +320,9 @@ def barrier(cache, lock_factory, name=None, expire=None, tag=None): Supports different kinds of locks: Lock, RLock, BoundedSemaphore. + Assumes keys will not be evicted. 
Set the eviction policy to 'none' on the + cache to guarantee the keys are not evicted. + >>> import diskcache, time >>> cache = diskcache.Cache() >>> @barrier(cache, Lock) @@ -338,7 +355,9 @@ def wrapper(*args, **kwargs): return decorator -def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): +def memoize_stampede( + cache, expire, name=None, typed=False, tag=None, beta=1, ignore=() +): """Memoizing cache decorator with cache stampede protection. Cache stampedes are a type of system overload that can occur when parallel @@ -391,16 +410,17 @@ def memoize_stampede(cache, expire, name=None, typed=False, tag=None, beta=1): :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) + :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ # Caution: Nearly identical code exists in Cache.memoize def decorator(func): - "Decorator created by memoize call for callable." + """Decorator created by memoize call for callable.""" base = (full_name(func),) if name is None else (name,) def timer(*args, **kwargs): - "Time execution of `func` and return result and time delta." + """Time execution of `func` and return result and time delta.""" start = time.time() result = func(*args, **kwargs) delta = time.time() - start @@ -408,7 +428,7 @@ def timer(*args, **kwargs): @functools.wraps(func) def wrapper(*args, **kwargs): - "Wrapper for callable to cache arguments and return values." + """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) pair, expire_time = cache.get( key, @@ -459,8 +479,8 @@ def recompute(): return pair[0] def __cache_key__(*args, **kwargs): - "Make key for cache given function arguments." - return args_to_key(base, args, kwargs, typed) + """Make key for cache given function arguments.""" + return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper diff --git a/docs/conf.py b/docs/conf.py index 402d3ad..92bf3ec 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,7 +20,7 @@ # -- Project information ----------------------------------------------------- project = 'DiskCache' -copyright = '2021, Grant Jenks' +copyright = '2023, Grant Jenks' author = 'Grant Jenks' # The full version, including alpha/beta/rc tags @@ -59,7 +59,6 @@ 'logo': 'gj-logo.png', 'logo_name': True, 'logo_text_align': 'center', - 'travis_button': True, 'analytics_id': 'UA-19364636-2', 'show_powered_by': False, 'show_related': True, diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 8f8dc0f..2eb454d 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -535,7 +535,7 @@ they are guaranteed to be stored in files. The full path is available on the file handle in the `name` attribute. Remember to also include the `Content-Type` header if known. -.. _`Django documentation on caching`: https://docs.djangoproject.com/en/1.9/topics/cache/#the-low-level-cache-api +.. _`Django documentation on caching`: https://docs.djangoproject.com/en/3.2/topics/cache/#the-low-level-cache-api Deque ----- @@ -565,12 +565,15 @@ access and editing at both front and back sides. :class:`Deque 4 >>> other.popleft() 'foo' + >>> thing = Deque('abcde', maxlen=3) + >>> list(thing) + ['c', 'd', 'e'] :class:`Deque ` objects provide an efficient and safe means of cross-thread and cross-process communication. 
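A minimal sketch of that cross-process pattern follows (the directory path and the producer/consumer split are illustrative assumptions, not part of the library API):

.. code-block:: python

    # producer process
    import diskcache

    tasks = diskcache.Deque(directory='/tmp/diskcache-tasks')
    tasks.append('task-1')

    # consumer process, possibly running concurrently elsewhere
    import diskcache

    tasks = diskcache.Deque(directory='/tmp/diskcache-tasks')
    value = tasks.popleft()  # 'task-1'; raises IndexError when empty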
:class:`Deque ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The deque -uses a fixed amout of memory regardless of the size or number of items stored +uses a fixed amount of memory regardless of the size or number of items stored inside it. Index ----- @@ -603,7 +606,7 @@ interface. :class:`Index ` objects inherit all the benefits of cross-thread and cross-process communication. :class:`Index ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The index -uses a fixed amout of memory regardless of the size or number of items stored +uses a fixed amount of memory regardless of the size or number of items stored inside it. .. _tutorial-transactions: @@ -818,30 +821,28 @@ example below uses compressed JSON, available for convenience as .. code-block:: python import json, zlib class JSONDisk(diskcache.Disk): def __init__(self, directory, compress_level=1, **kwargs): self.compress_level = compress_level - super(JSONDisk, self).__init__(directory, **kwargs) + super().__init__(directory, **kwargs) def put(self, key): json_bytes = json.dumps(key).encode('utf-8') data = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).put(data) + return super().put(data) def get(self, key, raw): - data = super(JSONDisk, self).get(key, raw) + data = super().get(key, raw) return json.loads(zlib.decompress(data).decode('utf-8')) - def store(self, value, read): + def store(self, value, read, key=UNKNOWN): if not read: json_bytes = json.dumps(value).encode('utf-8') value = zlib.compress(json_bytes, self.compress_level) - return super(JSONDisk, self).store(value, read) + return super().store(value, read, key=key) def fetch(self, mode, filename, value, read): - data = super(JSONDisk, self).fetch(mode, filename, value, read) + data = super().fetch(mode, filename, value, read) if not read: data = json.loads(zlib.decompress(data).decode('utf-8')) return data @@ -863,8 +864,17 @@ protocol`_ is not used. Neither the `__hash__` nor `__eq__` methods are used for lookups. Instead lookups depend on the serialization method defined by :class:`Disk ` objects. For strings, bytes, integers, and floats, equality matches Python's definition. But large integers and all other -types will be converted to bytes using pickling and the bytes representation -will define equality. +types will be converted to bytes and the bytes representation will define +equality. + +The default :class:`diskcache.Disk` serialization uses pickling for both keys +and values. Unfortunately, pickling sometimes produces inconsistencies when +applied to container data types like tuples. Two equal tuples may serialize to +different bytes objects using pickle. The likelihood of differences is reduced +by using `pickletools.optimize` but inconsistencies still occur (`#54`_). +Inconsistent pickle serialization is particularly problematic when applied to +cache keys. Consider using an alternative Disk type, like +:class:`JSONDisk `, for consistent serialization of keys. SQLite is used to synchronize database access between threads and processes and as such inherits all SQLite caveats. Most notably SQLite is `not recommended`_ @@ -898,6 +908,7 @@ does not account the size of directories themselves or other filesystem metadata. If directory count or size is a concern then consider implementing an alternative :class:`Disk `.
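To make the pickle caveat above concrete (`#54`_), a short demonstration using only the standard library shows two equal tuples serializing to different byte strings:

.. code-block:: python

    import pickle

    s1 = ''.join(['a', 'b'])  # 'ab', built at runtime
    s2 = ''.join(['a', 'b'])  # equal to s1 but a distinct object
    t1 = (s1, s1)             # repeats a single object
    t2 = (s1, s2)             # two equal objects
    assert t1 == t2
    # pickle memoizes by object identity, so the byte streams differ:
    assert pickle.dumps(t1) != pickle.dumps(t2)

+.. 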
_`#54`: https://github.com/grantjenks/python-diskcache/issues/54 .. _`hash protocol`: https://docs.python.org/library/functions.html#hash .. _`not recommended`: https://www.sqlite.org/faq.html#q5 .. _`performs poorly`: https://www.pythonanywhere.com/forums/topic/1847/ diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..6149361 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,23 @@ +-e . +blue +coverage +django==4.2.* +django_redis +doc8 +flake8 +ipython +jedi +pickleDB +pylibmc +pylint +pytest +pytest-cov +pytest-django +pytest-env +pytest-xdist +rstcheck +sphinx +sqlitedict +tox +twine +wheel diff --git a/requirements.txt b/requirements.txt index 2ab91c7..d6e1198 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,23 +1 @@ -e . -blue -coverage -django==2.2.* -django_redis -doc8 -flake8 -ipython -jedi==0.17.* # Remove after IPython bug fixed. -pickleDB -pylibmc -pylint -pytest -pytest-cov -pytest-django -pytest-env -pytest-xdist -rstcheck -sphinx -sqlitedict -tox -twine -wheel diff --git a/setup.py b/setup.py index 7f0561c..841dfb9 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,3 @@ -from io import open - from setuptools import setup from setuptools.command.test import test as TestCommand @@ -30,6 +28,12 @@ def run_tests(self): author='Grant Jenks', author_email='contact@grantjenks.com', url='http://www.grantjenks.com/docs/diskcache/', + project_urls={ + 'Documentation': 'http://www.grantjenks.com/docs/diskcache/', + 'Funding': 'https://gum.co/diskcache', + 'Source': 'https://github.com/grantjenks/python-diskcache', + 'Tracker': 'https://github.com/grantjenks/python-diskcache/issues', + }, license='Apache 2.0', packages=['diskcache'], tests_require=['tox'], diff --git a/tests/benchmark_core.py b/tests/benchmark_core.py index 282ce2a..7d64595 100644 --- a/tests/benchmark_core.py +++ b/tests/benchmark_core.py @@ -3,7 +3,6 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_core.py -p 1 > tests/timings_core_p1.txt $ python tests/benchmark_core.py -p 8 > tests/timings_core_p8.txt - """ import collections as co diff --git a/tests/benchmark_djangocache.py b/tests/benchmark_djangocache.py index 9dbbcd7..61a80bf 100644 --- a/tests/benchmark_djangocache.py +++ b/tests/benchmark_djangocache.py @@ -2,8 +2,6 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_djangocache.py > tests/timings_djangocache.txt - - """ import collections as co diff --git a/tests/benchmark_glob.py b/tests/benchmark_glob.py index 9c23104..7da5fd3 100644 --- a/tests/benchmark_glob.py +++ b/tests/benchmark_glob.py @@ -1,4 +1,4 @@ -"Benchmark glob.glob1 as used by django.core.cache.backends.filebased." +"""Benchmark glob.glob1 as used by django.core.cache.backends.filebased.""" import os import os.path as op @@ -22,7 +22,7 @@ print(template % ('Count', 'Time')) print(' '.join(['=' * size] * len(cols))) -for count in [10 ** exp for exp in range(6)]: +for count in [10**exp for exp in range(6)]: for value in range(count): with open(op.join('tmp', '%s.tmp' % value), 'wb') as writer: pass diff --git a/tests/benchmark_incr.py b/tests/benchmark_incr.py index 9c8e2fa..4f758aa 100644 --- a/tests/benchmark_incr.py +++ b/tests/benchmark_incr.py @@ -1,5 +1,4 @@ """Benchmark cache.incr method. - """ import json @@ -16,7 +15,7 @@ def worker(num): - "Rapidly increment key and time operation." + """Rapidly increment key and time operation.""" time.sleep(0.1) # Let other workers start. 
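    # Each worker opens its own Cache handle on the shared 'tmp' directory;
    # SQLite locking serializes the concurrent incr() transactions across
    # processes.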
cache = dc.Cache('tmp') @@ -33,7 +32,7 @@ def worker(num): def main(): - "Run workers and print percentile results." + """Run workers and print percentile results.""" shutil.rmtree('tmp', ignore_errors=True) processes = [ diff --git a/tests/benchmark_kv_store.py b/tests/benchmark_kv_store.py index e141a36..7015470 100644 --- a/tests/benchmark_kv_store.py +++ b/tests/benchmark_kv_store.py @@ -1,7 +1,6 @@ """Benchmarking Key-Value Stores $ python -m IPython tests/benchmark_kv_store.py - """ from IPython import get_ipython diff --git a/tests/issue_109.py b/tests/issue_109.py index c10a81f..a649c58 100644 --- a/tests/issue_109.py +++ b/tests/issue_109.py @@ -1,5 +1,4 @@ """Benchmark for Issue #109 - """ import time diff --git a/tests/issue_85.py b/tests/issue_85.py index 723406b..cb8789b 100644 --- a/tests/issue_85.py +++ b/tests/issue_85.py @@ -2,7 +2,6 @@ $ export PYTHONPATH=`pwd` $ python tests/issue_85.py - """ import collections diff --git a/tests/plot.py b/tests/plot.py index 2138659..fcac0bc 100644 --- a/tests/plot.py +++ b/tests/plot.py @@ -2,7 +2,6 @@ $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/plot.py --show tests/timings_core_p1.txt - """ import argparse @@ -14,7 +13,7 @@ def parse_timing(timing, limit): - "Parse timing." + """Parse timing.""" if timing.endswith('ms'): value = float(timing[:-2]) * 1e-3 elif timing.endswith('us'): @@ -26,12 +25,12 @@ def parse_timing(timing, limit): def parse_row(row, line): - "Parse row." + """Parse row.""" return [val.strip() for val in row.match(line).groups()] def parse_data(infile): - "Parse data from `infile`." + """Parse data from `infile`.""" blocks = re.compile(' '.join(['=' * 9] * 8)) dashes = re.compile('^-{79}$') title = re.compile('^Timings for (.*)$') @@ -83,7 +82,7 @@ def parse_data(infile): def make_plot(data, action, save=False, show=False, limit=0.005): - "Make plot." + """Make plot.""" fig, ax = plt.subplots(figsize=(8, 10)) colors = ['#ff7f00', '#377eb8', '#4daf4a', '#984ea3', '#e41a1c'] width = 0.15 diff --git a/tests/plot_early_recompute.py b/tests/plot_early_recompute.py index e58f580..1508c45 100644 --- a/tests/plot_early_recompute.py +++ b/tests/plot_early_recompute.py @@ -1,5 +1,4 @@ """Early Recomputation Measurements - """ import functools as ft @@ -61,14 +60,14 @@ def repeat(num): def frange(start, stop, step=1e-3): - "Generator for floating point values from `start` to `stop` by `step`." + """Generator for floating point values from `start` to `stop` by `step`.""" while start < stop: yield start start += step def plot(option, filename, cache_times, worker_times): - "Plot concurrent workers and latency." + """Plot concurrent workers and latency.""" import matplotlib.pyplot as plt fig, (workers, latency) = plt.subplots(2, sharex=True) diff --git a/tests/settings.py b/tests/settings.py index 1a2f569..04aee85 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -25,7 +25,7 @@ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True -ALLOWED_HOSTS = [u'testserver'] +ALLOWED_HOSTS = ['testserver'] # Application definition diff --git a/tests/stress_test_core.py b/tests/stress_test_core.py index 6fd0991..2b2578b 100644 --- a/tests/stress_test_core.py +++ b/tests/stress_test_core.py @@ -1,4 +1,4 @@ -"Stress test diskcache.core.Cache." 
+"""Stress test diskcache.core.Cache.""" import collections as co import multiprocessing as mp @@ -33,16 +33,14 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size @@ -77,18 +75,16 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') - size = random.randint(1, int(2 ** 16 / 13)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_float(): @@ -233,7 +229,7 @@ def percentile(sequence, percent): def stress_test( create=True, delete=True, - eviction_policy=u'least-recently-stored', + eviction_policy='least-recently-stored', processes=1, threads=1, ): @@ -292,22 +288,22 @@ def stress_test( def stress_test_lru(): - "Stress test least-recently-used eviction policy." - stress_test(eviction_policy=u'least-recently-used') + """Stress test least-recently-used eviction policy.""" + stress_test(eviction_policy='least-recently-used') def stress_test_lfu(): - "Stress test least-frequently-used eviction policy." - stress_test(eviction_policy=u'least-frequently-used') + """Stress test least-frequently-used eviction policy.""" + stress_test(eviction_policy='least-frequently-used') def stress_test_none(): - "Stress test 'none' eviction policy." - stress_test(eviction_policy=u'none') + """Stress test 'none' eviction policy.""" + stress_test(eviction_policy='none') def stress_test_mp(): - "Stress test multiple threads and processes." + """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) @@ -396,7 +392,7 @@ def stress_test_mp(): '-v', '--eviction-policy', type=str, - default=u'least-recently-stored', + default='least-recently-stored', ) args = parser.parse_args() diff --git a/tests/stress_test_deque_mp.py b/tests/stress_test_deque_mp.py index 091ff0e..f3b8a48 100644 --- a/tests/stress_test_deque_mp.py +++ b/tests/stress_test_deque_mp.py @@ -2,7 +2,6 @@ import itertools as it import multiprocessing as mp -import os import random import time @@ -107,12 +106,6 @@ def stress(seed, deque): def test(status=False): - if os.environ.get('TRAVIS') == 'true': - return - - if os.environ.get('APPVEYOR') == 'True': - return - random.seed(SEED) deque = dc.Deque(range(SIZE)) processes = [] diff --git a/tests/stress_test_fanout.py b/tests/stress_test_fanout.py index 58708c9..e78dda5 100644 --- a/tests/stress_test_fanout.py +++ b/tests/stress_test_fanout.py @@ -1,4 +1,4 @@ -"Stress test diskcache.core.Cache." 
+"""Stress test diskcache.core.Cache.""" import multiprocessing as mp import os @@ -32,16 +32,14 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size @@ -76,18 +74,16 @@ def make_long(): def make_unicode(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) - ) - size = random.randint(1, int(2 ** 16 / 13)) + word = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', word_size)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) - word = u''.join( - random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) + word = ''.join( + random.sample('abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') - size = random.randint(1, int(2 ** 16 / 13)) + size = random.randint(1, int(2**16 / 13)) return word * size def make_float(): @@ -224,7 +220,7 @@ def percentile(sequence, percent): def stress_test( create=True, delete=True, - eviction_policy=u'least-recently-stored', + eviction_policy='least-recently-stored', processes=1, threads=1, ): @@ -283,22 +279,22 @@ def stress_test( def stress_test_lru(): - "Stress test least-recently-used eviction policy." - stress_test(eviction_policy=u'least-recently-used') + """Stress test least-recently-used eviction policy.""" + stress_test(eviction_policy='least-recently-used') def stress_test_lfu(): - "Stress test least-frequently-used eviction policy." - stress_test(eviction_policy=u'least-frequently-used') + """Stress test least-frequently-used eviction policy.""" + stress_test(eviction_policy='least-frequently-used') def stress_test_none(): - "Stress test 'none' eviction policy." - stress_test(eviction_policy=u'none') + """Stress test 'none' eviction policy.""" + stress_test(eviction_policy='none') def stress_test_mp(): - "Stress test multiple threads and processes." + """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) @@ -387,7 +383,7 @@ def stress_test_mp(): '-v', '--eviction-policy', type=str, - default=u'least-recently-stored', + default='least-recently-stored', ) args = parser.parse_args() diff --git a/tests/stress_test_index_mp.py b/tests/stress_test_index_mp.py index f8718f0..06ed102 100644 --- a/tests/stress_test_index_mp.py +++ b/tests/stress_test_index_mp.py @@ -2,7 +2,6 @@ import itertools as it import multiprocessing as mp -import os import random import time @@ -94,12 +93,6 @@ def stress(seed, index): def test(status=False): - if os.environ.get('TRAVIS') == 'true': - return - - if os.environ.get('APPVEYOR') == 'True': - return - random.seed(SEED) index = dc.Index(enumerate(range(KEYS))) processes = [] diff --git a/tests/test_core.py b/tests/test_core.py index bcc79e1..788afef 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,10 +1,11 @@ -"Test diskcache.core.Cache." 
+"""Test diskcache.core.Cache.""" import errno import hashlib import io import os import os.path as op +import pathlib import pickle import shutil import sqlite3 @@ -37,12 +38,19 @@ def test_init(cache): cache.close() +def test_init_path(cache): + path = pathlib.Path(cache.directory) + other = dc.Cache(path) + other.close() + assert cache.directory == other.directory + + def test_init_disk(): - with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2 ** 20) as cache: + with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2**20) as cache: key = (None, 0, 'abc') cache[key] = 0 cache.check() - assert cache.disk_min_file_size == 2 ** 20 + assert cache.disk_min_file_size == 2**20 assert cache.disk_pickle_protocol == 1 shutil.rmtree(cache.directory, ignore_errors=True) @@ -59,15 +67,15 @@ def test_disk_reset(): assert cache._disk.min_file_size == 0 assert cache._disk.pickle_protocol == 0 - cache.reset('disk_min_file_size', 2 ** 10) + cache.reset('disk_min_file_size', 2**10) cache.reset('disk_pickle_protocol', 2) cache[1] = value cache.check() - assert cache.disk_min_file_size == 2 ** 10 + assert cache.disk_min_file_size == 2**10 assert cache.disk_pickle_protocol == 2 - assert cache._disk.min_file_size == 2 ** 10 + assert cache._disk.min_file_size == 2**10 assert cache._disk.pickle_protocol == 2 shutil.rmtree(cache.directory, ignore_errors=True) @@ -89,6 +97,11 @@ def test_custom_disk(): for value in values: assert cache[value] == value + for key, value in zip(cache, values): + assert key == value + + test_memoize_iter(cache) + shutil.rmtree(cache.directory, ignore_errors=True) @@ -145,7 +158,7 @@ def test_pragma_error(cache): cursor.fetchall = fetchall fetchall.side_effect = [sqlite3.OperationalError] * 60000 - size = 2 ** 28 + size = 2**28 with mock.patch('time.sleep', lambda num: 0): with mock.patch.object(cache, '_local', local): @@ -172,15 +185,15 @@ def __getattr__(self, name): def test_getsetdel(cache): values = [ (None, False), - ((None,) * 2 ** 20, False), + ((None,) * 2**20, False), (1234, False), - (2 ** 512, False), + (2**512, False), (56.78, False), - (u'hello', False), - (u'hello' * 2 ** 20, False), + ('hello', False), + ('hello' * 2**20, False), (b'world', False), - (b'world' * 2 ** 20, False), - (io.BytesIO(b'world' * 2 ** 20), True), + (b'world' * 2**20, False), + (io.BytesIO(b'world' * 2**20), True), ] for key, (value, file_like) in enumerate(values): @@ -224,7 +237,7 @@ def test_get_keyerror4(cache): func = mock.Mock(side_effect=IOError(errno.ENOENT, '')) cache.reset('statistics', True) - cache[0] = b'abcd' * 2 ** 20 + cache[0] = b'abcd' * 2**20 with mock.patch('diskcache.core.open', func): with pytest.raises((IOError, KeyError, OSError)): @@ -232,7 +245,7 @@ def test_get_keyerror4(cache): def test_read(cache): - cache.set(0, b'abcd' * 2 ** 20) + cache.set(0, b'abcd' * 2**20) with cache.read(0) as reader: assert reader is not None @@ -244,7 +257,7 @@ def test_read_keyerror(cache): def test_set_twice(cache): - large_value = b'abcd' * 2 ** 20 + large_value = b'abcd' * 2**20 cache[0] = 0 cache[0] = 1 @@ -278,7 +291,7 @@ def test_set_timeout(cache): with pytest.raises(dc.Timeout): try: with mock.patch.object(cache, '_local', local): - cache.set('a', 'b' * 2 ** 20) + cache.set('a', 'b' * 2**20) finally: cache.check() @@ -294,11 +307,11 @@ def test_get(cache): assert cache.get(2, {}) == {} assert cache.get(0, expire_time=True, tag=True) == (None, None, None) - assert cache.set(0, 0, expire=None, tag=u'number') + assert cache.set(0, 0, expire=None, tag='number') assert 
cache.get(0, expire_time=True) == (0, None) - assert cache.get(0, tag=True) == (0, u'number') - assert cache.get(0, expire_time=True, tag=True) == (0, None, u'number') + assert cache.get(0, tag=True) == (0, 'number') + assert cache.get(0, expire_time=True, tag=True) == (0, None, 'number') def test_get_expired_fast_path(cache): @@ -333,26 +346,6 @@ def test_get_expired_slow_path(cache): assert cache.get(0) is None -def test_get_ioerror_slow_path(cache): - cache.reset('eviction_policy', 'least-recently-used') - cache.set(0, 0) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.get(0) - - def test_pop(cache): assert cache.incr('alpha') == 1 assert cache.pop('alpha') == 1 @@ -374,8 +367,8 @@ def test_pop(cache): assert cache.set('delta', 210) assert cache.pop('delta', expire_time=True) == (210, None) - assert cache.set('epsilon', '0' * 2 ** 20) - assert cache.pop('epsilon') == '0' * 2 ** 20 + assert cache.set('epsilon', '0' * 2**20) + assert cache.pop('epsilon') == '0' * 2**20 def test_pop_ioerror(cache): @@ -396,25 +389,6 @@ def test_pop_ioerror(cache): assert cache.pop(0) is None -def test_pop_ioerror_eacces(cache): - assert cache.set(0, 0) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.pop(0) - - def test_delete(cache): cache[0] = 0 assert cache.delete(0) @@ -460,11 +434,11 @@ def test_stats(cache): def test_path(cache): - cache[0] = u'abc' - large_value = b'abc' * 2 ** 20 + cache[0] = 'abc' + large_value = b'abc' * 2**20 cache[1] = large_value - assert cache.get(0, read=True) == u'abc' + assert cache.get(0, read=True) == 'abc' with cache.get(1, read=True) as reader: assert reader.name is not None @@ -499,7 +473,7 @@ def test_expire_rows(cache): def test_least_recently_stored(cache): - cache.reset('eviction_policy', u'least-recently-stored') + cache.reset('eviction_policy', 'least-recently-stored') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 2) @@ -534,7 +508,7 @@ def test_least_recently_stored(cache): def test_least_recently_used(cache): - cache.reset('eviction_policy', u'least-recently-used') + cache.reset('eviction_policy', 'least-recently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) @@ -564,7 +538,7 @@ def test_least_recently_used(cache): def test_least_frequently_used(cache): - cache.reset('eviction_policy', u'least-frequently-used') + cache.reset('eviction_policy', 'least-frequently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) @@ -591,32 +565,9 @@ def test_least_frequently_used(cache): assert len(cache.check()) == 0 -def test_filename_error(cache): - func = mock.Mock(side_effect=OSError(errno.EACCES)) - - with mock.patch('os.makedirs', func): - with pytest.raises(OSError): - cache._disk.filename() - - -def test_remove_error(cache): - func = mock.Mock(side_effect=OSError(errno.EACCES)) - - try: - with mock.patch('os.remove', func): - cache._disk.remove('ab/cd/efg.val') - except OSError: - pass - else: - if os.name == 'nt': - pass # File delete errors ignored on Windows. 
- else: - raise Exception('test_remove_error failed') - - def test_check(cache): - blob = b'a' * 2 ** 20 - keys = (0, 1, 1234, 56.78, u'hello', b'world', None) + blob = b'a' * 2**20 + keys = (0, 1, 1234, 56.78, 'hello', b'world', None) for key in keys: cache[key] = blob @@ -719,12 +670,12 @@ def test_clear_timeout(cache): def test_tag(cache): - assert cache.set(0, None, tag=u'zero') + assert cache.set(0, None, tag='zero') assert cache.set(1, None, tag=1234) assert cache.set(2, None, tag=5.67) assert cache.set(3, None, tag=b'three') - assert cache.get(0, tag=True) == (None, u'zero') + assert cache.get(0, tag=True) == (None, 'zero') assert cache.get(1, tag=True) == (None, 1234) assert cache.get(2, tag=True) == (None, 5.67) assert cache.get(3, tag=True) == (None, b'three') @@ -732,11 +683,11 @@ def test_tag(cache): def test_with(cache): with dc.Cache(cache.directory) as tmp: - tmp[u'a'] = 0 - tmp[u'b'] = 1 + tmp['a'] = 0 + tmp['b'] = 1 - assert cache[u'a'] == 0 - assert cache[u'b'] == 1 + assert cache['a'] == 0 + assert cache['b'] == 1 def test_contains(cache): @@ -765,7 +716,7 @@ def test_add(cache): def test_add_large_value(cache): - value = b'abcd' * 2 ** 20 + value = b'abcd' * 2**20 assert cache.add(b'test-key', value) assert cache.get(b'test-key') == value assert not cache.add(b'test-key', value * 2) @@ -976,7 +927,7 @@ def test_push_peek_expire(cache): def test_push_pull_large_value(cache): - value = b'test' * (2 ** 20) + value = b'test' * (2**20) cache.push(value) assert cache.pull() == (500000000000000, value) assert len(cache) == 0 @@ -984,7 +935,7 @@ def test_push_pull_large_value(cache): def test_push_peek_large_value(cache): - value = b'test' * (2 ** 20) + value = b'test' * (2**20) cache.push(value) assert cache.peek() == (500000000000000, value) assert len(cache) == 1 @@ -1028,44 +979,6 @@ def test_peek_ioerror(cache): assert value == 0 -def test_pull_ioerror_eacces(cache): - assert cache.push(0) == 500000000000000 - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.pull() - - -def test_peek_ioerror_eacces(cache): - assert cache.push(0) == 500000000000000 - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.peek() - - def test_peekitem_extras(cache): with pytest.raises(KeyError): cache.peekitem() @@ -1117,27 +1030,6 @@ def test_peekitem_ioerror(cache): assert value == 2 -def test_peekitem_ioerror_eacces(cache): - assert cache.set('a', 0) - assert cache.set('b', 1) - assert cache.set('c', 2) - - disk = mock.Mock() - put = mock.Mock() - fetch = mock.Mock() - - disk.put = put - put.side_effect = [(0, True)] - disk.fetch = fetch - io_error = IOError() - io_error.errno = errno.EACCES - fetch.side_effect = io_error - - with mock.patch.object(cache, '_disk', disk): - with pytest.raises(IOError): - cache.peekitem() - - def test_iterkeys(cache): assert list(cache.iterkeys()) == [] @@ -1260,8 +1152,8 @@ def test_cull_timeout(cache): def test_key_roundtrip(cache): - key_part_0 = u'part0' - key_part_1 = u'part1' + key_part_0 = 'part0' + key_part_1 = 'part1' to_test = [ (key_part_0, 
key_part_1), [key_part_0, key_part_1], @@ -1459,3 +1351,57 @@ def fibrec(num): assert hits2 == (hits1 + count) assert misses2 == misses1 + + +def test_memoize_kwargs(cache): + @cache.memoize(typed=True) + def foo(*args, **kwargs): + return args, kwargs + + assert foo(1, 2, 3, a=4, b=5) == ((1, 2, 3), {'a': 4, 'b': 5}) + + +def test_cleanup_dirs(cache): + value = b'\0' * 2**20 + start_count = len(os.listdir(cache.directory)) + for i in range(10): + cache[i] = value + set_count = len(os.listdir(cache.directory)) + assert set_count > start_count + for i in range(10): + del cache[i] + del_count = len(os.listdir(cache.directory)) + assert start_count == del_count + + +def test_disk_write_os_error(cache): + func = mock.Mock(side_effect=[OSError] * 10) + with mock.patch('diskcache.core.open', func): + with pytest.raises(OSError): + cache[0] = '\0' * 2**20 + + +def test_memoize_ignore(cache): + @cache.memoize(ignore={1, 'arg1'}) + def test(*args, **kwargs): + return args, kwargs + + cache.stats(enable=True) + assert test('a', 'b', 'c', arg0='d', arg1='e', arg2='f') + assert test('a', 'w', 'c', arg0='d', arg1='x', arg2='f') + assert test('a', 'y', 'c', arg0='d', arg1='z', arg2='f') + assert cache.stats() == (2, 1) + + +def test_memoize_iter(cache): + @cache.memoize() + def test(*args, **kwargs): + return sum(args) + sum(kwargs.values()) + + cache.clear() + assert test(1, 2, 3) + assert test(a=1, b=2, c=3) + assert test(-1, 0, 1, a=1, b=2, c=3) + assert len(cache) == 3 + for key in cache: + assert cache[key] == 6 diff --git a/tests/test_deque.py b/tests/test_deque.py index 8113dfe..f997a86 100644 --- a/tests/test_deque.py +++ b/tests/test_deque.py @@ -1,4 +1,4 @@ -"Test diskcache.persistent.Deque." +"""Test diskcache.persistent.Deque.""" import pickle import shutil @@ -77,6 +77,20 @@ def test_getsetdel(deque): assert len(deque) == 0 +def test_append(deque): + deque.maxlen = 3 + for item in 'abcde': + deque.append(item) + assert deque == 'cde' + + +def test_appendleft(deque): + deque.maxlen = 3 + for item in 'abcde': + deque.appendleft(item) + assert deque == 'edc' + + def test_index_positive(deque): cache = mock.MagicMock() cache.__len__.return_value = 3 @@ -131,9 +145,12 @@ def test_state(deque): sequence = list('abcde') deque.extend(sequence) assert deque == sequence + deque.maxlen = 3 + assert list(deque) == sequence[-3:] state = pickle.dumps(deque) values = pickle.loads(state) - assert values == sequence + assert values == sequence[-3:] + assert values.maxlen == 3 def test_compare(deque): @@ -161,6 +178,14 @@ def test_repr(): assert repr(deque) == 'Deque(directory=%r)' % directory +def test_copy(deque): + sequence = list('abcde') + deque.extend(sequence) + temp = deque.copy() + assert deque == sequence + assert temp == sequence + + def test_count(deque): deque += 'abbcccddddeeeee' @@ -277,3 +302,13 @@ def test_rotate_indexerror_negative(deque): with mock.patch.object(deque, '_cache', cache): deque.rotate(-1) + + +def test_peek(deque): + value = b'x' * 100_000 + deque.append(value) + assert len(deque) == 1 + assert deque.peek() == value + assert len(deque) == 1 + assert deque.peek() == value + assert len(deque) == 1 diff --git a/tests/test_djangocache.py b/tests/test_djangocache.py index 2c216fe..734ba1b 100644 --- a/tests/test_djangocache.py +++ b/tests/test_djangocache.py @@ -1,64 +1,24 @@ # Most of this file was copied from: -# https://raw.githubusercontent.com/django/django/stable/2.2.x/tests/cache/tests.py +# https://raw.githubusercontent.com/django/django/stable/3.2.x/tests/cache/tests.py # 
Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. -import copy -import io import os import pickle -import re import shutil import tempfile -import threading import time -import unittest -import warnings from unittest import mock from django.conf import settings -from django.core import management, signals -from django.core.cache import ( - DEFAULT_CACHE_ALIAS, - CacheKeyWarning, - InvalidCacheKey, - cache, - caches, -) -from django.core.cache.utils import make_template_fragment_key -from django.db import close_old_connections, connection, connections -from django.http import ( - HttpRequest, - HttpResponse, - HttpResponseNotModified, - StreamingHttpResponse, -) +from django.core.cache import CacheKeyWarning, cache, caches +from django.http import HttpResponse from django.middleware.cache import ( - CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) -from django.middleware.csrf import CsrfViewMiddleware -from django.template import engines -from django.template.context_processors import csrf -from django.template.response import TemplateResponse -from django.test import ( - RequestFactory, - SimpleTestCase, - TestCase, - TransactionTestCase, - override_settings, -) +from django.test import RequestFactory, TestCase, override_settings from django.test.signals import setting_changed -from django.utils import timezone, translation -from django.utils.cache import ( - get_cache_key, - learn_cache_key, - patch_cache_control, - patch_vary_headers, -) -from django.utils.encoding import force_text -from django.views.decorators.cache import cache_page ################################################################################ # Setup Django for models import. @@ -88,6 +48,10 @@ def __getstate__(self): raise pickle.PickleError() +def empty_response(request): + return HttpResponse() + + KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' @@ -100,7 +64,7 @@ class UnpicklableType(object): def custom_key_func(key, key_prefix, version): - "A customized cache key function" + """A customized cache key function""" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) @@ -138,6 +102,14 @@ class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() + # RemovedInDjango41Warning: python-memcached doesn't support .get() with + # default. + supports_get_with_default = True + + # Some clients raise custom exceptions when .incr() or .decr() are called + # with a non-integer value. 
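    # diskcache stores None faithfully and its incr() adds the delta to the
    # stored value directly, so a plain TypeError propagates here.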
+ incr_decr_type_error = TypeError + def tearDown(self): cache.clear() @@ -146,11 +118,15 @@ def test_simple(self): cache.set('key', 'value') self.assertEqual(cache.get('key'), 'value') + def test_default_used_when_none_is_set(self): + """If None is cached, get() returns it instead of the default.""" + cache.set('key_default_none', None) + self.assertIsNone(cache.get('key_default_none', default='default')) + def test_add(self): # A key can be added to a cache - cache.add('addkey1', 'value') - result = cache.add('addkey1', 'newvalue') - self.assertFalse(result) + self.assertIs(cache.add('addkey1', 'value'), True) + self.assertIs(cache.add('addkey1', 'newvalue'), False) self.assertEqual(cache.get('addkey1'), 'value') def test_prefix(self): @@ -158,7 +134,7 @@ def test_prefix(self): cache.set('somekey', 'value') # should not be set in the prefixed cache - self.assertFalse(caches['prefix'].has_key('somekey')) + self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') @@ -180,28 +156,43 @@ def test_get_many(self): self.assertEqual( cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'} ) + cache.set_many({'x': None, 'y': 1}) + self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get('key1'), 'spam') - cache.delete('key1') + self.assertIs(cache.delete('key1'), True) self.assertIsNone(cache.get('key1')) self.assertEqual(cache.get('key2'), 'eggs') + def test_delete_nonexistent(self): + self.assertIs(cache.delete('nonexistent_key'), False) + def test_has_key(self): # The cache can be inspected for cache keys cache.set('hello1', 'goodbye1') - self.assertTrue(cache.has_key('hello1')) - self.assertFalse(cache.has_key('goodbye1')) + self.assertIs(cache.has_key('hello1'), True) + self.assertIs(cache.has_key('goodbye1'), False) cache.set('no_expiry', 'here', None) - self.assertTrue(cache.has_key('no_expiry')) + self.assertIs(cache.has_key('no_expiry'), True) + cache.set('null', None) + self.assertIs( + cache.has_key('null'), + True if self.supports_get_with_default else False, + ) def test_in(self): # The in operator can be used to inspect cache contents cache.set('hello2', 'goodbye2') self.assertIn('hello2', cache) self.assertNotIn('goodbye2', cache) + cache.set('null', None) + if self.supports_get_with_default: + self.assertIn('null', cache) + else: + self.assertNotIn('null', cache) def test_incr(self): # Cache values can be incremented @@ -213,6 +204,9 @@ def test_incr(self): self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') + cache.set('null', None) + with self.assertRaises(self.incr_decr_type_error): + cache.incr('null') def test_decr(self): # Cache values can be decremented @@ -224,6 +218,9 @@ def test_decr(self): self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') + cache.set('null', None) + with self.assertRaises(self.incr_decr_type_error): + cache.decr('null') def test_close(self): self.assertTrue(hasattr(cache, 'close')) @@ -295,24 +292,23 @@ def test_expiration(self): time.sleep(2) self.assertIsNone(cache.get('expire1')) - cache.add('expire2', 'newvalue') + self.assertIs(cache.add('expire2', 'newvalue'), True) self.assertEqual(cache.get('expire2'), 'newvalue') - self.assertFalse(cache.has_key('expire3')) + self.assertIs(cache.has_key('expire3'), False) def test_touch(self): # cache.touch() 
updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) - self.assertTrue(cache.has_key('expire1')) + self.assertIs(cache.has_key('expire1'), True) time.sleep(3) - self.assertFalse(cache.has_key('expire1')) - + self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) - self.assertTrue(cache.has_key('expire1')) + self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) @@ -333,13 +329,13 @@ def test_unicode(self): # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): - cache.delete(key) - cache.add(key, value) + self.assertIs(cache.delete(key), True) + self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): - cache.delete(key) + self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): @@ -359,7 +355,7 @@ def test_binary_string(self): self.assertEqual(value, decompress(compressed_result).decode()) # Test add - cache.add('binary1-add', compressed_value) + self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) @@ -405,14 +401,14 @@ def test_clear(self): def test_long_timeout(self): """ - Followe memcached's convention where a timeout greater than 30 days is + Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). 
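For example, 60 * 60 * 24 * 30 + 1 = 2592001 seconds crosses that threshold, and memcached would read 2592001 as a Unix timestamp in January 1970 (already expired) rather than as a relative offset, so backends must convert such values themselves.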
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') - cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) + self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') cache.set_many( @@ -429,10 +425,9 @@ def test_forever_timeout(self): cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') - cache.add('key2', 'ham', None) + self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') - added = cache.add('key1', 'new eggs', None) - self.assertIs(added, False) + self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) @@ -440,7 +435,7 @@ def test_forever_timeout(self): self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) - cache.touch('key5', timeout=None) + self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') @@ -451,7 +446,7 @@ def test_zero_timeout(self): cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) - cache.add('key2', 'ham', 0) + self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) @@ -459,7 +454,7 @@ def test_zero_timeout(self): self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) - cache.touch('key5', timeout=0) + self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): @@ -467,7 +462,12 @@ def test_float_timeout(self): cache.set('key1', 'spam', 100.2) self.assertEqual(cache.get('key1'), 'spam') - def _perform_cull_test(self, cull_cache, initial_count, final_count): + def _perform_cull_test(self, cull_cache_name, initial_count, final_count): + try: + cull_cache = caches[cull_cache_name] + except InvalidCacheBackendError: + self.skipTest("Culling isn't implemented.") + # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): @@ -480,10 +480,24 @@ def _perform_cull_test(self, cull_cache, initial_count, final_count): self.assertEqual(count, final_count) def test_cull(self): - self._perform_cull_test(caches['cull'], 50, 29) + self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): - self._perform_cull_test(caches['zero_cull'], 50, 19) + self._perform_cull_test('zero_cull', 50, 19) + + def test_cull_delete_when_store_empty(self): + try: + cull_cache = caches['cull'] + except InvalidCacheBackendError: + self.skipTest("Culling isn't implemented.") + old_max_entries = cull_cache._max_entries + # Force _cull to delete on first cached record. 
+        cull_cache._max_entries = -1
+        try:
+            cull_cache.set('force_cull_delete', 'value', 1000)
+            self.assertIs(cull_cache.has_key('force_cull_delete'), True)
+        finally:
+            cull_cache._max_entries = old_max_entries
 
     def _perform_invalid_key_test(self, key, expected_warning):
         """
@@ -500,10 +514,24 @@ def func(key, *args):
         old_func = cache.key_func
         cache.key_func = func
 
+        tests = [
+            ('add', [key, 1]),
+            ('get', [key]),
+            ('set', [key, 1]),
+            ('incr', [key]),
+            ('decr', [key]),
+            ('touch', [key]),
+            ('delete', [key]),
+            ('get_many', [[key, 'b']]),
+            ('set_many', [{key: 1, 'b': 2}]),
+            ('delete_many', [{key: 1, 'b': 2}]),
+        ]
         try:
-            with self.assertWarns(CacheKeyWarning) as cm:
-                cache.set(key, 'value')
-            self.assertEqual(str(cm.warning), expected_warning)
+            for operation, args in tests:
+                with self.subTest(operation=operation):
+                    with self.assertWarns(CacheKeyWarning) as cm:
+                        getattr(cache, operation)(*args)
+                    self.assertEqual(str(cm.warning), expected_warning)
         finally:
             cache.key_func = old_func
@@ -567,41 +595,41 @@ def test_cache_versioning_get_set(self):
 
     def test_cache_versioning_add(self):
         # add, default version = 1, but manually override version = 2
-        cache.add('answer1', 42, version=2)
+        self.assertIs(cache.add('answer1', 42, version=2), True)
         self.assertIsNone(cache.get('answer1', version=1))
         self.assertEqual(cache.get('answer1', version=2), 42)
 
-        cache.add('answer1', 37, version=2)
+        self.assertIs(cache.add('answer1', 37, version=2), False)
         self.assertIsNone(cache.get('answer1', version=1))
         self.assertEqual(cache.get('answer1', version=2), 42)
 
-        cache.add('answer1', 37, version=1)
+        self.assertIs(cache.add('answer1', 37, version=1), True)
         self.assertEqual(cache.get('answer1', version=1), 37)
         self.assertEqual(cache.get('answer1', version=2), 42)
 
         # v2 add, using default version = 2
-        caches['v2'].add('answer2', 42)
+        self.assertIs(caches['v2'].add('answer2', 42), True)
         self.assertIsNone(cache.get('answer2', version=1))
         self.assertEqual(cache.get('answer2', version=2), 42)
 
-        caches['v2'].add('answer2', 37)
+        self.assertIs(caches['v2'].add('answer2', 37), False)
         self.assertIsNone(cache.get('answer2', version=1))
         self.assertEqual(cache.get('answer2', version=2), 42)
 
-        caches['v2'].add('answer2', 37, version=1)
+        self.assertIs(caches['v2'].add('answer2', 37, version=1), True)
         self.assertEqual(cache.get('answer2', version=1), 37)
         self.assertEqual(cache.get('answer2', version=2), 42)
 
         # v2 add, default version = 2, but manually override version = 1
-        caches['v2'].add('answer3', 42, version=1)
+        self.assertIs(caches['v2'].add('answer3', 42, version=1), True)
         self.assertEqual(cache.get('answer3', version=1), 42)
         self.assertIsNone(cache.get('answer3', version=2))
 
-        caches['v2'].add('answer3', 37, version=1)
+        self.assertIs(caches['v2'].add('answer3', 37, version=1), False)
         self.assertEqual(cache.get('answer3', version=1), 42)
         self.assertIsNone(cache.get('answer3', version=2))
 
-        caches['v2'].add('answer3', 37)
+        self.assertIs(caches['v2'].add('answer3', 37), True)
         self.assertEqual(cache.get('answer3', version=1), 42)
         self.assertEqual(cache.get('answer3', version=2), 37)
@@ -609,73 +637,73 @@ def test_cache_versioning_has_key(self):
         cache.set('answer1', 42)
 
         # has_key
-        self.assertTrue(cache.has_key('answer1'))
-        self.assertTrue(cache.has_key('answer1', version=1))
-        self.assertFalse(cache.has_key('answer1', version=2))
+        self.assertIs(cache.has_key('answer1'), True)
+        self.assertIs(cache.has_key('answer1', version=1), True)
+        self.assertIs(cache.has_key('answer1', version=2), False)
 
-        self.assertFalse(caches['v2'].has_key('answer1'))
-        self.assertTrue(caches['v2'].has_key('answer1', version=1))
-        self.assertFalse(caches['v2'].has_key('answer1', version=2))
+        self.assertIs(caches['v2'].has_key('answer1'), False)
+        self.assertIs(caches['v2'].has_key('answer1', version=1), True)
+        self.assertIs(caches['v2'].has_key('answer1', version=2), False)
 
     def test_cache_versioning_delete(self):
         cache.set('answer1', 37, version=1)
         cache.set('answer1', 42, version=2)
-        cache.delete('answer1')
+        self.assertIs(cache.delete('answer1'), True)
         self.assertIsNone(cache.get('answer1', version=1))
         self.assertEqual(cache.get('answer1', version=2), 42)
 
         cache.set('answer2', 37, version=1)
         cache.set('answer2', 42, version=2)
-        cache.delete('answer2', version=2)
+        self.assertIs(cache.delete('answer2', version=2), True)
         self.assertEqual(cache.get('answer2', version=1), 37)
         self.assertIsNone(cache.get('answer2', version=2))
 
         cache.set('answer3', 37, version=1)
         cache.set('answer3', 42, version=2)
-        caches['v2'].delete('answer3')
+        self.assertIs(caches['v2'].delete('answer3'), True)
         self.assertEqual(cache.get('answer3', version=1), 37)
         self.assertIsNone(cache.get('answer3', version=2))
 
         cache.set('answer4', 37, version=1)
         cache.set('answer4', 42, version=2)
-        caches['v2'].delete('answer4', version=1)
+        self.assertIs(caches['v2'].delete('answer4', version=1), True)
         self.assertIsNone(cache.get('answer4', version=1))
         self.assertEqual(cache.get('answer4', version=2), 42)
 
     def test_cache_versioning_incr_decr(self):
         cache.set('answer1', 37, version=1)
         cache.set('answer1', 42, version=2)
-        cache.incr('answer1')
+        self.assertEqual(cache.incr('answer1'), 38)
         self.assertEqual(cache.get('answer1', version=1), 38)
         self.assertEqual(cache.get('answer1', version=2), 42)
-        cache.decr('answer1')
+        self.assertEqual(cache.decr('answer1'), 37)
         self.assertEqual(cache.get('answer1', version=1), 37)
         self.assertEqual(cache.get('answer1', version=2), 42)
 
         cache.set('answer2', 37, version=1)
         cache.set('answer2', 42, version=2)
-        cache.incr('answer2', version=2)
+        self.assertEqual(cache.incr('answer2', version=2), 43)
         self.assertEqual(cache.get('answer2', version=1), 37)
         self.assertEqual(cache.get('answer2', version=2), 43)
-        cache.decr('answer2', version=2)
+        self.assertEqual(cache.decr('answer2', version=2), 42)
         self.assertEqual(cache.get('answer2', version=1), 37)
         self.assertEqual(cache.get('answer2', version=2), 42)
 
         cache.set('answer3', 37, version=1)
         cache.set('answer3', 42, version=2)
-        caches['v2'].incr('answer3')
+        self.assertEqual(caches['v2'].incr('answer3'), 43)
         self.assertEqual(cache.get('answer3', version=1), 37)
         self.assertEqual(cache.get('answer3', version=2), 43)
-        caches['v2'].decr('answer3')
+        self.assertEqual(caches['v2'].decr('answer3'), 42)
         self.assertEqual(cache.get('answer3', version=1), 37)
         self.assertEqual(cache.get('answer3', version=2), 42)
 
         cache.set('answer4', 37, version=1)
         cache.set('answer4', 42, version=2)
-        caches['v2'].incr('answer4', version=1)
+        self.assertEqual(caches['v2'].incr('answer4', version=1), 38)
         self.assertEqual(cache.get('answer4', version=1), 38)
         self.assertEqual(cache.get('answer4', version=2), 42)
-        caches['v2'].decr('answer4', version=1)
+        self.assertEqual(caches['v2'].decr('answer4', version=1), 37)
         self.assertEqual(cache.get('answer4', version=1), 37)
         self.assertEqual(cache.get('answer4', version=2), 42)
@@ -790,6 +818,13 @@ def test_incr_version(self):
         with self.assertRaises(ValueError):
             cache.incr_version('does_not_exist')
 
+        cache.set('null', None)
+        if self.supports_get_with_default:
+            self.assertEqual(cache.incr_version('null'), 2)
+        else:
+            with self.assertRaises(self.incr_decr_type_error):
+                cache.incr_version('null')
+
     def test_decr_version(self):
         cache.set('answer', 42, version=2)
         self.assertIsNone(cache.get('answer'))
@@ -814,6 +849,13 @@ def test_decr_version(self):
         with self.assertRaises(ValueError):
             cache.decr_version('does_not_exist', version=2)
 
+        cache.set('null', None, version=2)
+        if self.supports_get_with_default:
+            self.assertEqual(cache.decr_version('null', version=2), 1)
+        else:
+            with self.assertRaises(self.incr_decr_type_error):
+                cache.decr_version('null', version=2)
+
     def test_custom_key_func(self):
         # Two caches with different key functions aren't visible to each other
         cache.set('answer1', 42)
@@ -827,30 +869,31 @@ def test_custom_key_func(self):
         self.assertEqual(caches['custom_key2'].get('answer2'), 42)
 
     def test_cache_write_unpicklable_object(self):
-        update_middleware = UpdateCacheMiddleware()
-        update_middleware.cache = cache
-
-        fetch_middleware = FetchFromCacheMiddleware()
-        fetch_middleware.cache = cache
+        fetch_middleware = FetchFromCacheMiddleware(empty_response)
 
         request = self.factory.get('/cache/test')
         request._cache_update_cache = True
-        get_cache_data = FetchFromCacheMiddleware().process_request(request)
+        get_cache_data = FetchFromCacheMiddleware(
+            empty_response
+        ).process_request(request)
         self.assertIsNone(get_cache_data)
 
-        response = HttpResponse()
         content = 'Testing cookie serialization.'
-        response.content = content
-        response.set_cookie('foo', 'bar')
 
-        update_middleware.process_response(request, response)
+        def get_response(req):
+            response = HttpResponse(content)
+            response.set_cookie('foo', 'bar')
+            return response
+
+        update_middleware = UpdateCacheMiddleware(get_response)
+        response = update_middleware(request)
 
         get_cache_data = fetch_middleware.process_request(request)
         self.assertIsNotNone(get_cache_data)
         self.assertEqual(get_cache_data.content, content.encode())
         self.assertEqual(get_cache_data.cookies, response.cookies)
 
-        update_middleware.process_response(request, get_cache_data)
+        UpdateCacheMiddleware(lambda req: get_cache_data)(request)
         get_cache_data = fetch_middleware.process_request(request)
         self.assertIsNotNone(get_cache_data)
         self.assertEqual(get_cache_data.content, content.encode())
@@ -869,7 +912,12 @@ def test_get_or_set(self):
         self.assertIsNone(cache.get('projector'))
         self.assertEqual(cache.get_or_set('projector', 42), 42)
         self.assertEqual(cache.get('projector'), 42)
-        self.assertEqual(cache.get_or_set('null', None), None)
+        self.assertIsNone(cache.get_or_set('null', None))
+        if self.supports_get_with_default:
+            # Previous get_or_set() stores None in the cache.
+            self.assertIsNone(cache.get('null', 'default'))
+        else:
+            self.assertEqual(cache.get('null', 'default'), 'default')
 
     def test_get_or_set_callable(self):
         def my_callable():
@@ -878,14 +926,16 @@ def my_callable():
             return 'value'
 
         self.assertEqual(cache.get_or_set('mykey', my_callable), 'value')
         self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value')
 
-    def test_get_or_set_callable_returning_none(self):
-        self.assertIsNone(cache.get_or_set('mykey', lambda: None))
-        # Previous get_or_set() doesn't store None in the cache.
-        self.assertEqual(cache.get('mykey', 'default'), 'default')
+        self.assertIsNone(cache.get_or_set('null', lambda: None))
+        if self.supports_get_with_default:
+            # Previous get_or_set() stores None in the cache.
+            self.assertIsNone(cache.get('null', 'default'))
+        else:
+            self.assertEqual(cache.get('null', 'default'), 'default')
 
     def test_get_or_set_version(self):
         msg = "get_or_set() missing 1 required positional argument: 'default'"
-        cache.get_or_set('brian', 1979, version=2)
+        self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
         with self.assertRaisesMessage(TypeError, msg):
             cache.get_or_set('brian')
         with self.assertRaisesMessage(TypeError, msg):
@@ -921,7 +971,7 @@ def __getstate__(self):
     )
 )
 class DiskCacheTests(BaseCacheTests, TestCase):
-    "Specific test cases for diskcache.DjangoCache."
+    """Specific test cases for diskcache.DjangoCache."""
 
     def setUp(self):
         super().setUp()
@@ -949,6 +999,11 @@ def test_ignores_non_cache_files(self):
         )
         os.remove(fname)
 
+    def test_creates_cache_dir_if_nonexistent(self):
+        os.rmdir(self.dirname)
+        cache.set('foo', 'bar')
+        self.assertTrue(os.path.exists(self.dirname))
+
     def test_clear_does_not_remove_cache_dir(self):
         cache.clear()
         self.assertTrue(
@@ -976,7 +1031,7 @@ def test_directory(self):
         self.assertTrue('tmp' in cache.directory)
 
     def test_read(self):
-        value = b'abcd' * 2 ** 20
+        value = b'abcd' * 2**20
         result = cache.set(b'test-key', value)
         self.assertTrue(result)
 
@@ -1026,19 +1081,6 @@ def test_pop(self):
         )
         self.assertEqual(cache.pop(4, retry=False), 4)
 
-    def test_pickle(self):
-        letters = 'abcde'
-        cache.clear()
-
-        for num, val in enumerate(letters):
-            cache.set(val, num)
-
-        data = pickle.dumps(cache)
-        other = pickle.loads(data)
-
-        for key in letters:
-            self.assertEqual(other.get(key), cache.get(key))
-
     def test_cache(self):
         subcache = cache.cache('test')
         directory = os.path.join(cache.directory, 'cache', 'test')
diff --git a/tests/test_fanout.py b/tests/test_fanout.py
index 8918af3..af221b6 100644
--- a/tests/test_fanout.py
+++ b/tests/test_fanout.py
@@ -1,10 +1,11 @@
-"Test diskcache.fanout.FanoutCache."
+"""Test diskcache.fanout.FanoutCache."""
 
 import collections as co
 import hashlib
 import io
 import os
 import os.path as op
+import pathlib
 import pickle
 import shutil
 import subprocess as sp
@@ -34,7 +35,7 @@ def test_init(cache):
     del default_settings['size_limit']
     for key, value in default_settings.items():
         assert getattr(cache, key) == value
-    assert cache.size_limit == 2 ** 27
+    assert cache.size_limit == 2**27
 
     cache.check()
 
@@ -44,6 +45,13 @@ def test_init(cache):
     cache.check()
 
 
+def test_init_path(cache):
+    path = pathlib.Path(cache.directory)
+    other = dc.FanoutCache(path)
+    other.close()
+    assert cache.directory == other.directory
+
+
 def test_set_get_delete(cache):
     for value in range(100):
         cache.set(value, value)
@@ -229,15 +237,15 @@ def test_incr_concurrent():
 def test_getsetdel(cache):
     values = [
         (None, False),
-        ((None,) * 2 ** 10, False),
+        ((None,) * 2**10, False),
         (1234, False),
-        (2 ** 512, False),
+        (2**512, False),
         (56.78, False),
-        (u'hello', False),
-        (u'hello' * 2 ** 10, False),
+        ('hello', False),
+        ('hello' * 2**10, False),
         (b'world', False),
-        (b'world' * 2 ** 10, False),
-        (io.BytesIO(b'world' * 2 ** 10), True),
+        (b'world' * 2**10, False),
+        (io.BytesIO(b'world' * 2**10), True),
     ]
 
     for key, (value, file_like) in enumerate(values):
@@ -341,7 +349,7 @@ def test_tag_index(cache):
 
 
 def test_read(cache):
-    cache.set(0, b'abcd' * 2 ** 20)
+    cache.set(0, b'abcd' * 2**20)
 
     with cache.read(0) as reader:
         assert reader is not None
diff --git a/tests/test_index.py b/tests/test_index.py
index 27639f7..742daf3 100644
--- a/tests/test_index.py
+++ b/tests/test_index.py
@@ -1,4 +1,4 @@
-"Test diskcache.persistent.Index."
+"""Test diskcache.persistent.Index.""" import pickle import shutil diff --git a/tests/test_recipes.py b/tests/test_recipes.py index 8d13f2b..ae74459 100644 --- a/tests/test_recipes.py +++ b/tests/test_recipes.py @@ -1,4 +1,4 @@ -"Test diskcache.recipes." +"""Test diskcache.recipes.""" import shutil import threading @@ -28,6 +28,26 @@ def test_averager(cache): assert nums.pop() == 9.5 +def test_lock(cache): + state = {'num': 0} + lock = dc.Lock(cache, 'demo') + + def worker(): + state['num'] += 1 + with lock: + assert lock.locked() + state['num'] += 1 + time.sleep(0.1) + + with lock: + thread = threading.Thread(target=worker) + thread.start() + time.sleep(0.1) + assert state['num'] == 1 + thread.join() + assert state['num'] == 2 + + def test_rlock(cache): state = {'num': 0} rlock = dc.RLock(cache, 'demo') @@ -35,8 +55,9 @@ def test_rlock(cache): def worker(): state['num'] += 1 with rlock: - state['num'] += 1 - time.sleep(0.1) + with rlock: + state['num'] += 1 + time.sleep(0.1) with rlock: thread = threading.Thread(target=worker) diff --git a/tests/utils.py b/tests/utils.py index 5b41ce9..38e5d33 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -32,7 +32,7 @@ def secs(value): def run(*args): - "Run command, print output, and return output." + """Run command, print output, and return output.""" print('utils$', *args) result = sp.check_output(args) print(result) @@ -40,7 +40,7 @@ def run(*args): def mount_ramdisk(size, path): - "Mount RAM disk at `path` with `size` in bytes." + """Mount RAM disk at `path` with `size` in bytes.""" sectors = size / 512 os.makedirs(path) @@ -53,7 +53,7 @@ def mount_ramdisk(size, path): def unmount_ramdisk(dev_path, path): - "Unmount RAM disk with `dev_path` and `path`." + """Unmount RAM disk with `dev_path` and `path`.""" run('umount', path) run('diskutil', 'eject', dev_path) run('rm', '-r', path) diff --git a/tox.ini b/tox.ini index 04b7382..e7217a7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,11 +1,11 @@ [tox] -envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py36,py37,py38,py39 +envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py38,py39,py310,py311 skip_missing_interpreters=True [testenv] commands=pytest deps= - django==2.2.* + django==4.2.* pytest pytest-cov pytest-django @@ -23,15 +23,15 @@ commands=blue --check {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tes deps=blue [testenv:doc8] -deps=doc8 commands=doc8 docs --ignore-path docs/_build +deps=doc8 [testenv:docs] allowlist_externals=make changedir=docs commands=make html deps= - django==2.2.* + django==4.2.* sphinx [testenv:flake8] @@ -53,7 +53,7 @@ deps=mypy [testenv:pylint] commands=pylint {toxinidir}/diskcache deps= - django==2.2.* + django==4.2.* pylint [testenv:rstcheck] @@ -64,8 +64,9 @@ deps=rstcheck allowlist_externals=rsync changedir=docs commands= - rsync -azP --stats --delete _build/html/ \ - grantjenks.com:/srv/www/www.grantjenks.com/public/docs/diskcache/ + rsync --rsync-path 'sudo -u herokuish rsync' -azP --stats --delete \ + _build/html/ \ + grantjenks:/srv/www/grantjenks.com/public/docs/diskcache/ [isort] multi_line_output = 3 @@ -79,7 +80,7 @@ line_length = 79 addopts= -n auto --cov-branch - --cov-fail-under=96 + --cov-fail-under=98 --cov-report=term-missing --cov=diskcache --doctest-glob="*.rst"