1 change: 0 additions & 1 deletion .bumpversion.cfg
@@ -7,4 +7,3 @@ current_version = 0.12.6
files = setup.py cachecontrol/__init__.py docs/conf.py
commit = True
tag = True

17 changes: 9 additions & 8 deletions .gitignore
@@ -2,16 +2,17 @@
#
# SPDX-License-Identifier: Apache-2.0

.DS_Store
*.egg-info/*
*.pyc
*.pyo
*.egg-info/*
dist
*~
.DS_Store
.Python
.tox
bin
build/
dist
docs/_build
include
lib
lib64
include
.Python
docs/_build
build/
.tox
21 changes: 21 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,21 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/timothycrosley/isort
rev: 4.3.21
hooks:
- id: isort
additional_dependencies:
- toml
- repo: https://github.com/python/black
rev: 19.10b0
hooks:
- id: black
2 changes: 1 addition & 1 deletion cachecontrol/__init__.py
@@ -10,6 +10,6 @@
__email__ = "eric@ionrock.org"
__version__ = "0.12.6"

from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController
from .wrapper import CacheControl
3 changes: 1 addition & 2 deletions cachecontrol/_cmd.py
@@ -3,15 +3,14 @@
# SPDX-License-Identifier: Apache-2.0

import logging
from argparse import ArgumentParser

import requests

from cachecontrol.adapter import CacheControlAdapter
from cachecontrol.cache import DictCache
from cachecontrol.controller import logger

from argparse import ArgumentParser


def setup_logging():
logger.setLevel(logging.DEBUG)
4 changes: 2 additions & 2 deletions cachecontrol/adapter.py
@@ -2,14 +2,14 @@
#
# SPDX-License-Identifier: Apache-2.0

import types
import functools
import types
import zlib

from requests.adapters import HTTPAdapter

from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
from .cache import DictCache
from .controller import PERMANENT_REDIRECT_STATUSES, CacheController
from .filewrapper import CallbackFileWrapper


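For reference, a minimal sketch of mounting the adapter directly on a `requests` session, assuming the constructor's `cache` keyword shown above; the URL is a placeholder:

```python
# Sketch only: wire CacheControlAdapter onto a plain requests.Session.
import requests

from cachecontrol.adapter import CacheControlAdapter
from cachecontrol.cache import DictCache

sess = requests.Session()
adapter = CacheControlAdapter(cache=DictCache())
sess.mount("http://", adapter)
sess.mount("https://", adapter)

resp = sess.get("https://example.com/")  # responses now pass through the caching adapter
```
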
16 changes: 11 additions & 5 deletions cachecontrol/cache.py
@@ -6,26 +6,32 @@
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
"""

from abc import ABCMeta, abstractmethod
from threading import Lock

from six import add_metaclass

class BaseCache(object):

@add_metaclass(ABCMeta)
class BaseCache(object):
@abstractmethod
def get(self, key):
raise NotImplementedError()
pass

@abstractmethod
def set(self, key, value):
raise NotImplementedError()
pass

@abstractmethod
def delete(self, key):
raise NotImplementedError()
pass

def close(self):
pass


class DictCache(BaseCache):

def __init__(self, init_dict=None):
self.lock = Lock()
self.data = init_dict or {}
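
With `add_metaclass(ABCMeta)` applied, `BaseCache` becomes a real abstract base class: `get`, `set`, and `delete` must be overridden before a backend can be instantiated, while `close()` stays optional. A short sketch of a hypothetical backend against the new interface:

```python
# Illustrative no-op backend (not part of the library) against the abstract BaseCache.
from cachecontrol.cache import BaseCache


class NullCache(BaseCache):
    """Stores nothing; every lookup is a miss."""

    def get(self, key):
        return None

    def set(self, key, value):
        pass

    def delete(self, key):
        pass


cache = NullCache()  # would raise TypeError if any abstract method were missing
```
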
12 changes: 9 additions & 3 deletions cachecontrol/caches/file_cache.py
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: Apache-2.0

import hashlib
import logging
import os
from textwrap import dedent

@@ -14,6 +15,9 @@
except NameError:
# py2.X
FileNotFoundError = (IOError, OSError)
FileExistsError = (IOError, OSError)

logger = logging.getLogger(__name__)


def _secure_open_write(filename, fmode):
@@ -58,7 +62,6 @@ def _secure_open_write(filename, fmode):


class FileCache(BaseCache):

def __init__(
self,
directory,
@@ -111,6 +114,7 @@ def _fn(self, name):

def get(self, key):
name = self._fn(key)
logger.debug("Looking up '%s' in '%s'", key, name)
try:
with open(name, "rb") as fh:
return fh.read()
@@ -120,11 +124,13 @@ def get(self, key):

def set(self, key, value):
name = self._fn(key)
logger.debug("Caching '%s' in '%s'", key, name)

# Make sure the directory exists
parentdir = os.path.dirname(name)
try:
os.makedirs(os.path.dirname(name), self.dirmode)
except (IOError, OSError):
os.makedirs(parentdir, self.dirmode)
except FileExistsError:
pass

with self.lock_class(name) as lock:
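
The new module-level logger makes lookups and writes visible. A minimal usage sketch, assuming the documented `CacheControl` wrapper; the directory name and URL are placeholders:

```python
# Sketch only: file-backed caching for a requests session.
import requests

from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache

sess = CacheControl(requests.Session(), cache=FileCache(".web_cache"))
resp = sess.get("https://example.com/")
# With logging configured at DEBUG level, this module now reports
# "Looking up '<key>' in '<path>'" and "Caching '<key>' in '<path>'".
```
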
2 changes: 1 addition & 1 deletion cachecontrol/caches/redis_cache.py
@@ -5,11 +5,11 @@
from __future__ import division

from datetime import datetime

from cachecontrol.cache import BaseCache


class RedisCache(BaseCache):

def __init__(self, conn):
self.conn = conn

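A sketch of plugging in this backend, assuming a locally reachable Redis server and the `redis` client package; host and port are placeholders:

```python
# Sketch only: Redis-backed caching for a requests session.
import redis
import requests

from cachecontrol import CacheControl
from cachecontrol.caches.redis_cache import RedisCache

conn = redis.Redis(host="localhost", port=6379)
sess = CacheControl(requests.Session(), cache=RedisCache(conn))
```
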
17 changes: 0 additions & 17 deletions cachecontrol/compat.py
@@ -2,17 +2,6 @@
#
# SPDX-License-Identifier: Apache-2.0

try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin


try:
import cPickle as pickle
except ImportError:
import pickle

# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
@@ -24,9 +13,3 @@
from requests.packages.urllib3.util import is_fp_closed
except ImportError:
from urllib3.util import is_fp_closed

# Replicate some six behaviour
try:
text_type = unicode
except NameError:
text_type = str
18 changes: 10 additions & 8 deletions cachecontrol/controller.py
@@ -5,9 +5,9 @@
"""
The httplib2 algorithms ported for use with requests.
"""
import calendar
import logging
import re
import calendar
import time
from email.utils import parsedate_tz

@@ -16,7 +16,6 @@
from .cache import DictCache
from .serialize import Serializer


logger = logging.getLogger(__name__)

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
@@ -164,7 +163,7 @@ def cached_request(self, request):
# with cache busting headers as usual (ie no-cache).
if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
msg = (
'Returning cached permanent redirect response '
"Returning cached permanent redirect response "
"(ignoring date and etag information)"
)
logger.debug(msg)
@@ -312,20 +311,21 @@ def cache_response(self, request, response, body=None, status_codes=None):
# If we've been given an etag, then keep the response
if self.cache_etags and "etag" in response_headers:
logger.debug("Caching due to etag")
self.cache.set(
cache_url, self.serializer.dumps(request, response, body)
)
self.cache.set(cache_url, self.serializer.dumps(request, response, body))

# Add to the cache any permanent redirects. We do this before looking
# that the Date headers.
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
logger.debug("Caching permanent redirect")
self.cache.set(cache_url, self.serializer.dumps(request, response, b''))
self.cache.set(cache_url, self.serializer.dumps(request, response, b""))

# Add to the cache if the response headers demand it. If there
# is no date header then we can't do anything about expiring
# the cache.
elif "date" in response_headers:
elif "date" not in response_headers:
logger.debug("No date header, expiration cannot be set.")
return
else:
# cache when there is a max-age > 0
if "max-age" in cc and cc["max-age"] > 0:
logger.debug("Caching b/c date exists and max-age > 0")
@@ -341,6 +341,8 @@ def cache_response(self, request, response, body=None, status_codes=None):
self.cache.set(
cache_url, self.serializer.dumps(request, response, body)
)
else:
logger.debug("No combination of headers to cache.")

def update_cached_response(self, request, response):
"""On a 304 we will get a new set of headers that we want to
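
The reworked `cache_response()` now logs which branch it takes (etag, permanent redirect, missing `Date` header, `max-age`, `expires`, or nothing cacheable). A hedged way to observe those messages, relying on the module logger being named `cachecontrol.controller`; the URL is a placeholder:

```python
# Sketch only: surface the controller's caching decisions in the log output.
import logging

import requests

from cachecontrol import CacheControl

logging.basicConfig(level=logging.DEBUG)
logging.getLogger("cachecontrol.controller").setLevel(logging.DEBUG)

sess = CacheControl(requests.Session())
sess.get("https://example.com/")
# Expect one of the branches above, e.g. "Caching due to etag",
# "No date header, expiration cannot be set." or
# "No combination of headers to cache."
```
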
18 changes: 13 additions & 5 deletions cachecontrol/heuristics.py
Original file line number Diff line number Diff line change
@@ -4,10 +4,8 @@

import calendar
import time

from email.utils import formatdate, parsedate, parsedate_tz

from datetime import datetime, timedelta
from email.utils import formatdate, parsedate, parsedate_tz

TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"

@@ -22,7 +20,6 @@ def datetime_to_header(dt):


class BaseHeuristic(object):

def warning(self, response):
"""
Return a valid 1xx warning header value describing the cache
@@ -101,8 +98,19 @@ class LastModified(BaseHeuristic):
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
Unlike mozilla we limit this to 24-hr.
"""

cacheable_by_default_statuses = {
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
200,
203,
204,
206,
300,
301,
404,
405,
410,
414,
501,
}

def update_headers(self, resp):
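
A usage sketch for the reformatted `LastModified` heuristic, assuming the `heuristic` keyword accepted by the `CacheControl` wrapper; the URL is a placeholder:

```python
# Sketch only: derive freshness from Last-Modified when the server gives no explicit lifetime.
import requests

from cachecontrol import CacheControl
from cachecontrol.heuristics import LastModified

sess = CacheControl(requests.Session(), heuristic=LastModified())
resp = sess.get("https://example.com/")
# Responses whose status is in cacheable_by_default_statuses and that carry a
# Last-Modified header get a freshness lifetime derived from that header,
# capped at 24 hours as noted in the class docstring.
```
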
6 changes: 4 additions & 2 deletions cachecontrol/serialize.py
@@ -10,7 +10,10 @@
import msgpack
from requests.structures import CaseInsensitiveDict

from .compat import HTTPResponse, pickle, text_type
from six import text_type
from six.moves import cPickle as pickle

from .compat import HTTPResponse


def _b64_decode_bytes(b):
@@ -25,7 +28,6 @@ def _b64_decode_str(s):


class Serializer(object):

def dumps(self, request, response, body):
response_headers = CaseInsensitiveDict(response.headers)

18 changes: 10 additions & 8 deletions dev_requirements.txt
@@ -4,15 +4,17 @@

-e .

tox
pytest-cov
pytest
mock
black
bumpversion
cherrypy
sphinx
redis
isort
lockfile
bumpversion
mock
pre-commit
pytest
pytest-cov
redis
sphinx
tox
twine
black
wheel
3 changes: 2 additions & 1 deletion docs/conf.py
@@ -15,7 +15,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os
import os
import sys

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the