Windows fixes #27

Open · wants to merge 6 commits into base: master

cappy/cappy.py: 18 changes (12 additions, 6 deletions)
@@ -19,6 +19,8 @@
 from requests.adapters import HTTPAdapter
 from requests.packages.urllib3.util.retry import Retry

+from hashlib import md5
+from re import compile

 def log(*args):
     message = "".join(args)
@@ -56,18 +58,17 @@ def split_path(path):
         last_fragment = split_path[-1]
         if '.' not in last_fragment:
             filename = ''
-            dirname = path
+            dirname = os.path.join(*split_path)
         else:
             filename = last_fragment
-            dirname = '/'.join(split_path[:-1])
+            dirname = os.path.join(*split_path[:-1])
     else:
         filename = ''
         dirname = path
     return (dirname, filename)


 def get_hashed_filepath(stub, method, parsed_url, params):
-    hash_template = '{method}:{stub}{param_str}'
     param_str = ''
     if not stub:
         stub = 'index.html'
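
Note: a minimal sketch of the effect of switching to os.path.join, assuming split_path works on URL path fragments that were split on '/'. os.path.join rebuilds the cache directory with the platform's own separator, so the same layout also works with backslashes on Windows:

import os

fragments = ['example.com', 'static', 'img']  # hypothetical URL path fragments
dirname = os.path.join(*fragments)
print(dirname)  # example.com/static/img on POSIX, example.com\static\img on Windows
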
@@ -77,9 +78,11 @@ def get_hashed_filepath(stub, method, parsed_url, params):
         param_str = parsed_url.query
     if param_str:
         param_str = '?'+param_str
-    return hash_template.format(method=method, stub=stub, param_str=param_str)
+    return md5(method + stub + param_str).hexdigest()


+FORBIDDEN = compile('[<>:"|?*]')
+
 class CacheHandler(SocketServer.ThreadingMixIn, BaseHTTPServer.BaseHTTPRequestHandler):
     # Based on http://sharebear.co.uk/blog/2009/09/17/very-simple-python-caching-proxy/
     def get_cache(self, parsed_url, url, params={}):
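
Note: hashing the cache key with md5 yields a 32-character hex filename, so characters such as ':' and '?' from the old '{method}:{stub}{param_str}' template never reach the file system, where Windows would reject them. A minimal sketch in Python 3 syntax, where md5 needs bytes (the patched line passes a str directly, which Python 2's hashlib accepts):

from hashlib import md5

method, stub, param_str = 'GET', 'index.html', '?q=windows'  # hypothetical request
cache_name = md5((method + stub + param_str).encode('utf-8')).hexdigest()
print(cache_name)  # hex digits only, safe as a filename on any platform
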
@@ -89,7 +92,10 @@ def get_cache(self, parsed_url, url, params={}):
         data = None
         filepath = get_hashed_filepath(stub=filepath_stub, method=method, parsed_url=parsed_url, params=params)

-        cache_file = os.path.join(get_cache_dir(CACHE_DIR), dirpath, filepath)
+        # replace characters forbidden by file system with `_`
+        clean_dirpath = FORBIDDEN.sub('_', dirpath)
+
+        cache_file = os.path.join(get_cache_dir(CACHE_DIR), clean_dirpath, filepath)
         hit = False
         if os.path.exists(cache_file):
             if CACHE_TIMEOUT == 0:
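
Note: a small sketch of the sanitization step with a made-up dirpath. The FORBIDDEN pattern covers the characters Windows disallows in file and directory names (< > : " | ? *), and sub() replaces each occurrence with '_':

from re import compile

FORBIDDEN = compile('[<>:"|?*]')
dirpath = 'example.com:8080/search'  # hypothetical cache sub-path with a port
clean_dirpath = FORBIDDEN.sub('_', dirpath)
print(clean_dirpath)  # example.com_8080/search
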
@@ -113,7 +119,7 @@ def get_cache(self, parsed_url, url, params={}):
             log("Cache miss")
             data = self.make_request(url=url, params=params, method=method)
             # make dirs before you write to file
-            dirname, _filename = split_path(cache_file)
+            dirname = os.path.dirname(cache_file)
             make_dirs(dirname)
             file_obj = fopen(cache_file, 'wb+')
             file_obj.writelines(data)
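
Note: a brief illustration of the final change, with a hypothetical cache_file value. os.path.dirname strips the last path component using the platform's path rules, so the directory handed to make_dirs matches whatever separator os.path.join produced above, whereas the custom split_path appears to assume '/'-separated paths:

import os

cache_file = os.path.join('cache', 'example.com', 'static', 'abc123')  # hypothetical
dirname = os.path.dirname(cache_file)
print(dirname)  # cache/example.com/static on POSIX, cache\example.com\static on Windows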