-import os, md5, urllib, time
+import os, hashlib, urllib.request, time
pj = os.path.join  # short local alias; cache file paths are built with it below
class cache(object):
self.dir = dir
def mangle(self, url):
- n = md5.new()
- n.update(url)
+ n = hashlib.md5()
+ n.update(url.encode("ascii"))
return n.hexdigest()
def miss(self, url):
- s = urllib.urlopen(url)
- try:
+ with urllib.request.urlopen(url) as s:
+ if s.headers.get("content-encoding") == "gzip":
+ import gzip, io
+ return gzip.GzipFile(fileobj=io.BytesIO(s.read()), mode="r").read()
return s.read()
- finally:
- s.close()
- def fetch(self, url, expire = 3600):
+ def fetch(self, url, expire=3600):
path = pj(self.dir, self.mangle(url))
if os.path.exists(path):
if time.time() - os.stat(path).st_mtime < expire:
- with open(path) as f:
+ with open(path, "rb") as f:
return f.read()
data = self.miss(url)
if not os.path.isdir(self.dir):
os.makedirs(self.dir)
- with open(path, "w") as f:
+ with open(path, "wb") as f:
f.write(data)
return data
raise Exception("Could not find home directory for HTTP caching")
default = cache(pj(home, ".manga", "htcache"))
def fetch(url, expire=3600):
    """Module-level convenience wrapper: fetch *url* through the shared
    default cache, honouring the same *expire* freshness window."""
    return default.fetch(url, expire=expire)