-import os, md5, urllib, time
+import os, hashlib, urllib.request, time
+from . import profile
pj = os.path.join
class cache(object):
    """Filesystem-backed HTTP response cache.

    Each response body is stored as one file under *dir*, named by the
    MD5 hex digest of its URL, and reused until older than the caller's
    expiry window.
    """

    def __init__(self, dir):
        # Directory holding one file per cached URL; created lazily on
        # the first write (see fetch()).
        self.dir = dir

    def mangle(self, url):
        """Return the cache-file name for *url*: its MD5 hex digest."""
        n = hashlib.md5()
        # URLs are ASCII by RFC 3986; a non-ASCII url raises
        # UnicodeEncodeError rather than silently mis-keying the cache.
        n.update(url.encode("ascii"))
        return n.hexdigest()

    def miss(self, url):
        """Download *url* and return the raw response body as bytes.

        Sends a fixed User-Agent and transparently inflates responses
        the server delivered gzip-encoded.
        """
        req = urllib.request.Request(url, headers={"User-Agent": "automanga/1"})
        with urllib.request.urlopen(req) as s:
            if s.headers.get("content-encoding") == "gzip":
                import gzip
                return gzip.decompress(s.read())
            return s.read()

    def fetch(self, url, expire=3600):
        """Return the body for *url* as bytes.

        Served from the on-disk cache when the cached copy is younger
        than *expire* seconds; otherwise re-downloaded via miss() and
        the cache file rewritten.
        """
        path = os.path.join(self.dir, self.mangle(url))
        # EAFP: stat and read in one attempt instead of an
        # exists()/stat() pair, which is race-prone.
        try:
            if time.time() - os.stat(path).st_mtime < expire:
                with open(path, "rb") as f:
                    return f.read()
        except OSError:
            pass  # no cached copy yet; fall through to a fresh fetch
        data = self.miss(url)
        os.makedirs(self.dir, exist_ok=True)
        with open(path, "wb") as f:
            f.write(data)
        return data
# Module-wide default cache, rooted in the user's automanga profile
# directory (replaces the old $HOME/.manga location).
default = cache(pj(profile.confdir, "htcache"))

def fetch(url, expire=3600):
    """Fetch *url* through the shared default cache.

    expire: maximum age in seconds before a cached copy is refetched.
    """
    return default.fetch(url, expire)