| 1 | import os, hashlib, urllib.request, time |
| 2 | from . import profile |
| 3 | pj = os.path.join |
| 4 | |
class cache(object):
    """A minimal on-disk HTTP response cache.

    Responses are stored as files under ``dir``, keyed by the MD5 hex
    digest of the URL; freshness is judged by file mtime.
    """

    def __init__(self, dir):
        # NOTE: parameter is named `dir` (shadowing the builtin) to keep
        # backward compatibility with existing keyword callers.
        self.dir = dir

    def mangle(self, url):
        """Return a filesystem-safe cache key (MD5 hex digest) for *url*.

        Encodes as UTF-8 rather than ASCII so non-ASCII URLs hash instead
        of raising UnicodeEncodeError; ASCII URLs yield the same digest
        as before.
        """
        return hashlib.md5(url.encode("utf-8")).hexdigest()

    def miss(self, url):
        """Fetch *url* over HTTP and return the response body as bytes.

        Transparently decompresses bodies that arrive gzip-encoded.
        May raise urllib.error.URLError / HTTPError on network failure.
        """
        req = urllib.request.Request(url, headers={"User-Agent": "automanga/1"})
        with urllib.request.urlopen(req) as s:
            data = s.read()
            # Some servers gzip the body even when it was not requested.
            # gzip.decompress avoids the never-closed GzipFile of the
            # previous implementation.
            if s.headers.get("content-encoding") == "gzip":
                import gzip
                data = gzip.decompress(data)
        return data

    def fetch(self, url, expire=3600):
        """Return the body for *url*, preferring the on-disk cache.

        A cached copy younger than *expire* seconds is returned directly;
        otherwise the URL is re-fetched and the cache file rewritten.
        """
        path = os.path.join(self.dir, self.mangle(url))
        # EAFP: stat once and fall through on a missing file, instead of
        # the racy exists()-then-stat() pair.
        try:
            if time.time() - os.stat(path).st_mtime < expire:
                with open(path, "rb") as f:
                    return f.read()
        except FileNotFoundError:
            pass
        data = self.miss(url)
        # exist_ok avoids a race with another process creating the dir
        # between a check and the mkdir.
        os.makedirs(self.dir, exist_ok=True)
        with open(path, "wb") as f:
            f.write(data)
        return data
| 34 | |
# Module-wide shared cache instance, rooted at "htcache" inside the
# profile configuration directory (presumably per-user; confirm against
# the `profile` module).
default = cache(pj(profile.confdir, "htcache"))
| 36 | |
def fetch(url, expire=3600):
    """Fetch *url* through the module-wide ``default`` cache.

    Convenience wrapper: equivalent to ``default.fetch(url, expire)``.
    """
    return default.fetch(url, expire=expire)