Dolda2000 GitWeb
/
automanga.git
/ commitdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
| commitdiff |
tree
raw
|
patch
|
inline
| side by side (parent:
90b3abc
)
Use "standard" user-agent for Batoto.
author
Fredrik Tolf
<fredrik@dolda2000.com>
Wed, 31 Aug 2016 20:20:57 +0000
(22:20 +0200)
committer
Fredrik Tolf
<fredrik@dolda2000.com>
Wed, 31 Aug 2016 20:20:57 +0000
(22:20 +0200)
manga/batoto.py
patch
|
blob
|
blame
|
history
diff --git
a/manga/batoto.py
b/manga/batoto.py
index
4343a05
..
8c25789
100644
(file)
--- a/
manga/batoto.py
+++ b/
manga/batoto.py
@@ -288,6 +288,7 @@ class session(object):
        values["rememberMe"] = "1"
        values["anonymous"] = "1"
        req = urllib.request.Request(form["action"], urllib.parse.urlencode(values).encode("ascii"))
+       req.add_header("User-Agent", self.useragent)
        with self.web.open(req) as hs:
            page = soupify(hs.read())
            for resp in page.findAll("p", attrs={"class": "message"}):
@@ -300,8 +301,10 @@ class session(object):
    def open(self, url):
        return self.web.open(url)
+   useragent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.160 Safari/537.22"
    def fetch(self, url, headers=None):
        req = urllib.request.Request(url)
+       req.add_header("User-Agent", self.useragent)
        if headers is not None:
            for k, v in headers.items():
                req.add_header(k, v)
@@ -338,7 +341,9 @@ class library(lib.library):
        while True:
            _pars = dict(pars)
            _pars["p"] = str(p)
-           resp = urllib.request.urlopen(self.base + "search?" + urllib.parse.urlencode(_pars))
+           req = urllib.request.Request(self.base + "search?" + urllib.parse.urlencode(_pars))
+           req.add_header("User-Agent", session.useragent)
+           resp = urllib.request.urlopen(req)
        try:
            page = soupify(resp.read())
        finally: