Update 4.0.2

This commit is contained in:
yoshiko2
2020-11-16 10:24:40 +08:00
parent 024c703124
commit edfa9c2435
5 changed files with 36 additions and 28 deletions

View File

@@ -40,7 +40,7 @@ def get_proxy(proxy: str, proxytype: str = None) -> dict:
# 网页请求核心
def get_html(url, cookies: dict = None, ua: str = None, return_type: str = None):
proxy, timeout, retry_count, proxytype = config.Config().proxy()
switch, proxy, timeout, retry_count, proxytype = config.Config().proxy()
proxies = get_proxy(proxy, proxytype)
if ua is None:
@@ -50,7 +50,7 @@ def get_html(url, cookies: dict = None, ua: str = None, return_type: str = None)
for i in range(retry_count):
try:
if not proxy == '':
if switch == 1:
result = requests.get(str(url), headers=headers, timeout=timeout, proxies=proxies, cookies=cookies)
else:
result = requests.get(str(url), headers=headers, timeout=timeout, cookies=cookies)
@@ -62,10 +62,6 @@ def get_html(url, cookies: dict = None, ua: str = None, return_type: str = None)
else:
return result.text
except requests.exceptions.ProxyError:
print("[-]Connect retry {}/{}".format(i + 1, retry_count))
except requests.exceptions.ConnectionError:
print("[-]Connect retry {}/{}".format(i + 1, retry_count))
except Exception as e:
print("[-]Connect retry {}/{}".format(i + 1, retry_count))
print("[-]" + str(e))
@@ -73,23 +69,26 @@ def get_html(url, cookies: dict = None, ua: str = None, return_type: str = None)
def post_html(url: str, query: dict) -> requests.Response:
    """POST `query` as form data to `url` and return the raw Response.

    Uses the configured proxy only when the [proxy] switch is enabled.
    Retries up to the configured retry count on proxy/connection errors;
    if every attempt fails, prints a message, waits for ENTER and exits
    the whole program (legacy behavior, kept for compatibility).
    """
    switch, proxy, timeout, retry_count, proxytype = config.Config().proxy()
    proxies = get_proxy(proxy, proxytype)
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3100.0 Safari/537.36"}
    for i in range(retry_count):
        try:
            # BUG FIX: ConfigParser.get() returns strings, so the original
            # `switch == 1` comparison was always False and the proxy branch
            # was unreachable. Compare through str() so both int 1 and "1"
            # enable the proxy, regardless of how Config.proxy() returns it.
            if str(switch) == '1':
                result = requests.post(url, data=query, proxies=proxies, headers=headers, timeout=timeout)
            else:
                result = requests.post(url, data=query, headers=headers, timeout=timeout)
            return result
        except requests.exceptions.ProxyError:
            print("[-]Connect retry {}/{}".format(i + 1, retry_count))
        except requests.exceptions.ConnectionError:
            # consistent with get_html(): plain connection failures also retry
            print("[-]Connect retry {}/{}".format(i + 1, retry_count))
    print("[-]Connect Failed! Please check your Proxy or Network!")
    input("Press ENTER to exit!")
    exit()
def get_javlib_cookie() -> [dict, str]:
import cloudscraper
proxy, timeout, retry_count, proxytype = config.Config().proxy()
switch, proxy, timeout, retry_count, proxytype = config.Config().proxy()
proxies = get_proxy(proxy, proxytype)
raw_cookie = {}
@@ -98,10 +97,15 @@ def get_javlib_cookie() -> [dict, str]:
# Get __cfduid/cf_clearance and user-agent
for i in range(retry_count):
try:
raw_cookie, user_agent = cloudscraper.get_cookie_string(
"http://www.m45e.com/",
proxies=proxies
)
if switch == 1:
raw_cookie, user_agent = cloudscraper.get_cookie_string(
"http://www.m45e.com/",
proxies=proxies
)
else:
raw_cookie, user_agent = cloudscraper.get_cookie_string(
"http://www.m45e.com/"
)
except requests.exceptions.ProxyError:
print("[-] ProxyError, retry {}/{}".format(i+1, retry_count))
except cloudscraper.exceptions.CloudflareIUAMError:
@@ -453,7 +457,7 @@ def translateTag_to_sc(tag):
return tag
def translate(src:str,target_language:str="zh_cn"):
url = "https://translate.google.cn/translate_a/single?client=gtx&dt=t&dj=1&ie=UTF-8&sl=auto&tl={target_language}&q={src}"
url = "https://translate.google.cn/translate_a/single?client=gtx&dt=t&dj=1&ie=UTF-8&sl=auto&tl=" + target_language + "&q=" + src
result = get_html(url=url,return_type="object")
translate_list = [i["trans"] for i in result.json()["sentences"]]

View File

@@ -141,7 +141,10 @@ def main_uncensored(number):
def main(number):
try:
try:
htmlcode = get_html('https://www.javbus.com/' + number)
try:
htmlcode = get_html('https://www.fanbus.us/' + number)
except:
htmlcode = get_html('https://www.javbus.com/' + number)
try:
dww_htmlcode = fanza.main_htmlcode(getCID(htmlcode))
except:
@@ -165,8 +168,7 @@ def main(number):
'source': 'javbus.py',
'series': getSerise(htmlcode),
}
js = json.dumps(dic, ensure_ascii=False, sort_keys=True, indent=4,
separators=(',', ':'), ) # .encode('UTF-8')
js = json.dumps(dic, ensure_ascii=False, sort_keys=True, indent=4,separators=(',', ':'), ) # .encode('UTF-8')
return js
except:
return main_uncensored(number)

View File

@@ -8,8 +8,9 @@ auto_exit=0
transalte_to_sc=1
[proxy]
;proxytype: http or socks5 or socks5h
type=socks5
;proxytype: http, socks5 or socks5h; switch: 0 = proxy off, 1 = proxy on
switch=0
type=http
proxy=127.0.0.1:1080
timeout=5
retry=3
@@ -30,8 +31,8 @@ literals=\()/
folders=failed,JAV_output
[debug_mode]
switch=0
switch=1
[transalte]
switch=0 # 是否开启翻译功能
values=title,outline #需要翻译的变量
switch=1
values=title,outline

View File

@@ -50,11 +50,12 @@ class Config:
def proxy(self) -> [int, str, int, int, str]:
    """Read the [proxy] config section.

    Returns (switch, proxy, timeout, retry, proxytype).

    BUG FIX: `switch` was read with get(), which returns a string, while
    every caller tests `switch == 1` against an int — so the proxy could
    never be enabled. Read it with getint() so the flag is an int as the
    callers expect. (The stale 4-element return annotation is updated to
    match the 5-element tuple actually returned.)
    """
    try:
        sec = "proxy"
        switch = self.conf.getint(sec, "switch")
        proxy = self.conf.get(sec, "proxy")
        timeout = self.conf.getint(sec, "timeout")
        retry = self.conf.getint(sec, "retry")
        proxytype = self.conf.get(sec, "type")
        return switch, proxy, timeout, retry, proxytype
    except ValueError:
        # a non-numeric switch/timeout/retry value aborts with the shared
        # "common" config-error message (defined elsewhere on Config)
        self._exit("common")

View File

@@ -278,11 +278,11 @@ def trimblank(s: str):
# path = examle:photo , video.in the Project Folder!
def download_file_with_filename(url, filename, path, conf: config.Config, filepath, failed_folder):
proxy, timeout, retry_count, proxytype = config.Config().proxy()
switch, proxy, timeout, retry_count, proxytype = config.Config().proxy()
for i in range(retry_count):
try:
if not proxy == '':
if switch == 1:
if not os.path.exists(path):
os.makedirs(path)
proxies = get_proxy(proxy, proxytype)
@@ -330,7 +330,7 @@ def image_download(cover, number, c_word, path, conf: config.Config, filepath, f
moveFailedFolder(filepath, failed_folder)
return
_proxy, _timeout, retry, _proxytype = conf.proxy()
switch, _proxy, _timeout, retry, _proxytype = conf.proxy()
for i in range(retry):
if os.path.getsize(path + '/' + number + c_word + '-fanart.jpg') == 0:
print('[!]Image Download Failed! Trying again. [{}/3]', i + 1)