From 785fb7db344be18557b0fb168a2c52afc7a863af Mon Sep 17 00:00:00 2001
From: yoshiko2
Date: Fri, 5 May 2023 02:32:55 +0800
Subject: [PATCH] Load scraper modules dynamically via importlib

Replace the hard-coded scraper imports and the adult_func_mapping /
general_func_mapping dicts with an importlib lookup: a source name
resolves to the module scrapinglib.<source> and to the parser class
named source.capitalize() inside it. Failed sources now report their
error only in debug mode. importlib is part of the Python 3 standard
library, so no new dependency is needed.
---
 scrapinglib/api.py | 76 ++++++++++++++----------------------------------
 1 file changed, 19 insertions(+), 57 deletions(-)

diff --git a/scrapinglib/api.py b/scrapinglib/api.py
index 29de0aa..2e3e642 100644
--- a/scrapinglib/api.py
+++ b/scrapinglib/api.py
@@ -2,29 +2,9 @@
 
 import re
 import json
-
+import importlib
 import config
-from .airav import Airav
-from .carib import Carib
-from .dlsite import Dlsite
-from .fanza import Fanza
-from .gcolle import Gcolle
-from .getchu import Getchu
-from .jav321 import Jav321
-from .javdb import Javdb
-from .fc2 import Fc2
-from .madou import Madou
-from .mgstage import Mgstage
-from .javbus import Javbus
-from .xcity import Xcity
-from .avsox import Avsox
-from .javlibrary import Javlibrary
-from .javday import Javday
-from .pissplay import Pissplay
-from .javmenu import Javmenu
-
-from .tmdb import Tmdb
-from .imdb import Imdb
+from .parser import Parser
 
 
 def search(number, sources: str = None, **kwargs):
@@ -56,32 +36,8 @@
                           'mgstage', 'fc2', 'avsox', 'dlsite', 'carib', 'madou',
                           'getchu', 'gcolle', 'javday', 'pissplay', 'javmenu'
                           ]
-    adult_func_mapping = {
-        'avsox': Avsox().scrape,
-        'javbus': Javbus().scrape,
-        'xcity': Xcity().scrape,
-        'mgstage': Mgstage().scrape,
-        'madou': Madou().scrape,
-        'fc2': Fc2().scrape,
-        'dlsite': Dlsite().scrape,
-        'jav321': Jav321().scrape,
-        'fanza': Fanza().scrape,
-        'airav': Airav().scrape,
-        'carib': Carib().scrape,
-        'gcolle': Gcolle().scrape,
-        'javdb': Javdb().scrape,
-        'getchu': Getchu().scrape,
-        'javlibrary': Javlibrary().scrape,
-        'javday': Javday().scrape,
-        'pissplay': Pissplay().scrape,
-        'javmenu': Javmenu().scrape
-    }
 
     general_full_sources = ['tmdb', 'imdb']
-    general_func_mapping = {
-        'tmdb': Tmdb().scrape,
-        'imdb': Imdb().scrape,
-    }
 
     debug = False
 
@@ -126,13 +82,16 @@
             if self.debug:
                 print('[+]select', source)
             try:
-                data = self.general_func_mapping[source](name, self)
+                module = importlib.import_module('.' + source, 'scrapinglib')  # e.g. 'tmdb' -> scrapinglib.tmdb
+                parser_type = getattr(module, source.capitalize())  # class named after its module, e.g. Tmdb
+                parser: Parser = parser_type()
+                data = parser.scrape(name, self)
                 if data == 404:
                     continue
                 json_data = json.loads(data)
             except Exception as e:
-                # print('[!] 出错啦')
-                # print(e)
+                if self.debug:
+                    print('[!] scrape failed:', e)
                 pass
             # if any service return a valid return, break
             if self.get_data_state(json_data):
@@ -170,13 +129,16 @@
             if self.debug:
                 print('[+]select', source)
             try:
-                data = self.adult_func_mapping[source](number, self)
+                module = importlib.import_module('.' + source, 'scrapinglib')  # e.g. 'javbus' -> scrapinglib.javbus
+                parser_type = getattr(module, source.capitalize())  # class named after its module, e.g. Javbus
+                parser: Parser = parser_type()
+                data = parser.scrape(number, self)
                 if data == 404:
                     continue
                 json_data = json.loads(data)
             except Exception as e:
-                # print('[!] 出错啦')
-                # print(e)
+                if self.debug:
+                    print('[!] scrape failed:', e)
                 pass
             # json_data = self.func_mapping[source](number, self)
             # if any service return a valid return, break
             if self.get_data_state(json_data):
@@ -232,7 +194,7 @@
-        # check sources in func_mapping
+        # drop sources that are not in general_full_sources
         todel = []
         for s in sources:
-            if not s in self.general_func_mapping:
+            if s not in self.general_full_sources:
                 print('[!] Source Not Exist : ' + s)
                todel.append(s)
         for d in todel:
@@ -251,7 +213,7 @@
             sources.insert(0, sources.pop(sources.index(source)))
         return sources
 
-        if len(sources) <= len(self.adult_func_mapping):
+        if len(sources) <= len(self.adult_full_sources):
             # if the input file name matches certain rules,
             # move some web service to the beginning of the list
             lo_file_number = file_number.lower()
@@ -287,7 +249,7 @@
-        # check sources in func_mapping
+        # drop sources that are not in adult_full_sources
        todel = []
         for s in sources:
-            if not s in self.adult_func_mapping and config.getInstance().debug():
+            if s not in self.adult_full_sources and config.getInstance().debug():
                 print('[!] Source Not Exist : ' + s)
                 todel.append(s)
         for d in todel:
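
Note: the dynamic lookup above relies on a naming convention: each
scraper module lives at scrapinglib/<source>.py and exposes a class
named source.capitalize() ('javbus' -> Javbus, 'tmdb' -> Tmdb) with a
scrape() method. A minimal sketch of the pattern, assuming that
convention holds (load_parser is a hypothetical helper name; the patch
inlines this logic in both search loops rather than factoring it out):

    import importlib

    def load_parser(source: str):
        # 'javbus' -> import scrapinglib.javbus, then fetch its Javbus class.
        # A missing module raises ModuleNotFoundError and a missing class
        # raises AttributeError; the loops above swallow both in their
        # except block and simply move on to the next source.
        module = importlib.import_module('.' + source, package='scrapinglib')
        parser_type = getattr(module, source.capitalize())
        return parser_type()

    # usage (core stands in for the Scraping instance passed as 'self'
    # in the loops above):
    #     parser = load_parser('javbus')
    #     data = parser.scrape(number, core)

One caveat: str.capitalize() lowercases everything after the first
character, so the convention only finds classes with a single leading
capital (Avsox, Javbus, Jav321, Tmdb, ...); a class named e.g. JavBus
would not resolve and its source would be silently skipped.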