import importlib
import json
import os
import sys


def find_spider_filepath(directory=None, name=None):
    if directory is None:
        directory = os.path.abspath(os.curdir)
    os.chdir(directory)
    sys.path.insert(0, directory)
    cp = get_scrapy_cfg()
    settings_mod_name = cp.get('settings', 'default')
    project_name = settings_mod_name.split('.')[0]
    spiders_path = os.path.join(directory, project_name, 'spiders')
    filenames = os.listdir(spiders_path)
    filepath = get_spider_filepath(filenames, project_name, name)
    print(filepath)
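
# Hedged sketch: get_scrapy_cfg() and get_spider_filepath() are used above but
# not shown in this section. The assumption here is that get_scrapy_cfg() parses
# the project's scrapy.cfg (whose [settings] section maps "default" to the
# dotted settings module) with configparser, and that get_spider_filepath()
# returns the file under spiders/ defining a Spider whose name matches.
# The project's real helpers may differ.
import configparser
import inspect

import scrapy


def get_scrapy_cfg():
    # scrapy.cfg sits in the project root, which the callers chdir into first
    cp = configparser.ConfigParser()
    cp.read('scrapy.cfg')
    return cp


def get_spider_filepath(filenames, project_name, name):
    # import each spider module and look for a Spider class with the given name
    for filename in filenames:
        if not filename.endswith('.py') or filename == '__init__.py':
            continue
        mod = importlib.import_module(f'{project_name}.spiders.{filename[:-3]}')
        for obj in vars(mod).values():
            if (inspect.isclass(obj) and issubclass(obj, scrapy.Spider)
                    and getattr(obj, 'name', None) == name):
                return os.path.abspath(mod.__file__)
    return None
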
def items(directory=None):
    if directory is None:
        directory = os.path.abspath(os.curdir)
    os.chdir(directory)
    cp = get_scrapy_cfg()
    settings_mod_name = cp.get('settings', 'default')
    project_name = settings_mod_name.split('.')[0]
    items_mod_name = f'{project_name}.items'
    sys.path.insert(0, directory)
    items = importlib.import_module(items_mod_name)
    # get list of all items fields
    data = get_items_fields(items)
    print(json.dumps(data))
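
# Hedged sketch: get_items_fields() is assumed to walk the imported items
# module and report each scrapy.Item subclass with its declared field names;
# the project's actual helper may collect more detail.
def get_items_fields(items_module):
    data = []
    for cls_name, cls in inspect.getmembers(items_module, inspect.isclass):
        if issubclass(cls, scrapy.Item) and cls is not scrapy.Item:
            data.append({
                'name': cls_name,
                'fields': sorted(cls.fields.keys()),
            })
    return data
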
def pipelines(directory=None, name=None, delete=None):
    if directory is None:
        directory = os.path.abspath(os.curdir)
    os.chdir(directory)
    cp = get_scrapy_cfg()
    settings_mod_name = cp.get('settings', 'default')
    project_name = settings_mod_name.split('.')[0]
    pipelines_mod_name = f'{project_name}.pipelines'
    sys.path.insert(0, directory)
    pipelines = importlib.import_module(pipelines_mod_name)
    # get list of all pipelines
    if name is None:
        data = get_pipelines(pipelines)
        print(json.dumps(data))
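
# Hedged sketch: get_pipelines() is assumed to list the pipeline classes that
# the project's pipelines module itself defines (skipping anything merely
# imported into it); the real helper may also report per-pipeline settings.
def get_pipelines(pipelines_module):
    return [
        cls_name
        for cls_name, cls in inspect.getmembers(pipelines_module, inspect.isclass)
        if cls.__module__ == pipelines_module.__name__
    ]
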
def settings(directory=None):
    if directory is None:
        directory = os.path.abspath(os.curdir)
    os.chdir(directory)
    cp = get_scrapy_cfg()
    settings_mod_name = cp.get('settings', 'default')
    sys.path.insert(0, directory)
    settings = importlib.import_module(settings_mod_name)
    data = []
    for key in [key for key in dir(settings) if not key.startswith('__')]:
        data.append({
            'key': key,
            'value': getattr(settings, key),
        })
    print(json.dumps(data))
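
# Hedged usage sketch: since every command takes keyword arguments and prints
# JSON, one plausible way to expose them is as a small CLI via python-fire
# (an assumption; the project may wire them up differently), e.g.
#   python scrapy_introspect.py items --directory=/path/to/project
# where the module name is hypothetical.
if __name__ == '__main__':
    import fire

    fire.Fire({
        'find_spider_filepath': find_spider_filepath,
        'items': items,
        'pipelines': pipelines,
        'settings': settings,
    })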