def load_scrapycfg(self, sources):
    """Merge deploy targets from scrapy.cfg-like *sources* into this config.

    The implicit ``default`` target is applied first, then every named
    target, each via ``_load_scrapycfg_target``.
    """
    targets = get_scrapycfg_targets(sources)
    # Apply the default target before any named ones; pop it so the
    # loop below only sees the named targets.
    default_target = targets.pop('default')
    self._load_scrapycfg_target('default', default_target)
    for target_name, target in six.iteritems(targets):
        self._load_scrapycfg_target(target_name, target)
def _migrate_and_load_scrapy_cfg(conf):
    """Load deploy settings from the nearest scrapy.cfg into *conf*, then
    best-effort migrate them into a scrapinghub.yml next to that file.

    Does nothing when no scrapy.cfg is found or when it contains no deploy
    configuration beyond the defaults. Prints a banner to stderr reporting
    whether the migration succeeded.
    """
    scrapycfg_path = closest_file('scrapy.cfg')
    if not scrapycfg_path:
        # No scrapy.cfg anywhere up the directory tree -- nothing to load.
        return
    targets = get_scrapycfg_targets([scrapycfg_path])
    if targets == get_scrapycfg_targets():
        # The file defines no deploy configuration of its own.
        return
    conf.load_scrapycfg([scrapycfg_path])
    # Write an equivalent scrapinghub.yml alongside the scrapy.cfg.
    sh_yml_path = os.path.join(os.path.dirname(scrapycfg_path),
                               'scrapinghub.yml')
    migrated = ShubConfig()
    migrated.load_scrapycfg([scrapycfg_path])
    try:
        migrated.save(sh_yml_path)
    except Exception:
        # Best-effort: a failed write must not abort the caller, so only
        # report it.
        click.echo(PROJECT_MIGRATION_FAILED_BANNER, err=True)
    else:
        click.echo(PROJECT_MIGRATION_OK_BANNER, err=True)
def _load_scrapycfg(self, sources):
    """Load configuration from a list of scrapy.cfg-like sources.

    Each target's ``project``, ``url``, ``username`` and ``version``
    entries are copied into ``self.projects``, ``self.endpoints``,
    ``self.apikeys`` and ``self.version`` respectively.
    """
    targets = get_scrapycfg_targets(sources)
    for target_name, target in six.iteritems(targets):
        if 'project' in target:
            if target_name == 'default':
                self.projects[target_name] = target['project']
            else:
                # Non-default targets store the project with a
                # "<target>/" prefix.
                self.projects[target_name] = (target_name + '/'
                                              + target['project'])
        if 'url' in target:
            self.endpoints[target_name] = target['url']
        if 'username' in target:
            self.apikeys[target_name] = target['username']
        if 'version' in target:
            # NOTE(review): last target defining 'version' wins -- this
            # matches the original behavior.
            self.version = target['version']