def cli(project, base_image, base_deps, add_deps, requirements):
    """Generate a Dockerfile for the closest Scrapy project.

    Locates scrapy.cfg, validates that a settings entry exists for
    ``project``, renders DOCKERFILE_TEMPLATE with the provided image,
    system dependencies, environment and requirements, and writes the
    result next to scrapy.cfg. Aborts if a Dockerfile already exists.
    """
    closest_scrapy_cfg = shub_utils.closest_file('scrapy.cfg')
    scrapy_config = shub_utils.get_config()
    project_is_configured = (
        closest_scrapy_cfg and scrapy_config.has_option('settings', project))
    if not project_is_configured:
        raise shub_exceptions.BadConfigException(
            'Cannot find Scrapy project settings. Please ensure that current directory '
            'contains scrapy.cfg with settings section, see example at '
            'https://doc.scrapy.org/en/latest/topics/commands.html#default-structure-of-scrapy-projects')  # NOQA
    project_dir = os.path.dirname(closest_scrapy_cfg)
    dockerfile_path = os.path.join(project_dir, 'Dockerfile')
    if os.path.exists(dockerfile_path):
        raise shub_exceptions.ShubException(
            'Found a Dockerfile in the project directory, aborting')
    settings_module = scrapy_config.get('settings', 'default')
    shub_utils.create_default_setup_py(settings=settings_module)
    # Render template values; fall back to '' for any falsy entry so the
    # template never receives None.
    template_values = {
        'base_image': base_image,
        'system_deps': _format_system_deps(base_deps, add_deps),
        'system_env': _format_system_env(settings_module),
        'requirements': _format_requirements(project_dir, requirements),
    }
    template_values = {key: (value or '')
                       for key, value in template_values.items()}
    rendered = Template(DOCKERFILE_TEMPLATE).substitute(template_values)
    # Collapse blank lines left behind by empty template sections.
    rendered = rendered.replace('\n\n', '\n')
    with open(dockerfile_path, 'w') as dockerfile:
        dockerfile.write(rendered)
    click.echo("Dockerfile is saved to {}".format(dockerfile_path))
def cli(project, base_image, base_deps, add_deps, requirements):
    """Write a Dockerfile for the current Scrapy project.

    Requires a scrapy.cfg with a ``settings`` option for ``project`` and
    refuses to overwrite an existing Dockerfile in the project root.
    """
    cfg_path = shub_utils.closest_file('scrapy.cfg')
    cfg = shub_utils.get_config()
    if not cfg_path or not cfg.has_option('settings', project):
        raise shub_exceptions.BadConfigException(
            'Cannot find Scrapy project settings. Please ensure that current directory '
            'contains scrapy.cfg with settings section, see example at '
            'https://doc.scrapy.org/en/latest/topics/commands.html#default-structure-of-scrapy-projects')  # NOQA
    project_dir = os.path.dirname(cfg_path)
    dockerfile_path = os.path.join(project_dir, 'Dockerfile')
    if os.path.exists(dockerfile_path):
        raise shub_exceptions.ShubException(
            'Found a Dockerfile in the project directory, aborting')
    settings_module = cfg.get('settings', 'default')
    shub_utils.create_default_setup_py(settings=settings_module)
    # Build the substitution mapping, normalising falsy values to ''.
    substitutions = {}
    substitutions['base_image'] = base_image or ''
    substitutions['system_deps'] = _format_system_deps(base_deps, add_deps) or ''
    substitutions['system_env'] = _format_system_env(settings_module) or ''
    substitutions['requirements'] = (
        _format_requirements(project_dir, requirements) or '')
    contents = Template(DOCKERFILE_TEMPLATE).substitute(substitutions)
    contents = contents.replace('\n\n', '\n')
    with open(dockerfile_path, 'w') as dockerfile:
        dockerfile.write(contents)
    click.echo("Dockerfile is saved to {}".format(dockerfile_path))
def get_project_dir():
    """Return the project root directory.

    Used by the init/build commands to locate the Dockerfile; the root is
    the directory containing the closest scrapinghub.yml.
    """
    closest = shub_utils.closest_file('scrapinghub.yml')
    if closest:
        return os.path.dirname(closest)
    raise BadConfigException(
        "Not inside a project: scrapinghub.yml not found.")
def _build_egg():
    """Build a project egg via ``setup.py bdist_egg``.

    Changes CWD to the Scrapy project root, generating a default setup.py
    if missing, then builds into a fresh temp dir. Returns a tuple of
    (egg path, temp build dir).
    """
    project_root = os.path.dirname(closest_file('scrapy.cfg'))
    os.chdir(project_root)
    if not os.path.exists('setup.py'):
        default_settings = get_config().get('settings', 'default')
        _create_default_setup_py(settings=default_settings)
    build_dir = tempfile.mkdtemp(prefix="shub-deploy-")
    run_python(['setup.py', 'clean', '-a', 'bdist_egg', '-d', build_dir])
    egg_path = glob.glob(os.path.join(build_dir, '*.egg'))[0]
    return egg_path, build_dir
def _create_setup_py_if_not_exists():
    """Create a default setup.py in the Scrapy project root if absent.

    A missing scrapy.cfg means this is a non-scrapy/non-python project,
    so we only warn and return instead of creating anything.
    """
    closest = closest_file('scrapy.cfg')
    if not closest:
        warnings.warn("scrapy.cfg is not found")
        return
    # Work inside the project root without permanently changing CWD.
    with utils.remember_cwd():
        os.chdir(os.path.dirname(closest))
        if os.path.exists('setup.py'):
            return
        default_settings = get_config().get('settings', 'default')
        _create_default_setup_py(settings=default_settings)
def _build_egg():
    """Build the project egg, capturing build output to log files.

    Runs ``setup.py bdist_egg`` in the project root with stdout/stderr
    redirected into the temp build dir. Returns (egg path, build dir).
    """
    closest = closest_file("scrapy.cfg")
    os.chdir(os.path.dirname(closest))
    if not os.path.exists("setup.py"):
        _create_default_setup_py(
            settings=get_config().get("settings", "default"))
    d = tempfile.mkdtemp(prefix="shub-deploy-")
    build_cmd = [sys.executable, "setup.py", "clean", "-a",
                 "bdist_egg", "-d", d]
    # Log files live in the build dir so failures can be inspected.
    with open(os.path.join(d, "stdout"), "wb") as out_log:
        with open(os.path.join(d, "stderr"), "wb") as err_log:
            with patch_sys_executable():
                retry_on_eintr(check_call, build_cmd,
                               stdout=out_log, stderr=err_log)
    egg = glob.glob(os.path.join(d, "*.egg"))[0]
    return egg, d
def _build_egg():
    """Build the project egg, capturing build output to log files.

    Changes CWD to the Scrapy project root (the directory containing
    scrapy.cfg), creating a default setup.py there if one does not exist,
    then runs ``setup.py bdist_egg`` into a fresh temporary directory
    with stdout/stderr redirected into that directory.

    Returns:
        tuple: (path to the built .egg file, temporary build directory).
    """
    closest = closest_file('scrapy.cfg')
    os.chdir(os.path.dirname(closest))
    if not os.path.exists('setup.py'):
        settings = get_config().get('settings', 'default')
        _create_default_setup_py(settings=settings)
    d = tempfile.mkdtemp(prefix="shub-deploy-")
    # Use context managers so the log files are closed even if the build
    # subprocess raises (the previous manual open/close leaked both
    # handles on error).
    with open(os.path.join(d, "stdout"), "wb") as o, \
            open(os.path.join(d, "stderr"), "wb") as e:
        retry_on_eintr(
            check_call,
            [sys.executable, 'setup.py', 'clean', '-a', 'bdist_egg', '-d', d],
            stdout=o, stderr=e)
    egg = glob.glob(os.path.join(d, '*.egg'))[0]
    return egg, d
def _build_egg():
    """Build the project egg under a patched sys.executable.

    Works from the Scrapy project root (creating a default setup.py when
    needed) and writes build stdout/stderr into the temporary build
    directory. Returns (egg path, build dir).
    """
    cfg_path = closest_file('scrapy.cfg')
    os.chdir(os.path.dirname(cfg_path))
    if not os.path.exists('setup.py'):
        _create_default_setup_py(
            settings=get_config().get('settings', 'default'))
    build_dir = tempfile.mkdtemp(prefix="shub-deploy-")
    stdout_path = os.path.join(build_dir, "stdout")
    stderr_path = os.path.join(build_dir, "stderr")
    with open(stdout_path, "wb") as out_log, \
            open(stderr_path, "wb") as err_log, \
            patch_sys_executable():
        command = [sys.executable, 'setup.py', 'clean', '-a',
                   'bdist_egg', '-d', build_dir]
        retry_on_eintr(check_call, command, stdout=out_log, stderr=err_log)
    egg_path = glob.glob(os.path.join(build_dir, '*.egg'))[0]
    return egg_path, build_dir
def load_shub_config(load_global=True, load_local=True, load_env=True):
    """
    Return a ShubConfig instance with ~/.scrapinghub.yml and the closest
    scrapinghub.yml already loaded
    """
    conf = ShubConfig()
    if load_global:
        # Migrate legacy global config before the first read.
        if not os.path.exists(GLOBAL_SCRAPINGHUB_YML_PATH):
            _migrate_to_global_scrapinghub_yml()
        conf.load_file(GLOBAL_SCRAPINGHUB_YML_PATH)
    env_apikey = os.environ.get('SHUB_APIKEY') if load_env else None
    if env_apikey is not None:
        conf.apikeys['default'] = env_apikey
    if load_local:
        closest_sh_yml = closest_file('scrapinghub.yml')
        if not closest_sh_yml:
            # No scrapinghub.yml nearby: fall back to scrapy.cfg migration.
            _migrate_and_load_scrapy_cfg(conf)
        else:
            conf.load_file(closest_sh_yml)
    return conf
def _migrate_and_load_scrapy_cfg(conf):
    """Load deploy targets from the closest scrapy.cfg into ``conf`` and
    migrate them to a scrapinghub.yml next to it.
    """
    scrapycfg_path = closest_file('scrapy.cfg')
    if not scrapycfg_path:
        return
    # Skip when scrapy.cfg carries nothing beyond the default targets.
    if get_scrapycfg_targets([scrapycfg_path]) == get_scrapycfg_targets():
        return
    conf.load_scrapycfg([scrapycfg_path])
    # Migrate to scrapinghub.yml
    sh_yml_path = os.path.join(
        os.path.dirname(scrapycfg_path), 'scrapinghub.yml')
    migrated = ShubConfig()
    migrated.load_scrapycfg([scrapycfg_path])
    try:
        migrated.save(sh_yml_path)
    except Exception:
        click.echo(PROJECT_MIGRATION_FAILED_BANNER, err=True)
    else:
        click.echo(PROJECT_MIGRATION_OK_BANNER, err=True)
def _deploy_wizard(conf, target='default'):
    """
    Ask user for project ID, ensure they have access to that project, and
    save it to given ``target`` in local ``scrapinghub.yml`` if desired.
    """
    closest_scrapycfg = closest_file('scrapy.cfg')
    # Double-checking to make deploy_wizard() independent of cli()
    if not closest_scrapycfg:
        raise NotFoundException("No Scrapy project found in this location.")
    closest_sh_yml = os.path.join(
        os.path.dirname(closest_scrapycfg), 'scrapinghub.yml')
    # Get default endpoint and API key (meanwhile making sure the user is
    # logged in)
    endpoint, apikey = conf.get_endpoint(0), conf.get_apikey(0)
    project = click.prompt("Target project ID", type=int)
    if not _has_project_access(project, endpoint, apikey):
        # Fixed typo in user-facing message: "acount" -> "account".
        raise InvalidAuthException(
            "The account you logged in to has no access to project {}. Please "
            "double-check the project ID and make sure you logged in to the "
            "correct account.".format(project),
        )
    conf.projects[target] = project
    if click.confirm("Save as default", default=True):
        try:
            with update_yaml_dict(closest_sh_yml) as conf_yml:
                default_entry = {'default': project}
                if 'projects' in conf_yml:
                    conf_yml['projects'].update(default_entry)
                else:
                    conf_yml['projects'] = default_entry
        except Exception:
            # Best-effort save: failing to persist the default must not
            # abort the deploy itself.
            click.echo(
                "There was an error while trying to write to scrapinghub.yml. "
                "Could not save project {} as default.".format(project),
            )
        else:
            click.echo(
                "Project {} was set as default in scrapinghub.yml. You can "
                "deploy to it via 'shub deploy' from now on.".format(project),
            )