def build_cmd(target, version, skip_tests):
    """Build a Docker image for *target* at *version* from the project dir.

    Requires a ``Dockerfile`` in the project directory; streams the Docker
    build output, and raises if the build did not succeed.

    :param target: scrapinghub.yml target name whose image config is used.
    :param version: version string used to tag the image.
    :param skip_tests: when truthy, skip running ``test_cmd`` on the result.
    :raises shub_exceptions.BadParameterException: no Dockerfile present.
    :raises shub_exceptions.RemoteErrorException: build did not report success.
    """
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    config = load_shub_config()
    image = config.get_image(target)
    _create_setup_py_if_not_exists()
    image_name = utils.format_image_name(image, version)
    if not os.path.exists(os.path.join(project_dir, 'Dockerfile')):
        raise shub_exceptions.BadParameterException(
            "Dockerfile is not found, please use shub image 'init' command")
    is_built = False
    for data in client.build(path=project_dir, tag=image_name, decode=True):
        if 'stream' in data:
            utils.debug_log("{}".format(data['stream'][:-1]))
            # FIX: latch the first success marker instead of reassigning on
            # every stream event — previously a later stream line without
            # "Successfully built" overwrote the match with None, making
            # successful builds raise RemoteErrorException.
            if not is_built:
                is_built = re.search(
                    r'Successfully built ([0-9a-f]+)', data['stream'])
        elif 'error' in data:
            click.echo("Error {}:\n{}".format(
                data['error'], data['errorDetail']))
    if not is_built:
        raise shub_exceptions.RemoteErrorException(
            "Build image operation failed")
    click.echo("The image {} build is completed.".format(image_name))
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
def build_cmd(target, version, skip_tests, no_cache, filename='Dockerfile'):
    """Build the Docker image for *target*, tagged with *version*.

    Runs the scrapinghub.yml wizard if needed, requires *filename* (the
    Dockerfile) to exist in the project directory, shows build progress
    (verbose or compact), and finally runs the image tests unless
    *skip_tests* is set.

    :param target: scrapinghub.yml target whose image config is used.
    :param version: version string used to tag the image.
    :param skip_tests: when truthy, skip running ``test_cmd`` afterwards.
    :param no_cache: passed through to Docker as ``nocache``.
    :param filename: Dockerfile name relative to the project directory.
    :raises shub_exceptions.NotFoundException: the Dockerfile is missing.
    """
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)

    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image_name = utils.format_image_name(config.get_image(target), version)

    dockerfile_path = os.path.join(project_dir, filename)
    if not os.path.exists(dockerfile_path):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".format(target))

    # Verbose mode gets the full per-event log; otherwise a compact progress bar.
    progress_cls = (
        _LoggedBuildProgress if utils.is_verbose() else _BuildProgress)

    click.echo("Building {}.".format(image_name))
    build_events = client.build(
        path=project_dir,
        tag=image_name,
        decode=True,
        dockerfile=filename,
        nocache=no_cache,
    )
    progress_cls(build_events).show()
    click.echo("The image {} build is completed.".format(image_name))

    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
def build_cmd(target, version, skip_tests):
    """Build a Docker image for *target*, tagged with *version*.

    Runs the scrapinghub.yml wizard if needed, requires a ``Dockerfile`` in
    the project directory, shows build progress (verbose or compact), and
    finally runs the image tests unless *skip_tests* is set.

    :param target: scrapinghub.yml target whose image config is used.
    :param version: version string used to tag the image.
    :param skip_tests: when truthy, skip running ``test_cmd`` afterwards.
    :raises shub_exceptions.NotFoundException: the Dockerfile is missing.
    """
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)

    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image_name = utils.format_image_name(config.get_image(target), version)

    if not os.path.exists(os.path.join(project_dir, 'Dockerfile')):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".format(target))

    # Verbose mode gets the full per-event log; otherwise a compact progress bar.
    progress_cls = (
        _LoggedBuildProgress if utils.is_verbose() else _BuildProgress)

    click.echo("Building {}.".format(image_name))
    build_events = client.build(path=project_dir, tag=image_name, decode=True)
    progress_cls(build_events).show()
    click.echo("The image {} build is completed.".format(image_name))

    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
def cli(project, base_image, base_deps, add_deps, requirements):
    """Generate a Dockerfile for the Scrapy project and write it to disk.

    Reads ``scrapy.cfg`` to locate the settings module for *project*,
    renders ``DOCKERFILE_TEMPLATE`` with the chosen base image, system
    dependencies, environment and requirements, and saves the result as
    ``Dockerfile`` in the project directory.

    :raises shub_exceptions.BadConfigException: scrapy.cfg has no matching
        ``settings`` entry for *project*.
    :raises shub_exceptions.ShubException: a Dockerfile already exists.
    """
    project_dir = utils.get_project_dir()
    scrapy_config = shub_utils.get_config()
    if not scrapy_config.has_option('settings', project):
        raise shub_exceptions.BadConfigException(
            'Cannot find Scrapy project settings. Please ensure that current directory '
            'contains scrapy.cfg with settings section, see example at '
            'https://doc.scrapy.org/en/latest/topics/commands.html#default-structure-of-scrapy-projects')  # NOQA

    dockerfile_path = os.path.join(project_dir, 'Dockerfile')
    if os.path.exists(dockerfile_path):
        raise shub_exceptions.ShubException(
            'Found a Dockerfile in the project directory, aborting')

    settings_module = scrapy_config.get('settings', project)
    # Template placeholders; empty string for anything a formatter returned
    # as falsy, so substitution never injects "None".
    values = {
        'base_image': base_image,
        'system_deps': _format_system_deps(base_deps, add_deps),
        'system_env': _format_system_env(settings_module),
        'requirements': _format_requirements(project_dir, requirements),
    }
    values = {key: value or '' for key, value in values.items()}

    rendered = Template(DOCKERFILE_TEMPLATE).substitute(values)
    rendered = rendered.replace('\n\n', '\n')
    with open(dockerfile_path, 'w') as dockerfile:
        dockerfile.write(rendered)
    click.echo("Dockerfile is saved to {}".format(dockerfile_path))
def test_get_project_dir(self):
    """Without a config the lookup raises; with a fake config it returns the dir."""
    with self.assertRaises(BadConfigException):
        get_project_dir()
    with FakeProjectDirectory() as tmpdir:
        add_sh_fake_config(tmpdir)
        assert get_project_dir() == tmpdir