def _check_start_crawl_entry(image_name, docker_client):
    """Ensure the image provides the start-crawl entrypoint.

    Runs ``which start-crawl`` inside the image and raises
    shub_exceptions.NotFoundException when the command exits non-zero
    or prints nothing (i.e. the entrypoint is missing).
    """
    command = ['which', 'start-crawl']
    exit_code, output = _run_docker_command(docker_client, image_name, command)
    if exit_code == 0 and output:
        return
    raise shub_exceptions.NotFoundException(
        CONTRACT_CMD_NOT_FOUND_WARNING % 'start-crawl')
def build_cmd(target, version, skip_tests):
    """Build a Docker image for *target* and tag it with *version*.

    Runs the scrapinghub.yml wizard if needed, requires a Dockerfile in
    the project directory, streams build progress to the console, and —
    unless *skip_tests* is truthy — validates the built image contents
    via test_cmd.

    Raises shub_exceptions.NotFoundException if no Dockerfile exists.
    """
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image_name = utils.format_image_name(config.get_image(target), version)
    dockerfile_path = os.path.join(project_dir, 'Dockerfile')
    if not os.path.exists(dockerfile_path):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".
            format(target))
    # Verbose runs get the logged progress reporter; quiet runs the plain one.
    progress_cls = (_LoggedBuildProgress if utils.is_verbose()
                    else _BuildProgress)
    click.echo("Building {}.".format(image_name))
    events = client.build(path=project_dir, tag=image_name, decode=True)
    progress_cls(events).show()
    click.echo("The image {} build is completed.".format(image_name))
    # Validate the image content right after building it.
    if not skip_tests:
        test_cmd(target, version)
def _check_sh_entrypoint(image_name, docker_client):
    """Ensure the scrapinghub-entrypoint-scrapy package is installed.

    Runs ``pip show scrapinghub-entrypoint-scrapy`` inside the image and
    raises shub_exceptions.NotFoundException when the package is absent
    (non-zero exit code or empty output).
    """
    command = ['pip', 'show', 'scrapinghub-entrypoint-scrapy']
    exit_code, output = _run_docker_command(docker_client, image_name, command)
    if exit_code == 0 and output:
        return
    raise shub_exceptions.NotFoundException(SH_EP_SCRAPY_WARNING)
def load_status_url(status_id):
    """Return the status url stored in the status file under *status_id*.

    When *status_id* is not an int (e.g. None) and the file has entries,
    the most recent entry (highest id) is returned instead.

    Raises shub_exceptions.NotFoundException when the status file does
    not exist or the requested id is not recorded.
    """
    if not os.path.isfile(STATUS_FILE_LOCATION):
        raise shub_exceptions.NotFoundException(
            'Status file is not found at {}'.format(STATUS_FILE_LOCATION))
    data = _load_status_file(STATUS_FILE_LOCATION)
    if data and not isinstance(status_id, int):
        # No explicit id given: fall back to the newest recorded entry.
        latest_id = max(data)
        click.echo('Getting results for latest status id {}.'
                   .format(latest_id))
        return data[latest_id]
    if status_id in data:
        return data[status_id]
    raise shub_exceptions.NotFoundException(
        "Status url with id {} is not found".format(status_id))
def _check_list_spiders_entry(image_name, docker_client):
    """Ensure the image provides the list-spiders entrypoint.

    Runs ``which list-spiders`` inside the image and raises
    shub_exceptions.NotFoundException when the entrypoint is missing
    (non-zero exit code or empty output).
    """
    command = ['which', 'list-spiders']
    exit_code, output = _run_docker_command(docker_client, image_name, command)
    if exit_code == 0 and output:
        return
    raise shub_exceptions.NotFoundException(
        CONTRACT_CMD_NOT_FOUND_WARNING % 'list-spiders')
def _check_start_crawl_entry(image_name, docker_client):
    """Ensure the image provides the start-crawl entrypoint.

    Raises shub_exceptions.NotFoundException (with an upgrade hint) when
    ``which start-crawl`` exits non-zero or prints nothing.
    """
    exit_code, output = _run_docker_command(
        docker_client, image_name, ['which', 'start-crawl'])
    entry_missing = exit_code != 0 or not output
    if entry_missing:
        raise shub_exceptions.NotFoundException(
            "start-crawl command is not found in the image.\n" +
            SH_EP_SCRAPY_WARNING)
def _check_list_spiders_entry(image_name, docker_client):
    """Ensure the image provides the list-spiders entrypoint.

    Raises shub_exceptions.NotFoundException (with an upgrade hint) when
    ``which list-spiders`` exits non-zero or prints nothing.
    """
    exit_code, output = _run_docker_command(
        docker_client, image_name, ['which', 'list-spiders'])
    entry_missing = exit_code != 0 or not output
    if entry_missing:
        raise shub_exceptions.NotFoundException(
            "list-spiders command is not found in the image.\n"
            "Please upgrade your scrapinghub-entrypoint-scrapy(>=0.7.0)")
def get_image(self, target):
    """Return the image configured for *target*.

    Raises shub_exceptions.NotFoundException when no image is defined
    for the target in scrapinghub.yml.
    """
    if target in self.images:
        return self.images[target]
    raise shub_exceptions.NotFoundException(
        "Could not find image for %s. Please define"
        " it in your scrapinghub.yml." % target)
def _check_fallback_to_list_spiders(image_name, docker_client):
    """Fall back to the deprecated list-spiders entrypoint.

    If ``which list-spiders`` succeeds, prints a deprecation warning;
    otherwise raises shub_exceptions.NotFoundException reporting that
    neither shub-image-info nor list-spiders is available.
    """
    exit_code, output = _run_docker_command(
        docker_client, image_name, ['which', 'list-spiders'])
    if exit_code == 0 and output:
        # Entrypoint exists, but it is the legacy one — warn the user.
        click.echo(LIST_SPIDERS_DEPRECATED_WARNING)
    else:
        raise shub_exceptions.NotFoundException(
            CONTRACT_CMD_NOT_FOUND_WARNING % 'shub-image-info (& list-spiders)')
def _check_image_exists(image_name, docker_client):
    """Verify the image is present on the local machine.

    Raises shub_exceptions.NotFoundException when docker reports the
    image as not found.
    """
    # Imported lazily on purpose: when the docker lib is missing the
    # command already fails earlier, with a clearer error, while getting
    # a client in get_docker_client().
    from docker.errors import NotFound
    exists = True
    try:
        docker_client.inspect_image(image_name)
    except NotFound as exc:
        utils.debug_log("{}".format(exc))
        exists = False
    if not exists:
        raise shub_exceptions.NotFoundException(
            "The image doesn't exist yet, please use build command at first.")
def _check_image_size(image_name, docker_client):
    """Check that the image exists locally and its size is within limits.

    Raises:
        shub_exceptions.NotFoundException: the image is not built yet.
        shub_exceptions.CustomImageTooLargeException: the reported image
            size exceeds IMAGE_SIZE_LIMIT.
    """
    # Imported lazily on purpose: when the docker lib is missing the
    # command already fails earlier, with a clearer error, while getting
    # a client in get_docker_client().
    from docker.errors import NotFound
    try:
        size = docker_client.inspect_image(image_name).get('Size')
    except NotFound as exc:
        utils.debug_log("{}".format(exc))
        raise shub_exceptions.NotFoundException(
            "The image doesn't exist yet, please use build command at first.")
    # BUG FIX: the original tested isinstance(size, (int, long)); `long`
    # exists only on Python 2 and raises NameError on Python 3 as soon as
    # a truthy size is returned. Plain `int` covers both runtimes here.
    if size and isinstance(size, int) and size > IMAGE_SIZE_LIMIT:
        raise shub_exceptions.CustomImageTooLargeException(
            IMAGE_TOO_LARGE_WARNING)