Code Example #1
File: build.py  Project: rowhit/shub
def build_cmd(target, version, skip_tests):
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image = config.get_image(target)
    image_name = utils.format_image_name(image, version)
    if not os.path.exists(os.path.join(project_dir, 'Dockerfile')):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".
            format(target))
    if utils.is_verbose():
        build_progress_cls = _LoggedBuildProgress
    else:
        build_progress_cls = _BuildProgress
    click.echo("Building {}.".format(image_name))
    events = client.build(path=project_dir, tag=image_name, decode=True)
    build_progress = build_progress_cls(events)
    build_progress.show()
    click.echo("The image {} build is completed.".format(image_name))
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
Code Example #2
File: build.py  Project: scrapinghub/shub
def build_cmd(target, version, skip_tests, no_cache, filename='Dockerfile'):
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image = config.get_image(target)
    image_name = utils.format_image_name(image, version)
    if not os.path.exists(os.path.join(project_dir, filename)):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".format(target))
    if utils.is_verbose():
        build_progress_cls = _LoggedBuildProgress
    else:
        build_progress_cls = _BuildProgress
    click.echo("Building {}.".format(image_name))
    events = client.build(
        path=project_dir,
        tag=image_name,
        decode=True,
        dockerfile=filename,
        nocache=no_cache
    )
    build_progress = build_progress_cls(events)
    build_progress.show()
    click.echo("The image {} build is completed.".format(image_name))
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
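Both build commands above use Click for their console output, so they are presumably registered as Click commands elsewhere in the project. The sketch below shows one plausible way Code Example #2 could be wired up as a CLI entry point; the decorator layout, option names, and defaults are assumptions for illustration, not shub's actual command definition.

# A minimal sketch, assuming Click wiring; option names and defaults are
# illustrative and are not copied from shub itself.
import click

@click.command()
@click.argument('target', required=False, default='default')
@click.option('-V', '--version', default=None, help='Version tag for the built image.')
@click.option('--skip-tests', is_flag=True, help='Skip testing the image after the build.')
@click.option('-n', '--no-cache', is_flag=True, help='Build without the Docker layer cache.')
@click.option('-f', '--filename', default='Dockerfile', help='Dockerfile to build from.')
def cli(target, version, skip_tests, no_cache, filename):
    # Delegates to the build_cmd shown in Code Example #2.
    build_cmd(target, version, skip_tests, no_cache, filename=filename)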
Code Example #3
File: deploy.py  Project: hcoura/shub
def cli(target, version, debug, egg, build_egg, verbose, keep_log,
        ignore_size):
    conf, image = load_shub_config(), None
    if not build_egg:
        create_scrapinghub_yml_wizard(conf, target=target)
    image = conf.get_target_conf(target).image
    if not image:
        deploy_cmd(target, version, debug, egg, build_egg, verbose, keep_log,
                   conf=conf)
    elif image.startswith(SH_IMAGES_REGISTRY):
        upload_cmd(target, version)
    else:
        raise BadParameterException(
            "Please use `shub image` commands to work with Docker registries "
            "other than Scrapinghub default registry.")
Code Example #4
File: deploy.py  Project: scrapinghub/shub
def cli(target, version, debug, egg, build_egg, verbose, keep_log,
        ignore_size):
    conf, image = load_shub_config(), None
    if not build_egg:
        create_scrapinghub_yml_wizard(conf, target=target)
    image = conf.get_target_conf(target).image
    if not image:
        deploy_cmd(target, version, debug, egg, build_egg, verbose, keep_log,
                   conf=conf)
    elif image.startswith(SH_IMAGES_REGISTRY):
        upload_cmd(target, version)
    else:
        raise BadParameterException(
            "Please use `shub image` commands to work with Docker registries "
            "other than Scrapinghub default registry.")
Code Example #5
File: test_utils.py  Project: scrapinghub/shub
def call_wizard():
    # conf, target and image are closed over from the enclosing test's scope.
    utils.create_scrapinghub_yml_wizard(
        conf, target=target, image=image)
Code Example #6
File: test_utils.py  Project: rowhit/shub
def call_wizard():
    utils.create_scrapinghub_yml_wizard(conf,
                                        target=target,
                                        image=image)
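Both test snippets above wrap the wizard call in a zero-argument helper so it can be invoked repeatedly, or handed to assertion helpers such as pytest.raises(), without restating its keyword arguments. The generic sketch below illustrates that wrapper pattern with a made-up stand-in for the wizard; none of it is taken from shub's test suite.

# Generic illustration of the zero-argument wrapper pattern used above.
# The stand-in wizard and its exception are hypothetical.
import pytest

class MissingProjectError(Exception):
    """Hypothetical error for a configuration without a project ID."""

def fake_wizard(conf, target, image):
    if 'project' not in conf:
        raise MissingProjectError(target)

def test_wrapper_keeps_assertions_short():
    conf, target, image = {}, 'default', True

    def call_wizard():
        fake_wizard(conf, target=target, image=image)

    with pytest.raises(MissingProjectError):
        call_wizard()    # fails while the config is still empty

    conf['project'] = 12345
    call_wizard()        # succeeds without repeating the arguments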