Example 1
 def test_autocreate_empty_global_scrapinghub_yml(self):
     os.remove(self.globalpath)
     os.remove(self.globalscrapycfgpath)
     os.remove(self.netrcpath)
     load_shub_config()
     self.assertTrue(os.path.isfile(self.globalpath))
     with open(self.globalpath, 'r') as f:
         self.assertEqual(f.read(), "")
Example 2
 def test_remove_key(self):
     GLOBAL_SH_YML = textwrap.dedent("""
         apikeys:
             default: LOGGED_IN_KEY
     """)
     with self.runner.isolated_filesystem():
         with open('.scrapinghub.yml', 'w') as f:
             f.write(GLOBAL_SH_YML)
         conf = config.load_shub_config()
         self.assertIn('default', conf.apikeys)
         self.runner.invoke(logout.cli)
         conf = config.load_shub_config()
         self.assertNotIn('default', conf.apikeys)
Example 3
def cli():
    global_conf = load_shub_config(load_local=False, load_env=False)
    if 'default' in global_conf.apikeys:
        raise AlreadyLoggedInException

    conf = load_shub_config()
    key = _get_apikey(
        suggestion=conf.apikeys.get('default'),
        endpoint=global_conf.endpoints.get('default'),
    )
    with update_yaml_dict(GLOBAL_SCRAPINGHUB_YML_PATH) as conf:
        conf.setdefault('apikeys', {})
        conf['apikeys']['default'] = key
Example 4
def cli():
    global_conf = load_shub_config(load_local=False, load_env=False)
    if 'default' in global_conf.apikeys:
        raise AlreadyLoggedInException

    conf = load_shub_config()
    key = _get_apikey(
        suggestion=conf.apikeys.get('default'),
        endpoint=global_conf.endpoints.get('default'),
    )
    with update_yaml_dict() as conf:
        conf.setdefault('apikeys', {})
        conf['apikeys']['default'] = key
Example 5
def deploy_cmd(target, version, debug, egg, build_egg, verbose, keep_log,
               conf=None):
    tmpdir = None
    try:
        if build_egg:
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            conf = conf or load_shub_config()
            targetconf = conf.get_target_conf(target)
            version = version or targetconf.version
            auth = (targetconf.apikey, '')

            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()

            _upload_egg(targetconf.endpoint, egg, targetconf.project_id,
                        version, auth, verbose, keep_log, targetconf.stack,
                        targetconf.requirements_file, targetconf.eggs)
            click.echo("Run your spiders at: "
                       "https://app.scrapinghub.com/p/%s/"
                       "" % targetconf.project_id)
    finally:
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
Example 6
def build_cmd(target, version, skip_tests, no_cache, filename='Dockerfile'):
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image = config.get_image(target)
    image_name = utils.format_image_name(image, version)
    if not os.path.exists(os.path.join(project_dir, filename)):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".format(target))
    if utils.is_verbose():
        build_progress_cls = _LoggedBuildProgress
    else:
        build_progress_cls = _BuildProgress
    click.echo("Building {}.".format(image_name))
    events = client.build(
        path=project_dir,
        tag=image_name,
        decode=True,
        dockerfile=filename,
        nocache=no_cache
    )
    build_progress = build_progress_cls(events)
    build_progress.show()
    click.echo("The image {} build is completed.".format(image_name))
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
Example 7
def push_cmd(target, version, username, password, email, apikey, insecure, skip_tests):
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)

    client = utils.get_docker_client()
    config = load_shub_config()
    image = config.get_image(target)
    username, password = utils.get_credentials(
        username=username, password=password, insecure=insecure,
        apikey=apikey, target_apikey=config.get_apikey(target))

    if username:
        _execute_push_login(client, image, username, password, email)
    image_name = utils.format_image_name(image, version)
    click.echo("Pushing {} to the registry.".format(image_name))
    events = client.push(image_name, stream=True, decode=True,
                         insecure_registry=not bool(username))
    if utils.is_verbose():
        push_progress_cls = _LoggedPushProgress
    else:
        push_progress_cls = _PushProgress
    push_progress = push_progress_cls(events)
    push_progress.show()
    click.echo("The image {} pushed successfully.".format(image_name))
Example 8
def list_targets(ctx, param, value):
    if not value:
        return
    conf = load_shub_config()
    for name in conf.projects:
        click.echo(name)
    ctx.exit()
Example 9
 def test_envvar_precedence(self):
     _old_environ = dict(os.environ)
     os.environ['SHUB_APIKEY'] = 'key_env'
     conf = load_shub_config()
     self.assertEqual(conf.get_apikey('shproj'), 'key_env')
     os.environ.clear()
     os.environ.update(_old_environ)
Example 10
File: build.py Project: rowhit/shub
def build_cmd(target, version, skip_tests):
    config = load_shub_config()
    create_scrapinghub_yml_wizard(config, target=target, image=True)
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    image = config.get_image(target)
    image_name = utils.format_image_name(image, version)
    if not os.path.exists(os.path.join(project_dir, 'Dockerfile')):
        raise shub_exceptions.NotFoundException(
            "Dockerfile is not found and it is required because project '{}' is configured "
            "to deploy Docker images. Please add a Dockerfile that will be used to build "
            "the image and retry this command. If you want to migrate an existing Scrapy project "
            "you can use `shub image init` command to create a Dockerfile.".
            format(target))
    if utils.is_verbose():
        build_progress_cls = _LoggedBuildProgress
    else:
        build_progress_cls = _BuildProgress
    click.echo("Building {}.".format(image_name))
    events = client.build(path=project_dir, tag=image_name, decode=True)
    build_progress = build_progress_cls(events)
    build_progress.show()
    click.echo("The image {} build is completed.".format(image_name))
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
Example 11
def resolve_project_id(project_id=None):
    """
    Gets the project id from the following sources, in order of precedence:
    - the project_id parameter, if given
    - the PROJECT_ID environment variable
    - the SHUB_JOBKEY environment variable (set on Scrapy Cloud jobs)
    - the scrapinghub.yml file

    This lets code that needs the HS or Dash API run correctly configured,
    either locally or on Scrapy Cloud.
    """
    if project_id:
        return project_id

    # read from environment
    if os.environ.get('PROJECT_ID'):
        return os.environ.get('PROJECT_ID')

    # for ScrapyCloud jobs:
    if os.environ.get('SHUB_JOBKEY'):
        return os.environ['SHUB_JOBKEY'].split('/')[0]

    # read from scrapinghub.yml
    try:
        from shub.config import load_shub_config
        cfg = load_shub_config()
        project_id = cfg.get_project_id('default')
        if project_id:
            return project_id
    except Exception:
        logger.warning("Install shub package if want to access scrapinghub.yml")

    if not project_id:
        logger.warning('Project id not found. Use either PROJECT_ID env. variable or scrapinghub.yml default target.')
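
A minimal usage sketch of the precedence implemented above (hypothetical
project ids; assumes resolve_project_id is importable from the module shown):

import os

# An explicit argument wins over everything else.
assert resolve_project_id('12345') == '12345'

# Otherwise the PROJECT_ID environment variable is consulted first...
os.environ['PROJECT_ID'] = '67890'
assert resolve_project_id() == '67890'

# ...then SHUB_JOBKEY ('<project>/<spider>/<job>', set on Scrapy Cloud jobs).
del os.environ['PROJECT_ID']
os.environ['SHUB_JOBKEY'] = '67890/1/2'
assert resolve_project_id() == '67890'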
Example 12
def push_cmd(target, version, username, password, email, apikey, insecure,
             skip_tests):
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)

    client = utils.get_docker_client()
    config = load_shub_config()
    image = config.get_image(target)
    username, password = utils.get_credentials(
        username=username,
        password=password,
        insecure=insecure,
        apikey=apikey,
        target_apikey=config.get_apikey(target))

    if username:
        _execute_push_login(client, image, username, password, email)
    image_name = utils.format_image_name(image, version)
    click.echo("Pushing {} to the registry.".format(image_name))
    events = client.push(image_name,
                         stream=True,
                         decode=True,
                         insecure_registry=not bool(username))
    if utils.is_verbose():
        push_progress_cls = _LoggedPushProgress
    else:
        push_progress_cls = _PushProgress
    push_progress = push_progress_cls(events)
    push_progress.show()
    click.echo("The image {} pushed successfully.".format(image_name))
Example 13
def build_cmd(target, version, skip_tests):
    client = utils.get_docker_client()
    project_dir = utils.get_project_dir()
    config = load_shub_config()
    image = config.get_image(target)
    _create_setup_py_if_not_exists()
    image_name = utils.format_image_name(image, version)
    if not os.path.exists(os.path.join(project_dir, 'Dockerfile')):
        raise shub_exceptions.BadParameterException(
            "Dockerfile is not found, please use shub image 'init' command")
    is_built = False
    for data in client.build(path=project_dir, tag=image_name, decode=True):
        if 'stream' in data:
            utils.debug_log("{}".format(data['stream'][:-1]))
            is_built = re.search(r'Successfully built ([0-9a-f]+)',
                                 data['stream'])
        elif 'error' in data:
            click.echo("Error {}:\n{}".format(data['error'],
                                              data['errorDetail']))
    if not is_built:
        raise shub_exceptions.RemoteErrorException(
            "Build image operation failed")
    click.echo("The image {} build is completed.".format(image_name))
    # Test the image content after building it
    if not skip_tests:
        test_cmd(target, version)
Example 14
def cli():
    global_conf = load_shub_config(load_local=False, load_env=False)
    if 'default' not in global_conf.apikeys:
        click.echo("You are not logged in.")
        return 0

    with update_yaml_dict(GLOBAL_SCRAPINGHUB_YML_PATH) as conf:
        del conf['apikeys']['default']
Example 15
def cli():
    global_conf = load_shub_config(load_local=False, load_env=False)
    if 'default' not in global_conf.apikeys:
        click.echo("You are not logged in.")
        return 0

    with update_config() as conf:
        del conf['apikeys']['default']
Example 16
 def test_scrapinghub_ymls_read(self):
     conf = load_shub_config()
     self.assertEqual(conf.get_apikey('shproj'), 'key')
     self.assertEqual(
         conf.get_endpoint('externalproj'),
         'local_ext_endpoint',
     )
     self.assertEqual(conf.get_apikey('externalproj'), 'key_ext')
     with self.assertRaises(BadParameterException):
         conf.get_project_id('ext2')
Example 17
def test_cmd(target, version):
    config = load_shub_config()
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    docker_client = utils.get_docker_client()
    for check in [_check_image_size,
                  _check_start_crawl_entry,
                  _check_shub_image_info_entry]:
        check(image_name, docker_client)
Example 18
def list_cmd_full(target, silent, version):
    config = load_shub_config()
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    target_conf = config.get_target_conf(target)
    metadata = list_cmd(image_name, target_conf.project_id,
                        target_conf.endpoint, target_conf.apikey)
    for spider in metadata.get('spiders', []):
        click.echo(spider)
Example 19
def test_cmd(target, version):
    config = load_shub_config()
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    docker_client = utils.get_docker_client()
    for check in [
            _check_image_size, _check_start_crawl_entry,
            _check_shub_image_info_entry
    ]:
        check(image_name, docker_client)
Example 20
 def _check_conf():
     conf = load_shub_config()
     self.assertEqual(
         conf.get_target('123'),
         (123, 'dotsc_endpoint', 'netrc_key'),
     )
     self.assertEqual(conf.projects['ext2'], 'ext2/333')
     self.assertEqual(
         conf.get_target('ext2'),
         (333, 'ext2_endpoint', 'ext2_key'),
     )
Example 21
 def _check_conf():
     conf = load_shub_config()
     self.assertEqual(
         conf.get_target('default'),
         (222, 'scrapycfg_endpoint', 'key'),
     )
     self.assertEqual(
         conf.get_target('ext2'),
         (333, 'ext2_endpoint', 'ext2_key'),
     )
     self.assertEqual(conf.get_version(), 'ext2_ver')
Example 22
def format_image_name(image_name, image_tag):
    """Format image name using image tag"""
    parts = image_name.rsplit('/', 1)
    # check if tag is already here
    if ':' in parts[-1]:
        # change name to shorter version w/o existing tag
        click.echo('Please use --version param to specify tag')
        image_name = image_name.rsplit(':', 1)[0]
    if not image_tag:
        config = shub_config.load_shub_config()
        image_tag = config.get_version()
    return '{}:{}'.format(image_name, image_tag)
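
A quick sketch of what format_image_name returns (hypothetical image names;
the shub_config lookup only happens when image_tag is empty):

format_image_name('registry.example/user/project', '1.0')
# -> 'registry.example/user/project:1.0'

# A tag already embedded in the name is stripped (with a warning) first.
format_image_name('registry.example/user/project:old', '1.0')
# -> 'registry.example/user/project:1.0'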
Example 23
def list_cmd_full(target, silent, version):
    config = load_shub_config()
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    target_conf = config.get_target_conf(target)
    metadata = list_cmd(image_name,
                        target_conf.project_id,
                        target_conf.endpoint,
                        target_conf.apikey)
    for spider in metadata.get('spiders', []):
        click.echo(spider)
Example 24
    def test_automigrate_project_scrapy_cfg(self):
        def _check_conf():
            conf = load_shub_config()
            self.assertEqual(
                conf.get_target('default'),
                (222, 'scrapycfg_endpoint/', 'key'),
            )
            self.assertEqual(
                conf.get_target('ext2'),
                (333, 'ext2_endpoint/', 'ext2_key'),
            )
            self.assertEqual(
                conf.get_target('ext3'),
                (333, 'scrapycfg_endpoint/', 'key'),
            )
            self.assertEqual(
                conf.get_target('ext4'),
                (444, 'scrapycfg_endpoint/', 'ext4_key'),
            )
            self.assertEqual(conf.get_version(), 'ext2_ver')

        scrapycfg = """
            [deploy]
            project = 222
            url = scrapycfg_endpoint/scrapyd/

            [deploy:ext2]
            url = ext2_endpoint/scrapyd/
            project = 333
            username = ext2_key
            version = ext2_ver

            [deploy:ext3]
            project = 333

            [deploy:ext4]
            project = 444
            username = ext4_key
        """
        with open(self.localscrapycfgpath, 'w') as f:
            f.write(textwrap.dedent(scrapycfg))
        os.mkdir('project')
        os.chdir('project')
        conf = load_shub_config()
        with self.assertRaises(BadParameterException):
            conf.get_target('ext2')
        os.remove(self.localpath)
        # Loaded from scrapy.cfg
        _check_conf()
        # Same config should now be loaded from scrapinghub.yml
        self.assertTrue(os.path.isfile(self.localpath))
        _check_conf()
Example 25
def run_cmd(spider, args, settings, environment, version, keep_volume):
    try:
        target, spider = spider.rsplit('/', 1)
    except ValueError:
        target = 'default'

    config = load_shub_config()
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    docker_client = utils.get_docker_client()

    env = _format_environment(spider, args, settings, environment)
    _run_with_docker(docker_client, image_name, env, keep_volume)
Example 26
def deploy_cmd(target, version, username, password, email, apikey, insecure,
               async_):
    config = load_shub_config()
    target_conf = config.get_target_conf(target)
    endpoint, target_apikey = target_conf.endpoint, target_conf.apikey
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    username, password = utils.get_credentials(username=username,
                                               password=password,
                                               insecure=insecure,
                                               apikey=apikey,
                                               target_apikey=target_apikey)

    apikey = apikey or target_apikey
    params = _prepare_deploy_params(target_conf.project_id, version,
                                    image_name, endpoint, apikey, username,
                                    password, email)

    utils.debug_log('Deploy with params: {}'.format(params))
    req = requests.post(urljoin(endpoint, '/api/releases/deploy.json'),
                        data=params,
                        auth=(apikey, ''),
                        timeout=300,
                        allow_redirects=False)
    try:
        req.raise_for_status()
    except requests.exceptions.HTTPError:
        _handle_deploy_errors(req)

    click.echo("Deploy task results: {}".format(req))
    status_url = req.headers['location']

    status_id = utils.store_status_url(status_url,
                                       limit=STORE_N_LAST_STATUS_URLS)
    click.echo("You can check deploy results later with "
               "'shub image check --id {}'.".format(status_id))

    click.echo("Deploy results:")
    actual_state = _check_status_url(status_url)
    click.echo(" {}".format(actual_state))

    if not async_:
        status = actual_state['status']
        while status in SYNC_DEPLOY_WAIT_STATUSES:
            time.sleep(SYNC_DEPLOY_REFRESH_TIMEOUT)
            actual_state = _check_status_url(status_url)
            if actual_state['status'] != status:
                click.echo(" {}".format(actual_state))
                status = actual_state['status']
Example 27
def cli(target, version, debug, egg, build_egg, verbose, keep_log,
        ignore_size):
    conf, image = load_shub_config(), None
    if not build_egg:
        create_scrapinghub_yml_wizard(conf, target=target)
    image = conf.get_target_conf(target).image
    if not image:
        deploy_cmd(target, version, debug, egg, build_egg, verbose, keep_log,
                   conf=conf)
    elif image.startswith(SH_IMAGES_REGISTRY):
        upload_cmd(target, version)
    else:
        raise BadParameterException(
            "Please use `shub image` commands to work with Docker registries "
            "other than Scrapinghub default registry.")
Example 28
def deploy_cmd(target, version, username, password, email,
               apikey, insecure, async_):
    config = load_shub_config()
    target_conf = config.get_target_conf(target)
    endpoint, target_apikey = target_conf.endpoint, target_conf.apikey
    image = config.get_image(target)
    version = version or config.get_version()
    image_name = utils.format_image_name(image, version)
    username, password = utils.get_credentials(
        username=username, password=password, insecure=insecure,
        apikey=apikey, target_apikey=target_apikey)

    apikey = apikey or target_apikey
    params = _prepare_deploy_params(
        target_conf.project_id, version, image_name, endpoint, apikey,
        username, password, email)

    click.echo("Deploying {}".format(image_name))
    utils.debug_log('Deploy parameters: {}'.format(params))
    req = requests.post(
        urljoin(endpoint, '/api/releases/deploy.json'),
        data=params,
        auth=(apikey, ''),
        timeout=300,
        allow_redirects=False
    )
    if req.status_code == 400:
        reason = req.json().get('non_field_errors')
        raise ShubException('\n'.join(reason) if reason else req.text)
    req.raise_for_status()
    status_url = req.headers['location']
    status_id = utils.store_status_url(
        status_url, limit=STORE_N_LAST_STATUS_URLS)
    click.echo(
        "You can check deploy results later with "
        "'shub image check --id {}'.".format(status_id))
    if async_:
        return
    if utils.is_verbose():
        deploy_progress_cls = _LoggedDeployProgress
    else:
        deploy_progress_cls = _DeployProgress
    events = _convert_status_requests_to_events(status_url)
    deploy_progress = deploy_progress_cls(events)
    deploy_progress.show()
Example 29
 def _check_conf():
     conf = load_shub_config()
     self.assertEqual(
         conf.get_target('default'),
         (222, 'scrapycfg_endpoint/', 'key'),
     )
     self.assertEqual(
         conf.get_target('ext2'),
         (333, 'ext2_endpoint/', 'ext2_key'),
     )
     self.assertEqual(
         conf.get_target('ext3'),
         (333, 'scrapycfg_endpoint/', 'key'),
     )
     self.assertEqual(
         conf.get_target('ext4'),
         (444, 'scrapycfg_endpoint/', 'ext4_key'),
     )
     self.assertEqual(conf.get_version(), 'ext2_ver')
Example 30
def cli(target, version, debug, egg, build_egg, verbose, keep_log):
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")
    tmpdir = None
    try:
        if build_egg:
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            conf = load_shub_config()
            if target == "default" and target not in conf.projects:
                _deploy_wizard(conf)
            targetconf = conf.get_target_conf(target)
            version = version or targetconf.version
            auth = (targetconf.apikey, "")

            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()

            _upload_egg(
                targetconf.endpoint,
                egg,
                targetconf.project_id,
                version,
                auth,
                verbose,
                keep_log,
                targetconf.stack,
                targetconf.requirements_file,
            )
            click.echo("Run your spiders at: " "https://app.scrapinghub.com/p/%s/" "" % targetconf.project_id)
    finally:
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
Example 31
def cli(target, version, list_targets, debug, egg, build_egg,
        verbose, keep_log):
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")

    conf = load_shub_config()

    if list_targets:
        for name in conf.projects:
            click.echo(name)
        return

    tmpdir = None

    try:
        if build_egg:
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            project, endpoint, apikey = conf.get_target(target)
            version = version or conf.get_version()
            auth = (apikey, '')

            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()

            _upload_egg(endpoint, egg, project, version, auth,
                        verbose, keep_log)
            click.echo("Run your spiders at: https://dash.scrapinghub.com/p/%s/" % project)
    finally:
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
Example 32
def cli(target, version, debug, egg, build_egg, verbose, keep_log):
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")
    tmpdir = None
    try:
        if build_egg:
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            conf = load_shub_config()
            if target == 'default' and target not in conf.projects:
                _deploy_wizard(conf)
            targetconf = conf.get_target_conf(target)
            version = version or targetconf.version
            auth = (targetconf.apikey, '')

            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()

            _upload_egg(targetconf.endpoint, egg, targetconf.project_id,
                        version, auth, verbose, keep_log, targetconf.stack,
                        targetconf.requirements_file, targetconf.eggs)
            click.echo("Run your spiders at: "
                       "https://app.scrapinghub.com/p/%s/"
                       "" % targetconf.project_id)
    finally:
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
Example 33
 def test_no_global_scrapinghub_yml(self):
     os.remove(self.globalpath)
     conf = load_shub_config()
     with self.assertRaises(BadParameterException):
         conf.get_apikey('shproj')
     self.assertEqual(conf.get_apikey('localextproj'), 'key_ext')
Example 34
def load_release_config():
    """ shub.config.load_shub_config with replaced config class """
    shub_config.ShubConfig = ReleaseConfig
    return shub_config.load_shub_config()
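
Note that this replaces shub_config.ShubConfig for the whole process. A
variant that restores the original class afterwards could look like this
(a sketch, not part of the original code):

def load_release_config():
    """shub.config.load_shub_config with a temporarily replaced config class."""
    original_cls = shub_config.ShubConfig
    shub_config.ShubConfig = ReleaseConfig
    try:
        return shub_config.load_shub_config()
    finally:
        # Undo the monkey-patch even if loading raises.
        shub_config.ShubConfig = original_cls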
Example 35
 def test_local_scrapinghub_yml_in_parent_dir(self):
     subsubdir = os.path.join(self.tmpdir, 'sub/sub')
     os.makedirs(subsubdir)
     os.chdir(subsubdir)
     conf = load_shub_config()
     self.assertEqual(conf.get_apikey('externalproj'), 'key_ext')