Esempio n. 1
0
def _upload_egg(target, eggpath, project, version):
    """Upload the egg at *eggpath* to the target's ``addversion.json`` API.

    Returns the result of ``make_deploy_request`` so callers can test for
    success.
    """
    data = {'project': project, 'version': version}
    url = _url(target, 'addversion.json')
    auth = _get_auth(target)

    log('Deploying to Scrapy Cloud project "%s"' % project)
    # Keep the egg open only for the duration of the request; the previous
    # version leaked the file handle by never closing it.
    with open(eggpath, 'rb') as egg_file:
        files = {'egg': ('project.egg', egg_file)}
        return make_deploy_request(url, data, files, auth)
Esempio n. 2
0
def _upload_egg(target, eggpath, project, version):
    """Upload the egg at *eggpath* to the target's ``addversion.json`` API.

    Returns the result of ``make_deploy_request`` so callers can test for
    success.
    """
    data = {'project': project, 'version': version}
    url = _url(target, 'addversion.json')
    auth = _get_auth(target)

    log('Deploying to Scrapy Cloud project "%s"' % project)
    # Open inside ``with`` so the handle is closed even if the request
    # fails; the previous version never closed the file.
    with open(eggpath, 'rb') as egg_file:
        files = {'egg': ('project.egg', egg_file)}
        return make_deploy_request(url, data, files, auth)
Esempio n. 3
0
def main(project_id, requirements_file):
    """Deploy every egg listed in *requirements_file* to a Scrapy Cloud project."""
    target = scrapycfg.get_target('default')
    project_id = scrapycfg.get_project(target, project_id)
    apikey = find_api_key()
    log('Deploying requirements to project "%s"' % project_id)

    requirements_path = os.path.abspath(requirements_file)
    tmp_eggs_dir = _mk_and_cd_eggs_tmpdir()
    _download_egg_files(tmp_eggs_dir, requirements_path)
    decompress_egg_files()
    utils.build_and_deploy_eggs(project_id, apikey)
Esempio n. 4
0
def build_and_deploy_egg(project_id, apikey):
    """Build the current directory's egg and deploy it as a dependency."""
    log("Building egg in: %s" % os.getcwd())
    try:
        run("python setup.py bdist_egg")
    except CalledProcessError:
        # The setup.py may use plain distutils or a C extension; retry while
        # forcing setuptools so the bdist_egg command exists.
        log("Couldn't build an egg with vanilla setup.py, trying with setuptools...")
        fallback_cmd = "python -c  \"import setuptools; __file__='setup.py'; execfile('setup.py')\" bdist_egg"
        run(fallback_cmd)

    _deploy_dependency_egg(apikey, project_id)
Esempio n. 5
0
def _download_egg_files(eggs_dir, requirements_file):
    """Download all requirement packages into *eggs_dir* using pip."""
    src_dir = tempfile.mkdtemp(prefix='pipsrc')

    log('Downloading eggs...')
    try:
        cmd_template = ("pip install -d {eggs_dir} -r {requirements_file}"
                        " --src {editable_src_dir} --no-deps --no-use-wheel")
        cmd = cmd_template.format(eggs_dir=eggs_dir,
                                  editable_src_dir=src_dir,
                                  requirements_file=requirements_file)
        log(run(cmd))
    finally:
        # The editable-source checkout area is scratch space; always remove it.
        shutil.rmtree(src_dir, ignore_errors=True)
Esempio n. 6
0
def decompress_egg_files():
    """Decompress every supported egg archive found in the current directory."""
    decompressor_by_ext = _build_decompressor_by_ext_map()
    eggs = []
    for ext in decompressor_by_ext:
        eggs.extend(glob("*.%s" % ext))

    if not eggs:
        files = glob("*")
        err = "No egg files with a supported file extension were found. " "Files: %s" % ", ".join(files)
        raise ClickException(err)

    for egg_file in eggs:
        log("Uncompressing: %s" % egg_file)
        run("%s %s" % (decompressor_by_ext[_ext(egg_file)], egg_file))
Esempio n. 7
0
def _deploy_dependency_egg(apikey, project_id):
    """Deploy the current dependency's built egg to a Scrapy Cloud project.

    Resolves the dependency name, version and egg location, then posts the
    egg to the dash eggs API.
    """
    name = _get_dependency_name()
    version = _get_dependency_version(name)
    egg_name, egg_path = _get_egg_info(name)

    url = 'https://dash.scrapinghub.com/api/eggs/add.json'
    data = {'project': project_id, 'name': name, 'version': version}
    auth = (apikey, '')

    log('Deploying dependency to Scrapy Cloud project "%s"' % project_id)
    # Open inside ``with`` so the egg handle is closed even if the request
    # fails; the previous version leaked the descriptor.
    with open(egg_path, 'rb') as egg_file:
        files = {'egg': (egg_name, egg_file)}
        make_deploy_request(url, data, files, auth)
    success = "Deployed eggs list at: https://dash.scrapinghub.com/p/%s/eggs"
    log(success % project_id)
Esempio n. 8
0
def _deploy_dependency_egg(shub_apikey, project_id):
    """Deploy the current dependency's built egg to a Scrapy Cloud project.

    Resolves the dependency name, version and egg location, then posts the
    egg to the dash eggs API authenticated with *shub_apikey*.
    """
    name = _get_dependency_name()
    version = _get_dependency_version(name)
    egg_name, egg_path = _get_egg_info(name)

    url = 'https://dash.scrapinghub.com/api/eggs/add.json'
    data = {'project': project_id, 'name': name, 'version': version}
    auth = (shub_apikey, '')

    log('Deploying dependency to Scrapy Cloud project "%s"' % project_id)
    # Open inside ``with`` so the egg handle is closed even if the request
    # fails; the previous version leaked the descriptor.
    with open(egg_path, 'rb') as egg_file:
        files = {'egg': (egg_name, egg_file)}
        make_deploy_request(url, data, files, auth)
    success = "Deployed eggs list at: https://dash.scrapinghub.com/p/%s/eggs"
    log(success % project_id)
Esempio n. 9
0
def _deploy_dependency_egg(apikey, project_id):
    """Deploy the current dependency's built egg to a Scrapy Cloud project.

    Resolves the dependency name, version and egg location, then posts the
    egg to the dash eggs API.
    """
    name = _get_dependency_name()
    version = _get_dependency_version(name)
    egg_name, egg_path = _get_egg_info(name)

    url = "https://dash.scrapinghub.com/api/eggs/add.json"
    data = {"project": project_id, "name": name, "version": version}
    auth = (apikey, "")

    log('Deploying dependency to Scrapy Cloud project "%s"' % project_id)
    # Open inside ``with`` so the egg handle is closed even if the request
    # fails; the previous version leaked the descriptor.
    with open(egg_path, "rb") as egg_file:
        files = {"egg": (egg_name, egg_file)}
        make_deploy_request(url, data, files, auth)
    success = "Deployed eggs list at: https://dash.scrapinghub.com/p/%s/eggs"
    log(success % project_id)
Esempio n. 10
0
def fetch_eggs(project_id, api_key, destfile):
    """Download the project's egg bundle zip and write it to *destfile*."""
    auth = (api_key, '')
    url = "https://dash.scrapinghub.com/api/eggs/bundle.zip?project=%s" % project_id
    rsp = requests.get(url=url, auth=auth, stream=True, timeout=300)
    try:
        _assert_response_is_valid(rsp)

        log("Downloading eggs to %s" % destfile)

        with open(destfile, 'wb') as f:
            for chunk in rsp.iter_content(chunk_size=1024):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
                    f.flush()
    finally:
        # With stream=True the connection stays open until the body is fully
        # consumed or closed; release it on every path, including when the
        # response-validation check raises.
        rsp.close()
Esempio n. 11
0
def cli(context):
    """Log the user in by validating and storing an API key in .netrc."""
    if auth.get_key_netrc():
        log("You're already logged in. To change credentials, use 'shub logout' first.")
        return 0

    cfg_key = _find_cfg_key()
    entered_key = _prompt_for_key(suggestion=cfg_key)

    # Prefer the key the user typed; fall back to the one found in config
    # when the prompt was left empty.
    if entered_key and is_valid_key(entered_key):
        auth.write_key_netrc(entered_key)
    elif not entered_key and is_valid_key(cfg_key):
        auth.write_key_netrc(cfg_key)
    else:
        context.fail('Invalid key. Tip: your key must have 32 characters.')
    log('Success.')
Esempio n. 12
0
def make_deploy_request(url, data, files, auth):
    """POST a deploy request and stream its response lines to the log.

    Returns True on success. Raises AuthException on HTTP 403 and
    ClickException on any other request failure.
    """
    try:
        response = requests.post(url=url, auth=auth, data=data, files=files,
                                 stream=True, timeout=300)
        response.raise_for_status()
        for line in response.iter_lines():
            log(line)
        return True
    except requests.HTTPError as exc:
        failed = exc.response
        if failed.status_code == 403:
            raise AuthException()
        raise ClickException(
            "Deploy failed ({}):\n{}".format(failed.status_code, failed.text))
    except requests.RequestException as exc:
        raise ClickException("Deploy failed: {}".format(exc))
def _checkout(repo, git_branch=None):
    """Clone *repo* (git, hg or bzr) into a temp dir and chdir into the clone.

    When *git_branch* is given, check it out after cloning.
    """
    tmpdir = tempfile.mkdtemp(prefix='shub-deploy-egg-from-url')

    log("Cloning the repository to a tmp folder...")
    os.chdir(tmpdir)

    # Try each supported VCS in turn; stop at the first successful clone.
    for clone_cmd in ('git clone %s egg-tmp-clone',
                      'hg clone %s egg-tmp-clone',
                      'bzr branch %s egg-tmp-clone'):
        if _run(clone_cmd % repo) == 0:
            break
    else:
        error = "\nERROR: The provided repository URL is not valid: %s\n"
        fail(error % repo)

    os.chdir('egg-tmp-clone')

    if git_branch:
        if _run('git checkout %s' % git_branch) != 0:
            fail("Branch %s is not valid" % git_branch)
        log("%s branch was checked out" % git_branch)
Esempio n. 14
0
def cli(target, project, version, list_targets, debug, egg, build_egg):
    """Deploy the current Scrapy project (or a prebuilt egg) to Scrapy Cloud.

    Exits with status 0 on success and 1 on deploy failure.
    """
    exitcode = 0
    if not inside_project():
        log("Error: no Scrapy project found in this location")
        sys.exit(1)

    if list_targets:
        for name, target in _get_targets().items():
            click.echo(name)
        return

    tmpdir = None

    try:
        if build_egg:
            # Build-only mode: write the egg to the requested path.
            egg, tmpdir = _build_egg()
            log("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            target = _get_target(target)
            project = _get_project(target, project)
            version = _get_version(target, version)
            if egg:
                log("Using egg: %s" % egg)
            else:
                log("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            if _upload_egg(target, egg, project, version):
                click.echo(
                    "Run your spiders at: https://dash.scrapinghub.com/p/%s/" %
                    project)
            else:
                exitcode = 1
    finally:
        # Clean up the build dir on every path (unless --debug asked to keep
        # it); the previous version leaked it when the upload raised.
        if tmpdir:
            if debug:
                log("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)

    sys.exit(exitcode)
Esempio n. 15
0
def cli(target, project, version, list_targets, debug, egg, build_egg):
    """Deploy the current Scrapy project (or a prebuilt egg) to Scrapy Cloud."""
    if not inside_project():
        log("Error: no Scrapy project found in this location")
        sys.exit(1)

    if list_targets:
        for name, target in scrapycfg.get_targets().items():
            click.echo(name)
        return

    tmpdir = None

    try:
        if build_egg:
            # Build-only mode: write the egg to the requested path.
            egg, tmpdir = _build_egg()
            log("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            target = scrapycfg.get_target(target)
            project = scrapycfg.get_project(target, project)
            version = scrapycfg.get_version(target, version)
            apikey = target.get('username') or find_api_key()
            auth = (apikey, '')

            if egg:
                # Deploy the user-supplied egg (the original contained a
                # dead ``egg = egg`` self-assignment here, now removed).
                log("Using egg: %s" % egg)
            else:
                log("Packing version %s" % version)
                egg, tmpdir = _build_egg()

            _upload_egg(target, egg, project, version, auth)
            click.echo("Run your spiders at: https://dash.scrapinghub.com/p/%s/" % project)
    finally:
        # Remove the temporary build dir unless --debug asked to keep it.
        if tmpdir:
            if debug:
                log("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
Esempio n. 16
0
def cli(target, project, version, list_targets, debug, egg, build_egg):
    """Deploy the current Scrapy project (or a prebuilt egg) to Scrapy Cloud.

    Exits with status 0 on success and 1 on deploy failure.
    """
    exitcode = 0
    if not inside_project():
        log("Error: no Scrapy project found in this location")
        sys.exit(1)

    if list_targets:
        for name, target in _get_targets().items():
            click.echo(name)
        return

    tmpdir = None

    try:
        if build_egg:
            # Build-only mode: write the egg to the requested path.
            egg, tmpdir = _build_egg()
            log("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            target = _get_target(target)
            project = _get_project(target, project)
            version = _get_version(target, version)
            if egg:
                log("Using egg: %s" % egg)
            else:
                log("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            if _upload_egg(target, egg, project, version):
                click.echo("Run your spiders at: https://dash.scrapinghub.com/p/%s/" % project)
            else:
                exitcode = 1
    finally:
        # Clean up the build dir on every path (unless --debug asked to keep
        # it); the previous version leaked it when the upload raised.
        if tmpdir:
            if debug:
                log("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)

    sys.exit(exitcode)
def _fetch_from_pypi(pkg):
    """Download *pkg* from PyPI into a fresh temp dir and chdir into it."""
    tmpdir = tempfile.mkdtemp(prefix='shub-deploy-egg-from-pypi')

    log('Fetching %s from pypi' % pkg)
    download_cmd = "pip install -d %s %s --no-deps --no-use-wheel" % (tmpdir, pkg)
    log(run(download_cmd))
    log('Package fetched successfully')
    os.chdir(tmpdir)
Esempio n. 18
0
def make_deploy_request(url, data, files, auth):
    """POST a deploy request and stream its response lines to the log.

    Returns True on success and False on any request failure (the failure
    details are logged rather than raised).
    """
    try:
        response = requests.post(url=url, auth=auth, data=data, files=files,
                                 stream=True, timeout=300)
        response.raise_for_status()
        for line in response.iter_lines():
            log(line)
        return True
    except requests.HTTPError as exc:
        failed = exc.response
        log("Deploy failed ({}):".format(failed.status_code))
        log(failed.text)
        return False
    except requests.RequestException as exc:
        log("Deploy failed: {}".format(exc))
        return False