Example no. 1
0
def main(project_id, requirements_file):
    """Build and deploy the eggs listed in *requirements_file* to a project.

    Resolves the 'default' target and the effective project id, then
    downloads, decompresses, builds and deploys every egg from the
    requirements file.
    """
    # The API key lookup is independent of target/project resolution.
    apikey = find_api_key()
    target = scrapycfg.get_target('default')
    project_id = scrapycfg.get_project(target, project_id)
    log('Deploying requirements to project "%s"' % project_id)

    requirements_path = os.path.abspath(requirements_file)
    eggs_dir = _mk_and_cd_eggs_tmpdir()
    _download_egg_files(eggs_dir, requirements_path)
    decompress_egg_files()
    utils.build_and_deploy_eggs(project_id, apikey)
Example no. 2
0
def cli(target, project, version, list_targets, debug, egg, build_egg):
    """Deploy a Scrapy project (or a pre-built egg) to Scrapinghub.

    Modes:
      * ``list_targets`` — print configured target names and exit.
      * ``build_egg``    — build the egg locally, copy it to *build_egg*,
                           and do NOT upload.
      * otherwise        — resolve target/project/version, build (or reuse
                           *egg*), and upload it.

    Exits with status 1 when not run inside a Scrapy project.
    """
    if not inside_project():
        log("Error: no Scrapy project found in this location")
        sys.exit(1)

    if list_targets:
        # Only the names are used; iterating keys avoids the original
        # .items() loop that shadowed the `target` parameter with an
        # unused loop variable.
        for name in scrapycfg.get_targets():
            click.echo(name)
        return

    tmpdir = None

    try:
        if build_egg:
            # Build-only mode: write the egg to the requested path, no upload.
            egg, tmpdir = _build_egg()
            log("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            target = scrapycfg.get_target(target)
            project = scrapycfg.get_project(target, project)
            version = scrapycfg.get_version(target, version)
            # Prefer a key embedded in the target config; fall back to lookup.
            apikey = target.get('username') or find_api_key()
            auth = (apikey, '')

            if egg:
                # Reuse the caller-supplied egg (removed the no-op `egg = egg`).
                log("Using egg: %s" % egg)
            else:
                log("Packing version %s" % version)
                egg, tmpdir = _build_egg()

            _upload_egg(target, egg, project, version, auth)
            click.echo("Run your spiders at: https://dash.scrapinghub.com/p/%s/" % project)
    finally:
        # Keep the build dir for inspection when debugging; otherwise remove it.
        if tmpdir:
            if debug:
                log("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)