def _build_egg():
    """Build the current Scrapy project into an egg file.

    Creates a default ``setup.py`` if the project lacks one, then runs
    ``setup.py clean -a bdist_egg`` into a fresh temporary directory.

    Returns:
        tuple: ``(egg_path, tmpdir)`` — path to the built ``.egg`` file and
        the temporary directory that contains it (caller is responsible for
        removing ``tmpdir``).

    Raises:
        NotFoundException: if not invoked from inside a Scrapy project.
        IndexError: if no ``.egg`` file was produced by the build.
    """
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")
    create_default_setup_py()
    dist_dir = tempfile.mkdtemp(prefix="shub-deploy-")
    run_python(['setup.py', 'clean', '-a', 'bdist_egg', '-d', dist_dir])
    # Grab whichever egg the build dropped into the fresh dist dir.
    built = glob.glob(os.path.join(dist_dir, '*.egg'))
    return built[0], dist_dir
def cli(target, version, debug, egg, build_egg, verbose, keep_log):
    """Deploy the current Scrapy project to Scrapy Cloud.

    Either writes a freshly built egg to ``build_egg`` (no upload), or
    uploads an egg (the one given via ``egg``, or a newly packed one) to the
    configured target's endpoint.

    Fix: removed the dead self-assignment ``egg = egg`` in the
    user-supplied-egg branch (a no-op left over from refactoring).

    Raises:
        NotFoundException: if not invoked from inside a Scrapy project.
    """
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")
    tmpdir = None
    try:
        if build_egg:
            # Build-only mode: pack the project and copy the egg out.
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            conf = load_shub_config()
            # First-time deploy of the default target: walk the user
            # through interactive configuration.
            if target == "default" and target not in conf.projects:
                _deploy_wizard(conf)
            targetconf = conf.get_target_conf(target)
            version = version or targetconf.version
            auth = (targetconf.apikey, "")
            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            _upload_egg(
                targetconf.endpoint, egg, targetconf.project_id, version,
                auth, verbose, keep_log, targetconf.stack,
                targetconf.requirements_file,
            )
            click.echo("Run your spiders at: "
                       "https://app.scrapinghub.com/p/%s/"
                       "" % targetconf.project_id)
    finally:
        # Clean up the build dir unless --debug asked to keep it.
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
def cli(target, version, list_targets, debug, egg, build_egg, verbose,
        keep_log):
    """Deploy the current Scrapy project (legacy dash.scrapinghub.com flow).

    With ``list_targets`` set, prints the configured target names and
    returns. Otherwise either writes a freshly built egg to ``build_egg``
    (no upload), or uploads an egg (the one given via ``egg``, or a newly
    packed one) to the resolved target.

    Fix: removed the dead self-assignment ``egg = egg`` in the
    user-supplied-egg branch (a no-op left over from refactoring).

    Raises:
        NotFoundException: if not invoked from inside a Scrapy project.
    """
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")
    conf = load_shub_config()
    if list_targets:
        for name in conf.projects:
            click.echo(name)
        return
    tmpdir = None
    try:
        if build_egg:
            # Build-only mode: pack the project and copy the egg out.
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            project, endpoint, apikey = conf.get_target(target)
            version = version or conf.get_version()
            auth = (apikey, '')
            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            _upload_egg(endpoint, egg, project, version, auth, verbose,
                        keep_log)
            click.echo("Run your spiders at: https://dash.scrapinghub.com/p/%s/" % project)
    finally:
        # Clean up the build dir unless --debug asked to keep it.
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)
def cli(target, version, debug, egg, build_egg, verbose, keep_log):
    """Deploy the current Scrapy project to Scrapy Cloud (with extra eggs).

    Either writes a freshly built egg to ``build_egg`` (no upload), or
    uploads an egg (the one given via ``egg``, or a newly packed one) to the
    configured target's endpoint, including the target's additional eggs.

    Fix: removed the dead self-assignment ``egg = egg`` in the
    user-supplied-egg branch (a no-op left over from refactoring).

    Raises:
        NotFoundException: if not invoked from inside a Scrapy project.
    """
    if not inside_project():
        raise NotFoundException("No Scrapy project found in this location.")
    tmpdir = None
    try:
        if build_egg:
            # Build-only mode: pack the project and copy the egg out.
            egg, tmpdir = _build_egg()
            click.echo("Writing egg to %s" % build_egg)
            shutil.copyfile(egg, build_egg)
        else:
            conf = load_shub_config()
            # First-time deploy of the default target: walk the user
            # through interactive configuration.
            if target == 'default' and target not in conf.projects:
                _deploy_wizard(conf)
            targetconf = conf.get_target_conf(target)
            version = version or targetconf.version
            auth = (targetconf.apikey, '')
            if egg:
                click.echo("Using egg: %s" % egg)
            else:
                click.echo("Packing version %s" % version)
                egg, tmpdir = _build_egg()
            _upload_egg(targetconf.endpoint, egg, targetconf.project_id,
                        version, auth, verbose, keep_log, targetconf.stack,
                        targetconf.requirements_file, targetconf.eggs)
            click.echo("Run your spiders at: "
                       "https://app.scrapinghub.com/p/%s/"
                       "" % targetconf.project_id)
    finally:
        # Clean up the build dir unless --debug asked to keep it.
        if tmpdir:
            if debug:
                click.echo("Output dir not removed: %s" % tmpdir)
            else:
                shutil.rmtree(tmpdir, ignore_errors=True)