def deploy(self, target='default'):
    """Deploy the built project archive to the given deploy target.

    Builds the project egg, optionally attaches a requirements file
    (falling back to the bundled requirements template when the
    configured file cannot be opened), and posts everything to the
    Scrapy Cloud ``scrapyd/addversion.json`` endpoint.

    :param target: name of the deploy target in the project config.
    :return: dict with a success message.
    :raises JsonApiGeneralException: for any of ``self.EXCEPTIONS``;
        the HTTP status is looked up from the exception's ``exit_code``
        (500 when unknown).
    """
    try:
        conf = self.config.get_target_conf(target)
        archive = self.build_archive()
        data = {
            'project': conf.project_id,
            'version': self.project.version,
            'stack': conf.stack,
        }
        files = [('egg', archive)]
        if conf.requirements_file:
            try:
                # Renamed from `file` to avoid shadowing the builtin.
                req_file = self.storage.open(conf.requirements_file)
            except OSError:
                # Fall back to the default requirements template when the
                # configured file is missing from storage.
                req_file = StringIO(templates['REQUIREMENTS'])
            files.append(('requirements', req_file))
        make_deploy_request(
            urljoin(conf.endpoint, 'scrapyd/addversion.json'),
            data, files, (conf.apikey, ''), False, False)
    except self.EXCEPTIONS as e:
        raise JsonApiGeneralException(
            e.format_message(),
            self.STATUS_CODES.get(getattr(e, 'exit_code', None), 500),
        )
    return {
        'message': 'Your deploy completed successfully',
    }
def _upload_egg(target, eggpath, project, version):
    """Upload a built egg to the target's ``addversion.json`` endpoint.

    :param target: deploy target config used to derive URL and auth.
    :param eggpath: filesystem path of the egg file to upload.
    :param project: Scrapy Cloud project id.
    :param version: version string to register the egg under.
    """
    data = {'project': project, 'version': version}
    # Context manager closes the egg handle even when the request fails;
    # the original left it open.
    with open(eggpath, 'rb') as egg_file:
        files = {'egg': ('project.egg', egg_file)}
        url = _url(target, 'addversion.json')
        auth = _get_auth(target)
        log('Deploying to Scrapy Cloud project "%s"' % project)
        make_deploy_request(url, data, files, auth)
def _deploy_dependency_egg(shub_apikey, project_id):
    """Build and upload the current dependency's egg to Scrapy Cloud.

    Resolves the dependency name/version/egg via the ``_get_*`` helpers,
    posts the egg to the dash eggs API, and logs where the deployed eggs
    can be inspected.

    :param shub_apikey: API key used as HTTP basic-auth user.
    :param project_id: Scrapy Cloud project id to attach the egg to.
    """
    name = _get_dependency_name()
    version = _get_dependency_version(name)
    egg_name, egg_path = _get_egg_info(name)
    url = 'https://dash.scrapinghub.com/api/eggs/add.json'
    data = {'project': project_id, 'name': name, 'version': version}
    # Close the egg file deterministically instead of leaking the handle.
    with open(egg_path, 'rb') as egg_file:
        files = {'egg': (egg_name, egg_file)}
        auth = (shub_apikey, '')
        log('Deploying dependency to Scrapy Cloud project "%s"' % project_id)
        make_deploy_request(url, data, files, auth)
    success = "Deployed eggs list at: https://dash.scrapinghub.com/p/%s/eggs"
    log(success % project_id)
def _upload_egg(endpoint, eggpath, project, version, auth, verbose, keep_log,
                stack=None, requirements_file=None, eggs=None):
    """Upload the main egg plus optional extra eggs and requirements.

    Extra egg entries may be glob patterns; matches never include the
    main egg itself. Directly-named (non-glob) entries are kept verbatim
    so a missing file surfaces as an error instead of being silently
    dropped.

    :param endpoint: API endpoint base used to build the addversion URL.
    :param eggpath: path of the main project egg.
    :param stack: optional stack identifier sent with the request.
    :param requirements_file: optional requirements file to attach.
    :param eggs: optional list of extra egg paths / glob patterns.
    :raises ShubException: when an extra egg or the requirements file
        cannot be opened.
    :return: result of ``make_deploy_request``.
    """
    expanded_eggs = []
    for e in (eggs or []):
        # Expand glob patterns, but make sure we don't swallow non-existing
        # eggs that were directly named
        # (glob.glob('non_existing_file') returns [])
        if any(['*' in e, '?' in e, '[' in e and ']' in e]):
            # Never match the main egg
            expanded_eggs.extend(
                [x for x in glob.glob(e)
                 if os.path.abspath(x) != os.path.abspath(eggpath)])
        else:
            expanded_eggs.append(e)
    data = {'project': project, 'version': version}
    if stack:
        data['stack'] = stack
    files = []
    try:
        for path in expanded_eggs:
            files.append(('eggs', open(path, 'rb')))
        if requirements_file:
            files.append(('requirements', open(requirements_file, 'rb')))
    except IOError as e:
        # Close everything already opened before propagating; the
        # original leaked these handles on the error path.
        for _, fp in files:
            fp.close()
        raise ShubException("%s %s" % (e.strerror, e.filename))
    # Kept outside the try so a failure opening the main egg raises the
    # plain IOError, exactly as before.
    files.append(('egg', open(eggpath, 'rb')))
    url = _url(endpoint, 'scrapyd/addversion.json')
    click.echo('Deploying to Scrapy Cloud project "%s"' % project)
    return make_deploy_request(url, data, files, auth, verbose, keep_log)
def _upload_egg(endpoint, eggpath, project, version, auth, verbose, keep_log,
                stack=None, requirements_file=None, eggs=None):
    """Post the project egg, optional extra eggs, and an optional
    requirements file to the endpoint's ``scrapyd/addversion.json``.

    :raises ShubException: when an extra egg or the requirements file
        cannot be opened.
    :return: result of ``make_deploy_request``.
    """
    payload = {'project': project, 'version': version}
    if stack:
        payload['stack'] = stack
    try:
        upload_files = []
        for extra_path in (eggs or []):
            upload_files.append(('eggs', open(extra_path, 'rb')))
        if requirements_file:
            upload_files.append(
                ('requirements', open(requirements_file, 'rb')))
    except IOError as err:
        raise ShubException("%s %s" % (err.strerror, err.filename))
    upload_files.append(('egg', open(eggpath, 'rb')))
    deploy_url = _url(endpoint, 'scrapyd/addversion.json')
    click.echo('Deploying to Scrapy Cloud project "%s"' % project)
    return make_deploy_request(
        deploy_url, payload, upload_files, auth, verbose, keep_log)
def _upload_egg(endpoint, eggpath, project, version, auth, verbose, keep_log,
                stack=None, requirements_file=None, eggs=None):
    """Upload the main egg plus optional extras, resolving requirements
    from a Pipfile, a poetry project, or a plain requirements file.

    :raises ShubException: when an extra egg or the requirements file
        cannot be opened.
    :return: result of ``make_deploy_request``.
    """
    main_egg_abs = os.path.abspath(eggpath)
    extra_eggs = []
    for pattern in (eggs or []):
        # Expand glob patterns, but keep directly-named eggs verbatim so
        # a missing file is noticed (glob.glob('non_existing') gives []).
        has_glob = '*' in pattern or '?' in pattern or (
            '[' in pattern and ']' in pattern)
        if has_glob:
            # Never match the main egg
            for match in glob.glob(pattern):
                if os.path.abspath(match) != main_egg_abs:
                    extra_eggs.append(match)
        else:
            extra_eggs.append(pattern)
    data = {'project': project, 'version': version}
    if stack:
        data['stack'] = stack
    try:
        files = [('eggs', open(p, 'rb')) for p in extra_eggs]
        # Resolve the requirements payload from whichever source applies.
        if _is_pipfile(requirements_file):
            req_source = _get_pipfile_requirements()
        elif _is_poetry(requirements_file):
            req_source = _get_poetry_requirements()
        elif requirements_file:
            req_source = open(requirements_file, 'rb')
        else:
            req_source = None
        if req_source:
            files.append(('requirements', req_source))
    except IOError as err:
        raise ShubException("%s %s" % (err.strerror, err.filename))
    files.append(('egg', open(eggpath, 'rb')))
    url = _url(endpoint, 'scrapyd/addversion.json')
    click.echo('Deploying to Scrapy Cloud project "%s"' % project)
    return make_deploy_request(url, data, files, auth, verbose, keep_log)
def _upload_egg(endpoint, eggpath, project, version, auth, verbose, keep_log,
                stack=None, requirements_file=None):
    """Upload the project egg (and an optional requirements file) to the
    endpoint's ``scrapyd/addversion.json`` API.

    :return: result of ``make_deploy_request``.
    """
    payload = {"project": project, "version": version}
    if stack:
        payload["stack"] = stack
    upload = {"egg": ("project.egg", open(eggpath, "rb"))}
    if requirements_file:
        upload["requirements"] = (
            "requirements.txt", open(requirements_file, "rb"))
    target_url = _url(endpoint, "scrapyd/addversion.json")
    click.echo('Deploying to Scrapy Cloud project "%s"' % project)
    return make_deploy_request(
        target_url, payload, upload, auth, verbose, keep_log)
def _upload_egg(endpoint, eggpath, project, version, auth, verbose, keep_log,
                stack=None, requirements_file=None):
    """Upload the project egg (and an optional requirements file) to the
    endpoint's ``scrapyd/addversion.json`` API.

    :param stack: optional stack identifier sent with the request.
    :param requirements_file: optional requirements file to attach.
    :return: result of ``make_deploy_request``.
    """
    # Local import: this chunk has no visible top-of-file import block.
    from contextlib import ExitStack
    data = {'project': project, 'version': version}
    if stack:
        data['stack'] = stack
    # ExitStack closes every opened handle even when the request raises;
    # the original leaked both file objects.
    with ExitStack() as open_files:
        files = {
            'egg': ('project.egg',
                    open_files.enter_context(open(eggpath, 'rb'))),
        }
        if requirements_file:
            files['requirements'] = (
                'requirements.txt',
                open_files.enter_context(open(requirements_file, 'rb')))
        url = _url(endpoint, 'scrapyd/addversion.json')
        click.echo('Deploying to Scrapy Cloud project "%s"' % project)
        return make_deploy_request(url, data, files, auth, verbose, keep_log)
def _upload_egg(endpoint, eggpath, project, version, auth, verbose, keep_log):
    """Upload the built project egg to the endpoint's ``addversion.json``.

    :param endpoint: API endpoint base used to build the URL.
    :param eggpath: filesystem path of the egg file to upload.
    :return: result of ``make_deploy_request``.
    """
    data = {'project': project, 'version': version}
    # Context manager closes the egg handle even when the request fails;
    # the original left it open. The file is opened before the echo, as
    # in the original, so a missing egg fails before any output.
    with open(eggpath, 'rb') as egg_file:
        files = {'egg': ('project.egg', egg_file)}
        url = _url(endpoint, 'addversion.json')
        click.echo('Deploying to Scrapy Cloud project "%s"' % project)
        return make_deploy_request(url, data, files, auth, verbose, keep_log)