def test_get_target_version(self, mock_lsh):
    """Check that target/config/version lookups delegate to the mocked layer."""
    get_target('mytarget', auth_required=False)
    get_target_conf('mytargetconf', auth_required=False)
    get_version()
    # All three wrappers should forward their arguments unchanged.
    lsh = mock_lsh.return_value
    lsh.get_target.assert_called_once_with('mytarget', auth_required=False)
    lsh.get_target_conf.assert_called_once_with(
        'mytargetconf', auth_required=False)
    lsh.get_version.assert_called_once_with()
def main(target, requirements_file):
    """Download every requirement listed in *requirements_file* as an egg
    and deploy the eggs to *target*.
    """
    project, endpoint, apikey = get_target(target)
    # Resolve the path before chdir'ing into the temporary egg directory.
    reqs_path = os.path.abspath(requirements_file)
    tmp_dir = _mk_and_cd_eggs_tmpdir()
    _download_egg_files(tmp_dir, reqs_path)
    decompress_egg_files()
    build_and_deploy_eggs(project, endpoint, apikey)
def main(target, from_url=None, git_branch=None, from_pypi=None):
    """Build and deploy an egg to *target*.

    The source is, in order of precedence: a PyPI package name
    (*from_pypi*), a git URL (*from_url*, optionally at *git_branch*),
    or the current working directory.
    """
    project, endpoint, apikey = get_target(target)

    if from_pypi:
        # PyPI path is self-contained: fetch, unpack, deploy, and stop.
        _fetch_from_pypi(from_pypi)
        decompress_egg_files()
        utils.build_and_deploy_eggs(project, endpoint, apikey)
        return

    if from_url:
        _checkout(from_url, git_branch)
    if not os.path.isfile('setup.py'):
        raise NotFoundException(
            "No setup.py -- are you running from a valid Python project?")
    utils.build_and_deploy_egg(project, endpoint, apikey)
def cli(spider, argument, set):
    """Schedule *spider* on its target project and print follow-up hints."""
    # Accept either "spider" (default target) or "target/spider".
    if '/' in spider:
        target, spider = spider.rsplit('/', 1)
    else:
        target = 'default'
    project, endpoint, apikey = get_target(target)
    job_key = schedule_spider(project, endpoint, apikey, spider, argument, set)
    watch_url = urljoin(
        endpoint,
        '../../p/{}/job/{}/{}'.format(*job_key.split('/')),
    )
    # For the default target the project id prefix can be omitted in
    # follow-up commands; otherwise show the full key.
    if target == 'default':
        short_key = job_key.split('/', 1)[1]
    else:
        short_key = job_key
    click.echo("Spider {} scheduled, job ID: {}".format(spider, job_key))
    click.echo("Watch the log on the command line:\n shub log -f {}"
               "".format(short_key))
    click.echo("or print items as they are being scraped:\n shub items -f "
               "{}".format(short_key))
    click.echo("or watch it running in Scrapinghub's web interface:\n {}"
               "".format(watch_url))
def get_job_specs(job):
    """
    Parse job identifier into valid job id and corresponding API key.

    With projects default=10 and external=20 defined in config:
    * 1/1 -> 10/1/1
    * 2/2/2 -> 2/2/2
    * external/2/2 -> 20/2/2
    """
    match = re.match(r'^((\w+)/)?(\d+/\d+)$', job)
    if match is None:
        raise BadParameterException(
            "Job ID {} is invalid. Format should be spiderid/jobid (inside a "
            "project) or target/spiderid/jobid, where target can be either a "
            "project ID or an identifier defined in scrapinghub.yml."
            "".format(job),
            param_hint='job_id',
        )
    # XXX: Lazy import due to circular dependency
    from shub.config import get_target
    # Group 2 is the optional target prefix; fall back to the default target.
    target = match.group(2) or 'default'
    project_id, endpoint, apikey = get_target(target)
    return "{}/{}".format(project_id, match.group(3)), apikey
def cli(target):
    """Download every egg deployed on *target* into a local zip archive."""
    project, endpoint, apikey = get_target(target)
    archive_name = 'eggs-%s.zip' % project
    fetch_eggs(project, endpoint, apikey, archive_name)
def cli(source_project, new_project, copy_main):
    """Copy the eggs of *source_project* over to *new_project*."""
    src_project, src_endpoint, src_apikey = get_target(source_project)
    dst_project, dst_endpoint, dst_apikey = get_target(new_project)
    copy_eggs(src_project, src_endpoint, src_apikey,
              dst_project, dst_endpoint, dst_apikey, copy_main)