def __init__(self, input_uri, settings):
    """Open a collections store for the collection named in *input_uri*.

    The URI has the form ``collections://<collection_name>``.  The owning
    project is resolved from the ``SHUB_JOBKEY`` environment variable
    (``KeyError`` if unset), so this assumes it runs inside a Scrapy Cloud
    job — NOTE(review): confirm that assumption against callers.
    """
    super().__init__(settings)
    job_key = parse_job_key(os.environ['SHUB_JOBKEY'])
    hub_project = ScrapinghubClient().get_project(job_key.project_id)
    name = input_uri.replace('collections://', '')
    self._store = hub_project.collections.get_store(name)
def validate_job_key(project_id, short_key):
    """Combine *project_id* and *short_key* into a full job key and parse it.

    Args:
        project_id: the project identifier (first segment of the full key).
        short_key: the remaining segments, expected as ``<spider_id>/<job_id>``.

    Returns:
        The parsed job key object produced by ``parse_job_key``.

    Raises:
        BadParameterException: if *short_key* is not exactly two segments,
            or if ``parse_job_key`` rejects the combined key.
        SubcommandException: for any other error raised while parsing.
    """
    # Check the shape first, before doing any work on the combined key.
    if len(short_key.split("/")) != 2:
        raise BadParameterException(
            "keys must be defined as <spider_id>/<job_id>"
        )
    job_key = "%s/%s" % (project_id, short_key)
    try:
        return parse_job_key(job_key)
    except ValueError as err:
        # Chain the cause so the original traceback is preserved.
        raise BadParameterException(str(err)) from err
    except Exception as err:
        raise SubcommandException(str(err)) from err
def test_parse_job_key_incorrect_length():
    """A key with fewer than three segments is rejected with ValueError."""
    pytest.raises(ValueError, parse_job_key, '123/1')
def test_parse_job_key_non_numeric():
    """A key containing a non-numeric segment is rejected with ValueError."""
    pytest.raises(ValueError, parse_job_key, '123/a/6')
def test_parse_job_key():
    """A well-formed key parses into its three string components."""
    parsed = parse_job_key('123/10/11')
    assert (parsed.project_id, parsed.spider_id, parsed.job_id) == \
        ('123', '10', '11')