def reset(self):
    """Clear any recorded invocation and restore the canned dummy results."""
    self.__invokedMethod = None
    self.__parameters = {}
    self.__dummy_results = {"filename": ["dummy_filename"],
                            "artifactid": ["dummy_artifact"],
                            "observationid": Table({'a': np.arange(1)})}
    self.__job = Job(async_job=False)
    self.__job.set_results(self.__dummy_results)
def startElement(self, name, attrs):
    """SAX hook: open a new Job on a jobid element, otherwise toggle
    character-data capture for recognised item ids."""
    if not self.__check_item_id(UWS_JOBID, name):
        if not self.__check_valid_item_id(name):
            self.__stop_reading_data()
            return
        self.__start_reading_data()
        # Parameter elements carry their key in the "id" attribute.
        if self.__check_item_id(UWS_PARAMETER, name):
            self.__paramKey = attrs.get("id")
        return
    self.__job = Job(self.__async)
    self.__jobs.append(self.__job)
    self.__start_reading_data()
def test_job_get_results(self):
    """get_results must raise while HTTP replies fail and parse the
    VOTable once both phase and data endpoints answer 200/OK."""
    jobid = "12345"
    job = Job(async_job=True)
    job.jobid = jobid
    job.parameters['format'] = "votable"
    # Phase-polling endpoint first answers with a server error.
    phase_response = DummyResponse()
    phase_response.set_status_code(500)
    phase_response.set_message("ERROR")
    phase_response.set_data(method='GET', context=None, body='FINISHED',
                            headers=None)
    conn_handler = DummyConnHandler()
    conn_handler.set_response(f"async/{jobid}/phase", phase_response)
    job.connHandler = conn_handler
    with pytest.raises(Exception):
        job.get_results()
    # Phase now OK, but the results endpoint still errors out.
    phase_response.set_status_code(200)
    phase_response.set_message("OK")
    data_response = DummyResponse()
    data_response.set_status_code(500)
    data_response.set_message("ERROR")
    results_body = utils.read_file_content(data_path('result_1.vot'))
    data_response.set_data(method='GET', context=None, body=results_body,
                           headers=None)
    conn_handler.set_response(f"async/{jobid}/results/result", data_response)
    with pytest.raises(Exception):
        job.get_results()
    # Finally everything succeeds: validate the parsed table.
    data_response.set_status_code(200)
    data_response.set_message("OK")
    results = job.get_results()
    assert len(results) == 3, \
        "Num rows. Expected %d, found %d" % (3, len(results))
    assert len(results.columns) == 4, \
        "Num cols. Expected %d, found %d" % (4, len(results.columns))
    for column in ['alpha', 'delta', 'source_id', 'table1_oid']:
        if column not in results.colnames:
            self.fail(column + " column name not found"
                      + str(results.colnames))
def test_job_get_results(self):
    """Check error propagation and final table parsing in get_results()."""
    jobid = "12345"
    job = Job(async_job=True)
    job.jobid = jobid
    job.parameters['format'] = "votable"

    def failing_response(body):
        # A DummyResponse preloaded with a 500/ERROR status and the body.
        resp = DummyResponse()
        resp.set_status_code(500)
        resp.set_message("ERROR")
        resp.set_data(method='GET', context=None, body=body, headers=None)
        return resp

    connHandler = DummyConnHandler()
    responseCheckPhase = failing_response('FINISHED')
    connHandler.set_response("async/" + str(jobid) + "/phase",
                             responseCheckPhase)
    job.connHandler = connHandler
    with pytest.raises(Exception):
        job.get_results()

    responseCheckPhase.set_status_code(200)
    responseCheckPhase.set_message("OK")
    jobContent = utils.read_file_content(data_path('result_1.vot'))
    responseGetData = failing_response(jobContent)
    connHandler.set_response("async/" + str(jobid) + "/results/result",
                             responseGetData)
    with pytest.raises(Exception):
        job.get_results()

    responseGetData.set_status_code(200)
    responseGetData.set_message("OK")
    res = job.get_results()
    assert len(res) == 3, \
        "Num rows. Expected %d, found %d" % (3, len(res))
    assert len(res.columns) == 4, \
        "Num cols. Expected %d, found %d" % (4, len(res.columns))
    for cn in ['alpha', 'delta', 'source_id', 'table1_oid']:
        if cn not in res.colnames:
            self.fail(cn + " column name not found" + str(res.colnames))
def startElement(self, name, attrs):
    """SAX entry point: start a fresh Job for a jobid tag, buffer data
    for other known tags, stop buffering for anything else."""
    if self.__check_item_id(UWS_JOBID, name):
        new_job = Job(self.__async)
        self.__job = new_job
        self.__jobs.append(new_job)
        self.__start_reading_data()
        return
    if not self.__check_valid_item_id(name):
        self.__stop_reading_data()
        return
    self.__start_reading_data()
    # Parameter elements carry their key in the "id" attribute.
    if self.__check_item_id(UWS_PARAMETER, name):
        self.__paramKey = attrs.get("id")
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False, dump_to_file=False,
               upload_resource=None, upload_table_name=None):
    """Record the invocation as 'launch_job' together with every argument
    and return a dummy synchronous Job."""
    self.__invokedMethod = 'launch_job'
    for key, value in (('query', query),
                       ('name', name),
                       ('output_file', output_file),
                       ('output_format', output_format),
                       ('verbose', verbose),
                       ('dump_to_file', dump_to_file),
                       ('upload_resource', upload_resource),
                       ('upload_table_name', upload_table_name)):
        self._parameters[key] = value
    return Job(False)
def query_criteria(self, calibration_level=None, data_product_type=None,
                   intent=None, obs_collection=None, instrument_name=None,
                   filters=None, async_job=False, output_file=None,
                   output_format="votable", verbose=False):
    """Record the invocation as 'query_criteria' with all arguments and
    return a dummy asynchronous Job."""
    self.__invokedMethod = 'query_criteria'
    self._parameters.update({
        'calibration_level': calibration_level,
        'data_product_type': data_product_type,
        'intent': intent,
        'obs_collection': obs_collection,
        'instrument_name': instrument_name,
        'filters': filters,
        'async_job': async_job,
        'output_file': output_file,
        'output_format': output_format,
        'verbose': verbose,
    })
    return Job(True)
def test_job_basic(self):
    """Check sync/async flags and every setter/getter pair on Job."""
    # A sync job must report is_sync() True and is_async() False.
    job = Job(async_job=False)
    res = job.is_sync()
    assert res, \
        "Sync job, expected: %s, found: %s" % (str(True), str(res))
    res = job.is_async()
    assert res is False, \
        "Sync job, expected: %s, found: %s" % (str(False), str(res))
    # An async job reports the opposite.
    job = Job(async_job=True)
    res = job.is_sync()
    assert res is False, \
        "Async job, expected: %s, found: %s" % (str(False), str(res))
    res = job.is_async()
    assert res, \
        "Async job, expected: %s, found: %s" % (str(True), str(res))
    # No results have been set yet, so get_results() must raise.
    with pytest.raises(AttributeError):
        job.get_results()
    # parameters
    query = "query"
    jobid = "jobid"
    remoteLocation = "remoteLocation"
    phase = "phase"
    outputFile = "outputFile"
    responseStatus = "responseStatus"
    responseMsg = "responseMsg"
    runid = "runid"
    ownerid = "ownerid"
    startTime = "startTime"
    endTime = "endTime"
    creationTime = "creationTime"
    executionDuration = "executionDuration"
    destruction = "destruction"
    locationid = "locationid"
    name = "name"
    quote = "quote"
    # Populate every attribute through its setter...
    job = Job(async_job=False, query=query)
    job.set_jobid(jobid)
    job.set_remote_location(remoteLocation)
    job.set_phase(phase)
    job.set_output_file(outputFile)
    job.set_response_status(responseStatus, responseMsg)
    job.set_runid(runid)
    job.set_ownerid(ownerid)
    job.set_start_time(startTime)
    job.set_end_time(endTime)
    job.set_creation_time(creationTime)
    job.set_execution_duration(executionDuration)
    job.set_destruction(destruction)
    job.set_locationid(locationid)
    job.set_name(name)
    job.set_quote(quote)
    # ...and read each one back through its getter.
    assert job.get_query() == query, \
        "query, expected: %s, found: %s" % (query, job.get_query())
    assert job.get_jobid() == jobid, \
        "jobid, expected: %s, found: %s" % (jobid, job.get_jobid())
    assert job.get_remote_location() == remoteLocation, \
        "remoteLocation, expected: %s, found: %s" % (remoteLocation,
                                                     job.get_remote_location())
    assert job.get_phase() == phase, \
        "phase, expected: %s, found: %s" % (phase, job.get_phase())
    assert job.get_output_file() == outputFile, \
        "outputFile, expected: %s, found: %s" % (outputFile,
                                                 job.get_output_file())
    assert job.get_response_status() == responseStatus, \
        "responseStatus, expected: %s, found: %s" % (responseStatus,
                                                     job.get_response_status())
    assert job.get_response_msg() == responseMsg, \
        "responseMsg, expected: %s, found: %s" % (responseMsg,
                                                  job.get_response_msg())
    # A sync job created this way still has no results set.
    assert job.get_results() is None, \
        "results, expected: %s, found: %s" % (str(None), job.get_results())
    assert job.get_runid() == runid, \
        "runid, expected: %s, found: %s" % (runid, job.get_runid())
    assert job.get_ownerid() == ownerid, \
        "ownerid, expected: %s, found: %s" % (ownerid, job.get_ownerid())
    assert job.get_start_time() == startTime, \
        "startTime, expected: %s, found: %s" % (startTime,
                                                job.get_start_time())
    assert job.get_end_time() == endTime, \
        "endTime, expected: %s, found: %s" % (endTime, job.get_end_time())
    assert job.get_creation_time() == creationTime, \
        "creationTime, expected: %s, found: %s" % (creationTime,
                                                   job.get_creation_time())
    assert job.get_execution_duration() == executionDuration, \
        "executionDuration, expected: %s, found: %s" % (
            executionDuration, job.get_execution_duration())
    assert job.get_destruction() == destruction, \
        "destruction, expected: %s, found: %s" % (destruction,
                                                  job.get_destruction())
    assert job.get_locationid() == locationid, \
        "locationid, expected: %s, found: %s" % (locationid,
                                                 job.get_locationid())
    assert job.get_name() == name, \
        "name, expected: %s, found: %s" % (name, job.get_name())
    assert job.get_quote() == quote, \
        "quote, expected: %s, found: %s" % (quote, job.get_quote())
def __reading_job(self, name, attrs):
    """On a jobref element, allocate the Job, take its id from the
    element attributes, and switch the parser to phase reading."""
    if not self.__check_item_id(UWS_JOBREF, name):
        return
    self.__job = Job(self.__async)
    self.__job.set_jobid(attrs.get("id"))
    self.__status = READING_PHASE
def test_job_basic(self):
    """A freshly created async job has no results: get_results must raise."""
    # Create the job outside the raises-block so a constructor failure is
    # reported as an error rather than mistaken for the expected exception.
    job = Job(async_job=True)
    with pytest.raises(AttributeError):
        job.get_results()
def launch_job_async(self, query, name=None, output_file=None,
                     output_format="votable", verbose=False,
                     dump_to_file=False, background=False,
                     upload_resource=None, upload_table_name=None):
    """Launches an asynchronous job

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    output_file : str, optional, default None
        file name where the results are saved if dumpToFile is True.
        If this parameter is not provided, the jobid is used instead
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    background : bool, optional, default 'False'
        when the job is executed in asynchronous mode, this flag
        specifies whether the execution will wait until results
        are available
    upload_resource: str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name: str, required if uploadResource is provided,
    default None
        resource temporary table name associated to the uploaded resource

    Returns
    -------
    A Job object
    """
    if verbose:
        print("Launched query: '"+str(query)+"'")
    if upload_resource is not None:
        # An upload always needs the temporary table name to address it.
        if upload_table_name is None:
            raise ValueError(
                "Table name is required when a resource is uploaded")
        response = self.__launchJobMultipart(query, upload_resource,
                                             upload_table_name,
                                             output_format, "async",
                                             verbose, name)
    else:
        response = self.__launchJob(query, output_format, "async",
                                    verbose, name)
    # A successful async submission is expected to answer 303 (See Other).
    isError = self.__connHandler.check_launch_response_status(response,
                                                              verbose, 303)
    job = Job(async_job=True, query=query, connhandler=self.__connHandler)
    suitableOutputFile = self.__getSuitableOutputFile(True, output_file,
                                                      response.getheaders(),
                                                      isError,
                                                      output_format)
    job.set_output_file(suitableOutputFile)
    job.set_response_status(response.status, response.reason)
    job.set_output_format(output_format)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Preserve the server's error payload for inspection.
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        raise requests.exceptions.HTTPError(response.reason)
    else:
        # The job id comes from the redirect "location" header.
        location = self.__connHandler.find_header(
            response.getheaders(), "location")
        jobid = self.__getJobId(location)
        if verbose:
            print("job " + str(jobid) + ", at: " + str(location))
        job.set_jobid(jobid)
        job.set_remote_location(location)
        if not background:
            if verbose:
                print("Retrieving async. results...")
            # saveResults or getResults will block (not background)
            if dump_to_file:
                job.save_results(verbose)
            else:
                job.get_results()
                print("Query finished.")
    return job
class JobListSaxParser(xml.sax.ContentHandler):
    '''
    SAX handler that parses a UWS job-list document, producing one Job
    (with id and phase populated) per jobref element.
    '''

    def __init__(self, async_job=False):
        '''
        Constructor.

        Parameters
        ----------
        async_job : bool, optional, default False
            whether the parsed jobs are asynchronous
        '''
        self.__internal_init()
        self.__async = async_job

    def __internal_init(self):
        # Reset all parser state: character buffering, the job being
        # built, the collected job list and the state-machine status.
        self.__concatData = False
        self.__charBuffer = []
        self.__job = None
        self.__jobs = []
        self.__status = 0
        self.__paramKey = None
        self.__async = False

    def __create_string_from_buffer(self):
        # Join the buffered character chunks into one string.
        return Utils.util_create_string_from_buffer(self.__charBuffer)

    def __check_item_id(self, itemId, tmpValue):
        # Case-insensitive element-name comparison.
        if str(itemId).lower() == str(tmpValue).lower():
            return True
        return False

    def __start_reading_data(self):
        # Begin capturing character data into a fresh buffer.
        self.__concatData = True
        del self.__charBuffer[:]

    def __stop_reading_data(self):
        self.__concatData = False

    def parseData(self, data):
        """Parse a job-list XML stream and return the list of Jobs found."""
        self.__status = READING_JOB
        xml.sax.parse(data, self)
        return self.__jobs

    def startElement(self, name, attrs):
        # Dispatch on the current state-machine status.
        if self.__status == READING_JOB:
            self.__reading_job(name, attrs)
        elif self.__status == READING_PHASE:
            self.__reading_phase(name, attrs)

    def endElement(self, name):
        if self.__status == READING_JOB:
            self.__end_job(name)
        elif self.__status == READING_PHASE:
            self.__end_phase(name)

    def characters(self, content):
        # Only accumulate text while inside an element we care about.
        if self.__concatData:
            self.__charBuffer.append(content)

    def __reading_job(self, name, attrs):
        # A jobref element starts a new Job; its id attribute is the jobid.
        if self.__check_item_id(UWS_JOBREF, name):
            self.__job = Job(self.__async)
            self.__job.set_jobid(attrs.get("id"))
            self.__status = READING_PHASE

    def __end_job(self, name):
        # Closing jobref: the Job is complete, add it to the result list.
        if self.__check_item_id(UWS_JOBREF, name):
            self.__jobs.append(self.__job)

    def __reading_phase(self, name, attrs):
        if self.__check_item_id(UWS_PHASE, name):
            self.__start_reading_data()

    def __end_phase(self, name):
        # Closing phase: store the buffered text and go back to job mode.
        if self.__check_item_id(UWS_PHASE, name):
            self.__job.set_phase(self.__create_string_from_buffer())
            self.__status = READING_JOB
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False,
               dump_to_file=False, upload_resource=None,
               upload_table_name=None, lang=None, queue=None):
    """Launch a synchronous TAP job and return its Job object.

    Follows a 303 redirect when the service issues one, checks the final
    response status, and either stores the parsed results on the Job or
    dumps the raw response to ``output_file``.

    Raises
    ------
    ValueError
        if ``upload_resource`` is given without ``upload_table_name``.
    requests.exceptions.HTTPError
        if no redirect location is provided or the launch fails.
    """
    if verbose:
        print("Launched query: '" + str(query) + "'")
    if upload_resource is not None:
        if upload_table_name is None:
            raise ValueError(
                "Table name is required when a resource is uploaded")
        response = self._Tap__launchJobMultipart(query, upload_resource,
                                                 upload_table_name,
                                                 output_format, "sync",
                                                 verbose, name, lang, queue)
    else:
        # NOTE(review): this uses self.__launchJob (mangled to this class)
        # while the rest of the method uses the explicit _Tap__ prefix --
        # presumably both resolve to the same helper; confirm.
        response = self.__launchJob(query, output_format, "sync",
                                    verbose, name, lang, queue)
    # handle redirection
    if response.status == 303:
        # redirection
        if verbose:
            print("Redirection found")
        location = self._Tap__connHandler.find_header(
            response.getheaders(), "location")
        if location is None:
            raise requests.exceptions.HTTPError(
                "No location found after redirection was received (303)")
        if verbose:
            # Fixed: print() takes no format arguments; the previous
            # 'print("Redirect to %s", location)' printed a literal '%s'.
            print("Redirect to %s" % location)
        subcontext = self._Tap__extract_sync_subcontext(location)
        response = self._Tap__connHandler.execute_get(subcontext)
    job = Job(async_job=False, query=query,
              connhandler=self._Tap__connHandler)
    isError = self._Tap__connHandler.check_launch_response_status(
        response, verbose, 200)
    suitableOutputFile = self._Tap__getSuitableOutputFile(
        False, output_file, response.getheaders(), isError, output_format)
    job.set_output_file(suitableOutputFile)
    job.set_output_format(output_format)
    job.set_response_status(response.status, response.reason)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Keep the server's error payload for inspection.
            self._Tap__connHandler.dump_to_file(suitableOutputFile,
                                                response)
        raise requests.exceptions.HTTPError(response.reason)
    else:
        if verbose:
            print("Retrieving sync. results...")
        if dump_to_file:
            self._Tap__connHandler.dump_to_file(suitableOutputFile,
                                                response)
        else:
            results = utils.read_http_response(response, output_format)
            job.set_results(results)
        if verbose:
            print("Query finished.")
        job.set_phase('COMPLETED')
    return job
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False, dump_to_file=False,
               upload_resource=None, upload_table_name=None):
    """Launches a synchronous job

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    output_file : str, optional, default None
        file name where the results are saved if dumpToFile is True.
        If this parameter is not provided, the jobid is used instead
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    upload_resource: str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name: str, required if uploadResource is provided,
    default None
        resource temporary table name associated to the uploaded resource

    Returns
    -------
    A Job object
    """
    # Cap the result set by forcing a TOP clause into the query.
    query = taputils.set_top_in_query(query, 2000)
    if verbose:
        print("Launched query: '"+str(query)+"'")
    if upload_resource is not None:
        if upload_table_name is None:
            raise ValueError(
                "Table name is required when a resource is uploaded")
        response = self.__launchJobMultipart(query, upload_resource,
                                             upload_table_name,
                                             output_format, "sync",
                                             verbose, name)
    else:
        response = self.__launchJob(query, output_format, "sync",
                                    verbose, name)
    # handle redirection
    if response.status == 303:
        # redirection
        if verbose:
            print("Redirection found")
        location = self.__connHandler.find_header(
            response.getheaders(), "location")
        if location is None:
            raise requests.exceptions.HTTPError(
                "No location found after redirection was received (303)")
        if verbose:
            # Fixed: print() takes no format arguments; the previous
            # 'print("Redirect to %s", location)' printed a literal '%s'.
            print("Redirect to %s" % location)
        subcontext = self.__extract_sync_subcontext(location)
        response = self.__connHandler.execute_get(subcontext)
    job = Job(async_job=False, query=query,
              connhandler=self.__connHandler)
    isError = self.__connHandler.check_launch_response_status(response,
                                                              verbose, 200)
    suitableOutputFile = self.__getSuitableOutputFile(False, output_file,
                                                      response.getheaders(),
                                                      isError,
                                                      output_format)
    job.set_output_file(suitableOutputFile)
    job.set_output_format(output_format)
    job.set_response_status(response.status, response.reason)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Keep the server's error payload for inspection.
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        raise requests.exceptions.HTTPError(response.reason)
    else:
        if verbose:
            print("Retrieving sync. results...")
        if dump_to_file:
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        else:
            results = utils.read_http_response(response, output_format)
            job.set_results(results)
        if verbose:
            print("Query finished.")
        job.set_phase('COMPLETED')
    return job
def test_job_basic(self):
    """An async Job with no results set must raise AttributeError from
    get_results()."""
    # Only get_results() itself should be inside the raises-block;
    # otherwise a failing constructor would make the test pass vacuously.
    job = Job(async_job=True)
    with pytest.raises(AttributeError):
        job.get_results()
def launch_job_async(self, query, name=None, output_file=None,
                     output_format="votable", verbose=False,
                     dump_to_file=False, background=False,
                     upload_resource=None, upload_table_name=None,
                     lang=None, queue=None):
    """Launch an asynchronous TAP job (with optional language and queue)
    and return its Job object; blocks for results unless ``background``."""
    if verbose:
        print("Launched query: '" + str(query) + "'")
    if upload_resource is not None:
        if upload_table_name is None:
            raise ValueError(
                "Table name is required when a resource is uploaded")
        # NOTE(review): these two calls use self.__launchJob* (mangled to
        # this class) while the rest of the method uses the explicit
        # _Tap__ prefix -- presumably both resolve to the same helpers;
        # confirm.
        response = self.__launchJobMultipart(query, upload_resource,
                                             upload_table_name,
                                             output_format, "async",
                                             verbose, name, lang, queue)
    else:
        response = self.__launchJob(query, output_format, "async",
                                    verbose, name, lang, queue)
    # A successful async submission is expected to answer 303 (See Other).
    isError = self._Tap__connHandler.check_launch_response_status(
        response, verbose, 303)
    job = Job(async_job=True, query=query,
              connhandler=self._Tap__connHandler)
    suitableOutputFile = self._Tap__getSuitableOutputFile(
        True, output_file, response.getheaders(), isError, output_format)
    job.set_output_file(suitableOutputFile)
    job.set_response_status(response.status, response.reason)
    job.set_output_format(output_format)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Preserve the server's error payload for inspection.
            self._Tap__connHandler.dump_to_file(suitableOutputFile,
                                                response)
        raise requests.exceptions.HTTPError(response.reason)
    else:
        # The job id comes from the redirect "location" header.
        location = self._Tap__connHandler.find_header(
            response.getheaders(), "location")
        jobid = self._Tap__getJobId(location)
        if verbose:
            print("job " + str(jobid) + ", at: " + str(location))
        job.set_jobid(jobid)
        job.set_remote_location(location)
        if not background:
            if verbose:
                print("Retrieving async. results...")
            # saveResults or getResults will block (not background)
            if dump_to_file:
                job.save_results(verbose)
            else:
                job.get_results()
                print("Query finished.")
    return job
def test_job_phase(self):
    """Once a job is COMPLETED, phase changes, start and abort must all
    raise ValueError."""
    job = Job(async_job=True)
    job.jobid = "12345"
    job.parameters['format'] = "votable"
    job.set_phase("COMPLETED")

    def expect_value_error(action, message):
        # Each operation on a finished job must raise ValueError.
        try:
            action()
            self.fail("Exception expected. " + message)
        except ValueError:
            # ok
            pass

    expect_value_error(lambda: job.set_phase("RUN"),
                       "Phase cannot be changed for a finished job")
    expect_value_error(job.start,
                       "A job in 'COMPLETE' phase cannot be started")
    expect_value_error(job.abort,
                       "A job in 'COMPLETE' phase cannot be aborted")
class DummyTapHandler:
    """Test double for a TAP handler.

    Records the name of the last method invoked and its arguments so tests
    can assert on them via ``check_call``; every query method returns a
    canned dummy Job (or None).
    """

    def __init__(self):
        self.__invokedMethod = None
        self.__parameters = {}
        self.__dummy_results = {"filename": ["dummy_filename"],
                                "artifactid": ["dummy_artifact"],
                                "observationid": Table({'obs': np.arange(1)})}
        self.__job = Job(async_job=False)
        self.__job.set_results(self.__dummy_results)

    def reset(self):
        """Forget any recorded invocation and restore the canned results."""
        self.__parameters = {}
        self.__invokedMethod = None
        self.__dummy_results = {"filename": ["dummy_filename"],
                                "artifactid": ["dummy_artifact"],
                                "observationid": Table({'a': np.arange(1)})}
        self.__job = Job(async_job=False)
        self.__job.set_results(self.__dummy_results)

    def set_job(self, job):
        self.__job = job

    def get_job(self):
        return self.__job

    def check_call(self, method_name, parameters):
        """Assert that `method_name` was the last call, with `parameters`."""
        self.check_method(method_name)
        self.check_parameters(parameters, method_name)

    def check_method(self, method):
        """Raise ValueError unless `method` matches the last invocation."""
        if method == self.__invokedMethod:
            return
        else:
            # Fixed message: the previous f-string contained a stray "+"
            # that corrupted the reported method name.
            raise ValueError(f"Method '{method}' not invoked. "
                             f"(Invoked method is "
                             f"'{self.__invokedMethod}')")

    def check_parameters(self, parameters, method_name):
        """Compare recorded parameters against `parameters`; raise
        ValueError on any count/name/value mismatch."""
        print("FOUND")
        print(self.__parameters)
        print("EXPECTED")
        print(parameters)
        if parameters is None:
            return len(self.__parameters) == 0
        if len(parameters) != len(self.__parameters):
            raise ValueError(f"Wrong number of parameters "
                             f"for method '{method_name}'"
                             f" Found: {len(self.__parameters)}. "
                             f"Expected {len(parameters)}")
        for key in parameters:
            if key in self.__parameters:
                # check value
                if self.__parameters[key] != parameters[key]:
                    # Fixed: the message was one broken string literal;
                    # reconstructed as adjacent f-strings.
                    raise ValueError(f"Wrong {key} parameter value for "
                                     f" method '{method_name}'. "
                                     f"Found: {self.__parameters[key]}. "
                                     f"Expected: {parameters[key]}")
            else:
                raise ValueError(f"Parameter '{str(key)}' not found "
                                 f"for method '{method_name}'")

    def load_tables(self, only_names=False, include_shared_tables=False,
                    verbose=False):
        self.__invokedMethod = 'load_tables'
        self.__parameters['only_names'] = only_names
        self.__parameters['include_shared_tables'] = include_shared_tables
        self.__parameters['verbose'] = verbose
        return None

    def load_table(self, table, verbose=False):
        self.__invokedMethod = 'load_table'
        self.__parameters['table'] = table
        self.__parameters['verbose'] = verbose
        return None

    def launch_job(self, query, name=None, output_file=None,
                   output_format="votable", verbose=False,
                   dump_to_file=False, upload_resource=None,
                   upload_table_name=None):
        self.__invokedMethod = 'launch_job'
        self.__parameters['query'] = query
        self.__parameters['name'] = name
        self.__parameters['output_file'] = output_file
        self.__parameters['output_format'] = output_format
        self.__parameters['verbose'] = verbose
        self.__parameters['dump_to_file'] = dump_to_file
        self.__parameters['upload_resource'] = upload_resource
        self.__parameters['upload_table_name'] = upload_table_name
        return self.__job

    def launch_job_async(self, query, name=None, output_file=None,
                         output_format="votable", verbose=False,
                         dump_to_file=False, background=False,
                         upload_resource=None, upload_table_name=None):
        self.__invokedMethod = 'launch_job_async'
        self.__parameters['query'] = query
        self.__parameters['name'] = name
        self.__parameters['output_file'] = output_file
        self.__parameters['output_format'] = output_format
        self.__parameters['verbose'] = verbose
        self.__parameters['dump_to_file'] = dump_to_file
        self.__parameters['background'] = background
        self.__parameters['upload_resource'] = upload_resource
        self.__parameters['upload_table_name'] = upload_table_name
        return self.__job

    def load_async_job(self, jobid=None, name=None, verbose=False):
        self.__invokedMethod = 'load_async_job'
        self.__parameters['jobid'] = jobid
        self.__parameters['name'] = name
        self.__parameters['verbose'] = verbose
        return None

    def search_async_jobs(self, jobfilter=None, verbose=False):
        self.__invokedMethod = 'search_async_jobs'
        self.__parameters['jobfilter'] = jobfilter
        self.__parameters['verbose'] = verbose
        return None

    def list_async_jobs(self, verbose=False):
        self.__invokedMethod = 'list_async_jobs'
        self.__parameters['verbose'] = verbose
        return None

    def query_object(self, coordinate, radius=None, width=None,
                     height=None, verbose=False):
        self.__invokedMethod = 'query_object'
        self.__parameters['coordinate'] = coordinate
        self.__parameters['radius'] = radius
        self.__parameters['width'] = width
        self.__parameters['height'] = height
        self.__parameters['verbose'] = verbose
        return None

    def query_object_async(self, coordinate, radius=None, width=None,
                           height=None, verbose=False):
        self.__invokedMethod = 'query_object_async'
        self.__parameters['coordinate'] = coordinate
        self.__parameters['radius'] = radius
        self.__parameters['width'] = width
        self.__parameters['height'] = height
        self.__parameters['verbose'] = verbose
        return None

    def query_region(self, coordinate, radius=None, width=None):
        self.__invokedMethod = 'query_region'
        self.__parameters['coordinate'] = coordinate
        self.__parameters['radius'] = radius
        self.__parameters['width'] = width
        return None

    def query_region_async(self, coordinate, radius=None, width=None):
        self.__invokedMethod = 'query_region_async'
        self.__parameters['coordinate'] = coordinate
        self.__parameters['radius'] = radius
        self.__parameters['width'] = width
        return None

    def get_images(self, coordinate):
        self.__invokedMethod = 'get_images'
        self.__parameters['coordinate'] = coordinate
        return None

    def get_images_async(self, coordinate):
        # Fixed: previously recorded 'get_images_sync', which made
        # check_method reject legitimate get_images_async calls.
        self.__invokedMethod = 'get_images_async'
        self.__parameters['coordinate'] = coordinate
        return None

    def cone_search(self, coordinate, radius, output_file=None,
                    output_format="votable", verbose=False,
                    dump_to_file=False):
        self.__invokedMethod = 'cone_search'
        self.__parameters['coordinate'] = coordinate
        self.__parameters['radius'] = radius
        self.__parameters['output_file'] = output_file
        self.__parameters['output_format'] = output_format
        self.__parameters['verbose'] = verbose
        self.__parameters['dump_to_file'] = dump_to_file
        return None

    def cone_search_async(self, coordinate, radius, background=False,
                          output_file=None, output_format="votable",
                          verbose=False, dump_to_file=False):
        self.__invokedMethod = 'cone_search_async'
        self.__parameters['coordinate'] = coordinate
        self.__parameters['radius'] = radius
        self.__parameters['background'] = background
        self.__parameters['output_file'] = output_file
        self.__parameters['output_format'] = output_format
        self.__parameters['verbose'] = verbose
        self.__parameters['dump_to_file'] = dump_to_file
        return None

    def remove_jobs(self, jobs_list, verbose=False):
        self.__invokedMethod = 'remove_jobs'
        self.__parameters['jobs_list'] = jobs_list
        self.__parameters['verbose'] = verbose
        return None

    def save_results(self, job, verbose=False):
        self.__invokedMethod = 'save_results'
        self.__parameters['job'] = job
        self.__parameters['verbose'] = verbose
        return None

    def login(self, user=None, password=None, credentials_file=None,
              verbose=False):
        self.__invokedMethod = 'login'
        self.__parameters['user'] = user
        self.__parameters['password'] = password
        self.__parameters['credentials_file'] = credentials_file
        self.__parameters['verbose'] = verbose
        return None

    def login_gui(self, verbose=False):
        self.__invokedMethod = 'login_gui'
        self.__parameters['verbose'] = verbose
        return None

    def logout(self, verbose=False):
        self.__invokedMethod = 'logout'
        self.__parameters['verbose'] = verbose
        return None

    def load_data(self, params_dict, output_file=None, verbose=False):
        self.__invokedMethod = 'load_data'
        self.__parameters['params_dict'] = params_dict
        self.__parameters['output_file'] = output_file
        self.__parameters['verbose'] = verbose

    def set_job_results(self, results):
        self.__dummy_results = results
        self.__job.set_results(self.__dummy_results)
def test_job_get_results(capsys, tmpdir):
    """get_results raises on HTTP errors and parses results on 200;
    save_results only prints when verbose=True (regression for #2299)."""
    jobid = "12345"
    job = Job(async_job=True)
    job.jobid = jobid
    job.parameters['format'] = "votable"

    # Phase endpoint starts out failing.
    phase_resp = DummyResponse()
    phase_resp.set_status_code(500)
    phase_resp.set_message("ERROR")
    phase_resp.set_data(method='GET', context=None, body='FINISHED',
                        headers=None)
    handler = DummyConnHandler()
    handler.set_response(f"async/{jobid}/phase", phase_resp)
    job.connHandler = handler
    with pytest.raises(Exception):
        job.get_results()

    # Phase OK, results endpoint failing.
    phase_resp.set_status_code(200)
    phase_resp.set_message("OK")
    data_resp = DummyResponse()
    data_resp.set_status_code(500)
    data_resp.set_message("ERROR")
    body = utils.read_file_content(data_path('result_1.vot'))
    data_resp.set_data(method='GET', context=None, body=body, headers=None)
    handler.set_response(f"async/{jobid}/results/result", data_resp)
    with pytest.raises(Exception):
        job.get_results()

    # Everything OK: validate the parsed table.
    data_resp.set_status_code(200)
    data_resp.set_message("OK")
    res = job.get_results()
    assert len(res) == 3
    assert len(res.columns) == 4
    for cn in ('alpha', 'delta', 'source_id', 'table1_oid'):
        if cn not in res.colnames:
            pytest.fail(f"{cn} column name not found: {res.colnames}")

    # Regression test for #2299; messages were printed even with `verbose=False`
    capsys.readouterr()
    job._Job__resultInMemory = False
    job.save_results(verbose=False)
    assert 'Saving results to:' not in capsys.readouterr().out
    job.save_results(verbose=True)
    assert 'Saving results to:' in capsys.readouterr().out
class JobSaxParser(xml.sax.ContentHandler):
    '''
    SAX handler that parses a UWS job document, populating Job objects
    from the recognised UWS elements.
    '''

    def __init__(self, async_job=False):
        '''
        Constructor.

        Parameters
        ----------
        async_job : bool, optional, default False
            whether the parsed jobs are asynchronous
        '''
        self.__internal_init()
        self.__async = async_job

    def __internal_init(self):
        # Reset all parser state: buffering, current job, collected jobs,
        # state-machine status and the pending parameter key.
        self.__concatData = False
        self.__charBuffer = []
        self.__job = None
        self.__jobs = []
        self.__status = 0
        self.__paramKey = None
        self.__async = False

    def __create_string_from_buffer(self):
        # Join the buffered character chunks into one string.
        return Utils.util_create_string_from_buffer(self.__charBuffer)

    def __check_item_id(self, itemId, tmpValue):
        # Case-insensitive element-name comparison.
        if str(itemId).lower() == str(tmpValue).lower():
            return True
        return False

    def __check_valid_item_id(self, name):
        # True when the element is any of the recognised UWS items.
        for idTmp in VALID_ITEMS:
            if self.__check_item_id(idTmp, name):
                return True
        return False

    def __start_reading_data(self):
        # Begin capturing character data into a fresh buffer.
        self.__concatData = True
        del self.__charBuffer[:]

    def __stop_reading_data(self):
        self.__concatData = False

    def parseData(self, data):
        """Parse a job XML stream and return the list of Jobs found."""
        xml.sax.parse(data, self)
        return self.__jobs

    def startElement(self, name, attrs):
        if self.__check_item_id(UWS_JOBID, name):
            # A jobid element starts a new Job.
            self.__job = Job(self.__async)
            self.__jobs.append(self.__job)
            self.__start_reading_data()
        elif self.__check_valid_item_id(name):
            self.__start_reading_data()
            # Parameter elements carry their key in the "id" attribute.
            if self.__check_item_id(UWS_PARAMETER, name):
                self.__paramKey = attrs.get("id")
        else:
            self.__stop_reading_data()

    def endElement(self, name):
        if self.__check_valid_item_id(name):
            value = self.__create_string_from_buffer()
            self.__populate_job_value(value, name)
        # Either way, stop capturing character data.
        self.__stop_reading_data()

    def characters(self, content):
        if self.__concatData:
            self.__charBuffer.append(content)

    def __populate_job_value(self, value, name):
        # Map the closed element onto the corresponding Job attribute.
        # (Removed a leftover debug print in the phase branch.)
        nameLower = name.lower()
        if UWS_JOBID == nameLower:
            self.__job.jobid = value
        elif UWS_RUNID == nameLower:
            self.__job.runid = value
        elif UWS_OWNERID == nameLower:
            self.__job.ownerid = value
        elif UWS_PHASE == nameLower:
            # Writes the mangled attribute directly, presumably to bypass
            # the Job phase setter's validation -- confirm.
            self.__job._phase = value
        elif UWS_QUOTE == nameLower:
            self.__job.quote = value
        elif UWS_START_TIME == nameLower:
            self.__job.startTime = value
        elif UWS_END_TIME == nameLower:
            self.__job.endTime = value
        elif UWS_CREATION_TIME == nameLower:
            self.__job.creationTime = value
        elif UWS_LOCATIONID == nameLower:
            self.__job.locationID = value
        elif UWS_NAME == nameLower:
            self.__job.name = value
        elif UWS_EXECUTION_DURATION == nameLower:
            self.__job.executionDuration = value
        elif UWS_DESTRUCTION == nameLower:
            self.__job.destruction = value
        elif UWS_PARAMETER == nameLower:
            self.__job.set_parameter(self.__paramKey, value)
def launch_job_async(self, query, name=None, output_file=None,
                     output_format="votable", verbose=False,
                     dump_to_file=False, background=False,
                     upload_resource=None, upload_table_name=None):
    """Launches an asynchronous job

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    name : str, optional, default None
        job name sent to the server
    output_file : str, optional, default None
        file name where the results are saved if dumpToFile is True.
        If this parameter is not provided, the jobid is used instead
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    background : bool, optional, default 'False'
        when the job is executed in asynchronous mode, this flag specifies
        whether the execution will wait until results are available
    upload_resource: str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name: str, required if uploadResource is provided, default None
        resource temporary table name associated to the uploaded resource

    Returns
    -------
    A Job object

    Raises
    ------
    ValueError
        if `upload_resource` is given without `upload_table_name`
    Exception
        if the server reports a launch error (response reason)
    """
    print("Launched query: '"+str(query)+"'")
    if upload_resource is not None:
        # An upload requires a temporary table name to register the resource.
        if upload_table_name is None:
            raise ValueError(
                "Table name is required when a resource is uploaded")
        response = self.__launchJobMultipart(query, upload_resource,
                                             upload_table_name,
                                             output_format, "async",
                                             verbose, name)
    else:
        response = self.__launchJob(query, output_format, "async",
                                    verbose, name)
    # Async launches answer with HTTP 303 (See Other) pointing at the job.
    isError = self.__connHandler.check_launch_response_status(response,
                                                              verbose,
                                                              303)
    job = Job(async_job=True, query=query, connhandler=self.__connHandler)
    suitableOutputFile = self.__getSuitableOutputFile(True, output_file,
                                                      response.getheaders(),
                                                      isError,
                                                      output_format)
    job.set_output_file(suitableOutputFile)
    job.set_response_status(response.status, response.reason)
    job.set_output_format(output_format)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Persist the error response for inspection before raising.
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        raise Exception(response.reason)
    else:
        # The job URL comes back in the "Location" header; its last path
        # segment is the job identifier.
        location = self.__connHandler.find_header(
            response.getheaders(),
            "location")
        jobid = self.__getJobId(location)
        if verbose:
            print("job " + str(jobid) + ", at: " + str(location))
        job.set_jobid(jobid)
        job.set_remote_location(location)
        if not background:
            print("Retrieving async. results...")
            # saveResults or getResults will block (not background)
            if dump_to_file:
                job.save_results(verbose)
            else:
                job.get_results()
            print("Query finished.")
    return job
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False,
               dump_to_file=False, upload_resource=None,
               upload_table_name=None):
    """Launches a synchronous job

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    name : str, optional, default None
        job name sent to the server
    output_file : str, optional, default None
        file name where the results are saved if dumpToFile is True.
        If this parameter is not provided, the jobid is used instead
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    upload_resource: str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name: str, required if uploadResource is provided, default None
        resource temporary table name associated to the uploaded resource

    Returns
    -------
    A Job object

    Raises
    ------
    ValueError
        if `upload_resource` is given without `upload_table_name`
    Exception
        if the server reports a launch error (response reason)
    """
    # Sync queries are capped: inject a TOP 2000 clause into the query.
    query = taputils.set_top_in_query(query, 2000)
    print("Launched query: '"+str(query)+"'")
    if upload_resource is not None:
        # An upload requires a temporary table name to register the resource.
        if upload_table_name is None:
            raise ValueError("Table name is required when a resource "
                             "is uploaded")
        response = self.__launchJobMultipart(query, upload_resource,
                                             upload_table_name,
                                             output_format, "sync",
                                             verbose, name)
    else:
        response = self.__launchJob(query, output_format, "sync",
                                    verbose, name)
    job = Job(async_job=False, query=query, connhandler=self.__connHandler)
    # Sync launches answer with HTTP 200 carrying the results directly.
    isError = self.__connHandler.check_launch_response_status(response,
                                                              verbose,
                                                              200)
    suitableOutputFile = self.__getSuitableOutputFile(False, output_file,
                                                      response.getheaders(),
                                                      isError,
                                                      output_format)
    job.set_output_file(suitableOutputFile)
    job.set_output_format(output_format)
    job.set_response_status(response.status, response.reason)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Persist the error response for inspection before raising.
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        raise Exception(response.reason)
    else:
        print("Retrieving sync. results...")
        if dump_to_file:
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        else:
            # Parse the response body in memory and attach it to the job.
            results = utils.read_http_response(response, output_format)
            job.set_results(results)
        print("Query finished.")
        job.set_phase('COMPLETED')
    return job
class JobSaxParser(xml.sax.ContentHandler):
    """SAX content handler that turns a UWS job XML document into Job objects.

    Each ``<jobId>`` element opens a new ``Job``; :meth:`parseData` runs the
    parse and hands back every job collected.
    """

    def __init__(self, async_job=False):
        """Constructor.

        Parameters
        ----------
        async_job : bool, optional, default False
            flag stored on each created Job: True for asynchronous jobs.
        """
        self.__internal_init()
        self.__async = async_job

    def __internal_init(self):
        # Fresh parser state; called from the constructor.
        self.__concatData = False   # capturing character data?
        self.__charBuffer = []      # text fragments of the current element
        self.__job = None           # job being filled in
        self.__jobs = []            # every job parsed so far
        self.__status = 0
        self.__paramKey = None      # "id" attribute of the open <parameter>
        self.__async = False

    def __create_string_from_buffer(self):
        return Utils.util_create_string_from_buffer(self.__charBuffer)

    def __check_item_id(self, itemId, tmpValue):
        # Names match case-insensitively.
        return str(itemId).lower() == str(tmpValue).lower()

    def __check_valid_item_id(self, name):
        # Is `name` one of the UWS items this parser handles?
        return any(self.__check_item_id(candidate, name)
                   for candidate in VALID_ITEMS)

    def __start_reading_data(self):
        self.__concatData = True
        del self.__charBuffer[:]

    def __stop_reading_data(self):
        self.__concatData = False

    def parseData(self, data):
        """Parse `data` (stream/file-like UWS XML); return the jobs found."""
        xml.sax.parse(data, self)
        return self.__jobs

    def startElement(self, name, attrs):
        if self.__check_item_id(UWS_JOBID, name):
            # A <jobId> element begins a brand-new job.
            fresh = Job(self.__async)
            self.__job = fresh
            self.__jobs.append(fresh)
            self.__start_reading_data()
        elif self.__check_valid_item_id(name):
            self.__start_reading_data()
            if self.__check_item_id(UWS_PARAMETER, name):
                self.__paramKey = attrs.get("id")
        else:
            self.__stop_reading_data()

    def endElement(self, name):
        # Flush captured text into the job for recognized items, then
        # always stop capturing.
        if self.__check_valid_item_id(name):
            self.__populate_job_value(self.__create_string_from_buffer(),
                                      name)
        self.__stop_reading_data()

    def characters(self, content):
        if self.__concatData:
            self.__charBuffer.append(content)

    def __populate_job_value(self, value, name):
        # Route a finished element's text to the matching Job setter.
        key = name.lower()
        if key == UWS_PARAMETER:
            # Parameters carry their own key taken from the "id" attribute.
            self.__job.set_parameter(self.__paramKey, value)
            return
        setter_name = {
            UWS_JOBID: "set_jobid",
            UWS_RUNID: "set_runid",
            UWS_OWNERID: "set_ownerid",
            UWS_PHASE: "set_phase",
            UWS_QUOTE: "set_quote",
            UWS_START_TIME: "set_start_time",
            UWS_END_TIME: "set_end_time",
            UWS_CREATION_TIME: "set_creation_time",
            UWS_LOCATIONID: "set_locationid",
            UWS_NAME: "set_name",
            UWS_EXECUTION_DURATION: "set_execution_duration",
            UWS_DESTRUCTION: "set_destruction",
        }.get(key)
        if setter_name is not None:
            getattr(self.__job, setter_name)(value)