def test_job_results_parser(self):
    """Parse a canned VOTable job-results file and check the column count."""
    fileName = data_path('test_job_results.xml')
    # Context manager guarantees the handle is closed even when the
    # assertion fails; the original leaked the open file on failure.
    with open(fileName, 'rb') as file:
        resultTable = utils.read_http_response(file, 'votable')
    # Message typo fixed ("columsn" -> "columns").
    assert len(resultTable.columns) == 57, \
        "Expected 57 columns, found %d" % len(resultTable.columns)
def __load_async_job_results(self, debug=False):
    """Wait for this asynchronous job to end, then fetch and store its results.

    The final job phase is recorded in ``self._phase``. Raises ``SystemError``
    when the job finished in the ERROR phase, and
    ``requests.exceptions.HTTPError`` when the results request itself fails.
    """
    _, job_phase = self.wait_for_job_end()
    sub_context = "async/" + str(self.jobid) + "/results/result"
    response = self.connHandler.execute_tapget(sub_context)
    if debug:
        print(response.status, response.reason)
        print(response.getheaders())
    # The service may answer with a redirect to the actual result location.
    response = self.__handle_redirect_if_required(response, debug)
    has_error = self.connHandler.\
        check_launch_response_status(response, debug, 200)
    self._phase = job_phase
    if job_phase == 'ERROR':
        raise SystemError(self.get_error(debug))
    if has_error:
        message = taputils.get_http_response_error(response)
        print(response.status, message)
        raise requests.exceptions.HTTPError(message)
    results = utils.read_http_response(response, self.parameters['format'])
    self.set_results(results)
def __load_async_job_results(self, debug=False):
    """Load the results of the finished asynchronous job into this Job.

    Waits for the job to end, retrieves ``async/<jobid>/results/result``,
    parses the response in the job's requested output format, stores it via
    ``set_results()``, and records the final phase in ``self._phase``.

    Raises
    ------
    requests.exceptions.HTTPError
        If the results request does not return HTTP 200.
    """
    wjResponse, wjData = self.wait_for_job_end()
    subContext = "async/" + str(self.jobid) + "/results/result"
    resultsResponse = self.connHandler.execute_get(subContext)
    if debug:
        print(resultsResponse.status, resultsResponse.reason)
        print(resultsResponse.getheaders())
    isError = self.connHandler.check_launch_response_status(
        resultsResponse, debug, 200)
    if isError:
        print(resultsResponse.reason)
        # Raise a specific HTTPError (a subclass of Exception, so callers
        # catching Exception keep working) instead of a bare Exception,
        # matching the error handling used elsewhere in this file.
        raise requests.exceptions.HTTPError(resultsResponse.reason)
    outputFormat = self.parameters['format']
    results = utils.read_http_response(resultsResponse, outputFormat)
    self.set_results(results)
    self._phase = wjData
def __load_async_job_results(self, debug=False):
    """Fetch and store the results of this finished asynchronous job.

    Retrieves ``async/<jobid>/results/result`` once the job has ended,
    parses it in the requested output format and saves it with
    ``set_results()``; the final phase is kept in ``self._phase``.

    Raises
    ------
    requests.exceptions.HTTPError
        If the results request does not return HTTP 200.
    """
    wjResponse, wjData = self.wait_for_job_end()
    subContext = "async/" + str(self.jobid) + "/results/result"
    resultsResponse = self.connHandler.execute_get(subContext)
    if debug:
        print(resultsResponse.status, resultsResponse.reason)
        print(resultsResponse.getheaders())
    isError = self.connHandler.check_launch_response_status(resultsResponse,
                                                            debug,
                                                            200)
    if isError:
        print(resultsResponse.reason)
        # HTTPError (an Exception subclass) replaces the original bare
        # Exception for consistency with the rest of the file; existing
        # callers that catch Exception are unaffected.
        raise requests.exceptions.HTTPError(resultsResponse.reason)
    outputFormat = self.parameters['format']
    results = utils.read_http_response(resultsResponse, outputFormat)
    self.set_results(results)
    self._phase = wjData
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False,
               dump_to_file=False, upload_resource=None,
               upload_table_name=None):
    """Launches a synchronous job

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    output_file : str, optional, default None
        file name where the results are saved if dumpToFile is True.
        If this parameter is not provided, the jobid is used instead
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    upload_resource: str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name: str, required if uploadResource is provided,
        default None
        resource temporary table name associated to the uploaded resource

    Returns
    -------
    A Job object
    """
    query = taputils.set_top_in_query(query, 2000)
    if verbose:
        print("Launched query: '"+str(query)+"'")
    if upload_resource is None:
        resp = self.__launchJob(query, output_format, "sync", verbose, name)
    else:
        if upload_table_name is None:
            raise ValueError("Table name is required when a resource is uploaded")
        resp = self.__launchJobMultipart(query, upload_resource,
                                         upload_table_name, output_format,
                                         "sync", verbose, name)
    # A 303 status means the service redirected us to the actual results.
    if resp.status == 303:
        if verbose:
            print("Redirection found")
        location = self.__connHandler.find_header(resp.getheaders(),
                                                  "location")
        if location is None:
            raise requests.exceptions.HTTPError("No location found after redirection was received (303)")
        if verbose:
            print("Redirect to %s", location)
        redirect_context = self.__extract_sync_subcontext(location)
        resp = self.__connHandler.execute_get(redirect_context)
    job = Job(async_job=False, query=query, connhandler=self.__connHandler)
    failed = self.__connHandler.check_launch_response_status(resp,
                                                             verbose,
                                                             200)
    out_file = self.__getSuitableOutputFile(False, output_file,
                                            resp.getheaders(), failed,
                                            output_format)
    job.set_output_file(out_file)
    job.set_output_format(output_format)
    job.set_response_status(resp.status, resp.reason)
    if failed:
        job.set_failed(True)
        if dump_to_file:
            self.__connHandler.dump_to_file(out_file, resp)
        raise requests.exceptions.HTTPError(resp.reason)
    if verbose:
        print("Retrieving sync. results...")
    if dump_to_file:
        self.__connHandler.dump_to_file(out_file, resp)
    else:
        job.set_results(utils.read_http_response(resp, output_format))
    if verbose:
        print("Query finished.")
    job.set_phase('COMPLETED')
    return job
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False,
               dump_to_file=False, upload_resource=None,
               upload_table_name=None, lang=None, queue=None):
    """Launch a synchronous job and return the resulting Job object.

    Variant of ``Tap.launch_job`` that additionally forwards ``lang`` and
    ``queue`` to the launch helpers. Private members of the ``Tap`` base
    class are reached through their mangled names (``self._Tap__...``),
    which indicates this method lives in a class other than ``Tap``.

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    output_file : str, optional, default None
        file name where the results are saved if dump_to_file is True
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    upload_resource : str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name : str, required if upload_resource is provided
        temporary table name associated to the uploaded resource
    lang : str, optional, default None
        query language forwarded to the launch helper
    queue : str, optional, default None
        queue name forwarded to the launch helper

    Returns
    -------
    A Job object
    """
    if verbose:
        print("Launched query: '" + str(query) + "'")
    if upload_resource is not None:
        if upload_table_name is None:
            raise ValueError(
                "Table name is required when a resource is uploaded")
        response = self._Tap__launchJobMultipart(query, upload_resource,
                                                 upload_table_name,
                                                 output_format, "sync",
                                                 verbose, name,
                                                 lang, queue)
    else:
        # NOTE(review): `self.__launchJob` mangles to
        # `_<ThisClass>__launchJob`, not `_Tap__launchJob`, unlike every
        # other private access in this method — presumably this class
        # defines its own `__launchJob` taking lang/queue; confirm, or this
        # call raises AttributeError at runtime.
        response = self.__launchJob(query, output_format, "sync",
                                    verbose, name, lang, queue)
    # handle redirection: 303 points at the actual results location
    if response.status == 303:
        # redirection
        if verbose:
            print("Redirection found")
        location = self._Tap__connHandler.find_header(
            response.getheaders(), "location")
        if location is None:
            raise requests.exceptions.HTTPError(
                "No location found after redirection was received (303)")
        if verbose:
            print("Redirect to %s", location)
        subcontext = self._Tap__extract_sync_subcontext(location)
        response = self._Tap__connHandler.execute_get(subcontext)
    job = Job(async_job=False, query=query,
              connhandler=self._Tap__connHandler)
    isError = self._Tap__connHandler.check_launch_response_status(
        response, verbose, 200)
    suitableOutputFile = self._Tap__getSuitableOutputFile(
        False, output_file, response.getheaders(), isError,
        output_format)
    job.set_output_file(suitableOutputFile)
    job.set_output_format(output_format)
    job.set_response_status(response.status, response.reason)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Preserve the failed response body for inspection before raising.
            self._Tap__connHandler.dump_to_file(suitableOutputFile,
                                                response)
        raise requests.exceptions.HTTPError(response.reason)
    else:
        if verbose:
            print("Retrieving sync. results...")
        if dump_to_file:
            self._Tap__connHandler.dump_to_file(suitableOutputFile,
                                                response)
        else:
            results = utils.read_http_response(response, output_format)
            job.set_results(results)
        if verbose:
            print("Query finished.")
        job.set_phase('COMPLETED')
    return job
def test_job_results_parser():
    """Parse a canned VOTable results file and verify its column count."""
    fileName = data_path('test_job_results.xml')
    # 'with' closes the file even when the assertion below fails; the
    # original left the handle open on failure.
    with open(fileName, 'rb') as file:
        resultTable = utils.read_http_response(file, 'votable')
    assert len(resultTable.columns) == 57
def launch_job(self, query, name=None, output_file=None,
               output_format="votable", verbose=False,
               dump_to_file=False, upload_resource=None,
               upload_table_name=None):
    """Launches a synchronous job

    Parameters
    ----------
    query : str, mandatory
        query to be executed
    output_file : str, optional, default None
        file name where the results are saved if dumpToFile is True.
        If this parameter is not provided, the jobid is used instead
    output_format : str, optional, default 'votable'
        results format
    verbose : bool, optional, default 'False'
        flag to display information about the process
    dump_to_file : bool, optional, default 'False'
        if True, the results are saved in a file instead of using memory
    upload_resource: str, optional, default None
        resource to be uploaded to UPLOAD_SCHEMA
    upload_table_name: str, required if uploadResource is provided,
        default None
        resource temporary table name associated to the uploaded resource

    Returns
    -------
    A Job object

    Raises
    ------
    ValueError
        If a resource is uploaded without a table name.
    requests.exceptions.HTTPError
        If the server response is not HTTP 200.
    """
    query = taputils.set_top_in_query(query, 2000)
    print("Launched query: '"+str(query)+"'")
    if upload_resource is not None:
        if upload_table_name is None:
            raise ValueError("Table name is required when a resource is uploaded")
        response = self.__launchJobMultipart(query, upload_resource,
                                             upload_table_name,
                                             output_format, "sync",
                                             verbose, name)
    else:
        response = self.__launchJob(query, output_format, "sync",
                                    verbose, name)
    job = Job(async_job=False, query=query, connhandler=self.__connHandler)
    isError = self.__connHandler.check_launch_response_status(response,
                                                              verbose,
                                                              200)
    suitableOutputFile = self.__getSuitableOutputFile(False, output_file,
                                                      response.getheaders(),
                                                      isError,
                                                      output_format)
    job.set_output_file(suitableOutputFile)
    job.set_output_format(output_format)
    job.set_response_status(response.status, response.reason)
    if isError:
        job.set_failed(True)
        if dump_to_file:
            # Keep the failed response body on disk for inspection.
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        # HTTPError (an Exception subclass) replaces the original bare
        # Exception so the failure mode matches the sibling launch_job
        # implementation; callers catching Exception are unaffected.
        raise requests.exceptions.HTTPError(response.reason)
    else:
        print("Retrieving sync. results...")
        if dump_to_file:
            self.__connHandler.dump_to_file(suitableOutputFile, response)
        else:
            results = utils.read_http_response(response, output_format)
            job.set_results(results)
        print("Query finished.")
        job.set_phase('COMPLETED')
    return job