def query_latest(self, state, last_nhours=1):
    """
    Query the latest transfer statuses in FTS3 via JSON.

    :param state: Transfer state as a string or a list of states.
    :param last_nhours: Time window in hours, defaults to 1.
    :returns: Transfer status information as a dictionary, or None on failure.
    """
    jobs = None
    try:
        whoami = requests.get('%s/whoami' % (self.external_host),
                              verify=self.verify,
                              cert=self.cert,
                              headers={'Content-Type': 'application/json'})
        if whoami and whoami.status_code == 200:
            delegation_id = whoami.json()['delegation_id']
        else:
            raise Exception('Could not retrieve delegation id: %s' % whoami.content)
        state_string = ','.join(state)
        jobs = requests.get('%s/jobs?dlg_id=%s&state_in=%s&time_window=%s' % (self.external_host,
                                                                              delegation_id,
                                                                              state_string,
                                                                              last_nhours),
                            verify=self.verify,
                            cert=self.cert,
                            headers={'Content-Type': 'application/json'})
    except ReadTimeout as error:
        raise TransferToolTimeout(error)
    except JSONDecodeError as error:
        raise TransferToolWrongAnswer(error)
    except Exception:
        logging.warning('Could not query latest terminal states from %s', self.external_host)

    if jobs and (jobs.status_code == 200 or jobs.status_code == 207):
        record_counter('transfertool.fts3.%s.query_latest.success' % self.__extract_host(self.external_host))
        try:
            jobs_json = jobs.json()
            return jobs_json
        except ReadTimeout as error:
            raise TransferToolTimeout(error)
        except JSONDecodeError as error:
            raise TransferToolWrongAnswer(error)
        except Exception as error:
            logging.error("Failed to parse the jobs status %s" % (str(error)))

    record_counter('transfertool.fts3.%s.query.failure' % self.__extract_host(self.external_host))
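# Usage sketch (illustrative only; `fts` is an assumed FTS3Transfertool instance,
# not defined in this module):
#
#     terminal_jobs = fts.query_latest(state=['FINISHED', 'FAILED'], last_nhours=1)
#     if terminal_jobs:
#         print('retrieved %d terminal jobs' % len(terminal_jobs))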
def delegate_proxy(self, proxy, ca_path='/etc/grid-security/certificates/', duration_hours=96, timeleft_hours=72):
    """Delegate the user proxy to the FTS server if its remaining lifetime is less than timeleft_hours.

    :param proxy: proxy to be delegated
    :type proxy: str
    :param ca_path: CA path for verification, defaults to '/etc/grid-security/certificates/'
    :type ca_path: str, optional
    :param duration_hours: delegation validity duration in hours, defaults to 96
    :type duration_hours: int, optional
    :param timeleft_hours: minimal delegation time left, defaults to 72
    :type timeleft_hours: int, optional
    :return: delegation ID and the FTS context
    :rtype: tuple
    """
    logging.info("Delegating proxy %s to %s", proxy, self.external_host)
    start_time = time.time()
    try:
        context = Context(self.external_host,
                          ucert=proxy,
                          ukey=proxy,
                          verify=True,
                          capath=ca_path)
        delegation_id = delegate(context,
                                 lifetime=datetime.timedelta(hours=duration_hours),
                                 delegate_when_lifetime_lt=datetime.timedelta(hours=timeleft_hours))
        record_timer('transfertool.fts3.delegate_proxy.success.%s' % proxy, (time.time() - start_time))
    except ServerError:
        logging.error("Server side exception during FTS proxy delegation.")
        record_timer('transfertool.fts3.delegate_proxy.fail.%s' % proxy, (time.time() - start_time))
        raise
    except ClientError:
        logging.error("Config side exception during FTS proxy delegation.")
        record_timer('transfertool.fts3.delegate_proxy.fail.%s' % proxy, (time.time() - start_time))
        raise
    except BadEndpoint:
        logging.error("Wrong FTS endpoint: %s", self.external_host)
        record_timer('transfertool.fts3.delegate_proxy.fail.%s' % proxy, (time.time() - start_time))
        raise
    except ReadTimeout as error:
        raise TransferToolTimeout(error)
    except JSONDecodeError as error:
        raise TransferToolWrongAnswer(error)

    logging.info("Delegated proxy %s", delegation_id)

    return delegation_id, context
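# Usage sketch (illustrative only; the instance name `fts` and the proxy path are
# assumptions, not part of this module):
#
#     delegation_id, context = fts.delegate_proxy('/tmp/x509up_u1000')
#     print('delegated as', delegation_id)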
def bulk_query(self, transfer_ids, timeout=None):
    """
    Query the status of a bulk of transfers in FTS3 via JSON.

    :param transfer_ids: FTS transfer identifiers as a list.
    :param timeout: Timeout in seconds.
    :returns: Transfer status information as a dictionary keyed by transfer identifier.
    """
    jobs = None

    if not isinstance(transfer_ids, list):
        transfer_ids = [transfer_ids]

    responses = {}
    fts_session = requests.Session()
    xfer_ids = ','.join(transfer_ids)
    jobs = fts_session.get('%s/jobs/%s?files=file_state,dest_surl,finish_time,start_time,reason,source_surl,file_metadata' % (self.external_host, xfer_ids),
                           verify=self.verify,
                           cert=self.cert,
                           headers={'Content-Type': 'application/json'},
                           timeout=timeout)

    if jobs is None:
        record_counter('transfertool.fts3.%s.bulk_query.failure' % self.__extract_host(self.external_host))
        for transfer_id in transfer_ids:
            responses[transfer_id] = Exception('Transfer information returns None: %s' % jobs)
    elif jobs.status_code == 200 or jobs.status_code == 207:
        try:
            record_counter('transfertool.fts3.%s.bulk_query.success' % self.__extract_host(self.external_host))
            jobs_response = jobs.json()
            responses = self.__bulk_query_responses(jobs_response)
        except ReadTimeout as error:
            raise TransferToolTimeout(error)
        except JSONDecodeError as error:
            raise TransferToolWrongAnswer(error)
        except Exception as error:
            raise Exception("Failed to parse the job response: %s, error: %s" % (str(jobs), str(error)))
    else:
        record_counter('transfertool.fts3.%s.bulk_query.failure' % self.__extract_host(self.external_host))
        for transfer_id in transfer_ids:
            responses[transfer_id] = Exception('Could not retrieve transfer information: %s' % jobs.content)

    return responses
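# Usage sketch (illustrative only; `fts` and the transfer ids are placeholders):
#
#     responses = fts.bulk_query(['job-id-1', 'job-id-2'], timeout=30)
#     for transfer_id, response in responses.items():
#         if isinstance(response, Exception):
#             print(transfer_id, 'query failed:', response)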
def __get_transfer_baseid_voname(self):
    """
    Get the transfer base id and VO name from the external host.

    :returns: Base id and VO name as a tuple of strings, or (None, None) on failure.
    """
    result = (None, None)
    try:
        key = 'voname: %s' % self.external_host
        result = REGION_SHORT.get(key)
        if isinstance(result, NoValue):
            logging.debug("Refresh transfer baseid and voname for %s", self.external_host)

            get_result = None
            try:
                get_result = requests.get('%s/whoami' % self.external_host,
                                          verify=self.verify,
                                          cert=self.cert,
                                          headers={'Content-Type': 'application/json'},
                                          timeout=5)
            except ReadTimeout as error:
                raise TransferToolTimeout(error)
            except JSONDecodeError as error:
                raise TransferToolWrongAnswer(error)
            except Exception as error:
                logging.warning('Could not get baseid and voname from %s - %s' % (self.external_host, str(error)))

            if get_result and get_result.status_code == 200:
                baseid = str(get_result.json()['base_id'])
                voname = str(get_result.json()['vos'][0])
                result = (baseid, voname)

                REGION_SHORT.set(key, result)
                logging.debug("Get baseid %s and voname %s from %s", baseid, voname, self.external_host)
            else:
                logging.warning("Failed to get baseid and voname from %s, error: %s", self.external_host, get_result.text if get_result is not None else get_result)
                result = (None, None)
    except Exception as error:
        logging.warning("Failed to get baseid and voname from %s: %s" % (self.external_host, str(error)))
        result = (None, None)

    return result
def submit(self, files, job_params, timeout=None):
    """
    Submit transfers to FTS3 via JSON.

    :param files: List of dictionaries describing the file transfers.
    :param job_params: Dictionary containing key/value pairs, for all transfers.
    :param timeout: Timeout in seconds.
    :returns: FTS transfer identifier.
    """
    # FTS3 expects 'davs' as the scheme identifier instead of https
    for transfer_file in files:
        if not transfer_file['sources'] or transfer_file['sources'] == []:
            raise Exception('No sources defined')

        new_src_urls = []
        new_dst_urls = []
        for url in transfer_file['sources']:
            if url.startswith('https'):
                new_src_urls.append(':'.join(['davs'] + url.split(':')[1:]))
            else:
                new_src_urls.append(url)
        for url in transfer_file['destinations']:
            if url.startswith('https'):
                new_dst_urls.append(':'.join(['davs'] + url.split(':')[1:]))
            else:
                new_dst_urls.append(url)

        transfer_file['sources'] = new_src_urls
        transfer_file['destinations'] = new_dst_urls

    transfer_id = None
    expected_transfer_id = None
    if self.deterministic_id:
        job_params = job_params.copy()
        job_params["id_generator"] = "deterministic"
        job_params["sid"] = files[0]['metadata']['request_id']
        expected_transfer_id = self.__get_deterministic_id(job_params["sid"])
        logging.debug("Submit bulk transfers in deterministic mode, sid %s, expected transfer id: %s", job_params["sid"], expected_transfer_id)

    # bulk submission
    params_dict = {'files': files, 'params': job_params}
    params_str = json.dumps(params_dict, cls=APIEncoder)

    post_result = None
    try:
        start_time = time.time()
        post_result = requests.post('%s/jobs' % self.external_host,
                                    verify=self.verify,
                                    cert=self.cert,
                                    data=params_str,
                                    headers={'Content-Type': 'application/json'},
                                    timeout=timeout)
        record_timer('transfertool.fts3.submit_transfer.%s' % self.__extract_host(self.external_host), (time.time() - start_time) * 1000 / len(files))
    except ReadTimeout as error:
        raise TransferToolTimeout(error)
    except JSONDecodeError as error:
        raise TransferToolWrongAnswer(error)
    except Exception as error:
        logging.warning('Could not submit transfer to %s - %s' % (self.external_host, str(error)))

    if post_result and post_result.status_code == 200:
        record_counter('transfertool.fts3.%s.submission.success' % self.__extract_host(self.external_host), len(files))
        transfer_id = str(post_result.json()['job_id'])
    elif post_result and post_result.status_code == 409:
        record_counter('transfertool.fts3.%s.submission.failure' % self.__extract_host(self.external_host), len(files))
        raise DuplicateFileTransferSubmission()
    else:
        if expected_transfer_id:
            transfer_id = expected_transfer_id
            logging.warning("Failed to submit transfer to %s, will use expected transfer id %s, error: %s", self.external_host, transfer_id, post_result.text if post_result is not None else post_result)
        else:
            logging.warning("Failed to submit transfer to %s, error: %s", self.external_host, post_result.text if post_result is not None else post_result)
        record_counter('transfertool.fts3.%s.submission.failure' % self.__extract_host(self.external_host), len(files))

    if not transfer_id:
        raise TransferToolWrongAnswer('No transfer id returned by %s' % self.external_host)

    return transfer_id
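# Usage sketch (illustrative only; `fts`, the URLs, and the file/job parameter
# dictionaries below are minimal assumptions about the expected structure, not a
# complete or authoritative example):
#
#     files = [{'sources': ['https://src.example.org/path/file'],
#               'destinations': ['https://dst.example.org/path/file'],
#               'metadata': {'request_id': 'deadbeef'}}]
#     job_params = {'overwrite': True}
#     transfer_id = fts.submit(files, job_params, timeout=30)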