def get_and_write_data_by_api3(data_api_request_pulseid, parameters):
    import data_api3.h5 as h5
    import pytz

    data_api_request = utils.transform_range_from_pulse_id_to_timestamp(
        data_api_request_pulseid)

    channels = [channel["name"] for channel in data_api_request["channels"]]
    filename = parameters["output_file"]

    # Format the range as UTC strings accepted by the query API,
    # e.g. "2019-12-13T09:00:00.000000Z" (%f gives microsecond precision).
    start = datetime.fromtimestamp(
        float(data_api_request["range"]["startSeconds"])
    ).astimezone(pytz.timezone('UTC')).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    end = datetime.fromtimestamp(
        float(data_api_request["range"]["endSeconds"])
    ).astimezone(pytz.timezone('UTC')).strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    query = {
        "channels": channels,
        "range": {
            "type": "date",
            "startDate": start,
            "endDate": end
        }
    }

    _logger.info("Going to make query %s to write file %s from %s",
                 query, filename, config.IMAGE_API_QUERY_ADDRESS)
    h5.request(query, filename, url=config.IMAGE_API_QUERY_ADDRESS)
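# Usage sketch (assumption): the shape of the pulse-id request below is
# inferred from the fields read in write_from_databuffer_api3; the channel
# name and pulse ids are illustrative only, not taken from this module.
def _example_image_retrieval():
    example_request = {
        "channels": [{"name": "SARES11-SPEC125-M1:FPICTURE"}],
        "range": {"startPulseId": 11000000000, "endPulseId": 11000000100},
    }
    get_and_write_data_by_api3(example_request, {"output_file": "images.h5"})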
def save(args):
    # Other backends: sf-archive, saresa-archive, saresb-archive,
    # proscan-archive; sf-archiverappliance ?
    backends = ["sf-databuffer", "sf-imagebuffer", "hipa-archive"]
    if args.default_backend not in backends:
        raise RuntimeError(
            f"Only these backends are currently allowed: {backends}. "
            f"Please use `--default-backend <BACKEND>`."
        )

    baseurl = args.baseurl
    filename = args.filename
    channels = args.channels

    # Format the range as UTC strings, e.g. "2019-12-13T09:00:00.000000Z"
    # (%f gives microsecond precision).
    start = args.start.astimezone(pytz.timezone('UTC')).strftime(
        "%Y-%m-%dT%H:%M:%S.%fZ")
    end = args.end.astimezone(pytz.timezone('UTC')).strftime(
        "%Y-%m-%dT%H:%M:%S.%fZ")

    query = {
        "channels": channels,
        "range": {
            "type": "date",
            "startDate": start,
            "endDate": end
        }
    }

    h5.request(query, filename, url=f"{baseurl}/query")
    return 0
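# CLI wiring sketch (assumption): `save` expects an argparse-style namespace
# with default_backend, baseurl, filename, channels, and datetime start/end
# (astimezone is called on them). The flag names and defaults below mirror
# those attributes but are illustrative, not confirmed by this snippet.
def _parse_args_sketch(argv=None):
    import argparse
    from datetime import datetime

    parser = argparse.ArgumentParser(description="Save channel data to HDF5.")
    parser.add_argument("--default-backend", dest="default_backend",
                        default="sf-databuffer")
    parser.add_argument("--baseurl", default="http://localhost:8080/api/v1")
    parser.add_argument("--filename", default="my.h5")
    parser.add_argument("--channels", nargs="+", default=["MHC1:IST:2"])
    parser.add_argument("--start", type=datetime.fromisoformat)
    parser.add_argument("--end", type=datetime.fromisoformat)
    return parser.parse_args(argv)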
def write_from_databuffer_api3(data_api_request, output_file, parameters):
    import data_api3.h5 as h5

    _logger.debug("Data3 API request: %s", data_api_request)

    start_pulse_id = data_api_request["range"]["startPulseId"]
    stop_pulse_id = data_api_request["range"]["endPulseId"]
    rate_multiplicator = data_api_request.get("rate_multiplicator", 1)

    data_api_request_timestamp = utils.transform_range_from_pulse_id_to_timestamp_new(
        data_api_request)

    channels = [
        channel["name"] for channel in data_api_request_timestamp["channels"]
    ]

    if "startTS" not in data_api_request_timestamp["range"]:
        _logger.info(
            "startTS not present, transformation of pulse id to timestamp failed")
        return

    start = tsfmt(data_api_request_timestamp["range"]["startTS"])
    end = tsfmt(data_api_request_timestamp["range"]["endTS"])

    query = {
        "channels": channels,
        "range": {
            "type": "date",
            "startDate": start,
            "endDate": end
        }
    }

    data_buffer_url = config.DATA_API3_QUERY_ADDRESS
    _logger.debug("Requesting '%s' to output_file %s from %s",
                  query, output_file, data_buffer_url)

    start_time = time()
    try:
        h5.request(query, filename=output_file, baseurl=data_buffer_url,
                   default_backend=config.DATA_BACKEND)
        _logger.info("Data download and writing took %s seconds.",
                     time() - start_time)
    except Exception as e:
        _logger.error("Got exception from data_api3")
        _logger.error(e)

    check_data_consistency(start_pulse_id, stop_pulse_id, rate_multiplicator,
                           channels, output_file)
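# `tsfmt` is called above but not defined in this snippet. A minimal sketch,
# assuming startTS/endTS carry epoch seconds and the API expects the same UTC
# string format used elsewhere in this module; the name and signature come
# from the call sites above, the body is an assumption.
def tsfmt(ts_seconds):
    import pytz
    from datetime import datetime
    # e.g. 1576227600.0 -> "2019-12-13T09:00:00.000000Z"
    return datetime.fromtimestamp(float(ts_seconds)).astimezone(
        pytz.timezone('UTC')).strftime("%Y-%m-%dT%H:%M:%S.%fZ")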
def test_read_http_hipa(self):
    query = {
        "channels": ["MHC1:IST:2"],
        "range": {
            "type": "date",
            "startDate": "2019-11-15T10:50:00.000000000Z",
            "endDate": "2019-11-15T10:51:00.000000000Z"
        }
    }
    h5.request(query, "my.h5", url="http://localhost:8080/api/v1/query")
def test_read_images_swissfel(self):
    query = {
        "channels": ["SARES11-SPEC125-M1:FPICTURE"],
        "range": {
            "type": "date",
            "startDate": "2019-12-13T09:00:00.000000000Z",
            "endDate": "2019-12-13T09:00:00.100000000Z"
        }
    }
    h5.request(query, "my.h5", url="http://sf-daq-5.psi.ch:8080/api/v1/query")
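# Read-back sketch (assumption): the layout data_api3.h5 writes into the file
# is not shown in these tests, so this helper only lists the stored paths to
# verify that the requested channels actually arrived.
def inspect_written_file(filename="my.h5"):
    import h5py
    with h5py.File(filename, "r") as f:
        f.visit(print)  # print every group/dataset path in the file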
def write_from_databuffer_api3(data_api_request, output_file, parameters):
    import data_api3.h5 as h5
    import pytz

    _logger.debug("Data3 API request: %s", data_api_request)

    start_pulse_id = data_api_request["range"]["startPulseId"]
    stop_pulse_id = data_api_request["range"]["endPulseId"]
    rate_multiplicator = data_api_request.get("rate_multiplicator", 1)

    data_api_request_timestamp = utils.transform_range_from_pulse_id_to_timestamp_new(
        data_api_request)

    channels = [
        channel["name"] for channel in data_api_request_timestamp["channels"]
    ]

    # Format the range as UTC strings, e.g. "2019-12-13T09:00:00.000000Z"
    # (%f gives microsecond precision).
    start = datetime.fromtimestamp(
        float(data_api_request_timestamp["range"]["startSeconds"])
    ).astimezone(pytz.timezone('UTC')).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    end = datetime.fromtimestamp(
        float(data_api_request_timestamp["range"]["endSeconds"])
    ).astimezone(pytz.timezone('UTC')).strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    query = {
        "channels": channels,
        "range": {
            "type": "date",
            "startDate": start,
            "endDate": end
        }
    }

    data_buffer_url = config.DATA_API3_QUERY_ADDRESS
    _logger.debug("Requesting '%s' to output_file %s from %s",
                  query, output_file, data_buffer_url)

    start_time = time()
    try:
        h5.request(query, filename=output_file, baseurl=data_buffer_url,
                   default_backend=config.DATA_BACKEND)
        _logger.info("Data download and writing took %s seconds.",
                     time() - start_time)
    except Exception as e:
        _logger.error("Got exception from data_api3")
        _logger.error(e)

    check_data_consistency(start_pulse_id, stop_pulse_id, rate_multiplicator,
                           channels, output_file)
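# `check_data_consistency` is called above but not defined in this snippet.
# A minimal sketch, assuming rate_multiplicator means "every N-th pulse is
# recorded" and that each channel group carries a "pulse_id" dataset; the
# per-channel layout here is hypothetical, and the real check may compare
# actual pulse ids rather than plain counts.
def check_data_consistency_sketch(start_pulse_id, stop_pulse_id,
                                  rate_multiplicator, channels, output_file):
    import h5py
    # Expected entries per channel over the inclusive pulse-id range.
    expected = (stop_pulse_id - start_pulse_id) // rate_multiplicator + 1
    with h5py.File(output_file, "r") as f:
        for channel in channels:
            if channel not in f:
                _logger.warning("channel %s missing in %s",
                                channel, output_file)
                continue
            node = f[channel]
            # hypothetical layout: a "pulse_id" dataset per channel group
            n = node["pulse_id"].shape[0] if "pulse_id" in node else None
            if n is not None and n != expected:
                _logger.warning("channel %s: %d entries, expected %d",
                                channel, n, expected)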