def json_latest_runs():
    """Return the last N run numbers from the database as JSON.

    Query string parameters:
        limit: maximum number of results returned; defaults to 100.
               Missing or non-integer values fall back to the default.

    Returns a JSON response of the form
        {"results": [{"run": ..., "subruns": ..., "updated": ...}, ...]}
    ordered by descending date_time (most recent first).
    """
    # Flask coerces the query parameter to int and silently falls back
    # to the default when the value is missing or not a valid integer;
    # the original passed the raw string straight to .limit().
    limit = request.args.get('limit', 100, type=int)
    if limit <= 0:
        limit = 100

    # query PostgreSQL database for the latest runs
    db_query = db_session.query(DataQualityRun) \
        .order_by(DataQualityRun.date_time.desc())
    results = db_query.limit(limit)

    json_results = [
        {
            'run': result.run,
            'subruns': result.subruns,
            'updated': result.date_time_updated,
        }
        for result in results
    ]

    return jsonify(results=json_results)
# NOTE(review): this is a near byte-for-byte duplicate of the earlier
# json_latest_runs definition (only dict-literal spacing differed); as the
# later definition it shadows the first. One of the two should be removed,
# or this one renamed/bound to its intended route.
def json_latest_runs():
    """Return the last N run numbers from the database as JSON.

    Query string parameters:
        limit: maximum number of results returned; defaults to 100.
               Missing or non-integer values fall back to the default.

    Returns a JSON response of the form
        {"results": [{"run": ..., "subruns": ..., "updated": ...}, ...]}
    ordered by descending date_time (most recent first).
    """
    # Flask coerces the query parameter to int and silently falls back
    # to the default when the value is missing or not a valid integer;
    # the original passed the raw string straight to .limit().
    limit = request.args.get('limit', 100, type=int)
    if limit <= 0:
        limit = 100

    # query PostgreSQL database for the latest runs
    db_query = db_session.query(DataQualityRun) \
        .order_by(DataQualityRun.date_time.desc())
    results = db_query.limit(limit)

    json_results = [
        {
            'run': result.run,
            'subruns': result.subruns,
            'updated': result.date_time_updated,
        }
        for result in results
    ]

    return jsonify(results=json_results)
def update():
    """Refresh the Redis time-series cache from the PostgreSQL DQM tables.

    Builds `number_bins` time bins of width `bin_width` ending `delay`
    seconds before now, bins the DataQualitySubRun rows that fall inside
    that window, computes pedestal deviations against the stored
    references, and pushes every series to Redis under `key_prefix`.
    Finally stores the DAQ uptime fraction under `daq_uptime_key`.

    No parameters; all configuration comes from module-level names
    (delay, bin_width, number_bins, key_prefix, parameters, ...).
    """
    date_time = datetime.now() - timedelta(seconds=delay)
    # Unix timestamp of the (delayed) reference time; '%s' is a
    # platform-dependent strftime extension (works on Linux/glibc).
    # http://stackoverflow.com/questions/8542723/change-datetime-to-unix-time-stamp-in-python
    timestamp = date_time.strftime('%s')

    #/////////////////////////////////////////////////////////
    # redis client instance
    #/////////////////////////////////////////////////////////
    redis = Redis(host='lariat-daq03', port=6379)

    # seed the null array used for an empty horizon, once
    if not redis.exists(null_key):
        # send commands in a pipeline to save on round-trip time
        p = redis.pipeline()
        p.delete(null_key)
        p.rpush(null_key, *null_list)
        p.execute()

    # check if the TPC pedestal mean reference exists
    tpc_pedestal_reference_exists = redis.exists(
        tpc_pedestal_mean_reference_key)
    if tpc_pedestal_reference_exists:
        tpc_pedestal_mean_reference = redis.lrange(
            tpc_pedestal_mean_reference_key, 0, -1)

    #/////////////////////////////////////////////////////////
    # create bins for time series
    #/////////////////////////////////////////////////////////
    time_bins = date_time_bins(date_time, bin_width, number_bins)
    date_time_start, date_time_stop = time_bins[0], time_bins[-1]

    # send commands in a pipeline to save on round-trip time
    p = redis.pipeline()
    time_bins_key = key_prefix + 'time_bins'
    p.delete(time_bins_key)
    p.rpush(time_bins_key, *time_bins)
    p.execute()

    #/////////////////////////////////////////////////////////
    # query PostgreSQL database for sub-runs inside the window
    #/////////////////////////////////////////////////////////
    query = db_session.query(DataQualitySubRun) \
        .order_by(DataQualitySubRun.date_time.desc()) \
        .filter(DataQualitySubRun.date_time.between(
            date_time_start, date_time_stop))
    results = query.all()

    #/////////////////////////////////////////////////////////
    # initialize bins for time series: {parameter: {bin: value}}
    #/////////////////////////////////////////////////////////
    def empty_bins():
        # one empty (None-filled) slot per time bin
        return {time_bin: None for time_bin in time_bins}

    parameters_dict = {
        parameter: empty_bins() for parameter in parameters}
    array_parameters_dict = {
        parameter: empty_bins() for parameter in array_parameters}
    tpc_pedestal_deviation_dict = {
        parameter: empty_bins()
        for parameter in tpc_pedestal_deviation_parameters}
    caen_pedestal_deviation_dict = {
        parameter: empty_bins()
        for parameter in caen_pedestal_deviation_parameters}

    # CAEN boards whose pedestal deviation is measured against a fixed
    # per-digitizer reference value (replaces four copy-pasted branches)
    caen_references = {
        'caen_board_8_pedestal_mean': v1751_pedestal_reference,
        'caen_board_9_pedestal_mean': v1751_pedestal_reference,
        'caen_board_7_pedestal_mean': v1740_pedestal_reference,
        'caen_board_24_pedestal_mean': v1740b_pedestal_reference,
    }

    #/////////////////////////////////////////////////////////
    # place the results into the appropriate time bin
    #/////////////////////////////////////////////////////////
    for result in results:
        time_bin = round_time(result.date_time, bin_width)
        if time_bin not in time_bins:
            continue

        # scalar parameters are stored directly; list-valued attributes
        # are handled through array_parameters_base below
        for parameter in parameters:
            attr = getattr(result, parameter)
            if not isinstance(attr, list):
                parameters_dict[parameter][time_bin] = attr

        # array parameters: one series entry per selected channel
        # (iteritems() was Python-2-only; items() works in 2 and 3)
        for parameter_base, channel_indices in \
                array_parameters_base.items():
            array = getattr(result, parameter_base)
            for channel_index in channel_indices:
                channel = channel_index + \
                    array_parameters_channel_offset[parameter_base]
                parameter = parameter_base + '_channel_' + str(channel)
                value = array[channel_index]
                array_parameters_dict[parameter][time_bin] = value

                # TPC pedestal deviation w.r.t. the stored reference;
                # value <= 0 is treated as "no valid measurement"
                if (parameter_base == 'tpc_pedestal_mean' and
                        tpc_pedestal_reference_exists and value > 0):
                    parameter = 'tpc_pedestal_deviation_channel_' + \
                        str(channel)
                    tpc_pedestal_deviation_dict[parameter][time_bin] = \
                        value - \
                        float(tpc_pedestal_mean_reference[channel_index])

                # CAEN pedestal deviation w.r.t. the board's fixed
                # digitizer reference
                if parameter_base in caen_references and value > 0:
                    parameter = parameter_base.replace(
                        '_pedestal_mean',
                        '_pedestal_deviation_channel_' + str(channel))
                    caen_pedestal_deviation_dict[parameter][time_bin] = \
                        value - caen_references[parameter_base]

    #/////////////////////////////////////////////////////////
    # sort each parameter's values by time bin (datetime keys)
    #/////////////////////////////////////////////////////////
    def sorted_values(binned):
        # values ordered by their time-bin keys; replaces the four
        # copy-pasted sorted(zip(keys, values)) blocks
        return [value for _, value in sorted(binned.items())]

    parameter_values = {
        parameter: sorted_values(parameters_dict[parameter])
        for parameter in parameters}
    array_parameter_values = {
        parameter: sorted_values(array_parameters_dict[parameter])
        for parameter in array_parameters}
    tpc_pedestal_deviation_parameter_values = {
        parameter: sorted_values(tpc_pedestal_deviation_dict[parameter])
        for parameter in tpc_pedestal_deviation_parameters}
    caen_pedestal_deviation_parameter_values = {
        parameter: sorted_values(caen_pedestal_deviation_dict[parameter])
        for parameter in caen_pedestal_deviation_parameters}

    def push_series(pipeline, values_by_parameter, parameter_names):
        # replace each parameter's Redis list, batched in one pipeline
        # to save on round-trip time
        for parameter in parameter_names:
            parameter_key = key_prefix + parameter
            pipeline.delete(parameter_key)
            pipeline.rpush(parameter_key, *values_by_parameter[parameter])
        pipeline.execute()

    # first pipeline also records the refresh timestamp
    p = redis.pipeline()
    p.set(key_prefix + 'timestamp', timestamp)
    push_series(p, parameter_values, parameters)
    push_series(redis.pipeline(), array_parameter_values,
                array_parameters)
    push_series(redis.pipeline(), tpc_pedestal_deviation_parameter_values,
                tpc_pedestal_deviation_parameters)
    push_series(redis.pipeline(), caen_pedestal_deviation_parameter_values,
                caen_pedestal_deviation_parameters)

    # DAQ uptime over the last bin_width * number_bins seconds: the
    # fraction of time bins that have at least one sub-run recorded
    subrun_values = parameter_values['subrun']
    daq_minute_count = sum(1 for subrun in subrun_values if subrun)
    daq_uptime = float(daq_minute_count) / len(subrun_values)
    redis.set(daq_uptime_key, daq_uptime)