def __init__(self, poste_id: int, obs_id: int, agg_niveau: str, start_dt_agg_utc: datetime, is_obs_date: bool = False, b_need_to_sum_duration: bool = False):
    """
    Init a new AggMeteor object

    poste_id: id of the Poste the aggregation belongs to
    obs_id: id of the source observation
    agg_niveau: 'H', 'D', 'M', 'Y', 'A', 'HT', 'DT', 'MT', 'YT', 'AT'
    start_dt_agg_utc: start date for the aggregation level, will be rounded
    is_obs_date: True when start_dt_agg_utc is an observation stop_dat
    b_need_to_sum_duration: when False, the duration is not summed (used for OMM updates)
    """
    self.agg_niveau = agg_niveau
    self.is_tmp = isTmpLevel(agg_niveau)
    self.obs_id = obs_id
    self.b_need_to_sum_duration = b_need_to_sum_duration

    my_start_date = calcAggDate(agg_niveau, start_dt_agg_utc, 0, is_obs_date)
    # my_start_date = date_to_str(my_start_date)

    agg_object = getAggTable(agg_niveau)
    existing_row = agg_object.objects.filter(poste_id=poste_id, start_dat=my_start_date).first()
    if existing_row is not None:
        self.data = existing_row
    else:
        self.data = agg_object(poste_id=poste_id, start_dat=my_start_date, duration_sum=0, duration_max=0, j={})
        self.data.duration_max = getAggDuration(agg_niveau, my_start_date)

    # histo data only on hourly/daily aggregations
    if self.agg_niveau[0] in ('A', 'Y', 'M'):
        self.j_ori = {}
        self.duration_ori = 0
    else:
        # save the original values so that agg_histo can be generated later
        self.j_ori = self.data.j.copy()
        self.duration_ori = self.data.duration_sum
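# Hedged usage sketch (not part of the source): how an AggMeteor row could be
# fetched or created for an hourly aggregation. The poste_id/obs_id values and
# the helper name _example_load_agg_hour are illustrative assumptions only.
def _example_load_agg_hour(stop_dat_utc):
    # is_obs_date=True: stop_dat_utc is an observation stop date and is rounded
    # to the start of the hourly aggregation by calcAggDate
    agg_h = AggMeteor(poste_id=1, obs_id=0, agg_niveau='H',
                      start_dt_agg_utc=stop_dat_utc, is_obs_date=True)
    # agg_h.data is either the existing agg_hour row or a fresh, empty one
    return agg_h.data.start_dat, agg_h.data.duration_max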
def loadInObs(self, poste_metier, my_measure: json, json_file_data: json, measure_idx: int, m_agg_j: json, obs_meteor: ObsMeteor, delta_values: json, trace_flag: bool = False):
    """
    processObservation

    Load the json data into the Observation table, load max/min,
    and return the delta_values to be added to all aggregations.
    Some methods are implemented here, some in the inherited class.
    """
    # load field if defined in json
    src_key = my_measure['src_key']
    target_key = my_measure['target_key']

    # get exclusion, and return if value is nullified
    exclusion = poste_metier.exclusion(my_measure['type_i'])

    # to check later...
    # if shouldNullify(exclusion, src_key) is True:
    #     return

    my_values = {}

    self.loadValuesFromCurrent(my_measure, json_file_data, measure_idx, src_key, target_key, exclusion, my_values, obs_meteor.data.stop_dat, trace_flag)

    if len(my_values) == 0 and len(m_agg_j) == 0:
        return

    if isFlagged(my_measure['special'], MeasureProcessingBitMask.NotAllowedInCurrent) is True:
        return

    # load Json data in dv

    # update duration & agg_start_dat in obs if needed
    if obs_meteor.data.duration == 0 and len(my_values) > 1:
        tmp_duration = delta_values.get(target_key + '_du')
        obs_meteor.data.duration = tmp_duration
        # compute our agg_h.start_dat for faster retrieval of the observations of a given agg_h.start_dat
        obs_meteor.data.agg_start_dat = calcAggDate('H', obs_meteor.data.stop_dat, tmp_duration, True)

        # double check that the durations are compatible
        if obs_meteor.data.duration != tmp_duration:
            raise Exception('loadObsDatarow', 'incompatible durations -> in table obs: ' + str(obs_meteor.data.duration) + ', in json: ' + str(tmp_duration))

    # load data from dv to obs
    self.loadDataInObs(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)

    # check max/min that need to be regenerated later
    self.checkMaxMinToRegenerate(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)

    # load Max/Min in obs, and in dv
    self.loadMaxMinInObs(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)
    return
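# Hedged sketch (not from the source): the my_measure keys that loadInObs
# actually reads. The example values are illustrative assumptions only.
_example_measure = {
    'src_key': 'rain',       # key read from the incoming json observation
    'target_key': 'rain',    # key written into obs.data.j and delta_values
    'type_i': 'rain',        # instrument type used to look up exclusions
    'special': 0,            # bitmask checked against MeasureProcessingBitMask
}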
def load_aggregations_in_array(self, my_measure, anAgg: str, aggregations, m_stop_dat: datetime):
    """load the array of aggregations used by the calculus:
    [0] -> main deca (data)
    [1] -> deca_max
    [2] -> deca_min
    """
    agg_decas = []
    deca_hours = []

    # resolve the main deca, then the max/min decas (falling back on the main one)
    if my_measure.get("hour_deca") is None:
        main_deca = 0
        deca_hours.append(0)
    else:
        main_deca = my_measure["hour_deca"]
        deca_hours.append(main_deca)

    if my_measure.get("deca_max") is None:
        deca_hours.append(main_deca)
    else:
        deca_hours.append(my_measure["deca_max"])

    if my_measure.get("deca_min") is None:
        deca_hours.append(main_deca)
    else:
        deca_hours.append(my_measure["deca_min"])

    for deca_hour in deca_hours:
        a_start_dat_level = calcAggDate(anAgg, m_stop_dat, deca_hour, True)
        # find the already loaded aggregation matching this level and start date
        b_found = False
        for my_agg in aggregations:
            if my_agg.agg_niveau == anAgg and my_agg.data.start_dat == a_start_dat_level:
                agg_decas.append(my_agg)
                b_found = True
                break
        if b_found is False:
            raise Exception(
                "aggCompute::loadAggregations",
                "aggregation not loaded deca: " + str(deca_hour) + ", date: " + str(a_start_dat_level),
            )
    return agg_decas
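# Hedged sketch (not from the source): for a measure declaring hour_deca=0,
# deca_max=6 and deca_min=18, the deca_hours list is built as [0, 6, 18], so
# agg_decas comes back ordered [main, max, min]. The values are assumptions
# chosen only to illustrate the fallback on the main deca.
def _example_deca_order():
    my_measure = {'hour_deca': 0, 'deca_max': 6, 'deca_min': 18}
    main_deca = my_measure.get('hour_deca', 0)
    deca_hours = [main_deca,
                  my_measure.get('deca_max', main_deca),
                  my_measure.get('deca_min', main_deca)]
    assert deca_hours == [0, 6, 18]
    return deca_hours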
def aggregations(self, obs_id: int, start_date_utc: datetime, is_measure_date: bool = False, is_tmp: bool = None) -> json:
    """
    get_agg

    Return the array of aggregations needed by our calculus functions.
    An empty agg_xxxx row is loaded when it does not exist yet.
    """
    # determine all the deca hours needed to process the measures at the given date
    # m_duration = self.data.du
    needed_hours = []
    ti_all = AllTypeInstruments()
    for an_instru in ti_all.get_all_instruments():
        for a_measure in an_instru['object'].measures:
            for deca_type in ['hour_deca', 'deca_max', 'deca_min']:
                hour_deca = int(a_measure[deca_type])
                if hour_deca not in needed_hours:
                    needed_hours.append(hour_deca)

    # now load the needed aggregations
    ret = []
    for a_needed_hour in needed_hours:
        b_need_to_sum_duration = False
        if a_needed_hour == 0:
            b_need_to_sum_duration = True
        for agg_niveau in getAggLevels(is_tmp):
            # is_measure_date only used in agg_hour
            tmp_dt = calcAggDate(agg_niveau, start_date_utc, a_needed_hour, is_measure_date)
            already_loaded = False
            for a_ret in ret:
                if a_ret.data.start_dat == tmp_dt and a_ret.agg_niveau == agg_niveau:
                    already_loaded = True
                    break
            if already_loaded is False:
                ret.append(AggMeteor(self.data.id, obs_id, agg_niveau, tmp_dt, False, b_need_to_sum_duration))
    return ret
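# Hedged usage sketch (not part of the source): loading every aggregation row
# needed to process one observation. It assumes the method lives on a
# PosteMetier-like object ("poste" below); the argument values are illustrative.
def _example_load_aggregations(poste, obs_id, stop_dat_utc):
    # one AggMeteor per (agg level, deca hour) pair, duplicates skipped
    aggs = poste.aggregations(obs_id, stop_dat_utc, is_measure_date=True, is_tmp=True)
    return [(an_agg.agg_niveau, an_agg.data.start_dat) for an_agg in aggs]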
def __init__(self, poste_id: int, stop_dt_utc: datetime, is_tmp: bool = None):
    """Init a new ObsMeteor object"""
    # todo: block if my_datetime_utc > previous dat + duration
    self.is_tmp = is_tmp
    myObsObj = getObservationTable(is_tmp)
    existing_row = myObsObj.objects.filter(poste_id=poste_id, stop_dat=stop_dt_utc).first()
    if existing_row is not None:
        self.data = existing_row
    else:
        agg_start_dat = calcAggDate('H', stop_dt_utc, 0, True)
        self.data = myObsObj(poste_id=poste_id, stop_dat=stop_dt_utc, duration=0, agg_start_dat=agg_start_dat, j={}, j_agg={})
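# Hedged usage sketch (not part of the source): fetch-or-create the observation
# row of a poste for a given stop date. The poste_id value is an assumption.
def _example_load_obs(stop_dt_utc):
    obs = ObsMeteor(poste_id=1, stop_dt_utc=stop_dt_utc, is_tmp=True)
    # obs.data.duration == 0 means the row was just created (or never filled)
    return obs.data.agg_start_dat, obs.data.duration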
def run_test(self, name, option: int = 255):
    """
    Run all tests in our test suite

    option (bit mask):
        1: load Obs
        3: load Aggregations
        7: load extremeFix
    """
    try:
        self.tracer = Telemetry.Start("calculus", name)
        pid = PosteMetier.getPosteIdByMeteor('BBF015')
        if pid is None:
            p = PosteMetier(1)
            p.data.meteor = 'BBF015'
            p.data.fuseau = 4
            p.save()

        for a_test in self.my_test_suite:
            if a_test['name'] != name:
                continue

            # prepare our full json
            my_json = JsonPlus().loads("""
                {
                    "meteor": "BBF015",
                    "info": {"blabla": "blabla"},
                    "data": []
                }
            """)
            j_data = a_test['data']
            my_json['data'] = j_data

            # remove any existing data
            self.calc.delete_obs_agg(True)

            if (option & 1) == 1:
                my_params = {
                    "param": {
                        "json": my_json,
                        "trace_flag": False,
                        "delete": False,
                        "is_tmp": True,
                        "validation": False,
                        "filename": 'test_json',
                    }
                }
                self.calc_obs.LoadJsonFromSvc(my_params)
            if (option & 2) == 2:
                self.calc_agg._computeAggreg(True, False)

            error_msg = []
            # load the list of result sets to check
            for a_result in a_test['results']:
                b_compute_agg_date = False
                if a_result['t'] == "A" or a_result['t'] == "AT":
                    test_dat = "1900-12-31T00:00:00"
                elif 'dat' in a_result:
                    test_dat = a_result['dat']
                elif 'idx' in a_result:
                    test_dat = j_data[a_result['idx']]['stop_dat']
                    b_compute_agg_date = True
                elif 'count' in a_result:
                    test_dat = '1900-12-31T00:00:00+00:00'
                else:
                    raise Exception('calTestEngine', 'wrong test JSON file')

                if a_result["t"] == "O":
                    my_row = ObsMeteor(pid, test_dat, True)
                else:
                    # if idx is given, we need to compute the aggregation date for the level
                    if b_compute_agg_date is True:
                        test_dat = calcAggDate('H', test_dat, 0, True)
                    hour_deca = 0
                    if 'hour_deca' in a_result:
                        hour_deca = a_result['hour_deca']
                    test_dat = calcAggDate(a_result["t"], test_dat, hour_deca, False)
                    my_row = AggMeteor(pid, a_result['t'], test_dat)

                # if a_result['t'] == "O":
                #     t.LogDebug('obs: ' + str(my_row.data.id) + ", dat: " + str(my_row.data.stop_dat) + ", j: " + JsonPlus().dumps(my_row.data.j))
                # else:
                #     t.LogDebug('agg_' + a_result['t'] + ': ' + str(my_row.data.id) + ", dat: " + str(my_row.data.start_dat) + ", j: " + JsonPlus().dumps(my_row.data.j))

                if 'count' in a_result:
                    stop_dat_mask = ''
                    if 'stop_dat_mask' in a_result:
                        stop_dat_mask = a_result['stop_dat_mask']
                    if a_result['t'] == 'O':
                        my_count = my_row.count(pid, stop_dat_mask, True)
                    else:
                        my_count = my_row.count(a_result['t'], pid, stop_dat_mask)
                    assert my_count == a_result['count']
                    continue

                # check the result keys against the stored json
                for key, expected in a_result.items():
                    if key == 't' or key == 'idx' or key == 'dat':
                        continue
                    if expected != my_row.data.j[key]:
                        err_txt = "t: " + a_result['t'] + ', key: ' + key
                        err_txt = err_txt + ' -> ' + str(my_row.data.j[key]) + ' should be ' + str(expected)
                        t.logError("error: " + str(err_txt))
                        error_msg.append(err_txt)

            assert len(error_msg) == 0
    except Exception as inst:
        t.LogDebug(inst.with_traceback(None))
        # force the test to fail when an unexpected exception occurred
        assert "error in " == " json file"
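# Hedged sketch (not from the source): the shape of a my_test_suite entry that
# run_test understands. The measure names and values are illustrative
# assumptions; only the 'name'/'data'/'results' structure and the 't', 'dat',
# 'idx', 'count' and 'hour_deca' keys come from the code above.
_example_test_entry = {
    'name': 'rain_basic',
    'data': [
        # raw json observations, as they would appear in the "data" array
        {'stop_dat': '2021-01-01T01:00:00+00:00'},
    ],
    'results': [
        # 't': 'O' checks the obs row, 'H'/'D'/'M'/'Y'/'A' check aggregations;
        # 'idx' points at the data entry whose stop_dat selects the row to check,
        # every other key is compared against my_row.data.j[key]
        {'t': 'O', 'idx': 0, 'rain_sum': 1.2},
        {'t': 'H', 'idx': 0, 'rain_sum': 1.2},
    ],
}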
def test_calcRealAggHourDate():
    # check that our computation matches the reference values computed in Excel
    results = [
        {'t': 5,   'h': '2020-12-31 22:00:00+04:00', 'd': '2020-12-31 00:00:00+04:00', 'm': '2020-12-01 00:00:00+04:00', 'y': '2020-01-01 00:00:00+04:00'},
        {'t': 10,  'h': '2020-12-31 22:00:00+04:00', 'd': '2020-12-31 00:00:00+04:00', 'm': '2020-12-01 00:00:00+04:00', 'y': '2020-01-01 00:00:00+04:00'},
        {'t': 70,  'h': '2020-12-31 23:00:00+04:00', 'd': '2020-12-31 00:00:00+04:00', 'm': '2020-12-01 00:00:00+04:00', 'y': '2020-01-01 00:00:00+04:00'},
        {'t': 75,  'h': '2020-12-31 23:00:00+04:00', 'd': '2020-12-31 00:00:00+04:00', 'm': '2020-12-01 00:00:00+04:00', 'y': '2020-01-01 00:00:00+04:00'},
        {'t': 80,  'h': '2021-01-01 00:00:00+04:00', 'd': '2021-01-01 00:00:00+04:00', 'm': '2021-01-01 00:00:00+04:00', 'y': '2021-01-01 00:00:00+04:00'},
        {'t': 85,  'h': '2021-01-01 00:00:00+04:00', 'd': '2021-01-01 00:00:00+04:00', 'm': '2021-01-01 00:00:00+04:00', 'y': '2021-01-01 00:00:00+04:00'},
        {'t': 90,  'h': '2021-01-01 00:00:00+04:00', 'd': '2021-01-01 00:00:00+04:00', 'm': '2021-01-01 00:00:00+04:00', 'y': '2021-01-01 00:00:00+04:00'},
        {'t': 135, 'h': '2021-01-01 00:00:00+04:00', 'd': '2021-01-01 00:00:00+04:00', 'm': '2021-01-01 00:00:00+04:00', 'y': '2021-01-01 00:00:00+04:00'},
        {'t': 140, 'h': '2021-01-01 01:00:00+04:00', 'd': '2021-01-01 00:00:00+04:00', 'm': '2021-01-01 00:00:00+04:00', 'y': '2021-01-01 00:00:00+04:00'},
    ]

    dt = str_to_date('2020-12-31T22:45:00')
    sum_duration = 0
    for result in results:
        # compute the delta of duration
        duration = int(result['t']) - sum_duration
        # sum the duration to allow to compute delta_duration
        sum_duration += duration
        # we got an end date in our json
        end_date = dt + datetime.timedelta(minutes=int(result['t']))

        # compute the aggregation dates
        aggh_dt = calcAggDate('H', end_date, 0, True)
        aggd_dt = calcAggDate('D', end_date, 0, True)
        aggd_dt2 = calcAggDate('D', aggh_dt, 0)
        aggm_dt = calcAggDate('M', end_date, 0, True)
        aggm_dt2 = calcAggDate('M', aggh_dt, 0)
        aggy_dt = calcAggDate('Y', end_date, 0, True)
        aggy_dt2 = calcAggDate('Y', aggh_dt, 0)

        # check the result; the duration prefix tells us which step failed
        prefix = 't' + str(sum_duration) + '->'
        assert prefix + str(aggh_dt) == prefix + result['h']
        assert prefix + str(aggd_dt) == prefix + result['d']
        assert prefix + str(aggd_dt2) == prefix + result['d']
        assert prefix + str(aggm_dt) == prefix + result['m']
        assert prefix + str(aggm_dt2) == prefix + result['m']
        assert prefix + str(aggy_dt) == prefix + result['y']
        assert prefix + str(aggy_dt2) == prefix + result['y']
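# Hedged sketch (not from the source): what the hourly rounding checked above
# amounts to for an observation stop date, expressed with the standard library
# only. calcAggDate itself also handles decas and the poste time zone, which
# this simplified helper ignores; it only mirrors the expectations in the test
# data (an observation ending exactly on the hour belongs to the previous
# hourly bucket, otherwise to the hour it ends in).
import datetime

def _example_round_to_hour_start(end_date: datetime.datetime) -> datetime.datetime:
    if end_date.minute == 0 and end_date.second == 0 and end_date.microsecond == 0:
        return end_date - datetime.timedelta(hours=1)
    return end_date.replace(minute=0, second=0, microsecond=0)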