def process(mj, since, till, path_incoming, path_to, force_unpack=False):
    """Scan deaggregated signals for cement bin openings outside production.

    Unpacks the raw data for job mj (unless it is already unpacked under tmp/
    and force_unpack is False), loads the signal name maps from base.ini and
    walks the deaggregated signal records between since and till.  Every
    transition of 'I_Bin1CEMclosed' to 0 that does not fall inside a known
    production interval is recorded together with the cement scale value at
    that moment.

    Returns (unmatched_files, illegal_cement_openings), where each opening is
    a (timestamp, cement_scale_value) pair.
    """
    if force_unpack or not os.path.isdir('tmp/%s' % mj):
        logging.debug('will unpack')
        utils.unpack(mj, since, till, path_incoming, path_to)

    production_times, unmatched_files = get_production_times(path_to, mj)
    logging.debug('production times count: %s' % len(production_times))

    base_ini_fn = '%s/%s/atx300/set/base/base.ini' % (path_incoming, mj)
    base_to_ior, ior_to_base = atxutils.build_signal_maps(base_ini_fn)

    signals_fns = utils.get_signals_files('%s/%s' % (path_incoming, mj), since, till)

    # disabled sanity check: all signal files are expected to be deaggregated
    #for fn in signals_fns:
    #    header = atxsignals.read_header(fn)
    #    assert(header['is_deaggregated'])

    logging.debug('reading signals')
    deaggregated = atxsignals.read_deaggregated(signals_fns, since, till)

    illegal_cement_openings = []
    cem_bin_closed = None
    cem_scale_value = None
    for rec in deaggregated:
        if rec['type'] == 'header':
            continue
        k = rec['k']
        v = rec['v'] if 'v' in rec else rec['avg']
        #v_recalc = rec['v'] * coeffs[rec['k']] - offs[rec['k']]
        if ior_to_base.get(k) == 'Cement_Scale1':
            # remember the last seen cement scale value
            cem_scale_value = v
        elif ior_to_base.get(k) == 'I_Bin1CEMclosed':
            # the closed signal dropped to 0 -> the cement bin was opened
            if v == 0 and cem_bin_closed != 0:
                # an opening is legal only inside a known production interval
                is_illegal = True
                for prod_since, prod_till in production_times:
                    if prod_since <= rec['t'] <= prod_till:
                        is_illegal = False
                        break
                if is_illegal:
                    #logging.debug('%s %s' % (dt, cem_scale_value))
                    illegal_cement_openings.append((rec['t'], cem_scale_value))
            cem_bin_closed = v

    logging.debug('found %d illegal cement openings' % len(illegal_cement_openings))
    return unmatched_files, illegal_cement_openings
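# A minimal usage sketch (hypothetical job name and paths; get_production_times
# and the utils/atxutils/atxsignals helpers are assumed to be importable from
# this project):
#
#   since = datetime.datetime(2020, 1, 1)
#   till = datetime.datetime(2020, 1, 31)
#   unmatched, openings = process('job42', since, till,
#                                 '/data/incoming', 'tmp/job42')
#   for t, scale_value in openings:
#       print(t, scale_value)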
def process(mj, since, till, path, progress_callback=None):
    """Reconstruct batches from the signals recorded under path.

    Loads the plant structure and placement from the atx300 INI files, builds
    the set of trigger signals (silo/bin open and closed signals, scale values,
    mixer closed signal), replays the deaggregated signal stream between since
    and till into events, and groups dosages and discharges into batches.

    If progress_callback is given, it receives dicts with 'message' and
    'progress' keys while the signals are being processed.  Returns the list
    of non-empty batches with human readable times added.
    """
    #if not os.path.isdir('tmp/%s' % mj):
    #    utils.unpack(mj, since, till, path, path_to)

    logging.debug('loading structure and placement')
    if progress_callback:
        progress_callback({'message': 'loading structure and placement'})

    base_ini_fn = '%s/atx300/set/base/base.ini' % path
    parameters_ini_fn = '%s/atx300/set/parameters/parameters.ini' % path
    settings_ini_fn = '%s/atx300/set/settings/settings.ini' % path
    stru = structure.structure_from_base_ini_pre(
        base_ini_fn, parameters_ini_fn, settings_ini_fn)

    placement_ini_fn = '%s/atx300/comm/placemnt/placemnt.ini' % path
    placement = utils.load_placement(placement_ini_fn)

    coeffs, offs = atxsignals.get_coeffs_and_offs(settings_ini_fn)

    # TODO: hack - remap Scale signals to the IORSH_AI_VAL_<nn> analog inputs
    for k, v in stru.items():
        if v['_type'] == 'Scale':
            v['Signal'] = 'IORSH_AI_VAL_%s' % v['Signal'][-2:]

    #import pprint
    #pprint.pprint(stru)
    #pprint.pprint(placement)

    assert stru
    assert placement

    # collect the signals that should trigger events, keyed by signal name
    sig_trigger = {}
    for k, v in stru.items():
        # TODO: solve later - additional water is not handled yet
        if k.startswith('Water'):
            continue
        # TODO: solve later - admixture flowmeters are not handled yet
        if k.startswith('Admixture'):
            continue
        if v['_type'] == 'Silo':
            if v['Signal_Is_Closed'] is not None:
                sig_trigger[v['Signal_Is_Closed']] = v
            if v['Signal_Open'] is not None:
                sig_trigger[v['Signal_Open']] = v
        elif v['_type'] == 'Bin':
            #v['Signal_Is_Closed'] = None  # TODO: hack
            if v['Signal_Is_Closed'] is not None:
                sig_trigger[v['Signal_Is_Closed']] = v
            if v['Signal_Open'] is not None:
                sig_trigger[v['Signal_Open']] = v
        elif v['_type'] == 'Scale':
            if v['Signal'] is not None:
                sig_trigger[v['Signal']] = v
        elif v['_type'] == 'Mixer':
            if v['Signal_Is_Closed'] is not None:
                sig_trigger[v['Signal_Is_Closed']] = v

    logging.debug('trigger signals: %s' % sig_trigger)

    signals_fns = utils.get_signals_files(path, since, till)

    if progress_callback:
        progress_callback({'message': 'reading signals'})

    signal_stream = atxsignals.read_deaggregated(signals_fns, since, till)
    if progress_callback:
        signal_stream = progress(signal_stream, since, till,
                                 lambda x: progress_callback({'progress': x}))
    signal_stream = recreate_analogs(signal_stream, coeffs, offs)

    # turn the raw signal stream into events, then enrich and group them
    events = signal_stream_to_events(signal_stream, stru, sig_trigger, placement)
    events = add_dosages(events)
    events = add_discharges(events)
    events = add_batches(events, stru)

    batches = [e for e in events if e['_type'] == 'batch']
    non_empty_batches = [i for i in batches if i['dosages'] or i['discharges']]
    non_empty_batches = list(map(add_human_times, non_empty_batches))

    if progress_callback:
        # TODO: we actually catch the result so this is not needed
        progress_callback({'message': 'done', 'result': non_empty_batches})

    return non_empty_batches
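# A minimal usage sketch (hypothetical job name, data path and callback; the
# structure/placement and signal helpers referenced above are assumed to be
# available in this module):
#
#   def on_progress(update):
#       if 'progress' in update:
#           print('progress:', update['progress'])
#       elif 'message' in update:
#           print(update['message'])
#
#   batches = process('job42', since, till, '/data/unpacked/job42',
#                     progress_callback=on_progress)
#   print('reconstructed %d batches' % len(batches))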