def run(pid, stations, months, data_path, db_info):
    """Target-station identification main process (one worker of a multi-process run).

    Builds/loads the roadway infra, then for each station in *stations* computes
    its normal function via ``nsrf.get_normal_function`` and logs whether the
    station is a valid target station. DAO sessions are opened once and closed
    at the end; ``autocommit=True`` delegates persistence to the DAO layer.

    Parameters
    ==========
    - pid : process identification for multi-processing
    - stations : station id list
    - months : month list
    - data_path : TICAS data path
    - db_info : DB connection info; when falsy, an already-initialized infra
      is taken from ``ncrtes`` instead

    :type pid: int
    :type stations: list[str]
    :type months: list[(int, int)]
    :type data_path: str
    :type db_info: dict
    :return: None
    """
    if db_info:
        Infra.initialize(data_path)
        infra = Infra.get_infra()
        # only open a new DB connection if this process does not have one yet
        if conn.Session is None:
            conn.connect(db_info)
    else:
        infra = ncrtes.get_infra()

    logger = getLogger(__name__)
    logger.info('starting target station identification')

    wsDA = WinterSeasonDataAccess()
    nfDA = NormalFunctionDataAccess()
    tsDA = TargetStationDataAccess()

    # process start time
    stime = time.time()
    n_stations = len(stations)

    for sidx, st in enumerate(stations):
        station = infra.get_rnode(st)
        # lazy %-args: formatting is skipped when the level is disabled
        logger.info('# PID=%d, SIDX=%d/%d, STATION=%s', pid, sidx, n_stations, st)
        try:
            nf = nsrf.get_normal_function(station, months,
                                          wsDA=wsDA, nfDA=nfDA, tsDA=tsDA,
                                          autocommit=True)
            if nf and nf.is_valid():
                logger.info(' - %s is valid', station.station_id)
            else:
                logger.debug(' - %s is not valid (nf=%s)', station.station_id, nf)
        except Exception as ex:
            # best-effort per-station processing: log and continue with the next one
            logger.warning(tb.traceback(ex, False))

    wsDA.close()
    nfDA.close()
    tsDA.close()

    etime = time.time()
    logger.info('end of target station identification (elapsed time=%s)',
                timeutil.human_time(seconds=(etime - stime)))
        # NOTE(review): this chunk begins mid-function — the enclosing `def`,
        # the loop that binds `tcm_data`, and the definitions of `sheet`,
        # `col`, `data`, `timeline` are above this view; the indentation here
        # is reconstructed and should be confirmed against the full file.
        # Each dataset gets one column: header, per-interval values, then total.
        tcm = sum(tcm_data[1])
        sheet.write_column(row=0, col=col, data=[tcm_data[0]] + tcm_data[1] + [tcm])
        col += 1

    # After all per-dataset columns: one 'Average' column, averaging the
    # datasets row-by-row over the timeline, with the grand total last.
    avg_data = []
    for row in range(len(timeline)):
        tdata = [tcm_data[1][row] for tcm_data in data]
        avg_data.append(sum(tdata) / len(tdata))
    sheet.write_column(row=0, col=col, data=['Average'] + avg_data + [sum(avg_data)])


if __name__ == '__main__':
    # Ad-hoc driver: load the TICAS infra, run the MOE computation on
    # route 'Route I-494 WB' for two morning periods, and write the
    # combined result workbook (cmh.xlsx) into the 'moe' data folder.
    import os
    from pyticas.infra import Infra
    from pyticas import route
    from pyticas import period
    from pyticas.moe import writer

    data_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../../../data'))
    Infra.initialize(data_path)
    infra = Infra.load_infra()
    r = route.load_route_by_name('Route I-494 WB')
    # 5-minute (300 s) intervals, 06:00-09:00 on two consecutive days
    prd1 = period.create_period_from_string('2016-05-17 06:00:00', '2016-05-17 09:00:00', 300)
    prd2 = period.create_period_from_string('2016-05-18 06:00:00', '2016-05-18 09:00:00', 300)
    res1 = run(r, prd1)
    res2 = run(r, prd2)
    res = [res1, res2]
    writer.write_cmh(os.path.join(infra.get_path('moe', create=True), 'cmh.xlsx'), r, res)
        # NOTE(review): this chunk begins mid-function — `wd`, `v`, and `ret`
        # are bound above this view; presumably this appends a parsed field
        # value to the weather-data column named by v[0] — confirm upstream.
        wd.data[v[0]].append(ret)


if __name__ == '__main__':
    # Ad-hoc driver: connect to the RWIS PostgreSQL database, initialize the
    # TICAS infra, and fetch one hour of weather data for a single site.
    from pyticas.infra import Infra
    from pyticas.ttypes import Period

    # SECURITY(review): hard-coded DB host/credentials committed in source —
    # move to a config file or environment variables and rotate the password.
    RWIS_DB_INFO = {
        'engine': 'postgresql',
        'host': '131.212.105.85',
        'port': 5432,
        'db_name': 'rwis',
        'user': '******',
        'passwd': 'natsrl@207'
    }
    conn.connect(RWIS_DB_INFO)
    Infra.initialize('D:/TICAS-NG/data')
    Infra.get_infra()
    # 5-minute (300 s) intervals over a one-hour window
    prd = Period('2012-01-02 07:00:00', '2012-01-02 08:00:00', 300)
    site = get_site_by_id(330009)
    wd = get_weather(site.site_id, prd)

    # Earlier version of this driver, kept commented out by the author:
    # from pyticas.infra import Infra
    #
    # Infra.initialize('./data')
    # Infra.get_infra()
    #
    # prd = Period(datetime.datetime(2013, 5, 2, 23, 50, 0), datetime.datetime(2013, 5, 3, 1, 0, 0), 600)
    # start_time = time.time()
    # wd = get_weather(330045, prd)
    # print("--- retreiving data : %s seconds ---" % (time.time() - start_time))
    # for idx, dt in enumerate(wd.data['DateTime']):