def load_era(var, year=None, STANDARDISE_COORDS=True, NORMALISE_TO_DATE_ONLY=True, ROLL_LON=False):
    """
    Load an ERA-Interim daily data array (xarray.DataArray) from the JASMIN
    group workspace.

    Var list = ['T2', 't2max', 't2min']
    Name in array = ['T2', 'MX2T', 'MN2T'], just call by da[da.name]
    # 0: surface temperature, T2_1979.nc,    1979-2016
    # 1: max temperature,     t2max_1979.nc, 1979-2017
    # 2: min temperature,     t2min_1979.nc, 1979-2017

    Parameters
    ----------
    var : str
        Variable directory / file prefix, e.g. 'T2', 't2max', 't2min'.
    year : int or str, optional
        Load only this year's file; when None, all years are opened lazily
        via xr.open_mfdataset.
    STANDARDISE_COORDS, NORMALISE_TO_DATE_ONLY, ROLL_LON : bool
        Forwarded unchanged to standardise_da_coords.

    Returns
    -------
    xarray.DataArray
        The main variable, squeezed of length-1 dimensions.
    """
    tic()  # Measure time elapsed to load
    # path_parent = '/group_workspaces/jasmin4/bas_climate/data/ecmwf/era-interim/day/'
    path_parent = '/gws/nopw/j04/bas_climate/data/ecmwf1/era-interim/day/'
    # path_parent = '/group_workspaces/jasmin4/bas_climate/data/ecmwf/era5'
    if year is None:
        # Get all years
        path = path_parent + var + '/*.nc'
        # Rounding coordinates for ERA-Interim as different years have slightly
        # different coordinate values below 2 decimals
        # https://github.com/pydata/xarray/blob/aabda43c54c6336dc62cc9ec3c2050c9c656cff6/xarray/backends/api.py
        ds = xr.open_mfdataset(path, preprocess=round_coords)
    else:
        # Get specific year
        fname = path_parent + var + '/' + var + '_' + str(year) + '.nc'
        ds = xr.open_dataset(fname)
    # Extract main data array from dataset.
    # NOTE(review): xarray.Dataset has no `.name` attribute in current xarray —
    # `ds[ds.name]` looks like it was meant to be `ds[var]` (or the variable's
    # in-file name); confirm this path actually executes.
    da = ds[ds.name]
    da = standardise_da_coords(da, STANDARDISE_COORDS=STANDARDISE_COORDS, NORMALISE_TO_DATE_ONLY=NORMALISE_TO_DATE_ONLY, ROLL_LON=ROLL_LON)
    toc()
    return da.squeeze()
def convert_to_datetime64(da, verbose=True):
    """
    ! ONLY WORKS FOR 1D DATA (i.e. ONE GRID LOCATION)

    Convert the time axis of `da` to standard numpy datetime64 by building a
    daily calendar from the first to the last year present, and interpolating
    each year's values onto that calendar whenever the year's length differs
    from the real (365/366-day) calendar — e.g. for non-standard model
    calendars.

    Parameters
    ----------
    da : xarray.DataArray
        1-D (time) array with scalar 'lon'/'lat' coordinates; its time values
        must expose `.year` (cftime-like) if not already datetime64.
    verbose : bool
        Print per-year progress and timing for years that need interpolation.

    Returns
    -------
    xarray.DataArray
        New array on a daily datetime64 time axis, attrs copied from `da`.
    """
    if type(da.time.values[0]) == np.datetime64:
        # Already in datetime64 format — nothing to do.
        #print("Already in datetime64 format.")
        return da
    print("Converting to datetime64... (retrieving all values and interpolating if necessary, may take a few mins)")
    start_year = da.time.values[0].year
    end_year = da.time.values[-1].year
    # Create new data array: one entry per real calendar day, Jan 1 of the
    # first year through Dec 31 of the last year.
    times = np.arange(str(start_year), str(end_year + 1), dtype='datetime64[D]')
    # new_da = xr.DataArray(np.zeros(len(times)), coords=[('time', times)])
    new_da = xr.DataArray(np.zeros(len(times)), coords=[('time', times)], attrs=da.attrs)
    # Carry over the scalar location coordinates.
    new_da['lon'] = da.coords['lon'].values
    new_da['lat'] = da.coords['lat'].values
    for yr in np.arange(start_year, end_year + 1):
        if len(da.sel(time=str(yr))) != len(new_da.sel(time=str(yr))):
            # Year lengths differ: interpolate the original year onto the
            # real-calendar length (365 or 366 days).
            if verbose:
                print("   Processing year " + str(yr) + "/" + str(end_year))
            tic()
            leapyear = calendar.isleap(yr)
            size = 365 if not leapyear else 366
            arr = da.sel(time=str(yr)).values
            arr_new = helper.interp1d(arr, size)
            if verbose:
                toc()
        else:
            # Same length — copy values straight across.
            arr_new = da.sel(time=str(yr)).values
        # Replace array values with new data: locate this year's slice in the
        # full daily index via argmax on the boolean match (first True).
        ind_start = (new_da.indexes['time'] == pd.Timestamp(str(yr) + '-01-01')).argmax()
        ind_end = (new_da.indexes['time'] == pd.Timestamp(str(yr) + '-12-31')).argmax()
        new_da[ind_start:ind_end + 1] = arr_new
    #print("Done")
    return new_da
def tag2(tagid, begin_time, end_time, timestep, mode, utc, show, plot):
    """Parse user friendly tag query input and assemble wincc tag query.

    Builds the query, optionally just prints it (``show``), otherwise
    executes it against the configured WinCC host, prints the fetched
    records and optionally plots them (``plot``).
    """
    # Default the end time to "now" when a timestep is given without one.
    if timestep and not end_time:
        end_time = datetime_to_str_without_ms(datetime.now())

    query = tag_query_builder(tagid, begin_time, end_time, timestep, mode, utc)
    if show:
        print(query)
        return

    toc = tic()  # elapsed-time closure; toc() returns seconds since here
    w = None  # ensure the name exists for the finally clause
    try:
        w = wincc(host_info.address, host_info.database)
        w.connect()
        w.execute(query)
        records = w.create_tag_records()
        print("Fetched data in {time}.".format(time=round(toc(), 3)))
        # print(tags)
        # tags.plot()
        for record in records:
            print(record)
        if plot:
            plot_tag_records(records)
    except Exception as e:
        print(e)
        print(traceback.format_exc())
    finally:
        # BUG FIX: previously `w.close()` raised NameError when the wincc()
        # constructor itself failed (w never bound), masking the real error.
        if w is not None:
            w.close()
def tag(tagid, begin_time, end_time, timestep, mode, utc, show):
    """Parse user friendly tag query and assemble userunfriendly wincc query.

    Builds the query, optionally just prints it (``show``), otherwise
    executes it and prints the logged tag values.
    """
    query = tag_query_builder(tagid, begin_time, end_time, timestep, mode, utc)
    if show:
        print(query)
        return

    toc = tic()  # elapsed-time closure; toc() returns seconds since here
    w = None  # ensure the name exists for the finally clause
    try:
        w = wincc(host_info.address, host_info.database)
        w.connect()
        w.execute(query)
        if w.rowcount():
            print_tag_logging(w.fetchall())
            # for rec in w.fetchall():
            #    print rec
        print("Fetched data in {time}.".format(time=round(toc(), 3)))
    except Exception as e:
        print(e)
        print(traceback.format_exc())
    finally:
        # BUG FIX: previously `w.close()` raised NameError when the wincc()
        # constructor itself failed (w never bound), masking the real error.
        if w is not None:
            w.close()
def tagid_by_name(tagname):
    """Search hosts db for tag entries matching the given name.

    Prints each matching (TLGTAGID, VARNAME) row.
    NOTE(review): the docstring promised "Return tagid" but the function
    returns nothing — callers only get printed output; confirm intent.
    """
    mssql_conn = None  # ensure the name exists for the finally clause
    try:
        toc = tic()  # elapsed-time closure; toc() returns seconds since here
        mssql_conn = mssql(host_info.address,
                           strip_R_from_db_name(host_info.database))
        mssql_conn.connect()
        # SECURITY: tagname is interpolated directly into the SQL string —
        # injection risk; use a parameterized query if the driver supports it.
        mssql_conn.execute("SELECT TLGTAGID, VARNAME FROM PDE#TAGs WHERE "
                           "VARNAME LIKE '%{name}%'".format(name=tagname))
        if mssql_conn.rowcount():
            for rec in mssql_conn.fetchall():
                # BUG FIX: was a Python-2-only `print rec` statement,
                # inconsistent with the print() calls in this function.
                print(rec)
            print("Fetched data in {time}.".format(time=round(toc(), 3)))
    except Exception as e:
        print(e)
    finally:
        # BUG FIX: previously `mssql_conn.close()` raised NameError when the
        # mssql() constructor failed before binding the name.
        if mssql_conn is not None:
            mssql_conn.close()
def operator_messages(begin_time, end_time, text, utc, show):
    """Query db for operator messages.

    Builds the query, optionally just prints it (``show``), otherwise
    executes it and prints the messages.
    """
    query = om_query_builder(eval_datetime(begin_time),
                             eval_datetime(end_time), text, utc)
    if show:
        print(query)
        return

    w = None  # ensure the name exists for the finally clause
    try:
        toc = tic()  # elapsed-time closure; toc() returns seconds since here
        w = wincc(host_info.address, host_info.database)
        w.connect()
        w.execute(query)
        w.print_operator_messages()
        print("Fetched data in {time}.".format(time=round(toc(), 3)))
    except WinCCException as e:
        print(e)
        print(traceback.format_exc())
    finally:
        # BUG FIX: previously `w.close()` raised NameError when the wincc()
        # constructor itself failed (w never bound), masking the real error.
        if w is not None:
            w.close()
def tag2(tagid, begin_time, end_time, timestep, mode, utc, show, plot, outfile, outfile_col_name, outfile_time_zone):
    """Parse user friendly tag query input and assemble wincc tag query.

    Builds the query, optionally just prints it (``show``); otherwise
    executes it, then either writes the records as CSV to ``outfile`` or
    prints (and optionally plots) them.
    """
    # Default the end time to "now" when a timestep is given without one.
    if timestep and not end_time:
        end_time = datetime_to_str_without_ms(datetime.now())

    query = tag_query_builder(tagid, begin_time, end_time, timestep, mode, utc)
    if show:
        print(query)
        return

    toc = tic()  # elapsed-time closure; toc() returns seconds since here
    w = None  # ensure the name exists for the finally clause
    try:
        w = wincc(host_info.address, host_info.database)
        w.connect()
        w.execute(query)
        records = w.create_tag_records(utc)
        print("Fetched data in {time}.".format(time=round(toc(), 3)))
        if records:
            # BUG FIX: was `outfile != ''`, which treated None as a valid
            # filename and crashed in open(); truthiness covers None and ''.
            if outfile:
                with open(outfile, "w") as f:
                    # print(records.to_csv().encode("UTF-8"))
                    for rec in records:
                        f.write(rec.to_csv(name=outfile_col_name.encode("UTF-8"),
                                           tz=outfile_time_zone))
            else:
                for record in records:
                    print(record)
                if plot:
                    plot_tag_records(records)
        else:
            logging.warning("No data returned.")
    except Exception as e:
        print(e)
        print(traceback.format_exc())
    finally:
        # BUG FIX: previously `w.close()` raised NameError when the wincc()
        # constructor itself failed (w never bound), masking the real error.
        if w is not None:
            w.close()
def alarms(begin_time, end_time, text, utc, show, state, priority, priority2,
           report, report_hostname):
    """Read alarms from given host in given time.

    Builds the query, optionally just prints it (``show``); otherwise
    executes it and either generates a report (``report``) or prints the
    alarms directly.
    """
    query = alarm_query_builder(eval_datetime(begin_time),
                                eval_datetime(end_time), text, utc, state,
                                priority, priority2)
    if show:
        print(query)
        return

    w = None  # ensure the name exists for the finally clause
    try:
        toc = tic()  # elapsed-time closure; toc() returns seconds since here
        w = wincc(host_info.address, host_info.database)
        w.connect()
        w.execute(query)
        if report:
            alarms = w.create_alarm_record()
            if report_hostname:
                host_description = report_hostname
            else:
                host_description = host_info.description
            # Report generation needs a concrete end time.
            if not end_time:
                end_time = datetime_to_str_without_ms(datetime.now())
            generate_alarms_report(alarms, begin_time, end_time,
                                   host_description, text)
            print(unicode(alarms))  # NOTE: Python 2 builtin, kept as-is
        else:
            w.print_alarms()
        print("Fetched data in {time}.".format(time=round(toc(), 3)))
    except WinCCException as e:
        print(e)
        print(traceback.format_exc())
    finally:
        # BUG FIX: previously `w.close()` raised NameError when the wincc()
        # constructor itself failed (w never bound), masking the real error.
        if w is not None:
            w.close()
########## # 모델 저장 ########## if (not epoch == 0): save_path = saver.save(sess, "./checkpoint/%s" % (TSET.name)) print("Model saved in file: %s" % save_path) ########## # 레지듀얼 스케일 조절 ########## rs_factor = min(1.0, epoch / 100) ########## # 트레이닝 ########## train_time = tic() num_batch = TSET.num_batch_in_a_epoch batch_trn = [] for i in range(num_batch): # 배치 가져오기 batch_trn = dset_train.get_batch() # 학습 - generator [summary_train, _] = sess.run( [merged, train_sr], feed_dict={ in_lowv: batch_trn.arr4d_LR_v, gt_highv: batch_trn.arr4d_HR_v, op_rs: rs_factor
# Save a checkpoint tagged with the experiment name and epoch number.
save_path = saver.save(sess, "./checkpoint/model-%s" % (exp.train_name),
                       global_step=epoch)
print("Model saved in file: %s" % save_path)

##########
# Adjust residual scaling
##########
# Ramp the residual scale factor linearly during the first 100 epochs.
# NOTE(review): under Python 2 integer division, epoch / 100 would be 0 for
# epoch < 100 — confirm float division is in effect.
rs_factor = 1.0
if (epoch < 100):
    rs_factor = epoch / 100

##########
# Training
##########
train_time = tic()   # epoch wall-clock timer
gpu_time = 0         # accumulated GPU/compute time (filled later in the loop)
batch_time = 0       # accumulated batch-fetch time (filled later in the loop)
loss = 0.0           # running epoch loss accumulator
num_batch = exp.num_batch_in_a_epoch
batch_trn = []
for i in range(num_batch):
    batch_st = time.time()
    # Fetch next training batch
    batch_trn = dset.get_batch()
    st = tic()
    # Training step - autoencoder
    # NOTE(review): fragment truncated here in this view — the training step
    # continues past the end of this chunk.