def post(self, id_):
    """Persist a new BBS response (when valid) and re-render the board page."""
    errors = {}
    # Empty names are masked; a missing name falls back to the anonymous default.
    username = self.get_argument("username", "名無し") or "******"
    # NOTE(review): "mail" is required (no default) but never used afterwards —
    # a missing argument will raise here; confirm that is intended.
    mail = self.get_argument("mail")
    text = self.get_argument("text")
    if not text:
        errors["text"] = True
    if not errors:
        db = Session()
        response = Response(text, username)
        response.bbs_id = id_
        db.add(response)
        db.commit()
        db.close()
    # The page is rendered in every case so validation errors can be shown.
    self.render("BBS/index.html",
                responses=self.get_responses(id_),
                errors=errors)
def leastSquares(self, frequencies: list = None, t_step: float = None) -> Tuple[Session, Session]:
    """Retrieve humidity (q) and liquid water (w) per timestamp by solving the
    2x2 normal equations accumulated over the whole measured spectrum.

    :param frequencies: frequencies to include; all measured ones when falsy.
    :param t_step: thinning step forwarded to Session.thin_fast.
    :return: a pair of Sessions holding the 'q' and 'w' series respectively.
    """
    print("Moisture content evaluation (spectral method)...")
    if not frequencies:
        frequencies = self.m.DATA.keys
    data = self.m.DATA.select(frequencies)
    data.thin_fast(t_step)
    data.box()
    timestamps = data.get_timestamps_averaged()
    model = Model(theta=0)
    q_out, w_out = Session(), Session()
    total = len(timestamps)
    cos_theta = math.cos(self.theta)  # zenith-angle correction, loop-invariant
    for idx, t in enumerate(timestamps):
        print('{:.2f}%'.format(idx / total * 100), end='\r', flush=True)
        spectrum = data.get_spectrum(t)
        # The atmosphere state at this moment drives the model coefficients.
        model.setParameters(
            temperature=self.w.Temperature(t, dimension=p.weather.labels.T_C),
            pressure=self.w.Pressure(t, dimension=p.weather.labels.P_hpa),
            rho=self.w.Rho_abs(t),
        )
        # Accumulate A^T A and A^T b over every (frequency, brightness) pair.
        A = np.zeros((2, 2), dtype=float)
        b = np.zeros(2, dtype=float)
        for f, brt in spectrum:
            krho, kw = model.krho(f), model.kw(f, self.tcl)
            A += np.array([[krho * krho, krho * kw],
                           [krho * kw, kw * kw]])
            resid = model.opacity(brt, self.t_avg) * cos_theta - model.tauO_theory(f)
            b += np.array([resid * krho, resid * kw])
        q, w = np.linalg.solve(A, b).tolist()
        q_out.add('q', Point(t, q))
        w_out.add('w', Point(t, w))
    return q_out, w_out
def DualFrequency(self, freq_pairs: list, t_step: float = None) -> Tuple[Session, Session]:
    """Run the two-frequency retrieval for each pair and gather the results.

    :param freq_pairs: iterable of (f1, f2) frequency pairs.
    :param t_step: thinning step forwarded to dual_frequency.
    :return: (q, w) Sessions with one Series per frequency pair.
    """
    q_all, w_all = Session(), Session()
    for pair in freq_pairs:
        f1, f2 = pair
        solved = self.dual_frequency(f1, f2, t_step)
        q_all.add(Series(key=(f1, f2), data=solved.get_series('q').data))
        w_all.add(Series(key=(f1, f2), data=solved.get_series('w').data))
    return q_all, w_all
def sample():
    """Store the submitted title and echo both form values back to the template."""
    title = request.form['title']      # submitted name
    opinion = request.form['opinion']
    db = Session()
    record = sample_model(id=title)    # build the model instance
    db.add(record)                     # stage the row
    db.commit()                        # flush it to the database
    db.close()                         # release the session
    return render_template('sample.html', title=title, opinion=opinion)
def post_list_page():
    """Persist a new post, then render the page with the full post list."""
    title = request.form['title']      # submitted name
    opinion = request.form['opinion']
    db = Session()
    db.add(post(title=title, opinion=opinion))
    db.commit()
    post_list = db.query(post).all()   # re-read everything for display
    db.close()
    return render_template('post_list.html',
                           post_list=post_list,
                           title=title,
                           opinion=opinion)
def structural_functions(
        MDATA: Session,
        t_step: float = parameter.struct_func.t_step,
        part: float = parameter.struct_func.part,
        rightShowLimit: float = parameter.struct_func.rightShowLimit) -> Session:
    """Compute a structural function for every series in MDATA.

    :param MDATA: session whose series are processed one by one.
    :return: a new Session holding one structural-function series per input key.
    """
    print("Structural functions calculation...\t", end='', flush=True)
    started = time.time()
    out = Session()
    for key in MDATA.keys:
        out.add(
            structural_function(MDATA.get_series(key), t_step, part, rightShowLimit))
    print('{:.3f} sec\t'.format(time.time() - started), end='')
    print('[' + colored('OK', 'green') + ']')
    return out
def login():
    """Token-based login endpoint.

    GET  -> log out: verify the supplied token and delete the session record.
    POST -> log in: verify the password, then create or refresh the session token.
    Other methods return a 405-style message.

    Fix: ``new_session.query() == None`` compared with ``==``; identity with the
    ``None`` singleton must use ``is None``.
    """
    result = {'message': "fail", 'action': "login", 'info': "", 'data': {}}
    # Logout path
    if request.method == "GET":
        token = request.args.get("token")
        username = request.args.get("username")
        if tokendef.certify_token(key=username, token=token):
            # Remove this user's record from the session store.
            new_session = Session(username)
            new_session.delete()
            result['message'] = 'successful'
            result['action'] = 'login_out'
            result['info'] = '注销成功'
            return jsonify(result)
        else:
            result['action'] = 'login_out'
            result['info'] = 'token 过期'
            return jsonify(result)
    # Login path
    elif request.method == "POST":
        username = request.form.get("username")
        password = request.form.get('password')
        user = User(username)
        isRight = user.verify_password(password)  # check password against the DB
        # NOTE(review): debug print left in; consider removing in production.
        print(isRight)
        if isRight:
            new_session = Session(username=username)
            token = tokendef.generate_token(username)  # issue a fresh token
            if new_session.query() is None:
                # No session record yet: create one.
                new_session.token = token
                new_session.add()
                result['info'] = "新加用户token"
            else:
                # Record exists: just refresh the token.
                new_session.token = token
                new_session.update()
                result['info'] = '更新用户token'
            result['message'] = 'successful'
            result['data'] = {'token': token, 'username': username}
            return jsonify(result)
        else:
            result['info'] = "密码错误"
            return jsonify(result)
    else:
        return "405 禁用请求中指定的方法。"
def DUALFREQCPP(self, freq_pairs=None, t_step: float = None) -> Tuple[Session, Session]:
    """Two-frequency q/w retrieval delegated to the native ``libdomainpp`` library.

    :param freq_pairs: list of (f1, f2) pairs; defaults to p.freqs.qw2_freq_pairs.
    :param t_step: thinning step passed to Series.thin_fast.
    :return: (q, w) Sessions with one Series per frequency pair.
    """
    if freq_pairs is None:
        freq_pairs = p.freqs.qw2_freq_pairs

    class mcps_t(ctypes.Structure):
        # Mirrors the C struct returned by get_qw: Q and W as doubles.
        _fields_ = [('Q', ctypes.c_double), ('W', ctypes.c_double)]

    # domainpp = ctypes.CDLL('/home/dobri/QtProjects/SMAC/SMAC/libdomainpp.so', mode=os.RTLD_LAZY)
    # The library is looked up on the default loader path; symbols bind lazily.
    domainpp = ctypes.CDLL('libdomainpp.so', mode=os.RTLD_LAZY)
    # Declare the C signatures so ctypes marshals arguments correctly.
    domainpp.domain_new.argtypes = [ctypes.c_double, ctypes.c_double]
    domainpp.domain_new.restype = ctypes.c_void_p
    domainpp.get_qw.argtypes = [ctypes.c_void_p, ctypes.c_double, ctypes.c_double,
                                ctypes.c_double, ctypes.c_double, ctypes.c_double,
                                ctypes.c_double, ctypes.c_double, ctypes.c_double]
    domainpp.get_qw.restype = ctypes.POINTER(mcps_t)
    # Opaque handle to the native solver, parameterized by t_avg and tcl.
    domain = domainpp.domain_new(ctypes.c_double(self.t_avg), ctypes.c_double(self.tcl))
    q, w = Session(), Session()
    for f1, f2 in freq_pairs:
        s1, s2 = self.m.DATA.get_series(f1), self.m.DATA.get_series(f2)
        s1.thin_fast(t_step)
        s2.thin_fast(t_step)
        s = Session()
        # Walk both series in lockstep up to the shorter length.
        for i in range(min(s1.length, s2.length)):
            t1, tb1 = s1.data[i].to_tuple()
            t2, tb2 = s2.data[i].to_tuple()
            t = (t1 + t2) / 2  # midpoint timestamp for the pair of samples
            T = self.w.Temperature(t, dimension=p.weather.labels.T_C)
            P = self.w.Pressure(t, dimension=p.weather.labels.P_hpa)
            hum = self.w.Rho_abs(t)
            ret = domainpp.get_qw(domain, ctypes.c_double(f1), ctypes.c_double(f2),
                                  ctypes.c_double(tb1), ctypes.c_double(tb2),
                                  ctypes.c_double(self.theta), ctypes.c_double(T),
                                  ctypes.c_double(P), ctypes.c_double(hum))
            s.add('q', Point(t, ret.contents.Q))
            s.add('w', Point(t, ret.contents.W))
        q.add(Series(key=(f1, f2), data=s.get_series('q').data))
        w.add(Series(key=(f1, f2), data=s.get_series('w').data))
    return q, w
def __init__(self):
    """
    Write one row to the file_info table, with the schema version and
    the "started" status False.
    """
    # A non-None schema_version means the table was initialized on a
    # previous run; lazy %-style args keep the debug call cheap.
    logging.debug("In FileInfo constructor. schema_version is: %s.\n"
                  "\t started is %s.", self.schema_version, self.started)
    if self.schema_version is not None:
        return
    logging.debug("Initializing FileInfo table")
    self.schema_version = FileInfo.CURRENT_SCHEMA_VERSION
    self.started = False
    Session.add(self)
    Session.commit()
def getData(self, frequencies: list = None) -> Session:
    """Parse every measurement file, trim to the time window, and merge series.

    :param frequencies: optional whitelist of frequency keys; all when falsy.
    :return: one Session aggregating series from every file.
    """
    merged = Session()
    cfg = parameter.parsing.measurements  # parser flags, loop-invariant
    for tfile in self.tfiles:
        tfile.parse(True, cfg.rm_zeros, cfg.sort_freqs, cfg.sort_time,
                    cfg.outliers_elimination, cfg.upper_threshold_val)
        tfile.cutData(self.start_t, self.stop_t)
        if frequencies:
            for freq in frequencies:
                merged.add(tfile.session.get_series(freq))
        else:
            for series in tfile.session.series:
                merged.add(series)
        if self.erase_txt:
            # Source text files are disposable once parsed.
            os.remove(tfile.path)
    return merged
class KeyBackend(object):
    """Fernet key management backed by a cache client and a database session.

    The active key lives in the cache under "key"; its history and the
    hash records live in the database (KeyTable / HashTable).
    """

    def __init__(self):
        self.session = Session()
        cached = client.get("key")
        if cached is None:
            # First run: mint a key, publish it to the cache, archive it in the DB.
            self._key = Fernet.generate_key()
            client.delete("key")
            client.set("key", self._key)
            self.storeKey(self._key)
        else:
            self._key = cached
        self._fernet = Fernet(self._key)
        self.sha256 = hashlib.sha256()

    def storeHash(self, hash, key):
        """Persist one (key, hash) pair."""
        record = HashTable(hash_key=key, hash_data_value=hash)
        self.session.add(record)
        self.session.commit()

    def receiveHashKey(self, hash):
        """Return the first HashTable row matching *hash*, or None."""
        return (self.session.query(HashTable)
                .filter(HashTable.hash_data_value == hash)
                .first())

    def storeKey(self, key):
        """Retire the currently-active key, then store *key* as active."""
        self.updateKeyStatus()
        record = KeyTable(key_value=key, key_status=True)
        self.session.add(record)
        self.session.commit()

    def receiveKey(self, key):
        """Return the active KeyTable row matching *key*, or None."""
        return (self.session.query(KeyTable)
                .filter(KeyTable.key_value == key,
                        KeyTable.key_status == True).first())  # noqa: E712 (SQL expression)

    def updateKeyStatus(self):
        """Deactivate the currently-active key; True if one was found."""
        active = (self.session.query(KeyTable)
                  .filter(KeyTable.key_status == True).first())  # noqa: E712 (SQL expression)
        if active is None:
            return False
        active.key_status = False
        active.key_update_date = datetime.datetime.now()
        self.session.add(active)
        self.session.commit()
        return True

    @property
    def fernet(self):
        return self._fernet

    @fernet.setter
    def fernet(self, fernet):
        self._fernet = fernet

    @property
    def key(self):
        return self._key

    def keys(self, key):
        # Rebind the raw key and its Fernet instance together.
        self._key = key
        self._fernet = Fernet(key)
def post(self, id_):
    """Persist a BBS response and, on success, return it as a JSON array."""
    errors = {}
    # Empty names are masked; a missing name falls back to the anonymous default.
    username = self.get_argument("username", "名無し") or "******"
    text = self.get_argument("text")
    if not text:
        errors["text"] = True
    if not errors:
        db = Session()
        response = Response(text, username)
        response.bbs_id = id_
        db.add(response)
        db.commit()
        payload = response.toDict()  # serialize before the session is closed
        db.close()
        self.set_header("Content-Type", "text/json")
        self.write(json_encode([payload]))
def connection():
    """Exercise the AMQP-style Connection/Session/Link stack end to end:
    open two in-memory peers, attach a sender, transfer, settle and tear down.
    Python 2 code (print statements).
    """
    from connection import Connection
    from session import Session
    from link import Sender, Receiver, link
    from protocol import Fragment, Linkage
    # Two peer connections; each creates sessions via the same link factory.
    a = Connection(lambda n: Session(n, link))
    b = Connection(lambda n: Session(n, link))
    a.id = "A"
    a.tracing = set(["ops", "err"])
    b.id = "B"
    b.tracing = set(["ops", "err"])

    def pump():
        # Shuttle bytes between the two peers until both are quiescent.
        while a.pending() or b.pending():
            b.write(a.read())
            a.write(b.read())

    s = Session("test-ssn", link)
    a.add(s)
    s2 = Session("test-ssn2", link)
    a.add(s2)
    a.open(hostname="asdf")
    b.open()
    s.begin()
    s2.begin()
    l = Sender("qwer", local=Linkage("S", "T"))
    s.add(l)
    l.attach()
    pump()
    # B sees the incoming session by name and mirrors the handshake.
    bssn = [x for x in b.incoming.values() if x.name == "test-ssn"][0]
    bssn.begin()
    bl = bssn.links["qwer"]
    bl.attach()
    bl.flow(10)  # grant 10 credits so transfers may flow
    pump()
    # First transfer is pre-settled; the second keeps its delivery tag.
    l.settle(l.send(fragments=Fragment(True, True, 0, 0, "asdf")))
    tag = l.send(delivery_tag="blah", fragments=Fragment(True, True, 0, 0, "asdf"))
    pump()
    ln = bssn.links["qwer"]
    x = ln.get()
    print "INCOMING XFR:", x
    ln.disposition(x.delivery_tag, "ACCEPTED")
    xfr = ln.get()
    print "INCOMING XFR:", xfr
    ln.disposition(xfr.delivery_tag, "ASDF")
    print "--"
    pump()
    print "--"
    print "DISPOSITION", l.get_remote(modified=True)
    l.settle(tag)
    # Detach both ends, end both sessions, close both connections.
    l.detach()
    bl.detach()
    pump()
    s.end(True)
    pump()
    bssn.end(True)
    s2.end(True)
    a.close()
    b.close()
    pump()
def session():
    """Exercise session-level transfer, disposition and settlement ordering
    between a Sender and a Receiver on two in-memory peers.
    Python 2 code (print statements).
    """
    from connection import Connection
    from session import Session
    from link import link, Sender, Receiver
    from protocol import Fragment, Linkage
    a = Connection(lambda n: Session(n, link))
    a.tracing = set(["ops", "err"])
    a.id = "A"
    b = Connection(lambda n: Session(n, link))
    b.tracing = set(["err"])
    b.id = "B"
    ssn = Session("test", link)
    a.add(ssn)
    ssn.begin()
    # nss = Session("test", link)
    # b.add(nss)

    def pump():
        # One tick on each side, then exchange whatever is pending.
        a.tick()
        b.tick()
        b.write(a.read())
        a.write(b.read())

    pump()
    # B accepts the incoming session and both links are attached.
    nss = [s for s in b.incoming.values() if s.name == "test"][0]
    nss.begin()
    snd = Sender("L", "S", "T")
    ssn.add(snd)
    rcv = Receiver("L", "S", "T")
    nss.add(rcv)
    snd.attach()
    rcv.attach()
    rcv.flow(10)  # grant credit before sending
    pump()
    snd.send(fragments=Fragment(True, True, 0, 0, "m1"))
    snd.send(fragments=Fragment(True, True, 0, 0, "m2"))
    dt3 = snd.send(fragments=Fragment(True, True, 0, 0, "m3"))
    pump()
    print "PENDING", rcv.pending()
    pump()
    snd.send(fragments=Fragment(True, True, 0, 0, "m4"))
    pump()
    # Drain everything queued at the receiver, accepting the last one first.
    xfrs = []
    while rcv.pending():
        x = rcv.get()
        xfrs.append(x)
        print "XFR", x
    rcv.disposition(xfrs[-1].delivery_tag, "ACCEPTED")
    pump()
    snd.send(fragments=Fragment(True, True, 0, 0, "m5"))
    pump()
    rcv.disposition(xfrs[0].delivery_tag, "ACCEPTED")
    print "----------"
    pump()
    print "----------"
    print "ssn.outgoing:", ssn.outgoing
    print "snd.unsettled:", snd.unsettled
    # Accept the middle transfers out of order.
    for xfr in xfrs[1:-1]:
        rcv.disposition(xfr.delivery_tag, "ACCEPTED")
    print "rcv.unsettled", rcv.unsettled
    print "rcv.pending()", rcv.pending()
    rcv.disposition(rcv.get().delivery_tag)
    pump()
    print "----------"
    print "ssn.outgoing:", ssn.outgoing
    print "snd.unsettled:", snd.unsettled
    print "settling:", dt3
    snd.settle(dt3)
    print "ssn.outgoing:", ssn.outgoing
    print "snd.unsettled:", snd.unsettled
    # Settle whatever is left, then detach both ends.
    for dt in list(snd.unsettled):
        snd.settle(dt)
    snd.detach()
    rcv.detach()
    pump()
    print "ssn.outgoing:", ssn.outgoing
    print "snd.unsettled:", snd.unsettled
import json

from session import Session

# Module registry: "_" is the bootstrap entry, "Test" has code, a module body
# and no dependencies.
REGISTRY = '{"_":{"code":"","module":"true;"},"Test":{"code":"console.log(1)","module":"console.log(Math.random())","dependencies":[]}}'

sess = Session(json.loads(REGISTRY))
print('\x1b[1mPython\x1b[0m')
# Add-and-flush twice: flush drains whatever was queued since the last call.
for _ in range(2):
    sess.add('Test')
    print(sess.flush())
def getData(self, formatstr: str = 'tkmphrw*') -> Session:
    """
    :param formatstr: Type 't' to get Temperature (Cels), 'k' to get
        Temperature (K), 'p' - Pressure (hPa), 'm' - Pressure (mmHg),
        'h' - relative humidity (%), 'r' - absolute humidity (g/m3),
        'w' - wind speed (m/s), '*' - precipitation amount (mm).
        Example: 'tprw*'.
    :return: Session of weather data.
    """
    # Dispatch table replaces the original eight-branch if chain; characters
    # not in the table are silently skipped, as before.
    label_by_char = {
        't': p.weather.labels.T_C,
        'k': p.weather.labels.T_K,
        'm': p.weather.labels.P_mm,
        'p': p.weather.labels.P_hpa,
        'h': p.weather.labels.rho_rel,
        'r': p.weather.labels.rho_abs,
        'w': p.weather.labels.Vwind,
        '*': p.weather.labels.RainRt,
    }
    WDATA = Session()
    for wfile in self.wfiles:
        wfile.parse()
        wfile.cutData(self.start_t, self.stop_t)
        for char in formatstr:
            label = label_by_char.get(char)
            if label is not None:
                WDATA.add(wfile.WDATA.get_series(label))
    WDATA.time_sorting()
    return WDATA
def dual_frequency(self, freq1: float, freq2: float, t_step: float = None,
                   resolve: bool = False) -> Session:
    """Two-frequency retrieval of humidity (q) and liquid water (w).

    Solves the 2x2 system M * [q, w] = tauE - tauO per timestamp. With
    ``resolve`` a second pass subtracts the first pass's minimal w from the
    right-hand side (shift by kw * w_min), re-baselining the w series.

    Fix: the original duplicated the entire solve loop for the resolve pass;
    both passes now share one helper (the first pass uses a zero shift, which
    subtracts exactly 0.0 and is numerically identical).

    :return: Session with 'q' and 'w' series.
    """
    s1, s2 = self.m.DATA.get_series(freq1), self.m.DATA.get_series(freq2)
    s1.thin_fast(t_step)
    s2.thin_fast(t_step)
    model = Model(theta=0)

    def _solve_pass(w_shift: float) -> Session:
        # One full sweep over the paired samples with a fixed w baseline shift.
        out = Session()
        for i in range(min(s1.length, s2.length)):
            t1, tb1 = s1.data[i].to_tuple()
            t2, tb2 = s2.data[i].to_tuple()
            t = (t1 + t2) / 2  # midpoint timestamp of the sample pair
            T = self.w.Temperature(t, dimension=p.weather.labels.T_C)
            P = self.w.Pressure(t, dimension=p.weather.labels.P_hpa)
            hum = self.w.Rho_abs(t)
            model.setParameters(temperature=T, pressure=P, rho=hum)
            M = np.array([[model.krho(freq1), model.kw(freq1, tcl=self.tcl)],
                          [model.krho(freq2), model.kw(freq2, tcl=self.tcl)]])
            tauE = np.array([model.opacity(tb1, self.t_avg) * math.cos(self.theta),
                             model.opacity(tb2, self.t_avg) * math.cos(self.theta)])
            tauOxygen = np.array([model.tauO_theory(freq1),
                                  model.tauO_theory(freq2)])
            wShift = np.array([model.kw(freq1, tcl=self.tcl) * w_shift,
                               model.kw(freq2, tcl=self.tcl) * w_shift])
            q, w = np.linalg.solve(M, tauE - tauOxygen - wShift).tolist()
            out.add('q', Point(t, q))
            out.add('w', Point(t, w))
        return out

    out = _solve_pass(0.0)
    if resolve:
        # Re-run with the minimal w of the first pass as the baseline.
        w_min = min(out.get_series('w').get_values())
        out = _solve_pass(w_min)
    return out
class TFile:
    """Measurement (brightness-temperature) text file: parsing in several
    format modes plus post-processing filters applied to the parsed Session.
    """

    def __init__(self, path: str, mode: str = ''):
        # Parser mode; empty string means the 'standard' format.
        self.mode = mode
        if not mode:
            self.mode = 'standard'
        self.path = path
        self.session = Session()

    @staticmethod
    def __parse_line(text: str) -> list:
        # Strip CR/LF, split on tabs/spaces, drop empty tokens.
        return [
            elem for elem in re.split("[\t ]", re.sub("[\r\n]", '', text))
            if elem
        ]

    def parse(self, shift=True, rm_zeros=True, sort_freqs=True,
              sort_time=False, outliers_elimination=False,
              upper_threshold_val: float = None) -> None:
        """Read the file according to self.mode, then run the requested filters.

        Lines that fail numeric/date conversion are skipped (ValueError).
        """
        print("Loading data from txt...\t", end='', flush=True)
        start_time = time.time()
        print('Current parser mode: ' + self.mode)
        with Switch(self.mode) as case:
            if case("standard"):  # standard mode: YYYY.MM.DD date column
                t_file = open(self.path, "r", encoding='cp1251')
                t_file.readline()  # skip header line
                for line in t_file:
                    try:
                        l_data = TFile.__parse_line(line)
                        YYYY, MM, DD = l_data[0].split('.')
                        hh, mm, ss = l_data[1].split(':')
                        ms = l_data[2]
                        timestamp = TDateTime(YYYY, MM, DD, hh, mm, ss, ms).toDouble()
                        # Columns 5/7 are frequencies in kHz (hence /1000),
                        # columns 6/8 the corresponding values.
                        self.session.add(
                            float(l_data[5]) / 1000,
                            Point(timestamp, float(l_data[6])))
                        self.session.add(
                            float(l_data[7]) / 1000,
                            Point(timestamp, float(l_data[8])))
                    except ValueError:
                        continue
                t_file.close()
            if case("DDMMYY"):  # same layout, but DD.MM.YY dates (YY -> 20YY)
                t_file = open(self.path, "r", encoding='cp1251')
                t_file.readline()
                for line in t_file:
                    try:
                        l_data = TFile.__parse_line(line)
                        DD, MM, YY = l_data[0].split('.')
                        YYYY = str(int(YY) + 2000)
                        hh, mm, ss = l_data[1].split(':')
                        ms = l_data[2]
                        timestamp = TDateTime(YYYY, MM, DD, hh, mm, ss, ms).toDouble()
                        self.session.add(
                            float(l_data[5]) / 1000,
                            Point(timestamp, float(l_data[6])))
                        self.session.add(
                            float(l_data[7]) / 1000,
                            Point(timestamp, float(l_data[8])))
                    except ValueError:
                        continue
                t_file.close()
            if case("p"):  # by processor time: timestamp taken from column 3
                t_file = open(self.path, "r")
                t_file.readline()
                for line in t_file:
                    l_data = TFile.__parse_line(line)
                    timestamp = float(l_data[3])
                    self.session.add(
                        float(l_data[5]) / 1000,
                        Point(timestamp, float(l_data[6])))
                    self.session.add(
                        float(l_data[7]) / 1000,
                        Point(timestamp, float(l_data[8])))
                t_file.close()
            if case("g1"):  # heterodyne |1|x| mode: single channel, column 6
                t_file = open(self.path, "r")
                t_file.readline()
                for line in t_file:
                    l_data = TFile.__parse_line(line)
                    YYYY, MM, DD = l_data[0].split('.')
                    hh, mm, ss = l_data[1].split(':')
                    ms = l_data[2]
                    timestamp = TDateTime(YYYY, MM, DD, hh, mm, ss, ms).toDouble()
                    self.session.add(
                        float(l_data[4]) / 1000,
                        Point(timestamp, float(l_data[6])))
                t_file.close()
            if case("g2"):  # heterodyne |x|1| mode: single channel, column 8
                t_file = open(self.path, "r")
                t_file.readline()
                for line in t_file:
                    l_data = TFile.__parse_line(line)
                    YYYY, MM, DD = l_data[0].split('.')
                    hh, mm, ss = l_data[1].split(':')
                    ms = l_data[2]
                    timestamp = TDateTime(YYYY, MM, DD, hh, mm, ss, ms).toDouble()
                    self.session.add(
                        float(l_data[4]) / 1000,
                        Point(timestamp, float(l_data[8])))
                t_file.close()
        print('{:.3f} sec\t'.format(time.time() - start_time), end='')
        print('[' + colored('OK', 'green') + ']')
        # Optional post-processing passes, in a fixed order.
        if shift:
            print('Overlay elimination...\t')
            self.shift()
        if rm_zeros:
            print('Removing zeros...\t')
            self.remove_zeros()
        if sort_freqs:
            print('Frequency sorting...\t')
            self.sort_frequencies()
        if sort_time:
            print('Time sorting...\t')
            self.sort_time()
        if outliers_elimination:
            print('Outliers elimination...\t')
            self.outliers_elimination()
        if upper_threshold_val:
            print('Setting threshold...\t')
            self.set_upp_threshold(upper_threshold_val)

    def shift(self) -> None:
        """Collapse overlapping (doubled) samples in the 'shifted' frequency
        series by merging consecutive point pairs."""
        s_arr = settings.parameter.freqs.shifted
        ssize = self.session.get_series(settings.parameter.freqs.all[0]).length
        msize = self.session.get_series(s_arr[0]).length
        for f in s_arr:
            s = self.session.get_series(f)
            if s.is_empty:
                continue
            # A series noticeably longer than the reference is assumed doubled.
            if s.length > 1.25 * ssize:
                k = 0
                data = []
                for i in range(0, s.length - 1, 2):
                    p = s.data[i]
                    p.merge(s.data[i + 1])  # fold each pair into one point
                    data.append(p)
                    k += 1
                self.session.replace(s.key, data)
                if k < msize:
                    msize = k  # track the shortest merged length

    def remove_zeros(self) -> None:
        # Drop points with zero time AND zero value.
        self.session.remove_zeros(timeQ=True, valQ=True)

    def sort_frequencies(self) -> None:
        self.session.sort()

    def remove_time_zeros(self) -> None:
        # Drop points with zero time only.
        self.session.remove_zeros(timeQ=True, valQ=False)

    def remove_val_zeros(self) -> None:
        # Drop points with zero value only.
        self.session.remove_zeros(timeQ=False, valQ=True)

    def sort_time(self) -> None:
        self.session.time_sorting()

    def getData(self) -> Session:
        """Return the parsed Session (no copy)."""
        return self.session

    def getTimeBounds(self) -> Tuple[float, float]:
        """Return (min, max) timestamps over all series."""
        return self.session.get_time_bounds()

    def outliers_elimination(self) -> None:
        self.session.apply_to_series(Eliminate.time_series)

    def set_upp_threshold(self, threshold: float) -> None:
        # Clip values above the threshold.
        self.session.set_upper_threshold(threshold)

    def cutData(self, start_t: float, stop_t: float) -> None:
        # Keep only samples inside [start_t, stop_t].
        self.session.cut(start_t, stop_t)
class Session:
    """Synchronous wrapper around a protocol-level session: link creation,
    incoming-window management and transaction declare/discharge.

    Fix: wait() assigned the DEFAULT sentinel to self.timeout instead of
    substituting the session default into the local *timeout* — so every
    wait without an explicit timeout passed the sentinel downstream and
    clobbered the configured default.
    """

    def __init__(self, connection):
        self.proto = ProtoSession(link)
        self.connection = connection
        self._lock = self.connection._lock
        self.timeout = 120  # default wait timeout, seconds
        self.txn = None     # lazily-created transaction coordinator link
        self.proto.begin()

    def wait(self, predicate, timeout=DEFAULT):
        # Substitute the session default when the caller gave no timeout.
        if timeout is DEFAULT:
            timeout = self.timeout
        self.connection.wait(predicate, timeout)

    @synchronized
    def sender(self, target, name=None, unsettled=None):
        """Create and attach a sender link; raises LinkError if the peer
        offers no matching target."""
        if isinstance(target, basestring):
            target = Target(address=target)
        snd = Sender(self.connection, name or str(uuid4()), target)
        self.proto.add(snd.proto)
        # Resume any unsettled deliveries carried over from a prior link.
        for k, v in (unsettled or {}).items():
            snd.proto.resume(k, v)
        snd.proto.attach()
        self.wait(lambda: snd.proto.attached() or snd.proto.detaching())
        if snd.proto.remote_target is None:
            snd.close()
            raise LinkError("no such target: %s" % target)
        else:
            snd.address = getattr(snd.proto.remote_target, "address", None)
        snd.unsettled = snd.proto.remote_unsettled()
        return snd

    @synchronized
    def receiver(self, source, limit=0, drain=False, name=None, unsettled=None):
        """Create and attach a receiver link; raises LinkError if the peer
        offers no matching source."""
        if isinstance(source, basestring):
            source = Source(address=source)
        rcv = Receiver(self.connection, name or str(uuid4()), source)
        self.proto.add(rcv.proto)
        if limit:
            rcv.flow(limit, drain=drain)
        for k, v in (unsettled or {}).items():
            rcv.proto.resume(k, v)
        rcv.proto.attach()
        # NOTE(review): sender() waits on attached() or *detaching*() while
        # this waits on attached() or *attaching*() — likely a copy-paste
        # asymmetry; preserved here, confirm against the protocol layer.
        self.wait(lambda: rcv.proto.attached() or rcv.proto.attaching())
        if rcv.proto.remote_source is None:
            rcv.close()
            raise LinkError("no such source: %s" % source)
        else:
            rcv.address = getattr(rcv.proto.remote_source, "address", None)
        rcv.unsettled = rcv.proto.remote_unsettled()
        return rcv

    @synchronized
    def incoming_window(self):
        return self.proto.incoming_window(self)

    @synchronized
    def set_incoming_window(self, *args, **kwargs):
        return self.proto.set_incoming_window(*args, **kwargs)

    def _txn_link(self):
        # Create the transaction coordinator link on first use.
        if self.txn is None:
            self.txn = self.sender(Coordinator(), "txn-%s" % uuid4())

    @synchronized
    def declare(self, timeout=None):
        """Declare a transaction; returns the new txn id or raises."""
        self._txn_link()
        self.txn.send(Message(Declare(), delivery_tag="declare"))
        for t, l, r in self.txn.pending(block=True, timeout=timeout):
            if t == "declare":
                self.txn.settle(t)
                return r.state.txn_id
        # No declare outcome seen among the pending dispositions.
        raise SessionError("transaction declare failed")

    @synchronized
    def discharge(self, txn, fail=False, timeout=None):
        """Discharge (commit or, with fail=True, roll back) a transaction."""
        self._txn_link()
        self.txn.send(Message(Discharge(txn_id=txn, fail=fail),
                              delivery_tag="discharge"))
        for t, l, r in self.txn.pending(block=True, timeout=timeout):
            if t == "discharge":
                self.txn.settle(t)
                break
        else:
            raise SessionError("transaction discharge failed")

    @synchronized
    def close(self):
        self.proto.end()
def saveEntity(self, entity):
    """Persist *entity* in its own short-lived session.

    Fix: the original never closed the session and left the transaction
    open on failure. The session is now always closed, and a failed
    commit is rolled back before the error propagates.
    """
    session = Session()
    try:
        session.add(entity)
        session.commit()
    except Exception:
        session.rollback()  # leave the connection in a clean state
        raise
    finally:
        session.close()
class WFile:
    """One day of weather observations: parses the day's data file into a
    Session and exposes interpolated lookups per quantity.

    Fixes:
    * ``except IndexError or ValueError:`` evaluates to ``except IndexError:``
      only (the ``or`` short-circuits on the class object) — ValueError lines
      crashed the parse. Replaced with a proper exception tuple, twice.
    * The Kelvin series was built as ``T - 273.15``; the source temperature is
      Celsius (stored unchanged under the T_C label), so Kelvin is
      ``T + 273.15``.
    """

    def __init__(self, YYYY: Union[int, str], MM: Union[int, str],
                 DD: Union[int, str]):
        self.YYYY = str(YYYY)
        self.MM = '{0:0>2}'.format(MM)   # zero-pad to two digits
        self.DD = '{0:0>2}'.format(DD)
        self.wdatapath = os.path.join(Settings.meteoBaseDir, self.YYYY,
                                      self.MM, self.DD, 'data')
        self.WDATA = Session()
        self.swvp_tbl = WFile.load_swv_prop()  # saturated water vapour table
        # Default time bounds: the whole calendar day.
        self.min_t = TDateTime(YYYY, MM, DD).toDouble()
        self.max_t = TDateTime(YYYY, MM, DD, **END_OF_DAY).toDouble()

    @staticmethod
    def fromDate(Date: date) -> 'WFile':
        """Alternate constructor from a datetime.date."""
        return WFile(Date.year, Date.month, Date.day)

    def parse(self) -> None:
        """Load the day's weather file into self.WDATA; malformed lines are
        counted and skipped. Updates min_t/max_t from the parsed data."""
        if not os.path.exists(self.wdatapath):
            print('No weather files found\t' +
                  '[' + colored('Error', 'red') + ']')
            return
        k = 0  # number of skipped (malformed) lines
        self.WDATA.series.clear()
        with open(self.wdatapath, 'r') as wdatafile:
            for line in wdatafile:
                l_data = re.split("[\t ]", re.sub("[\r\n]", '', line))
                l_data = [elem for elem in l_data if elem]
                try:
                    hh_mm = l_data[0]
                    t_data = re.split(':', hh_mm)
                    hh, mm = int(t_data[0]), int(t_data[1])
                    P = float(l_data[1])        # pressure, mmHg
                    T = float(l_data[2])        # temperature, Celsius
                    rho_rel = float(l_data[3])  # relative humidity, %
                    Wind = float(l_data[4])
                    Rain_rt = float(l_data[5])
                except (IndexError, ValueError):
                    k += 1
                    continue
                timestamp = TDateTime(self.YYYY, self.MM, self.DD,
                                      hh, mm).toDouble()
                self.WDATA.add(p.weather.labels.T_C, Point(timestamp, T))
                # Celsius -> Kelvin.
                self.WDATA.add(p.weather.labels.T_K,
                               Point(timestamp, T + 273.15))
                self.WDATA.add(p.weather.labels.P_mm, Point(timestamp, P))
                # mmHg -> hPa.
                self.WDATA.add(p.weather.labels.P_hpa,
                               Point(timestamp, P * 1.3332222))
                self.WDATA.add(p.weather.labels.rho_rel,
                               Point(timestamp, rho_rel))
                # Absolute humidity from the saturated-vapour table.
                rho_s = self.swvp_tbl[int(T)][0]
                rho_abs = rho_s * rho_rel / 100
                self.WDATA.add(p.weather.labels.rho_abs,
                               Point(timestamp, rho_abs))
                self.WDATA.add(p.weather.labels.Vwind, Point(timestamp, Wind))
                self.WDATA.add(p.weather.labels.RainRt,
                               Point(timestamp, Rain_rt))
        self.min_t, self.max_t = self.WDATA.get_time_bounds()
        return

    @staticmethod
    def load_swv_prop() -> defaultdict:
        """Load the saturated water vapour properties table:
        temperature -> [rho_s, p_s (kPa), p_s (mmHg)]."""
        data = defaultdict(list)
        k = 0  # skipped lines counter
        with open(Settings.swvapour_conf_path, 'r') as swvcfile:
            for line in swvcfile:
                l_data = re.split("[\t ]", re.sub("[\r\n]", '', line))
                l_data = [elem for elem in l_data if elem]
                try:
                    T_s = float(l_data[0])
                    p_s_mmrtst = float(l_data[1])
                    p_s_kPa = float(l_data[2])
                    rho_s = float(l_data[3])
                except (IndexError, ValueError):
                    k += 1
                    continue
                data[T_s] = [rho_s, p_s_kPa, p_s_mmrtst]
        return data

    def cutData(self, start_t: float, stop_t: float) -> None:
        """Keep only samples inside [start_t, stop_t]."""
        self.WDATA.cut(start_t, stop_t)

    def getInfo(self, timestamp: float) -> list:
        """Return every weather quantity at the given timestamp."""
        return self.WDATA.get_spectrum(timestamp)

    def Pressure(self, timestamp: float,
                 dimension: str = p.weather.labels.P_mm) -> float:
        return self.WDATA.get_series(key=dimension).get(timestamp)

    def Temperature(self, timestamp: float,
                    dimension: str = p.weather.labels.T_C) -> float:
        return self.WDATA.get_series(key=dimension).get(timestamp)

    def Rho_rel(self, timestamp: float) -> float:
        return self.WDATA.get_series(
            key=p.weather.labels.rho_rel).get(timestamp)

    def Rho_abs(self, timestamp: float) -> float:
        return self.WDATA.get_series(
            key=p.weather.labels.rho_abs).get(timestamp)

    def WindV(self, timestamp: float) -> float:
        return self.WDATA.get_series(key=p.weather.labels.Vwind).get(timestamp)

    def RainRt(self, timestamp: float) -> float:
        return self.WDATA.get_series(
            key=p.weather.labels.RainRt).get(timestamp)