def fatalError(error):
    error = str(error)
    printFailure("FAILED")
    print(Fore.RED + error)
    logger.error(error)
    print(Style.RESET_ALL)
    sys.exit()
def discover_cluster_databases(self, make_observable=True):
    if not self.__prod_conn:
        if not self.__set_self_db_conn():
            logger.error("Did not obtain Prod database connection for cluster {0} in discover_cluster_databases method".format(self.db_fields['hostname']))
            return
    cur = self.__prod_conn.cursor()
    try:
        cur.execute("SELECT oid,datname FROM pg_database WHERE NOT datistemplate AND datname !='postgres'")
    except Exception as e:
        logger.error("Cannot execute database discovery query: {0}".format(e))
        cur.close()
        return
    prod_dbs = cur.fetchall()
    cur.close()
    self.cursor.execute("SELECT obj_oid,db_name,id FROM database_name WHERE hc_id={0} AND alive".format(self.id))
    local_dbs = self.cursor.fetchall()
    for l_db in local_dbs:
        for p_db in prod_dbs:
            if l_db[0] == p_db[0] and l_db[1] == p_db[1]:
                break
        else:
            old_db = DatabaseName(l_db[2], self.return_conn_string(dbname=l_db[1]))
            old_db.retire()
            logger.info("Retired database {0} in cluster {1}".format(l_db[1], self.db_fields['hostname']))
    for p_db in prod_dbs:
        for l_db in local_dbs:
            if l_db[0] == p_db[0] and l_db[1] == p_db[1]:
                break
        else:
            new_db = DatabaseName()
            new_db.set_fields(hc_id=self.id, obj_oid=p_db[0], db_name=p_db[1], observable=make_observable)
            new_db.create()
            new_db.truncate()
            logger.info("Created new database {0} for cluster {1}".format(p_db[1], self.db_fields['hostname']))
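# The discovery methods in this file reconcile a local snapshot against the
# production catalog with Python's for/else idiom: the else clause runs only
# when the inner loop finishes without hitting break, i.e. when no matching
# (oid, name) row was found. A minimal standalone sketch (names illustrative):
def rows_missing_from(reference, rows):
    missing = []
    for row in rows:
        for ref in reference:
            if row[0] == ref[0] and row[1] == ref[1]:
                break            # match found, move on to the next row
        else:
            missing.append(row)  # no break happened: row has no counterpart
    return missing

# Local row (2, 'b') no longer exists in prod, so it would be retired:
assert rows_missing_from([(1, 'a')], [(1, 'a'), (2, 'b')]) == [(2, 'b')]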
def run(self) -> None:
    for ext in self.startup_extensions:
        try:
            self.load_extension(ext)
        except Exception as e:
            logger.error(Constants.EXT_FAILED_TO_RUN, ext, e)
    super().run(get_token(), reconnect=True)
def replace_node_by_path(tree: Node, path_in_tree: list, replace_by: Node) -> Node:
    """
    Finds the target node in 'tree' by following the given path and replaces it
    with the node given in 'replace_by'.
    :param tree: tree in which one of the nodes is going to be replaced
    :param path_in_tree: path to the node inside the tree, given as a list of branch indices
    :param replace_by: the node which will replace the existing one inside the tree
    :return: altered tree
    """
    def recursion(root: Node, path: list) -> Node:
        if path:
            res = recursion(root.branches[path[0]], path[1:])
            root.branches[path[0]] = res
        else:
            return replace_by
        return root

    try:
        new_tree = recursion(root=deepcopy(tree), path=path_in_tree)
        new_tree.update_metrics()
        return new_tree
    except Exception as e:
        # str(tree) rather than json.dumps(tree): Node objects are not JSON-serializable,
        # and a serialization failure here would mask the original error.
        logger.error("Error in replace node | tree: %s | path_in_tree: %s | exception: %s",
                     str(tree), str(path_in_tree), str(e))
        return None
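# Usage sketch for replace_node_by_path. The project's Node class is not shown
# here, so the stub below assumes only what the function itself touches: a
# mutable 'branches' list and an update_metrics() hook.
class _StubNode:
    def __init__(self, label, branches=None):
        self.label = label
        self.branches = branches or []
    def update_metrics(self):
        pass  # the real Node presumably recomputes cached metrics here

root = _StubNode('root', [_StubNode('A'), _StubNode('B')])
new_tree = replace_node_by_path(root, [1], _StubNode('C'))  # path [1] selects branch B
assert new_tree.branches[1].label == 'C'
assert root.branches[1].label == 'B'  # the input tree is deep-copied, not mutated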
def run_detail(self, feature_id):
    # Build the request headers
    ua = UserAgent()
    headers = {'user-agent': ua.Chrome}
    url = 'http://www.ldmap.net/service/map/feature/get'
    # Request parameters
    param = {
        'feature_id': feature_id,
        'mapid': self.mapid,
        '_': int(datetime.now().timestamp())
    }
    try:
        resp = requests.get(url, headers=headers, params=param)
    except requests.RequestException as e:
        logger.error(e)
    else:
        print(resp.text)
        if resp.status_code == 200:
            result = resp.json()
            if 'point' in result:
                latitude = result['point']['x']
                longitude = result['point']['y']
                return longitude, latitude
async def conversion(self, ctx, from_currency, to_currency):
    api_url = 'https://www.alphavantage.co/query'
    parameters = {
        'function': 'CURRENCY_EXCHANGE_RATE',
        'from_currency': from_currency,
        'to_currency': to_currency,
        'apikey': self.av_api_key,
        'datatype': 'json',
        'outputsize': 'compact'
    }
    headers = {
        'Accepts': 'application/json',
        'AlphaVantagKey': self.av_api_key,
    }
    # Check that the user specified both currencies
    if from_currency is None or to_currency is None:
        await ctx.send("Please specify a currency to convert from, and to. Example: !conversion USD EUR")
        return
    else:
        try:
            response_json = requests.get(api_url, headers=headers, params=parameters).json()
            from_currency = response_json['Realtime Currency Exchange Rate']['1. From_Currency Code']
            to_currency = response_json['Realtime Currency Exchange Rate']['3. To_Currency Code']
            exchange_rate = response_json['Realtime Currency Exchange Rate']['5. Exchange Rate']
            # Embed the data to send back to the user
            embed = discord.Embed(title=f'{from_currency} to {to_currency}', colour=discord.Colour.purple())
            embed.add_field(name=":moneybag: Exchange Rate", value=exchange_rate, inline=True)
            await ctx.send(embed=embed)
        except Exception:
            await ctx.send("Error, unable to process embed with data. Please try again.")
            logger.error("Error sending embed from !conversion command.")
            return
def set_description(self, desc):
    upd_stat = self.cursor.mogrify("UPDATE {0} SET description=%s WHERE id={1}".format(self.table, self.id), (desc,))
    try:
        self.cursor.execute(upd_stat)
    except Exception as e:
        logger.error("Cannot update table {0}. {1}".format(self.table, e.pgerror))
        return
def recursion(node_type: NT, depth: int, rule_number: int = None) -> Node:
    if node_type not in rules:
        logger.error("quest_generator, rule: " + str(node_type) + " is not in the rules list.")
        return None
    rules_for_type = rules[node_type]
    if root_rule_number and rule_number and rule_number in rules_for_type:
        rule_requirements_list = rules_for_type[rule_number]
    else:
        if depth > 0:
            rule_number, rule_requirements_list = random.choice(list(rules_for_type.items()))
        else:
            # Rule number one for each action is the shortest one; picking it
            # keeps the quest depth within the limit.
            rule_number, rule_requirements_list = 1, rules_for_type[1]
    branches = []
    for action_type in rule_requirements_list:
        if type(action_type) == T:
            branch = Leaf(action_type)
        else:
            branch = recursion(node_type=action_type, depth=depth - 1)
        branches.append(branch)
    return Node(node_type, rule_number, *branches)
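# A sketch of the grammar shape this recursion assumes. NT and T stand in for
# the project's non-terminal/terminal enums; the members and rules below are
# illustrative only, not the project's real quest grammar.
from enum import Enum

class NT(Enum):
    QUEST = 'quest'
    SUBQUEST = 'subquest'

class T(Enum):
    GOTO = 'goto'
    TAKE = 'take'

rules = {
    NT.QUEST: {
        1: [T.GOTO, T.TAKE],       # rule 1 is the shortest expansion per node type
        2: [NT.SUBQUEST, T.TAKE],  # recursive rule, chosen only while depth > 0
    },
    NT.SUBQUEST: {1: [T.GOTO]},
}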
def get_page_info(html):
    # Parse the raw HTML into an easy-to-read soup
    soup = None
    try:
        soup = BeautifulSoup(html, "html.parser")
    except AttributeError:
        logger.error('getJDInfo-getPageInfo:html is None!')
    return soup
def __request_API(self, path, method="GET", data=None, silent=False):
    # Build URL
    url = "https://api.telegram.org/bot" + self.__token + "/" + path
    # Handle HTTP method
    if method == "GET":
        f = self.__req.get(url)
    elif method == "POST" and data is None:
        raise Exception("Data is missing")
    elif method == "POST":
        f = self.__req.post(url, data)
    else:
        raise Exception("Method unsupported")
    # Debug log (use %-style lazy formatting; stdlib logging takes a format
    # string plus args, not a comma-separated message)
    if not silent:
        logger.debug("API %s - Requesting: %s", method, path)
    result = f.json()
    if not silent:
        logger.debug("API %s - Result:\n%s", method, pformat(result))
    # Handle API error
    if result["ok"] is False and not silent:
        logger.error("API ERROR - %s", result["description"])
    return result
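# Usage sketch (assuming this wrapper lives inside a bot class; only the
# Telegram Bot API paths below are real, the rest is illustrative). Telegram
# wraps every reply in {"ok": ..., "result": ...}:
# me = self.__request_API("getMe")                        # simple GET
# sent = self.__request_API("sendMessage", method="POST",
#                           data={"chat_id": 12345, "text": "hello"})
# if sent["ok"]:
#     message_id = sent["result"]["message_id"]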
def routineSource(name):
    ip = SOURCE_LIST[name]["ip"]
    port = SOURCE_LIST[name]["port"]
    logger.info("source routine " + name + " started, ip=" + ip + " port=" + str(port))
    queue = queDic[name]
    print("ok1")
    while True:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            logger.info("source connecting: " + ip + " " + str(port))
            sock.connect((ip, port))
            logger.info("source connected: " + ip + " " + str(port))
            while True:
                # Receive the forwarded data
                data = sock.recv(1024)
                print("ok2", data)
                # packdata = readPackage(sock, DUMP_MODE)
                queue.put(data)
        except Exception as e:
            logger.error(str(e))
        finally:
            sock.close()
            time.sleep(3)
def discover_indexes(self, prod_cursor):
    self.cursor.execute("SELECT obj_oid,idx_name,id FROM index_name WHERE tn_id={0} AND alive".format(self.id))
    local_idxs = self.cursor.fetchall()
    try:
        prod_cursor.execute("""SELECT i.indexrelid,c.relname,i.indisunique,i.indisprimary
            FROM pg_index i
            JOIN pg_class c ON i.indexrelid=c.oid
            WHERE i.indrelid={0}""".format(self.db_fields['obj_oid']))
    except Exception as e:
        logger.error("Cannot execute index discovery query: {0},{1}".format(e.pgcode, e.pgerror))
        return
    prod_idxs = prod_cursor.fetchall()
    for l_idx in local_idxs:
        for p_idx in prod_idxs:
            if l_idx[0] == p_idx[0] and l_idx[1] == p_idx[1]:
                break
        else:
            logger.info("Retired index {0} in table {1}".format(l_idx[1], self.db_fields['tbl_name']))
            old_idx = IndexName(l_idx[2])
            old_idx.retire()
    for p_idx in prod_idxs:
        for l_idx in local_idxs:
            if l_idx[0] == p_idx[0] and l_idx[1] == p_idx[1]:
                break
        else:
            logger.info("Created new index {0} in table {1}".format(p_idx[1], self.db_fields['tbl_name']))
            new_index = IndexName()
            new_index.set_fields(tn_id=self.id, obj_oid=p_idx[0], idx_name=p_idx[1], is_unique=p_idx[2], is_primary=p_idx[3])
            new_index.create()
            new_index.truncate()
def discover_tables(self, prod_cursor, make_observable=True):
    self.cursor.execute("SELECT obj_oid,tbl_name,id FROM table_name WHERE {0}={1} AND alive".format(self.sub_fk, self.id))
    local_tbls = self.cursor.fetchall()
    try:
        prod_cursor.execute("""SELECT r.oid,r.relname,
            CASE WHEN h.inhrelid IS NULL THEN 'f'::boolean ELSE 't'::boolean END AS has_parent
            FROM pg_class r
            LEFT JOIN pg_inherits h ON r.oid=h.inhrelid
            WHERE r.relkind='r' AND r.relnamespace={0}""".format(self.db_fields['obj_oid']))
    except Exception as e:
        logger.error("Cannot execute tables discovery query: {0},{1}".format(e.pgcode, e.pgerror))
        # prod_cursor.close()
        return
    prod_tbls = prod_cursor.fetchall()
    for l_table in local_tbls:
        for p_table in prod_tbls:
            if l_table[0] == p_table[0] and l_table[1] == p_table[1]:
                break
        else:
            logger.info("Retired table {0} in schema {1}".format(l_table[1], self.db_fields['sch_name']))
            old_table = TableName(l_table[2])
            old_table.retire()
    for p_table in prod_tbls:
        for l_table in local_tbls:
            if p_table[0] == l_table[0] and p_table[1] == l_table[1]:
                break
        else:
            logger.info("Created new table: {0} in schema {1}".format(p_table[1], self.db_fields['sch_name']))
            new_table = TableName()
            new_table.set_fields(sn_id=self.id, tbl_name=p_table[1], obj_oid=p_table[0], has_parent=p_table[2], observable=make_observable)
            new_table.create()
            new_table.truncate()
def discover_functions(self, prod_cursor, make_observable=True):
    self.cursor.execute("SELECT pro_oid,func_name,id FROM function_name WHERE {0}={1} AND alive".format(self.sub_fk, self.id))
    local_funcs = self.cursor.fetchall()
    # print local_funcs
    try:
        prod_cursor.execute("""SELECT p.oid AS pro_oid,p.proname AS funcname,p.proretset,t.typname,l.lanname
            FROM pg_proc p
            LEFT JOIN pg_namespace n ON n.oid = p.pronamespace
            JOIN pg_type t ON p.prorettype=t.oid
            JOIN pg_language l ON p.prolang=l.oid
            WHERE (p.prolang <> (12)::oid) AND n.oid={0}""".format(self.db_fields['obj_oid']))
    except Exception as e:
        logger.error("Cannot execute function discovery query: {0}".format(e))
        # prod_cursor.close()
        return
    prod_funcs = prod_cursor.fetchall()
    for l_func in local_funcs:
        for p_func in prod_funcs:
            if l_func[0] == p_func[0] and l_func[1] == p_func[1]:
                break
        else:
            logger.info("Retired function {0} in schema {1}".format(l_func[1], self.db_fields['sch_name']))
            old_func = FunctionName(l_func[2])
            old_func.retire()
    for p_func in prod_funcs:
        for l_func in local_funcs:
            if p_func[0] == l_func[0] and p_func[1] == l_func[1]:
                break
        else:
            logger.info("Created new function: {0} in schema {1}".format(p_func[1], self.db_fields['sch_name']))
            new_func = FunctionName()
            new_func.set_fields(sn_id=self.id, pro_oid=p_func[0], func_name=p_func[1], proretset=p_func[2], prorettype=p_func[3], prolang=p_func[4], observable=make_observable)
            new_func.create()
            new_func.truncate()
def routineSource(name):
    ip = SOURCE_LIST[name]["ip"]
    port = SOURCE_LIST[name]["port"]
    logger.info("source routine " + name + " started, ip=" + ip + " port=" + str(port))
    queue = queDic[name]
    print("ok3")
    while True:
        try:
            # Connect to the source server
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            logger.info("source connecting: " + ip + " " + str(port))
            sock.connect((ip, port))
            logger.info("source connected: " + ip + " " + str(port))
            while True:
                # Read a packet and put it on the queue
                packdata = readPackage(sock)
                queue.put(packdata)
                # print(packdata)
        except Exception as e:
            logger.error("source conn broken: " + str(e))
            sock.close()
            queue.clear()
        finally:
            time.sleep(3)
def updateTDC(self, sn, Trs, Tss, M_MAC):
    """
    Trs - Tss = k * Trs + B
    We update k and B every time we get Trs and Tss.
    """
    with self.conTDC:
        x2 = Trs
        y2 = Tss
        if M_MAC in self.T_Drift_Compensation:
            last_sn, last_k, last_B, x1, y1, last_fir_k, last_fir_P = self.T_Drift_Compensation.pop(M_MAC)
            if y1 > y2:
                y1 = y1 - CLOCK_OVERFLOW
            if x1 > x2:
                x1 = x1 - CLOCK_OVERFLOW
            # Ensure that the Sync SN is not too early
            if int(sn) - last_sn < 5 or int(sn) + 4294967296 - last_sn < 5:  # 2^32
                try:
                    k = (y1 - y2) / (x1 - x2)
                    B = (y2 * x1 - y1 * x2) / (x1 - x2)
                    if last_fir_k == 0:
                        fir_k, fir_P = self.calcu(k, k, last_fir_P)
                    else:
                        fir_k, fir_P = self.calcu(k, last_fir_k, last_fir_P)
                    if abs(fir_k - k) < 1e-8:
                        self.T_Drift_Compensation[M_MAC] = (int(sn), k, B, x2, y2, fir_k, fir_P)
                    else:
                        self.T_Drift_Compensation[M_MAC] = (last_sn, last_k, last_B, x1, y1, fir_k, fir_P)
                    logger.warning([M_MAC, self.MAC, int(sn), k, fir_k, fir_k - k, B, x1, x2, x1 - x2, y1, y2, y1 - y2])
                except ZeroDivisionError as e:
                    logger.error(e)
                finally:
                    self.conTDC.notifyAll()
                return
        # If the conditions above are not met, skip computing k and B and just store x2, y2.
        self.T_Drift_Compensation[M_MAC] = (int(sn), 0, None, x2, y2, 0, 0)
        self.conTDC.notifyAll()
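# The k, B update above is the standard two-point line fit: given samples
# (x1, y1) and (x2, y2), solve y = k*x + B through both points. A standalone
# check with made-up numbers (illustrative only):
x1, y1 = 100.0, 205.0
x2, y2 = 300.0, 605.0
k = (y1 - y2) / (x1 - x2)            # (205 - 605) / (100 - 300) = 2.0
B = (y2 * x1 - y1 * x2) / (x1 - x2)  # (60500 - 61500) / (100 - 300) = 5.0
assert k == 2.0 and B == 5.0
assert y1 == k * x1 + B and y2 == k * x2 + B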
def discover_schemas(self):
    if not self.get_self_db_conn():
        return
    cur = self.prod_conn.cursor()
    try:
        cur.execute("SELECT oid,nspname FROM pg_namespace WHERE nspname NOT IN ('pg_catalog', 'information_schema') AND nspname !~ '^pg_toast' AND nspname !~ '^pg_temp'")
    except Exception as e:
        logger.error("Cannot get schema info for database {0}".format(self.db_fields['db_name']))
        cur.close()
        return
    prod_schs = cur.fetchall()
    cur.close()
    self.cursor.execute("SELECT obj_oid,sch_name,id FROM schema_name WHERE dn_id={0} AND alive".format(self.id))
    local_schs = self.cursor.fetchall()
    for l_sch in local_schs:
        for p_sch in prod_schs:
            if l_sch[0] == p_sch[0] and l_sch[1] == p_sch[1]:
                break
        else:
            old_sch = SchemaName(l_sch[2])
            old_sch.retire()
            logger.info("Retired schema {0} in database {1}".format(l_sch[1], self.db_fields['db_name']))
    for p_sch in prod_schs:
        for l_sch in local_schs:
            if l_sch[0] == p_sch[0] and l_sch[1] == p_sch[1]:
                break
        else:
            new_sch = SchemaName()
            new_sch.set_fields(dn_id=self.id, obj_oid=p_sch[0], sch_name=p_sch[1])
            new_sch.create()
            new_sch.truncate()
            logger.info("Created new schema {0} in database {1}".format(p_sch[1], self.db_fields['db_name']))
def routineTarget(name):
    ip = TARGET_LIST[name]["ip"]
    port = TARGET_LIST[name]["port"]
    logger.info("target routine " + name + " started, ip=" + ip + " port=" + str(port))
    queue = queDic[name]
    print("ok1", queue)
    while True:
        try:
            # Connect to the target server
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            logger.info("target connecting: " + ip + " " + str(port))
            sock.connect((ip, port))
            logger.info("target connected: " + ip + " " + str(port))
            while True:
                # Take data from the queue and send it
                packdata = queue.get()
                # packdata = b'hello'
                sock.sendall(bytes(packdata, encoding='utf8'))
                # print("ok2")
        except Exception as e:
            logger.error("target conn broken: " + str(e))
            sock.close()
        finally:
            time.sleep(3)
def create_email(cls, data):
    if data.get('type') == '' or data.get('type') is None:
        return Con.response({'error': 'Email type required'}, 400)
    # Allowed values: 'Личный' (personal), 'Рабочий' (work)
    if data.get('type') not in ['Личный', 'Рабочий']:
        return Con.response({'error': 'Email type not allowed'}, 400)
    if data.get('email') == '' or data.get('email') is None:
        return Con.response({'error': 'Email required'}, 400)
    if not valid_email(data.get('email')):
        return Con.response({'error': 'Email not valid'}, 400)
    if data.get('user_id') == '' or data.get('user_id') is None:
        return Con.response({'error': 'Invalid parameters. User id required'}, 400)
    email = Email.get_email_by_id(data.get('user_id'))
    if email:
        return Con.response({'error': 'Email exist'}, 400)
    try:
        Email.create_email(data)
        logger.info('create object %s', data)
        return Con.response({'OK': '200'}, 200)
    except Exception as error:
        logger.error('error creating %s', error)
        return Con.response({'Error': '500'}, 500)
def upload(environ, start_response):
    start_response('200 OK', [('Content-type', 'text/html')])
    params = environ['params']
    port = params.get('tomcat')
    if locker[port]:
        result = f"{port}当前正在处理锁定中,请稍后再试"  # port is locked by another operation, try again later
    else:
        locker[port] = True
        try:
            tomcat = f"/data/tomcat7_finance_{port}"
            name = params.get("name")
            file = params.get("file")
            with open(f"file/{port}/{name}", 'wb') as f:
                f.write(file)
            shutdown(port)
            unzip(port, name)
            rm(tomcat)
            mv(tomcat, port)
            cp(tomcat)
            start(tomcat)
            result = "替包成功"  # package replacement succeeded
        except Exception as e:
            logger.error(str(e))
            result = f"替包:{str(e)}"  # package replacement failed with this error
        finally:
            locker[port] = False
    write_log(port, result)
    yield result.encode('utf-8')
def restart(self, flag, token, websocket_server):
    hostname = self.server_info[flag]['host']
    port = self.server_info[flag]['port']
    username = self.server_info[flag]['username']
    password = self.server_info[flag]['password']
    cmd_path = self.server_info[flag]['cmd_path']
    log_path = self.server_info[flag]['log_path']
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        restart_cmd = cmd_path + " > /dev/null"
        client.connect(hostname, port, username, password)
        client.exec_command(restart_cmd)
        log_cmd = "cat " + log_path
        stdin, stdout, stderr = client.exec_command(log_cmd)
        msg = ''
        for line in stdout:
            msg = line.strip("\n")
        self.data['code'] = 0
        self.data['msg'] = "success"
        self.data['data'] = "脚本执行成功 -- " + msg  # script executed successfully
    except Exception as e:
        logger.error(str(e))
        self.data['code'] = -1
        self.data['msg'] = "error"
        self.data['data'] = "脚本执行失败 -- " + str(e)  # script execution failed
    client.close()
    logger.info(self.data['data'])
    websocket_server.write_message(json.dumps(self.data))
def get_detail(self, url):
    logger.info('获取明细链接:{}'.format(url))  # fetching the detail link
    headers = {
        'User-Agent': 'Mozilla/5.0(Windows NT 10.0; WOW64)AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/53.0.2785.104 Safari/537.36 Core/1.53.3427.400 QQBrowser/9.6.12513.400',
        'Content-Type': 'application/json'
    }
    try:
        r = requests.get(url, headers=headers, timeout=self.timeout, verify=False)
        r.encoding = 'utf-8'
        r.raise_for_status()
    except requests.RequestException as e:
        logger.error(e)
    else:
        # print(r.text)
        if r.status_code == 200:
            # Extract the detail content block
            soup = BeautifulSoup(r.text, 'html.parser')
            obj = soup.find('div', class_="vF_detail_content")
            content = obj.__str__()
            budgetprice = highprice = winningprice = 0
            # The markers below must match the page's Chinese field labels:
            # budget amount, price ceiling (if any), awarded/deal amount.
            budgetprice = self.get_price(['预算金额:'], r.text)
            highprice = self.get_price(['最高限价(如有):'], r.text)
            winningprice = self.get_price(['中标(成交)金额:', '中标金额:', '成交金额:'], r.text)
            return content, budgetprice, highprice, winningprice
async def cashflow(self, ctx, ticker):
    api_url = "https://www.alphavantage.co/query"
    parameters = {
        'function': 'CASH_FLOW',
        'symbol': ticker,
        'apikey': self.av_api_key,
        'datatype': 'json',
        'outputsize': 'compact'
    }
    headers = {
        'Accepts': 'application/json',
        'AlphaVantagKey': self.av_api_key,
    }
    # Checking if the user has specified a ticker
    if ticker is None:
        await ctx.send("Please specify a stock ticker. Example: !cashflow MSFT")
        return
    else:
        try:
            response_json = requests.get(api_url, headers=headers, params=parameters).json()
            symbol = response_json['symbol']
            data = response_json['annualReports']
            # Get the most recent year from the data
            year = data[0]
            fiscalDateEnding = year['fiscalDateEnding']
            operatingCashflow = year['operatingCashflow']
            capitalExpenditures = year['capitalExpenditures']
            cashflowFromInvestment = year['cashflowFromInvestment']
            cashflowFromFinancing = year['cashflowFromFinancing']
            netIncome = year['netIncome']
            dividendPayout = year['dividendPayout']
            profitLoss = year['profitLoss']
            # Format the data
            operatingCashflow = '$' + format(float(operatingCashflow), ',')
            profitLoss = '$' + format(float(profitLoss), ',')
            capitalExpenditures = '$' + format(float(capitalExpenditures), ',')
            cashflowFromInvestment = '$' + format(float(cashflowFromInvestment), ',')
            cashflowFromFinancing = '$' + format(float(cashflowFromFinancing), ',')
            netIncome = '$' + format(float(netIncome), ',')
            dividendPayout = dividendPayout.replace(',', '')
            # Embed the data
            embed = discord.Embed(title=f'{symbol} Cash Flow', colour=discord.Colour.purple())
            # Add an image of the company logo to the embed from google
            embed.add_field(name='Fiscal Date Ending', value=fiscalDateEnding, inline=False)
            embed.add_field(name='Net Income', value=netIncome, inline=True)
            embed.add_field(name='Profit Loss', value=profitLoss, inline=True)
            embed.add_field(name='Operating Cash Flow', value=operatingCashflow, inline=True)
            embed.add_field(name='Capital Expenditures', value=capitalExpenditures, inline=True)
            embed.add_field(name='Cashflow From Investment', value=cashflowFromInvestment, inline=True)
            embed.add_field(name='Cashflow From Financing', value=cashflowFromFinancing, inline=True)
            embed.add_field(name='Dividend Payout', value=dividendPayout, inline=False)
            embed.set_footer(text='Requested By: ' + str(ctx.author.name))
            await ctx.send(embed=embed)
        except Exception:
            await ctx.send("Error gathering data. Please try again. !cashflow <ticker>")
            logger.error("Error sending embed from !cashflow command.")
            return
def to_json(self):
    data = json_graph.node_link_data(self)
    try:
        _str = json.dumps(data, indent=2, cls=CustomTypeEncoder)
    except Exception as inst:
        _str = ""
        logger.error(str(inst))
    return _str
def toggle_observable(self, obs=True):
    if self.id:
        upd_stat = "UPDATE {0} SET observable={1} WHERE id={2}".format(self.table, obs, self.id)
        try:
            self.cursor.execute(upd_stat)
        except Exception as e:
            logger.error("Cannot update table {0}. {1}".format(self.table, e.pgerror))
            return
def configFile():
    try:
        data = xlrd.open_workbook('configFile.xls')  # open the xls file
    except IOError as err:
        # With exc_info=True the traceback is written to the logger as well.
        logger.error(err, exc_info=True)
    else:
        # table = data.sheets()[2]  # select the 3rd sheet
        table = data.sheet_by_name(u'ProcotolPara')  # select the ProcotolPara sheet
        ParaInit['udp_ip'] = str(table.row_values(1)[1])
        ParaInit['udp_port'] = int(table.row_values(1)[2])
        ParaInit['mqtt_ip'] = str(table.row_values(2)[1])
        ParaInit['mqtt_port'] = int(table.row_values(2)[2])
        ParaInit['tcp_ip'] = str(table.row_values(3)[1])
        ParaInit['tcp_port'] = int(table.row_values(3)[2])
        table = data.sheet_by_name(u'BaseStation')  # select the BaseStation sheet
        anchorunits = []
        BS_dict = {}
        anchorunit = []
        S_MAC = []
        for i in range(1, table.nrows):
            BS = table.row_values(i)[1:]
            BS_dict[BS[0]] = (BS[1], BS[2], BS[3], BS[4], BS[5], int(BS[6]), BS[7], BS[8])
            if BS[1] == 'M':
                if len(anchorunit) != 0:
                    anchorunit.append(S_MAC)
                    anchorunits.append(anchorunit)
                    anchorunit = []
                anchorunit.append(str(BS[0]))
                anchorunit.append(str(BS[0]))
                S_MAC = []
            elif BS[1] == 'S':
                S_MAC.append(str(BS[0]))
        anchorunit.append(S_MAC)
        anchorunits.append(anchorunit)
        # logger.debug(anchorunits)
        # logger.debug(BS_dict)
        Station_MAC_IP = {}
        Station_IP_MAC = {}
        MasterIP = []
        SlaveIP = []
        for k, v in BS_dict.items():
            Station_MAC_IP[k] = v[7]
            Station_IP_MAC[v[7]] = k
            if v[0] == 'M':
                MasterIP.append(v[7])
            elif v[0] == 'S':
                SlaveIP.append(v[7])
        ParaInit['master_ip'] = MasterIP
        ParaInit['slave_ip'] = SlaveIP
        ParaInit['station_mac_ip'] = Station_MAC_IP
        ParaInit['station_ip_mac'] = Station_IP_MAC
        ParaInit['base_station'] = BS_dict
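# Rough shape of the spreadsheet this reader expects (illustrative values only;
# the real configFile.xls is not shown). ProcotolPara, rows 1-3:
#   udp   192.168.1.10   9000
#   mqtt  192.168.1.11   1883
#   tcp   192.168.1.12   8000
# BaseStation: one station per data row; after dropping column 0, field 0 is
# the station MAC, field 1 is the role ('M' starts a new anchor unit, 'S'
# joins the current one), and field 8 is the station IP.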
def variable_set_sub(self, topic, payload):
    logger.debug('*** Running variable_set_sub ***')
    # Write controller task
    try:
        logger.info("write mbvar value %s" % payload)
        message = json.loads(payload)
        self.group_mgmts.writehandle(message)
    except Exception as e:
        logger.error("write data error in GroupExtHandle:%s" % e)
def main():
    try:
        logger.info('Calling bot initialization from main')
        BotInitialization()
    except Exception as e:
        print(e)
        print("\nThe bot failed to run :(")
        logger.error(e)
def discover_cluster_params(self):
    if not self.__prod_conn:
        if not self.__set_self_db_conn():
            logger.error("Did not obtain Prod database connection for cluster {0} in discover_cluster_params method".format(self.db_fields['hostname']))
            return
    cur = self.__prod_conn.cursor()
    params = {}
    try:
        cur.execute("SELECT current_setting('server_version') AS ver")
    except Exception as e:
        logger.error("Cannot get 'server_version' param for {0}. Details: {1}".format(self.db_fields['hostname'], e.pgerror))
    else:
        pg_ver = cur.fetchone()[0]
        if self.db_fields['pg_version'] != pg_ver:
            logger.info("Updated parameter 'pg_version' set to '{0}' for hostcluster {1}".format(pg_ver, self.db_fields['hostname']))
            params['pg_version'] = pg_ver
    try:
        cur.execute("SELECT current_setting('data_directory') AS pg_data_path")
    except Exception as e:
        logger.error("Cannot get 'data_directory' param for {0}. Details: {1}".format(self.db_fields['hostname'], e.pgerror))
    else:
        data_path = cur.fetchone()[0]
        if self.db_fields['pg_data_path'] != data_path:
            logger.info("Updated parameter 'pg_data_path' set to '{0}' for hostcluster {1}".format(data_path, self.db_fields['hostname']))
            params['pg_data_path'] = data_path
    try:
        cur.execute("SELECT CASE WHEN current_setting('track_counts')='on' THEN 't'::boolean ELSE 'f'::boolean END AS track_counts")
    except Exception as e:
        logger.error("Cannot get 'track_counts' param for {0}. Details: {1}".format(self.db_fields['hostname'], e.pgerror))
    else:
        t_c = cur.fetchone()[0]
        if self.db_fields['track_counts'] != t_c:
            logger.info("Updated parameter 'track_counts' set to '{0}' for hostcluster {1}".format(t_c, self.db_fields['hostname']))
            params['track_counts'] = t_c
    try:
        cur.execute("SELECT current_setting('track_functions') AS track_functions")
    except Exception as e:
        logger.error("Cannot get 'track_functions' param for {0}. Details: {1}".format(self.db_fields['hostname'], e.pgerror))
    else:
        t_f = cur.fetchone()[0]
        tf = genericEnum('enum_track_functions')
        tf_id = tf.get_id_by_name(t_f)
        if self.db_fields['track_function_id'] != tf_id:
            logger.info("Updated parameter 'track_functions' set to '{0}' for hostcluster {1}".format(t_f, self.db_fields['hostname']))
            params['track_function_id'] = tf_id
    cur.close()
    if len(params) > 0:
        update_stat = "UPDATE {0} SET ".format(self.table)
        for k in params.keys():
            update_stat += "{0}='{1}',".format(k, params[k])
        update_stat = update_stat[:-1] + " WHERE id={0}".format(self.id)
        self.update_record(update_stat)
        self.truncate()
        self._populate()
    else:
        logger.debug("No new data obtained during discover for hostcluster {0}".format(self.db_fields['hostname']))
def retire(self):
    if self.id:
        upd_stat = "UPDATE {0} SET alive=FALSE WHERE id={1}".format(self.table, self.id)
        # print upd_stat
        try:
            self.cursor.execute(upd_stat)
        except Exception as e:
            logger.error("Cannot retire table {0}. {1}".format(self.table, e.pgerror))
            return
def changeCheckBoxListInPopupMenu(list, win_title='Set check boxes'):
    """Shows the list of check-boxes as a dialog pop-up menu and returns the (un)changed list"""
    popupMenu = GUIPopupCheckList(None, list, win_title)
    #popupMenu.move(QtCore.QPoint(50,50))
    popupMenu.move(QtGui.QCursor.pos())
    response = popupMenu.exec_()
    if response == QtGui.QDialog.Accepted:
        logger.info('New checkbox list is accepted', __name__)
    elif response == QtGui.QDialog.Rejected:
        logger.info('Will use old checkbox list', __name__)
    else:
        logger.error('Unknown response...', __name__)
async def sPrice(self, ctx, ticker):
    api_url = 'https://www.alphavantage.co/query'
    parameters = {
        'function': 'GLOBAL_QUOTE',
        'symbol': ticker,
        'apikey': self.av_api_key,
        'datatype': 'json',
        'outputsize': 'compact'
    }
    headers = {
        'Accepts': 'application/json',
        'AlphaVantagKey': self.av_api_key,
    }
    # Checking if the user has specified a ticker
    if ticker is None:
        await ctx.send("Please specify a stock ticker. Example: !sPrice MSFT")
        return
    else:
        try:
            response_json = requests.get(api_url, headers=headers, params=parameters).json()
            symbol = response_json['Global Quote']['01. symbol']
            current_price = response_json['Global Quote']['05. price']
            open_price = response_json['Global Quote']['02. open']
            high_price = response_json['Global Quote']['03. high']
            low_price = response_json['Global Quote']['04. low']
            volume = response_json['Global Quote']['06. volume']
            latest_trading_day = response_json['Global Quote']['07. latest trading day']
            previous_close = response_json['Global Quote']['08. previous close']
            change = response_json['Global Quote']['09. change']
            change_percent = response_json['Global Quote']['10. change percent']
            # Formatting the prices/numbers (issue with output currently)
            # current_price = '{0:,.3f}'.format(current_price)
            # open_price = '{0:,.3f}'.format(open_price)
            # high_price = '{0:,.3f}'.format(high_price)
            # low_price = '{0:,.3f}'.format(low_price)
            # previous_close = '{0:,.3f}'.format(previous_close)
            # change = '{0:,.3f}'.format(change)
            # Embedding data to output to user
            embed = discord.Embed(title=symbol, description=f'**Current Price: ${current_price}**',
                                  colour=discord.Colour.purple())
            embed.add_field(name=":city_sunset: Open Price", value="$" + open_price, inline=True)
            embed.add_field(name=":mailbox_closed: High Price", value="$" + high_price, inline=True)
            embed.add_field(name=":mailbox_with_no_mail: Low Price", value="$" + low_price, inline=True)
            embed.add_field(name=":bar_chart: Volume", value=volume, inline=True)
            embed.add_field(name=":city_dusk: Previous Close", value="$" + previous_close, inline=True)
            embed.add_field(name=":hourglass: Change", value="$" + change, inline=True)
            embed.add_field(name=":hourglass: Percent Change (24h)", value=change_percent, inline=True)
            embed.add_field(name=":cityscape: Latest Trading Day", value=latest_trading_day, inline=True)
            await ctx.send(embed=embed)
        except Exception:
            await ctx.send("Error, unable to process embed with data. Please try again.")
            logger.error("Error sending embed from !sPrice command.")
            return
def upload_config_var(self):
    logger.debug('*** Running upload_config_var ***')
    if self.bridge_mqtt_is_ready():
        # self.upload_var_dict()
        # Serial network port configuration upload
        self.dev_mgmt.config_timer.add(1)
        # Start statistics task
        # self.config_timer.add(5)
    else:
        logger.error("Upload configuration channel is not available.")
def _populate(self):
    init_stat = "SELECT * FROM {0} WHERE id={1}".format(self.table, self.id)
    # print init_stat
    try:
        self.cursor.execute(init_stat)
    except Exception as e:
        logger.error(e)
        return
    self.truncate()
    res = self.cursor.fetchone()
    self.db_fields = zip_field_names(res, self.cursor.description)
def __exit__(self, exception_type, exception_value, exception_traceback):
    logger.info('Executor function __exit__')
    if exception_value:
        self.__conn.rollback()
        logger.error(f'Error exception {exception_value}')
    else:
        self.__conn.commit()
    self.__cursor.close()
    Connection.break_free_connection(self.__conn)
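# Usage sketch for the context manager above (the owning class name is not
# shown in this snippet, so 'DbExecutor' is hypothetical):
# with DbExecutor() as executor:
#     ...  # do work on the wrapped connection/cursor
# A clean exit commits the connection; an exception inside the block rolls it
# back. Either way the cursor is closed and the connection is handed back via
# Connection.break_free_connection().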
def log(self, msg, level=1):
    #logger.levels = ['debug','info','warning','error','critical']
    self.count_msg += 1
    #print 'Received msg: %d' % self.count_msg
    if self.print_bits & 1 or level == 4:
        print msg
    if level == 1:
        logger.info(msg, __name__)
    elif level == 0:
        logger.debug(msg, __name__)
    elif level == 2:
        logger.warning(msg, __name__)
    elif level == 3:
        logger.error(msg, __name__)
    elif level == 4:
        logger.critical(msg, __name__)
    else:
        logger.info(msg, __name__)
def receive(self, addr, pB_udpbegin, cB_ip, cB_ip_2, tB_log, tB_log_2, cB_txmac, cB_ip_3):
    logger.info(addr)
    server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        server.bind(addr)
    except OSError as r:
        logger.error(r, exc_info=True)
        tB_log.append('警告:端口%d被占用!' % (addr[1]))  # warning: the port is already in use
        pB_udpbegin.setText("udp启动")  # "start udp"
        pB_udpbegin.setEnabled(True)
        cB_ip.setEnabled(True)
        global upd_en
        upd_en = False
        return
    server.settimeout(2)  # socket timeout, in seconds
    self.t = threading.Thread(target=self.quenePrint, args=(tB_log, tB_log_2,))
    self.t.setDaemon(True)
    self.t.start()
    while True:
        if upd_en:
            try:
                data, self.udp_addr = server.recvfrom(1024)
                # logger.info(['data length: ', len(data), ',data: ', ''.join(["%02X" % (char) for char in data])])
            except socket.timeout:
                logger.error(socket.timeout, exc_info=0)
                # pass
            else:
                if data[0] == 1:  # UWB data received
                    self.uwbReceiver(data, cB_txmac, cB_ip_3, self.udp_addr)
                elif data[0] == 2:  # BLE data received
                    # logger.debug('ble')
                    self.bleReceiver(data, cB_txmac)
                elif data[0] == 0x10:  # lora-scale data received
                    self.tagDevice(data, 0)
                elif data[0] == 0x80:  # lora-forklift data received
                    self.tagDevice(data, 1)
                elif data[0] == 0xfa:  # device manager
                    self.deviceManager(data, self.udp_addr, cB_ip_2)
                elif data[0] == 0xaa:  # tag Bluetooth
                    self.tagBleReceiver(data, cB_txmac)
        else:
            server.close()
            logger.info('udp stop')
            pB_udpbegin.setEnabled(True)
            break
def __init__(self, id=None, conn_string=None):
    self.conn_string = conn_string
    if id:
        if not conn_string:
            logger.error("No connection string provided for database id {0} during init".format(id))
        else:
            super(DatabaseName, self).__init__(id)
            self.conn_string = conn_string
    else:
        super(DatabaseName, self).__init__()
    self.stat_obj.set_fk_field('dn_id')
    self.stat_obj.set_table_name('database_stat')
def run_page(self, pagenumber, pagecount):
    # Build the request headers
    ua = UserAgent()
    headers = {'user-agent': ua.Chrome}
    url = 'http://www.ldmap.net/service/map/feature/list'
    # Request parameters
    param = {
        'name': '',
        'layer': '',
        'state': -1,
        'pagenumber': pagenumber,
        'pagecount': pagecount,
        'mapid': self.mapid,
        '_': int(datetime.now().timestamp())
    }
    try:
        resp = requests.get(url, headers=headers, params=param)
    except requests.RequestException as e:
        logger.error(e)
    else:
        print(resp.text)
        if resp.status_code == 200:
            result = resp.json()
            for item in result['feature_list']:
                tag_create_time = datetime.fromtimestamp(int(item['create_time'][6:19]) / 1000)
                tag_edit_time = datetime.fromtimestamp(int(item['last_edit_time'][6:19]) / 1000)
                feature_id = item['feature_id']
                feature_name = item['feature_name']
                group_id = item['group_id']
                group_name = item['group_name']
                layer_id = item['layer_id']
                layer_name = item['layer_name']
                map_id = item['map_id']
                longitude, latitude = self.run_detail(feature_id)
                print(longitude, latitude)
                bid = Bid(tag_create_time=tag_create_time, tag_edit_time=tag_edit_time,
                          feature_id=feature_id, feature_name=feature_name,
                          group_id=group_id, group_name=group_name,
                          layer_id=layer_id, layer_name=layer_name,
                          map_id=map_id, longitude=longitude, latitude=latitude)
                bid_upsert(bid)
def log(self, msg, level=1):
    """Internal logger - re-directs all messages, including critical ones, to the project logger"""
    #logger.levels = ['debug','info','warning','error','critical']
    self.count_msg += 1
    #print 'Received msg: %d' % self.count_msg
    #if self.print_bits & 1 or level == 4: print msg
    if level == 1:
        logger.info(msg, __name__)
    elif level == 4:
        logger.critical(msg, __name__)
    elif level == 0:
        logger.debug(msg, __name__)
    elif level == 2:
        logger.warning(msg, __name__)
    elif level == 3:
        logger.error(msg, __name__)
    else:
        logger.info(msg, __name__)
def delete_user(cls, data):
    if data.get('id') == '' or data.get('id') is None:
        return Con.response({'error': 'Invalid parameters. Id required'}, 400)
    user = User.get_user_by_id(data.get('id'))
    if not user:
        return Con.response({'error': 'User not found'}, 400)
    try:
        User.delete_user(data.get('id'))
        return Con.response({'OK': '200'}, 200)
    except Exception as error:
        logger.error('error delete %s', error)
        return Con.response({'Error': '500'}, 500)
async def get_manga(self, ctx, *args):
    logger.info(Constants.FUN_QUERY_SENT, Constants.MANGA, " ".join(args))
    try:
        manga_search = MangaSearch(" ".join(args))
        manga = Manga(manga_search.results[0].mal_id)
        await ctx.send(Constants.MSG_FUN_RESPONSE.format(manga.title, manga.synopsis, manga.image_url))
    except Exception:
        logger.error(Constants.FUN_QUERY_ERR, Constants.MANGA, ' '.join(args))
        await ctx.send(Constants.MSG_FUN_ERR.format(Constants.MANGA, ' '.join(args)))
def get_dependants(self,obs=None): select_stat="SELECT id FROM {0} WHERE {1}={2} AND alive".format(self.sub_table,self.sub_fk,self.id) if obs: select_stat+=" AND observable" try: self.cursor.execute(select_stat) except Exception as e: logger.error(e.pgerror) return ids=[] for ref in self.cursor.fetchall(): ids.append(ref[0]) return ids
def get_dependants(self,obs=None): select_stat="SELECT id,db_name FROM {0} WHERE {1}={2} AND alive".format(self.sub_table,self.sub_fk,self.id) if obs: select_stat+=" AND observable" try: self.cursor.execute(select_stat) except Exception as e: logger.error(e.pgerror) return ret=[] for db in self.cursor.fetchall(): ret.append(dict(id=db[0],db_name=db[1])) return ret
def __init__(self, path_to_calib_types, calib_type='pedestals'):
    """Constructor.
    @param path_to_calib_types - path to the directory with calibration types
    @param calib_type - calibration type, for example "pedestals", "comm_mode", "pixel_status", etc.
    """
    self.path_to_calib_types = path_to_calib_types
    self.type = calib_type
    self.path = os.path.join(path_to_calib_types, calib_type)
    if not os.path.exists(self.path):
        msg = 'Path %s DOES NOT EXIST' % self.path
        print msg
        logger.error(msg, __name__)
        return None
def stat(self, time_id):
    if not self.__prod_conn:
        if not self.__set_self_db_conn():
            logger.error("Did not obtain Prod database connection for cluster {0} in stat method".format(self.db_fields['hostname']))
            return
    cur = self.__prod_conn.cursor()
    sql_stat = """SELECT pg_stat_get_bgwriter_timed_checkpoints() AS checkpoints_timed,
        pg_stat_get_bgwriter_requested_checkpoints() AS checkpoints_req,
        pg_stat_get_bgwriter_buf_written_checkpoints() AS buffers_checkpoint,
        pg_stat_get_bgwriter_buf_written_clean() AS buffers_clean,
        pg_stat_get_bgwriter_maxwritten_clean() AS maxwritten_clean,
        pg_stat_get_buf_written_backend() AS buffers_backend,
        pg_stat_get_buf_alloc() AS buffers_alloc"""
    self.create_stat(time_id, sql_stat, cur)
    cur.close()
def ccd_pixel_coordinates(sp):
    ir = np.arange(sp.rows)
    ic = np.arange(sp.cols)
    if sp.ccd_orient == 0:
        sp.x_ccd_pix = ic
        if sp.y_is_flip:
            sp.y_ccd_pix = sp.rows - ir
        else:
            sp.y_ccd_pix = ir
        sp.X_ccd_pix, sp.Y_ccd_pix = np.meshgrid(sp.x_ccd_pix, sp.y_ccd_pix)
    elif sp.ccd_orient == 90:
        sp.x_ccd_pix = ir
        if sp.y_is_flip:
            sp.y_ccd_pix = ic
        else:
            sp.y_ccd_pix = sp.cols - ic
        sp.Y_ccd_pix, sp.X_ccd_pix = np.meshgrid(sp.y_ccd_pix, sp.x_ccd_pix)
    elif sp.ccd_orient == 180:
        sp.x_ccd_pix = sp.cols - ic
        if sp.y_is_flip:
            sp.y_ccd_pix = ir
        else:
            sp.y_ccd_pix = sp.rows - ir
        sp.X_ccd_pix, sp.Y_ccd_pix = np.meshgrid(sp.x_ccd_pix, sp.y_ccd_pix)
    elif sp.ccd_orient == 270:
        sp.x_ccd_pix = sp.rows - ir
        if sp.y_is_flip:
            sp.y_ccd_pix = sp.cols - ic
        else:
            sp.y_ccd_pix = ic
        sp.Y_ccd_pix, sp.X_ccd_pix = np.meshgrid(sp.y_ccd_pix, sp.x_ccd_pix)
    else:
        logger.error('Non-existent CCD orientation: ' + str(sp.ccd_orient), __name__)
    #sp.x_ccd = sp.x_ccd_pix * sp.ccd_pixsize
    #sp.y_ccd = sp.y_ccd_pix * sp.ccd_pixsize
    #print 'ir:', ir
    #print 'ic:', ic
    #print 'x_ccd:', sp.x_ccd
    #print 'y_ccd:', sp.y_ccd
    #print 'X_ccd_pix.shape =', sp.X_ccd_pix.shape
    #print 'Y_ccd_pix.shape =', sp.Y_ccd_pix.shape
    #print 'X_ccd_pix:\n', sp.X_ccd_pix
    #print 'Y_ccd_pix:\n', sp.Y_ccd_pix
    sp.x_map, sp.y_map = sp.get_xy_maps()
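# A minimal check of the meshgrid convention used above (illustrative, with a
# made-up 2x3 sensor): for orient == 0, X varies along columns and Y along
# rows, and both grids come out shaped (rows, cols).
import numpy as np
rows, cols = 2, 3
x = np.arange(cols)
y = rows - np.arange(rows)   # the y_is_flip branch
X, Y = np.meshgrid(x, y)
assert X.shape == (rows, cols) and Y.shape == (rows, cols)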
def delete(self, team, lang, task):
    if team != self.current_user:
        logger.warn("Team %s said to be %s while deleting translation of %s in %s." % (self.current_user, team, task, lang))
        raise HTTPError(403)
    if lang not in teams[team]["tasks"].get(task, []):
        logger.warn("Team %s tried to delete a translation that doesn't exist (%s in %s)" % (team, task, lang))
        raise HTTPError(404)
    logger.info("Team %s deleted translation of task %s into %s" % (team, task, lang))
    # Update the task and team data
    task_path = os.path.join(os.path.dirname(__file__), "tasks", "%s.json" % task)
    tasks[task]["teams"][team].remove(lang)
    tasks[task]["langs"][lang].remove(team)
    try:
        json.dump(tasks[task], open(task_path, "w"), indent=4)
    except IOError:
        logger.error("Couldn't write data for task %s" % task)
        raise HTTPError(500)
    except ValueError:
        logger.error("Couldn't dump data for task %s" % task)
        raise HTTPError(500)
    team_path = os.path.join(os.path.dirname(__file__), "teams", "%s.json" % team)
    teams[team]["tasks"][task].remove(lang)
    teams[team]["langs"][lang].remove(task)
    try:
        json.dump(teams[team], open(team_path, "w"), indent=4)
    except IOError:
        logger.error("Couldn't write data for team %s" % team)
        raise HTTPError(500)
    except ValueError:
        logger.error("Couldn't dump data for team %s" % team)
        raise HTTPError(500)
    # Remove the symlinks
    links = [os.path.join(os.path.dirname(__file__), "data", task, "by_lang", "%s (%s).pdf" % (lang, team)),
             os.path.join(os.path.dirname(__file__), "data", task, "by_team", "%s (%s).pdf" % (team, lang))]
    for link in links:
        os.remove(link)
    proxy.send("delete", "%s %s %s" % (team, lang, task), '*')
def create(self):
    if not self.id:
        ins_stat = "INSERT INTO {0} (".format(self.table)
        fields = self.db_fields.keys()
        for column in fields:
            ins_stat += column + ','
        ins_stat = ins_stat[:-1] + ") VALUES ("
        for column in fields:
            ins_stat += "%s,"
        ins_stat = ins_stat[:-1] + ") RETURNING id"
        stat = self.cursor.mogrify(ins_stat, self.db_fields.values())
        try:
            self.cursor.execute(stat)
        except Exception as e:
            logger.error("Cannot create record: {0}".format(e.pgerror))
            return
        logger.debug("Created new object with statement: {0}".format(stat))
        self.id = self.cursor.fetchone()[0]
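# A standalone walkthrough of the statement create() assembles, for a
# hypothetical row on a hypothetical table (mogrify itself needs a live
# psycopg2 cursor, so only the template-building part is reproduced here):
db_fields = {'idx_name': 'idx_a', 'alive': True}
table = 'index_name'
template = "INSERT INTO {0} ({1}) VALUES ({2}) RETURNING id".format(
    table, ','.join(db_fields.keys()), ','.join(['%s'] * len(db_fields)))
assert template == "INSERT INTO index_name (idx_name,alive) VALUES (%s,%s) RETURNING id"
# cursor.mogrify(template, list(db_fields.values())) would then bind the
# values client-side before execution; this relies on keys() and values()
# iterating in the same order, which Python guarantees for a given dict.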
def xyLabToImg(self, xy):
    rows = cp.bat_img_rows.value()
    cols = cp.bat_img_cols.value()
    size = cp.bat_img_size.value()
    ccd_orient = cp.ccd_orient.value()
    x, y = xy
    print 'Image rows, cols:', rows, cols
    print 'Beam center Lab x, y =', x, y
    if ccd_orient == '0':
        return (x, rows - y)
    elif ccd_orient == '90':
        return (x, y)
    elif ccd_orient == '180':
        return (cols - x, y)
    elif ccd_orient == '270':
        return (cols - x, rows - y)
    else:
        logger.error('Non-existent CCD orientation: ' + str(ccd_orient), __name__)
def delete(self, team, lang, task): logger.info("Team %s unselected %s %s %s" % (self.current_user, lang, task, team)) path = os.path.join(os.path.dirname(__file__), "teams", "%s.json" % self.current_user) if [team, lang, task] in teams[self.current_user]["selected"]: teams[self.current_user]["selected"].remove([team, lang, task]) try: json.dump(teams[self.current_user], open(path, "w"), indent=4) except IOError: logger.error("Couldn't write data for team %s" % team) raise HTTPError(500) except ValueError: logger.error("Couldn't dump data for team %s" % team) raise HTTPError(500) proxy.send("unselect", "%s %s %s" % (team, lang, task), self.current_user)
def ccd_pixel_coordinates(sp):
    ir = np.arange(sp.rows)
    ic = np.arange(sp.cols)
    if sp.ccd_orient == '0':
        sp.x_ccd_pix = ic
        sp.y_ccd_pix = sp.rows - ir
        sp.X_ccd_pix, sp.Y_ccd_pix = np.meshgrid(sp.x_ccd_pix, sp.y_ccd_pix)
    elif sp.ccd_orient == '90':
        sp.x_ccd_pix = ir
        sp.y_ccd_pix = ic
        sp.Y_ccd_pix, sp.X_ccd_pix = np.meshgrid(sp.y_ccd_pix, sp.x_ccd_pix)
    elif sp.ccd_orient == '180':
        sp.x_ccd_pix = sp.cols - ic
        sp.y_ccd_pix = ir
        sp.X_ccd_pix, sp.Y_ccd_pix = np.meshgrid(sp.x_ccd_pix, sp.y_ccd_pix)
    elif sp.ccd_orient == '270':
        sp.x_ccd_pix = sp.rows - ir
        sp.y_ccd_pix = sp.cols - ic
        sp.Y_ccd_pix, sp.X_ccd_pix = np.meshgrid(sp.y_ccd_pix, sp.x_ccd_pix)
    else:
        logger.error('Non-existent CCD orientation: ' + str(sp.ccd_orient), __name__)
    #sp.x_ccd = sp.x_ccd_pix * sp.ccd_pixsize
    #sp.y_ccd = sp.y_ccd_pix * sp.ccd_pixsize
    #print 'ir:', ir
    #print 'ic:', ic
    #print 'x_ccd:', sp.x_ccd
    #print 'y_ccd:', sp.y_ccd
    #print 'X_ccd_pix.shape =', sp.X_ccd_pix.shape
    #print 'Y_ccd_pix.shape =', sp.Y_ccd_pix.shape
    #print 'X_ccd_pix:\n', sp.X_ccd_pix
    #print 'Y_ccd_pix:\n', sp.Y_ccd_pix
    sp.x_map_db, sp.y_map_db = sp.xy_maps_for_direct_beam_data()
def discover_toast(self, prod_cursor):
    self.cursor.execute("SELECT obj_oid,ttbl_name,id FROM table_toast_name WHERE tn_id={0} AND alive".format(self.id))
    local_ttbl = self.cursor.fetchone()
    try:
        prod_cursor.execute("""SELECT r.reltoastrelid,t.relname
            FROM pg_class r
            INNER JOIN pg_class t ON r.reltoastrelid=t.oid
            WHERE r.relkind='r' AND t.relkind='t' AND r.oid={0}""".format(self.db_fields['obj_oid']))
    except Exception as e:
        logger.error("Cannot execute toast table discovery query: {0},{1}".format(e.pgcode, e.pgerror))
        return
    prod_ttbl = prod_cursor.fetchone()
    if local_ttbl and prod_ttbl:
        if not (local_ttbl[0] == prod_ttbl[0] and local_ttbl[1] == prod_ttbl[1]):
            logger.info("Retired TOAST table {0} for table {1}".format(local_ttbl[1], self.db_fields['tbl_name']))
            old_ttbl = TableToastName(local_ttbl[2])
            old_ttbl.retire()
            logger.info("Created new TOAST table {0} in table {1}".format(prod_ttbl[1], self.db_fields['tbl_name']))
            new_ttbl = TableToastName()
            new_ttbl.set_fields(tn_id=self.id, obj_oid=prod_ttbl[0], ttbl_name=prod_ttbl[1])
            new_ttbl.create()
            self.toast_id = local_ttbl[2]
            new_ttbl.truncate()
    elif local_ttbl and not prod_ttbl:
        self.toast_id = None
        logger.info("Retired TOAST table {0} for table {1}".format(local_ttbl[1], self.db_fields['tbl_name']))
        old_ttbl = TableToastName(local_ttbl[2])
        old_ttbl.retire()
    elif not local_ttbl and prod_ttbl:
        logger.info("Created new TOAST table {0} in table {1}".format(prod_ttbl[1], self.db_fields['tbl_name']))
        new_ttbl = TableToastName()
        new_ttbl.set_fields(tn_id=self.id, obj_oid=prod_ttbl[0], ttbl_name=prod_ttbl[1])
        new_ttbl.create()
        self.toast_id = new_ttbl.get_id()
        new_ttbl.truncate()
def init():
    # Load teams and tasks from disk
    teams_path = os.path.join(os.path.dirname(__file__), "teams")
    for f_name in os.listdir(teams_path):
        if f_name.endswith(".json"):
            name = f_name[:-5]
            path = os.path.join(teams_path, f_name)
            try:
                teams[name] = json.load(open(path))
            except IOError:
                logger.error("Couldn't read data for team %s" % name)
            except ValueError:
                logger.error("Couldn't load data for team %s" % name)
    tasks_path = os.path.join(os.path.dirname(__file__), "tasks")
    for f_name in os.listdir(tasks_path):
        if f_name.endswith(".json"):
            name = f_name[:-5]
            path = os.path.join(tasks_path, f_name)
            try:
                tasks[name] = json.load(open(path))
            except IOError:
                logger.error("Couldn't read data for task %s" % name)
            except ValueError:
                logger.error("Couldn't load data for task %s" % name)
    # TODO Notify if some teams are missing or shouldn't be there
    # Add needed structures to teams and tasks
    for team in teams.itervalues():
        team.setdefault("tasks", {})
        team.setdefault("langs", {})
        team.setdefault("selected", [])
    for task in tasks.itervalues():
        task.setdefault("teams", {})
        task.setdefault("langs", {})
def update_record(self, upd_stat):
    try:
        self.cursor.execute(upd_stat)
    except Exception as e:
        logger.error("Cannot update {0}\nDetails: {1}\nQuery: {2}".format(self.table, e.pgerror, upd_stat))
        return
def _save(self, team, lang, task):
    timestamp = datetime.now()
    logger.info("Team %s uploaded translation of task %s into %s" % (team, task, lang))
    # Try immediately to save the file in the history
    path = os.path.join(os.path.dirname(__file__), "history",
                        "%s %s %s %s.pdf" % (timestamp, team, task, lang))
    try:
        open(path, "wb").write(self.request.body)
    except IOError:
        logger.error("Couldn't save translation of task %s into %s, made by %s, in the history." % (task, lang, team))
        raise HTTPError(500)
    # Update the task and team data
    task_path = os.path.join(os.path.dirname(__file__), "tasks", "%s.json" % task)
    tasks[task]["teams"][team] = sorted(set(tasks[task]["teams"].get(team, []) + [lang]))
    tasks[task]["langs"][lang] = sorted(set(tasks[task]["langs"].get(lang, []) + [team]))
    try:
        json.dump(tasks[task], open(task_path, "w"), indent=4)
    except IOError:
        logger.error("Couldn't write data for task %s" % task)
        raise HTTPError(500)
    except ValueError:
        logger.error("Couldn't dump data for task %s" % task)
        raise HTTPError(500)
    team_path = os.path.join(os.path.dirname(__file__), "teams", "%s.json" % team)
    teams[team]["tasks"][task] = sorted(set(teams[team]["tasks"].get(task, []) + [lang]))
    teams[team]["langs"][lang] = sorted(set(teams[team]["langs"].get(lang, []) + [task]))
    try:
        json.dump(teams[team], open(team_path, "w"), indent=4)
    except IOError:
        logger.error("Couldn't write data for team %s" % team)
        raise HTTPError(500)
    except ValueError:
        logger.error("Couldn't dump data for team %s" % team)
        raise HTTPError(500)
    # Make some symlinks to easily access this version of the file
    links = [os.path.join(os.path.dirname(__file__), "data", task, "by_lang", "%s (%s).pdf" % (lang, team)),
             os.path.join(os.path.dirname(__file__), "data", task, "by_team", "%s (%s).pdf" % (team, lang))]
    for link in links:
        try:
            os.makedirs(os.path.dirname(link))
        except OSError:
            pass  # dir already exists
        try:
            os.remove(link)
        except OSError:
            pass  # file doesn't exist yet
        os.symlink(os.path.relpath(path, os.path.dirname(link)), link)
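# Why os.path.relpath in the symlink step above: the link target is stored
# relative to the link's own directory, so the links survive the whole tree
# being moved. A standalone check with hypothetical paths:
import os
path = "history/2024-01-01 team task lang.pdf"
link = "data/task/by_lang/lang (team).pdf"
rel = os.path.relpath(path, os.path.dirname(link))
assert rel == os.path.join("..", "..", "..", "history", "2024-01-01 team task lang.pdf")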
def get(request):
    data = copy.copy(return_data)
    ##############################################################################
    # url parser
    url = request.get_full_path()
    logger.info("[REQUEST_URL]: {0}".format(url))
    url_tuple = urlparse.urlparse(url)
    # scheme='http', netloc='192.168.0.136:14001', path='/name_parser/', params='',
    # query='name=%E6%BD%98%E5%B0%91%E5%AE%81&birthday=1990-12-01&sex=1', fragment=''
    (_scheme, _netloc, _path, _params, _query, _fragment) = (
        url_tuple.scheme, url_tuple.netloc, url_tuple.path,
        url_tuple.params, url_tuple.query, url_tuple.fragment)
    # Collect the relevant parameters and build the cache id from them
    get_params_list = dict_to_order_list(request.GET)
    post_params_list = dict_to_order_list(request.POST)
    id_dict = dict(get=get_params_list, post=post_params_list, path=_path)
    id_json = json.dumps(id_dict)
    id = get_hash_id(id_json)
    ###############################################################################
    # 2, first check the cache and return the data if it already exists
    try:
        logger.info("[PARSE_ID]: {0}".format(id))
        # obj = CacheTable.objects.get(id=id)
        # obj_data = obj.data
        obj_data = db_server.get(id)
        data['data'] = json.loads(obj_data)
        data = json.dumps(data)
        logger.info("get old data and return")
        return HttpResponse(data, content_type="application/json")
    except:
        msg = traceback.format_exc()
        logger.warn(msg)
    ################################################################################
    if not _scheme:
        _scheme = DEFAULT_SCHEME
    new_url = urlparse.urlunparse([_scheme, data_api, _path, _params, _query, _fragment])
    logger.info("[TRANSFER URL]: {0}".format(new_url))
    # 4, fetch the data through the data API using the rebuilt url
    try:
        # resp = requests.post(new_url, data=request.POST),  # TODO
        # '<html><title>405: Method Not Allowed</title><body>405: Method Not Allowed</body></html>'
        resp = urllib2.urlopen(new_url, data=urllib.urlencode(request.POST), timeout=1)
        result = extract_data(resp.read())
        data['data'] = result
        # save the data
        try:
            result = json.dumps(result)
            logger.info("insert new item [id: {0}]".format(id))
            db_server.insert(id, data_dict=dict(id=id, path=_path, info=id_json, data=result))
            # created, obj = CacheTable.objects.update_or_create(id=id,
            #     defaults=dict(id=id, path=_path, info=id_json, data=result))
        except:
            msg = traceback.format_exc()
            logger.error(msg)
    except Exception, ex:
        msg = traceback.format_exc()
        print msg
        data['code'] = -1
        data['msg'] = ex.message