def validate(ln_list):
    stop = False
    valid_numbers = []
    for x in ln_list:
        if not stop:
            firstcharacters = first_characters(x)
            if len(x) == 0:  # handle blank rows
                logging.info('This is a BLANK row')
            elif firstcharacters != '' or (firstcharacters == '' and x[0] == '-'):
                # handles ' 2' without affecting -999
                if not is_int_or_float(x):
                    if is_int_or_float(firstcharacters):
                        nc = next_character(x, len(firstcharacters))
                        if nc == ' ' or nc is None:
                            valid_numbers.append(float(firstcharacters))
                        else:
                            logging.warning('Though ' + str(x) + ' starts with an integer it is not a valid integer/float')
                    else:
                        logging.warning(str(x) + ' is an invalid string with text characters')
                else:  # a positive or negative integer/float
                    element = float(x)
                    if element >= 0:
                        valid_numbers.append(element)
                    elif element == -999:
                        stop = True
                    else:
                        logging.warning(str(x) + ' is an invalid negative number that is ignored')
            else:
                logging.warning(str(x) + "'s first character is not an integer")
    return valid_numbers

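# A recurring defect in the snippets collected here is calling the stdlib
# ``logging`` module directly, as in ``logging('message')``, which raises
# ``TypeError: 'module' object is not callable``. The fixes throughout this
# file route messages through ``logging.info/warning/error`` instead. A
# minimal sketch of the setup those calls assume (the format string below is
# an assumption for illustration, not taken from any of the original projects):
import logging

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s %(name)s: %(message)s')

# For library-style code, the named-logger variant is the usual idiom:
logger = logging.getLogger(__name__)
logger.info("logger configured")
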
def get(self, request):
    token = request.META.get("HTTP_AUTHORIZATION").split(' ')
    a = jwt_decode_handler(token[2])
    managerid = a['user_id']
    # date = request.GET.get('date', '')
    # try:
    #     if date == '':
    #         daily = Daily.objects.filter(userid_id=managerid)
    #     else:
    #         daily = Daily.objects.filter(dates=date, userid_id=managerid)
    # except ObjectDoesNotExist as e:
    #     logging.error(e)
    if a['role'] == 1 or a['role'] == 7:
        try:
            daily = Daily.objects.filter(userid_id=managerid)
        except ObjectDoesNotExist as e:
            logging.error(e)
    elif a['role'] == 2:
        try:
            daily = Daily.objects.all()
        except ObjectDoesNotExist as e:
            logging.error(e)
    result = True
    error = ""
    data = DailyListSerializer(daily, many=True).data
    return JsonResponse({"result": result, "data": data, "error": error})

def _startupActorSys(self, currentSystemBase, systemBase, capabilities, logDefs):
    self.systemAddress = ActorAddress('/ActorSys')
    self.capabilities = capabilities or dict()
    if 'logging' in self.capabilities:
        import logging
        logging.getLogger('Thespian').warning(
            'logging specification moved from capabilities to an explicit argument.')
    if systemBase is None:
        systemBase = currentSystemBase
        if systemBase is None:
            import thespian.system.simpleSystemBase
            systemBase = thespian.system.simpleSystemBase.ActorSystemBase(self, logDefs=logDefs)
    elif isinstance(systemBase, str):
        import sys
        if sys.version_info < (2, 7):
            import thespian.importlib as importlib
        else:
            import importlib
        # n.b. let the standard import exception indicate a missing/unknown systemBase
        module = importlib.import_module('thespian.system.%s' % systemBase)
        sbc = getattr(module, 'ActorSystemBase')
        if currentSystemBase and id(currentSystemBase.__class__) == id(sbc):
            systemBase = currentSystemBase
        else:
            systemBase = sbc(self, logDefs=logDefs)
    elif systemBase and currentSystemBase:
        if id(systemBase.__class__) == id(currentSystemBase.__class__):
            systemBase = currentSystemBase
    # else systemBase should be a valid object already
    self._systemBase = systemBase
    return systemBase

def GetDnsDomainIP(data):
    '''Extract the domain name and IPs from a raw DNS packet.
    Returns a list on success, an empty list on failure.
    List format: ["domain", "hex IP 1", "hex IP 2", ..., "hex IP n"]'''
    data = data.encode('hex')  # Python 2: hex-encode the raw packet bytes
    try:
        iptext = []
        # retype: 0 = query, 1 = response
        retype = int(bin(int(data[4:5], 16)).replace('0b', '')[0:1])
        # ANCOUNT: total number of records in the DNS answer section
        ANCOUNT = int(data[12:16], 16)
        text, end = DnshextoDomain(data)
        iptext.append(str(text))
        start = end + 8
        if retype and ANCOUNT:
            # Collect every answer-section record into iptext
            n = 0
            while n < ANCOUNT:
                # Keep moving the start/end pointers until all records are read
                start = start + 4
                end = start + 4
                dnstype = int(data[start:end], 16)
                start = start + 16
                end = end + 16
                length = int(data[start:end], 16)
                start = end + length * 2
                if dnstype == 1:  # A record
                    iptext.append(data[end:start])
                n += 1
        return iptext
    except Exception as e:
        logging.error("GetDnsDomainIP info:\t%s" % e)
        return []

def handleSystemPerformanceMessage(self, data: SystemPerformanceData) -> bool:
    logging.info("handleSystemPerformanceMessage method is called...")
    self._handleUpstreamTransmission(
        ResourceNameEnum.CDA_SYSTEM_PERF_MSG_RESOURCE,
        DataUtil.systemPerformanceDataToJson(self, data))
    # the signature declares bool; returning True on completion is assumed here
    return True

def pp_analysis(info_list, pp, pikarma_method):
    """ Analyze air_scan results for PineAP Suite detection. """
    for i in info_list:
        bssid, ssid = i.split("=*=")
        if bssid not in pp:
            pp[bssid] = []
            pp[bssid].append(ssid)
        elif ssid not in pp[bssid]:
            pp[bssid].append(ssid)
    # Detect KARMA attack activity.
    for v in pp.keys():
        if len(pp[v]) >= 2 and v not in blacklist:
            print colored("\033[1m[*] KARMA Attack activity detected.", 'magenta', attrs=['reverse', 'blink'])
            print "\033[1m[*] MAC Address : ", v
            print "\033[1m[*] FakeAP count: ", len(pp[v])
            log_time = time.strftime("%c")
            blacklist.append(v)
            if pikarma_method == "2":
                pp_deauth(blacklist)
                log = log_time, "||", v, " - ", len(pp[v]), " - Deauth Attack"
                logging.info(log)
            elif pikarma_method == "1":
                log = log_time, "||", v, " - ", len(pp[v])
                logging.info(log)
            time.sleep(3)
    return blacklist

def main():
    try:
        opener = my_urllib.build_opener(my_urllib.HTTPCookieProcessor)
        my_urllib.install_opener(opener)
    except Exception as e:
        logging.error("url.install_opener() exception : {0}".format(e))
    if not getUUID():
        print("Failed to get uuid, exiting")
        logging.error('getUUID error, quit')
        return
    showQRImage()
    while not waitLogin():
        pass
    # os.remove(QRImagePath)  # close the window showing the QR image; not possible on Windows
    if not login():
        print('login fail')
        return
    if not webwxinit():
        print('webwxinit fail')
        return
    webwxgetcontact()

def get_grace_detail(grace_id):
    try:
        all_grace_info = GraceInfo.objects.filter(id=grace_id)
        result_info = []
        for info in all_grace_info:
            img_arr = []
            if info.grace_img_str is not None:
                img_arr = info.grace_img_str.split("##")
            # img_set = info.grace_content_img.all()
            # for img in img_set:
            #     img_detail = {
            #         'img_location': img.image_location,
            #         'img_height': img.image_height,
            #         'img_width': img.image_width,
            #     }
            #     img_arr.append(img_detail)
            resultItem = {
                'grace_id': info.id,
                'grace_title': info.grace_title,
                'grace_photo': info.grace_title_img,
                'img_height': info.grace_title_img_height,
                'img_width': info.grace_title_img_width,
                'grace_photo_array': img_arr,
            }
            result_info.append(resultItem)
        return result_info
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def get_baidu_top_books():
    XHQH = 'http://top.baidu.com/buzz?b=353&c=10&fr=topbuzz_b7_c10'
    DSYQ = 'http://top.baidu.com/buzz?b=355&c=10&fr=topbuzz_b353_c10'
    CYJK = 'http://top.baidu.com/buzz?b=1509&c=10&fr=topbuzz_b459_c10'
    QCXS = 'http://top.baidu.com/buzz?b=1508&c=10&fr=topbuzz_b355_c10'
    WXXX = 'http://top.baidu.com/buzz?b=354&c=10&fr=topbuzz_b1508_c10'
    # Start with these categories
    hot_books = []
    for url in (XHQH, DSYQ, CYJK, QCXS, WXXX):
        try:
            _get = requests.get(url, headers=header)
            soup = BeautifulSoup(_get.content.decode('gbk', 'ignore'), "html.parser")
            for tr in soup.find_all('tr'):
                try:
                    bookname = tr.find(name='td', attrs={'class': 'keyword'}).a.text.strip()
                    bookhotcount = tr.find(name='td', attrs={'class': 'last'}).text.strip()
                    hot_books.append((bookname, bookhotcount))
                except AttributeError:
                    continue
            time.sleep(0.2)
        except AttributeError as e:
            logging.error(e)
    hot_books = sorted(hot_books, key=lambda x: int(x[1]), reverse=True)
    return hot_books

def shuffle_teams():
    try:
        team_services.shuffle_teams()
        return "Success", HTTPStatus.OK
    except Exception as exception:
        logging.error(str(exception))
        return str(exception), HTTPStatus.BAD_REQUEST

def _write_inference_result(self, sample_ids, labels, weights, prediction_score,
                            prediction_score_per_coordinate, task_index,
                            schema_params: SchemaParams, output_dir):
    """ Write inference results. """
    output_avro_schema = get_inference_output_avro_schema(
        self.metadata,
        True,
        schema_params,
        has_weight=self._has_feature(schema_params.weight_column_name))
    parsed_schema = parse_schema(output_avro_schema)

    records = []
    for rec_id, rec_label, rec_weight, rec_prediction_score, rec_prediction_score_per_coordinate in \
            zip(sample_ids, labels, weights, prediction_score, prediction_score_per_coordinate):
        rec = {schema_params.uid_column_name: int(rec_id),
               schema_params.prediction_score_column_name: float(rec_prediction_score),
               schema_params.prediction_score_per_coordinate_column_name: float(rec_prediction_score_per_coordinate)}
        if self._has_label(schema_params.label_column_name):
            rec[schema_params.label_column_name] = int(rec_label)
        if self._has_feature(schema_params.weight_column_name):
            rec[schema_params.weight_column_name] = int(rec_weight)
        records.append(rec)

    output_file = os.path.join(output_dir, f"part-{task_index:05d}.avro")
    error_msg = f"worker {task_index} encountered error in writing inference results"
    with tf1.gfile.GFile(output_file, 'wb') as f:
        try_write_avro_blocks(f, parsed_schema, records, None, error_msg)
    logging.info(f"Worker {task_index} saved inference result to {output_file}")

def _test_login(self):
    """Test whether login succeeded.

    Output:
    + Returns True on success, otherwise False.
    """
    try:
        res = self.spider.get(self.url_homepage, headers=self.headers_base,
                              timeout=self.timeout_query)
    except Exception as e:
        # note: ``res`` is unbound here, so it cannot be printed
        logging.debug('Error when testing login: {0}'.format(e))
        return False
    try:
        html_con = etree.HTML(res.text)
    except Exception as e:
        logging.error('Failed to set dom tree: {0}'.format(e))
        return False
    node_list_title = html_con.xpath("//div[@id='zh-home-list-title']")
    return bool(node_list_title)

def __init__(self, doc, argv_test):
    self.args = docopt.docopt(doc, argv=argv_test)
    self.run = self.args['--run']
    self.mountpoint = self.args['--mountpoint']
    self.test_config_path = self.args['<test_config>']
    self.test_log = self.args['--log']
    self.init_log()
    self.init_config()
    # test case
    self.start_time = datetime.datetime.now()
    logging.info('Start test {0}...'.format(self.run))
    if self.run == 'vjtree':
        self.test_vjtree()
    elif self.run == 'iozone':
        self.test_iozone()
    elif self.run == 'rsync':
        self.test_rsync()
    elif self.run == 'fsstress':
        self.test_fsstress()
    elif self.run == 'mdtest':
        self.test_mdtest()
    elif self.run == 'pjd':
        self.test_pjd()
    elif self.run == 'fio':
        self.test_fio()
    elif self.run == 'smallfile':
        self.test_smallfile()
    else:
        logging.error('Unknown --run value: {0}'.format(self.run))

def handle_tcp(self, client, remote):
    try:
        fds = [client, remote]
        while True:
            r, w, e = select.select(fds, [], [], 5)
            if client in r:
                cli_data = client.recv(128)
                cli_data_de = xorr(cli_data)
                if len(cli_data) <= 0:
                    break
                result = send_all(remote, cli_data_de)
                if result < len(cli_data):
                    logging.warning("Failed piping all data to target!!!")
                    break
            if remote in r:
                remote_data = remote.recv(128)
                remote_data_en = xorr(remote_data)
                if len(remote_data) <= 0:
                    break
                result = send_all(client, remote_data_en)
                if result < len(remote_data):
                    logging.warning("Failed piping all data to client!!!")
                    break
    except Exception as e:
        logging.error(e)
    finally:
        client.close()
        remote.close()

def api_call(func, *args, **kwargs):
    while True:
        try:
            return func(*args, **kwargs)
        except APIException as error:
            log('warning', "\tapi_call() failed: failed (%s)" % str(error))
            return False
        except ExceptionList as errorlist:
            for error in errorlist:
                log('warning', "\tapi_call() failed: failed (%s)" % str(error))
            return False
        except HTTPError as error:
            if str(error) == "403 Client Error: Forbidden":
                log('warning', "\tapi_call() failed: 403 forbidden")
                return False
            log('warning', "\tHTTP error %s raised, sleeping for 30 seconds" % str(error))
            time.sleep(30)  # sleep as the message promises, then retry
        except RateLimitExceeded as error:
            log('warning', '\tRateLimitExceeded: Sleeping for %d seconds' % error.sleep_time)
            time.sleep(error.sleep_time)
        except Exception:
            raise

def __parse_user_info(data, user):
    if data["retcode"] != 0:
        logging.error("get user info failed. retcode:%s", data["retcode"])
        return False
    result = data["result"]
    # user.setOccupation(obj.getString("occupation"));
    # user.setPhone(obj.getString("phone"));
    # user.setAllow(QQAllow.values()[obj.getInt("allow")]);
    # user.setCollege(obj.getString("college"));
    # if (obj.has("reg_time")) {
    #     user.setRegTime(obj.getInt("reg_time"));
    # }
    user.uin = str(result["uin"])
    # user.setConstel(obj.getInt("constel"));
    # user.setBlood(obj.getInt("blood"));
    # user.setHomepage(obj.getString("homepage"));
    # user.setStat(obj.getInt("stat"));
    # if (obj.has("vip_info")) {
    #     user.setVipLevel(obj.getInt("vip_info"));  // VIP level; 0 means non-VIP
    # }
    # user.setCountry(obj.getString("country"));
    # user.setCity(obj.getString("city"));
    # user.setPersonal(obj.getString("personal"));
    user.nick_name = result["nick"]
    # user.setChineseZodiac(obj.getInt("shengxiao"));
    # user.setEmail("email");
    # user.setProvince(obj.getString("province"));
    user.gender = transfer_gender(result["gender"])
    # user.setMobile(obj.getString("mobile"));
    # if (obj.has("client_type")) {
    #     user.setClientType(QQClientType.valueOfRaw(obj.getInt("client_type")));
    # }
    return True

def set_in_bd(meta_in_bd, tele_in_bd, last_H_in_bd):
    conn = None
    try:
        conn = MySQLdb.connect('localhost', 'fol', 'Qq123456', 'cao_bufr_v2', charset="utf8")
        cursor = conn.cursor()
        bar = IncrementalBar('meta_in_bd', max=len(meta_in_bd))
        for i in meta_in_bd:
            bar.next()
            cursor.execute('''INSERT IGNORE INTO cao_bufr_v2.releaseZonde
                (Stations_numberStation, time_srok, time_pusk, koordinat, oborudovanie,
                 oblachnost, GEOPOTENTIAL_HEIGHT_CALCULATION_002191,
                 SOFTWARE_IDENTIFICATION_AND_VERSION_NUMBER_025061,
                 RADIOSONDE_SERIAL_NUMBER_001081,
                 CORRECTION_ALGORITHMS_FOR_HUMIDITY_MEASUREMENTS_002017,
                 RADIOSONDE_OPERATING_FREQUENCY_002067, TYPE_OF_PRESSURE_SENSOR_002095,
                 TYPE_OF_TEMPERATURE_SENSOR_002096, TYPE_OF_HUMIDITY_SENSOR_002097,
                 RADIOSONDE_ASCENSION_NUMBER_001082, descriptor_001083, descriptor_001095,
                 descriptor_002066, descriptor_007007, descriptor_002102, descriptor_025065,
                 descriptor_026066, descriptor_002103, descriptor_002015, descriptor_002016,
                 descriptor_002080, descriptor_002081, descriptor_002082, descriptor_002084,
                 descriptor_002085, descriptor_002086, descriptor_035035,
                 text_info_ValueData_205060)
                VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''', i)
            conn.commit()
        bar.finish()
        bar = IncrementalBar('in_bd_bufr', max=len(tele_in_bd))
        for lines in tele_in_bd:
            bar.next()
            cursor.executemany('''INSERT IGNORE INTO cao_bufr_v2.content_telegram
                (Stations_numberStation, date, time, P, T, Td, H, D, V, dLat, dLon, Flags)
                VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''', lines)
            conn.commit()
        bar.finish()
        cursor.executemany('''INSERT IGNORE INTO cao_bufr_v2.last_H
            (Stations_numberStation, time_srok, H) VALUES (%s,%s,%s)''', last_H_in_bd)
        conn.commit()
    except Exception as e:
        logging.error('error while loading into the database: %s', e)
    finally:
        if conn is not None:
            conn.close()

def process_item(self, item, spider):
    try:
        # Deduplication: check whether this url is already stored
        self.cursor.execute(
            """select * from moocCourse where url = %s""", item['url'])
        repetition = self.cursor.fetchone()
        if not repetition:
            # Insert the new row
            self.cursor.execute(
                """insert into moocCourse(title, url, image_url, introduction, student)
                   value (%s, %s, %s, %s, %s)""",
                (item['title'], item['url'], item['image_url'],
                 item['introduction'], item['student']))
            # Commit the SQL statement
            self.connect.commit()
    except Exception as error:
        # Log the error
        logging.error(error)
    return item

def checkOptions(options):
    try:
        atmost = int(options.atmost)
    except ValueError:
        logging.error("atmost must be an integer")
        exit(1)
    try:
        atleast = int(options.atleast)
    except ValueError:
        logging.error("atleast must be an integer")
        exit(1)
    # compare the converted integers, not the raw option strings
    if atmost < 0:
        logging.error("atmost must be non-negative")
        exit(1)
    if atleast < 0:
        logging.error("atleast must be non-negative")
        exit(1)
    if atmost < atleast:
        logging.error("'atmost >= atleast' does not hold")
        exit(1)

def validate_post_input(in_data, expected_key, expected_types):
    """Validate the input sent by the post request

    This function first validates that all the expected keys exist in the
    received dictionary. It then checks that the type of the value under
    each key meets the requirement for the next processing steps.

    Args:
        in_data (dict): the dictionary sent by the patient client, which
            includes all the information uploaded by users
        expected_key (list): list of strings naming the keys that are
            expected to exist in the dictionary
        expected_types (list): the expected type of the value under each
            key in expected_key

    Returns:
        str: "xx key not found in input" if one or more keys are missing
            from the input dictionary, or "xx key value has wrong variable
            type" if one or more values has a type different from what is
            expected
        True: if no problem is found
    """
    for key, v_type in zip(expected_key, expected_types):
        if key not in in_data:
            logging.warning("{} key not found in input".format(key))
            return "{} key not found in input".format(key)
        if type(in_data[key]) is not v_type:
            logging.error("{} key value has wrong variable type".format(key))
            return "{} key value has wrong variable type".format(key)
    return True

def add_patient_to_data_base(in_data, time):
    """ Add the patient information to the database together with the timestamp

    This function first calls find_correct_patient to check whether the
    patient MRI number already exists in the MongoDB. If it does, the
    database is updated according to the in_data information; otherwise a
    new patient record is created and saved into the database.

    Args:
        in_data (dict): the dictionary of uploaded user information received
            by the server from the post request
        time (str): the time at which the server received the post request

    Returns:
        str: "Patient: xx successfully added." if a new patient is added,
            "Patient: xx information successfully updated." otherwise
    """
    does_exist = find_correct_patient(in_data["MRI"])
    if does_exist is False:
        add_new_patient(in_data, time)
        logging.info("Patient: {} successfully added.".format(in_data["MRI"]))
        return "Patient: {} successfully added.".format(in_data["MRI"])
    else:
        update_patient_inform(in_data, time, does_exist)
        logging.info("Patient: {} information"
                     " successfully updated.".format(in_data["MRI"]))
        return "Patient: {} information successfully updated.".\
            format(in_data["MRI"])

def post_new_patient():
    """Receive the post request from the patient client

    The in_data is sent by the patient-side client. It is a dictionary with
    the following keys:
        "patient_name" (str): the uploaded patient's name
        "MRI" (str): the medical record number uploaded; this value is unique
        "medical_image" (str): the medical image in the form of a b64_string
        "medical_title": the user-defined filename of the medical image
        "medical_des": the user-defined description of the uploaded medical image
        "heart_rate": the heart rate in BPM analyzed from the ecg .csv file
        "ecg_image": the ecg_trace image generated after analysis of the
            ecg trace data
        "ecg_title": the user-defined file name of the ecg image
        "ecg_des": the user-defined description of the ecg image

    This function also records the time at which the server received this
    post request.

    Returns:
        str, int: the string indicates whether the request succeeded: whether
            the input has valid keys and corresponding types, whether the
            input MRI is a purely numeric string, and whether a new patient
            was added or an existing patient's information was updated.
            The int is the status code of the server, where 200 represents
            success and 400 represents a bad post request
    """
    # Receive request data
    logging.info("Post request to upload patient data received!")
    in_data = request.get_json()
    time = str(datetime.datetime.now())
    answer, server_status = process_new_patient(in_data, time)
    return answer, server_status

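# Several of the patient-server snippets (validate_post_input,
# add_patient_to_data_base, post_new_patient, and the database helpers further
# down) originally called ``logging(0, msg)`` / ``logging(1, msg)`` /
# ``logging(2, msg)``, passing an integer severity as the first argument. The
# fixes above map 0/1/2 onto info/warning/error. If the project really wanted
# a numeric-level front end, a hypothetical wrapper like this sketch would
# preserve the original call shape (the name ``log_event`` and the 0/1/2
# mapping are assumptions, not part of the original code):
import logging

_LEVELS = {0: logging.INFO, 1: logging.WARNING, 2: logging.ERROR}

def log_event(level, msg):
    """Log msg at the stdlib level mapped from the integer severity code."""
    logging.log(_LEVELS.get(level, logging.INFO), msg)

# usage: log_event(0, "Patient added"), log_event(2, "wrong variable type")
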
def _get_num_iterations(self, input_files, metadata_file):
    """ Get the number of batches assigned to this worker. Works for TFRecord only.

    :param input_files: a list of TFRecord files.
    :param metadata_file: the metadata associated with the TFRecord files.
    :return: number of iterations
    """
    start_time = time.time()
    assert (self.data_format == constants.TFRECORD)
    # Reset the default graph; this runs before the main graph is built.
    tf1.reset_default_graph()
    num_iterations = 0
    dataset = per_record_input_fn(input_files,
                                  metadata_file,
                                  1,
                                  0,
                                  self.batch_size,
                                  self.data_format,
                                  build_features=False)
    data_iterator = tf1.data.make_initializable_iterator(dataset)
    next_item = data_iterator.get_next()
    with tf1.device('device:CPU:0'), tf1.Session() as sess:
        sess.run(data_iterator.initializer)
        while True:
            try:
                sess.run(next_item)
                num_iterations += 1
            except tf.errors.OutOfRangeError:
                break
    end_time = time.time()
    logging.info(
        f'It took {end_time - start_time} seconds to count {num_iterations} batches '
        f'with batch size {self.batch_size}.')
    return num_iterations

def writeFileUploadStatus(mainParamList, config_key, successStatus=False, checkForPurge=False):
    if successStatus:
        statusDir = ''
        fileName = ''
        startFileName = ''
        if config_key == 'ME_Position_TD':
            statusDir = '{}/{}'.format(VERTICA_SUCCESS_STATUS_DIR, TD_POSITION_DIR)
            startFileName = 'PositionTD'
            fileName = '{}/{}_{}_{}_{}.{}'.format(statusDir, startFileName, mainParamList[0],
                                                  mainParamList[2], mainParamList[3], 'success')
        elif config_key == 'ME_FinancingPosition':
            statusDir = '{}/{}'.format(VERTICA_SUCCESS_STATUS_DIR, FIN_POSITION_DIR)
            startFileName = 'FinancingPosition'
            fileName = '{}/{}_{}_{}.{}'.format(statusDir, startFileName, mainParamList[0],
                                               mainParamList[1], 'success')
        elif config_key == 'ME_Position_SD':
            statusDir = '{}/{}'.format(VERTICA_SUCCESS_STATUS_DIR, SD_POSITION_DIR)
            startFileName = 'PositionSD'
            fileName = '{}/{}_{}_{}_{}.{}'.format(statusDir, startFileName, mainParamList[0],
                                                  mainParamList[1], mainParamList[3], 'success')
        else:
            logging.info('Not a valid configKey for writing Status!!!')
            return
        fileName = fileName.replace('-', '')
        logging.info('Status File: {}'.format(fileName))
        with open(fileName, 'w') as sFile:
            sFile.write('Success')

def receiveInterestPayment(self, available_cash, period=1):
    if self._current_interest_due == 0:
        logging.info('Tranche {t} has 0 interest due and no need to pay interest'.format(t=self._tranche_id))
    else:
        self._current_interest_paid = min(self._current_interest_due, available_cash)
        self._current_interest_shortfall = self._current_interest_due - self._current_interest_paid
    return available_cash - self._current_interest_paid

def get(self):
    users = WXUser.all()
    p = Push()
    if not users.count():
        return
    opener = poster.streaminghttp.register_openers()
    weatherinfo = json.loads(opener.open(settings.weather1_url % settings.weather_city,
                                         timeout=5).read())['weatherinfo']
    logging.info(weatherinfo)
    city = weatherinfo['city']
    temp = weatherinfo['temp']
    wd = weatherinfo['WD']
    ws = weatherinfo['WS']
    sd = weatherinfo['SD']  # humidity field; the original fetched 'WS' here a second time
    time = weatherinfo['time']
    args = (to_unicode(city), temp, to_unicode(wd), to_unicode(ws), sd, time)
    logging.info(str(args))
    for user in users:
        msg = '''
City: %s
Temperature: %s °C
Wind direction: %s
Wind force: %s
Humidity: %s
Published: %s''' % (to_unicode(city), temp, to_unicode(wd), to_unicode(ws), sd, time)
        logging.info(msg)
        p.send_txt_msg(user.fake_id, msg)

def add_patient_to_database(patient_id=None, attending_username=None, patient_age=None):
    """Add a new patient with his info to the patient database

    Register a new patient as a dictionary, with his id, attending username
    and age, in the existing patient database.

    Args:
        patient_id (int): the id number of the added patient
        attending_username (str): the username of the added patient's attending
        patient_age (int): the age of the added patient

    Returns:
        None, but the patient database 'patient_db' is enlarged
    """
    new_patient = {
        "patient_id": patient_id,
        "attending_username": attending_username,
        "patient_age": patient_age,
        "heart_rate_history": list()
    }
    patient_db.append(new_patient)
    logging.info("New patient added. Patient ID:{}".format(patient_id))
    print("patient database:\r")
    print(patient_db)

def get_hot_info_summary():
    try:
        result_info = []
        all_hot_type = HotInfoType.objects.all()
        for type in all_hot_type:
            all_hot_info_per_type = HotInfo.objects.filter(hot_info_type__id=type.id)[0:5]
            hot_content = []
            for hot_info in all_hot_info_per_type:
                ctime = hot_info.hot_date
                dateStr = str(ctime.year) + "-" + str(ctime.month) + "-" + str(ctime.day) + " " \
                          + str(ctime.hour) + ":" + str(ctime.minute) + ":" + str(ctime.second)
                hot_item = {
                    "hot_id": hot_info.id,
                    "hot_date": dateStr,
                    "hot_title": hot_info.hot_info_title,
                    "hot_photo": hot_info.hot_info_title_img,
                }
                hot_content.append(hot_item)
            result_item = {
                "hot_type_id": type.id,
                "hot_type_name": type.hot_info_desc,
                "type_content": hot_content,
            }
            result_info.append(result_item)
        return result_info
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def update_stripe_subscription_statuses():
    """Update Stripe subscriptions with their current status by querying the Stripe api"""
    stripe.api_key = get_stripe_secret_key()
    connect_account = get_stripe_connect_account()
    if stripe.api_key is None:
        logging.error("Stripe api key not set, refusing to update subscription statuses")
        return
    if connect_account is None:
        logging.error("Stripe connect account not set. Refusing to update subscription statuses")
        return
    if stripe_connect_active():
        for subscription in Subscription.query.all():
            try:
                stripeSubscription = stripe.Subscription.retrieve(
                    stripe_account=connect_account.id,
                    id=subscription.stripe_subscription_id,
                )
                subscription.stripe_status = stripeSubscription.status
                database.session.commit()
            except Exception as e:
                logging.warning(f"Could not update stripe subscription status: {e}")
    else:
        logging.info("Refusing to update subscription status since Stripe connect is not active")

def _write_inference_result(self, sample_ids, labels, weights, scores,
                            scores_and_offsets, task_index, schema_params, output_dir):
    """ Write inference results. """
    photon_ml_writer = PhotonMLWriter(schema_params=schema_params)
    output_avro_schema = photon_ml_writer.get_inference_output_avro_schema(
        self.metadata,
        self._has_label(schema_params[constants.LABEL]),
        True,
        has_weight=self._has_feature(schema_params[constants.SAMPLE_WEIGHT]))
    parsed_schema = parse_schema(output_avro_schema)

    records = []
    for rec_id, rec_label, rec_weight, rec_score, rec_score_and_offset in \
            zip(sample_ids, labels, weights, scores, scores_and_offsets):
        rec = {schema_params[constants.SAMPLE_ID]: int(rec_id),
               schema_params[constants.PREDICTION_SCORE]: float(rec_score),
               schema_params[constants.PREDICTION_SCORE_PER_COORDINATE]: float(rec_score_and_offset)}
        if self._has_label(schema_params[constants.LABEL]):
            rec[schema_params[constants.LABEL]] = int(rec_label)
        if self._has_feature(schema_params[constants.SAMPLE_WEIGHT]):
            rec[schema_params[constants.SAMPLE_WEIGHT]] = int(rec_weight)
        records.append(rec)

    output_file = os.path.join(output_dir, "part-{0:05d}.avro".format(task_index))
    error_msg = "worker {} encountered error in writing inference results".format(task_index)
    with tf1.gfile.GFile(output_file, 'wb') as f:
        try_write_avro_blocks(f, parsed_schema, records, None, error_msg)
    logging.info("Worker {} saved inference result to {}".format(task_index, output_file))

def get_course_detail(category_id):
    try:
        course_info_list = CourseInfo.objects.filter(id=category_id)
        result_info = []
        for info in course_info_list:
            teacher_set = info.course_teacher.all()
            teacher_arr = []
            for teacher in teacher_set:
                teacher_info = {
                    'teacher_id': teacher.id,
                    'teacher_name': teacher.teacher_name,
                    'teacher_photo': teacher.teacher_img,
                    'teacher_level': teacher.teacher_level,
                }
                teacher_arr.append(teacher_info)
            result_item = {
                'course_id': info.id,
                'course_name': info.course_name,
                'course_intro': info.course_intro,
                'course_photo': info.course_img,
                'course_detail': info.course_detail,
                'teacher_arr': teacher_arr,
            }
            result_info.append(result_item)
        return result_info
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def train_one_epoch(self):
    """
    One epoch of training
    :return:
    """
    self.loss_train_avg = AverageMeter()
    self.mapk_train_avg = AverageMeter()
    self.model.train()
    count = 0

    # Note: this local helper shadows the stdlib ``logging`` module inside
    # this method, so the ``logging(...)`` call below invokes the helper.
    def logging(output, target, loss, batch_idx):
        mapk3_metric = mapk3(output, target)
        self.mapk_train_avg.update(mapk3_metric)
        self.logger.info(
            'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tMapk3: {:.6f}'
            .format(self.current_epoch, batch_idx * len(data),
                    len(self.train_data_loader.dataset),
                    100. * batch_idx / len(self.train_data_loader),
                    loss.item(), mapk3_metric))
        if self.summary_writer:
            iteration = self.current_epoch * 1000 + int(
                1000. * batch_idx / len(self.train_data_loader))
            self.summary_writer.add_scalar("train_loss", loss.item(), iteration)
            self.summary_writer.add_scalar("train_mapk", mapk3_metric, iteration)
            self.summary_writer.add_scalar(
                "lr", self.optimizer.param_groups[0]['lr'], iteration)

    for batch_idx, (data, target) in enumerate(self.train_data_loader):
        if count <= 0:
            self.optimizer.step()
            self.optimizer.zero_grad()
            count = self.acum_batches
        data, target = data.to(self.device), target.to(self.device)
        output = self.model(data)
        loss = self.loss(output, target)
        # Logging and gradient accumulation
        if batch_idx % self.config.log_interval == 0:
            logging(output, target, loss, batch_idx)
        if self.acum_batches >= 2:
            loss = loss / self.acum_batches
        loss.backward()
        self.loss_train_avg.update(loss.item())
        self.current_iteration += 1
        count = count - 1
        if self.scheduler and "step_batch" in dir(self.scheduler):
            self.scheduler.step_batch(self.current_iteration)
    self.save_checkpoint()

def log(self, input, level="INFO"):
    ts = time.time()
    logtime = datetime.datetime.fromtimestamp(ts).strftime('%d-%m-%Y %H:%M:%S')
    msg = '[' + level + '] ' + logtime + ' - ' + json.dumps(input)
    CSTART = '\033[0m'
    if level == "DEBUG":
        CSTART = '\033[32m'
        logging.debug(msg)
    elif level == "INFO":
        CSTART = '\033[40m'
        logging.info(msg)
    elif level == "WARNING":
        CSTART = '\033[33m'
        logging.warning(msg)
    elif level == "ERROR":
        CSTART = '\033[6m'
        logging.error(msg)
    elif level == "CRITICAL":
        CSTART = '\033[101m'
        logging.critical(msg)
    else:
        logging.info(msg)  # unknown level falls back to INFO
    if self.showloginconsole == "True":
        CEND = '\033[0m'
        print(CSTART + msg + CEND)

def main():
    p = chdir_myself()
    logging_init("Demo1.log")
    # test_xls()
    test_send_email()
    logging.info("Exit the program.")
    logging_fini()

def download(self):
    result = True
    for file_path, url in self._data:
        local_file = os.path.join(self._destination, file_path)
        dir_name = os.path.dirname(local_file)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        resp = urllib2.urlopen(url)
        try:
            with open(local_file, 'wb') as f:
                block_size = 8192
                while True:
                    chunk = resp.read(block_size)
                    if not chunk:
                        break
                    f.write(chunk)
        except Exception as e:
            logging.error("Error while downloading url: ex = {}, url = {}".format(e, url))
            result = False  # mark the failure so the destination is cleaned up below
    if not result:
        shutil.rmtree(self._destination)
    return result

def getYAMLConfig(fname):
    config = None
    try:
        with open(fname, 'r') as ymlf:
            # safe_load: plain yaml.load without an explicit Loader is unsafe/deprecated
            config = yaml.safe_load(ymlf)
    except Exception as e:
        logging.error('Error accessing config: %s', e)
    return config

def add_attending_to_database(attending_username, attending_email=None, attending_phone=None):
    """Add a new attending with his info to the attending database

    Register a new attending as a dictionary, with his username, email and
    phone number, in the existing attending database.

    Args:
        attending_username (str): the username of the added attending
        attending_email (str): the email address of the added attending
        attending_phone (str): the phone number of the added attending, as a
            string, either numeric or with necessary symbols

    Returns:
        None, but the attending database 'attending_db' is enlarged
    """
    new_attending = {
        "attending_username": attending_username,
        "attending_email": attending_email,
        "attending_phone": attending_phone
    }
    attending_db.append(new_attending)
    logging.info("New attending added. Username:{} Email:{}".format(
        attending_username, attending_email))
    print("attending database:\r")
    print(attending_db)

def add_feedback(content, email, phone):
    try:
        pub = UserFeedback(feedback_content=content,
                           feedback_email=email,
                           feedback_phone=phone)
        pub.save()
        return "1"
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def setup_environ():
    # lib
    sys.path.insert(0, os.path.join(ROOT_PATH, 'lib'))

    # SDK (this will be simpler if the SDK is in the codebase)
    sdk_path = None
    for path in os.environ.get('PATH').split(os.pathsep):
        if 'dev_appserver.py' in os.listdir(path):
            test_path = os.path.join(path, 'dev_appserver.py')
            sdk_path = os.path.dirname(
                os.readlink(test_path) if os.path.islink(test_path) else test_path)
            break
    if not sdk_path:
        logging.critical("Can't find sdk_path")
        sys.exit(1)
    sys.path.insert(0, sdk_path)

    # Use dev_appserver to set up the python path
    from dev_appserver import fix_sys_path
    fix_sys_path()

    from google.appengine.tools import dev_appserver as tools_dev_appserver
    from google.appengine import dist

    # Parse `app.yaml`
    appinfo, url_matcher, from_cache = tools_dev_appserver.LoadAppConfig(
        ROOT_PATH, {}, default_partition='dev')
    app_id = appinfo.application

    # Useful for later scripts
    os.environ['APPLICATION_ID'] = app_id
    os.environ['APPLICATION_VERSION'] = appinfo.version

    # Third party libraries on the path
    if appinfo.libraries:
        for library in appinfo.libraries:
            try:
                dist.use_library(library.name, library.version)
            except ValueError as e:
                if library.name == 'django' and library.version == '1.4':
                    # Work around an SDK issue
                    logging.warning('django 1.4 not recognised by dist, fixing python path')
                    sys.path.insert(0, os.path.join(sdk_path, 'lib', 'django-1.4'))
                else:
                    logging.error('Unsupported library:\n%s\n' % e)

            # Extra setup for django
            if library.name == 'django':
                try:
                    import settings
                    from django.core.management import setup_environ
                    setup_environ(settings, original_settings_path='settings')
                except ImportError:
                    logging.error("Could not import django settings")

def connectToDatabase(config):
    try:
        conn = psycopg2.connect("dbname='" + config.dbname +
                                "' user='******' host='" + config.host +
                                "' password='******'")
    except psycopg2.Error as e:
        logging.error('Unable to connect to the database: %s', e)
        # re-raise so callers see the failure instead of an unbound ``conn``
        raise
    return conn

def set_play_status(self, status, wd, time):
    """ Set playback flag """
    for item in self._sched_week[wd]:
        if not item["is_main"]:
            continue
        if item["time"] == time:
            item["play"] = status
            break
    else:
        logging.warning("Program not found")
        return
    self._sched_write_to_file()

def read_webpage(url):
    the_page = ''
    try:
        req = urllib.request.Request(url)
        response = urllib.request.urlopen(req)
        the_page = response.read().decode(errors='ignore')
    except urllib.error.HTTPError as e:
        logging.error(e.code)
        logging.error(e.read())
    filter_punc = lambda t: ''.join([x.lower() for x in t if x.isalpha()])
    words = [x for x in map(filter_punc, the_page.split()) if x]
    return words

def set_record_status(self, status, wd, time):
    """ Set recorder status """
    for item in self._sched_week[wd]:
        if not item["is_main"]:
            continue
        if item["time"] == time:
            item["record"] = status
            break
    else:
        logging.warning("Program not found")
        return
    self._sched_write_to_file()

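# Both schedule setters above rely on Python's for/else: the else block runs
# only when the loop finishes without hitting ``break``, i.e. when no matching
# program was found, so the warning-and-early-return path is skipped whenever
# a slot matches. A minimal self-contained illustration of that control flow
# (the names here are illustrative, not taken from the scheduler code):
def find_slot(slots, wanted):
    for slot in slots:
        if slot == wanted:
            print("found", slot)
            break
    else:
        print("not found; skipping write")
        return False
    return True

assert find_slot(["08:00", "12:30"], "12:30") is True
assert find_slot(["08:00", "12:30"], "99:99") is False
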
def show_history():
    if request.method == 'GET':
        list_of_place = get_list_of_places_names()
        return render_template('history.html', place_list=list_of_place)
    else:
        try:
            chosen_place = request.json['place']
            date_time = request.json['date']
            actual_history = get_payment_by_date(
                get_placeid_by_placename(chosen_place), date_time)
            return render_template('response_history.html', history_info=actual_history)
        except Exception as e:
            logging.error("Error occurred in %s: %s", inspect.stack()[0][3], e)

def createR(self):
    logging.info("Creating Reminder class")
    R_R = Reminder()
    logging.info(
        "Setting Reminder class with the following: Date=%s Time=%s",
        JSON_Holder["Database"]["Reminder"]["Date"],
        JSON_Holder["Database"]["Reminder"]["Time"])
    R_R.setAll(JSON_Holder["Database"]["Reminder"]["Date"],
               JSON_Holder["Database"]["Reminder"]["Time"])
    DB.setReminder(R_R)

async def select(sql, args, size=None):
    logging.info('SQL: %s args: %s', sql, args)
    global __pool
    with await __pool as conn:
        cur = await conn.cursor(aiomysql.DictCursor)
        await cur.execute(sql.replace('?', '%s'), args or ())
        if size:
            rs = await cur.fetchmany(size)
        else:
            rs = await cur.fetchall()
        await cur.close()
        logging.info('rows returned: %s' % len(rs))
        return rs

def select(sql, args, size=None):
    logging.info('SQL: %s args: %s', sql, args)
    global __pool
    with (yield from __pool) as conn:
        cur = yield from conn.cursor(aiomysql.DictCursor)
        yield from cur.execute(sql.replace("?", "%s"), args or ())
        if size:
            rs = yield from cur.fetchmany(size)
        else:
            rs = yield from cur.fetchall()
        yield from cur.close()
        logging.info("rows returned: %s" % len(rs))
        return rs

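# The two ``select`` variants above (asyncio ``async/await`` and the older
# ``yield from`` coroutine style) both opened with a bare ``logging(sql, args)``
# call. The fix logs through ``logging.info`` with lazy %-style arguments, so
# the message is only formatted when the INFO level is actually enabled. A
# tiny helper in that spirit (the name ``log`` is an assumption modeled on
# common aiomysql tutorials, not confirmed by this code) would look like:
import logging

def log(sql, args=()):
    """Log a SQL statement and its bound arguments before execution."""
    logging.info('SQL: %s args: %s', sql, args)

# usage: log('select * from users where id = ?', (42,))
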
def validate(self, model_filename, feature_filename, output_filename):
    logging.info('validation, model:[%s], input[%s], output[%s]' %
                 (model_filename, feature_filename, output_filename))
    predict_cmd = '%s %s %s %s %s' % (
        self.config.lr_predict_bin,
        self.config.lr_predict_option,
        feature_filename,
        model_filename,
        output_filename)
    status, output = commands.getstatusoutput(predict_cmd)
    logging.info(output)
    if status != 0:
        sys.exit(-1)

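# ``commands.getstatusoutput`` above is Python 2 only; on Python 3 the
# equivalent call lives in ``subprocess`` with the same (status, output)
# return shape and the trailing newline stripped. A minimal sketch (the
# ``echo`` command is just an illustration and assumes a POSIX shell):
import subprocess

status, output = subprocess.getstatusoutput('echo hello')
assert status == 0 and output == 'hello'
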
def _fc_describe(self, Base):
    result = []
    for name, cls in Base._decl_class_registry.items():
        if name in ["Audit"]:
            continue
        try:
            if name[0] != "_":
                pk, properties = self._fc_properties_of(cls)
                result.append(OrderedDict([("name", name),
                                           ("pk", pk),
                                           ("properties", properties)]))
        except Exception:
            logging.error(name)
            raise
    result.sort(key=lambda v: v["name"])
    return result

def aboutUs():
    try:
        aboutUsInfo = AboutUsInfo.objects.all()
        result_info = []
        for about in aboutUsInfo:
            ad = {
                'about_us_detail': about.about_us_detail,
            }
            result_info.append(ad)
        return result_info
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def send_mail(mail_list, sub, content_html, append_list):
    ENCODE = 'utf-8'
    me = "刘泽宇" + "<" + MAIL_USER + ">"
    msg = MIMEMultipart()
    msg['Subject'] = sub
    msg['From'] = me
    msg['BCC'] = ";".join(mail_list)
    msg_text = MIMEText(content_html, 'html', ENCODE)
    msg.attach(msg_text)
    logging.info("Reading attachments")
    for each_append in append_list:
        with open(each_append, 'rb') as f:
            f_basename = os.path.basename(each_append).encode(ENCODE)
            msg_append = MIMEApplication(f.read())
            msg_append.add_header('Content-Disposition', 'attachment',
                                  filename=f_basename)
            msg.attach(msg_append)
    logging.info("Starting to connect.")
    s = smtplib.SMTP()
    s.connect(MAIL_HOST)  # fails without network access, or on DNS errors
    logging.info("Connect success")
    s.login(MAIL_USER, MAIL_PASS)  # fails on wrong username/password
    logging.info("Before sending email, there are {} receivers.".format(len(mail_list)))
    try:
        err_mail = s.sendmail(me, mail_list, msg.as_string())
    except smtplib.SMTPRecipientsRefused as e:
        print("==============Catch SMTPRecipientsRefused Error================")
        print(e)
        print("-------")
        print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")

def new_user(username, password, email):
    if username == '' or password == '':
        return -1
    try:
        curr_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        user = BukachUser(username=username, password=password, email=email,
                          create_time=curr_time)
        user.save()
        return 1
    except Exception as e:
        logging.error(e)
        return -1

def ReadCatalog(catalog_file):
    try:
        Books = {}
        Titles = []
        with open(catalog_file, 'r', encoding='utf-8') as f_in:
            lines = list(line for line in (l.strip() for l in f_in) if line)
            for book in lines:
                p = re.compile(r'(.+),(http:\/\/.+)').match(book)
                try:
                    Books[p.group(1)] = [len(Books), p.group(2)]
                    Titles.append(p.group(1))
                except Exception:
                    logging.error(sys.exc_info()[0])
                    logging.error(sys.exc_info()[1])
        return Books, Titles
    except FileNotFoundError:
        print("Catalog file not found")
        return None
    except Exception:
        print('Please provide a valid catalog.txt file')
        logging.error("Unexpected error:" + str(sys.exc_info()[0]))
        logging.error("Unexpected error:" + str(sys.exc_info()[1]))

def resolve(self, request, handler):
    if len(request.questions) != 1:
        logger.error('more than one question in request, aborting.')
        reply = request.reply()
        reply.header.rcode = getattr(dnslib.RCODE, 'FORMERR')
        return reply
    try:
        return self.get_record(request)
    except Exception as e:
        sys.stderr.write(repr(request) + '\n')
        logger.error('resolve %s failed. %s' % (request.header.qname, repr(e)))
        reply = request.reply()
        reply.header.rcode = getattr(dnslib.RCODE, 'NXDOMAIN')
        return reply

def getCourseCategory():
    try:
        categoryInfoList = CourseCategory.objects.all()
        resultInfo = []
        for info in categoryInfoList:
            resultItem = {
                'category_id': info.id,
                'category_name': info.category_name,
                'category_photo': info.category_img
            }
            resultInfo.append(resultItem)
        return resultInfo
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def getAdsInfo():
    try:
        adsInfo = TitleAdsInfo.objects.all()
        resultInfo = []
        for ads in adsInfo:
            ad = {
                'ads_id': ads.id,
                'ads_title': ads.ad_text,
                'ads_photo': ads.ad_img
            }
            resultInfo.append(ad)
        return resultInfo
    except Exception as e:
        logging.error(e)
        return ERROR_CODE

def createDD(self):
    logging.info("Creating DueDate class")
    R_DD = DueDate()
    logging.info(
        "Setting DueDate class with the following: Name=%s Date=%s Time=%s",
        JSON_Holder["Database"]["DueDate"]["Name"],
        JSON_Holder["Database"]["DueDate"]["Date"],
        JSON_Holder["Database"]["DueDate"]["Time"])
    R_DD.setAll(
        JSON_Holder["Database"]["DueDate"]["Name"],
        JSON_Holder["Database"]["DueDate"]["Date"],
        JSON_Holder["Database"]["DueDate"]["Time"])
    DB.setDueDateC(R_DD)
