def handle_server_event(self):
    """Poll the API for new notifications and dispatch them to listeners.

    Reads the last processed notification id from the config, fetches any
    newer notifications, persists the new high-water mark, then fires
    ``on_event`` once per (student, notification) pair with the matching
    contest/stage data resolved from the payload.
    """
    last_notification_id = int(cfg.get('SETTINGS', 'LAST_ID'))
    data = requests.get(
        cfg.get('API', 'API_ADDRESS') +
        f'/notifications/{last_notification_id}').json()

    # Persist the new last id so restarts do not re-process events.
    last_notification_id = data['meta']['last_id']
    cfg.set('SETTINGS', 'LAST_ID', str(last_notification_id))
    # BUG FIX: the original passed a bare open() handle to cfg.write(),
    # leaking the file object; use a context manager instead.
    with open('settings.cfg', 'w') as cfg_file:
        cfg.write(cfg_file)

    for student, notifications in data['students'].items():
        self.change_user(int(student))
        for student_data in notifications:
            # Resolve the contest referenced by this notification.
            current_contest = None
            for contest, contest_data in data['contests'].items():
                if contest == str(student_data['contest']):
                    current_contest = contest_data
                    current_contest['id'] = contest
                    break
            # Resolve the stage referenced by this notification.
            current_stage = None
            for stage, stage_data in data['stages'].items():
                if stage == str(student_data['stage']):
                    current_stage = stage_data
                    current_stage['id'] = stage
                    break
            self.on_event.notify({
                'contest': current_contest,
                'stage': current_stage,
                'student': student
            })
def generate_values_average_time(num):
    """Generate *num* value groups whose time fields are evenly spread.

    :param num: number of value groups to generate
    :return: per-table list of value tuples (one outer entry per table)
    """
    groups = []  # one entry per generated group
    # Spread the date values evenly across the configured window.
    date_list = ger_dates(num, cfg.get('task', 'startDate'),
                          cfg.get('task', 'endDate'))
    started_at = datetime.now()
    table_names = tables.split(',')  # table list from the config file
    for index in range(num):
        # Build the field map once per group, outside the table loop, so
        # correlated fields share the same value in every table.
        kv = field_value(index, date_list[index])
        data_set = []
        for table in table_names:
            columns = cfg.get_raw('database', table).split(',')
            # Missing columns get an empty-string placeholder.
            data_set.append(tuple(kv.get(column, '') for column in columns))
        groups.append(data_set)
        print('第{}组数据已生成'.format(str(index + 1)))
    # Transpose so each table's values end up in one contiguous list.
    table_group = list(map(list, zip(*groups)))
    print('{}组数据全部生成,耗时:{}'.format(str(num), datetime.now() - started_at))
    return table_group
def main():
    """Wire the VK bot to its pages manager and start long-polling."""
    # Keep local traffic off any configured HTTP proxy.
    os.environ['NO_PROXY'] = '127.0.0.1'
    bot = VkBot(cfg.get('VK', 'TOKEN'), int(cfg.get('VK', 'GROUP_ID')))
    # The manager registers itself with the bot; keeping the reference
    # mirrors the original behaviour.
    pages_manager = PagesManager(
        PagesContainer(cfg.get('SETTINGS', 'DATABASE_NAME')),
        bot,
        IntroductionPage,
    )
    bot.run()
def cppflags():
    """The C++ flags to preprocess a Pythran generated cpp file"""
    flags = _python_cppflags()
    flags += _numpy_cppflags()
    flags += _pythran_cppflags()
    # System flags come first, then user overrides, as before.
    for section in ('sys', 'user'):
        flags += cfg.get(section, 'cppflags').split()
    return flags
async def init() -> None:
    """Lazily (re)open the module-level asyncpg connection."""
    global conn
    # Reuse a live connection when one exists.
    if conn is not None and not conn.is_closed():
        return
    conn = await asyncpg.connect(
        host=cfg.get("stats", "pg_host"),
        user=cfg.get("stats", "pg_user"),
        password=cfg.get("stats", "pg_password"),
        database=cfg.get("stats", "pg_database"),
    )
def __init__(self):
    """Open the MSSQL connection and cursor described in the config."""
    section = 'mssql_connect'
    self.conn = pymssql.connect(
        host=cfg.get(section, 'host'),
        user=cfg.get(section, 'user'),
        password=cfg.get(section, 'pwd'),
        database=cfg.get(section, 'db'),
        charset=cfg.get(section, 'charset'),
    )
    self.cur = self.conn.cursor()
async def heroku_keep_alive(self):
    """Periodically ping the configured cloud nodes to keep them awake."""
    await self.wait_until_ready()
    while not self.is_closed():
        for url in cfg.get('cloud_nodes'):
            log.info(url)
            reply = await self.http_get(url)
            log.info(reply)
            # Forward any non-empty response body to the event channel.
            if reply is not None:
                await self.event_channel.send(reply)
        await asyncio.sleep(cfg.get('interval'))
def get():
    """Load results.json merged with manual overrides; drop empty entries."""
    base = "outputs/" + cfg.get("folder")
    with open(base + "/results.json", "r", encoding="utf8") as f:
        results = json.load(f)
    try:
        with open(base + "/results.manual.json", "r", encoding="utf8") as f:
            manual = json.load(f)
    except FileNotFoundError:
        manual = {}
    # Manually curated entries take precedence over scraped ones.
    results.update(manual)
    return {key: value for key, value in results.items() if len(value) > 0}
def handle_new_message(self, message: Message):
    """Handle one incoming message of the subjects-selection dialog.

    Implements a three-state machine (menu / adding / deleting) and, after
    every message that does not finish registration, reports the list of
    currently chosen subjects back to the user.
    """
    if self._state == MENU_STATE:
        if message.text == adding_button.text:
            self._bot.send_message(Message(adding_state_text))
            self._state = ADDING_STATE
        elif message.text == deleting_button.text:
            self._state = DELETING_STATE
        elif message.text == finishing_button.text:
            # Persist the chosen subjects and move to the final page.
            requests.put(
                cfg.get('API', 'API_ADDRESS') +
                f'/student/{self._bot.get_current_user()}',
                {'subjects': str(self._subjects)})
            self.on_page_changing.notify(RegistrationEndingPage(self._bot))
            # Early return so the subjects summary below is not sent.
            return
        else:
            # Unrecognized input: just re-show the menu keyboard.
            self._bot.set_keyboard(local_menu, message=keyboard_message)
    elif self._state == ADDING_STATE:
        if not self.try_to_add_subject(message.text):
            self._bot.send_message(Message(subject_does_not_exist_text))
        else:
            self._bot.send_message(Message(subject_added_text))
        self._bot.set_keyboard(local_menu, message=keyboard_message)
        self._state = MENU_STATE
    elif self._state == DELETING_STATE:
        self.delete_subject(message.text)
        self._bot.send_message(Message(subject_deleted_text))
        self._bot.set_keyboard(local_menu, message=keyboard_message)
        self._state = MENU_STATE
    # NOTE(review): reconstructed from a collapsed source line — this final
    # summary appears to run for every non-finishing message; confirm the
    # original indentation placed it at function level, not inside the
    # DELETING branch.
    if len(self._subjects):
        self._bot.send_message(
            Message(
                f'{subjects_list_prefix} {"; ".join(map(lambda x: x["name"], self._subjects))}.'
            ))
    else:
        self._bot.send_message(Message(subjects_does_not_chosen_text))
async def listener_remove(req: Request) -> Response:
    """Icecast auth endpoint: close (or discard) a listener's listen row."""
    global conn
    await init()
    params = await req.post()
    try:
        mount = params["mount"]
        client = params["client"]
        duration = params["duration"]
    except KeyError as e:
        logger.error("Bad request - {}".format(repr(e)))
        return Response(status=400,
                        text="Bad request",
                        headers={"Icecast-Auth-Message": "bad_request"})

    # Listens shorter than the configured minimum are dropped entirely.
    min_listen_time = float(cfg.get("stats", "min_listen_time"))
    if float(duration) < min_listen_time:
        await conn.execute(
            "DELETE FROM listens.listen WHERE mount = $1 AND client_id = $2",
            mount, client)
        return Response(status=200)

    # Otherwise close the row by deriving the end from the reported duration.
    await conn.execute(
        """
        UPDATE listens.listen
        SET time_end = time_start + ($3 || ' seconds')::INTERVAL
        WHERE mount = $1 AND client_id = $2
        """, mount, client, duration)
    return Response(status=200)
def test_api(): host = cfg.get('Locust', 'host') # header = {'Content-Type': 'application/octet-stream', # 'Content-Length': '0'} # api = '/api/register/getorgbylogin' # res = requests.post(host + api, headers=header, data=None) # print(res.content) header = {'Content-Type': 'application/octet-stream'} api = '/api/login/userlogin' from compileProtobuf.dstPb.AuthorizeInfoProto_pb2 import AdminLoginInputProto payload = AdminLoginInputProto() payload.account = 'Admin' # payload.account = 'admin' payload.password = '******' # payload.password = '******' payload.organizationID = '-1' payload.userType = 3 payload = payload.SerializeToString() res = requests.post(host + api, data=payload, headers=header) from compileProtobuf.dstPb.PageResponse_pb2 import PageResponse response = PageResponse() response.ParseFromString(res.content) from compileProtobuf.dstPb.AdminLoginUserInfoProto_pb2 import AdminLoginUserInfoProto response_data = AdminLoginUserInfoProto() response_data.ParseFromString(response.data) print(res.status_code, response.isSuccess, response_data)
def compile_cxxfile(cxxfile, module_so=None, **kwargs):
    '''c++ file -> native module

    Return the filename of the produced shared library.
    Raises CompileError on failure.
    '''
    # FIXME: not sure about overriding the user defined compiler here...
    compiler = kwargs.get('cxx', cfg.get('user', 'cxx'))
    preprocessor_flags = cppflags() + kwargs.get('cppflags', [])
    compiler_flags = cxxflags() + kwargs.get('cxxflags', [])
    linker_flags = ldflags() + kwargs.get('ldflags', [])

    # Derive the output name from the input when the caller did not set one.
    if module_so is None:
        module_so = os.path.splitext(cxxfile)[0] + ".so"

    cmd = ([compiler, cxxfile]
           + preprocessor_flags
           + compiler_flags
           + ["-shared", "-o", module_so]
           + linker_flags)
    try:
        logger.info("Command line: " + _format_cmdline(cmd))
        output = check_output(cmd, stderr=STDOUT)
    except CalledProcessError as e:
        raise CompileError(e.cmd, e.output)
    logger.info("Generated module: " + module_so)
    logger.info("Output: " + output)
    return module_so
def handle_new_message(self, message: Message):
    """Validate the user's full name and advance to the region page."""
    # Reject anything that does not look like a full name.
    if not test_fio(message.text):
        self._bot.send_message(Message(error_text))
        return
    requests.put(
        cfg.get('API', 'API_ADDRESS') +
        f'/student/{self._bot.get_current_user()}',
        {'name': message.text})
    self.on_page_changing.notify(RegionInputPage(self._bot))
class Judger(object):
    """Heuristically decide whether a file looks like a production log.

    ``filter`` returns a positive rejection code when the file fails a
    check, the chardet result dict when every check passes, or 0 when the
    file could not be inspected at all.
    """
    # Files smaller than this (bytes) look too small to be production logs.
    __SmallFileMaxSize = cfg.getint('ScanFile', 'SmallFile') * 1024
    # Files untouched for longer than this (seconds) are considered stale.
    __LastUpdateSeconds = cfg.getint('ScanFile', 'LastUpdate') * 3600
    # How many bytes to sample for the character-set detection probe.
    __CodecCheckSize = cfg.getint('ScanFile', 'CodecCheck')
    # Known non-log extensions (lower-cased, whitespace separated, with dot).
    __ExcludedExtensions = cfg.get('ScanFile', 'ExcludedExt').lower().split()

    @classmethod
    def filter(cls, file_fullname):
        """Inspect one file.

        Returns a rejection code (1 stale, 2 too small, 3 excluded
        extension, 4 executable, 5 undetectable encoding), the detected
        charset dict on success, or 0 when stat/open raised.
        """
        try:
            size = path.getsize(file_fullname)
            last_update = path.getmtime(file_fullname)
            if time() - last_update > cls.__LastUpdateSeconds:  # long time no update
                return 1
            if win and last_update <= path.getctime(file_fullname):
                # not updated after creation (no create time on linux)
                return 1
            if size < cls.__SmallFileMaxSize:  # too small for a production log
                return 2
            # BUG FIX: the original used file_fullname.rfind('.'), which
            # returns -1 for dot-less names and then sliced off only the
            # last character; splitext handles that case correctly ('').
            if path.splitext(file_fullname)[1].lower() in cls.__ExcludedExtensions:
                return 3
            if (not win) and access(file_fullname, X_OK):  # unix executable, not a log
                return 4
            with open(file_fullname, 'rb') as fp:
                # Probe the middle of the file when it is large enough —
                # more representative than the (often header-like) start.
                if size > cls.__CodecCheckSize * 2:
                    fp.seek(int(size / 2))
                charset = detect(fp.read(cls.__CodecCheckSize))
                if charset['confidence'] < 0.5:  # not text, not a log
                    return 5
                return charset
        except Exception as err:
            log.warning(file_fullname + '\t' + str(err))
            return 0
def export():
    """Export exam/result dates as MediaWiki table rows to export_wiki.txt.

    Combines the course list, the exam schedule and the announced results
    into one row per course, sorted by result date (newest first) and then
    alphabetically by course title.
    """
    entries = []
    courseList = courses.get()
    dates = schedule.get()
    resultDates = results.get()
    # Courses with announced results but no scheduled exam still get a row.
    for key, r in resultDates.items():
        if key not in dates:
            dates[key] = ""
    for key, d in dates.items():
        '''
        edate: Exam date
        rdate: Results date
        rdiff: Difference between results and exam date (in days)
        rurl: Results link
        '''
        edate = "-"
        rdate = "-"
        rdiff = "-"
        rurl = "-"
        rdateobj = datetime.min
        edateobj = datetime.min
        if d and d != "not assigned":
            edateobj = datetime.strptime(d, "%d/%m/%Y")
            # use %Y/%m/%d format to allow sorting of date columns
            edate = edateobj.strftime("%Y/%m/%d")
        # if the course exam results have been announced
        if key in resultDates:
            rdateobj = datetime.strptime(resultDates[key]["date"],
                                         "%d/%m/%Y")
            rdate = rdateobj.strftime("%Y/%m/%d")
            rurl = "[" + resultDates[key]["source"] + " forum]"
            if d and d != "not assigned":
                # Days between exam and announcement, zero-padded so the
                # wiki column sorts lexicographically.
                rdiffobj = rdateobj - edateobj
                rdiff = "{:03d}".format(rdiffobj.days)
        courseLink = courseList[key]["title"]
        if courseList[key]["wiki"]:
            courseLink = courseList[key]["wiki"] + "|" + courseLink
        semesterLink = "-"
        if courseList[key]["semester"]:
            semesterLink = "[[:Κατηγορία:{}ο Εξάμηνο|{}º]]".format(
                courseList[key]["semester"], courseList[key]["semester"])
        flowLink = "-"
        if courseList[key]["flow"]:
            flowLink = "[[:Κατηγορία:Ροή {}|{}]]".format(
                courseList[key]["flow"], courseList[key]["flow"])
        # use (max date - results date) as primary key so that the entries list will
        # be sorted in descending order of result dates
        # but ascending (alphabetical) order of course titles
        entries.append((datetime.max - rdateobj, courseLink, semesterLink,
                        flowLink, edate, rdate, rdiff, rurl))
    entries.sort()
    with open("outputs/" + cfg.get("folder") + "/export_wiki.txt",
              "w",
              encoding="utf8") as fout:
        # First tuple element is the sort key only; rows start at index 1.
        for entry in entries:
            fout.write(
                "|[[{}]]||{}||{}||{}||{}||{}||{}\n|-\n".format(*entry[1:]))
def services_buttons(callback_data_func):
    """Build an inline keyboard with one button row per configured service.

    ``callback_data_func`` maps a service object to its callback payload.
    """
    rows = [
        [InlineKeyboardButton(text="%s (%s)" % (svc.name, svc.mode),
                              callback_data=callback_data_func(svc))]
        for svc in cfg.get("services")
    ]
    return InlineKeyboardMarkup(rows)
def handle_new_message(self, message: Message):
    """Validate the target year and advance to the subjects page."""
    if not test_target_year(message.text, self._bot.get_current_user()):
        self._bot.send_message(Message(error_text))
        return
    requests.put(
        cfg.get('API', 'API_ADDRESS') +
        f'/student/{self._bot.get_current_user()}',
        {'target_year': message.text})
    self.on_page_changing.notify(SubjectsInputPage(self._bot))
def try_to_add_subject(self, subject_name: str):
    """Add the named subject (case-insensitive) if the API knows it.

    Returns True when a matching subject was appended to the selection,
    False when it is unknown or already chosen.
    """
    wanted = subject_name.lower()
    catalogue = requests.get(cfg.get('API', 'API_ADDRESS') +
                             '/subjects').json()
    for candidate in catalogue['subjects']:
        if candidate['name'] == wanted and candidate not in self._subjects:
            self._subjects.append(candidate)
            return True
    return False
def get_token():
    """Fetch an internal service token from the token endpoint."""
    url = cfg.get('Token', 'url') + '/Token/RetriveInternal'
    body = {
        "ProductName": "eWordIMCIS",
        "HospitalCode": "QWYHZYFZX",
        "RequestIP": "192.168.1.56",
    }
    response = requests.post(url,
                             headers={'Content-Type': 'application/json'},
                             json=body)
    return response.json()['token']
def getLast():
    """Return the last processed result id, or "none" if unknown."""
    filename = "outputs/" + cfg.get("folder") + "/last_result_id.txt"
    try:
        with open(filename, "r") as fp:
            content = fp.read()
    except FileNotFoundError:
        # No file yet: nothing has been processed so far.
        return "none"
    return content if content else "none"
async def listener_add(req: Request) -> Response:
    """Icecast auth endpoint: record a new listener and allow the mount.

    Always answers 200 with the configured auth header (so Icecast accepts
    the listener) unless the request itself is malformed; the allow/ignore
    lists only control whether a listen row is INSERTed.
    """
    global conn
    await init()
    params = await req.post()
    try:
        mount = params["mount"]
        client = params["client"]
        ip = params["ip"]
        agent = params["agent"]
        header_name, header_value = cfg.get("stats",
                                            "response_header").split(": ")
    except KeyError as e:
        logger.error("Bad request - {}".format(repr(e)))
        return Response(status=400,
                        text="Bad request",
                        headers={"Icecast-Auth-Message": "bad_request"})

    # BUG FIX: "".split(",") yields [''], so with an unset option the
    # original saw a "non-empty" allow-list containing one empty string
    # and skipped recording every listener.  Filter out empty entries.
    only_accept_from = [
        x.strip()
        for x in cfg.get("stats", "only_accept_from", fallback="").split(",")
        if x.strip()
    ]
    ignore_from = [
        x.strip()
        for x in cfg.get("stats", "ignore_from", fallback="").split(",")
        if x.strip()
    ]
    # Allowed but not recorded: return early without inserting a row.
    if only_accept_from and ip not in only_accept_from:
        return Response(status=200, headers={header_name: header_value})
    if ip in ignore_from:
        return Response(status=200, headers={header_name: header_value})

    await conn.execute(
        """
        INSERT INTO listens.listen
            (mount, client_id, ip_address, user_agent, time_start, time_end)
        VALUES ($1, $2, $3::inet, $4, NOW(), NULL)
        """, mount, client, ip, agent)
    return Response(status=200, headers={header_name: header_value})
async def my_background_task(self):
    """Run the monitor forever; alert on failure, heartbeat every 10th run."""
    await self.wait_until_ready()
    counter = 0
    while not self.is_closed():
        counter += 1
        result, message = await self.monitor.run()
        if result is False:
            # A failed check always goes straight to the event channel.
            await self.event_channel.send(message)
        elif counter % 10 == 0:
            # Periodic "still alive" message to the log channel.
            await self.log_channel.send(message)
        await asyncio.sleep(cfg.get('interval'))
def main():
    """Boot the search service: log system info, show the index version,
    then start the proxy, admin and indexing threads and wait for the
    shutdown event before joining them all.
    """
    import PyLucene
    init(CONFIG_FILENAME)
    setup()

    # log some system info
    platform = sys.platform
    if 'win32' in sys.platform:
        platform += str(sys.getwindowsversion())
    log.info('-'*70)
    log.info('%s %s', config.APPLICATION_NAME, cfg.get('version','number'))
    log.info('Python %s', sys.version)
    log.info(' Platform %s', platform)
    log.info(' pwd: %s, defaultencoding: %s', os.getcwd(),
             sys.getdefaultencoding())
    log.info('PyLucene %s Lucene %s', PyLucene.VERSION,
             PyLucene.LUCENE_VERSION)

    # show index version
    import lucene_logic
    dbindex = cfg.getPath('archiveindex')
    reader = lucene_logic.Reader(pathname=dbindex)
    version = reader.getVersion()
    log.info(' Index version %s', version)

    # The proxy runs on a plain Python thread; admin and index workers run
    # on PyLucene threads — presumably so they may call into the JVM
    # (NOTE(review): confirm against the PyLucene/JCC threading rules).
    proxyThread = threading.Thread(target=proxyMain, name='proxy')
    #proxyThread.setDaemon(True)
    proxyThread.start()

    adminThread = PyLucene.Thread(runnable(adminMain))
    #adminThread.setDaemon(True)
    adminThread.start()

    # time.sleep(3)
    indexThread = PyLucene.Thread(runnable(indexMain))
    indexThread.start()

    # main thread sleep
    _shutdownEvent.wait()

    # shutdown: join every worker before exiting
    log.fatal('System shutting down.')
    indexThread.join()
    log.fatal('indexThread terminated.')
    adminThread.join()
    log.fatal('adminThread terminated.')
    proxyThread.join()
    log.fatal('proxyThread terminated.')
    log.fatal('End of main thread.')

#if __name__ == '__main__':
#    main()
def percent(start, log, workday=None):
    """Send a desktop notification with the elapsed share of the workday.

    :param start: datetime the working day began
    :param log: description of the last log entry, echoed in the message
    :param workday: length of the day in hours; defaults to the
        WORKING_HOURS config value (falling back to 8)
    """
    if not workday:
        workday = cfg.get('WORKING_HOURS', 8)
    now_dt = datetime.now()
    # Truncate "now" to hour:minute so the percentage matches the display.
    # NOTE(review): strptime yields a year-1900 datetime — assumes *start*
    # is normalized the same way by the caller; confirm.
    now = datetime.strptime(f"{now_dt.hour}:{now_dt.minute}", "%H:%M")
    # FIX: dropped the original no-op `start = start` and renamed the local
    # that shadowed this function's own name.
    end = start + timedelta(hours=workday)
    total = mins(end - start)
    spent = mins(now - start)
    elapsed = round(float(float(spent) / float(total)) * 100)
    code, out, err = notify(
        f'{now_dt.strftime("%H:%M")} ({start.strftime("%H:%M")}-{end.strftime("%H:%M")})',
        f"spent {elapsed}% (last log {log})")
def read_config(self):
    """Refresh the upstream proxy target from the configuration.

    Reads ``http_proxy`` in CERN httpd format and stores its netloc as
    the next hop; leaves the current value untouched when the option is
    missing or malformed.
    """
    # reset only http_proxy here?!
    # setting in CERN httpd format (http://www.w3.org/Daemon/User/Proxies/ManyProxies.html)
    http_proxy = cfg.get('http', 'http_proxy', '')
    if not http_proxy:
        return
    # 'http' is the default scheme when the value omits one.
    (scm, netloc, path, params, query,
     fragment) = urlparse.urlparse(http_proxy, 'http')
    if scm != 'http' or not netloc:
        log.error('Invalid http_proxy="%s"', http_proxy)
        return
    self.next_proxy_netloc = netloc
def handle_new_message(self, message: Message):
    """React to confirm/refuse replies for the currently offered event."""
    if not self._offered:
        # Nothing is on offer: tell the user the queue is empty.
        self._bot.send_message(Message(empty_event_queue_text))
        return
    if message.text == confirm_button.text:
        self._offered = False
        self.try_offer()
    elif message.text == refuse_button.text:
        # Record the refusal so this contest is not offered again.
        requests.post(
            cfg.get('API', 'API_ADDRESS') +
            f'/student_declines/{self._bot.get_current_user()}',
            {'contest_id': self.current_event['contest']['id']})
        self._bot.send_message(Message(ok_text))
        self._offered = False
        self.try_offer()
    else:
        self._bot.send_message(Message(command_error_text))
def draw(self, seq):
    """Render the fractal's segments to output/out.<seq>.png."""
    # Hoist the config reads used throughout the routine.
    size = cfg.getint("Plot", "size")
    margin = cfg.getint("Plot", "margin")

    # Colour gradient: (0,150,255) [0x0096ff] -> (42,22,69) [0x45162a]
    def colour_lookup(ratio, shade=False):
        r = 0 + (ratio * (42 - 0))
        g = 150 + (ratio * (22 - 150))
        b = 255 + (ratio * (69 - 255))
        if shade:
            r /= 3.0
            g /= 3.0
            b /= 3.0
        return "rgb({},{},{})".format(int(r), int(g), int(b))

    im = Image.new("RGBA", (size, size), (10, 4, 27, 255))
    draw = ImageDraw.Draw(im)
    # Frame marking the drawable area inside the margins.
    draw.line(
        (
            (margin, margin),
            (margin, size - margin),
            (size - margin, size - margin),
            (size - margin, margin),
            (margin, margin),
        ),
        fill="rgb(24,12,54)",
    )

    points = self.fractal.point_set()
    # sort by depth so oldest segments are drawn on top
    points.sort(key=lambda p: -p.depth)

    for point in points:
        fill = colour_lookup(float(point.depth) / (points[0].depth + 1))
        for segment in point.segments():
            end = segment.end()
            # BUG FIX: the bounds check used cfg.get(), which returns a
            # string (TypeError on Python 3 when compared with a number);
            # compare against the integer size like the rest of the code.
            if end.x >= 0 and end.y >= 0 and end.x <= size and end.y <= size:
                draw.line((point.x, point.y, end.x, end.y), fill=fill)

    im.save("output/out." + str(seq) + ".png", "PNG")
def save():
    """Publish the generated results table to the configured wiki page."""
    site = pywikibot.Site()
    # fetch the corresponding page of the course
    page = pywikibot.Page(site, cfg.get("wiki_title"))
    # Assemble the page body from its parts; joined result is identical
    # to the original concatenation.
    parts = [
        '{| class="sortable wikitable" style="text-align:left"' + "\n",
        '! Μάθημα !! Εξ. !! Ροή !! Εξέταση !! Αποτελέσματα !! Διαφορά !! class="unsortable"|Σύνδεσμος' + "\n",
        '|-' + "\n",
        get(),
        "\n",
        '|}' + "\n",
        '----' + "\n",
        'Disclaimer: Η παρούσα σελίδα παράγεται αυτόματα και είναι ακόμη σε δοκιμαστικό στάδιο, επομένως είναι πολύ πιθανό να περιέχει λανθασμένες πληροφορίες. Τα δεδομένα λαμβάνονται από το αντίστοιχο τόπικ αποτελεσμάτων.' + "\n",
        '[[Κατηγορία:Εξεταστική]]',
    ]
    page.text = "".join(parts)
    page.save(
        u"Update αποτελεσμάτων [Αυτόματη επεξεργασία από ShmmywikiBot (χειριστής Tdiam)]"
    )
def print_summary(present, away, total):
    """
    prints the summary

    ```
    [03:30/02:30|06:00|( 58%)]
    ```

    Shows present/away/total minutes, the share of the total spent
    present, and the share of the working day completed.
    """
    str_p = str_print(present)
    str_a = str_print(away)
    # NOTE(review): precedence makes this total += ((away + present) or 1.0),
    # i.e. the 1.0 fallback only applies when away + present == 0; confirm
    # that adding onto the incoming `total` (not replacing it) is intended.
    total += away + present or 1.0
    str_t = str_print(total)
    # Working-day length in hours (default 8) for the second percentage.
    wh = cfg.get('WORKING_HOURS', 8)
    str_percentage = str_percent_print(present, total)
    str_wh = str_percent_print(total, wh * 60)
    print("[" + colored(f"{str_p}/", "green") + colored(f"{str_a}", "red") +
          colored(f"|{str_t}|", "blue") +
          colored(f"{str_percentage}", "grey", "on_green") + "," +
          colored(f"{str_wh}", "grey", "on_magenta") + "]")
def main():
    """Check a URL against VirusTotal and Google Safebrowsing, printing
    the scanners that flagged it."""
    args = parse_args()
    api_key = cfg.get('vt_apikey')
    target_url = args.check_url
    check_reason = args.why
    vtcheck = vt(api_key, target_url)
    report = vtcheck.get_urlvt()
    print(report)

    response_code = report['response_code']
    if response_code == 1:
        # Report available: collect every engine that flagged the URL.
        positives = []
        for engine in sorted(report.get('scans')):
            scan = report['scans'][engine]
            if scan.get('detected'):
                positives.append([
                    engine,
                    'True',
                    scan['result'] if scan['result'] else ' -- ',
                    scan['version']
                    if 'version' in scan and scan['version'] else ' -- ',
                    scan['update']
                    if 'update' in scan and scan['update'] else ' -- ',
                ])
        print(positives)
    elif response_code == -2:
        # Analysis still queued: wait, then fetch again.
        time.sleep(15)
        report = vtcheck.get_urlvt()
        print(json.dumps(report, sort_keys=False, indent=4))
    elif response_code == 0:
        # Unknown URL: submit it, wait, then fetch the fresh report.
        print('not there, putting')
        vtcheck.put_urlvt()
        time.sleep(15)
        report = vtcheck.get_urlvt()
        print(json.dumps(report, sort_keys=False, indent=4))

    print('[+] Google Safebrowsing')
    sb.safebrowse(target_url, check_reason)
async def ping_to(self):
    """Poll every known camera with an RTSP OPTIONS probe; report failures."""
    await self.wait_until_ready()
    while not self.is_closed():
        for ip, port in self.cameras_list:
            probe = RTSPMessage(ip, port).create_option_msg()
            sock = AsyncSocket(ip, port)
            try:
                await sock.init()
            except OSError as e:
                log.error(f"Error: {e}")
                # Tell the events channel the camera is unreachable.
                await self.event_channel.send(
                    f"Camera at IP: {ip} couldn't connect")
                continue
            reply = await sock.send(probe.encode())
            if b"200 OK" in reply:
                continue
            # Non-OK status: surface the raw response to the events channel.
            await self.event_channel.send(
                f"Camera at IP: {ip} responds Error code {reply}")
        await asyncio.sleep(cfg.get('interval'))
import pymysql

from config import cfg
import log

# Connection settings come from the "mysql" section of the config.
mysql_conf = cfg.get("mysql")

# TODO: cursor connect
# Module-level connection shared by the query helpers below.
db = pymysql.connect(host=mysql_conf["host"],
                     user=mysql_conf["user"],
                     passwd=mysql_conf["passwd"],
                     db=mysql_conf["db"],
                     charset=mysql_conf["charset"])
# READ COMMITTED so this long-lived session observes freshly inserted rows.
db.cursor().execute("SET SESSION TRANSACTION ISOLATION LEVEL READ COMMITTED")


def get_data(data_start_time, data_end_time):
    """Return (th_time, temperature, humidity) rows within the interval."""
    cursor = db.cursor()
    # SECURITY FIX: parameterized query instead of str.format interpolation,
    # which allowed SQL injection through the time arguments.
    sql = ("SELECT th_time, temperature, humidity FROM data "
           "WHERE th_time between %s AND %s")
    cursor.execute(sql, (data_start_time, data_end_time))
    return cursor.fetchall()


def get_last_data():
    """Return the single most recent (th_time, temperature, humidity) row."""
    cursor = db.cursor()
    sql = ("SELECT th_time, temperature, humidity FROM data "
           "ORDER BY th_time DESC LIMIT 1")
    cursor.execute(sql)
    return cursor.fetchall()
def field_value(num, time=None):
    """Build the column->value map for one generated exam record.

    :param num: sequence number of the record, mixed into several ids
    :param time: pre-assigned date string (``YYYY-MM-DD``) when dates are
        spread evenly; when omitted, a random date inside the configured
        window is chosen instead
    :return: dict mapping column names to generated values
    """
    if not if_average_by_Time:
        # BUG FIX: the original assigned the random fallback date to an
        # unused ``time_field`` variable — and a trailing comma even made
        # it a 1-tuple — while the dict below kept using the still-None
        # ``time`` parameter, crashing on ``time + ...``.  Assign the
        # fallback to ``time`` so the random-date branch actually works.
        time = str(
            fake.date_between_dates(
                datetime.strptime(cfg.get('task', 'startDate'), "%Y-%m-%d"),
                datetime.strptime(cfg.get('task', 'endDate'), "%Y-%m-%d")))

    # Generated person and hospital business data for this record.
    pi = PersonInfo()
    hs = HospitalData(organizationName, organizationID, num)

    kv = {
        "ExamUID": hs.examUid,
        "PatientID": time + '-' + str(num),
        "PIDAssigningAuthority": hs.assigningAuthority,
        "PatientMasterID": hs.pid,
        "PatientClass": hs.patientClass,
        "VisitUID": hs.visitUid,
        "OrganizationID": hs.hospitalCode,
        "FillerOrderNO": hs.fillerOrderNo,
        "FillerAssigningAuthority": hs.assigningAuthority,
        "FillerPatientID": time + '-' + str(num),
        "AccessionNumber": time.replace('-', '') + str(num),
        "ServiceText": hs.examBodyPart,
        "ServiceSectID": hs.serviceSectId,
        "ProcedureName": hs.procedureName,
        "ProviderName": PersonInfo().name,
        "RequestDeptName": '申请科室',
        "RequestOrgName": hs.hospital,
        "RequestedDate": time + " 07:00:00",
        "ClinicDiagnosis": "临床诊断",
        "RegTime": time + " 08:00:00",
        "RegisterName": PersonInfo().name,
        "ExamDate": time + " 08:20:00",
        "ExamEndDate": time + " 08:25:00",
        "ExamLocation": hs.serviceSectId + '机房',
        "StudyInstanceUID": hs.studyUid,
        "ResultAssistantName": PersonInfo().name,
        # "ResultPrincipalID": "admin",
        "ResultPrincipalName": PersonInfo().name,
        "PreliminaryDate": time + " 14:07:05",
        "AuditDate": time + " 15:28:31",
        "ResultDate": time + " 15:28:31",
        "ResultServiceCenterUID": "00000000-0000-0000-0000-000000000000",
        "ResultStatus": "审核完成",
        "ResultStatusCode": "3080",
        "ResultPrintCount": 0,
        "AbnormalFlags": "阴性",
        "PrivacyLevel": 0,
        "PaymentsFlag": "1",
        "FilmCount": 0,
        "FilmNeed": "0",
        "HasImage": "1",
        "ImageLocation": 1,
        "ConsultStatus": 0,
        "LastUpdateDate": time + " 11:23:21.793",
        "DataSource": hs.examSystem,
        "LockFlag": "0",
        "LockUserUID": "00000000-0000-0000-0000-000000000000",
        "InWritingUserUID": "00000000-0000-0000-0000-000000000000",
        "MessageCount": 0,
        "UnProcessWorkflowCount": 0,
        "PushState": "0",
        "DeleteFlag": "0",
        "ResultSelfPrintCount": 0,
        "DrugDose": 0,
        "IdcasState": 0,
        "PriorityFlag": 0,
        "DigitalImageNeed": 0,
        "BusinessStatus": 2,
        "UploadFlag": "0",
        "ConsultationState": 2,
        "HasReport": 0,
        "RegisterFlag": "0",
        "IsInterconnectData": "0",
        "PushFlag": "0",
        "ImagingFinding": "*****对称,**居中,**未见明显病灶。",
        "ImagingDiagnosis": "**未见明显异常。",
        "UploadRetryTimes": 0,
        "IsMPI": 1,
        "CreateDate": time + " 11:22:09.027",
        "CreateOrgnizationID": hs.hospitalCode,
        "Name": pi.name,
        "Sex": pi.sex,
        "BirthDate": pi.birthday,
        "IDCardNO": pi.IDCard,
        "ContactPhoneNO": pi.phone_number,
        "Status": "0",
        "VisitID": "69915361-2e2b-4932-bf52-44e9a458d1f2",
        "PatientType": hs.patientClass,
        # "MedRecNO": "201809300013",
        "Age": pi.age,
        "AgeUnit": "岁",
        "PregnancyFlag": "0",
        "AdmitDate": time + " 07:56:30",
        "ReAmissionFlag": "0",
        "AdmitDeptName": hs.patientClass,
        "DeptName": hs.patientClass,
        "FileUID": hs.fileUID,
        "BusinessID": hs.businessID,
        "BusinessType": "Exam",
        "BusinessTime": time + " 07:56:30",
        "ClassCode": "Exam",
        "TypeCode": "ExamImage",
        "MimeType": "DICOMDIR",
        "FileSHA": "1adb6bb40a16591e330127f0e7ee8d09d09af861",
        "FileSize": 3250694,
        "FileCreateTime": time + " 09:35:53.943",
        "CreateOrganizationID": hs.hospitalCode,
        "FolderFlag": "0",
        "LocalFileIsAlreadyDeleted": "0",
        "ServiceUID": "88760571-e9e2-4536-9a6d-acaa012c9d83",
        "MediaUID": "5de204a7-4c32-416a-99e3-acaa012c8933",
        "FileRelativePath": "year\\month\\day\\Exam\\FileUID\\Exam\\ExamImage\\DICOMDIR",
        "CreateTime": time + " 09:35:53.95",
        "MigrationFlag": "0",
        "FileDeleteFlag": "0",
        "FileUploadFlag": "0",
        "ArchiveFlag": "0",
        "ArchiveRetryTimes": 0,
        "PushRetryTimes": 0
    }
    return kv
from faker import Faker
from config import cfg
from datetime import datetime
from common.utils import ger_dates
from common.generator import PersonInfo, HospitalData

# Chinese-locale fake-data generator.
fake = Faker('zh_CN')
# Comma-separated list of tables to insert into (from the config).
tables = cfg.get_raw('database', 'tableList')
# Whether generated time fields are spread evenly across the date window.
if_average_by_Time = cfg.get_bool('task', 'ifAverageByTime')
# Organization identity stamped onto every generated record.
organizationID = cfg.get('business', 'organizationID')
organizationName = cfg.get('business', 'organizationName')


# Assign generated data to fields (earlier prototype, kept for reference)
# def field_value():
#     kv = {"A_uid": uuid.uuid4(),
#           "A_uid2": uuid.uuid4(),
#           "A_field1": fake.name(),
#           "A_field2": random.choice(['男', '女']),
#           "A_field3": fake.date_of_birth(tzinfo=None, minimum_age=0, maximum_age=120),
#           "Time": fake.date_between_dates(datetime.strptime(cfg.get('task', 'startDate'), "%Y-%m-%d"),
#                                           datetime.strptime(cfg.get('task', 'endDate'), "%Y-%m-%d")),
#           "B_uid": uuid.uuid4(),
#           "B_field1": fake.job(),
#           "B_field2": fake.address(),
#           "C_field1": fake.phone_number(),
def generate_cxx(module_name, code, specs=None, optimizations=None):
    '''python + pythran spec -> c++ code
    returns a BoostPythonModule object

    NOTE(review): legacy Python 2 code (iteritems / itervalues / xrange).
    '''
    pm = PassManager(module_name)

    # front end
    ir = ast.parse(code)

    # parse openmp directive
    pm.apply(GatherOMPData, ir)

    # avoid conflicts with cxx keywords
    renamings = pm.apply(NormalizeIdentifiers, ir)
    check_syntax(ir)

    # middle-end: run the configured optimization passes over the IR
    optimizations = (optimizations or
                     cfg.get('pythran', 'optimizations').split())
    optimizations = map(_parse_optimization, optimizations)
    refine(pm, ir, optimizations)

    # back-end: dump the refined IR as C++
    content = pm.dump(Cxx, ir)

    if not specs:
        # Match "None" AND empty specs — emit a bare generable wrapper
        # with no Boost.Python glue.
        class Generable:
            def __init__(self, content):
                self.content = content

            def generate(self):
                return "\n".join("\n".join(l for l in s.generate())
                                 for s in self.content)
        mod = Generable(content)
    else:
        # uniform typing: promote bare signatures to 1-tuples
        for fname, signatures in specs.items():
            if not isinstance(signatures, tuple):
                specs[fname] = (signatures,)

        mod = BoostPythonModule(module_name)
        mod.use_private_namespace = False
        # very low value for max_arity leads to various bugs
        max_arity = max(4, max(max(map(len, s)) for s in specs.itervalues()))
        mod.add_to_preamble([Define("BOOST_PYTHON_MAX_ARITY", max_arity)])
        mod.add_to_preamble([Define("BOOST_SIMD_NO_STRICT_ALIASING", "1")])
        mod.add_to_preamble(content)
        mod.add_to_init([
            Statement('import_array()'),
            Statement('boost::python::implicitly_convertible<std::string,'
                      + 'pythonic::core::string>()')])
        # One exported wrapper per (function, signature) pair.
        for function_name, signatures in specs.iteritems():
            # NormalizeIdentifiers may have renamed the function internally.
            internal_func_name = renamings.get(function_name,
                                              function_name)
            for sigid, signature in enumerate(signatures):
                numbered_function_name = "{0}{1}".format(internal_func_name,
                                                         sigid)
                arguments_types = [pytype_to_ctype(t) for t in signature]
                has_arguments = HasArgument(internal_func_name).visit(ir)
                arguments = ["a{0}".format(i)
                             for i in xrange(len(arguments_types))]
                name_fmt = pythran_ward + "{0}::{1}::type{2}"
                args_list = ", ".join(arguments_types)
                specialized_fname = name_fmt.format(
                    module_name,
                    internal_func_name,
                    "<{0}>".format(args_list) if has_arguments else "")
                result_type = ("typename std::remove_reference" +
                               "<typename {0}::result_type>::type".format(
                                   specialized_fname))
                # Register the converters needed by this signature.
                mod.add_to_init(
                    [Statement("python_to_pythran<{0}>()".format(t))
                     for t in _extract_all_constructed_types(signature)])
                mod.add_to_init([Statement(
                    "pythran_to_python<{0}>()".format(result_type))])
                mod.add_function(
                    FunctionBody(
                        FunctionDeclaration(
                            Value(
                                result_type,
                                numbered_function_name),
                            [Value(t, a)
                             for t, a in zip(arguments_types, arguments)]),
                        Block([Statement("return {0}()({1})".format(
                            pythran_ward + '{0}::{1}'.format(
                                module_name, internal_func_name),
                            ', '.join(arguments)))])),
                    function_name)
    return mod
def ldflags():
    """The linker flags to link a Pythran code into a shared library"""
    flags = _python_ldflags()
    # System flags first, then user overrides, as before.
    for section in ('sys', 'user'):
        flags = flags + cfg.get(section, 'ldflags').split()
    return flags
def generate_cxx(module_name, code, specs=None, optimizations=None):
    '''python + pythran spec -> c++ code
    returns a BoostPythonModule object

    NOTE(review): legacy Python 2 code (iteritems / itervalues / xrange).
    '''
    pm = PassManager(module_name)

    # front end
    ir, renamings = frontend.parse(pm, code)

    # middle-end: run the configured optimization passes over the IR
    optimizations = (optimizations or
                     cfg.get('pythran', 'optimizations').split())
    optimizations = map(_parse_optimization, optimizations)
    refine(pm, ir, optimizations)

    # back-end
    content = pm.dump(Cxx, ir)

    # instanciate the meta program
    if specs is None:
        # No specs: return a plain printable wrapper around the C++ code.
        class Generable:
            def __init__(self, content):
                self.content = content

            def __str__(self):
                return str(self.content)
            generate = __str__
        mod = Generable(content)
    else:
        # uniform typing: promote bare signatures to 1-tuples
        for fname, signatures in specs.items():
            if not isinstance(signatures, tuple):
                specs[fname] = (signatures,)

        mod = BoostPythonModule(module_name)
        mod.use_private_namespace = False
        # very low value for max_arity leads to various bugs
        min_val = 2
        specs_max = [max(map(len, s)) for s in specs.itervalues()]
        max_arity = max([min_val] + specs_max)
        mod.add_to_preamble([Define("BOOST_PYTHON_MAX_ARITY", max_arity)])
        mod.add_to_preamble([Define("BOOST_SIMD_NO_STRICT_ALIASING", "1")])
        mod.add_to_preamble([Include("pythonic/core.hpp")])
        mod.add_to_preamble([Include("pythonic/python/core.hpp")])
        mod.add_to_preamble(map(Include, _extract_specs_dependencies(specs)))
        mod.add_to_preamble(content.body)
        mod.add_to_init([
            Line('#ifdef PYTHONIC_TYPES_NDARRAY_HPP\nimport_array()\n#endif')])

        # topologically sorted exceptions based on the inheritance hierarchy.
        # needed because otherwise boost python register_exception handlers
        # do not catch exception type in the right way
        # (first valid exception is selected)
        # Inheritance has to be taken into account in the registration order.
        exceptions = nx.DiGraph()
        for function_name, v in functions.iteritems():
            for mname, symbol in v:
                if isinstance(symbol, ConstExceptionIntr):
                    exceptions.add_node(
                        getattr(sys.modules[mname], function_name))

        # add edges based on class relationships
        for n in exceptions:
            if n.__base__ in exceptions:
                exceptions.add_edge(n.__base__, n)

        sorted_exceptions = nx.topological_sort(exceptions)
        mod.add_to_init([
            # register exception only if they can be raise from C++ world to
            # Python world. Preprocessors variables are set only if deps
            # analysis detect that this exception can be raised
            Line('#ifdef PYTHONIC_BUILTIN_%s_HPP\n'
                 'boost::python::register_exception_translator<'
                 'pythonic::types::%s>(&pythonic::translate_%s);\n'
                 '#endif' % (n.__name__.upper(), n.__name__, n.__name__)
                 ) for n in sorted_exceptions])

        # One exported wrapper per (function, signature) pair.
        for function_name, signatures in specs.iteritems():
            # The frontend may have renamed the function internally.
            internal_func_name = renamings.get(function_name,
                                               function_name)
            for sigid, signature in enumerate(signatures):
                numbered_function_name = "{0}{1}".format(internal_func_name,
                                                         sigid)
                arguments_types = [pytype_to_ctype(t) for t in signature]
                has_arguments = HasArgument(internal_func_name).visit(ir)
                arguments = ["a{0}".format(i)
                             for i in xrange(len(arguments_types))]
                name_fmt = pythran_ward + "{0}::{1}::type{2}"
                args_list = ", ".join(arguments_types)
                specialized_fname = name_fmt.format(
                    module_name,
                    internal_func_name,
                    "<{0}>".format(args_list) if has_arguments else "")
                result_type = ("typename std::remove_reference" +
                               "<typename {0}::result_type>::type".format(
                                   specialized_fname))
                # Register the converters needed by this signature.
                mod.add_to_init(
                    [Statement(
                        "pythonic::python_to_pythran<{0}>()".format(t))
                     for t in _extract_all_constructed_types(signature)])
                mod.add_to_init([Statement(
                    "pythonic::pythran_to_python<{0}>()".format(result_type))])
                mod.add_function(
                    FunctionBody(
                        FunctionDeclaration(
                            Value(
                                result_type,
                                numbered_function_name),
                            [Value(t, a)
                             for t, a in zip(arguments_types, arguments)]),
                        Block([Statement("return {0}()({1})".format(
                            pythran_ward + '{0}::{1}'.format(
                                module_name, internal_func_name),
                            ', '.join(arguments)))])),
                    function_name)
        # call __init__() to execute top-level statements
        # NOTE(review): placed inside the else-branch (Generable has no
        # add_to_init) — confirm against the original indentation.
        init_call = '::'.join([pythran_ward + module_name, '__init__()()'])
        mod.add_to_init([Statement(init_call)])
    return mod
def cxxflags():
    """The C++ flags to compile a Pythran generated cpp file"""
    # User flags first, then system flags, matching the original order.
    user_flags = cfg.get('user', 'cxxflags').split()
    sys_flags = cfg.get('sys', 'cxxflags').split()
    return user_flags + sys_flags