def initCourseList(self):
    """Populate the course tree (subject -> level -> course) from the
    database, with checkable level/course items, sorted ascending."""
    self.courseList.setHeaderLabel("Course Available")
    # Show the horizontal scrollbar when needed.
    header = self.courseList.header()
    header.setSectionResizeMode(QHeaderView.ResizeToContents)
    header.setStretchLastSection(False)
    # Connect to the database.
    db = DB()
    db.useDatabase()
    # Fetch and display the data.
    for subject in db.getSubject():
        subjItem = QTreeWidgetItem(self.courseList)
        subjItem.setText(0, subject)
        for level in db.getLevel(subject):
            levelItem = QTreeWidgetItem(subjItem)
            # Tri-state so toggling a level (de)selects all its courses.
            levelItem.setFlags(levelItem.flags() | Qt.ItemIsTristate
                               | Qt.ItemIsUserCheckable)
            levelItem.setText(0, "{} Level".format(level))
            levelItem.setCheckState(0, Qt.Unchecked)
            for course in db.getCourse(subject, level):
                courseItem = QTreeWidgetItem(levelItem)
                title = db.getTitle(subject, course)
                courseItem.setFlags(courseItem.flags() | Qt.ItemIsUserCheckable)
                courseItem.setText(0, "{} - {}".format(course, title[0]))
                courseItem.setCheckState(0, Qt.Unchecked)
    self.courseList.sortItems(0, Qt.AscendingOrder)
    db.close()
def dashboard():
    """Render the home page; unauthenticated users get the login page."""
    if not session.get('logged_in'):
        return render_template('login.html')
    # Pull everything the dashboard template needs from Postgres.
    pg = DB()
    pg.connect()
    context = {
        'output': pg.get_device_info(),
        'datums': pg.get_device_datums(),
        'monitored_devices': pg.get_monitored_devices(),
        'active_mon_events': pg.get_active_mon_events(),
        'active_reachability_events': pg.get_active_reachability_events(),
        'interface_status': pg.get_interface_status(),
    }
    crasd = process_check('crasd')
    pg.close()
    # Helper callables (mem_to_g etc.) are handed to the template too.
    return render_template('home.html',
                           mem_to_g=mem_to_g,
                           split_line=split_line,
                           to_mbps=to_mbps,
                           crasd=crasd,
                           **context)
def slot_sync(self, arg):
    """Perform a synchronization with a selected database file but as
    administrative user. This allows even a finalized database to be
    updated.
    """
    # Seed the file dialog with the previously used sync file, if any.
    sfile = self.settings.getSetting("syncFile")
    sdir = None
    if sfile:
        sdir = os.path.dirname(sfile)
        sfile = os.path.basename(sfile)
    syncfile = getFile(_("User database file"),
                       startDir=sdir,
                       startFile=sfile,
                       defaultSuffix=".zga",
                       filter=(_("Report Files"), (u"*.zga",)))
    if not syncfile:
        return
    self.settings.setSetting("syncFile", syncfile)
    # The chosen file must belong to the current master database.
    dbs = DBs(syncfile)
    if not dbs.isOpen():
        return
    sdbname = dbs.getConfig(u"dbname")
    dbs.close()
    if sdbname != self.dbname:
        warning(_("%s: Database name does not match current master name")
                % syncfile)
        return
    self.dlg = Output()
    synchronize(self.master, syncfile, self.dlg)
    self.dlg.done()
def get_csv_device():
    """Export device and system information as a downloadable CSV report."""
    pg = DB()
    pg.connect()
    output = pg.get_device_info()
    datums = pg.get_device_datums()
    pg.close()

    device_fields = (output['device_id'], output['status'], output['ip'],
                     output['address'], output['city'], output['state'],
                     output['zip'], output['country'],
                     output['download_rate'], output['upload_rate'])
    system_fields = (datums['os'], datums['kernel'], datums['cpu_vendor'],
                     datums['cpu_model'], mem_to_g(datums['mem']))

    csv = (
        'Device Information\n'
        'Server ID,Status,IP Address,Address,City,State,Zip,Country,Download Rate,Upload Rate\n'
        + ','.join('{0}'.format(f) for f in device_fields) + '\n'
        + '\n\nSystem Information\n'
          'OS,Kernel,CPU Vendor,CPU Model,RAM (Gigs)\n'
        + ','.join('{0}'.format(f) for f in system_fields) + '\n'
    )
    return Response(csv, mimetype="text/csv", headers={
        "Content-disposition": "attachment; filename=device_report.csv"
    })
def get_csv_charts():
    """Export memory-utilization and load-average samples as a CSV download."""
    pg = DB()
    pg.connect()
    data_memory = pg.get_mem_utilized()
    load_avg = pg.get_load_avg()
    pg.close()

    def section(title, rows):
        # One section: title line, 'Value,Time' header, then value,time rows.
        body = ''.join('{0},{1}\n'.format(r[0], r[1]) for r in rows)
        return title + 'Value,Time\n' + body

    csv = (section('Memory Utilization\n', data_memory)
           + section('\n\nLoad Average\n', load_avg))
    return Response(csv, mimetype="text/csv", headers={
        "Content-disposition": "attachment; filename=charts_report.csv"
    })
def decorated_request(*args, **kwargs):
    """Wrap a request handler with a DB connection for its lifetime."""
    DB.connect()
    logging.info("[Connection] to database: established")
    # Process the request itself.
    result = func(*args, **kwargs)
    DB.close()
    logging.info("[Connection] to database: closed")
    return result
def idol_group_wiki_url(self):
    """Scrape the Wikipedia list of Japanese female idol groups and
    interactively register new group names/URLs in the database.

    For each linked group the user is asked whether to register it,
    exclude it, or skip it for now.
    """
    db = DB('iddata')
    # Wikipedia page: list of Japanese female idol groups.
    base_url = 'https://ja.wikipedia.org/wiki/%E6%97%A5%E6%9C%AC%E3%81%AE%E5%A5%B3%E6%80%A7%E3%82%A2%E3%82%A4%E3%83%89%E3%83%AB%E3%82%B0%E3%83%AB%E3%83%BC%E3%83%97%E3%81%AE%E4%B8%80%E8%A6%A7'
    r = requests.get(base_url)
    content = r.content
    soup = BeautifulSoup(content, 'html.parser')
    # Every matching <div> is one column block of group links.
    divs = soup.find_all(
        'div', class_='div-col columns column-count column-count-2')
    # Skip the first two blocks (80s/90s groups) and walk their <a> links.
    for div in divs[2:]:
        idol_groups = div.find_all('a')
        for idol_group in idol_groups:
            # Resolve relative hrefs against the page URL.
            url = urljoin(base_url, idol_group.get('href'))
            name = idol_group.text
            # Already registered or explicitly excluded?  Re-queried each
            # iteration so rows inserted below are seen immediately.
            pass_url = list()
            pass_url.extend(
                db.select('SELECT url FROM idol_group_wiki_url;'))
            pass_url.extend(
                db.select('SELECT url FROM not_idol_group_wiki_url;'))
            if url in pass_url:
                continue
            # Next free idol_group_id (renamed from 'id' to avoid
            # shadowing the builtin).
            max_id = db.select(
                'SELECT MAX(idol_group_id) FROM idol_group_name;')[0]
            new_id = 1 if max_id is None else max_id + 1
            # Interactive registration.
            print(new_id, name, url)
            command = input('新規アイドルグループに登録しますか? (y/n/skip) >>')
            # BUG FIX: the original used "command is 'y'" / "is 'n'", an
            # identity comparison that only works by CPython string
            # interning; compare with '==' instead.
            if command == 'y':
                db.insert(
                    'INSERT INTO idol_group_name (idol_group_id, idol_group_name) VALUES (%s,%s)',
                    [new_id, name])
                db.insert(
                    'INSERT INTO idol_group_wiki_url (idol_group_id, url) VALUES (%s,%s)',
                    [new_id, url])
                print('登録しました')
            elif command == 'n':
                db.insert(
                    'INSERT INTO not_idol_group_wiki_url (not_idol_group_name, url) VALUES (%s,%s)',
                    [name, url])
                print('URLを除外リストに挿入しました')
            else:
                print('スキップしました')
    db.close()
class DBThread(Thread):
    """Consumer thread: drains tweets from a queue and stores them in the DB.

    Terminates on a ``None`` sentinel from the producer or on a stream
    ``disconnect`` message.
    """

    def __init__(self, queue):
        Thread.__init__(self, name="Thread-DB")
        self.queue = queue
        self.db = DB()
        self.logit = logging.getLogger("logit")
        # Batch-commit machinery, currently unused — see TODO in run().
        self.to_commit = Queue()
        self.commit_size = 1

    def run(self):
        self.db.connect()
        while True:
            tweet = self.queue.get()
            if tweet is None:
                # None is the explicit shutdown signal from the producer.
                self.logit.info("Terminal sentinel encountered")
                self.queue.task_done()
                break
            if "delete" in tweet:
                self.logit.info("delete: %s" % str(tweet))
            elif "scrub_geo" in tweet:
                self.logit.info("scrub_geo: %s" % str(tweet))
            elif "limit" in tweet:
                self.logit.warning("limit: %s" % str(tweet))
            elif "status_withheld" in tweet:
                self.logit.info("status_withheld: %s" % str(tweet))
            elif "user_withheld" in tweet:
                self.logit.info("user_withheld: %s" % str(tweet))
            elif "disconnect" in tweet:
                # Stream closed by the server: stop consuming.
                # NOTE(review): this path breaks without task_done(); a
                # queue.join() elsewhere would hang — confirm intended.
                self.logit.warning("disconnect: %s" % str(tweet))
                break
            elif "warning" in tweet:
                self.logit.warning("warning: %s" % str(tweet))
            elif "id" in tweet and "text" in tweet:
                # A real status object: persist it.
                try:
                    self.db.add_tweet(tweet)
                except Exception:
                    # Was a bare 'except:'; narrowed so KeyboardInterrupt /
                    # SystemExit are not swallowed.
                    self.logit.exception("db add_tweet exception %s"
                                         % str(tweet))
                # TODO: Get multi-queries working (batching via
                # self.to_commit / self.commit_size) so we don't need a
                # super low-latency connection to the database.
            self.queue.task_done()
        self.logit.info("Closing db connection")
        self.db.close()
def get_csv_events():
    """Export available, active and historical events as one CSV download."""
    pg = DB()
    pg.connect()
    available_mon_events = pg.get_available_mon_events()
    active_mon_events = pg.get_active_mon_events()
    active_reachability_events = pg.get_active_reachability_events()
    historical_mon_events = pg.get_all_h_mon_events()
    historical_reachability_events = pg.get_all_h_reach_events()
    pg.close()

    def stamp(t):
        # All timestamps are rendered in the same fixed format.
        return t.strftime('%Y-%m-%d %H:%M:%S')

    parts = ['Available Monitoring Events\n',
             'Event Type ID,Event Description,Monitor\n']
    for row in available_mon_events:
        parts.append('{0},{1},{2}\n'.format(
            row['name'], row['description'], row['monitor']))

    parts.append('\n\nActive Monitoring Events\n')
    parts.append('Event ID,Event Type,Resource,Start Time\n')
    for row in active_mon_events:
        parts.append('{0},{1},{2},{3}\n'.format(
            row['event_id'], row['name'], row['identifier'],
            stamp(row['start_time'])))

    parts.append('\n\nActive Reachability Events\n')
    parts.append('Event ID,Device Name,IP Address,Start Time\n')
    for row in active_reachability_events:
        parts.append('{0},{1},{2},{3}\n'.format(
            row['event_id'], row['name'], row['ip'],
            stamp(row['start_time'])))

    parts.append('\n\nHistorical Monitoring Events\n')
    parts.append('Event ID,Event Type,Resource,Start Time,Stop Time\n')
    for row in historical_mon_events:
        parts.append('{0},{1},{2},{3},{4}\n'.format(
            row[0], row[1], row[2], stamp(row[3]), stamp(row[4])))

    parts.append('\n\nHistorical Reachability Events\n')
    parts.append('Event ID,Device Name,IP Address,Start Time,Stop Time\n')
    for row in historical_reachability_events:
        parts.append('{0},{1},{2},{3},{4}\n'.format(
            row[0], row[1], row[2], stamp(row[3]), stamp(row[4])))

    csv = ''.join(parts)
    return Response(csv, mimetype="text/csv", headers={
        "Content-disposition": "attachment; filename=events_report.csv"
    })
def main():
    """Look up the stored password for user 'jmillan' and print it."""
    pg = DB()
    pg.connect()
    data = pg.get_password('jmillan')
    # Single-argument print(...) is valid as a parenthesized print
    # statement in Python 2 and as a call in Python 3, so this replaces
    # the py2-only bare 'print x' statements without changing output.
    if data:
        print(data[0])
    else:
        print("user not found")
    pg.close()
def create_table():
    """Create the idol_group_twitter_otavector table in the iddata DB.

    (Related, not executed here:
    "CREATE TABLE idol_group_twitter_follower_num (idol_group_id integer, screen_name varchar(255), follower_num integer, recode_date date) WITH OIDS;")
    """
    db = DB("iddata")
    db.execute_sql(
        "CREATE TABLE idol_group_twitter_otavector (idol_group_id integer, screen_name varchar(255), follow_userid varchar(255), follow_num integer, recode_date date) WITH OIDS;"
    )
    db.close()
class DBPipeline(object):
    """Scrapy pipeline that writes every scraped item into the project DB."""

    def open_spider(self, spider):
        # One client per spider run; the target collection is the
        # spider's own db_name.
        self.mongo_client = DB()
        self.collection_name = spider.db_name

    def close_spider(self, spider):
        self.mongo_client.close()

    def process_item(self, item, spider):
        # Items must be plain dicts for insert_one.
        self.mongo_client.insert_one(self.collection_name, dict(item))
        return item
def handle_statistics(message):
    """Send bot usage statistics to the owner's chat, then re-prompt."""
    if message.chat.id != config.owner_chat_id:
        return
    db_worker = DB()
    count_licenseplates = db_worker.get_count_licenseplate()
    count_comments = db_worker.get_count_comment()
    count_photos = db_worker.get_count_photo()
    count_activeusers = db_worker.get_count_activeuser()
    text = ("Активных пользователей - {0}\n"
            "Автомобилей - {1}\n"
            "Комментариев - {2}\n"
            "Фотографий - {3}\n").format(count_activeusers,
                                         count_licenseplates,
                                         count_comments,
                                         count_photos)
    bot.send_message(message.chat.id, text)
    db_worker.close()
    request_lp(message)
def test(): print DB.getPlaceInfoByID(1) class12 = DB.getPlaceInfoByName('12') print class12['id'], class12['name'], class12['type'] print DB.getPlaceInfoByName('Studyroom nord') print DB.getPlaceInfoByType('restroom') DB.updateRestroomPeopleCount(3, 4) print DB.types print DB.getPriorityListFromPlace(class12['id']) print DB.getPriorityListFromPlaceFilterGender(class12['id'], 'M') DB.close()
def _select_idol_group_screen_name(self, start_idol_group_id=0):
    """Return [[idol_group_id, screen_name], ...] for official accounts
    with idol_group_id >= start_idol_group_id, derived from their
    Twitter URLs."""
    db = DB('iddata')
    # Rows look like [[2, 'https://twitter.com/countrygirls_uf'], ...]
    urls = db.select(
        "SELECT idol_group_id, url FROM idol_group_twitter_url WHERE account_type = 'official' AND idol_group_id >= %s ORDER BY idol_group_id ASC"
        % (start_idol_group_id))
    db.close()
    print(urls)
    # Strip each URL down to the bare screen name,
    # e.g. [[2, 'countrygirls_uf'], [5, 'JuiceJuice_uf'], ...]
    screen_names = []
    for row in urls:
        name = row[1]
        for junk in ('https://twitter.com/', 'http://twitter.com/',
                     '/', '?lang=ja'):
            name = name.replace(junk, '')
        screen_names.append([row[0], name])
    return screen_names
def filterEvent(self, panelCourse):
    """Rebuild the instructor filter tree from the courses currently
    selected in the course panel."""
    self.filter.clear()
    self.filter.setHeaderLabel("Instructors by Course")
    db = DB()
    db.useDatabase()
    # Walk the selectedList in the Course Panel; iterate in reverse as
    # the original did.
    root = panelCourse.selectedList.invisibleRootItem()
    for i in reversed(range(root.childCount())):
        slItem = root.child(i)                  # one subject-level entry
        subjName = slItem.text(0).split()[0]
        for j in reversed(range(slItem.childCount())):
            crseText = slItem.child(j).text(0)  # course within the entry
            crseNum = crseText.split()[0]
            # Mirror the course into the filter tree.
            courseNode = QTreeWidgetItem()
            courseNode.setText(0, "{} {}".format(subjName, crseText))
            self.filter.addTopLevelItem(courseNode)
            # One checkable child per instructor teaching the course.
            for instName in db.getInst(subjName, crseNum):
                inst = QTreeWidgetItem(courseNode)
                inst.setText(0, instName)
                inst.setCheckState(0, Qt.Checked)
    db.close()
    self.filter.expandAll()
    self.filter.sortItems(0, Qt.AscendingOrder)
def slot_sync(self, arg):
    """Perform a synchronization with a selected database file but as
    administrative user. This allows even a finalized database to be
    updated.
    """
    previous = self.settings.getSetting("syncFile")
    if previous:
        startDir = os.path.dirname(previous)
        startFile = os.path.basename(previous)
    else:
        startDir, startFile = None, previous
    syncfile = getFile(_("User database file"), startDir=startDir,
                       startFile=startFile, defaultSuffix=".zga",
                       filter=(_("Report Files"), (u"*.zga", )))
    if not syncfile:
        return
    self.settings.setSetting("syncFile", syncfile)
    dbs = DBs(syncfile)
    if not dbs.isOpen():
        return
    # The selected file must carry the same dbname as the open master.
    sdbname = dbs.getConfig(u"dbname")
    dbs.close()
    if self.dbname != sdbname:
        warning(
            _("%s: Database name does not match current master name") %
            syncfile)
        return
    self.dlg = Output()
    synchronize(self.master, syncfile, self.dlg)
    self.dlg.done()
def idol_group_follower_num(self):
    """Record today's Twitter follower count for every official
    idol-group account found in the database."""
    db = DB('iddata')
    screen_names = self._select_idol_group_screen_name()
    # Each row: [idol_group_id, screen_name, follower count, date]
    follower_nums = list()
    for entry in screen_names:
        try:
            count = self.api.get_user(entry[1]).followers_count
            row = [entry[0], entry[1], count, self._date_now()]
        except tweepy.error.TweepError:
            # Unknown/deleted screen_name: record a count of zero.
            print("存在しないscreen_nameです : ", entry[0], entry[1])
            print("フォロワー数 : 0")
            row = [entry[0], entry[1], 0, self._date_now()]
        follower_nums.append(row)
    # Persist the collected counts.
    for follower_num in follower_nums:
        print(follower_num)
        db.insert(
            'INSERT INTO idol_group_twitter_follower_num (idol_group_id, screen_name, follower_num, recode_date) VALUES (%s,%s,%s,%s)',
            follower_num)
    db.close()
def create_table():
    """Create the base idol-group tables in the iddata database.

    (Database itself is created separately with "CREATE DATABASE iddata;")
    """
    db = DB('iddata')
    # One statement per table: group names, wiki URLs, excluded wiki
    # URLs, and Twitter URLs.
    statements = (
        "CREATE TABLE idol_group_name (idol_group_id integer PRIMARY KEY, idol_group_name varchar(255)) WITH OIDS;",
        "CREATE TABLE idol_group_wiki_url (idol_group_id integer PRIMARY KEY, url varchar(255)) WITH OIDS;",
        "CREATE TABLE not_idol_group_wiki_url (not_idol_group_name varchar(255), url varchar(255)) WITH OIDS;",
        "CREATE TABLE idol_group_twitter_url (idol_group_id integer, twitter_name varchar(255), url varchar(255), account_type varchar(255)) WITH OIDS;",
    )
    for sql in statements:
        db.execute_sql(sql)
    db.close()
def get(self):
    """Return the first row of t_user wrapped in a response dict."""
    database = DB()
    first_row = database.query('select * from t_user').fetchone()
    database.close()
    return {'response': first_row}
def idol_group_otavector(self, start_idol_group_id=0):
    """Build and store an "ota vector" for each idol group.

    For every official group account (starting at start_idol_group_id),
    sample up to 300 followers, collect the accounts those followers
    follow (up to 1000 each, max 100 followers), and store the
    multiplicity of each followed user_id in the database.
    """
    # Wall-clock start; elapsed time is printed at the end.
    t1 = time.time()
    db = DB('iddata')
    screen_names = self._select_idol_group_screen_name(start_idol_group_id)
    for screen_name in screen_names:
        print("idol_group_id : %d screen_name : %s のオタベクトルを取得します"
              % (screen_name[0], screen_name[1]))
        # Up to 300 follower user_ids of this group's account.
        follower_ids = tweepy.Cursor(self.api.followers_ids,
                                     screen_name=screen_name[1]).items(300)
        # user_ids followed by those followers.
        follower_follow_userids = list()
        # Number of followers whose friend list was actually fetched.
        count = 0
        for follower_id in follower_ids:
            follower = self.api.get_user(user_id=follower_id)
            # Skip protected accounts (friend lists are inaccessible).
            # BUG FIX: was 'follower.protected is True' — identity
            # comparison against a bool literal; use the truth value.
            if follower.protected:
                continue
            try:
                # Up to 1000 friends of this follower.
                follower_follow_userids.extend(
                    tweepy.Cursor(self.api.friends_ids,
                                  id=follower_id).items(1000))
                count += 1
            except tweepy.error.TweepError as e:
                print(e.reason)
            # Stop once 100 followers have been processed.
            if count >= 100:
                break
        # Followed user_id (key) -> multiplicity (value),
        # e.g. {901547836: 100, 21548745: 88, ...}
        otavector = collections.Counter(follower_follow_userids)
        print(otavector)
        # Persist the vector.
        for key, value in otavector.items():
            db.insert(
                'INSERT INTO idol_group_twitter_otavector (idol_group_id, screen_name, follow_userid, follow_num, recode_date) VALUES (%s,%s,%s,%s,%s)',
                [screen_name[0], screen_name[1], key, value,
                 self._date_now()])
        print("idol_group_id : %d screen_name : %s のフォロワー %d 人のオタベクトルを取得しました"
              % (screen_name[0], screen_name[1], count))
    t2 = time.time()
    elapsed_time = t2 - t1
    print(f"経過時間:{elapsed_time}\n")
    db.close()
def main():
    """Entry point: set up DB/driver/spreadsheet and refresh company info."""
    print('Main started')
    db = DB()
    cursor = db.getCursor()
    # driver = geckodriver.create_web_driver()
    driver = chrome_webdriver.create_web_driver()
    spreadsheet = CompanySpreadsheet()
    company_info.update_company_info()
    # NOTE: a large spreadsheet-driven insert/update/check pass over each
    # company posting (hashing posting URLs via hash_url_content and
    # writing status back to the sheet) is currently disabled here; the
    # cursor and spreadsheet objects above are kept for when it returns.
    db.commit()
    db.close()
    driver.quit()
class Restore:
    """Recreate a master database from a backup file (<dbname>_<time>.zgb)
    """
    def __init__(self, dbpath):
        # Open database file
        self.dbs = DB(dbpath)

    def getDbName(self):
        """Return the database name, as stored in the 'config' table.
        If something went wrong with opening the database, None will
        be returned.
        """
        try:
            return self.dbs.getConfig(u"dbname")
        except Exception:
            # Was a bare 'except:'; narrowed so KeyboardInterrupt etc.
            # are not swallowed. Any failure means "no usable backup".
            return None

    def close(self):
        if self.dbs.isOpen():
            self.dbs.close()
        self.dbs = None

    def setMaster(self, dbm):
        """Used by client objects to select the master database.
        """
        self.dbm = dbm

    def run(self, gui):
        """Given a fresh empty master database in self.dbm, fill it from
        the open backup database file (self.dbs).
        """
        try:
            # Create tables
            t = u"config"
            gui.report(_("Creating table '%s'") % t)
            self.makeTable(t)
            self.restoreTable(t)
            gui.report(_("Creating table 'data'"))
            self.dbm.createDataTable()
            self.restoreDataTable()
            self.dbm.createInterfaceTable()
            gui.report(_("Created interface table"))
            # Copy reports
            gui.report(_("Creating teacher report tables ..."))
            t = u"reports"
            # Get list of teachers from configuration data; tch[9:]
            # strips the leading "teachers/" prefix.
            for tch in self.dbs.listAllFiles(u"teachers/"):
                self.makeTable(teacher2user(tch[9:]))
            # Parse all class configuration data, to determine
            # ownership of reports
            reports = makeReportsDict(self.dbs)
            for id, value in self.dbs.read(u"SELECT * FROM %s" % t):
                table = teacher2user(reports[id])
                sqlins = u"INSERT INTO %s VALUES(?, ?)" % table
                self.dbm.send(sqlins, (id, value))
            gui.report(_("DONE!"))
        except Exception:
            # Was a bare 'except:'; narrowed as above.
            print_exc()
            message(_("Couldn't restore database"))
            self.dbm = None
            self.close()

    def makeTable(self, name):
        """Create a new database table with the given name and standard
        text fields 'id' and 'value'.
        """
        if not self.dbm.createIVTable(name):
            message(_("Couldn't create table '%1'"), (name,))
            # BUG FIX: was a bare 'raise' with no active exception (itself
            # an error at runtime); raise explicitly — run() catches it.
            raise RuntimeError("table creation failed: %s" % name)

    def restoreTable(self, name, name2=None):
        """Copy the contents of table name from the slave to table name2
        in the master. If name2 is not given use name. Both tables are
        assumed to have the 'standard' fields 'id' and 'value'.
        """
        if not name2:
            name2 = name
        sqlsel = u"SELECT * FROM %s" % name
        sqlins = u"INSERT INTO %s VALUES(?, ?)" % name2
        for row in self.dbs.read(sqlsel):
            self.dbm.send(sqlins, row)

    def restoreDataTable(self):
        """Copy the files from the table 'data' from master to slave.
        """
        for id in self.dbs.listIds(u"data"):
            self.dbm.putFile(id, self.dbs.getBFile(id))
class ExamineThread(QRunnable):
    """Background task: preprocess MRI series, run the three models and
    persist the examination to the local database."""

    def __init__(self, get_data, axial_images, coronal_images,
                 sagittal_images, name, age, blood, note, time):
        super(ExamineThread, self).__init__()
        self.signals = ExamineSignals()
        self.get_data = get_data
        # Prediction results default to 0 until the models have run.
        self.examine_result_abnormal = 0
        self.examine_result_acl = 0
        self.examine_result_men = 0
        self.axial_images = axial_images
        self.coronal_images = coronal_images
        self.sagittal_images = sagittal_images
        self.name = name
        self.age = age
        self.blood = blood
        self.note = note
        self.time = time
        self.db = DB()

    def get_text_result(self, result):
        # Anything that rounds to zero is 'negative'.
        return 'negative' if round(float(result)) == 0 else 'positive'

    def save_to_database(self):
        # Next free record id (0-based fallback when the DB is empty).
        try:
            id = self.db.get_last_id()
        except Exception:
            id = 0
        finally:
            id += 1
        axial_path = r'database\axial_{}.npy'.format(id)
        coronal_path = r'database\coronal_{}.npy'.format(id)
        sagittal_path = r'database\sagittal_{}.npy'.format(id)
        copyfile(r'temp\axial.npy', axial_path)
        copyfile(r'temp\coronal.npy', coronal_path)
        copyfile(r'temp\sagittal.npy', sagittal_path)
        self.db.insert(id=id, name=self.name, age=self.age,
                       blood=self.blood, axial=axial_path,
                       coronal=coronal_path, sagittal=sagittal_path,
                       abnormal=self.examine_result_abnormal,
                       acl=self.examine_result_acl,
                       men=self.examine_result_men,
                       note=str(self.note), time=self.time)
        self.db.close()

    def run(self):
        preprocessing(self.axial_images, self.coronal_images,
                      self.sagittal_images)
        try:
            self.examine_result_abnormal = Model(key='abnormal').get_prediction()
            self.examine_result_acl = Model(key='acl').get_prediction()
            self.examine_result_men = Model(key='men').get_prediction()
        except Exception:
            print("can't find models")
        self.get_data(self.examine_result_abnormal,
                      self.examine_result_acl,
                      self.examine_result_men)
        self.save_to_database()
        # NOTE(review): the DB is already closed in save_to_database();
        # this second close is kept as-is — confirm DB.close() is
        # idempotent.
        self.db.close()
        self.signals.finished.emit()
        return
def fetch_choices():
    """
    Fetch data for CHOICES

    Run in the init phase of the server: everything the templates need
    from the DB is fetched here.
    """
    choices = dict()

    # 'ph_product_name': distinct product names, sorted, with an
    # "all products" entry prepended.
    DB.connect()
    ph_pn_query = SkuEntry.select(SkuEntry.ph_product_name).distinct()
    DB.close()
    choices['ph_product_name'] = sorted(
        (r.ph_product_name, r.ph_product_name) for r in ph_pn_query)
    choices['ph_product_name'].insert(0, ("all", "ALL Products"))

    # 'columns': every model column that is not explicitly excluded.
    excluded = ()
    choices['columns'] = [
        {'id': c,
         'name': SkuEntry._meta.fields[c].verbose_name,
         'type': SkuEntry._meta.fields[c].db_field}
        for c in SkuEntry._meta.sorted_field_names
        if c not in excluded
    ]

    # All available operations over the data in the database.
    operators = {
        "equals": lambda a, b: a == b,
        "does not equal": lambda a, b: a != b,
        "contains": lambda a, b: a.contains(b),
        "does not contain": lambda a, b: ~(a.contains(b)),
        "greater than": lambda a, b: (a > b) & (a >= 0),
        "less then": lambda a, b: (a < b) & (a >= 0),
        "empty or not applicable": lambda a: (a == -1) | (a.is_null()),
        "applicable": lambda a: ~((a == -1) | (a.is_null())),
        "unlimited": lambda a: a == -2
    }

    # Valid (desired, in order) operators for each data type.
    applicable = dict()
    applicable["string"] = ("equals", "contains", "does not contain",
                            "empty or not applicable", "applicable")
    applicable["int"] = ("equals", "does not equal", "greater than",
                         "less then", "empty or not applicable",
                         "applicable", "unlimited")
    applicable["bool"] = ("equals", )

    # 'compare': per-datatype ordered mapping of operator name -> callable.
    choices['compare'] = dict()
    for dtype, ops in applicable.items():
        choices['compare'][dtype] = OrderedDict(
            [(o, operators[o]) for o in ops])

    # Compare operators that take no search value — the template hides
    # the value input field for these.
    choices['compare_no_value'] = ("empty or not applicable", "applicable",
                                   "unlimited")

    # Simple lookup table for data type mapping.
    choices['field_type'] = {'int': int, 'string': unicode, 'bool': bool}
    return choices
def synchronize(dbm, filepath, gui):
    """Synchronize the given file with the given (open) master database.

    The user file is renamed to a timestamped backup, newer report
    versions are copied into the master, and the user database is then
    recreated from the master.
    """
    # Current master time
    mtime = dbm.getTime()
    # Move the user database file to backup location.
    # BUG FIX: the pattern dot was unescaped ('.zga$' matches e.g. 'xzga').
    bfile = re.sub(r"\.zga$", "_%s.zga" % mtime, filepath)
    if (os.name == 'posix'):
        try:
            if Popen(["lsof", filepath], stdout=PIPE).communicate()[0]:
                warning(_("The database file (%1) is being used"
                        " by another application"), (filepath,))
                return
        except Exception:
            # lsof is missing or failed to run.
            warning(_("You should install 'lsof' so that usage"
                    " of the file can be tested"))
    try:
        os.rename(filepath, bfile)
    except Exception:
        # This trap only works on Windows. Linux will happily
        # allow you to delete a file while another program is
        # working on it! 'lsof filename' (see above) should be
        # a way to avoid that.
        warning(_("Couldn't rename the database file (%1).\n"
                "Is it being used by another application?"), (filepath,))
        return
    gui.report(_("Database file renamed to %s") % bfile)
    dbs = DBs(bfile)
    if not dbs.isOpen():
        # Couldn't open the backup: put the original name back.
        os.rename(bfile, filepath)
        return
    # Teacher's report table
    teacher = dbs.getConfig(u"me")
    mtb = teacher2user(teacher)
    if mtb not in dbm.getTeacherTables():
        warning(_("%1: Owning teacher (%2) not known to master database"),
                (filepath, teacher))
        return
    gui.report(_("Copying reports from user database to master"))
    # Counter for transferred reports
    rcount = 0
    # Creation time of slave db, i.e. last sync time
    ctime = dbs.getConfig(u"createtime")
    # Get all updated reports
    for id, data in dbs.read(u"SELECT * FROM reports"):
        # Split off the version data
        dver, drep = data.split(u"\n", 1)
        # Get the master version data
        try:
            mver = dbm.readValue(mtb, id).split(u"\n", 1)[0]
        except Exception:
            gui.report(_("Invalid report, not updated : %s") % id)
            # BUG FIX: without this 'continue', 'mver' is unbound below
            # and the loop would die with a NameError.
            continue
        if (mver > ctime):
            # Master changed since this client last synced: ask the user.
            if confirmationDialog(_("Report update problem"),
                    _("Master version of report has been updated"
                    " since this client was last synchronized.\n"
                    " Replace that version of '%s'?") % id, False):
                gui.report(_("Revised master version of report '%s'"
                        " will be overwritten") % id)
            else:
                gui.report(_("Revised master version of report '%s'"
                        " not overwritten") % id)
                continue
        elif (dver <= mver):
            # Only do anything if the local version is newer than the
            # the master version
            continue
        if (dver > mtime):
            # The new version has a time stamp later than the
            # current time on the master, adjust it
            if dver.endswith(u"$"):
                dver = mtime + u"$"
            else:
                dver = mtime
        try:
            sqlupd = u"UPDATE %s SET value = ? WHERE id = ?" % mtb
            dbm.send(sqlupd, (dver + u"\n" + drep, id))
            rcount += 1
        except Exception:
            gui.report(_("Couldn't update report '%s'") % id)
    gui.report(_("Transferred %d reports") % rcount)
    # Close the user database
    dbs.close()
    # Remember the latest sync time
    if rcount:
        dbm.send(u"""UPDATE interface SET value = ?
                WHERE id = 'lastsynctime'""", (mtime,))
    # Recreate the user database
    gui.report(_("Recreating the user database"))
    recreate(dbm, filepath, teacher, gui)
class SyncPanel:
    """There may be only one instance of this class, because of the slot
    declarations.
    """
    def __init__(self, settings):
        self.settings = settings
        # connect slots
        slot("sp_newpw", self.slot_newpw)
        slot("sp_sync", self.slot_sync)
        slot("sp_browse", self.slot_fileBrowser)

    def init(self, gui, filepath, forceDialog=False):
        """Select and open the user database file, then show its info.

        Tries the path remembered in settings first (once), falling back
        to a file dialog until a file opens or the user cancels.
        """
        self.gui = gui
        self.filepath = filepath
        # Get the path to the user database file
        self.dbs = None
        dbDir = None
        dbPath = self.settings.getSetting("dbFile")
        while True:
            if not self.filepath:
                if dbPath:
                    dbd = os.path.dirname(dbPath)
                    if os.path.isdir(dbd):
                        dbDir = dbd
                    if os.path.isfile(dbPath):
                        self.filepath = dbPath
                    # Only try the remembered path once.
                    dbPath = None
                if forceDialog or not self.filepath:
                    self.filepath = getFile(_("User Database File"),
                            startDir=dbDir,
                            defaultSuffix=u".zga",
                            filter=(_("Database Files"), (u"*.zga",)))
                    if not self.filepath:
                        return
            self.dbs = DBs(self.filepath)
            if self.dbs.isOpen():
                break
            # Open failed: remember the directory and ask again.
            dbDir = os.path.dirname(self.filepath)
            self.filepath = None
        self.settings.setSetting("dbFile", self.filepath)
        # Set window title
        self.gui.setTitle(_("Synchronize %s") % self.filepath)
        self.dbm = None
        # Get the default host name from the 'base' data
        self.dbhost = self.dbs.baseDict[u"masterHost"]
        # Get information from the 'config' table
        self.dbname = self.dbs.getConfig(u"dbname")
        teacher = self.dbs.getConfig(u"me")
        if not teacher:
            error(_("'%s' is not a teacher's database file") % self.filepath)
        self.dbuser = teacher2user(teacher)
        # Close user database file
        self.closeFile()
        # set gui lineEdits
        self.gui.setDBinfo(self.dbhost, self.dbname, self.dbuser,
                self.filepath)

    def slot_fileBrowser(self, arg):
        self.init(self.gui, None, True)

    def slot_newpw(self, arg):
        """Change the master-database password for the current user."""
        if not self.connect():
            return
        pw = getPassword()
        if pw:
            try:
                self.dbm.setPassword(self.dbuser, pw)
                message(_("Password changed"))
            except Exception:
                # Was a bare 'except:'; narrowed so KeyboardInterrupt
                # etc. are not swallowed.
                message(_("Couldn't change password"))
        self.disconnect()

    def slot_sync(self, arg):
        """Synchronize the user file with the master, unless finalized."""
        if self.connect():
            if (self.dbm.readValue(u"config", u"finalized") == u""):
                self.dlg = Output()
                synchronize(self.dbm, self.filepath, self.dlg)
                self.dlg.done()
            else:
                warning(_("This database is finalized, you can't access it"))
            # Disconnect from master database
            self.disconnect()

    def closeFile(self):
        """Close the user database in self.dbs
        """
        if self.dbs:
            self.dbs.close()
            self.dbs = None

    def connect(self):
        """Connect to master db.
        """
        host = self.gui.getDBhost()
        pw = getPw(host, self.dbname, self.dbuser)
        if pw is None:
            # 'None' means the user cancelled; '' is a valid password.
            return False
        cData = {u"host": host, u"db": self.dbname,
                 u"user": self.dbuser, u"pw": pw}
        db = DBm(cData)
        if not db.isOpen():
            warning(_("Couldn't open master database"))
            return False
        self.dbm = db
        return True

    def disconnect(self):
        """Disconnect from master db.
        """
        if self.dbm:
            self.dbm.close()
            self.dbm = None
def handle_text(message):
    """Telegram bot text handler: look up reviews for a license plate and
    drive the multi-step "add review" dialog.

    State lives in module-level globals:
      config._REQUEST_TYPE -- 0: expecting a plate number; 1/2: add-review flow
      config._REQUEST_STEP -- current question number within the add-review flow
      config.LICENSEPLATE  -- plate currently being reviewed
      FDC                  -- dict accumulating the answers (carmodel, comment,
                              driver, grade)
    """
    mess=message.text
    # "Да" (Yes): user agreed to add a review — start the questionnaire
    if message.text==("Да") and (config._REQUEST_TYPE==1 or config._REQUEST_TYPE==2):
        add_new_lp(message)
        config._REQUEST_STEP = 1
        config._REQUEST_TYPE = 2
        #return
    # "Нет" (No): user declined — go back to asking for a plate number
    if message.text==("Нет") and (config._REQUEST_TYPE==1 or config._REQUEST_TYPE==2):
        request_lp(message)
        return
    # "Отмена" (Cancel): abort the questionnaire mid-way
    if message.text==("Отмена") and (config._REQUEST_TYPE==2):
        config._REQUEST_TYPE=0
        request_lp(message)
        return
    # "Пропустить" (Skip): record a "-" placeholder for the current question
    if mess==("Пропустить") and (config._REQUEST_TYPE==2):
        #config._REQUEST_STEP=config._REQUEST_STEP+1
        mess="-"
        #return
    # State 0: the message is expected to be a license plate — validate it
    # and show any existing reviews.
    if (config._REQUEST_TYPE==0):
        m=convert_licenseplate(message.text).upper()
        if not check_licenseplate_len(m):
            bot.send_message(message.chat.id, "Недопустимая длина номера автомобиля. Пожалуйста укажите правильный номер.")
            return
        if not check_licenseplate_chars(m):
            bot.send_message(message.chat.id, "В номере указаны неверные символы. Пожалуйста укажите правильный номер.")
            return
        config.LICENSEPLATE=m
        result_list=get_info_lp(m)
        if len(result_list)<1:
            # No reviews yet — offer to add one (switches to state 2)
            config._REQUEST_TYPE=2
            bot.send_message(message.chat.id, "Информации по данному номеру нет. Хотите добавить?",reply_markup=create_keyboard())
        else:
            # Print every stored review; item layout (assumed from the
            # indexing below): 0=date, 1=car model, 2=comment, 3=driver,
            # 4=grade — TODO confirm against DB.save_comment/get_info_lp.
            bot.send_message(message.chat.id, "Отзывы по номеру {0}".format(m))
            for item in result_list:
                text = "Дата: {0}.{1}.{2}".format(item[0].day, item[0].month, item[0].year)
                if item[1]!=None:
                    text+="\nАвтомобиль: {0}".format(item[1])
                if item[2] != None:
                    text+="\nКомментарий: {0}".format(item[2])
                if item[3] != None:
                    text+="\nВодитель: {0}".format(item[3])
                if item[4] != None:
                    text+="\nОценка: {0}".format(item[4])
                bot.send_message(message.chat.id, text)
            config._REQUEST_TYPE=2
            bot.send_message(message.chat.id, "Хотите добавить свой отзыв?",reply_markup=create_keyboard())
    # State 2: questionnaire — each step stores the previous answer and asks
    # the next question. Step 5 persists the collected review and resets.
    if (config._REQUEST_TYPE==2):
        if config._REQUEST_STEP==1:
            config._REQUEST_STEP=2
            bot.send_message(message.chat.id, "Марка и модель автомобиля", reply_markup=create_keyboard("skip"))
            return
        if config._REQUEST_STEP==2:
            FDC['carmodel']=mess
            config._REQUEST_STEP=3
            bot.send_message(message.chat.id, "Ваш комментарий о работе", reply_markup=create_keyboard('null'))
            return
        if config._REQUEST_STEP==3:
            FDC['comment']=mess
            config._REQUEST_STEP=4
            bot.send_message(message.chat.id, "Как зовут водителя", reply_markup=create_keyboard("skip"))
            return
        if config._REQUEST_STEP==4:
            FDC['driver']=mess
            config._REQUEST_STEP=5
            bot.send_message(message.chat.id, "Ваша оценка (1-5)", reply_markup=create_keyboard("skip"))
            return
        if config._REQUEST_STEP==5:
            FDC['grade']=num(mess)
            # Persist the finished review and reset the dialog state
            db_worker = DB()
            db_worker.save_comment(message.chat.id,config.LICENSEPLATE,FDC['carmodel'],FDC['comment'],FDC['driver'],FDC['grade'])
            db_worker.close()
            config._REQUEST_STEP = 0
            config._REQUEST_TYPE =0
            bot.send_message(message.chat.id, "Спасибо за Ваш отзыв", reply_markup=create_keyboard("null"))
            request_lp(message)
            return
    # TODO: validate message length
    # TODO: validate message content
    return
    pass
if __name__ == '__main__': csFile = 'cs.csv' langFile = 'lang.csv' rootsFile = 'roots.csv' testdb = DB() flag = testdb.createDatabase( ) # return 1 if database is newly created, 0 if database already created testdb.importData(csFile, flag) testdb.importData(langFile, flag) testdb.importData(rootsFile, flag) l1 = testdb.getSubject() l2 = testdb.getLevel("CMPT") l3 = testdb.getCourse("CMPT", "100") l4 = testdb.getTitle("CMPT", "101") l5 = testdb.getInst("CMPT", "101") print(l1) # print(l1[1][0]) # print(l1[1]) print(l2) # print(l2[1][0]) # print(l2[1]) print(l3) # print(l3[1][0]) # print(l3[1]) print(l4) print(l5) testdb.close()
class Restore:
    """Recreate a master database from a backup file (<dbname>_<time>.zgb)
    """
    def __init__(self, dbpath):
        # Open database file (the backup to restore from)
        self.dbs = DB(dbpath)

    def getDbName(self):
        """Return the database name, as stored in the 'config' table.
        If something went wrong with opening the database, None will
        be returned.
        """
        try:
            return self.dbs.getConfig(u"dbname")
        except:
            return None

    def close(self):
        """Close the backup database file and drop the reference."""
        if self.dbs.isOpen():
            self.dbs.close()
        self.dbs = None

    def setMaster(self, dbm):
        """Used by client objects to select the master database.
        """
        self.dbm = dbm

    def run(self, gui):
        """Given a fresh empty master database in self.dbm, fill it from
        the open backup database file (self.dbs).

        gui -- progress reporter; each stage is announced via gui.report().
        On any failure the traceback is printed, the user is notified, and
        the master reference is dropped; the backup file is always closed.
        """
        try:
            # Create tables
            t = u"config"
            gui.report(_("Creating table '%s'") % t)
            self.makeTable(t)
            self.restoreTable(t)
            gui.report(_("Creating table 'data'"))
            self.dbm.createDataTable()
            self.restoreDataTable()
            self.dbm.createInterfaceTable()
            gui.report(_("Created interface table"))
            # Copy reports
            gui.report(_("Creating teacher report tables ..."))
            t = u"reports"
            # Get list of teachers from configuration data
            # (tch[9:] strips the "teachers/" prefix from the file id)
            for tch in self.dbs.listAllFiles(u"teachers/"):
                self.makeTable(teacher2user(tch[9:]))
            # Parse all class configuration data, to determine
            # ownership of reports
            reports = makeReportsDict(self.dbs)
            for id, value in self.dbs.read(u"SELECT * FROM %s" % t):
                # Each report row goes into the table of the teacher
                # that owns it
                table = teacher2user(reports[id])
                sqlins = u"INSERT INTO %s VALUES(?, ?)" % table
                self.dbm.send(sqlins, (id, value))
            #********* This was just an idea, but it may never be used.
            # # Copy comments
            # gui.report(_("Copying comments ..."))
            # t = "comments"
            # self.makeTable(t)
            # self.restoreTable(t)
            gui.report(_("DONE!"))
        except:
            print_exc()
            message(_("Couldn't restore database"))
        self.dbm = None
        self.close()

    def makeTable(self, name):
        """Create a new database table with the given name and standard
        text fields 'id' and 'value'.
        """
        if not self.dbm.createIVTable(name):
            # NOTE(review): '%1' looks like a Qt-style placeholder expanded
            # by message() from the tuple argument — confirm its signature.
            message(_("Couldn't create table '%1'"), (name, ))
            # NOTE(review): bare 'raise' with no active exception raises
            # RuntimeError; run()'s broad except catches it, so the net
            # effect is to abort the restore.
            raise

    def restoreTable(self, name, name2=None):
        """Copy the contents of table name from the slave to table name2
        in the master. If name2 is not given use name.
        Both tables are assumed to have the 'standard' fields 'id' and
        'value'.
        """
        if not name2:
            name2 = name
        sqlsel = u"SELECT * FROM %s" % name
        sqlins = u"INSERT INTO %s VALUES(?, ?)" % name2
        for row in self.dbs.read(sqlsel):
            self.dbm.send(sqlins, row)

    def restoreDataTable(self):
        """Copy the files in the 'data' table from the backup (self.dbs)
        into the master (self.dbm).
        """
        for id in self.dbs.listIds(u"data"):
            self.dbm.putFile(id, self.dbs.getBFile(id))
def hash_pwd(self, username, pwd): hashed_pwd = hashpw(pwd, gensalt()) pg = DB() pg.connect() pg.add_user(username, hashed_pwd) pg.close()