def end(username, answers_list, game_dict, game_type):
    """
    Takes care of the game end logic: computes the final score and adds the
    game to the DB (unless running in view-testing mode).

    :param username: name of the player whose game ended
    :param answers_list: answers collected during the game
    :param game_dict: game data used for score calculation
    :param game_type: view-level game type code
    :return: the final score, or Conventions.MOCK_GAME_SCORE when TESTING_VIEW is set
    """
    if DebuggingConventions.DEBUGGING_GAME_END:
        # fixed the missing ": " after "answers_list" in the debug output
        print(
            "Game ended with the following stats:\n\tusername: {}\n\tanswers_list: {}\n\tgame_type: {}"
            .format(username, answers_list, game_type))

    # resolve the game-type string once instead of twice
    game_type_string = Conventions.GAME_TYPES_CODE_FROM_VIEW_TO_STRING[game_type]
    final_score = calculate_final_score(answers_list, game_dict, game_type_string)

    if DebuggingConventions.TESTING_VIEW:
        # view tests never touch the DB
        return Conventions.MOCK_GAME_SCORE

    # get user ID and persist the finished game
    user_id = load_user_id_only_by_name(username)
    Queries.add_game(game_type_string, final_score, user_id)
    return final_score
def get_places(a):
    """
    Endpoint used as an interface for retrieving datastore Place objects.

    :param a: selector string -- 'nearby', 'user' or 'city'
    :return: the matching Place query result, or an invalid-argument dict
    """
    if a == 'nearby':
        import Queries
        return Queries.get_nearby_places(request.query)
    if a == 'user':
        import Queries
        return Queries.get_user_places(request.json['user_id'])
    if a == "city":
        import Queries
        # NOTE(review): the 'city' implementation is commented out, so this
        # branch currently falls through to the invalid-argument response --
        # confirm whether get_places_by_city should be re-enabled
        # return Queries.get_places_by_city(request.query)
    return {'status_code': 0, 'Invalid argument: ': str(a)}
def add_preferences_to_user(username, properties_dict):
    """
    Persist a set of preferences for the given user.

    :param username: name used to resolve the user's DB id
    :param properties_dict: preference properties to store
    :return: None
    """
    resolved_id = load_user_id_only_by_name(username)
    if DebuggingConventions.GENERALLY_DEBUGGING_GAME_LOGIC:
        print(properties_dict)
    Queries.add_preferences(resolved_id, properties_dict)
def boot():
    """
    Restore crawler state from the DB: links flagged as visited go into
    `crawled`, the rest into `queue`; pending rows are then purged.
    """
    print("booting")
    for link_row in Queries.get_all_links(Crawler.cursor):
        # column 3 flags "visited"; column 1 holds the URL
        target = crawled if link_row[3] == 1 else queue
        target.add(link_row[1])
    Queries.delete_all_not_visited(Crawler.cursor)
    Crawler.db.commit()
    print(len(crawled))
    print(len(queue))
    print("finish booting")
def main(host, user, passwd, name_db, read_file_path1, read_file_path2, save_path_file):
    """
    Rebuild the database schema, load data from the two input files, then run
    every predefined query, printing and saving each result.

    :param host: DB host
    :param user: DB user
    :param passwd: DB password
    :param name_db: database name
    :param read_file_path1: first input data file
    :param read_file_path2: second input data file
    :param save_path_file: file the query results are appended to
    """
    # Queries.create_db(host, user, passwd, name_db)
    with Connector(host, user, passwd, name_db) as db:
        db.lst_str(Queries.drop_tables())
        db.lst_str(Queries.create_tables())
        insert_in_db(db, read_file_path1, read_file_path2)
        db.commit()
        list_of_queries = Queries.list_of_queries()
        print(list_of_queries)
        for query_text in list_of_queries:
            # run each query exactly once and reuse the result
            # (the original executed every query twice: once to print, once to save)
            result = db.query(query_text)
            print(result)
            save_file(result, save_path_file)
def graphs():
    """
    Handles /getgraphs: queries the dataset for linksets and lenses and
    renders graphs_list.html with both result lists (uris and labels).
    """
    # build and run the two queries against the endpoint
    linksets = sparql(Qry.get_graph_linkset(), strip=True)
    lenses = sparql(Qry.get_graph_lens(), strip=True)
    # send back results
    return render_template('graphs_list.html', linksets=linksets, lenses=lenses)
def movies():
    """
    Render the movie list, optionally filtered by tag(s) or production year
    and optionally sorted.  Filter precedence: tag > year > sort-only > all.

    :return: rendered body.html with the resulting dataframe as `posts`
    """
    sort = Queries.get_sort(request.args.get("sort"))
    year = request.args.getlist("year")
    tag = request.args.getlist("tag")
    if tag:
        # pick the statement matching the number of tags, then append the
        # optional ORDER BY once -- the original ran the unsorted one-tag
        # query first and then re-ran it with the sort clause (two round trips)
        base = Queries.ONE_TAG if len(tag) == 1 else Queries.TWO_TAGS
        data = pd.read_sql_query(base + sort if sort else base, engine, params=tag)
    elif year:
        data = pd.read_sql_query(Queries.GET_PRODUCTION_YEAR, engine, params=year)
    elif sort:
        data = pd.read_sql_query(Queries.ALL_MOVIES + sort, engine)
    else:
        data = pd.read_sql_query(Queries.ALL_MOVIES, engine)
    return render_template("body.html", posts=data)
def generate_genre_question(raw_artists_dict):
    """Build a "what is the artist's genre?" question from the raw artists data."""
    # one LIST of genres per artist name
    genre_lists = [
        Queries.get_genre_by_artist(artist[NAME_OFF_SET])
        for artist in raw_artists_dict['Artist']
    ]
    # randomly pick one genre out of each artist's genre list
    picked_genres = [
        genres[random.randint(0, len(genres) - 1)] for genres in genre_lists
    ]
    # bail out on missing or insufficient answers
    if none_values_exist_in_answer_list(picked_genres):
        return None
    if answers_list_empty_or_less_than_three_songs(picked_genres):
        return None
    # the first entry belongs to the playing artist, so it is the right answer
    return build_question_dict("What is the artist's genre?", picked_genres,
                               picked_genres[0])
def correspondences():
    """
    Handles /getcorrespondences: fetches every correspondence in the graph
    identified by the request's `uri` argument and renders
    correspondences_list.html.  Expected input: uri, label (for the graph).
    """
    args = request.args
    graph_uri = args.get('uri', '')
    # build and run the correspondences query against the endpoint
    correspondences = sparql(Qry.get_correspondences(graph_uri), strip=True)
    return render_template('correspondences_list.html',
                           operator=args.get('operator', ''),
                           graph_menu=args.get('graph_menu', ''),
                           correspondences=correspondences,
                           graph_label=args.get('label', ''),
                           graph_triples=args.get('graph_triples', ''),
                           alignsMechanism=args.get('alignsMechanism', ''))
def dataDetails():
    """
    Handles /getdatadetails: fetches the description (all predicate/object
    pairs) of `resource_uri` within the graph `dataset_uri` and renders
    datadetails_list.html.  Expected input: resource_uri, dataset_uri.
    """
    resource_uri = request.args.get('resource_uri', '')
    dataset_uri = request.args.get('dataset_uri', '')
    query = Qry.get_resource_description(dataset_uri, resource_uri,
                                         predicate=None)
    # debug trace of the generated query; was a Python-2-only print statement
    # with a "QEURY" typo -- this form works on both Python 2 and 3
    print("\n\nQUERY: {0}".format(query))
    dataDetails = sparql(query, strip=True)
    return render_template('datadetails_list.html', dataDetails=dataDetails)
def nmf(profile_similarity):
    """
    Factorize the profile-similarity matrix with NMF, choosing the number of
    components (1..23) that yields the lowest reconstruction error, and write
    the resulting W matrix (indexed by active user ids) to 'nmf_W.csv'.

    :param profile_similarity: user-profile similarity matrix
    :return: DataFrame of the W factor, indexed by user_id
    """
    # find the component count with the lowest reconstruction error
    best_err = float('inf')  # was a magic 99999999 sentinel
    best_i = 0
    for i in range(1, 24):
        model = NMF(n_components=i, init='random', alpha=0.01, random_state=0)
        # only the error is needed during the search, so fit() suffices
        model.fit(profile_similarity)
        if model.reconstruction_err_ < best_err:
            best_err = model.reconstruction_err_
            best_i = i
    # refit with the best component count to get recommendations for active users
    model = NMF(n_components=best_i, init='random', alpha=0.01, random_state=0)
    W = model.fit_transform(profile_similarity)
    host_id = qu.get_active_users()
    df_W = pd.DataFrame(W)
    # write W matrix to file, keyed by the active users' ids
    df_W.index = host_id['user_id']
    df_W.to_csv('nmf_W.csv')
    return df_W
def searchBtnPressed(self):
    """
    Commodity-search button handler: reads the search parameters from the UI,
    resolves the current system, runs the commodity query and refreshes the
    result model.  NOTE(review): near-duplicate of another searchBtnPressed
    in this file -- consider consolidating.
    """
    #print ("searchBtnPressed")
    #self.searchBtn.setText('- - - - S e a r c h i n g - - - -') # unfortunately these never show with synchronous ui
    currentSystem = self.currentSystemCombo.currentText()
    maxDistance = float(self.maxDistanceSpinBox.value())
    jumprange = float(self.mainwindow.jumpRangeSpinBox.value())
    minPadSize = int(self.mainwindow.minPadSizeCombo.currentIndex())
    importexport=int(self.importComboBox.currentIndex())
    commodityidx=int(self.commodityCombobox.currentIndex())
    if commodityidx==-1:
        # no commodity selected in the combo box
        return
    commodityid=self.commoditylist[commodityidx].getId()
    systems = self.db.getSystemByName(currentSystem)
    if len(systems) == 0:
        print("system not found!")
        return
    # use the first name match as the search origin
    self.currentSystem = systems[0]
    pos = self.currentSystem.getPosition()
    print("Querying database...")
    self.result = Queries.queryCommodities(self.db, pos[0], pos[1], pos[2], maxDistance, minPadSize,jumprange ,importexport,commodityid)
    # NOTE(review): "refeshData" looks misspelled but must match the model's method name
    self.model.refeshData()
    print("Done!")
def find_similar_hosts(cold_user_id, profile_data, k):
    """
    Find the hosts most similar (by profile cosine similarity) to a cold
    user, restricted to the geographically closest active hosts.

    :param cold_user_id: id of the user without interaction history
    :param profile_data: DataFrame of user profiles, one row per user_id
    :param k: number of close hosts to consider (was ignored: 10 was hard-coded)
    :return: list of host user_ids, most similar first
    """
    active_user_data = profile_data[profile_data['user_id'].isin(
        list(qu.get_active_users()['user_id']))]
    user_data = profile_data[profile_data['user_id'] == cold_user_id]
    host_data = profile_data[profile_data['user_id'] != cold_user_id]
    # BUG FIX: honour the k parameter instead of the hard-coded 10
    list_close_hosts = list(closest_hosts(active_user_data, host_data, k))
    store = dict()
    # use profile similarity to find the most similar among users in close proximity
    user = np.array(user_data.values.tolist()).reshape(1, -1)
    for j in list_close_hosts:
        h = np.array(host_data.iloc[j].values.tolist()).reshape(1, -1)
        similarity = cosine_similarity(user, h)
        # NOTE(review): iloc[j] is positional while profile_data['user_id'][j]
        # is label-based -- confirm the indices line up
        store[profile_data['user_id'][j]] = similarity[0][0]
    # BUG FIX: sort ids by similarity directly; the original inverted the dict
    # (similarity -> id), silently dropping hosts with tied similarity values
    return sorted(store, key=store.get, reverse=True)
def get_user_actions():
    """
    Merge the seen/liked/messaged host tables into a single dataframe with
    flag_s / flag_l / flag_m columns, outer-joined on (user_id, host_id).
    """
    seen = qu.hosts_seen()
    liked = qu.hosts_liked()
    messaged = qu.hosts_messaged()
    # first merge: seen + liked; pandas suffixes the clashing 'flag' columns
    merged = pd.merge(seen, liked, how='outer', on=['user_id', 'host_id'])
    merged.rename(columns={'flag_x': 'flag_s', 'flag_y': 'flag_l'},
                  inplace=True)
    # second merge: add the messaged flag
    merged = pd.merge(merged, messaged, how='outer', on=['user_id', 'host_id'])
    merged.rename(columns={'flag': 'flag_m'}, inplace=True)
    return merged
def findShortestRoute(airportList, translator):
    """
    Interactively ask for an origin and a destination city, then prepare the
    list of unvisited airport codes for the shortest-route search.

    :param airportList: mapping of airport code -> airport object
    :param translator: collection of valid city names
    """
    query = Queries.Queries(airportList, translator)
    start = ""
    finish = ""
    print("City List:")
    printCityList(query.getCityList())
    isValidCity = False
    while not isValidCity:
        print("Select a city for the origin or q to go back:")
        start = raw_input()
        isValidCity = start in translator
        # BUG FIX: the original tested the `input` builtin, so "q" never quit
        if start == "q":
            return
    isValidCity = False
    while not isValidCity:
        print("Select a city for the finish or q to go back:")
        finish = raw_input()
        isValidCity = finish in translator
        # BUG FIX: same builtin-`input` comparison bug as the origin loop
        if finish == "q":
            return
    # NOTE(review): `unvisited` is built but not used in this view --
    # presumably consumed by a continuation of the algorithm; confirm
    unvisited = []
    for airport in airportList.itervalues():
        unvisited.append(airport.code)
def generate_similar_artists_question(raw_artists_dict):
    """Build a "most similar artist" question for the currently playing artist."""
    playing_artist = raw_artists_dict['Artist'][PLAYING_ARTIST_OFF_SET][
        NAME_OFF_SET]
    # candidates similar to the playing artist; pick one at random as the answer
    candidates = Queries.get_similar_artist(playing_artist)
    chosen = candidates[random.randint(0, len(candidates) - 1)]
    if DEBUGGING_QUESTIONS_GENERATING:
        print("Similar artist picked is: {}".format(chosen))
    # wrong answers: every artist name except the playing artist's own...
    answers = [artist[NAME_OFF_SET] for artist in raw_artists_dict['Artist']][1:]
    # ...plus the chosen similar artist as the correct one
    answers.append(chosen)
    if none_values_exist_in_answer_list(answers):
        return None
    if answers_list_empty_or_less_than_three_songs(answers):
        return None
    return build_question_dict("Who is the most similar artist to this artist?",
                               answers, chosen)
def recommend(df_ratings, user_id, num_recommendations):
    """
    Recommend hosts for a user via rank-1 truncated SVD of the ratings matrix.

    :param df_ratings: ratings dataframe with user_id / host_id / rating columns
    :param user_id: 1-based id of the user to recommend for
    :param num_recommendations: how many unseen hosts to surface
    :return: the user's full sorted prediction column (one-column DataFrame)
    """
    # get ratings matrix
    mat_ratings = atr.rating_matrix(df_ratings)
    # SVD on the mean-centred ratings
    R = mat_ratings.to_numpy()
    mean_ratings = np.mean(R, axis = 1)
    R_nomean = R - mean_ratings.reshape(-1,1)
    # NOTE(review): k=1 keeps only the top singular vector -- confirm intended
    U, sigma, Vt = svds(R_nomean, k=1)
    # get diagonal matrix of values returned
    sigma = np.diag(sigma)
    # reconstruct predictions and add the per-user means back
    pred = np.dot(np.dot(U,sigma), Vt) + mean_ratings.reshape(-1,1)
    df_predictions = pd.DataFrame(pred, columns = mat_ratings.columns)
    # sort predictions. Account for user_id, which starts at 1
    row_num = user_id - 1
    df_sorted_pred = df_predictions.iloc[row_num].sort_values(ascending=False).to_frame()
    df_hosts = qu.get_host_ids()
    user_data = df_ratings[df_ratings.user_id ==(user_id)]
    # dataframe consisting of all of specified user's ratings, host id, and host name
    user_full = user_data.merge(df_hosts, how='left', left_on='host_id', right_on='host_id').sort_values(['rating'], ascending=False)
    # build the list of sorted recommended hosts that the user has not seen
    rec = (df_hosts[~df_hosts['host_id'].isin(user_full['host_id'])])
    rec = rec.merge(pd.DataFrame(df_sorted_pred).reset_index(), how = 'left', left_on='host_id', right_on='host_id').rename(columns = {row_num : 'results'})
    rec = rec.sort_values('results', ascending=False).iloc[:num_recommendations,:]
    print(rec)
    # NOTE(review): `rec` (the trimmed recommendation table) is only printed,
    # while the full sorted prediction column is returned -- confirm which one
    # callers actually expect
    return df_sorted_pred
def query(airports, translator):
    """
    Interactive query loop: show the city list, read a city and a menu
    choice, then print the requested attribute; 'q' at either prompt exits.
    """
    q = Queries.Queries(airports, translator)
    while True:
        # print out a city list for the users to select from
        print("City List:")
        print(q.getCityList())
        print("Select a city for querying on or q to go back:")
        city = raw_input()
        if city == "q":
            return
        printQueryMenu()
        choice = raw_input()
        if choice == "q":
            return
        # menu number -> query method (replaces the if/elif ladder)
        actions = {
            "1": q.getCode,
            "2": q.getCountry,
            "3": q.getContinent,
            "4": q.getTimeZone,
            "5": q.getCoordinates,
            "6": q.getPopulation,
            "7": q.getRegion,
            "8": q.getCityFlights,
        }
        action = actions.get(choice)
        if action is not None:
            print(action(city))
def details():
    """
    Handles /getdetails: for a pair of aligned resources, fetches the values
    of the aligned predicates and renders details_list.html.
    Expected input: sub_uri, obj_uri, subjectTarget, objectTarget,
    alignsSubjects, alignsObjects.
    """
    # singleton_uri = request.args.get('uri', '')
    args = request.args
    sub_uri = args.get('sub_uri', '')
    obj_uri = args.get('obj_uri', '')
    alignsSubjects = args.get('alignsSubjects', '')
    alignsObjects = args.get('alignsObjects', '')
    # for each dataset get values for the aligned properties
    details = sparql(
        Qry.get_aligned_predicate_value(sub_uri, obj_uri, alignsSubjects,
                                        alignsObjects),
        strip=True)
    # return the result
    return render_template(
        'details_list.html',
        details=details,
        # pred_uri = singleton_uri,
        sub_uri=sub_uri,
        obj_uri=obj_uri,
        subjectTarget=args.get('subjectTarget', ''),
        objectTarget=args.get('objectTarget', ''),
        alignsSubjects=get_URI_local_name(alignsSubjects),
        alignsObjects=get_URI_local_name(alignsObjects))
def searchBtnPressed(self):
    """
    Search-button handler: collect the query parameters from the UI widgets,
    resolve the origin system and run the commodity query.
    """
    origin_name = self.currentSystemCombo.currentText()
    max_distance = float(self.maxDistanceSpinBox.value())
    jump_range = float(self.mainwindow.jumpRangeSpinBox.value())
    min_pad_size = int(self.mainwindow.minPadSizeCombo.currentIndex())
    direction = int(self.importComboBox.currentIndex())
    selected = int(self.commodityCombobox.currentIndex())
    if selected == -1:
        # no commodity chosen
        return
    commodity_id = self.commoditylist[selected].getId()
    matches = self.db.getSystemByName(origin_name)
    if len(matches) == 0:
        print("system not found!")
        return
    # first name match is the search origin
    self.currentSystem = matches[0]
    pos = self.currentSystem.getPosition()
    print("Querying database...")
    self.result = Queries.queryCommodities(self.db, pos[0], pos[1], pos[2],
                                           max_distance, min_pad_size,
                                           jump_range, direction, commodity_id)
    self.model.refeshData()
    print("Done!")
def load_user_id_only_by_name(username):
    """
    Look up a user's DB id from the username alone.

    :param username: name to look up (coerced to str before the query)
    :return: the user's id as stored in the DB
    """
    name = str(username)
    return Queries.get_user_id_by_name(name)
def get_all_preferences():
    """
    Fetch every preference (genre) a user may choose from.

    :return: all genres known to the DB
    """
    return Queries.get_all_genres()
def load_user_from_data_base(username, password):
    """
    Resolve a user's DB id from credentials.

    :param username: login name (coerced to str)
    :param password: login password (coerced to str)
    :return: the matching user id from the DB
    """
    name = str(username)
    secret = str(password)
    return Queries.get_user_id(name, secret)
def run(self):
    """
    Thread entry point: repeatedly claim the next website from the shared
    listWebsites (guarded by threadLock), crawl it, and on Ctrl-C persist the
    pending queue to the DB.
    """
    try:
        threadLock.acquire()
        while (CrawlerThread.k < len(listWebsites)):
            # claim the next start URL while holding the lock
            self.crawler.startingUrl = listWebsites[CrawlerThread.k]
            CrawlerThread.k = CrawlerThread.k + 1
            threadLock.release()
            # crawl outside the lock so other threads can claim work
            self.crawler.start_crawling()
            # re-acquire before re-checking the loop condition
            threadLock.acquire()
        threadLock.release()
    except KeyboardInterrupt:
        # NOTE(review): if the interrupt fires while the lock is held it is
        # never released here -- confirm this is acceptable at shutdown
        print("saving")
        for url in queue:
            Queries.insert_queue(Crawler.cursor, url, 0, 0)
        Crawler.db.commit()
        print("done saving")
def deanonymize_vertexref(g, pert_g, i):
    """
    Intersect the vertex-refinement equivalence classes of the original and
    perturbed graphs, then feed the surviving candidates to deanonymize().
    """
    eq = eq_class(queries.vertex_refinement(g, i))
    eq_pert = eq_class(queries.vertex_refinement(pert_g, i))
    result_eq = {}
    for index in range(max(len(eq), len(eq_pert))):
        if index < len(eq_pert) and index < len(eq):
            # keep only perturbed-class members also present in the original class
            result_eq[index] = [v for v in eq_pert[index] if v in eq[index]]
        else:
            # one side has no class at this index: nothing survives
            result_eq[index] = []
    return deanonymize(result_eq.values(), len(pert_g.nodes))
def process(reports_array, date_array, filename):
    """
    Write one styled worksheet per reporting period into an xlsx workbook and
    save it, retrying while the target file is locked by another program.

    :param reports_array: one report dict per period, aligned with date_array
    :param date_array: [[start(y, m, d), end(y, m, d)], ...] per period
    :param filename: output file name, appended to set_path()
    """
    # creating workbook
    wb = Workbook()
    set_styles(wb)
    sheet = wb.active
    for i in range(len(date_array)):
        report_dict = reports_array[i]
        # as date_array looks like [<date_0>[<start>(year, month, day), <end>(year, month, day)], <date_1>[...]]
        # sheet title is "dd.mm-dd.mm" built from start/end day and month
        start = '.'.join(
            str(j).rjust(2, '0')
            for j in (date_array[i][0][2], date_array[i][0][1]))
        end = '.'.join(
            str(j).rjust(2, '0')
            for j in (date_array[i][1][2], date_array[i][1][1]))
        sheet.title = '{}-{}'.format(start, end)
        # styling cells: 50 rows x 12 columns get the base font/format;
        # column widths are reset on every pass (redundant but harmless)
        for s_row in range(1, 51):
            for s_col in range(1, 13):
                ind = get_column_letter(s_col) + str(s_row)
                sheet[ind].font = Font(name='Arial', size=11)
                sheet.column_dimensions[get_column_letter(s_col)].width = 15
                sheet.column_dimensions['A'].width = 25
                sheet[ind].number_format = '#,##0'
        # write metrics
        CommonMetrics.write_common(sheet, report_dict)
        CompleteCorrect.write_acomplete(sheet, report_dict)
        CompleteCorrect.write_clicks(sheet, report_dict)
        CompleteCorrect.write_corrections(sheet, report_dict)
        Queries.write_queries(sheet, report_dict)
        Queries.write_popular(sheet, report_dict['top_search_queries'])
        print('Data loaded into virtual table for {} - {}'.format(start, end))
        # open a fresh sheet only if another period follows
        if i + 1 < len(date_array):
            sheet = wb.create_sheet()
    path = set_path() + filename
    # retry until the user closes the file blocking the save
    while True:
        try:
            wb.save(path)
            break
        except PermissionError:
            input(
                'I can\'t write to an opened file. Close it, please, and hit ENTER.'
            )
def button_select_checkboxes_in_query(self):
    """
    Select exactly those site checkboxes whose site appears in the
    "more than 15 data points" query result.

    :raises Exception: if the SQL helper was never initialized
    """
    if self.sql is None:
        # error message typo fixed ("intiazied" -> "initialized")
        raise Exception('SQL has not been initialized in the GraphUI Class')
    df = self.sql.query_and_export(
        qr.get_query_text_sites_in_data_more_than_x(15))
    # first result column holds the qualifying site ids
    self.sites_in_query = df.iloc[:, 0].to_numpy()
    self.button_deselect_all()
    # tick every checkbox whose site id shows up in the query result
    for i in range(np.size(self.checkboxes_mat, 0)):
        if self.checkboxes_mat[i, 1] in self.sites_in_query:
            self.checkboxes_mat[i, 2].select()
def login(username, password):
    """
    Authenticate a user and report whether they already picked preferences.

    :param username: login name
    :param password: login password
    :return: USER_DOESNT_EXIST on bad credentials, otherwise
             PREFERENCES_EXIST_STATUS or PREFERENCES_NON_EXISTING
    """
    uid = load_user_from_data_base(username, password)
    if uid == USER_DOESNT_EXIST:
        return USER_DOESNT_EXIST
    prefs = Queries.get_preferred_genres(uid)
    return PREFERENCES_EXIST_STATUS if prefs else PREFERENCES_NON_EXISTING
def generate_random_list_of_origins():
    """
    Pick NUMBER_OF_ORIGINS distinct origins at random from the DB countries.

    :return: list of randomly chosen, non-repeating origins
    """
    # single DB round trip -- the original called Queries.get_countries()
    # twice, discarding the first result
    countries = Queries.get_countries()
    # sample without replacement, replacing the manual pick-and-remove loop
    return random.sample(countries, Conventions.NUMBER_OF_ORIGINS)
def get_Information():
    """
    Read the username/password fields and attempt a login.

    :return: the login query result, or None when either field is empty or
             the credentials are rejected
    """
    user = username.get()
    passw = password.get()
    if user == "" or passw == "":
        # missing credentials -- equivalent to the original returning the
        # still-None `result`
        return None
    # the original's `if result != None ... elif result == None` branches
    # both returned result, so a single return is equivalent
    return Queries.login(user, passw)
def generate_tables(self):
    """
    Generate one random-inventory table per configured site, updating the
    progress bar as it goes, and finish with a summary dialog listing sites
    that had fewer materials than the requested number of outputs.
    """
    self.check_for_all_variable()
    self.reset_progressbar()
    # sites whose material count fell below self.num_outputs
    sites_with_num_outputs_less_than = []
    for i in range(len(self.sites)):
        log.message('start: creation of rit with site location {}'.format(
            self.sites[i]))
        # pull all inventory rows for this site
        df = self.sql.query_and_export(
            qr.get_query_text_all_data_on_site(self.sites[i]))
        random_table_generator = rit.RandomInventory(
            df=df,
            output_file_location=self.create_file_name(self.sites[i]),
            col_name_material_no='material_number',
            col_name_material_desc='material_desc',
            col_name_num_parts='total',
            col_name_total_value='value',
            col_name_storage_location='storage_location',
            col_name_currency='currency',
            col_name_batch='batch',
            col_name_plant='plant',
            num_of_outputs=self.num_outputs)
        # only generate when the query returned rows AND the materials carry value
        if random_table_generator.df_large_enough(
        ) and random_table_generator.zero_values_on_materials():
            if random_table_generator.not_enough_table_entries():
                # remember the site so the closing dialog can report it
                sites_with_num_outputs_less_than.append(
                    self.sites[i] + ' - ' + cg.site_location[self.sites[i]])
            random_table_generator.produce_random_table()
        elif not random_table_generator.df_large_enough():
            log.warning_message(
                'the site {} returns null query. Therefore it has been skipped'
                .format(self.sites[i]))
        else:
            log.message(
                'error: the sites {} has no monentary value on it\'s materials. Therefore, a weight randomization cannot be done'
                .format(self.sites[i]))
        log.message('end: creation of rit with site location {}'.format(
            self.sites[i]))
        self.update_progressbar(i)
    # closing dialog: either the under-count summary or a plain success note
    if len(sites_with_num_outputs_less_than) != 0:
        message = 'The following sites have less than {} materials, so all of there materials numbers were included.\n\n'.format(
            self.num_outputs)
        for x in sites_with_num_outputs_less_than:
            message += x + '\n'
        self.gui.message(title='All Materials Included', message=message)
    else:
        self.gui.message(title='Complete',
                         message='All tables have been generated!')
def graphs2():
    """
    Handles /getgraphs: queries the dataset for graphs by type and renders
    linksetsCreation.html with the resulting uri/label list.
    """
    type_query = Qry.get_graph_type()
    # run query against endpoint and send back results
    return render_template('linksetsCreation.html',
                           graphs=sparql(type_query, strip=True))
def find_class_time_place(self, st_id, courseNum):
    """
    Format the meeting times/places of a course for a student as one
    comma-separated string, e.g. "tirgul Sunday 10:00, lecture Monday 12:00".

    :param st_id: student id
    :param courseNum: course number
    :return: formatted schedule string ("" when there are no rows)
    """
    rows = Queries.get_course_time_date(st_id, courseNum)
    # the original built the string manually and shadowed the `len` builtin;
    # a join over the stringified rows is equivalent and safer
    return ", ".join(" ".join(map(str, row)) for row in rows)
def do_mark():
    """Mark every selected transaction as invoiceable."""
    rows = self.transactions_tbl.rows
    for row_index in self.transactions_tbl.selected_rows:
        Q.markTransactionAsInvoiceable(rows[row_index], True)
def month_changed(self):
    """Reload the transactions table for the newly selected (year, month) and clear the selection."""
    year, month = self.months_tbl.rows[self.months_tbl.selected_row]
    fetch = lambda: Q.transactionsForMonth(year, month)
    self.transactions_tbl.rows = D.withSession(fetch)
    self.transactions_tbl.selected_rows = []
def OnGetPt(self, event):
    """
    Open the full chart for every checked patient: build all the EMR notebook
    pages, ensure the patient's output folder tree exists on disk and show
    the name/age/balance summary in the frame header.
    """
    t1 = time.clock()
    # page modules are imported lazily so the list window opens fast
    import Meds, Problems, PMH, Vitals, Notes, demographics, ToDo, Queries, Prevention, Education
    t2 = time.clock()
    # per-platform roots for the patient document folders
    lt = "%s/EMR_outputs" % settings.LINUXPATH
    at = "%s/EMR_outputs" % settings.APPLEPATH
    wt = "%s\EMR_outputs" % settings.WINPATH
    f = wx.GetTopLevelParent(self)
    num = self.list.GetItemCount()
    if num == 0:
        # nothing to select: offer to close this tab
        dlg = wx.MessageDialog(
            None, "There are no patients to select. Close window?", "Problem", style=wx.YES_NO | wx.YES_DEFAULT
        )
        answer = dlg.ShowModal()
        if answer == wx.ID_YES:
            f.nb.DeletePage(self)
    else:
        for i in range(num):
            if self.list.IsChecked(i):
                # build one notebook page per chart section;
                # self.list.GetItemText(i) is the patient ID
                medspage = Meds.Meds(f.nb, -1, self.list.GetItemText(i))
                medspage.ptID = self.list.GetItemText(i)
                probpage = Problems.Problems(f.nb, -1, self.list.GetItemText(i))
                probpage.ptID = self.list.GetItemText(i)
                pmhpage = PMH.PMH(f.nb, -1, self.list.GetItemText(i))
                pmhpage.ptID = self.list.GetItemText(i)
                vitalspage = Vitals.Vitals(f.nb, -1, self.list.GetItemText(i))
                vitalspage.ptID = self.list.GetItemText(i)
                notespage = Notes.Notes(f.nb, -1, self.list.GetItemText(i))
                # NOTE(review): removes the current first notebook page
                # (presumably the patient list) before adding chart pages -- confirm
                f.nb.DeletePage(0)
                demogr_page = demographics.demographics(f.nb, ptID=self.list.GetItemText(i))
                todo_page = ToDo.todo(f.nb, -1, PtID=self.list.GetItemText(i))
                queries_page = Queries.queries(f.nb, ptID=self.list.GetItemText(i))
                preventspage = Prevention.Prevention(f.nb, -1, ptID=self.list.GetItemText(i))
                preventspage.ptID = self.list.GetItemText(i)
                educpage = Education.Notes(f.nb, -1, self.list.GetItemText(i))
                f.nb.AddPage(demogr_page, "Demographics")
                f.nb.AddPage(medspage, "Medications")
                f.nb.AddPage(probpage, "Problems")
                f.nb.AddPage(pmhpage, "Past Medical History")
                f.nb.AddPage(vitalspage, "Vitals")
                f.nb.AddPage(notespage, "Notes")
                f.nb.AddPage(educpage, "Education")
                f.nb.AddPage(todo_page, "To Do")
                f.nb.AddPage(preventspage, "Health Maintenance")
                f.nb.AddPage(queries_page, "Queries")
                # make sure the patient's on-disk folder tree exists
                base_path = EMR_utilities.platformText(lt, at, wt)
                folders = [
                    "SOAP_notes",
                    "Labs",
                    "Radiology",
                    "Consults",
                    "Old_Records",
                    "Insurance",
                    "Other",
                    "Orders",
                ]
                if sys.platform == "win32":
                    if os.path.exists("%s\%s" % (base_path, self.list.GetItemText(i))):
                        pass
                    else:
                        for item in folders:
                            os.makedirs("%s\%s\%s" % (base_path, self.list.GetItemText(i), item))
                else:
                    if os.path.exists("%s/%s" % (base_path, self.list.GetItemText(i))):
                        pass
                    else:
                        for item in folders:
                            os.makedirs("%s/%s/%s" % (base_path, self.list.GetItemText(i), item))
                # header summary: name, age and outstanding balance
                qry = (
                    'SELECT firstname, lastname, SUM(balance) FROM demographics INNER JOIN billing \
USING (patient_ID) WHERE patient_ID = "%s";'
                    % self.list.GetItemText(i)
                )
                results = EMR_utilities.getDictData(qry)
                try:
                    f.ptText.SetLabel(
                        " %s %s %s $%d"
                        % (
                            results["firstname"],
                            results["lastname"],
                            EMR_utilities.getAge(self.list.GetItemText(i)),
                            results["SUM(balance)"],
                        )
                    )
                except:
                    # NOTE(review): bare except -- presumably guards a NULL
                    # SUM(balance) breaking %d; confirm and narrow if possible
                    f.ptText.SetLabel(
                        " %s %s %s no balance"
                        % (
                            results["firstname"],
                            results["lastname"],
                            EMR_utilities.getAge(self.list.GetItemText(i)),
                        )
                    )
                f.ptID = self.list.GetItemText(i)
        t3 = time.clock()
        if f.ptMsgs:
            f.ptMsgs.messages.SetLabel(EMR_utilities.MESSAGES)
            f.ptMsgs.panel.Layout()
        else:
            wx.MessageBox(
                "You have turned off messages. Please restart program to see patient messages.", "Messages OFF"
            )
            pass
def do_mark():
    """Mark the selected transaction as a payment from a picked person."""
    selected_txn = self.transactions_tbl.rows[self.transactions_tbl.selected_row]
    payer = PersonPicker().pick()
    if payer:
        Q.markTransactionAsPayment(selected_txn, payer)
def startSearch(self):
    """
    Trade-search button handler: collect the route parameters from the UI,
    build the query-parameter dict and start the matching profit query on a
    background worker thread.
    """
    if self.currentWorker is not None:
        # a search is already running; handled elsewhere, or ignored
        return

    searchTypeIdx = int(self.searchTypeCombo.currentIndex())
    self.searchType = self.searchTypeCombo.itemData(searchTypeIdx)

    currentSystem = self.currentSystemCombo.currentText()
    currentBase = self.currentStationCombo.currentText()
    targetSystem = self.targetSystemCombo.currentText()
    targetBase = self.targetStationCombo.currentText()
    maxDistance = float(self.maxDistanceSpinBox.value())
    jumprange = float(self.mainwindow.jumpRangeSpinBox.value())
    minProfit = int(self.minProfitSpinBox.value())
    minPadSize = int(self.mainwindow.minPadSizeCombo.currentIndex())
    graphDepth = int(self.graphMinDepthSpin.value())
    graphDepthmax = int(self.graphDepthSpin.value())
    blackmarket = self.smugglingCheckBox.isChecked()

    if graphDepth > graphDepthmax:
        print("min hops have to be less than max hops!")
        self.mainwindow.sounds.play('error')
        return

    # 'ANY' means "no station constraint"
    if currentBase == 'ANY':
        currentBase = None
    if targetBase == 'ANY':
        targetBase = None

    pos = self.currentSystem.getPosition()
    tpos = self.targetSystem.getPosition()
    directionality = 0.0  # todo: currently unused - remove?

    # plain dict literal (the original wrapped it in a redundant dict(...) copy)
    queryparams = {
        "x": pos[0], "y": pos[1], "z": pos[2],
        "x2": tpos[0], "y2": tpos[1], "z2": tpos[2],
        "directionality": directionality,
        "maxdistance": maxDistance,
        "minprofit": minProfit,
        "landingPadSize": minPadSize,
        "jumprange": jumprange,
        "graphDepthMin": graphDepth,
        "graphDepthMax": graphDepthmax,
        "sourcesystem": None,
        "sourcebase": None,
        "targetsystem": None,
        "targetbase": None,
        "blackmarket": blackmarket,
    }

    print("Querying database...")
    searchFn = None
    if self.searchType == 'singles':
        print("queryProfit")
        searchFn = lambda: Queries.queryProfit(self.db, queryparams)
    elif self.searchType == 'loop':
        print("queryProfitGraphLoops")
        searchFn = lambda: Queries.queryProfitGraphLoops(self.db, queryparams)
    elif self.searchType == 'long':
        print("queryProfitGraphDeadends")
        searchFn = lambda: Queries.queryProfitGraphDeadends(self.db, queryparams)
    elif self.searchType == 'target':
        # target searches constrain both endpoints
        queryparams['sourcesystem'] = currentSystem
        queryparams['sourcebase'] = currentBase
        queryparams['targetsystem'] = targetSystem
        queryparams['targetbase'] = targetBase
        print("queryProfitGraphTarget")
        searchFn = lambda: Queries.queryProfitGraphTarget(self.db, queryparams)
    elif self.searchType == 'direct':
        queryparams['sourcesystem'] = currentSystem
        queryparams['sourcebase'] = currentBase
        queryparams['targetsystem'] = targetSystem
        queryparams['targetbase'] = targetBase
        print("queryDirectTrades")
        searchFn = lambda: Queries.queryDirectTrades(self.db, queryparams)
    elif self.searchType in ['station_exports', 'system_exports']:
        # export searches constrain only the source
        queryparams['sourcesystem'] = currentSystem
        queryparams['sourcebase'] = currentBase
        print("queryProfitGraphDeadends from current")
        searchFn = lambda: Queries.queryProfitGraphDeadends(self.db, queryparams)
    else:
        print("unknown search type - we should not be here")

    if searchFn is not None:
        # run the query off the UI thread; results come back via the signal
        self.currentWorker = ThreadWorker.ThreadWorker(
            searchFn, lambda result: self._resultsUpdated.emit(result))
        self.currentWorker.start()
        self._setSearchProgress(True)