def view_command():
    # Clear the current listing, then rebuild it from the backend.
    for i in list1.tree.get_children():
        list1.tree.delete(i)
    list1._build_tree(c_header, pddf(columns=c_header))
    df = pddf(backend.view(), columns=c_header)
    # Cast id/relationship columns to integers where possible.
    for c in ['id', 'mother_id', 'father_id', 'spouse_id', 'generation', 'cluster', 'gender']:
        df[c] = to_numeric(df[c], downcast='integer', errors='coerce')
    list1._build_tree(c_header, df)
def file_save_xls():
    from tkinter.filedialog import asksaveasfilename
    f = asksaveasfilename(title="Select file",
                          filetypes=(("Excel files", "*.xlsx"), ("all files", "*.*")),
                          defaultextension='.xlsx')
    if not f:  # asksaveasfilename returns an empty value if the dialog is closed with "cancel".
        return
    df = pddf(backend.view(), columns=c_header)
    df.to_excel(f, index=False)
def values_greater_than(my_dataframe, my_col, threshold=0):
    # Keep only the rows whose value in `my_col` occurs more than `threshold` times.
    counts = my_dataframe[my_col].value_counts()
    fields_kept = []
    for i in range(len(counts)):
        if counts.iloc[i] > threshold:
            fields_kept.append(counts.index[i])
    print("Keeping:", fields_kept)
    temp = pddf()
    for item in fields_kept:
        # Note: DataFrame.append requires pandas < 2.0.
        temp = temp.append(my_dataframe[my_dataframe[my_col] == item])
    return temp
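# Hedged usage sketch (illustrative, not part of the original module). It assumes
# `pddf` is pandas.DataFrame, as the surrounding code implies, and uses a made-up
# 'city' column. With threshold=1, only values occurring more than once are kept.
def _demo_values_greater_than():
    demo = pddf({'city': ['A', 'A', 'B', 'C', 'C', 'C']})
    # 'B' occurs only once, so its row is dropped; the 'A' and 'C' rows are returned.
    return values_greater_than(demo, 'city', threshold=1)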
def mid_to_matrix(mid, output='nested_list'):  # output in {"nested_list", "pandas"}
    """
    Takes a midi file or stream and returns a matrix with rows representing
    midi events and columns representing midinote, offset position and event
    duration:

    |      | pitch | offset | duration | velocity | channel |
    | ---- | ----- | ------ | -------- | -------- | ------- |
    | evt1 |       |        |          |          |         |
    | evt2 |       |        |          |          |         |
    | evt3 |       |        |          |          |         |
    | ...  |       |        |          |          |         |
    """
    mid = parse_mid(mid)
    if mid.type != 0:
        print("Midi file type {}. Reformat to type 0 before quantising.".format(mid.type))
        return None
    resolution = mid.ticks_per_beat
    elapsed = 0
    noteons = []
    offsets = []
    noteoffs = []
    durations = []
    for msg in mid.tracks[0]:
        elapsed += msg.time
        offset = elapsed / resolution
        if msg.type == 'note_on':
            noteons.append(msg.note)
            offsets.append(offset)
        if msg.type == 'note_off':
            noteoffs.append(msg.note)
            durations.append(offset)
    if not len(noteons) == len(noteoffs):
        print("Unmatching sizes. Reformat the file first.")
        return None
    else:
        mnotes = []
        for i in range(len(noteons)):
            # Pair each note_on with the first matching note_off to compute its duration.
            mnotes.append([noteons[i], offsets[i],
                           durations[noteoffs.index(noteons[i])] - offsets[i]])
            durations.pop(noteoffs.index(noteons[i]))
            noteoffs.remove(noteons[i])
        if output == 'nested_list':
            return mnotes
        elif output == 'pandas':
            return pddf(mnotes, columns=['pitch', 'offset', 'duration'])
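# Hedged usage sketch (illustrative, not part of the original module). It assumes
# `parse_mid` accepts a path to a type-0 MIDI file, as mid_to_matrix() implies;
# 'example_type0.mid' is a placeholder filename.
def _demo_mid_to_matrix():
    notes = mid_to_matrix('example_type0.mid', output='nested_list')
    if notes is not None:
        for pitch, offset, duration in notes[:5]:
            # Offsets and durations are expressed in beats (ticks / ticks_per_beat).
            print(pitch, offset, duration)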
def n_most_frequent_values(my_dataframe, my_col, n_most_freq=6):
    # Keep only the rows whose value in `my_col` is among the `n_most_freq` most frequent values.
    counts = my_dataframe[my_col].value_counts()
    if len(counts) <= n_most_freq:
        return my_dataframe
    elif len(counts) > n_most_freq:
        counts = counts[:n_most_freq]
    print("Keeping:", counts.index)
    temp = pddf()
    for item in counts.index:
        # Note: DataFrame.append requires pandas < 2.0.
        temp = temp.append(my_dataframe[my_dataframe[my_col] == item])
    return temp
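# Hedged usage sketch (illustrative, not part of the original module), again assuming
# `pddf` is pandas.DataFrame. With n_most_freq=2, only rows carrying the two most
# common 'city' values are returned.
def _demo_n_most_frequent_values():
    demo = pddf({'city': ['A', 'A', 'A', 'B', 'B', 'C', 'D']})
    # Keeps the 'A' and 'B' rows; 'C' and 'D' occur too rarely.
    return n_most_frequent_values(demo, 'city', n_most_freq=2)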
def draw_tree():
    from PIL import ImageTk, Image as Image_PIL

    class ResizingCanvas(Canvas):
        def __init__(self, parent, **kwargs):
            Canvas.__init__(self, parent, **kwargs)
            self.bind("<Configure>", self.on_resize)
            self.height = self.winfo_reqheight()
            self.width = self.winfo_reqwidth()
            # self.bind_all("<MouseWheel>", self.on_mousewheel)

        def on_resize(self, event):
            new_width = self.master.winfo_width()
            new_height = self.master.winfo_height()

    # colors
    col_f = '#ff4e5c'
    col_m = '#1c3144'
    col_w = '#9d7a7d'

    top = tk.Tk()
    top.iconbitmap(f'{main_f}2683232.ico')
    Grid.rowconfigure(top, 0, weight=1)
    Grid.columnconfigure(top, 0, weight=1)
    top.geometry("900x480")
    top.wm_title("Tree")
    frame = tk.Frame(top)
    frame.grid(row=0, column=0, sticky=N+S+E+W)

    # scrollbars
    xscrollbar = tk.Scrollbar(frame, orient=tk.HORIZONTAL)
    xscrollbar.grid(row=1, column=0, sticky="EW")
    yscrollbar = tk.Scrollbar(frame)
    yscrollbar.grid(row=0, column=1, sticky="NS")
    canvas = ResizingCanvas(frame, xscrollcommand=xscrollbar.set, yscrollcommand=yscrollbar.set)
    canvas.grid(row=0, column=0, sticky="EWNS")
    canvas.configure(bg='white')

    # Image
    import pydotplus as pdt
    import time

    df = pddf(backend_app.view(), columns=c_header)
    for c in ['id', 'mother_id', 'father_id', 'spouse_id', 'generation', 'cluster', 'gender']:
        df[c] = to_numeric(df[c], downcast='integer', errors='coerce')

    min_level = int(df['generation'].min())
    max_level = int(df['generation'].max())
    g = pdt.Dot(graph_type='digraph', compound='true', rankdir='TB', newrank='true')
    i = 0
    edges = []
    nodes = []

    # create all edges
    from ast import literal_eval as make_tuple
    nl = max_level - min_level + 1
    s = [None] * (max_level + 1)
    # One subgraph per generation so its members share the same rank.
    for i in range(min_level, max_level + 1):
        s = pdt.Subgraph(i, rank='same')
        for j in df.loc[df['generation'] == i, 'id'].values:
            s.add_node(pdt.Node(f"{df.loc[df['id']==j, 'id'].values[0]}"))
        g.add_subgraph(s)

    min_cl = int(df['cluster'].min())
    max_cl = int(df['cluster'].max())
    nl = max_cl - min_cl + 1
    ss = [None] * (max_cl + 1)
    # One cluster per family cluster.
    for i in range(min_cl, max_cl + 1):
        ss = pdt.Cluster(f'c1_{i}', color="white", rankdir="TB")
        for j in df.loc[df['cluster'] == i, 'id'].values:
            ss.add_node(pdt.Node(f"{df.loc[df['id']==j, 'id'].values[0]}"))
        g.add_subgraph(ss)

    # Parent and spouse edges.
    for i in range(df.shape[0]):
        m = df.loc[i, 'mother_id']
        f = df.loc[i, 'father_id']
        nod = pdt.Node(f"{df.loc[i, 'id']}", shape='record', color=col_m, style='rounded')
        if m in df['id'].values:
            g.add_edge(pdt.Edge(pdt.Node(f"{df.loc[df['id']==m, 'id'].values[0]}"), nod, color=col_f))
        if f in df['id'].values:
            g.add_edge(pdt.Edge(pdt.Node(f"{df.loc[df['id']==f, 'id'].values[0]}"), nod, color=col_m))
        if notnull(df.loc[i, 'spouse_id']):
            who = to_numeric(df.loc[i, 'spouse_id'], downcast='integer', errors='coerce')
            if who in df['id'].values:
                g.add_edge(pdt.Edge(pdt.Node(f"{df.loc[df['id']==who, 'id'].values[0]}"), nod,
                                    dir='none', color=col_w))  # label='m',

    # Node labels.
    for i in range(df.shape[0]):
        cc = col_f
        if df.loc[i, 'gender'] == 1:
            cc = col_m
        sstr = f"{df.loc[i, 'name']}"
        if notnull(df.loc[i, 'surname']):
            sstr = f"{sstr} {df.loc[i, 'surname']}"
        if notnull(df.loc[i, 'date_birth']):
            if isinstance(df.loc[i, 'date_birth'], int) or isinstance(df.loc[i, 'date_birth'], str):
                sstr = f"{sstr}\nB: {df.loc[i, 'date_birth']}"
            else:
                sstr = f"{sstr}\nB: {df.loc[i, 'date_birth'].strftime('%d %b %Y')}"  # %H:%M:%S
        d = df.loc[i, 'date_death']
        # import numpy as np
        if notnull(d):
            # print("ddd", isna(d), isnull(d), d, type(d))
            sstr = f"{sstr}\nD: {d}"
        if notnull(df.loc[i, 'place_birth']):
            sstr = f"{sstr}\nPoB: {df.loc[i, 'place_birth']} ({df.loc[i, 'country_birth']})"
        if not (isnull(df.loc[i, 'comments']) or df.loc[i, 'comments'] == ''):
            sstr = f"{sstr}\nComments: {df.loc[i, 'comments']}"
        nod = pdt.Node(f"{df.loc[i, 'id']}", label=sstr, shape='box', color=cc, style='rounded')
        g.add_node(nod)

    import io
    buf = io.BytesIO()
    g.write_png(buf)
    # plt.savefig(buf, format='png')
    buf.seek(0)
    img_g = Image_PIL.open(buf)
    img = ImageTk.PhotoImage(img_g, master=frame)
    canvas.create_image(0, 0, image=img)

    # scroll config
    canvas.config(scrollregion=canvas.bbox(tk.ALL))
    xscrollbar.config(command=canvas.xview)
    yscrollbar.config(command=canvas.yview)
    frame.grid(row=0, column=0)
    frame.grid_rowconfigure(0, weight=1)
    frame.grid_columnconfigure(0, weight=1)

    # menu
    menubar = Menu(top)
    top.config(menu=menubar)
    menu = Menu(menubar)

    def file_save():
        from tkinter.filedialog import asksaveasfilename
        f = asksaveasfilename(title="Select file",
                              filetypes=(("png files", "*.png"), ("all files", "*.*")),
                              defaultextension='.png')
        if not f:  # asksaveasfilename returns an empty value if the dialog is closed with "cancel".
            return
        img_g.save(f)

    menu.add_command(label='Save', command=file_save)
    menu.add_command(label='Close', command=top.destroy)
    # menu.add_command(label="Exit", command=window.quit)
    menubar.add_cascade(label="File", menu=menu)
    top.mainloop()
def search_command():
    for i in list1.tree.get_children():
        list1.tree.delete(i)
    # list1._build_tree(c_header, pddf(columns=c_header))
    c_list = pddf(backend.search(name_text.get(), surname_text.get(), year_text.get(), gen_text.get()),
                  columns=c_header)
    list1._build_tree(c_header, c_list)
c_header = ['id',
            'name', 'surname', 'date_birth', 'date_death',
            'mother_id', 'father_id', 'spouse_id',
            'generation', 'cluster', 'gender',
            'place_birth', 'country_birth', 'country', 'comments']
c_list = pddf(backend.view(), columns=c_header)
list1 = MultiColumnListbox(c_header, c_list)

''' fifth row '''
fr5 = Frame(window)
fr5.pack(fill=X, side=TOP)

# '''Buttons'''
b_clear = HoverButton(fr5, text="Clear Fields", width=12, command=clear_command,
                      activebackground='SlateGray3')
b_clear.pack(side=tk.LEFT, pady=15, padx=10)
b3 = HoverButton(fr5, text="Add entry", width=12, command=new_command,
                 activebackground='SlateGray3')
b3.pack(side=tk.LEFT, pady=15, padx=10)
def draw_tree():
    from PIL import ImageTk, Image as Image_PIL
    c_header = ['id',
                'name', 'surname', 'date_birth', 'date_death',
                'mother_id', 'father_id', 'spouse_id',
                'generation', 'cluster', 'gender',
                'place_birth', 'country_birth', 'country', 'comments']

    # Image
    import pydotplus as pdt
    import time
    # import pandas as pd

    df = pddf(backend.view(), columns=c_header)
    for c in ['id', 'mother_id', 'father_id', 'spouse_id', 'generation', 'cluster', 'gender']:
        df[c] = to_numeric(df[c], downcast='integer', errors='coerce')
        # df[c] = df[c].astype(int)
    # df = pd.read_excel('c:/a_tree/gen_tree.xlsx', sheet_name=nn)
    # print(df)

    min_level = int(df['generation'].min())
    max_level = int(df['generation'].max())
    g = pdt.Dot(graph_type='digraph', compound='true', rankdir='TB', newrank='true')
    i = 0
    edges = []
    nodes = []

    # create all edges
    # edges = [(1,2), (1,3), (2,4), (2,5), (3,5)]
    # nodes = [(1, "A", "r"), (2, "B", "g"), (3, "C", "g"), (4, "D", "r"), (5, "E", "g")]
    # for e in edges:
    #     g.add_edge(pdt.Edge(e[0], e[1], color=e[2]))
    #
    # for n in nodes:
    #     node = pdt.Node(name=n[0], label=n[1], fillcolor=n[2], style="filled")
    #     g.add_node(node)

    from ast import literal_eval as make_tuple
    nl = max_level - min_level + 1
    s = [None] * (max_level + 1)
    # One subgraph per generation so its members share the same rank.
    for i in range(min_level, max_level + 1):
        s = pdt.Subgraph(i, rank='same')
        for j in df.loc[df['generation'] == i, 'id'].values:
            # s.add_node(pdt.Node(f"{df.loc[df['id']==j, 'Name'].values[0]} {df.loc[df['id']==j, 'Surname'].values[0]}"))
            s.add_node(pdt.Node(f"{df.loc[df['id']==j, 'id'].values[0]}"))
        g.add_subgraph(s)

    # min_cl = df['Cluster2'].min()
    # max_cl = df['Cluster2'].max()
    # nl = max_cl - min_cl + 1
    # ss = [None] * (max_cl + 1)
    # for i in range(min_cl, max_cl + 1):
    #     if i > 0:
    #         ss = pdt.Cluster(f'c2_{i}', color="blue", rankdir="TB")
    #         for j in df.loc[df['Cluster2'] == i, 'id'].values:
    #             ss.add_node(pdt.Node(f"{df.loc[df['id']==j, 'id'].values[0]}"))
    #         g.add_subgraph(ss)

    min_cl = int(df['cluster'].min())
    max_cl = int(df['cluster'].max())
    nl = max_cl - min_cl + 1
    ss = [None] * (max_cl + 1)
    # One cluster per family cluster.
    for i in range(min_cl, max_cl + 1):
        ss = pdt.Cluster(f'c1_{i}', color="white", rankdir="TB")
        for j in df.loc[df['cluster'] == i, 'id'].values:
            ss.add_node(pdt.Node(f"{df.loc[df['id']==j, 'id'].values[0]}"))
        g.add_subgraph(ss)

    # Parent and spouse edges.
    for i in range(df.shape[0]):
        m = df.loc[i, 'mother_id']
        f = df.loc[i, 'father_id']
        # if m in df['id'].values: edges.append((i, df.loc[df['id']==m, 'Name'].index[0], 'orange'))
        # if f in df['id'].values: edges.append((i, df.loc[df['id']==f, 'Name'].index[0], 'blue'))
        # nodes.append((i, df.loc[i, 'Name'], 'blue'))
        nod = pdt.Node(f"{df.loc[i, 'id']}", shape='record', color="blue", style='rounded')
        if m in df['id'].values:
            g.add_edge(pdt.Edge(pdt.Node(f"{df.loc[df['id']==m, 'id'].values[0]}"), nod, color='orange'))
        if f in df['id'].values:
            g.add_edge(pdt.Edge(pdt.Node(f"{df.loc[df['id']==f, 'id'].values[0]}"), nod, color='#066dba'))
        if notnull(df.loc[i, 'spouse_id']):
            # rel = df.loc[i, 'relation to']
            # rel = rel.replace('(', '').replace(')', '').split(',')
            # who = pd.to_numeric(rel[0]).astype(int)
            who = to_numeric(df.loc[i, 'spouse_id'], downcast='integer', errors='coerce')
            # how = rel[1]
            if who in df['id'].values:
                g.add_edge(pdt.Edge(pdt.Node(f"{df.loc[df['id']==who, 'id'].values[0]}"), nod,
                                    dir='none', color='#bf3fbf'))  # label='m',

    # Node labels.
    for i in range(df.shape[0]):
        cc = 'orange'
        if df.loc[i, 'gender'] == 1:
            cc = '#066dba'
        sstr = f"{df.loc[i, 'name']}"
        if notnull(df.loc[i, 'surname']):
            sstr = f"{sstr} {df.loc[i, 'surname']}"
        if notnull(df.loc[i, 'date_birth']):
            if isinstance(df.loc[i, 'date_birth'], int) or isinstance(df.loc[i, 'date_birth'], str):
                sstr = f"{sstr}\nB: {df.loc[i, 'date_birth']}"
            else:
                sstr = f"{sstr}\nB: {df.loc[i, 'date_birth'].strftime('%d %b %Y')}"  # %H:%M:%S
        d = df.loc[i, 'date_death']
        if notnull(d):
            sstr = f"{sstr}\nD: {d}"
        if notnull(df.loc[i, 'place_birth']):
            sstr = f"{sstr}\nPoB: {df.loc[i, 'place_birth']} ({df.loc[i, 'country_birth']})"
        if not (isnull(df.loc[i, 'comments']) or df.loc[i, 'comments'] == ''):
            sstr = f"{sstr}\nComments: {df.loc[i, 'comments']}"
        nod = pdt.Node(f"{df.loc[i, 'id']}", label=sstr, shape='box', color=cc, style='rounded')
        g.add_node(nod)

    import io
    buf = io.BytesIO()
    g.write_png(buf)
    buf.seek(0)
    import base64
    buf_img64 = base64.b64encode(buf.getvalue())
    # buf_im_str = u'data:img/jpeg;base64,' + buf_img64.decode('utf-8')
    buf_im_str = buf_img64.decode("utf-8")
    # img_g = Image_PIL.open(buf)
    # return send_file(buf, mimetype='image/PNG')
    # Render the PNG inline as a base64 string in the template.
    return render_template('image.html', img=buf_im_str)
def poolDataInterpolate(dropMap, df, label, channel, path, incCarte=5e-3, nbReps=80,
                        startTime=2, timeThresh=40, threshOutlayer=17, display=False):
    for j, tmp in enumerate(label):
        dataX = np.array([])
        dataY = np.array([])
        data = pddf()
        print(tmp)
        nbAdded = 0
        p = df[0]
        x0 = np.array(p.time / 3600, dtype=np.float64)
        p = df[len(df) - 4]  # antepenultimate run, used to build the time range over which to interpolate
        xLast = np.array(p.time / 3600, dtype=np.float64)
        # x2 = np.arange(.5, min(max(x0) - min(x0), max(xLast) - min(xLast)), .5)
        x2 = np.arange(.5, timeThresh, .5)
        data['time'] = pd.Series(x2).values
        for j, content in enumerate(dropMap[:, 1]):
            y = []
            x = []
            if content == tmp:
                if channel == 'RFP':
                    p = df[j]
                    y = np.array(np.log(p.fluo_2_area * p.speed / p.size)
                                 - np.log(p.fluo_2_area[0] * p.speed[0] / p.size[0]))
                    x = np.array(p.time / 3600)
                    x = x - x[0]  # reset time
                    ystd = np.array(np.divide(p.fluo_2_std + incCarte / 2, p.fluo_2_median))
                if channel == 'GFP':
                    p = df[j]
                    y = np.array(np.log(p.fluo_3_area * p.speed / p.size)
                                 - np.log(p.fluo_3_area[0] * p.speed[0] / p.size[0]))
                    x = np.array(p.time / 3600, dtype=np.float64)
                    x = x - x[0]  # reset time
                    ystd = np.array(np.divide(p.fluo_3_std + incCarte / 2, p.fluo_3_median))
                if channel == 'PVD':
                    p = df[j]
                    y = np.array(np.log(p.fluo_1_area * p.speed / p.size)
                                 - np.log(p.fluo_1_area[0] * p.speed[0] / p.size[0]))
                    x = np.array(p.time / 3600, dtype=np.float64)
                    x = x - x[0]  # reset time
                    ystd = np.array(np.divide(p.fluo_1_std + incCarte / 2, p.fluo_1_median))
                if channel == 'PVDoverGFP':
                    p = df[j]
                    y = np.array(p.fluo_1_area / (p.fluo_3_area))  # no need for log here
                    x = np.array(p.time / 3600, dtype=np.float64)
                    x = x - x[0]  # reset time
                    ystd = np.array(np.divide(p.fluo_1_std + incCarte / 2, p.fluo_1_median),
                                    dtype=np.float64)

                # filter outliers
                # idxStart = bisect.bisect(x, startTime)
                # idxThres = bisect.bisect(x, min(max(x), timeThresh))
                idxOutlayer = bisect.bisect(x, threshOutlayer)
                if len(y) > idxOutlayer:
                    if y[idxOutlayer] > y[2] + 2 or threshOutlayer <= 0:
                        # measure at run 0 does not count
                        f = si.interp1d(x, y)
                        try:
                            y2 = f(x2)
                            data[str(j)] = pd.Series(y2).values
                            nbAdded += 1
                        except Exception as inst:
                            print('poolData() remove this data')
                            print(j)
                            print(type(inst))  # the exception instance
                            print(inst.args)   # arguments stored in .args
                            print(inst)
                            print('x2')
                            print(x2)
                            print('y')
                            print(y)
                            print('x')
                            print(x)

                        # plot the growth curve with the fit and the derivative
                        if display:
                            pathgrowth = path + 'growth/'
                            if not os.path.exists(pathgrowth):
                                os.makedirs(pathgrowth)
                            # plt.plot(x2, y2, marker='o', linestyle='-', color='blue')
                            plt.plot(x, y, marker='o', linestyle='-', color='blue')
                            # ax1.set_ylim([np.log(low), np.log(high)])
                            plt.ylabel('log(fluo)')
                            plt.xlabel('time (h)')
                            plt.title(tmp + ' drp' + str(j) + ' area')
                            plt.savefig(pathgrowth + 'drp' + str(j) + '_deriveAll_nbpt' + '_' + tmp,
                                        format='pdf')
                            plt.show()
            if nbAdded > nbReps:
                break
        data.to_csv(path + tmp + channel + 'Interp.csv', index=False)
def getDataHistogram(label, path, channel):
    folder = path + 'resultIndiv/'
    [dropMap, df, nn] = loadData(path)
    stdgRate = pddf()
    gRate = pddf()
    lag = pddf()
    stdlag = pddf()
    yld = pddf()
    stdyield = pddf()
    for n, labelName in enumerate(label):
        stdgRateList = []
        gRateList = []
        lagList = []
        stdlagList = []
        yieldList = []
        stdyieldList = []
        for file in os.listdir(folder):
            if file.startswith(labelName + 'resultIndiv_'):
                if file.endswith(channel + ".csv"):
                    try:
                        with open(folder + file, 'r') as f:
                            reader = csv.reader(f)
                            your_list = list(reader)
                        stdgRateList.append(float(your_list[findIdx(your_list, 'max df std')][1]))
                        gRateList.append(float(your_list[findIdx(your_list, 'max df')][1]))
                        lagList.append(float(your_list[findIdx(your_list, 'lag time')][1]))
                        stdlagList.append(float(your_list[findIdx(your_list, 'lag time std')][1]))
                        yieldList.append(float(your_list[findIdx(your_list, 'max y')][1]))
                        stdyieldList.append(float(your_list[findIdx(your_list, 'max y std')][1]))
                    except Exception as inst:
                        donothing = 0
                        print(labelName)
                        print(folder + file)
                        print(type(inst))  # the exception instance
        df = pddf({labelName: gRateList})
        gRate = pd.concat([gRate, df], axis=1)
        df = pddf({labelName: lagList})
        lag = pd.concat([lag, df], axis=1)
        df = pddf({labelName: yieldList})
        yld = pd.concat([yld, df], axis=1)
        df = pddf({labelName: stdgRateList})
        stdgRate = pd.concat([stdgRate, df], axis=1)
        df = pddf({labelName: stdlagList})
        stdlag = pd.concat([stdlag, df], axis=1)
        df = pddf({labelName: stdyieldList})
        stdyield = pd.concat([stdyield, df], axis=1)
    return [stdgRate, gRate, lag, stdlag, yld, stdyield]
def findIdx(your_list, item):
    # Return the row index of `item` in the first column of a nested list (e.g. csv.reader output).
    lst = pddf(your_list)
    lst2 = lst[0].values
    lst3 = lst2.tolist()
    return lst3.index(item)
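# Hedged usage sketch (illustrative, not part of the original module): findIdx()
# locates the row whose first column matches a key in the nested lists produced by
# csv.reader, which is how getDataHistogram() uses it above. The rows below are made up.
def _demo_findIdx():
    rows = [['max df', '0.42'], ['lag time', '3.1'], ['max y', '1.8']]
    idx = findIdx(rows, 'lag time')
    return float(rows[idx][1])  # -> 3.1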