def GeneralAllAreas(avgs, sems, sigclusters, trace_names, savefolder, ytitles, maintitle):
    numrows = avgs.shape[1]
    if numrows > 1:
        raise Exception('Can only plot one row per area')

    showlegends = [True if i == 0 else False for i in range(D.numareas)]

    # One figure with a shared y-axis across areas
    f, axes_all = plt.subplots(D.numareas, 1, figsize=(width_regplot, height_regplot * D.numareas),
                               sharex=True, sharey=True)

    for i_area, (area, avgs_area, sems_area, sig_area, axes) in enumerate(
            zip(D.areas, avgs, sems, sigclusters, axes_all)):
        if i_area == 0:
            _makeandplotavgs(avgs_area[0], sems_area[0], sig_area[0], axes, ylab=D.areanames[i_area],
                             showsig=True, leg_labels=trace_names, show_leg=showlegends)
        else:
            _makeandplotavgs(avgs_area[0], sems_area[0], sig_area[0], axes, ylab=D.areanames[i_area],
                             showsig=True, leg_labels=trace_names, show_leg=False)

    _finalplotadjustments(f, maintitle)
    D.savefig(f'{savefolder}/all')

    # Second figure without the shared y-axis
    f, axes_all = plt.subplots(D.numareas, 1, figsize=(width_regplot, height_regplot * D.numareas),
                               sharex=True)

    for i_area, (area, avgs_area, sems_area, sig_area, axes) in enumerate(
            zip(D.areas, avgs, sems, sigclusters, axes_all)):
        if i_area == 0:
            _makeandplotavgs(avgs_area[0], sems_area[0], sig_area[0], axes, ylab=D.areanames[i_area],
                             showsig=True, leg_labels=trace_names, show_leg=showlegends)
        else:
            _makeandplotavgs(avgs_area[0], sems_area[0], sig_area[0], axes, ylab=D.areanames[i_area],
                             showsig=True, leg_labels=trace_names, show_leg=False)

    _finalplotadjustments(f, maintitle)
    D.savefig(f'{savefolder}/all2')
def PlotDist(dists, savefolder):
    for dist, areaname in zip(dists, D.areanames):
        # Remove 0's from end
        dist = np.trim_zeros(dist, trim='b')
        histY, histX = np.histogram(dist, bins=10)
        plt.plot(histX[1:], histY, label=areaname)

    plt.xlabel('Minimum number of samples for cell')
    plt.ylabel('Count')
    plt.legend()
    plt.xlim(xmin=0)
    D.savefig(f'{savefolder}/dist')
def analysecell(counter, out_betas, data, cell):
    Utils.updatecounts(counter, cell, data.n)
    td = data.behavdata[cell]

    masks = (D.get_A_AR_trials(td), D.get_A_AC_trials(td),
             D.get_A_BR_trials(td), D.get_A_BC_trials(td))
    x = td.previousreward

    for i_epoch, epoch in enumerate(D.epochs):
        y = data.generatenormalisedepoch(cell, epoch)
        for i_mask, mask in enumerate(masks):
            out_betas[0, i_mask, i_epoch, cell] = Maths.regression(x[mask], y[mask])
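# Illustrative sketch (not part of the original module): one plausible shape for the
# Maths.regression call above, fitting a least-squares slope per timepoint of a
# (trials x timepoints) response matrix against a single trial regressor. The helper
# name and the synthetic data are hypothetical; the real Maths module is not shown here.
import numpy as np

def regression_per_timepoint(x, y):
    """Return the least-squares slope of y[:, t] on x for every timepoint t."""
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    xc = x - x.mean()
    yc = y - y.mean(axis=0)
    return xc @ yc / (xc @ xc)

if __name__ == '__main__':
    rng = np.random.default_rng(0)
    x = rng.integers(0, 3, size=100)                  # e.g. previous reward level per trial
    y = np.outer(x, np.linspace(0, 1, 50))            # (trials x timepoints) response
    y += rng.normal(scale=0.1, size=y.shape)
    betas = regression_per_timepoint(x, y)            # one beta per timepoint
    print(betas.shape)                                # (50,)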
def printDetails(self):
    detailList = self.convertDictToList(Details(self.tickerEntry.get()).getDetails())
    self.detailTv.clear()
    for count, row in enumerate(detailList):
        self.detailTv.insertValues([value for value in row], count)

    keyDetailList = self.convertDictToList(Details(self.tickerEntry.get()).getKeyDetails())
    self.keyPerformanceTv.clear()
    for count, row in enumerate(keyDetailList):
        self.keyPerformanceTv.insertValues([value for value in row], count)

    self.getStats()
def PlotDist(dists, savefolder):
    linestyles = ['-', '--', '-.', ':', 'steps']
    for i_area, (dist, areaname) in enumerate(zip(dists, D.areanames)):
        for i_cond, cond in enumerate(dist):
            # Remove NaN padding from end
            cond = cond[~np.isnan(cond)]
            histY, histX = np.histogram(cond, bins=10)
            plt.plot(histX[1:], histY, label=f'{areaname} {i_cond}',
                     linestyle=linestyles[i_cond % len(linestyles)], color=f'C{i_area}')

    plt.xlabel('Minimum number of samples for cell')
    plt.ylabel('Count')
    plt.legend()
    plt.xlim(xmin=0)
    D.savefig(f'{savefolder}/dist')
def _finalplotadjustments(f, title, names_epochs=D.names_epochs):
    labelpos = np.array([i * (D.n_timepoints + 1) + D.converttimetosmoothedtrace(0)
                         for i in range(len(names_epochs))])
    plt.xticks(labelpos, names_epochs, fontsize=11)
    plt.suptitle(title, x=0.05, fontsize=18, ha='left')
    f.legend(loc='upper right', fancybox=True, shadow=True, ncol=5, bbox_to_anchor=(1, 1))
    plt.tight_layout(rect=[0, 0, 1, 0.95])
    plt.tick_params(axis='both', which='major', labelsize=13)
def GeneralPlot(avgs, sems, sigclusters, trace_names, savefolder, ytitles, maintitle):
    numrows = avgs.shape[1]
    showlegends = [True if i == 0 else False for i in range(numrows)]

    for i_area, (area, avgs_area, sems_area, sig_area) in enumerate(zip(D.areas, avgs, sems, sigclusters)):
        f, axes = plt.subplots(numrows, 1, figsize=(width_regplot, height_regplot * numrows), sharex=True)

        if numrows > 1:
            for i, (label, leg_bool) in enumerate(zip(ytitles, showlegends)):
                _makeandplotavgs(avgs_area[i], sems_area[i], sig_area[i], axes[i], ylab=label,
                                 showsig=True, leg_labels=trace_names, show_leg=leg_bool)
        else:
            _makeandplotavgs(avgs_area[0], sems_area[0], sig_area[0], axes, ylab=ytitles,
                             showsig=True, leg_labels=trace_names, show_leg=showlegends)

        title = maintitle + f' ({area})'
        _finalplotadjustments(f, title)
        D.savefig(f'{savefolder}/{area}')
def getData():
    try:
        email = request.json["email"]
    except Exception as e:
        Log.log("Error: " + str(e))
        return json.dumps(Details.Detail().__dict__)

    ans = DatabaseHandler.DataBaseHandler().get_data_with_email(email)
    response = jsonify(ans.__dict__)
    print(response)
    print(ans.__dict__)
    return response
def generate_epoch_norm(self, cell, epoch, raster_halfwidth=D.smooth_window_halfwidth):
    savefolder = D.get_smooth_folder(raster_halfwidth)
    savepath = f'{savefolder}norm/{self.area}_{cell}_{epoch}.npy'
    savedfile = Path(savepath)

    if savedfile.is_file():
        raw = np.load(savedfile)
    else:
        fix = self.generate_epoch_raw(cell, D.sc_madefixation)
        fixstart = D.converttimetosmoothedtrace(50)  # ms after fix made
        fixstop = D.converttimetosmoothedtrace(400)
        fix = fix[:, fixstart:fixstop]

        raw = self.generate_epoch_raw(cell, epoch)
        raw -= np.mean(fix)
        raw /= np.std(fix)

        # Make the save folder if it doesn't exist
        import os
        directory = os.path.dirname(f'{savefolder}norm/')
        if not os.path.exists(directory):
            os.makedirs(directory)

        np.save(savepath, raw)

    # Clip raw to suggested boundaries
    if D.smooth_prewindow > D.static_prewindow or D.smooth_postwindow > D.static_postwindow:
        raise Exception('Need to generate static rasters of sufficient length'
                        ' before you can clip them to the size specified')

    if D.static_prewindow == D.smooth_prewindow:
        start = 0  # Windows already aligned, no offset needed
    else:
        start = (D.static_prewindow - D.smooth_prewindow) // D.smooth_step

    raw_clipped = raw[:, start:start + D.smooth_outputlength]

    return raw_clipped
def createmasks(td):
    x_data = td.c1chosen

    mask_aar = D.get_A_AR_trials(td)
    mask_aac = D.get_A_AC_trials(td)
    mask_abc = D.get_A_BC_trials(td)
    switchc1 = mask_abc | mask_aar

    # First candidate set of masks (superseded below)
    masks = [(mask_aac & (td.rewgiven == 2) & (td.previousreward == 2)),
             (mask_abc & (td.rewgiven == 2) & (td.previousreward != 2)),
             (mask_abc & (td.rewgiven != 2) & (td.previousreward != 2))]

    mask_aac = D.get_A_AC_trials(td)
    mask_abc = D.get_A_BC_trials(td)

    # Final masks actually returned
    masks = [(mask_aac & (td.previousreward == 2)),
             (mask_abc & (td.previousreward == 2))]

    return x_data, masks
def getStats(self):
    keyDetailDict = Details(self.tickerEntry.get()).getKeyDetails()

    cash_yield = [ele for ele in keyDetailDict['Cash Yield'] if isinstance(ele, float)]
    ev_multiple = [ele for ele in keyDetailDict['EV Multiple'] if isinstance(ele, float)]

    stat_widgets = (("median", self.medianValueCalc),
                    ("mean", self.meanValueCalc),
                    ("stdev", self.stdValueCalc),
                    ("min", self.minValueCalc),
                    ("max", self.maxValueCalc))

    for stat, widget in stat_widgets:
        widget.changeCashYield(text=self.statCalc(stat, cash_yield))
        widget.changeEvValue(text=self.statCalc(stat, ev_multiple))
def analysecell(counter, out_betas, data, cell):
    Utils.updatecounts(counter, cell, data.n)
    td = data.behavdata[cell]

    mask_aar = D.get_A_AR_trials(td)
    mask_aac = D.get_A_AC_trials(td)

    mask_t0rew_high = td.previousreward == 2
    mask_t0rew_low = td.previousreward != 2

    mask1 = mask_t0rew_high & mask_aar
    mask2 = mask_t0rew_high & mask_aac
    mask3 = mask_t0rew_low & mask_aar
    mask4 = mask_t0rew_low & mask_aac

    masks = (mask1, mask2, mask3, mask4)

    for i_epoch, epoch in enumerate(D.epochs):
        y = data.generatenormalisedepoch(cell, epoch)
        for i_mask, mask in enumerate(masks):
            out_betas[0, i_mask, i_epoch, cell] = np.mean(y[mask], axis=0)
def postData():
    try:
        name = request.json["name"]
        email = request.json["email"]
        phone = request.json["mob_no"]
    except Exception as e:
        Log.log("Error: " + str(e))
        return json.dumps(Details.Detail().__dict__)

    ans = Parser.parse(name, email, phone)
    Log.log("ans = ", ans)
    if '_id' in ans:
        ans.pop('_id', None)
    return json.dumps(ans)
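# Illustrative sketch (not part of the original file): getData()/postData() read fields
# from request.json and return JSON payloads, which suggests Flask view functions. The
# app object, route path and decorator below are assumptions added for demonstration only.
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route('/details', methods=['POST'])
def post_details():
    payload = request.json or {}
    missing = [k for k in ("name", "email", "mob_no") if k not in payload]
    if missing:
        # Mirrors the error path above, which falls back to an empty Detail object
        return jsonify({"error": f"missing fields: {missing}"}), 400
    return jsonify({k: payload[k] for k in ("name", "email", "mob_no")})

if __name__ == '__main__':
    app.run(debug=True)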
def generatenormalisedepoch(self, cell, epoch):
    savepath = f'{self.savefolder}norm/{self.area}_{cell}_{epoch}.npy'
    savedfile = Path(savepath)

    if savedfile.is_file():
        return np.load(savedfile)
    else:
        fix = self.generateaverageepoch(cell, D.sc_madefixation)
        fixstart = D.converttimetosmoothedtrace(50)  # ms after fix made
        fixstop = D.converttimetosmoothedtrace(400)
        fix = fix[:, fixstart:fixstop]

        raw = self.generateaverageepoch(cell, epoch)
        raw -= np.mean(fix)
        raw /= np.std(fix)

        # Make the save folder if it doesn't exist
        import os
        directory = os.path.dirname(f'{self.savefolder}norm/')
        if not os.path.exists(directory):
            os.makedirs(directory)

        np.save(savepath, raw)

        return raw
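# Illustrative sketch (not part of the original class): the normalisation above z-scores
# each epoch's traces against a 50-400 ms window of the fixation epoch. This is a
# stand-alone numpy version with synthetic data; the window indices here are arbitrary
# stand-ins for D.converttimetosmoothedtrace(50/400).
import numpy as np

rng = np.random.default_rng(0)
fix = rng.poisson(5, size=(40, 100)).astype(float)     # trials x timepoints, fixation epoch
epoch = rng.poisson(8, size=(40, 100)).astype(float)   # trials x timepoints, epoch of interest

baseline = fix[:, 10:40]                               # e.g. 50-400 ms after fixation made
norm = (epoch - baseline.mean()) / baseline.std()      # z-score against the baseline window
print(norm.mean(), norm.std())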
def read_accidents_from_file(date_object):
    localisations = Area.Localisation.make_dictionary()
    intersections = Area.Intersection.make_dictionary()
    type_of_collisions = Details.TypeOfCollision.make_dictionary()
    try:
        with open('Accidents.csv', 'r', encoding="ISO-8859-1") as csv_file:
            reader = csv.reader(csv_file, delimiter=',')
            for row in reader:
                date = Date.Date(read_hour(row[4]), read_minute(row[4]),
                                 int(row[3]), int(row[2]), 2000 + int(row[1]))
                if date_object == date:
                    area = Area.Area(localisations[row[6]], intersections[row[7]])
                    details = Details.Details(type_of_collisions[row[9]])
                    accident = Accident.Accident(area, date, details)
                    accidents.append(accident)
    except FileNotFoundError:
        print("Cannot open file")
def saveTable(self):
    Details.closeAllDialogs()
    return self.writeContents()
import Accident
import Area
import Date
import Details

object = Accident.Accident(
    Area.Area(Area.Localisation.In_built_up_areas, Area.Intersection.Out_of_intersection),
    Date.Date(12, 30, 5, 11, 2018),
    Details.Details(Details.TypeOfCollision.Multiple_collisions))

# properties test
assert object.area.localisation == Area.Localisation.In_built_up_areas
assert object.area.intersection == Area.Intersection.Out_of_intersection
assert object.date == Date.Date(12, 30, 5, 11, 2018)
assert object.details == Details.Details(Details.TypeOfCollision.Multiple_collisions)

# setters test
object.area = Area.Area(Area.Localisation.Out_of_agglomeration, Area.Intersection.Out_of_intersection)
object.date = Date.Date(11, 50, 28, 2, 2017)
object.details = Details.Details(Details.TypeOfCollision.By_the_side)

assert object.area.localisation == Area.Localisation.Out_of_agglomeration
assert object.date == Date.Date(11, 50, 28, 2, 2017)
assert object.details == Details.Details(Details.TypeOfCollision.By_the_side)
def generate_epoch_raw(self, cell, epoch, raster_halfwidth=D.smooth_window_halfwidth):
    savefolder = D.get_smooth_folder(raster_halfwidth)
    savepath = f'{savefolder}{self.area}_{cell}_{epoch}.npy'
    savedfile = Path(savepath)

    if savedfile.is_file():
        return np.load(savedfile)
    else:
        def loadspikes():
            return scipy.io.loadmat(self.cells_index.spikefilelocs[cell])['tSpikes'].flatten()

        def loadstrobesbytrials():
            file = self.cells_index.strobefilelocs[cell]
            strobe_codes = scipy.io.loadmat(file)['Strobes'].flatten()
            strobe_times = scipy.io.loadmat(file)['tStrobes'].flatten()

            # Remove codes we don't care about
            strobe_times = strobe_times[strobe_codes < 41]
            strobe_codes = strobe_codes[strobe_codes < 41]

            # Remove the repeating 9's and 18's that will make indexing annoying
            mask = np.ones(strobe_codes.shape, dtype=bool)
            nine_count = 0
            eighteen_count = 0
            for i, code in enumerate(strobe_codes):
                if code == 9:
                    if nine_count % 3 != 1:
                        mask[i] = False
                    nine_count += 1
                elif code == 18:
                    if eighteen_count % 3 != 1:
                        mask[i] = False
                    eighteen_count += 1

            strobe_codes = strobe_codes[mask]
            strobe_times = strobe_times[mask]

            # Now split up strobes into each trial
            strobe_codes_list = []
            strobe_times_list = []
            strobe_buffer = []
            times_buffer = []
            trial_counter = 0
            for i, (code, time) in enumerate(zip(strobe_codes, strobe_times)):
                if code == 9:
                    times_buffer = []
                    strobe_buffer = []
                elif code == 18:
                    strobe_buffer.append(code)
                    times_buffer.append(time)
                    if trial_counter in self.behavdata[cell].validtrials:  # Skip invalid trials
                        strobe_codes_list.append(np.array(strobe_buffer))
                        strobe_times_list.append(np.array(times_buffer))
                    trial_counter += 1
                else:
                    strobe_buffer.append(code)
                    times_buffer.append(time)

            out = {'codes': np.array(strobe_codes_list), 'times': np.array(strobe_times_list)}
            return out

        print(f'Generating raster for {self.area} cell {cell}')
        spikes = loadspikes()
        print(f'Spikes loaded for {self.area} cell {cell}')
        strobes = loadstrobesbytrials()
        print(f'Strobes loaded for {self.area} cell {cell}')

        # So want to make one smoothed trace per trial stacked in a matrix
        def makesmoothedtrace(spikes, timepoint):
            output = np.empty(D.smooth_outputlength)
            start = timepoint - D.smooth_prewindow
            for i in range(len(output)):
                validspikes = np.where(np.logical_and(spikes >= start - D.smooth_window_halfwidth,
                                                      spikes <= start + D.smooth_window_halfwidth))[0]
                output[i] = len(validspikes)
                start += D.smooth_step
            return output

        # For each trial make a smoothed trace
        alltraces = []
        for tr_strobe, tr_strobe_time in zip(strobes['codes'], strobes['times']):
            timepoint = tr_strobe_time[tr_strobe == epoch]
            if len(timepoint) > 1:
                raise Exception('Multiple strobe codes found!')
            elif len(timepoint) > 0:
                trace = makesmoothedtrace(spikes, timepoint)
                alltraces.append(trace)

        alltraces = np.array(alltraces)
        print(f'Traces loaded for {self.area} cell {cell}')

        # Make the save folder if it doesn't exist
        import os
        directory = os.path.dirname(savefolder)
        if not os.path.exists(directory):
            os.makedirs(directory)

        np.save(savepath, alltraces)

        return alltraces
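# Illustrative sketch (not part of the original class): the inner makesmoothedtrace()
# above is a boxcar spike count - for each output bin it counts spikes within +/- a
# half-width of a sliding timepoint. Stand-alone version with synthetic spike times;
# the function name and window parameters here are made up for demonstration.
import numpy as np

def boxcar_trace(spikes, align_time, prewindow, n_bins, step, halfwidth):
    out = np.empty(n_bins)
    t = align_time - prewindow
    for i in range(n_bins):
        # Count spikes falling inside the current window
        out[i] = np.sum((spikes >= t - halfwidth) & (spikes <= t + halfwidth))
        t += step
    return out

if __name__ == '__main__':
    rng = np.random.default_rng(0)
    spikes = np.sort(rng.uniform(0, 10, size=200))     # spike times in seconds
    trace = boxcar_trace(spikes, align_time=5.0, prewindow=1.0,
                         n_bins=50, step=0.05, halfwidth=0.1)
    print(trace[:10])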
import numpy as np
import matplotlib.pyplot as plt
import Details as D
import Utils as U

# Static plot parameters
buffer = 5  # Gap between each task event
numpoints = D.n_timepoints * D.numtrialepochs + buffer * D.numtrialepochs
binlength = D.n_timepoints + buffer
xtickpos = [int((i_epoch * binlength) + D.converttimetosmoothedtrace(0))
            for i_epoch in range(D.numtrialepochs)]
width_regplot = 15
height_regplot = 2.75


def _makeandplotavgs(avg_in, sems_in, sig_in, ax, ylab, showsig, leg_labels, show_leg, scale_sig):
    n_epochs = avg_in.shape[1]
    numpoints = D.n_timepoints * n_epochs + buffer * n_epochs

    avgs = np.empty((avg_in.shape[0], numpoints))
    avgs.fill(np.nan)
    sems = np.copy(avgs)
    sigmarker = np.empty(numpoints)
    sigmarker.fill(np.nan)
    if not scale_sig:
        sigmarker = np.copy(avgs)

    for i_epoch in range(n_epochs):
        start = i_epoch * binlength
        fin = i_epoch * binlength + D.n_timepoints

        if showsig and scale_sig:
            sigmarker[start:fin] = sig_in[i_epoch]
def saveTable(self):
    Details.closeAllDialogs()
    self.checkFileExtensions("baseName", "baseName")
    return self.writeContents()
def closeEvent(self, _event):
    try:
        if uni.isRaisedAnError is False:
            if uni.isContinueThreadAction():
                uni.cancelThreadAction()
                _event.ignore()
        uni.isStartedCloseProcess = True
        uni.printForDevelopers("Started closeEvent")
        MApplication.setQuitOnLastWindowClosed(True)
        try:
            self.PlayerBar.MusicPlayer.stop()
        except:
            pass
        Details.closeAllDialogs()
        uni.printForDevelopers("Closed Dialogs")
        if uni.isRaisedAnError is False:
            if self.Table.checkUnSavedValues() is False:
                uni.isStartedCloseProcess = False
                uni.printForDevelopers("Close ignored")
                _event.ignore()
            uni.printForDevelopers("Before self.doBeforeCloseProcesses")
            if self.doBeforeCloseProcesses() is False:
                _event.ignore()
                return None
            uni.printForDevelopers("After self.doBeforeCloseProcesses")
            if isActivePyKDE4:
                uni.printForDevelopers("Before Save KDE Configs")
                kconf = MGlobal.config()
                kconfGroup = MConfigGroup(kconf, "DirectoryOperator")
                self.FileManager.dirOperator.writeConfig(kconfGroup)
                self.FileManager.actCollection.writeSettings(kconfGroup)
                uni.printForDevelopers("After Save KDE Configs")
            uni.printForDevelopers("Before Save Configs")
            uni.setMySetting(self.Table.hiddenTableColumnsSettingKey,
                             self.Table.hiddenTableColumns)
            Bars.setAllBarsStyleToMySettings()
            Records.setRecordType(1)
            fu.writeToBinaryFile(fu.joinPath(fu.pathOfSettingsDirectory, "LastState"),
                                 self.saveState())
            Records.restoreRecordType()
            geometry = [self.geometry().x(), self.geometry().y(),
                        self.geometry().width(), self.geometry().height()]
            uni.setMySetting("MainWindowGeometries", geometry)
            uni.setMySetting("lastDirectory", self.FileManager.currentDirectory)
            uni.setMySetting("isMainWindowMaximized", self.isMaximized())
            uni.setMySetting("isShowAdvancedSelections", self.SpecialTools.isShowAdvancedSelections)
            uni.setMySetting("tableType", uni.tableType)
            uni.setMySetting("activeTabNoOfSpecialTools", self.SpecialTools.tabwTabs.currentIndex())
            uni.saveSettings()
            Settings.saveUniversalSettings()
            if uni.isActiveAmarok and uni.getBoolValue("amarokIsUseHost") is False:
                import Amarok
                uni.printForDevelopers("Before Amarok.stopEmbeddedDB")
                Amarok.stopEmbeddedDB()
                uni.printForDevelopers("After Amarok.stopEmbeddedDB")
            uni.printForDevelopers("After Save Configs")
            uni.printForDevelopers("Before self.doAfterCloseProcesses")
            self.doAfterCloseProcesses()
            uni.printForDevelopers("After self.doAfterCloseProcesses")
    except:
        if ReportBug.isClose is False:
            ReportBug.ReportBug()
            _event.ignore()
)

st.sidebar.text("Live Updated Data")
st.sidebar.text("Source: Johns Hopkins University")
st.sidebar.text(" ")
st.sidebar.write("Libraries/Packages used:")
st.sidebar.write("1) Streamlit and 2) Plotly")

allmap = g.mapall()
st.write(allmap)

country_name = st.selectbox('', b.list_all_countries, 79)
to_show_overall = g.plot_cases_of_a_country(country_name)
to_show_daily = g.plot_new_cases_of_country(country_name)
d.show_country_stats(country_name)

sorted_country_df = b.country_df.sort_values('confirmed', ascending=False)

st.write(to_show_daily)
st.write(to_show_overall)

st.header("Death Count Progress Bar")
st.write(g.plot_progressbar_country(country_name))
# g.plot_progressbar_country(country_name)

select_graph = st.selectbox('Visualization type', ['Bar plot', 'Pie chart'])
if select_graph == 'Pie chart':
def showTableDetails(self, _fileNo, _infoNo):
    Details.Details(self.values[_fileNo]["path"],
                    uni.getBoolValue("isOpenDetailsInNewWindow"))
def PlotDecStab(accs_all, savefolder, epochnames):
    fs_ax = 14
    fs_tit = 18
    width = 25
    height = 8

    n_areas = accs_all.shape[0]

    f = plt.figure(figsize=(width, height))
    gs = plt.GridSpec(3, n_areas, height_ratios=[1, 0.01, 0.5], hspace=0)
    axes = [f.add_subplot(gs[0, i]) for i in range(n_areas)]
    axes_1d = [f.add_subplot(gs[2, i]) for i in range(n_areas)]

    vmax = np.around(np.nanmax(accs_all), 2)
    vmin = np.around(np.nanmin(accs_all), 2)

    labelpos = np.array([i * (D.n_timepoints + 1) + D.converttimetosmoothedtrace(0)
                         for i in range(len(epochnames))])
    ylabelpos = len(accs_all[0]) - labelpos

    def finishing_touches(ax, arr, ax_1d, ylab, vmin, vmax):
        ax_1d.plot(np.diagonal(arr), zorder=2)

        if ylab:
            ax.set_yticks(ylabelpos)
            ax.set_yticklabels(epochnames, fontsize=fs_ax)
            ax_1d.set_yticks((vmin, vmax))
            ax_1d.set_yticklabels((vmin, vmax), fontsize=fs_ax)
            ax_1d.set_ylabel('Accuracy (%)', fontsize=fs_ax)
        else:
            ax.set_yticks([] * 2)
            ax_1d.set_yticks([] * 2)

        ax.set_xticks([] * 2)
        ax.set_title(area, fontsize=fs_tit)

        for xpos, ypos in zip(labelpos, ylabelpos):
            ax_1d.axvline(xpos, color='gray', zorder=0, linestyle='--')
            ax.axvline(xpos, color='gray', zorder=3, linestyle='--')
            ax.axhline(ypos, color='gray', zorder=3, linestyle='--')

        ax_1d.set_ylim(vmin, vmax)
        ax_1d.set_xlim(0, len(np.diagonal(arr)))
        ax_1d.set_xticks(labelpos)
        ax_1d.set_xticklabels(epochnames, fontsize=fs_ax)
        ax_1d.spines['right'].set_visible(False)
        ax_1d.spines['top'].set_visible(False)

    for i_area, (area, arr, ax_2d, ax_1d) in enumerate(zip(D.areanames, accs_all, axes, axes_1d)):
        # Have to jig array around to get it bottom left
        im_arr = arr.T[::-1]
        ax_2d.imshow(im_arr, vmax=vmax, vmin=vmin, cmap='rainbow', aspect='auto', zorder=0)
        finishing_touches(ax_2d, arr, ax_1d, i_area == 0, vmin, vmax)

    plt.tight_layout()
    f.subplots_adjust(left=0.08, right=0.95)
    U.savefig(savefolder, 'allstab')

    # Now plot areas individually
    for i_area, (area, arr, ax1) in enumerate(zip(D.areanames, accs_all, axes)):
        # Have to jig array around to get it bottom left
        im_arr = arr.T[::-1]

        f = plt.figure(figsize=(width / D.numareas, height))
        gs = plt.GridSpec(3, 1, height_ratios=[1, 0.01, 0.5], hspace=0)
        ax_2d, ax_1d = f.add_subplot(gs[0, 0]), f.add_subplot(gs[2, 0])

        vmax2 = np.around(np.nanmax(arr), 2)
        vmin2 = np.around(np.nanmin(arr), 2)

        ax_2d.imshow(im_arr, vmax=vmax2, vmin=vmin2, cmap='rainbow', aspect='auto', zorder=0)
        finishing_touches(ax_2d, arr, ax_1d, True, vmin2, vmax2)

        f.subplots_adjust(left=0.25, right=0.95)
        U.savefig(savefolder, area)
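# Illustrative sketch (not part of the original module): the stability display above
# boils down to imshow of a (train-time x test-time) accuracy matrix plus a 1-D plot of
# its diagonal (train and test on the same timepoint). Synthetic data and hypothetical
# sizes, just to show the imshow/diagonal pattern in isolation.
import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
acc = 0.5 + 0.4 * np.exp(-np.abs(np.subtract.outer(np.arange(60), np.arange(60))) / 15)
acc += rng.normal(scale=0.02, size=acc.shape)            # fake decoder accuracies

f, (ax2d, ax1d) = plt.subplots(2, 1, figsize=(4, 6))
ax2d.imshow(acc.T[::-1], cmap='rainbow', aspect='auto')  # flip so origin sits bottom-left
ax1d.plot(np.diagonal(acc))
ax1d.set_ylabel('Accuracy')
plt.tight_layout()
plt.show()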
ax.bar(range(numpnts), avg, 0.9, label=f'{monk} ({subj_n})', color=f'C{i}', edgecolor='black')
ax.errorbar(range(numpnts), avg, sem, color='black', elinewidth=2)

# Plot Rare transitions
avg = np.empty(numpnts)
avg.fill(np.nan)
sem = np.copy(avg)
avg[i + 6::3] = np.mean(avgs[i][(avgs[i] != 0)[:, 0]], axis=0)[2:]
sem[i + 6::3] = Maths.sem(avgs[i][(avgs[i] != 0)[:, 0]])[2:]
ax.bar(range(numpnts), avg, 0.9, color=f'C{i}', edgecolor='black')
ax.errorbar(range(numpnts), avg, sem, color='black', elinewidth=2)

labelpos = (0.5, 3.5, 6.5, 9.5)
labels = ['Low/Med.', 'High'] * 2
plt.xticks(labelpos, labels)
plt.xlabel('Common Rare')
plt.ylim(ymax=1)
plt.ylabel('Stay probability')
plt.legend(loc='upper right')
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
plt.tight_layout()
D.savefig_makefolder('behav', 'modelbased')
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 18 21:12:36 2019

@author: bhavya
"""

import Abstract_Keywords
import Pages_Table_Fig
import Details
import Extract_Email_Title

file_name = "CM_for_DD_DSP_Main_Document_10_xyz10868197e8315.docx"
t = file_name

Table, Figure, Pg_no = Pages_Table_Fig.No_Pages_Tables_Figs(file_name)
Key_words, Abstract_summary = Abstract_Keywords.Extract_Abstract_Keyword(file_name)
Name, Org = Details.Extract_Details(file_name)
Email = Extract_Email_Title.email(file_name)
title = Extract_Email_Title.title(file_name)
tag = Details.check_tags(file_name)
import Details

details = Details.Details(Details.TypeOfCollision.In_chain)

# properties test
assert details.type_of_collision == Details.TypeOfCollision.In_chain

# setters test
details.type_of_collision = Details.TypeOfCollision.Frontal
assert details.type_of_collision == Details.TypeOfCollision.Frontal
        context.append(domain)
        if Utils.is_empty_string(self.data.works) == False:
            context.append(self.data.works)
        if Utils.is_empty_string(self.data.studies) == False:
            context.append(self.data.studies)
        if Utils.is_empty_string(self.data.studied) == False:
            context.append(self.data.studied)
        if Utils.is_empty_string(self.data.lives) == False:
            context.append(self.data.lives)
        if Utils.is_empty_string(self.data.home) == False:
            context.append(self.data.home)

        Log.log("contexts to be searched are ", context)
        self.__search(self.data.name, context)


if __name__ == "__main__":
    br = Browser.Browser()
    data = Details.Detail()
    data.name = "Vivek Kundariya"
    data.email = "*****@*****.**"
    data.studies = "army institute of technlogy,pune"

    ld = LinkedInParser(br, data)
    ld.login()
    ld.search()