from datetime import datetime

import pandas as pd

# NOTE: `fc` is the project's clustering helper module (it provides
# run_affinity_propagation and run_ward_cluster); the exact import path is an assumption.
import fc


# BigNeuron variant: cluster the cleaned morphology features with affinity
# propagation and/or Ward hierarchical clustering.
def cluster_analysis(clean_feature_file, feature_names, output_dir, method='ward'):
    print(datetime.now().strftime('starting:%Y-%m-%d %H:%M:%S'))

    df_f = pd.read_csv(clean_feature_file)
    # all_feature_names = gl_feature_names
    print("There are %d neurons in this dataset" % df_f.shape[0])

    REMOVE_OUTLIERS = 1  # 1 = drop outliers; 0 = clip the dataset instead
    if REMOVE_OUTLIERS > 0:
        postfix = "_ol_removed"
    else:
        postfix = "_ol_clipped_5_glonly"

    if method == "ap" or method == "all":
        fc.run_affinity_propagation(df_f, feature_names, output_dir, postfix)

    num_clusters = 1000
    if method == "ward" or method == "all":
        fc.run_ward_cluster(df_features=df_f, feature_names=feature_names,
                            num_clusters=num_clusters, output_dir=output_dir,
                            output_postfix=postfix, experiment_type='bigneuron',
                            low=500, high=1500, plot_heatmap=0,
                            RemoveOutliers=REMOVE_OUTLIERS)

    print(datetime.now().strftime('end:%Y-%m-%d %H:%M:%S'))
    return
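# Example invocation of the BigNeuron variant above (a sketch, not part of the original
# pipeline): the CSV path, feature names, and output directory are hypothetical placeholders.
if __name__ == '__main__':
    example_features = ['total_length', 'num_branches', 'max_branch_order']  # hypothetical columns
    cluster_analysis('bigneuron_features_cleaned.csv',  # hypothetical cleaned-feature CSV
                     example_features,
                     './bigneuron_clustering',          # hypothetical output directory
                     method='ward')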
# IVSCC variant: same pipeline, additionally parameterized by a feature-set label
# and a directory of SWC reconstructions.
def cluster_analysis(clean_feature_file, feature_names, output_dir, feature_set_type,
                     method='ward', swc_path=None):
    print(datetime.now().strftime('starting:%Y-%m-%d %H:%M:%S'))

    if swc_path is None:
        swc_path = "./SWC"

    df_f = pd.read_csv(clean_feature_file)
    # all_feature_names = gl_feature_names
    print("There are %d neurons in this dataset" % df_f.shape[0])

    REMOVE_OUTLIERS = 1  # 1 = drop outliers; 0 = clip the dataset instead
    if REMOVE_OUTLIERS > 0:
        postfix = "_ol_removed_" + feature_set_type
    else:
        postfix = "_ol_clipped_5_glonly"

    if method == "ap" or method == "all":
        fc.run_affinity_propagation(df_f, feature_names, output_dir, postfix,
                                    swc_path, REMOVE_OUTLIERS)

    num_clusters = 12
    if method == "ward" or method == "all":
        fc.run_ward_cluster(df_features=df_f, feature_names=feature_names,
                            num_clusters=num_clusters, output_dir=output_dir,
                            output_postfix=postfix, experiment_type='ivscc',
                            low=8, high=35, plot_heatmap=0,
                            RemoveOutliers=REMOVE_OUTLIERS, swc_path=swc_path)

    print(datetime.now().strftime('end:%Y-%m-%d %H:%M:%S'))
    return
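# Example invocation of the IVSCC variant above (a sketch): the CSV path, feature names,
# feature_set_type label, and SWC directory are hypothetical placeholders.
if __name__ == '__main__':
    ivscc_features = ['total_length', 'num_branches', 'average_contraction']  # hypothetical columns
    cluster_analysis('ivscc_features_cleaned.csv',    # hypothetical cleaned-feature CSV
                     ivscc_features,
                     './ivscc_clustering',            # hypothetical output directory
                     feature_set_type='all_features', # hypothetical feature-set label
                     method='all',                    # run both affinity propagation and Ward
                     swc_path='./SWC')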