def audio():
    """Locate an audio source and hand it to the converter.

    Preference order: explicit audio file (-a), video file to extract
    audio from (-v), then files derived from the base name (-b).
    Returns whatever DataConverter.main yields for the chosen file;
    if nothing usable is found, prints help and exits the program.
    """
    # Fix: Python 2 `print` statement converted to the print() call,
    # which behaves identically on both Python 2 and Python 3.
    print("Loading audio Data")
    if args['a']:  # use the audio file or numpy file if it exists
        return DataConverter.main(args['a'])
    if args['v']:  # extract audio from video data
        return DataConverter.main(args['v'])
    if args['b']:
        # Prefer the pre-converted numpy dump, then fall back to the raw wav.
        npy = args['b'] + ".wav.npy"
        if os.path.isfile(npy):
            return DataConverter.main(npy)
        wav = args['b'] + ".wav"
        if os.path.isfile(wav):
            return DataConverter.main(wav)
    # No source matched: surface the failure through the argument parser.
    parser.epilog = "No audio file found."
    parser.print_help()
    exit()
def __init__(self):
    """Build the main window, canvas and table images, then spawn the
    initial humans from the staying-point data and start the update loop."""
    self.world = tkinter.Tk()
    self.world.title(u"Syokudou")
    self.world.geometry("1280x720")

    self.canvas = tkinter.Canvas(self.world, width=1280, height=720)
    self.canvas.create_rectangle(0, 0, (gra_WIDTH), (gra_HEIGHT), fill='green')

    # Load the image assets and wrap each one as a Tk-compatible photo.
    self.Table_img = Image.open('img/objset.png')
    self.RoundTable_img = Image.open('img/round.png')
    self.RoundHalf_img = Image.open('img/round_half.png')
    self.WindowTable_img = Image.open('img/windowtable.png')
    self.Table_img = ImageTk.PhotoImage(self.Table_img)
    self.RoundTable_img = ImageTk.PhotoImage(self.RoundTable_img)
    self.RoundHalf_img = ImageTk.PhotoImage(self.RoundHalf_img)
    self.WindowTable_img = ImageTk.PhotoImage(self.WindowTable_img)

    self.canvas.place(x=100, y=100)

    # Slip shifts the starting position of the grid when needed; cells are
    # drawn higher by the length Slip represents.
    self.Slip = 0

    # One human per staying point reported by the data source.
    stay_points = dc.data()
    self.humans = [
        human.human(1, stay_points.getX(idx), stay_points.getY(idx))
        for idx in range(stay_points.getLength())
    ]
    self.world.after(10, self.update_humans)
def dataAq():
    """Locate a data-acquisition input file and hand it to the converter.

    Preference order: explicit data file (-d), then files derived from
    the base name (-b): .dat.npy, .npy, .dat. Returns whatever
    DataConverter.main yields for the chosen file; if nothing usable is
    found, prints help and exits the program.
    """
    # Fix: Python 2 `print` statement converted to the print() call,
    # which behaves identically on both Python 2 and Python 3.
    print("Loading Data Acquisition Input")
    if args['d']:  # use the data file or numpy file if it exists
        return DataConverter.main(args['d'])
    if args['b']:
        # Prefer the pre-converted numpy dumps, then the raw .dat file.
        npy = args['b'] + ".dat.npy"
        if os.path.isfile(npy):
            return DataConverter.main(npy)
        npy = args['b'] + ".npy"
        if os.path.isfile(npy):
            return DataConverter.main(npy)
        dat = args['b'] + ".dat"
        if os.path.isfile(dat):
            return DataConverter.main(dat)
    # No source matched: surface the failure through the argument parser.
    parser.epilog = "No data file found."
    parser.print_help()
    exit()
def main(_):
    """Entry point: build the network on top of the data converter and train it.

    The unused positional argument matches the tf.app.run callback signature.
    """
    converter = dc.DataConverter(DATA_PATH)
    with tf.Session() as session:
        net = nk.Network(session, NAME, converter)
        # All variables must exist before training starts.
        session.run(tf.global_variables_initializer())
        net.Train(converter)
def dataAq():
    """Locate a data-acquisition input file and hand it to the converter.

    Preference order: explicit data file (-d), then files derived from
    the base name (-b): .dat.npy, .npy, .dat. Returns whatever
    DataConverter.main yields for the chosen file; if nothing usable is
    found, prints help and exits the program.
    """
    # Fixes: Python 2 `print` statement converted to the print() call
    # (identical behavior on Py2/Py3), and the user-facing message's
    # misspelling "Aquisition" corrected to "Acquisition".
    print("Loading Data Acquisition Input")
    if args['d']:  # use the data file or numpy file if it exists
        return DataConverter.main(args['d'])
    if args['b']:
        # Prefer the pre-converted numpy dumps, then the raw .dat file.
        npy = args['b'] + ".dat.npy"
        if os.path.isfile(npy):
            return DataConverter.main(npy)
        npy = args['b'] + ".npy"
        if os.path.isfile(npy):
            return DataConverter.main(npy)
        dat = args['b'] + ".dat"
        if os.path.isfile(dat):
            return DataConverter.main(dat)
    # No source matched: surface the failure through the argument parser.
    parser.epilog = "No data file found."
    parser.print_help()
    exit()
def test_merge_lists_no_equal(self):
    """merge_lists with disjoint inputs keeps every bar, in order, and
    returns the api list unchanged as the short list."""
    def bar_at(stamp):
        # Only the timestamp varies; every other field is a fixed fixture.
        return Bar.Bar('MSFT', 1, stamp, 171.92, 172.25, 171.79, 171.95, 534118)

    # Given
    database_list = [
        bar_at('2020-04-15 15:55:00'),
        bar_at('2020-04-15 15:56:00'),
    ]
    api_list = [
        bar_at('2020-04-15 15:59:00'),
        bar_at('2020-04-15 16:00:00'),
    ]

    # When
    merged_list, api_list_short = DataConverter.merge_lists(
        database_list, api_list)

    # Then
    expected_api_list_short = [
        bar_at('2020-04-15 15:59:00'),
        bar_at('2020-04-15 16:00:00'),
    ]
    expected_merged_list = [
        bar_at('2020-04-15 15:55:00'),
        bar_at('2020-04-15 15:56:00'),
        bar_at('2020-04-15 15:59:00'),
        bar_at('2020-04-15 16:00:00'),
    ]
    self.assertEqual(len(expected_merged_list), len(merged_list))
    for expected, actual in zip(expected_merged_list, merged_list):
        self.assertEqual(expected.time_stamp, actual.time_stamp)
    self.assertEqual(len(expected_api_list_short), len(api_list_short))
    for expected, actual in zip(expected_api_list_short, api_list_short):
        self.assertEqual(expected.time_stamp, actual.time_stamp)
def printOutput(self):
    """Return the sanitized elements of this collection, deduplicated and sorted."""
    cleaned = {DataConverter.sanitize(entry) for entry in self}
    return sorted(cleaned)
import DataConverter
import FileManager

# Read the named data set and report its sanitized values, deduplicated
# and sorted.
data = FileManager.getDictionaryData("data.txt")
cleaned_values = {DataConverter.sanitize(value) for value in data['values']}
print(data['name'] + ' data is', sorted(cleaned_values))
import DataConverter
import FileManager

# Read the raw readings, sanitize each one, and print them deduplicated
# and sorted.
#
# Fix: the previous version tested the *raw* reading against a list of
# already-*sanitized* values before appending, so duplicates that only
# collide after sanitizing slipped through the first pass, and a raw
# reading that happened to equal some sanitized value was wrongly
# dropped. Sanitizing first and deduplicating with a set (as the sibling
# scripts in this project do) gives the intended result.
data = FileManager.getListData("temp.txt")
unique_data = sorted({DataConverter.sanitize(each_t) for each_t in data})
print(unique_data)
import DataConverter
import FileManager

# The file starts with a name line and a date-of-birth line; everything
# after those two lines is the list of measurements.
raw_lines = FileManager.getListData("data.txt")
sandy_data = {}
sandy_data['name'] = raw_lines.pop(0)
sandy_data['dob'] = raw_lines.pop(0)
sandy_data['values'] = raw_lines

# Report the sanitized measurements, deduplicated and sorted.
unique_values = sorted({
    DataConverter.sanitize(entry) for entry in sandy_data['values']
})
print(sandy_data['name'] + ' data is', unique_values)
import DataConverter
import FileManager

# Report the sanitized readings from the temperature file, deduplicated
# and sorted.
readings = FileManager.getListData("temp.txt")
sanitized_readings = {DataConverter.sanitize(reading) for reading in readings}
print(sorted(sanitized_readings))
def main():
    """Render the company-bankruptcy-forecasting Streamlit app.

    A sidebar navigator selects one of four pages: theory, practice
    (model selection and evaluation), the data description, and the
    preprocessing page.
    """
    st.sidebar.header('Прогнозирование банкротства компаний')
    page = st.sidebar.selectbox(
        "Навигатор", ["Теория", "Практика", "О данных", "Предобработка данных"])
    data = load_data()
    st.set_option('deprecation.showPyplotGlobalUse', False)

    if page == "Теория":
        st.header("Прогнозирование банкротства компаний")
        st.write("Please select a page on the left.")
        st.header("О методах прогнозирования, используемых в программе")
        aboutModels()
        info()
    elif page == "О данных":
        AboutData(data)
        Visualize(data)
    elif page == "Предобработка данных":
        st.title("Предобработка данных")
        st.write(data.head())
        dc.PrepFor()
    elif page == "Практика":
        st.title("Data Exploration")
        st.write(data.head())
        st.write("""# Прогноз финансовых бедствий различных компаний""")
        # Fix: the original repeated an identical elif branch
        # (add_parameter_ui + get_classifier) once per model name;
        # a single membership test covers them all.
        models = [
            "LightGBM", "Stochastic Gradient Decent", "Decision Tree",
            "Naive Bayes", "Support Vector Machines", "KNN",
            "Logistic Regression", "Random Forest", "Linear Regression",
            "XGBoost", "ANN",
        ]
        method = st.selectbox("Выбор метода прогнозирования",
                              ["Выбор модели"] + models + ["Вывод"])
        if method == "Выбор модели":
            st.write("Выбор метода")
        elif method == "Вывод":
            st.title("Сравнение моделей")
            st.image('DataAnal/диплом/pic/Итог.PNG')
        elif method in models:
            # Every model shares the same flow: collect its
            # hyper-parameters, then build and evaluate the classifier.
            params = add_parameter_ui(method)
            get_classifier(method, params)
skipSmallerThan = int(args.skipSmallerThan) resampleTo = int(args.resampleTo) csvSep = args.csvSep addXYZAxes = args.addXYZAxes tickDistance = args.tickDistance addCustomAxes = args.addCustomAxes # Case 1: Use the command line interface loadFromCmd = tgmmPath != None or csvPath != None or biotracksPath != None or svfPath != None loaderState2 = None if loadFromCmd: if csvPath is not None: print("Load from CSV...") loader = dc.CsvLoader(csvPath, resampleTo=resampleTo, minTrackLength=skipSmallerThan, firstLineIsHeader=(csvNoHeader is None), csvSeparator=csvSep) if addState2 is not None: loaderState2 = dc.CsvLoader( addState2, resampleTo=resampleTo, minTrackLength=skipSmallerThan, firstLineIsHeader=(csvNoHeader is None), csvSeparator=csvSep) if tgmmPath is not None: print("Load from TGMM...") loader = dc.TgmmLoader( tgmmPath, resampleTo=resampleTo, minTrackLength=skipSmallerThan,
import DataConverter

# Smoke check: load the converter's data set and show the first
# x-coordinate it reports.
converter_data = DataConverter.data()
print(converter_data.getX(0))