def __init__(self):
    """Set up the Khiops result directories and the helper objects.

    All paths are anchored on the directory containing this source file.
    """
    logging.info(pk.getKhiopsInfo())
    self.this_file_dir = os.path.dirname(os.path.realpath(__file__))
    # path mgmt: use timestamp of each exec in paths
    self.fh = FileHelper()
    self.dictionary_file = os.path.join(self.this_file_dir, "dic", "series.kdic")
    khiops_res = os.path.join(self.this_file_dir, "res", "khiops_res")
    self.classif_res = os.path.join(khiops_res, "classif")
    self.coclus_res = os.path.join(khiops_res, "coclus")
    self.pred_res = os.path.join(khiops_res, "pred_res")
    # NOTE(review): dictionary_file is a file path, not a directory —
    # presumably FileHelper.ensure_dirs_exist tolerates that; confirm.
    self.fh.ensure_dirs_exist([
        self.dictionary_file,
        self.classif_res,
        self.coclus_res,
        self.pred_res,
    ])
    self.ccr = CoclusteringResults()
    self.utils = MyUtils()
    logging.info("Khiops manager instantiated")
    logging.info("dictionary_file used: %s", self.dictionary_file)
def __init__(self, file_name):
    """Wire up the helper machines and the forecast output folder.

    :param file_name: base name of the input series file; forecasts are
        written under res/fcasts/<file_name>/ecml.
    """
    # Anchor everything on the directory of this source file.
    self.this_file_dir = os.path.dirname(os.path.realpath(__file__))
    self.file_name = file_name
    self.my_metric = "euclidean"
    # Shared helpers / worker machines.
    self.utils_cl = MyUtils()
    self.fh = FileHelper()
    self.hm = HmmMachine()
    self.cm = ClusteringMachine()
    self.pm = PredictMachine()
    self.out_fcasts_f = os.path.join(
        self.this_file_dir, "res", "fcasts", file_name, "ecml"
    )
    self.fh.ensure_dirs_exist([self.out_fcasts_f])
    logging.info("Instantiated ECML_operator")
def __init__(self):
    """Run the maintenance steps selected via command-line flags.

    Each step is gated by its own sys.argv flag and is best-effort:
    a failure is printed and the remaining steps still run.  The five
    original copy-pasted flag/try/print stanzas are collapsed into one
    table driving a single private helper; every printed string is
    produced verbatim from the original templates.
    """
    steps = [
        ('-before-file-replace', 'pre-processing file replaces',
         lambda: FileHelper().copy_and_replace_files(
             file_path_def_list=LocalSettingsLoader().LOCAL_SETTINGS[
                 'FILE_REPLACE_DEF_LIST_BEFORE_DAILY_OPERATION'])),
        ('-music-sync', 'music video file sync',
         lambda: YoutubeMusicVideoSync().sync_mvs()),
        ('-clone-repos', 'git repo clone check',
         lambda: GitOperations().clone_missing_repos()),
        ('-git-repos-sync', 'git repos sync',
         lambda: GitOperations().fetch_all_repos_and_reset_hard()),
        ('-after-file-replace', 'post-processing file replace',
         lambda: FileHelper().copy_and_replace_files(
             file_path_def_list=LocalSettingsLoader().LOCAL_SETTINGS[
                 'FILE_REPLACE_DEF_LIST_AFTER_DAILY_OPERATION'])),
    ]
    for flag, label, action in steps:
        self._run_step(flag, label, action)

def _run_step(self, flag, label, action):
    """Run one optional maintenance action if its CLI flag is present.

    :param flag: sys.argv flag gating the step (e.g. '-music-sync').
    :param label: human-readable step name used in the INFO/ERROR lines.
    :param action: zero-argument callable performing the step.
    """
    if flag not in sys.argv:
        return
    print('INFO: {0} started.....'.format(label))
    try:
        action()
    except Exception as ex:
        # Best-effort by design: report and keep going with later steps.
        print('ERROR: {0} error.'.format(label), ex)
    print('INFO: {0} completed.'.format(label))
def main():
    """Spawn one zombie client per account, log each in, then relay URLs.

    Reads account credentials via FileHelper, starts a Thread per account
    to register it with the net, logs every zombie in, and finally loops
    reading URLs from stdin until the user types 'exit'.
    """
    net = Spotify_Net()
    accounts = FileHelper().read_accounts()
    for account in accounts:
        t = Thread(target=add, args=[net, account])
        t.start()
    # BUG FIX: the original read `accounts[i]` but the loop variable was
    # `index`, so `i` was undefined -> NameError on the first iteration.
    # Pair each zombie with its credentials directly instead.
    # NOTE(review): net.ZOMBIES is filled by the worker threads above with
    # no join/synchronization — presumably `add` completes fast; confirm.
    for zombie, credentials in zip(net.ZOMBIES, accounts):
        zombie.login(credentials[0], credentials[1])
    while True:
        url = input("[*] Enter a url to play: ")
        if str(url).lower() == "exit":
            for zombie in net.ZOMBIES:
                zombie.close()
            break
        net.play_song(url)
def run(self):
    """End-to-end training run: convert the dataset, fit the RNN, save, plot."""
    print("Starting...")
    # Convert the raw JSON dataset into the file format the model consumes.
    dataset_path = FileHelper().convert_dataset_file(
        "./data/humidity_training_and_testing.json")
    # dataset_path = "./data/sinewave.csv"  # alternate toy dataset
    print("New file {0} has been created.".format(dataset_path))
    rnn = RecurrentModel(
        epochs=100,
        use_differences=False,
        use_normalization=False,
        input_length=40,
        output_length=10,
        training_record_ratio=0.5,
    )
    rnn.prepare_data(dataset_path)
    # Train, persist a timestamped snapshot, then show the result graph.
    rnn.train_network()
    rnn.save_model(
        "{0}_model_{1}".format(dataset_path, datetime.now().timestamp()))
    rnn.display_results()
    print("Done.")
def __init__(self):
    """Cache the configured YouTube video ids and the shared helper objects."""
    settings = LocalSettingsLoader().LOCAL_SETTINGS
    self.YOUTUBE_VIDEO_ID_LIST = settings['YOUTUBE_VIDEO_ID_LIST']
    self.file_helper_obj = FileHelper()
    self.shell_executor = ShellExecutor()
from file_helper import FileHelper

# Demo script: collect a user's personal info, append it to a text file,
# then re-store it as JSON and read it back.
name = input('input your name\n')
gender = input('input your gender\n')
age = input('input your age\n')

helper = FileHelper(r'study_11\user_info.txt')
# Append each field on its own line, in input order.
for field in (name, gender, age):
    helper.write_append(field + '\n')

print('==========读取个人信息=========')
print(helper.read())

print('==========序列化存储=========')
# Clear the plain-text content, then persist the same data as JSON.
helper.remove_content()
helper.write_to_json({'name': name, 'gender': gender, 'age': age})
obj = helper.json_to_object()
print(obj['name'] + '_' + obj['gender'] + '_' + obj['age'])
# Per-run result/working directories, all rooted at this_file_dir
# (defined earlier in the file — not visible in this chunk).
out_path_res = os.path.join(this_file_dir, "res")
in_dir = os.path.join(this_file_dir, "data", "csvs")
out_dir_res = os.path.join(this_file_dir, "res")
out_dir_khiops = os.path.join(this_file_dir, "res", "khiops_res")
out_dir_fcasts = os.path.join(this_file_dir, "res", "fcasts")
# Aggregate CSV outputs written into the result root.
glob_csv_res = os.path.join(out_path_res, "all.csv")
vali_csv_res = os.path.join(out_path_res, "valid.csv")
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
# Bunch of cleanings for empty files
from file_helper import FileHelper
# tstmp is presumably a run timestamp set earlier in the file — not visible here.
fh = FileHelper(tstmp)
fh.clean_zips_folder()
fh.clean_res_folder(out_dir_res)
fh.ensure_dirs_exist([out_path_res, in_dir, out_dir_res, out_dir_khiops, out_dir_fcasts])
# After cleaning, zip the code which is executed now
fh.zip_code()
# Init objects (imports deliberately interleaved with execution so the
# cleanup above runs before the heavier project modules are loaded).
from khiops import KhiopsManager
km = KhiopsManager()
from my_utils import MyUtils
utils = MyUtils()
from ecml_machine import ECMLMachine