def __init__(self, conf_json, model, optimizer=None, name=None):
    try:
        data_path = os.environ['STACK_OVER_FLOW_QA']
    except KeyError:
        print(
            "STACK_OVER_FLOW_QA is not set. Set it to your clone of "
            "https://github.com/mrezende/stack_over_flow_python"
        )
        sys.exit(1)

    self.conf = Conf(conf_json)
    self.model = model(self.conf)

    if name is None:
        self.name = self.conf.name() + '_' + model.__name__
        logger.info('Initializing Evaluator ...')
        logger.info(f'Name: {self.name}')
    else:
        self.name = name

    self.path = data_path
    self.params = self.conf.training_params()
    optimizer = self.params['optimizer'] if optimizer is None else optimizer
    self.model.compile(optimizer)

    self.answers = self.load('answers.json')  # self.load('generated')
    self.training_data = self.load('training.json')
    self.dev_data = self.load('dev.json')
    self.eval_data = self.load('eval.json')

    self._vocab = None
    self._reverse_vocab = None
    self._eval_sets = None
    self.top1_ls = []
    self.mrr_ls = []
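# Note: `self.load` is called above but its body is not shown here. A minimal
# sketch of what it might look like, assuming the JSON files live under the
# directory taken from STACK_OVER_FLOW_QA and that `json` is imported at module
# level (both the body and that import are assumptions, not the original code):
def load(self, filename):
    # Read one of the dataset JSON files (answers/training/dev/eval) from disk.
    with open(os.path.join(self.path, filename), 'r', encoding='utf-8') as f:
        return json.load(f)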
def __init__(self):
    self.conf = Conf()
    self.listOfComp = self.conf.getComp()
    self.listOfSub = self.conf.getSub()
    self.tmpOpt1 = []
    self.tmpOpt2 = []
def start(self):
    local_conf = Conf("local")
    self.log_folder = local_conf.get_string("report.folder")
    if not self.log_folder:
        self.log_folder = os.getcwd() + "/log/"
    if not self.log_folder.endswith("/"):
        self.log_folder += "/"
    local_utils.prepare_template(self.log_folder)
    local_utils.prepare_current_log_folder(self.log_folder)
def __init__(self):
    conf = Conf("remote")
    remote_enable = conf.get_string("enable")
    if remote_enable.lower() == "true":
        print("Enable remote Reporter")
        self.reporters.append(difido.RemoteReport())
    else:
        print("remote reporter disabled")
        self.reporters.append(difido.Console())
def pytest_collection_finish(session):
    """
    Called after collection has been performed and modified.

    :param _pytest.main.Session session: the pytest session object
    """
    print("~~~~~~~~~~~~~~~~~~~~~~~~~ Automation Ver :: " + automation_ver + " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    conf = Conf("remote")
    description = conf.get_string("description")
    reporter.start_suite(description, testAttr)
    reporter.add_execution_properties("automation_version", automation_ver)
def save_predict_results(self, conf_json):
    conf = Conf(conf_json)

    # copy models to archive folder
    models_folder = 'models'
    models_file = f'models/weights_epoch_{conf.name()}.h5'
    os.makedirs(os.path.join(self.base_folder, models_folder), exist_ok=True)
    archive_models_file = os.path.join(self.base_folder, models_file)
    self.copy(models_file, archive_models_file)

    score_file = 'results_conf.txt'
    archive_score_file = os.path.join(self.base_folder, score_file)
    self.move(score_file, archive_score_file)
def __init__(self, args):
    super(Data_loader, self).__init__()
    print(args)
    self.conf = Conf(args)
    self.args = args
    self.patient_id = args.patient_id
    self.patient_test_date = args.patient_test_date
    self.data = {}
    self.verbose = args.verbose
    self.save_per_patient = args.save_per_patient
    self.extract_incident = args.extract_incident
    self.save_dir = args.save_dir
    self.label_previous_day = args.label_previous_day

    if self.patient_id is not None and self.patient_test_date is None:
        raise ValueError('test date must be provided')

    self.env_feat_list = {
        0: ['Fridge'],
        1: ['living room', 'Lounge'],
        2: ['Bathroom'],
        3: ['Hallway'],
        4: ['Bedroom'],
        5: ['Kitchen'],
        6: ['Microwave', 'Toaster'],
        7: ['Kettle'],
    }

    if args.incident == 'all':
        self.incident = ['UTI symptoms', 'Agitation']
    elif args.incident == 'UTI':
        self.incident = ['UTI symptoms']
    elif args.incident == 'Agitation':
        self.incident = ['Agitation']

    assert not (self.label_previous_day and self.args.extract_uti_phase), \
        'only one of them can be True'

    path = self.conf.npy_data
    if self.save_dir is not None:
        path = path + '/' + self.save_dir
    save_mkdir(path)
    self.load_env()
    self.save_data()
def save_training_results(self, conf_json):
    conf = Conf(conf_json)

    # move plots to archive folder
    # plot_folder = 'plots'
    # plot_filename = f'{conf.name()}_plot.png'
    # plot_file = os.path.join(plot_folder, plot_filename)
    # os.makedirs(os.path.join(self.base_folder, plot_folder), exist_ok=True)
    # archive_plot_result_file = os.path.join(self.base_folder, plot_file)
    # self.move(plot_file, archive_plot_result_file)

    # copy models to archive folder
    models_folder = 'models'
    models_file = f'models/weights_epoch_{conf.name()}.h5'
    os.makedirs(os.path.join(self.base_folder, models_folder), exist_ok=True)
    archive_models_file = os.path.join(self.base_folder, models_file)
    self.copy(models_file, archive_models_file)
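# Note: `self.copy` and `self.move` are used by both archive methods above but
# are not shown here. A minimal sketch of what they might look like, assuming
# they simply wrap shutil (the bodies are assumptions, not the original code):
import shutil

def copy(self, src, dst):
    # Copy a file into the archive folder, keeping the original in place.
    shutil.copy2(src, dst)

def move(self, src, dst):
    # Move a file into the archive folder, removing the original.
    shutil.move(src, dst)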
def start(self):
    conf = Conf("remote")
    self.execution_properties = conf.get_dict("execution.properties")

    details = ExecutionDetails()
    details.description = conf.get_string("description")
    details.execution_properties = self.execution_properties

    try:
        self.execution_id = remote_utils.prepare_remote_execution(details)
        self.enabled = True
    except Exception:
        self.enabled = False
        return

    machine = self.execution.get_last_machine()
    try:
        self.machine_id = remote_utils.add_machine(self.execution_id, machine)
    except Exception:
        self.enabled = False
        return

    self.retries = 10
def __init__(self):
    self.general_conf = Conf("general")
    self.init_model()
    self.start()
    self.num_of_suites_to_ignore = self.general_conf.get_int("num.of.suites.to.ignore")
def split_data(args):
    data_type = args.data_type
    conf = Conf(args)
    base_path = conf.raw_data
    save_path = conf.csv_data
    save_mkdir(save_path + '/' + data_type + '/data/')
    save_mkdir(save_path + '/' + data_type + '/flag/')

    patients = pd.read_csv(base_path + '/Patients.csv')
    ids = patients['subjectId']
    id_index = patients['sabpId']
    a = id_index[ids == ids[0]]

    observ_types = pd.read_csv(base_path + '/Observation-type.csv')
    types = {}
    for i in range(0, len(observ_types)):
        types[observ_types.loc[i]['code']] = observ_types.loc[i]['display']

    observ_devices = pd.read_csv(base_path + '/Observation-device.csv')
    devices = {}
    for i in range(0, len(observ_devices)):
        devices[observ_devices.loc[i]['code']] = observ_devices.loc[i]['display']

    observ_locs = pd.read_csv(base_path + '/Observation-location.csv')
    locs = {}
    for i in range(0, len(observ_locs)):
        locs[observ_locs.loc[i]['code']] = observ_locs.loc[i]['display']

    data = pd.read_csv(base_path + '/observations.csv')
    if data_type == 'env':
        data = data.loc[data['device'] == 408746007]  # env
    elif data_type == 'clinical':
        data = data.loc[data['device'] != 408746007]  # Clinical
    data['datetimeObserved'] = pd.to_datetime(data['datetimeObserved'])

    data_new = pd.DataFrame(columns=['subject', 'datetimeObserved', 'type', 'location', 'value'])
    data_new['subject'] = data['subject']
    data_new['datetimeObserved'] = data['datetimeObserved']
    data_new['type'] = data['type'].map(types)
    data_new['location'] = data['location'].map(locs)
    if data_type == 'env':
        data_new['value'] = data['valueBoolean']
        data_new = data_new.loc[data_new['type'].isin(['Movement', 'Door',
                                                       'Does turn on domestic appliance', 'Light'])]
        bools = {True: 1, False: 0}
        data_new['value'] = data_new['value'].map(bools)
    elif data_type == 'clinical':
        data_new['value'] = data['valueQuantity']
        data_new = data_new[data_new.value.notna()]

    for i in range(0, len(ids)):
        idx = ids[i]
        name_data = str(id_index[idx == ids][i]) + "_observation.csv"
        if data_type == 'env':
            d = data_new.loc[data_new['subject'] == idx, ['datetimeObserved', 'location', 'value']]
        elif data_type == 'clinical':
            d = data_new.loc[data_new['subject'] == idx, ['datetimeObserved', 'type', 'value']]
        d.to_csv(save_path + '/' + data_type + '/data/' + name_data)

    env_data = deepcopy(data_new)
    # if data_type == 'clinical':
    #     return

    d = pd.read_csv(base_path + '/Flag-category.csv')
    flag_types = {}
    for i in range(0, len(d)):
        flag_types[d.loc[i]['code']] = d.loc[i]['display']

    d = pd.read_csv(base_path + '/Flag-type.csv')
    flag_elements = {}
    for i in range(0, len(d)):
        flag_elements[d.loc[i]['code']] = d.loc[i]['display']

    data = pd.read_csv(base_path + '/Flags.csv')
    data['datetimeRaised'] = pd.to_datetime(data['datetimeRaised'])
    data_new = pd.DataFrame(columns=['flagId', 'subject', 'datetimeObserved', 'element', 'type'])
    data_new['subject'] = data['subject']
    data_new['datetimeObserved'] = data['datetimeRaised']
    data_new['type'] = data['category'].map(flag_types)
    data_new['element'] = data['type'].map(flag_elements)
    data_new['flagId'] = data['flagId']

    d = pd.read_csv(base_path + '/FlagValidations.csv')
    val_df = pd.DataFrame(columns=['flagId', 'valid'])
    val_df['flagId'] = d['flag']
    val_df['valid'] = d['valid']
    data_new = pd.merge(data_new, val_df, on='flagId')
    flag_data = deepcopy(data_new)

    for i in range(0, len(ids)):
        idx = ids[i]
        name_data = str(id_index[idx == ids][i]) + "_flags.csv"
        d = data_new.loc[data_new['subject'] == idx, ['datetimeObserved', 'element', 'type', 'valid']]
        d.to_csv(save_path + '/' + data_type + "/flag/" + name_data)

    summation = []
    for i in range(0, len(ids)):
        idx = ids[i]
        name_data = str(id_index[idx == ids][i]) + "obs_flag.csv"
        f_data = flag_data.loc[flag_data['subject'] == idx, ['datetimeObserved', 'element', 'type', 'valid']]
        e_data = env_data.loc[env_data['subject'] == idx, ['datetimeObserved', 'location', 'value']]
        f_data['datetimeObserved'] = f_data['datetimeObserved'].dt.date
        e_data['date'] = e_data['datetimeObserved'].dt.date
        e_data = e_data.loc[e_data['date'].isin(f_data['datetimeObserved'])]
        e_data['Patient id'] = int(id_index[idx == ids][i])
        e_data['element'] = None
        e_data['type'] = None
        e_data['valid'] = None
        for sub_date in f_data['datetimeObserved']:
            e_data.loc[e_data['date'] == sub_date, 'element'] = \
                f_data.loc[f_data['datetimeObserved'] == sub_date]['element'].values[0]
            e_data.loc[e_data['date'] == sub_date, 'type'] = \
                f_data.loc[f_data['datetimeObserved'] == sub_date]['type'].values[0]
            e_data.loc[e_data['date'] == sub_date, 'valid'] = \
                f_data.loc[f_data['datetimeObserved'] == sub_date]['valid'].values[0]
        summation.append(e_data)
        # e_data.to_csv(save_path + '/' + data_type + "/all_in_one/" + name_data)

    summation = pd.concat(summation)
    summation.to_csv(save_path + '/' + data_type + "/merged.csv")
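# Note: a minimal sketch of how split_data might be invoked from the command
# line. Only `data_type` is read directly by the function; whatever else
# `Conf(args)` expects is not shown here, so this CLI is an assumption.
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_type', choices=['env', 'clinical'], default='env')
    args = parser.parse_args()
    split_data(args)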
'''
Created on Aug 10, 2017

@author: Itai Agmon
'''
import json

from configuration import Conf
from http import client
import requests

conf = Conf("remote")


def prepare_remote_execution(details):
    res = send_request(method="POST",
                       url="http://" + conf.get_string("host") + ":" + str(conf.get_int("port")) + "/api/executions",
                       data=to_content(details),
                       headers={"Content-Type": "application/json"})
    return res.content.decode()


def add_machine(execution_id, machine):
    res = send_request("POST",
                       "http://" + conf.get_string("host") + ":" + str(conf.get_int("port")) +
                       "/api/executions/" + execution_id + "/machines/",
                       to_content(machine),
                       {"Content-Type": "application/json"})
    return res.content.decode()


def update_machine(execution_id, machine_id, machine):
    send_request("PUT",
                 "http://" + conf.get_string("host") + ":" + str(conf.get_int("port")) +
                 "/api/executions/{0}/machines/{1}".format(str(execution_id), str(machine_id)),
                 to_content(machine),
                 {"Content-Type": "application/json"})


def add_test_details(execution_id, test_details):
    send_request("POST",
                 "http://" + conf.get_string("host") + ":" + str(conf.get_int("port")) +
                 "/api/executions/{0}/details".format(str(execution_id)),
                 to_content(test_details),
                 {"Content-Type": "application/json"})
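# Note: `to_content` and `send_request` are referenced above but not shown in
# this snippet. A minimal sketch of what they might look like, assuming
# `to_content` serialises a plain object to JSON and `send_request` wraps the
# `requests` library (both bodies are assumptions, not the original code):
def to_content(obj):
    # Serialise a dict, or a plain object's attributes, to a JSON string.
    return json.dumps(obj if isinstance(obj, dict) else vars(obj))


def send_request(method, url, data=None, headers=None):
    # Issue the HTTP request and raise on non-2xx responses.
    res = requests.request(method, url, data=data, headers=headers)
    res.raise_for_status()
    return res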