def __init__(self, run_id=None, pipeline_name=None, working_dir=None):
    """Set up run identity and the on-disk directory layout for a pipeline run.

    Args:
        run_id: identifier for this run; generated via ``self._get_run_id()``
            when omitted.
        pipeline_name: name of the pipeline; defaults to ``'pipeline'``.
        working_dir: root directory for all run artifacts; defaults to the
            current working directory.  Created if it does not exist.
    """
    if run_id is None:
        run_id = self._get_run_id()
    if pipeline_name is None:
        pipeline_name = 'pipeline'
    if working_dir is None:
        working_dir = os.getcwd()
    make_dir(working_dir)
    self.run_id = run_id
    self.sample_ids = []
    self.pipeline_name = pipeline_name
    # Fix: the original wrapped abspath() in a single-argument
    # os.path.join(), which is a no-op; abspath alone is equivalent.
    self.working_dir = os.path.abspath(working_dir)
    self.pipeline_dir = os.path.join(self.working_dir, self.pipeline_name)
    make_dir(self.pipeline_dir)
    # Standard sub-directory layout used throughout the run.
    self.logs_dir = self._make_dir('logs')
    self.outputs_dir = self._make_dir('outputs')
    self.results_dir = self._make_dir('outputs/results')
    self.scripts_dir = self._make_dir('scripts')
    self.sentinels_dir = self._make_dir('sentinels')
def _make_intermediate_pipeline(self, pipeline_name, config_file, sample_id):
    """Build a per-sample pipeline script from an intermediate config file
    and register it on ``self.pipelines``."""
    scripts_dir = os.path.join(self.args.working_dir,
                               'intermediate_pipeline_scripts')
    make_dir(scripts_dir)
    pipeline = self._make_pipeline(pipeline_name, config_file,
                                   scripts_dir, sample_id)
    self.pipelines.append(pipeline)
class BaseConfig(object): PROJECT = "app" # Get app root path, also can use flask.root_path. PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) DEBUG = False USE_EMAIL = True TESTING = False PROD = False ADMINS = ['*****@*****.**'] #for session SECRET_KEY = 'RANDOM_SECRET_KEY' API_ROOT = 'api' make_dir(INSTANCE_FOLDER_PATH) LOG_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'logs') make_dir(LOG_FOLDER) # Fild upload, should override in production. # Limited the maximum allowed payload to 8 megabytes. # http://flask.pocoo.org/docs/patterns/fileuploads/#improving-uploads MAX_CONTENT_LENGTH = 8 * 1024 * 1024 UPLOAD_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'uploads') make_dir(UPLOAD_FOLDER)
def _make_intermediate_config_file(self, sample_id, sample_dict):
    """Write a per-sample YAML config derived from the original config file
    and return the new file's path."""
    out_dir = os.path.join(self.args.working_dir, 'intermediate_config_files')
    make_dir(out_dir)
    stem = os.path.splitext(os.path.basename(self.args.config_file))[0]
    out_path = os.path.join(out_dir,
                            sample_id + '_' + stem + '_kronos' + '.yaml')
    updated_dict = self.c.update_config_dict(sample_dict)
    Configurer.print2yaml(updated_dict, out_path)
    return out_path
def _make_intermediate_config_file(self, sample_id, sample_dict):
    """Derive a per-sample YAML config from the original config file.

    Returns the path of the newly written file.
    """
    config_dir = os.path.join(self.args.working_dir,
                              'intermediate_config_files')
    make_dir(config_dir)
    base_name = os.path.basename(self.args.config_file)
    stem = os.path.splitext(base_name)[0] + '_kronos'
    target = os.path.join(config_dir, sample_id + '_' + stem + '.yaml')
    Configurer.print2yaml(self.c.update_config_dict(sample_dict), target)
    return target
def __init__(self, log_dir=None, working_dir=None, qsub_options=None):
    """Initialize a job runner.

    Args:
        log_dir: directory for job logs; defaults to ``working_dir``.
        working_dir: job working directory; defaults to the current
            directory.  Only explicitly supplied directories are created.
        qsub_options: extra scheduler options, stored as-is.
    """
    if working_dir is None:
        working_dir = os.getcwd()
    else:
        make_dir(working_dir)
    if log_dir is None:
        log_dir = working_dir
    else:
        make_dir(log_dir)
    self.job_ids = Queue()
    self.log_dir = os.path.abspath(log_dir)
    self.working_dir = os.path.abspath(working_dir)
    self.qsub_options = qsub_options
def __init__(self, log_dir=None, working_dir=None):
    """Initialize a local job runner.

    Args:
        log_dir: directory for job logs; defaults to ``working_dir``.
        working_dir: job working directory; defaults to the current
            directory.  Only explicitly supplied directories are created.
    """
    if working_dir is None:
        working_dir = os.getcwd()
    else:
        make_dir(working_dir)
    if log_dir is None:
        log_dir = working_dir
    else:
        make_dir(log_dir)
    self.job_ids = []
    self.log_dir = os.path.abspath(log_dir)
    self.working_dir = os.path.abspath(working_dir)
def __init__(self, drmaa_library_path, log_dir=None, working_dir=None, qsub_options=None):
    """Initialize an SGE/DRMAA-backed job runner.

    Args:
        drmaa_library_path: path to the DRMAA shared library, relative to
            ``$SGE_ROOT``.
        log_dir: directory for job logs; defaults to ``working_dir``.
        working_dir: job working directory; defaults to the current
            directory.  Only explicitly supplied directories are created.
        qsub_options: extra qsub options, stored as-is.

    Raises:
        KeyError: if the ``SGE_ROOT`` environment variable is not set.
    """
    if working_dir is not None:
        make_dir(working_dir)
    else:
        working_dir = os.getcwd()
    if log_dir is not None:
        make_dir(log_dir)
    else:
        log_dir = working_dir
    ## export DRMAA_LIBRARY_PATH to import drmaa
    # NOTE: the environment variable must be set BEFORE importing drmaa
    # (the binding reads it at import time), which is why the import lives
    # inside __init__ rather than at the top of the file.
    os.environ['DRMAA_LIBRARY_PATH'] = os.path.join(os.environ['SGE_ROOT'], drmaa_library_path)
    import drmaa
    self.job_ids = Queue()
    # Keep a reference to the module so other methods can use it without
    # re-importing.
    self.drmaa = drmaa
    self.log_dir = os.path.abspath(log_dir)
    self.working_dir = os.path.abspath(working_dir)
    self.qsub_options = qsub_options
def __init__(self, pipeline_name, config_file, script_dir=None, sample_id=None):
    """Initialize a pipeline-script builder.

    Args:
        pipeline_name: name of the pipeline; also names the output script.
        config_file: path to the YAML config parsed by WorkFlow.
        script_dir: directory where the generated pipeline script is
            written; defaults to the current working directory, resolved
            at call time.  Created if it does not exist.
        sample_id: optional sample identifier for this sub-pipeline.

    Bug fix: the original signature used ``script_dir=os.getcwd()``, which
    is evaluated once at definition time, so any later ``os.chdir()`` was
    silently ignored.  The default is now ``None`` and resolved per call.
    """
    if script_dir is None:
        script_dir = os.getcwd()
    self.pipeline_name = pipeline_name
    self.config_file = config_file
    self.script_dir = script_dir
    self.sample_id = sample_id
    make_dir(self.script_dir)
    ## path to where the resultant pipeline script is written
    self.pipeline_script = os.path.join(self.script_dir,
                                        self.pipeline_name + '.py')
    ## use the WorkFlow to parse/make the config file
    self.wf = WorkFlow(config_file)
    ## holds the starting point of the sub pipeline, key:tag value:task_object
    self.start_task = {}
    ## holds the end point of the sub pipeline, key:tag value:task_object
    self.stop_task = {}
    ## list of all the inputs to the pipeline, i.e. set of the inputs of
    ## all the root tasks. A dict with k:input_params and v:input_arguments
    self.inputs = {}
def __init__(self, args):
    """Store the parsed CLI arguments, create the working directory, and
    set up an empty pipeline list plus a Configurer instance."""
    self.args = args
    self.c = Configurer()
    self.pipelines = []
    make_dir(self.args.working_dir)
# NOTE(review): this is a fragment — the training statements below appear
# to belong to a function whose `def` line is outside this view, and the
# __main__ argparse block is truncated after the --epochs argument.
# Confirm against the full file before restructuring.
history = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(x_test, y_test), callbacks=[early_stopping, model_checkpoint, reduce_lr, tensor_board])
score = model.evaluate(x_test, y_test, verbose=0)
print('Test score:', score[0])
print('Test accuracy:', score[1])
# Persist the model; the filename embeds the final training
# categorical-crossentropy value via STAMP.format(...).
save_model(model, 'model/'+STAMP.format(history.history['categorical_crossentropy'][-1])+'model.h5')

if __name__ == '__main__':
    # presumably creates the default output directories — make_dir is
    # defined elsewhere; verify it accepts zero arguments.
    make_dir()
    parser = argparse.ArgumentParser(usage='A training program for classifying the OAHEGA dataset')
    parser.add_argument('-f', '--file', type=str, help='Path to csv file data', required=True)
    parser.add_argument('-a', '--abs_path', type=str, help='Absolute path to data', required=True)
    parser.add_argument('-m', '--model', type=str, help='model to be trained (student,distill).'
                        ' If student is selected than path to pretrained teacher must be specified in --teacher parameter', required=True)
    parser.add_argument('-t', '--teacher', type=str, help='path to .h5 file with weight of pretrained teacher model'
                        , default='../mobilenet_emotions/models/model0.8267208413001912.h5')
    parser.add_argument('--width', type=int, default=128, help='Width of the images')
    parser.add_argument('--height', type=int, default=128, help='Height of the images')
    parser.add_argument('--epochs', type=int, default=12, help='Number of epochs to train on')
def _make_dir(self, dir_name):
    """Ensure ``dir_name`` exists under ``self.pipeline_dir`` and return
    the joined path."""
    path = os.path.join(self.pipeline_dir, dir_name)
    make_dir(path)
    return path