Example 1
import numpy as np

# Note: load_model is assumed to be provided by the surrounding module.


def compute_errors_rel(fileslist, dofs=None, shift=1):
    """Compute the relative error defined in eq. 3.36 of Clara's manuscript.

    fileslist must map each frequency to the h5 file holding the results
    of the corresponding simulation; the highest frequency is taken as
    the reference.
    """
    fref = max(fileslist.keys())

    # Load the reference simulation (highest frequency).
    reference_file = fileslist[fref]
    ref_model, ref_string, ref_frets, e = load_model(reference_file)
    # If dofs is None, errors are computed for all degrees of freedom.
    if dofs is None:
        dofs = list(range(ref_string.dimension()))

    sref = ref_model.data_ds[ref_string][dofs, ::shift]
    freqs = []
    tref = ref_model.time[::shift]
    files = list(fileslist.values())
    files = [f for f in files if f != reference_file]
    # Number of freqs taken into account
    nbfiles = len(files)
    # Number of dofs where errors are computed
    nbpoints = len(dofs)
    # Number of contact points
    nbfrets = len(ref_frets)

    # Results buffers:
    # errors[i, j] = error for freq number i at dof j
    errors = np.zeros((nbfiles, nbpoints), dtype=np.float64)
    # ymin[i, j] = minimal value (through time instants) of distance at contact j for freq i
    ymin = np.zeros((nbfiles + 1, nbfrets), dtype=np.float64)

    # Compute ymin for reference model
    for j in range(nbfrets):
        ymin[-1, j] = (ref_model.data_interactions[ref_frets[j]][0][:]).min()

    # Compute errors for all freqs
    for i in range(nbfiles):
        current_model, current_string, current_frets, e = load_model(files[i])
        scurrent = current_model.data_ds[current_string][dofs, :]
        # Ensure time instants are the same for both models (ref and current).
        tcurrent = current_model.time
        assert np.allclose(tref,
                           tcurrent), 'Error: time instants are different.'

        # Pointwise relative error, accumulated (l2) over time;
        # the division assumes sref never vanishes at the selected dofs.
        errors[i, :] = np.sqrt((((sref - scurrent) / sref) ** 2).sum(1))

        for j in range(nbfrets):
            ymin[i, j] = (
                current_model.data_interactions[current_frets[j]][0][:]).min()
        freqs.append(current_model.fs)
    return errors, ymin, freqs
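
# A minimal usage sketch for compute_errors_rel; the file names and
# frequencies below are hypothetical. The highest frequency (20000) is the
# reference; with shift=2 its time grid is subsampled to match the grid of
# the 10000 Hz run.
fileslist = {10000: 'guitar_10000.h5', 20000: 'guitar_20000.h5'}
errors, ymin, freqs = compute_errors_rel(fileslist, dofs=[10, 50], shift=2)
# errors[i, j]: relative error for frequency freqs[i] at dof dofs[j];
# ymin[-1, :]: minimal fret distances for the reference simulation.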
Example 2
import numpy as np
import scipy.io as sio

# Note: load_model is assumed to be provided by the surrounding module.


def save_dof(fileslist, dof, outputfile):
    """Save displacements for a given dof to a .mat file.

    Extract results from a list of simulations
    (e.g. a convergence study, to plot dof = f(frequency)).

    Parameters
    ----------
    fileslist : dict
        map from frequency to the (h5) file from which displacements
        must be loaded
    dof : int
        degree of freedom of interest
    outputfile : string
        name of the resulting file (.mat)
    """

    files = list(fileslist.values())
    freqs = list(fileslist.keys())
    nbfiles = len(files)
    current_model, current_string, current_frets, e = load_model(files[0])
    # Number of time instants taken into account
    nbtimes = current_model.data_ds[current_string].shape[1]

    # Results buffers:
    dof_val = np.zeros((nbfiles, nbtimes), dtype=np.float64)

    # Extract displacements for all freqs
    for i in range(nbfiles):
        current_model, current_string, current_frets, e = load_model(files[i])
        dof_val[i, :] = current_model.data_ds[current_string][dof, :]

    dofname = 'dof_' + str(dof)
    matdict = {'freqs': freqs, dofname: dof_val}
    sio.savemat(outputfile, matdict)
    return dof_val
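
# A minimal usage sketch for save_dof; the file names are hypothetical.
# The displacement of dof 42 is extracted from each simulation and saved,
# together with the frequencies, to a matlab file.
fileslist = {10000: 'guitar_10000.h5', 20000: 'guitar_20000.h5'}
dof_val = save_dof(fileslist, dof=42, outputfile='dof_42.mat')
# dof_val[i, :]: displacement time series for the i-th frequency.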
Example 3
import os

import numpy as np

# Note: load_model is assumed to be provided by the surrounding module.


def check_time_vectors(filelist):
    """Ensure that the time vectors from a list of simulations are compatible.

    The last file in the list is taken as the reference.
    """

    # Load the reference simulation (last file in the list).
    reference_file = filelist[-1]
    ref_model, ref_string, ref_frets, e = load_model(reference_file)
    tref = ref_model.time
    nbfiles = len(filelist) - 1
    for i in range(nbfiles):
        if os.path.exists(filelist[i]):
            current_model, current_string, current_frets, e = load_model(
                filelist[i])
            tcurrent = current_model.time[:]
            print("check i ... ", i)
            assert np.allclose(tcurrent, tref)
        else:
            print('Missing file ' + filelist[i] + ' - Skip')
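
# A minimal usage sketch for check_time_vectors; the file names are
# hypothetical. The last file is the reference, and every other existing
# file must share its time instants.
check_time_vectors(['guitar_1.h5', 'guitar_2.h5', 'guitar_ref.h5'])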
Example 4
        hist = model.fit(new_tr_data.x, new_tr_data.y, epochs=6,
                         batch_size=100, sample_weight=sample_weight,
                         callbacks=[evaluation_function])

        ep = 'final'
        logger.info('Saving weights from epoch {0}.'.format(ep))
        weights_name = ('gitig_' + datetime.now().strftime('%Y%m%d-%H%M%S')
                        + '_model_' + str(ep) + '.h5')
        model.save_weights(weights_name)
    else:
        hist = model.fit(tr_data.x, tr_data.y,
                         epochs=int(config['training']['epoch']),
                         batch_size=100, callbacks=[evaluation_function])

    # logger.info('Saving trained model...')
    # model_tools.save_model(model,config['model']['path_model_architecture'],config['model']['path_model_weights'])

else:
    from cnn import semantic_similarity_layer
    import cnn
    import model_tools
    model = model_tools.load_model(
        config['model']['path_model_architecture'],
        config['model']['path_model_weights'],
        {'semantic_similarity_layer': semantic_similarity_layer})
    model.compile(optimizer='adadelta', loss='binary_crossentropy')
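
# The snippet above assumes `evaluation_function` is a Keras callback
# instance. A hypothetical minimal sketch of such a callback (the real one
# presumably runs a task-specific evaluation at each epoch):
from keras.callbacks import Callback


class EvaluationFunction(Callback):
    def on_epoch_end(self, epoch, logs=None):
        # Replace with the actual evaluation logic.
        print('epoch {} done, logs: {}'.format(epoch, logs))


evaluation_function = EvaluationFunction()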


Example 5
human_blast_xml_file = os.path.join(utility_dir,
                                    '%s_human_blast.xml' % (run_str))
oncho_blast_xml_file = os.path.join(utility_dir,
                                    '%s_oncho_blast.xml' % (run_str))
chembl_results_file = os.path.join(utility_dir, '%s_chembl.txt' % (run_str))

# # #  Intermediate files created
prot_sequences_file = os.path.join(utility_dir, '%s_prots.fa' % (run_str))
rxn_ko_data_file = os.path.join(utility_dir, '%s_rxns.pkl' % (run_str))
gene_ko_data_file = os.path.join(utility_dir, '%s_genes.pkl' % (run_str))

# # #  Run steps
if not os.path.isfile(rxn_ko_data_file):
    rxn_data = {}
    model_path = os.path.join(files_dir, model_file)
    model = load_model(model_path, wolbachia_ratio)
    rxn_to_genes = get_rxns_to_delete(model)
    do_deletions(
        rxn_data, model, rxn_to_genes, do_double_ko,
        objective_threshold_fraction
    )  # Fills out 'objective', 'deficiencies', and 'genes' of reactions in rxn_data.
    save_data_object(rxn_data, rxn_ko_data_file)
else:
    rxn_data = load_data_object(rxn_ko_data_file)

#print_deficiencies(rxn_data)

if not os.path.isfile(gene_ko_data_file):
    gene_data = process_gene_data(rxn_data)
    get_expression_data(gene_data, expression_file, expression_sheets,
                        expression_conditions)  # Fills out 'expression_levels'
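
# save_data_object and load_data_object are called above but not defined in
# this excerpt; given the .pkl file names, a minimal sketch assuming they
# are plain pickle wrappers:
import pickle


def save_data_object(obj, file_path):
    with open(file_path, 'wb') as f:
        pickle.dump(obj, f)


def load_data_object(file_path):
    with open(file_path, 'rb') as f:
        return pickle.load(f)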
Example 6
    def initialize(self, *, context=None, model=None):
        """First try to load torchscript else load eager mode state_dict based model"""
        import os
        import json

        if context is None:
            self.device = torch.device(
                "cuda" if torch.cuda.is_available() else "cpu")
            logger.info('Available device {}'.format(self.device))
            if model is None:
                raise RuntimeError("Missing both context and model")
            self.model = model.to(self.device)
        else:
            self.manifest = context.manifest
            properties = context.system_properties
            gpu_id = properties.get("gpu_id")
            if gpu_id is None:
                gpu_id = 0
            model_dir = properties.get("model_dir")

            from os import listdir
            from os.path import isfile, join
            entries = [f for f in listdir(model_dir)
                       if isfile(join(model_dir, f))]

            self.device = torch.device(
                "cuda:" + str(gpu_id) if torch.cuda.is_available() else "cpu")
            logger.info('Available device {}'.format(self.device))

            # Read model serialize/pt file
            serialized_file = self.manifest['model']['serializedFile']
            model_pt_path = os.path.join(model_dir, serialized_file)
            logger.info(json.dumps(self.manifest))
            logger.info(json.dumps(properties))
            logger.info(json.dumps(entries))
            if not os.path.isfile(model_pt_path):
                raise RuntimeError(
                    f"Missing the serialized model file '{model_pt_path}'")

            try:
                logger.info('Loading torchscript model to device {}'.format(
                    self.device))
                self.model = torch.jit.load(model_pt_path)
            except Exception:
                # Torchscript loading failed: fall back to an eager-mode
                # model built from the model definition file.
                model_py_file = self.manifest['model']['modelFile']
                model_def_path = os.path.join(model_dir, model_py_file)
                if not os.path.isfile(model_def_path):
                    raise RuntimeError(
                        f"Missing the model.py file '{model_def_path}'")

                from model_tools import load_model
                from model import Generator

                with open(os.path.join(model_dir, 'code', 'hps.json')) as fp:
                    hps = json.load(fp)

                params = {
                    'nz': hps['nz'],
                    'nc': hps['nc'],
                    'ngf': hps['ngf'],
                    'num_classes': hps['num-classes']
                }

                self.model = load_model(model_pt_path,
                                        model_cls=Generator,
                                        params=params,
                                        device=self.device)

            self.model.eval()
            logger.debug(
                'Model file {0} loaded successfully'.format(model_pt_path))

        self.initialized = True
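
# Hypothetical local (non-TorchServe) use of initialize(): with no serving
# context, a model must be passed in explicitly and is moved to the
# available device. `GanHandler` and `netG` are illustrative names only.
handler = GanHandler()          # assumed: the class that defines initialize()
handler.initialize(model=netG)  # netG: an already-built torch.nn.Module
print(handler.device, handler.initialized)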
Example 7
import numpy as np
import scipy.io as sio

# Note: load_model is assumed to be provided by the surrounding module.


def compute_errors(fileslist, dofs=None, shift=1, savedofs=None, ifref=-1):
    """Compute the relative error defined in eq. 3.36 of Clara's manuscript.

    Parameters
    ----------
    fileslist : dict
        map from frequency to the h5 file holding the results of the
        simulation for that frequency.
    dofs : list of int
        degrees of freedom (ds) of interest.
    shift : int
        subsampling step applied to the reference time grid so that it
        matches the (coarser) grids of the other simulations.
    savedofs : string
        name of the matlab file where the dof values are saved
        (default: None, i.e. nothing is saved).
    ifref : int
        id (in fileslist) of the simulation (i.e. freq) used as the
        reference for the error computation. Default: the last one.
    """
    fref = list(fileslist.keys())[ifref]
    print("compute errors with reference freq = " + str(fref))
    # Load the reference simulation.
    reference_file = fileslist[fref]
    ref_model, ref_string, ref_frets, e = load_model(reference_file)
    # If dofs is None, errors are computed for all degrees of freedom.
    if dofs is None:
        dofs = list(range(ref_string.dimension()))

    sref = ref_model.data_ds[ref_string][dofs, ::shift]
    sum_ref = (sref**2).sum(1)
    freqs = []
    tref = ref_model.time[::shift]
    files = list(fileslist.values())[:ifref]
    files = [f for f in files if f != reference_file]
    # Number of freqs taken into account
    nbfiles = len(files)
    # Number of dofs where errors are computed
    nbpoints = len(dofs)
    # Number of contact points
    nbfrets = len(ref_frets)
    # Number of time instants taken into account
    nbtimes = ref_model.data_ds[ref_string].shape[1]
    xref = ref_string.x[dofs]
    if savedofs is not None:
        dofs_val = {}
        for dof in dofs:
            dofs_val[dof] = np.zeros((nbfiles + 1, nbtimes), dtype=np.float64)
            dofs_val[dof][-1, :] = ref_model.data_ds[ref_string][dof, :]

    # Results buffers:
    # errors[i, j] = error for freq number i at dof j
    errors = np.zeros((nbfiles, nbpoints), dtype=np.float64)
    # ymin[i, j] = minimal value (through time instants) of distance at contact j for freq i
    ymin = np.zeros((nbfiles + 1, nbfrets), dtype=np.float64)
    # Compute ymin for reference model
    for j in range(nbfrets):
        ymin[-1, j] = (ref_model.data_interactions[ref_frets[j]][0][:]).min()

    # Compute errors for all freqs
    for i in range(nbfiles):
        current_model, current_string, current_frets, e = load_model(files[i])
        scurrent = current_model.data_ds[current_string][dofs, :]
        # Ensure time instants are the same for both models (ref and current).
        tcurrent = current_model.time
        assert np.allclose(tref,
                           tcurrent), 'Error: time instants are different.'
        xcurrent = current_string.x[dofs]
        assert np.allclose(xref, xcurrent)
        # Relative global error: l2 norm (over time) of the difference,
        # normalized by the l2 norm of the reference.
        errors[i, :] = np.sqrt(((sref - scurrent) ** 2).sum(1) / sum_ref)
        if savedofs is not None:
            for dof in dofs:
                dofs_val[dof][i, :] = current_model.data_ds[current_string][
                    dof, :]

        for j in range(nbfrets):
            ymin[i, j] = (
                current_model.data_interactions[current_frets[j]][0][:]).min()
        freqs.append(current_model.fs)

    if savedofs is not None:
        frs = list(fileslist.keys())
        matdict = {'freqs': frs}
        outputfile = savedofs
        for dof in dofs:
            dofname = 'dof_' + str(dof)
            matdict[dofname] = dofs_val[dof]

        sio.savemat(outputfile, matdict)
    return errors, ymin, freqs, xref
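
# A minimal usage sketch for compute_errors; the file names are hypothetical.
# The last entry (20000 Hz) is the reference (ifref=-1); with shift=2 its
# time grid is subsampled to match the 10000 Hz run.
fileslist = {10000: 'guitar_10000.h5', 20000: 'guitar_20000.h5'}
errors, ymin, freqs, xref = compute_errors(fileslist, dofs=[10, 50], shift=2)
# errors[i, j]: relative error for freqs[i] at dofs[j]; xref holds the
# positions of the selected dofs along the string.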