def test_input_is_converted_to_quantities(self, tmpdir,
                                          network_dict_val_unit):
    file = 'test.h5'
    tmp_test = tmpdir.mkdir('tmp_test')
    with tmp_test.as_cwd():
        h5.save(file, network_dict_val_unit)
        output = io.load_val_unit_dict_from_h5(file)
        # check that all val unit dicts have been converted to quantities
        check_dict_contains_no_val_unit_dict(output)

def test_loaded_dictionaries_are_not_empty(self, tmpdir,
                                           network_dict_val_unit):
    file = 'test.h5'
    tmp_test = tmpdir.mkdir('tmp_test')
    with tmp_test.as_cwd():
        h5.save(file, network_dict_val_unit)
        output = io.load_val_unit_dict_from_h5(file)
        # check that the loaded dictionary is not empty
        assert bool(output)

def test_loaded_network_dictionaries_are_not_empty(self, tmpdir,
                                                   network_dict_val_unit):
    file = 'test.h5'
    tmp_test = tmpdir.mkdir('tmp_test')
    with tmp_test.as_cwd():
        h5.save(file, network_dict_val_unit)
        outputs = io.load_network(file)
        # check that no loaded sub-dictionary is empty
        for sub_dict in outputs:
            assert bool(sub_dict)
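The first test relies on a helper that does not appear in this listing. Below is a minimal sketch of what such a recursive check might look like, assuming a val-unit dict is any dict with exactly the keys 'val' and 'unit'; the real helper lives in the test suite.

def check_dict_contains_no_val_unit_dict(d):
    # Sketch only: fail if any nested dict still looks like a
    # {'val': ..., 'unit': ...} pair, i.e. was not converted to a
    # quantity on loading.
    assert set(d.keys()) != {'val', 'unit'}
    for value in d.values():
        if isinstance(value, dict):
            check_dict_contains_no_val_unit_dict(value)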
Example #4
def create_and_save_fixtures(func, regime_params, regimes, file):
    results = {}
    regime_params = extract_required_params(func, regime_params)
    for regime, params in zip(regimes, regime_params):
        output = func(**params)
        results[regime] = {
            'params': params,
            'output': output,
        }
    h5.save(file, results, overwrite_dataset=True)
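For context, a hypothetical call could look as follows; the analysis function, regime names, and parameter values are made up for illustration, and extract_required_params is assumed to filter each dict down to the parameters the function actually accepts.

def mean_rate(tau_m, w):
    # stand-in analysis function, invented for this sketch
    return w / tau_m

regimes = ['low_noise', 'high_noise']
regime_params = [dict(tau_m=10.0, w=0.5, unused=0),
                 dict(tau_m=20.0, w=1.5, unused=0)]

# Writes {'low_noise': {'params': ..., 'output': ...}, ...} to the file.
create_and_save_fixtures(mean_rate, regime_params, regimes, 'fixtures.h5')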
Example #5
def test_returns_dictionaries_in_correct_order(self, tmpdir,
                                               network_dict_val_unit):
    file = 'test.h5'
    tmp_test = tmpdir.mkdir('tmp_test')
    with tmp_test.as_cwd():
        h5.save(file, network_dict_val_unit)
        outputs = io.load_network(file)
        assert 'tau_m' in outputs[0].keys()
        assert 'omegas' in outputs[1].keys()
        assert 'test' in outputs[2].keys()
        # avoid shadowing the dict builtin in a comprehension
        rhd = list(outputs[3].values())
        assert 'test' in rhd[0].keys()
        assert 'analysis_params' in rhd[0].keys()
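The index-based assertions above pin down the order in which load_network returns its sub-dictionaries. A minimal fixture consistent with them might look like the sketch below; the top-level key names, the concrete values, and the val-unit representation are all assumptions.

import pytest

@pytest.fixture
def network_dict_val_unit():
    # Sketch of a fixture satisfying the assertions above; the real
    # fixture in the test suite is richer.
    return {
        'network_params': {'tau_m': {'val': 10.0, 'unit': 'ms'}},
        'analysis_params': {'omegas': {'val': [0.0, 1.0], 'unit': 'hertz'}},
        'results': {'test': 1},
        'results_hash_dict': {
            'some_hash': {'test': 1,
                          'analysis_params': {'omegas': [0.0, 1.0]}},
        },
    }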
Example #6
    def add_child(self, data, idx, path=None):
        '''
        Insert the data present in the provided file.

        data : dict
            data dictionary; should be of the same format as the one
            initialized by init_structure
        idx : int
            location in the master list at which to insert the results
        path : str
            path to the file from which the data was loaded. If None, a
            default path under self.directory is generated, and nothing
            will be done when it comes time to kill children
        '''

        if path is None:
            path = '%s/child_%s.h5' % (self.directory, idx)

        self.children.append({'path': path, 'idx': idx})
        h5py_wrapper.save(path, data, write_mode='w')
Example #7
    def concatenate(self):

        master_data_filepath = os.path.abspath(
            os.path.join(self.directory, '..', '%s.dat' % self.directory))

        if len(self.children) > 0:
            # Sequentially open up children and insert their results into
            # a master dictionary
            dummy_dict = h5py_wrapper.load(self.children[0]['path'])
            master_dict = init_structure(self.total_tasks, dummy_dict)

            for child in self.children:
                child_data = h5py_wrapper.load(child['path'])
                master_dict = insert_data(master_dict, child_data,
                                          child['idx'])

            h5py_wrapper.save(master_data_filepath, master_dict,
                              write_mode='w')
        else:
            # Still create a dummy .dat file to indicate that the job
            # completed
            dummy_dict = {}
            h5py_wrapper.save(master_data_filepath, dummy_dict,
                              write_mode='w')
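A hypothetical driver tying add_child and concatenate together; the Gatherer class name and its constructor are assumptions, since the listing only shows the methods.

gatherer = Gatherer(directory='results_dir', total_tasks=4)

for idx in range(4):
    data = {'task_output': [idx, idx ** 2]}
    # Writes results_dir/child_<idx>.h5 and registers it as a child.
    gatherer.add_child(data, idx)

# Merges every child file into results_dir/../results_dir.dat.
gatherer.concatenate()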
Example #8
    def store_hdf5(self, outdir, overwrite=False):
        """
        Write the source data to an HDF5 file.

        :param outdir: path of the output ".h5" file
        :type outdir: str
        :param overwrite: whether existing datasets may be overwritten
        :type overwrite: bool
        """

        # append an HDF5 extension if the path does not already have one
        if not outdir.endswith((".h5", ".hdf5")):
            outdir = outdir + ".h5"

        h5w.save(outdir,
                 self.metadata,
                 path='metadata/',
                 write_mode='a',
                 overwrite_dataset=overwrite)
        h5w.save(outdir,
                 self.source_properties,
                 path='source_properties/',
                 write_mode='a',
                 overwrite_dataset=overwrite)
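A minimal round trip built on the same API; h5py_wrapper's load with a path argument is assumed here, mirroring the save calls above.

import h5py_wrapper as h5w

# Stand-in for what store_hdf5 writes under 'metadata/'.
h5w.save('sources.h5', {'survey': 'demo'}, path='metadata/',
         write_mode='a')

metadata = h5w.load('sources.h5', path='metadata/')
print(metadata)  # -> {'survey': 'demo'}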
Example #9
    def parallel_concatenate(self, comm, root=0):

        if len(self.children) > 0:

            rank = comm.rank
            # Split the children evenly across the parallel workers
            children_chunks = np.array_split(self.children, comm.size)
            rank_children = children_chunks[rank]

            # Just gather a raw list of dictionaries
            child_index_lookup_table = []
            dict_list = []

            bad_children = []

            for child in rank_children:
                try:
                    child_data = h5py_wrapper.load(child['path'])
                except Exception:
                    # skip children whose files are missing or unreadable
                    bad_children.append(child)
                    continue
                dict_list.append(child_data)
                child_index_lookup_table.append(child['idx'])

            # Gather across ranks
            dict_list = comm.gather(dict_list, root=root)
            lookup_table = comm.gather(child_index_lookup_table, root=root)
            bad_children = comm.gather(bad_children, root=root)

            if rank == root:

                # Flatten the list(s)
                dict_list = [elem for sublist in dict_list
                             for elem in sublist]
                lookup_table = np.array([elem for sublist in lookup_table
                                         for elem in sublist]).astype(int)
                bad_children = [elem for sublist in bad_children
                                for elem in sublist]

                print(len(dict_list))

                # Follow the normal procedure from concatenate
                dummy_dict = dict_list[0]
                # Init the structure to the total number of tasks; won't
                # necessarily fill all of them because of the presence of
                # bad children
                master_dict = init_structure(self.total_tasks, dummy_dict)

                for dict_, idx in zip(dict_list, lookup_table):
                    master_dict = insert_data(master_dict, dict_, idx)

                # Save
                file_name = os.path.abspath(self.directory).split('/')[-1]
                print(file_name)
                master_data_filepath = (os.path.abspath('..')
                                        + '/%s.dat' % file_name)
                h5py_wrapper.save(master_data_filepath, master_dict,
                                  write_mode='w')
                return bad_children

        else:
            if comm.rank == root:
                # Still create a dummy .dat file to indicate that the job
                # completed
                dummy_dict = {}
                file_name = os.path.abspath(self.directory).split('/')[-1]
                master_data_filepath = (os.path.abspath('..')
                                        + '/%s.dat' % file_name)
                h5py_wrapper.save(master_data_filepath, dummy_dict,
                                  write_mode='w')

                return []
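A hypothetical MPI driver for the method above; mpi4py and the Gatherer class name are assumptions, and each rank is expected to have registered its children beforehand via add_child.

from mpi4py import MPI

comm = MPI.COMM_WORLD
gatherer = Gatherer(directory='results_dir', total_tasks=100)

# ... each rank calls gatherer.add_child(...) for its completed tasks ...

bad = gatherer.parallel_concatenate(comm, root=0)
if comm.rank == 0:
    print('unreadable child files:', bad)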