Example 1
    def save_events(self, events_to_save):
        '''Save the particular events identified during the classification process.

        Args:
            events_to_save (list of strs) : What events to save to the file.
        '''

        # Open up the file to save the data in.
        events_filename = 'events_{}.hdf5'.format(self.tag)
        self.events_filepath = os.path.join(self.out_dir, events_filename)
        f = h5py.File(self.events_filepath, 'a')

        # Save the data
        for event_type in events_to_save:

            data = getattr(self, event_type)
            f.create_dataset(event_type, data=data)

        utilities.save_parameters(self, f)

        # Save the current code versions
        f.attrs['linefinder_version'] = utilities.get_code_version(self)
        f.attrs['galaxy_dive_version'] = utilities.get_code_version(
            read_ahf, instance_type='module')

        f.close()
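
Every writer on this page hands its metadata off to utilities.save_parameters(self, f); that function's body is not shown here. The sketch below is only a guess at the general pattern, assuming it copies a stored_parameters list of attribute names (much as Example 4 does by hand) into a 'parameters' group of the open HDF5 file; the helper's name and its fallback behaviour are made up.

# Hypothetical sketch of a save_parameters-style helper; the real
# linefinder function may differ. `f` is an open h5py.File and
# `obj.stored_parameters` is assumed to list the attribute names to save.
def save_parameters_sketch(obj, f):
    grp = f.require_group('parameters')
    for parameter in getattr(obj, 'stored_parameters', []):
        value = getattr(obj, parameter)
        try:
            # h5py attributes accept scalars, strings, and simple arrays.
            grp.attrs[parameter] = value
        except TypeError:
            # Fall back to a string representation for anything else.
            grp.attrs[parameter] = str(value)
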
Example 2
    def save_classifications(self, classifications_to_save):
        '''Save the results of running the classifier.

        Args:
            classifications_to_save (list of strs) :
                What classifications to save to the file.
        '''

        # Open up the file to save the data in.
        classification_filename = 'classifications_{}.hdf5'.format(self.tag)
        self.classification_filepath = os.path.join(
            self.out_dir,
            classification_filename,
        )
        f = h5py.File(self.classification_filepath, 'a')

        # Save the data
        for classification in classifications_to_save:

            data = getattr(self, classification)
            f.create_dataset(classification, data=data)

        utilities.save_parameters(self, f)

        # Save the current code versions
        f.attrs['linefinder_version'] = utilities.get_code_version(self)
        f.attrs['galaxy_dive_version'] = utilities.get_code_version(
            read_ahf, instance_type='module')

        # Save the snapshot number when the main halo is first resolved.
        f.attrs['main_mt_halo_first_snap'] = self.main_mt_halo_first_snap
        f.attrs['ind_first_snap'] = self.ind_first_snap

        f.close()
Example 3
    def test_works_for_modules(self):

        actual = utilities.get_code_version(utilities, instance_type='module')

        expected = subprocess.check_output(['git', 'describe', '--always'])

        assert actual == expected
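
Example 3 pins down what the helper returns for a module: the raw output of git describe --always run inside that module's repository. Examples 7 and 8 further show that the caller's working directory must survive the call, even when the version lookup fails. The following sketch is only an inferred reconstruction under those constraints; the name get_code_version_sketch and the error handling are assumptions, not the actual linefinder code.

# Hedged reconstruction of a get_code_version-like helper, inferred only
# from the tests on this page; the real linefinder.utils.utilities
# implementation may differ, especially in how failures are reported.
import inspect
import os
import subprocess

def get_code_version_sketch(instance, instance_type='class'):
    '''Return `git describe --always` for the repository containing `instance`.'''
    cwd = os.getcwd()
    try:
        if instance_type == 'module':
            # For a module, run git in the directory holding its source file
            # (Example 3 expects the raw `git describe --always` output).
            source_dir = os.path.dirname(instance.__file__)
        else:
            # For a class instance, use the directory of its defining module.
            source_dir = os.path.dirname(inspect.getfile(instance.__class__))
        os.chdir(source_dir)
        return subprocess.check_output(['git', 'describe', '--always'])
    except (subprocess.CalledProcessError, OSError):
        # Unknown what the real function returns here; what Example 8
        # guarantees is only that the caller's directory is unchanged.
        return None
    finally:
        # Always restore the working directory (Examples 7 and 8).
        os.chdir(cwd)
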
Example 4
    def save_selected_ids(self, selected_ids_formatted, data_filters):

        # Open up the file to save the data in.
        ids_filename = 'ids_full_{}.hdf5'.format(self.tag)
        self.ids_filepath = os.path.join(self.out_dir, ids_filename)
        f = h5py.File(self.ids_filepath, 'a')

        # Save the data
        if isinstance(selected_ids_formatted, tuple):
            ids, child_ids = selected_ids_formatted
            f.create_dataset('target_ids', data=ids)
            f.create_dataset('target_child_ids', data=child_ids)

        else:
            ids = selected_ids_formatted
            f.create_dataset('target_ids', data=ids)

        # Create groups for the parameters
        grp = f.create_group('parameters')
        subgrp = f.create_group('parameters/snapshot_parameters')
        data_filter_subgrp = f.create_group('parameters/data_filters')

        # Save the data parameters
        for parameter in self.stored_parameters:
            if parameter != 'snapshot_kwargs':
                grp.attrs[parameter] = getattr(self, parameter)

        # Save the snapshot parameters too
        for key in self.snapshot_kwargs.keys():
            subgrp.attrs[key] = self.snapshot_kwargs[key]

        # Save the data filter values
        for key, data_filter in data_filters.items():
            df_subgrp = data_filter_subgrp.create_group(key)
            for inner_key, value in data_filter.items():
                df_subgrp.attrs[inner_key] = value

        # Save how many processors we used.
        grp.attrs['n_processors'] = self.n_processors

        # Save the current code versions
        f.attrs['linefinder_version'] = utilities.get_code_version(self)
        f.attrs['galaxy_dive_version'] = utilities.get_code_version(
            particle_data, instance_type='module')

        f.close()
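
For completeness, the file written above can be read back with plain h5py. The snippet below assumes only the layout created in this example; the filename ('ids_full_mytag.hdf5') is a placeholder for whatever tag was used.

# Read back an ids file written by save_selected_ids. The filename is a
# placeholder; only the layout created in Example 4 is assumed.
import h5py

with h5py.File('ids_full_mytag.hdf5', 'r') as f:
    target_ids = f['target_ids'][...]
    n_processors = f['parameters'].attrs['n_processors']
    snapshot_parameters = dict(f['parameters/snapshot_parameters'].attrs)

print(target_ids.shape, n_processors, snapshot_parameters)
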
Example 5
    def write_tracked_data(self, formatted_data):
        '''Write tracks to a file.

        Args:
            formatted_data (tuple of dicts) :
                A (ptrack, attrs) pair: the formatted particle track
                data and its attributes.
        '''

        ptrack, attrs = formatted_data

        # Make sure the output location exists
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)

        self.outname = 'ptracks_{}.hdf5'.format(self.tag)

        outpath = os.path.join(self.out_dir, self.outname)

        f = h5py.File(outpath, 'w')

        # Save data
        for keyname in ptrack.keys():
            f.create_dataset(keyname, data=ptrack[keyname])

        # Save the attributes
        for key in attrs.keys():
            f.attrs[key] = attrs[key]

        # Save the code of the custom_fns as a string too.
        if self.custom_fns is not None:
            self.custom_fns_str = [
                inspect.getsource(_) for _ in self.custom_fns
            ]

        utilities.save_parameters(self, f)

        # Save the current code version
        f.attrs['linefinder_version'] = utilities.get_code_version(self)
        f.attrs['galaxy_dive_version'] = utilities.get_code_version(
            read_snapshot, instance_type='module')

        f.close()
Example 6
    def write_galaxy_identifications(self, ptrack_gal_ids):
        '''Write the data, close the file, and print out information.'''

        # Get the number of particles, for use in reporting the time
        self.n_particles = self.ptrack['Den'][...].shape[0]

        # Close the old dataset
        self.ptrack.close()

        # Save the data.
        save_filename = 'galids_{}.hdf5'.format(self.tag)
        self.save_filepath = os.path.join(self.out_dir, save_filename)
        f = h5py.File(self.save_filepath, 'w')
        for key in ptrack_gal_ids.keys():
            f.create_dataset(key, data=ptrack_gal_ids[key])

        # Store the main mt halo id
        # (as identified by the largest Mvir value at the lowest redshift)
        if self.main_mt_halo_id is None:
            try:
                indice = self.halo_data.mtree_halos[0].index.max()
            except AttributeError:
                self.halo_data.get_mtree_halos(self.mtree_halos_index,
                                               self.halo_file_tag)
                indice = self.halo_data.mtree_halos[0].index.max()
            m_vir_z0 = self.halo_data.get_mtree_halo_quantity(
                quantity='Mvir',
                indice=indice,
                index=self.mtree_halos_index,
                tag=self.halo_file_tag)
            f.attrs['main_mt_halo_id'] = m_vir_z0.argmax()
        else:
            f.attrs['main_mt_halo_id'] = self.main_mt_halo_id

        utilities.save_parameters(self, f)

        # Save the current code version
        f.attrs['linefinder_version'] = utilities.get_code_version(self)
        f.attrs['galaxy_dive_version'] = utilities.get_code_version(
            galaxy_linker, instance_type='module')

        f.close()
Example 7
    def test_works_in_otherdir(self):

        cwd = os.getcwd()

        # Change directories
        os.chdir('../')

        result = utilities.get_code_version(self)

        assert result is not None

        # Change back
        os.chdir(cwd)
Example 8
    def test_doesnt_change_dir_upon_failing(self):

        true_cwd = os.getcwd()

        # Change to a dir that should not be a repository
        os.chdir(os.path.expanduser('~'))

        cwd = os.getcwd()

        result = utilities.get_code_version(self)

        now_cwd = os.getcwd()

        assert cwd == now_cwd

        # Change back
        os.chdir(true_cwd)
Example 9
    def test_default(self):

        result = utilities.get_code_version(self)

        assert result is not None