    def __init__(self, param_dictionary=None, stream_definition_id='', locator=None, stream_definition=None):
        """
        """
        if type(param_dictionary) == dict:
            self._pdict = ParameterDictionary.load(param_dictionary)
        
        elif isinstance(param_dictionary,ParameterDictionary):
            self._pdict = param_dictionary
        
        elif stream_definition_id or stream_definition:
            if stream_definition:
                if not isinstance(stream_definition,StreamDefinition):
                    raise BadRequest('Improper StreamDefinition object')
                self._definition = stream_definition

            stream_def_obj = stream_definition or RecordDictionaryTool.read_stream_def(stream_definition_id)
            pdict = stream_def_obj.parameter_dictionary
            self._available_fields = stream_def_obj.available_fields or None
            self._stream_config = stream_def_obj.stream_configuration
            self._pdict = ParameterDictionary.load(pdict)
            self._stream_def = stream_definition_id

        else:
            raise BadRequest('Unable to create record dictionary with improper ParameterDictionary')
        
        if stream_definition_id:
            self._stream_def=stream_definition_id
        
        self._shp = None
        self._rd = {}
        self._locator = locator

        self._setup_params()

    @classmethod
    def load_from_granule(cls, g):
        if isinstance(g.param_dictionary, str):
            instance = cls(stream_definition_id=g.param_dictionary, locator=g.locator)
            stream_def_obj = RecordDictionaryTool.read_stream_def(g.param_dictionary)
            pdict = stream_def_obj.parameter_dictionary
            instance._available_fields = stream_def_obj.available_fields or None
            instance._pdict = ParameterDictionary.load(pdict)
        
        else:
            instance = cls(param_dictionary=g.param_dictionary, locator=g.locator)
            instance._pdict = ParameterDictionary.load(g.param_dictionary)
        
        if g.domain:
            instance._shp = (g.domain[0],)

        if g.creation_timestamp:
            instance._creation_timestamp = g.creation_timestamp
        
        for k,v in g.record_dictionary.iteritems():
            key = instance._pdict.key_from_ord(k)
            if v is not None:
                ptype = instance._pdict.get_context(key).param_type
                paramval = cls.get_paramval(ptype, instance.domain, v)
                instance._rd[key] = paramval
        
        return instance
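A minimal construction sketch for the initializer above. The import paths are assumptions based on the coverage-model/ION project layout, and the parameter names are illustrative; only the constructor keyword arguments and the rdt[field] assignment style come from the code on this page.

import numpy as np
# Assumed import paths for the classes used above
from coverage_model.parameter import ParameterContext, ParameterDictionary
from coverage_model.parameter_types import QuantityType
from ion.services.dm.utility.granule.record_dictionary import RecordDictionaryTool

pdict = ParameterDictionary()
pdict.add_context(ParameterContext('time', param_type=QuantityType(value_encoding=np.float64)), is_temporal=True)
pdict.add_context(ParameterContext('temp', param_type=QuantityType(value_encoding=np.float64)))

# ParameterDictionary branch of __init__; a stream_definition_id or a
# StreamDefinition object could be passed instead, per the branches above
rdt = RecordDictionaryTool(param_dictionary=pdict)
rdt['time'] = np.arange(10, dtype=np.float64)
rdt['temp'] = np.ones(10, dtype=np.float64)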
    def __init__(self, param_dictionary=None, stream_definition_id='', locator=None):
        """
        """
        if type(param_dictionary) == dict:
            self._pdict = ParameterDictionary.load(param_dictionary)
        
        elif isinstance(param_dictionary,ParameterDictionary):
            self._pdict = param_dictionary
        
        elif stream_definition_id:
            stream_def_obj = RecordDictionaryTool.read_stream_def(stream_definition_id)
            pdict = stream_def_obj.parameter_dictionary
            self._available_fields = stream_def_obj.available_fields or None
            self._pdict = ParameterDictionary.load(pdict)
            self._stream_def = stream_definition_id
        
        else:
            raise BadRequest('Unable to create record dictionary with improper ParameterDictionary')
        
        if stream_definition_id:
            self._stream_def=stream_definition_id
        
        self._shp = None
        self._rd = {}
        self._locator = locator

        self._setup_params()
    def __init__(self,
                 param_dictionary=None,
                 stream_definition_id='',
                 locator=None):
        """
        """
        if type(param_dictionary) == dict:
            self._pdict = ParameterDictionary.load(param_dictionary)

        elif isinstance(param_dictionary, ParameterDictionary):
            self._pdict = param_dictionary

        elif stream_definition_id:
            stream_def_obj = RecordDictionaryTool.read_stream_def(
                stream_definition_id)
            pdict = stream_def_obj.parameter_dictionary
            self._available_fields = stream_def_obj.available_fields or None
            self._pdict = ParameterDictionary.load(pdict)
            self._stream_def = stream_definition_id

        else:
            raise BadRequest(
                'Unable to create record dictionary with improper ParameterDictionary'
            )

        if stream_definition_id:
            self._stream_def = stream_definition_id

        self._shp = None
        self._rd = {}
        self._locator = locator

        self._setup_params()

    @classmethod
    def load_from_granule(cls, g):
        if isinstance(g.param_dictionary, str):
            instance = cls(stream_definition_id=g.param_dictionary,
                           locator=g.locator)
            stream_def_obj = RecordDictionaryTool.read_stream_def(
                g.param_dictionary)
            pdict = stream_def_obj.parameter_dictionary
            instance._available_fields = stream_def_obj.available_fields or None
            instance._pdict = ParameterDictionary.load(pdict)

        else:
            instance = cls(param_dictionary=g.param_dictionary,
                           locator=g.locator)
            instance._pdict = ParameterDictionary.load(g.param_dictionary)

        if g.domain:
            instance._shp = (g.domain[0], )

        if g.creation_timestamp:
            instance._creation_timestamp = g.creation_timestamp

        for k, v in g.record_dictionary.iteritems():
            key = instance._pdict.key_from_ord(k)
            if v is not None:
                ptype = instance._pdict.get_context(key).param_type
                paramval = cls.get_paramval(ptype, instance.domain, v)
                instance._rd[key] = paramval

        return instance
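A round-trip sketch for load_from_granule. Here rdt is a populated RecordDictionaryTool as in the earlier construction sketch; to_granule() is assumed to be the serializing counterpart of load_from_granule and is not taken from the code above.

# Sketch: serialize an RDT to a granule and rebuild it on the receiving side
granule = rdt.to_granule()                          # assumed serializer counterpart
rdt_copy = RecordDictionaryTool.load_from_granule(granule)
for field in rdt_copy.fields:                       # 'fields' is used in Example #12 below
    print(field)
    print(rdt_copy[field])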
 def _execute_transform(self, msg, streams):
     stream_in_id,stream_out_id = streams
     stream_def_in = self._read_stream_def(stream_in_id)
     stream_def_out = self._read_stream_def(stream_out_id)
     
     incoming_pdict_dump = stream_def_in.parameter_dictionary
     outgoing_pdict_dump = stream_def_out.parameter_dictionary
     
     incoming_pdict = ParameterDictionary.load(incoming_pdict_dump)
     outgoing_pdict = ParameterDictionary.load(outgoing_pdict_dump)
     
     merged_pdict = dict([(k,v) for k,v in incoming_pdict.iteritems()] + [(k,v) for k,v in outgoing_pdict.iteritems()])
     rdt_in = RecordDictionaryTool.load_from_granule(msg)
     rdt_out = RecordDictionaryTool(stream_definition_id=stream_def_out._id)
     # Copy the shape onto the output RDT explicitly; since values are written via _rd below, _shp would never get set otherwise
     rdt_out._shp = rdt_in._shp
     if rdt_out._available_fields is None: rdt_out._available_fields = []
     if rdt_in._available_fields is None: rdt_in._available_fields = []
     for key,pctup in merged_pdict.iteritems():
         n,pc = pctup
         # If the parameter is a function type, a transform is applied to calculate its values
         if isinstance(pc.param_type, ParameterFunctionType):
             #apply transform
             pv = get_value_class(pc.param_type, rdt_in.domain)
             # Recursive callback that resolves input values on demand
             def pval_callback(name, slice_):
                 result = None
                 # First-level input: the value is present on the incoming stream
                 if name in rdt_in._available_fields:
                     result = rdt_in[name]
                 else:
                     # Not a first-level input, so evaluate the derived parameter recursively
                     n,pc = merged_pdict[name]
                     pv = get_value_class(pc.param_type, rdt_in.domain)
                     pv._pval_callback = pval_callback
                     result = pv[:]
                 return result
             #set the evaluation callback so it can find values in the input stream
             pv._pval_callback = pval_callback
             if key in rdt_out._available_fields:
                 #rdt to and from granule wraps result in a paramval so no need
                 #paramval = rdt_out.get_paramval(pc.param_type, rdt_in.domain, pv[:])
                 #paramval._pval_callback = pval_callback
                 #rdt_out._rd[key] = paramval
                 rdt_out._rd[key] = pv[:]
         else:
             # The field exists in both the input and output streams, so pass it through to the output
             if key in rdt_in._available_fields and key in rdt_out._available_fields:
                 #pass through
                 rdt_out._rd[key] = rdt_in._rd[key][:]
     return rdt_out 
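The pval_callback above resolves a derived parameter's inputs either directly from the incoming stream or by recursively evaluating other derived parameters. A library-free sketch of that resolution strategy follows; all names in it are illustrative and not taken from the code.

# Illustrative only: resolve derived fields on demand, recursing through dependencies
raw = {'pressure': [10.0, 20.0], 'temperature': [4.0, 5.0]}
derived = {
    'depth':   lambda get: [p * 0.99 for p in get('pressure')],
    'density': lambda get: [t + d for t, d in zip(get('temperature'), get('depth'))],
}

def resolve(name):
    if name in raw:                 # first-level input: present on the incoming stream
        return raw[name]
    return derived[name](resolve)   # derived field: evaluate, recursing into its own inputs

print(resolve('density'))           # 'depth' is computed on the fly while evaluating 'density'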
Example #7
    def set_configuration(self, config):
        log.warn("DRIVER: set_configuration")
        """
        expect configuration to have:
        - parser module/class
        - directory, wildcard to find data files
        - optional timestamp of last granule
        - optional poll rate
        - publish info
        """
        log.error("Log level: %s", log.getEffectiveLevel())
        log.debug('using configuration: %s', config)
        self.config = config
        self.max_records = get_safe(config, 'max_records', 100)
        self.stream_config = self.CFG.get('stream_config', {})
        # Only a single output stream is supported; use the first configured stream
        stream_cfg = self.stream_config.values()[0]

        stream_id = stream_cfg['stream_id']
        stream_route = IonObject(OT.StreamRoute,
                                 routing_key=stream_cfg['routing_key'],
                                 exchange_point=stream_cfg['exchange_point'])
        param_dict = stream_cfg['stream_def_dict']['parameter_dictionary']
        self.publisher = StandaloneStreamPublisher(stream_id=stream_id,
                                                   stream_route=stream_route)
        self.parameter_dictionary = ParameterDictionary.load(param_dict)
        self.time_field = self.parameter_dictionary.get_temporal_context()
        self.latest_granule_time = get_safe(config, 'last_time', 0)
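A sketch of the configuration shapes this method reads. The key names come from the accesses above; the stream name and values are placeholders, and dump() is assumed to be the serializing counterpart of ParameterDictionary.load().

# Passed-in config (optional keys shown with their defaults)
config = {
    'max_records': 100,   # batch size per granule
    'last_time': 0,       # timestamp of the last published granule
}

# self.CFG['stream_config']: one entry per output stream; only the first is used
stream_config = {
    'parsed': {                                    # stream name: placeholder
        'stream_id': 'some_stream_id',             # placeholder resource id
        'routing_key': 'some.routing.key',         # placeholder
        'exchange_point': 'science_data',          # placeholder
        'stream_def_dict': {
            'parameter_dictionary': pdict.dump(),  # assumed dump of a ParameterDictionary (see Example #13)
        },
    },
}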
    def set_configuration(self, config):
        log.warn("DRIVER: set_configuration")
        """
        expect configuration to have:
        - parser module/class
        - directory, wildcard to find data files
        - optional timestamp of last granule
        - optional poll rate
        - publish info
        """
        log.error("Log level: %s", log.getEffectiveLevel())
        log.debug('using configuration: %s', config)
        self.config = config
        self.max_records = get_safe(config, 'max_records', 100)
        self.stream_config = self.CFG.get('stream_config', {})
        # Only a single output stream is supported; use the first configured stream
        stream_cfg = self.stream_config.values()[0]

        stream_id = stream_cfg['stream_id']
        stream_route = IonObject(OT.StreamRoute, routing_key=stream_cfg['routing_key'], exchange_point=stream_cfg['exchange_point'])
        param_dict = stream_cfg['stream_def_dict']['parameter_dictionary']
        self.publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
        self.parameter_dictionary = ParameterDictionary.load(param_dict)
        self.time_field = self.parameter_dictionary.get_temporal_context()
        self.latest_granule_time = get_safe(config, 'last_time', 0)
    def _merge_pdicts(self, pdict1, pdict2):
        incoming_pdict = ParameterDictionary.load(pdict1)
        outgoing_pdict = ParameterDictionary.load(pdict2)

        merged_pdict = ParameterDictionary()
        for k, v in incoming_pdict.iteritems():
            ordinal, v = v
            if k not in merged_pdict:
                merged_pdict.add_context(v)
        for k, v in outgoing_pdict.iteritems():
            ordinal, v = v
            if k not in merged_pdict:
                merged_pdict.add_context(v)
        return merged_pdict
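ParameterDictionary.iteritems() yields (name, (ordinal, ParameterContext)) pairs, which is why the value is unpacked before add_context above. A small sketch, using the imports from the earlier constructor sketch and an illustrative parameter name:

pdict = ParameterDictionary()
pdict.add_context(ParameterContext('temp', param_type=QuantityType(value_encoding=np.float64)))

for name, value in pdict.iteritems():
    ordinal, context = value        # value is an (ordinal, ParameterContext) pair
    assert context.name == name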
Example #10
 def _merge_pdicts(self, pdict1, pdict2):
     incoming_pdict = ParameterDictionary.load(pdict1)
     outgoing_pdict = ParameterDictionary.load(pdict2)
     
     merged_pdict = ParameterDictionary()
     for k,v in incoming_pdict.iteritems():
         ordinal, v = v
         if k not in merged_pdict:
             merged_pdict.add_context(v)
     for k,v in outgoing_pdict.iteritems():
         ordinal, v = v
         if k not in merged_pdict:
             merged_pdict.add_context(v)
     return merged_pdict
 def set_configuration(self, config):
     """
     expect configuration to have:
     - parser module/class
     - directory, wildcard to find data files
     - optional timestamp of last granule
     - optional poll rate
     - publish info
     """
     log.debug('using configuration: %s', config)
     self.config = config
     self.max_records = get_safe(config, 'max_records', 100)
     stream_id = config['stream_id']
     stream_route_param = config['stream_route']
     stream_route = IonObject(OT.StreamRoute, stream_route_param)
     self.publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
     self.parameter_dictionary = ParameterDictionary.load(config['parameter_dict'])
     self.time_field = self.parameter_dictionary.get_temporal_context()
     self.latest_granule_time = get_safe(config, 'last_time', 0)
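This older variant reads a flat config dict instead of CFG['stream_config']. A sketch of its expected keys; all values are placeholders, and dump() is again an assumed serializer for the parameter dictionary.

config = {
    'stream_id': 'some_stream_id',                       # placeholder
    'stream_route': {'routing_key': 'some.routing.key',
                     'exchange_point': 'science_data'},  # attributes for OT.StreamRoute
    'parameter_dict': pdict.dump(),                      # assumed dump of a ParameterDictionary
    'max_records': 100,                                  # optional
    'last_time': 0,                                      # optional
}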
Example #12
    def _execute_transform(self, msg, streams):
        stream_in_id,stream_out_id = streams
        stream_def_in = self.read_stream_def(stream_in_id)
        stream_def_out = self.read_stream_def(stream_out_id)
        
        incoming_pdict_dump = stream_def_in.parameter_dictionary
        outgoing_pdict_dump = stream_def_out.parameter_dictionary
        
        incoming_pdict = ParameterDictionary.load(incoming_pdict_dump)
        outgoing_pdict = ParameterDictionary.load(outgoing_pdict_dump)
        
        merged_pdict = ParameterDictionary()
        for k,v in incoming_pdict.iteritems():
            ordinal, v = v
            if k not in merged_pdict:
                merged_pdict.add_context(v)
        for k,v in outgoing_pdict.iteritems():
            ordinal, v = v
            if k not in merged_pdict:
                merged_pdict.add_context(v)
        rdt_temp = RecordDictionaryTool(param_dictionary=merged_pdict)
        rdt_in = RecordDictionaryTool.load_from_granule(msg)
        # Pass through any non-derived fields that are present on the input stream
        for field in rdt_temp.fields:
            if not isinstance(rdt_temp._pdict.get_context(field).param_type, ParameterFunctionType):
                try:
                    rdt_temp[field] = rdt_in[field]
                except KeyError:
                    pass
        
        # Assigning a parameter-function field to itself forces evaluation of the
        # function and stores the computed values in the temporary RDT
        for field in rdt_temp.fields:
            if isinstance(rdt_temp._pdict.get_context(field).param_type, ParameterFunctionType):
                rdt_temp[field] = rdt_temp[field]

        
        rdt_out = RecordDictionaryTool(stream_definition_id=stream_def_out._id)

        for field in rdt_out.fields:
            rdt_out[field] = rdt_temp[field]
        
        return rdt_out 
Example #13
    def _get_parameter_dictionary(self):
        pdict = ParameterDictionary()

        sal_ctxt = ParameterContext('salinity', param_type=QuantityType(value_encoding=np.float64))
        sal_ctxt.uom = 'unknown'
        sal_ctxt.fill_value = 0e0
        pdict.add_context(sal_ctxt)

        lat_ctxt = ParameterContext('lat', param_type=QuantityType(value_encoding=np.float64))
        lat_ctxt.uom = 'unknown'
        lat_ctxt.fill_value = 0x0
        pdict.add_context(lat_ctxt)

        lon_ctxt = ParameterContext('lon', param_type=QuantityType(value_encoding=np.float64))
        lon_ctxt.uom = 'unknown'
        lon_ctxt.fill_value = 0x0
        pdict.add_context(lon_ctxt)

        oxy_ctxt = ParameterContext('oxygen', param_type=QuantityType(value_encoding=np.float64))
        oxy_ctxt.uom = 'unknown'
        oxy_ctxt.fill_value = 0x0
        pdict.add_context(oxy_ctxt)

        internal_ts_ctxt = ParameterContext(name='internal_timestamp', param_type=QuantityType(value_encoding=np.float64))
        internal_ts_ctxt._derived_from_name = 'time'
        internal_ts_ctxt.uom = 'seconds'
        internal_ts_ctxt.fill_value = -1
        pdict.add_context(internal_ts_ctxt, is_temporal=True)

        driver_ts_ctxt = ParameterContext(name='driver_timestamp', param_type=QuantityType(value_encoding=np.float64))
        driver_ts_ctxt._derived_from_name = 'time'
        driver_ts_ctxt.uom = 'seconds'
        driver_ts_ctxt.fill_value = -1
        pdict.add_context(driver_ts_ctxt)

        return pdict
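The hand-built dictionary above can feed a RecordDictionaryTool directly, as in the constructors shown earlier. A brief sketch, meant to run inside the same class; only the keyword argument and field names come from this page, the array values are illustrative.

# Sketch: stage a few records against the hand-built dictionary
pdict = self._get_parameter_dictionary()
rdt = RecordDictionaryTool(param_dictionary=pdict)
rdt['internal_timestamp'] = np.arange(5, dtype=np.float64)
rdt['salinity'] = np.ones(5, dtype=np.float64) * 35.0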
Example #14
    def repair(
        self,
        backup=True,
        copy_over=True,
        keep_temp=False,
        reanalyze=False,
        analyze_bricks=False,
        detailed_analysis=False,
    ):
        """
        Heavy repair tool that recreates a blank persisted Coverage from the broken coverage's
        original construction parameters, then reconstructs the Master and Parameter metadata
        files by inspection of the ION objects and "valid" brick files.
        @return:
        """
        if self._ar is None or reanalyze:
            self._ar = self._do_analysis(analyze_bricks=analyze_bricks, detailed_analysis=detailed_analysis)

        if self._ar.is_corrupt:
            if len(self._ar.get_brick_corruptions()) > 0:
                raise NotImplementedError("Brick corruption.  Cannot repair at this time!!!")
            else:
                # Repair the Master and Parameter metadata files

                # Need the ParameterDictionary, TemporalDomain and SpatialDomain
                pdict = ParameterDictionary.load(self._dso.parameter_dictionary)
                tdom = GridDomain.load(self._dso.temporal_domain)
                sdom = GridDomain.load(self._dso.spatial_domain)

                # Set up the working directory for the recovered coverage
                tempcov_dir = tempfile.mkdtemp("covs")

                # Create the temporary Coverage
                tempcov = SimplexCoverage(
                    root_dir=tempcov_dir,
                    persistence_guid=self._guid,
                    name=self._guid,
                    parameter_dictionary=pdict,
                    spatial_domain=sdom,
                    temporal_domain=tdom,
                )
                # Handle to persistence layer for tempcov
                pl = tempcov._persistence_layer

                # Set up the original and temporary coverage path strings
                orig_dir = os.path.join(self.cov_pth, self._guid)
                temp_dir = os.path.join(tempcov.persistence_dir, tempcov.persistence_guid)

                # Insert same number of timesteps into temporary coverage as in broken coverage
                brick_domains_new, new_brick_list, brick_list_spans, tD, bD, min_data_bound, max_data_bound = self.inspect_bricks(
                    self.cov_pth, self._guid, "time"
                )
                empty_cov = (
                    brick_list_spans is None
                )  # If None, there are no brick files --> no timesteps, empty coverage!
                if not empty_cov:
                    bls = [s.value for s in brick_list_spans]
                    maxes = [sum(b[3]) for b in new_brick_list.values()]
                    tempcov.insert_timesteps(sum(maxes))

                    # Replace metadata in the Master file
                    pl.master_manager.brick_domains = brick_domains_new
                    pl.master_manager.brick_list = new_brick_list

                    # Repair ExternalLinks to brick files
                    with HDFLockingFile(pl.master_manager.file_path, "r+") as f:
                        for param_name in pdict.keys():
                            del f[param_name]
                            f.create_group(param_name)
                    for param_name in pdict.keys():
                        for brick in bls:
                            link_path = "/{0}/{1}".format(param_name, brick[0])
                            brick_file_name = "{0}.hdf5".format(brick[0])
                            brick_rel_path = os.path.join(
                                pl.parameter_metadata[param_name].root_dir.replace(tempcov.persistence_dir, "."),
                                brick_file_name,
                            )
                            log.debug("link_path: %s", link_path)
                            log.debug("brick_rel_path: %s", brick_rel_path)
                            pl.master_manager.add_external_link(link_path, brick_rel_path, brick[0])

                pl.flush_values()
                pl.flush()
                tempcov.close()

                # Remove 'rtree' dataset from Master file if it already exists (post domain expansion)
                # to make way for reconstruction
                with HDFLockingFile(pl.master_manager.file_path, "r+") as f:
                    if "rtree" in f.keys():
                        del f["rtree"]

                # Reconstruct 'rtree' dataset
                # Open temporary Coverage and PersistenceLayer objects
                fixed_cov = AbstractCoverage.load(tempcov.persistence_dir, mode="r+")
                pl_fixed = fixed_cov._persistence_layer

                # Call update_rtree for each brick using PersistenceLayer builtin
                brick_count = 0

                if not empty_cov:
                    for brick in bls:
                        rtree_extents, brick_extents, brick_active_size = pl_fixed.calculate_extents(
                            brick[1][1], bD, tD
                        )
                        pl_fixed.master_manager.update_rtree(brick_count, rtree_extents, obj=brick[0])
                        brick_count += 1

                # Update parameter_bounds property based on each parameter's brick data using deep inspection
                valid_bounds_types = ["BooleanType", "ConstantType", "QuantityType", "ConstantRangeType"]

                if not empty_cov:
                    for param in pdict.keys():
                        if pdict.get_context(param).param_type.__class__.__name__ in valid_bounds_types:
                            brick_domains_new, new_brick_list, brick_list_spans, tD, bD, min_data_bound, max_data_bound = self.inspect_bricks(
                                self.cov_pth, self._guid, param
                            )
                            # Update the metadata
                            pl_fixed.update_parameter_bounds(param, [min_data_bound, max_data_bound])
                pl_fixed.flush()
                fixed_cov.close()

                # Create backup copy of original Master and Parameter files
                if backup:
                    import datetime

                    orig_master_file = os.path.join(self.cov_pth, "{0}_master.hdf5".format(self._guid))

                    # Generate the timestamp
                    tstamp_format = "%Y%m%d%H%M%S"
                    tstamp = datetime.datetime.now().strftime(tstamp_format)

                    backup_master_file = os.path.join(self.cov_pth, "{0}_master.{1}.hdf5".format(self._guid, tstamp))

                    shutil.copy2(orig_master_file, backup_master_file)

                    for param in pdict.keys():
                        param_orig = os.path.join(orig_dir, param, "{0}.hdf5".format(param))
                        param_backup = os.path.join(orig_dir, param, "{0}.{1}.hdf5".format(param, tstamp))
                        shutil.copy2(param_orig, param_backup)

                # Copy Master and Parameter metadata files back to original/broken coverage (cov_pth) location
                if copy_over:
                    shutil.copy2(
                        os.path.join(tempcov.persistence_dir, "{0}_master.hdf5".format(self._guid)),
                        os.path.join(self.cov_pth, "{0}_master.hdf5".format(self._guid)),
                    )
                    for param in pdict.keys():
                        shutil.copy2(
                            os.path.join(temp_dir, param, "{0}.hdf5".format(param)),
                            os.path.join(orig_dir, param, "{0}.hdf5".format(param)),
                        )

                # Reanalyze the repaired coverage
                self._ar = self._do_analysis(analyze_bricks=True)

                # Verify repair worked, clean up if not
                if self._ar.is_corrupt:
                    # If the files were backed up then revert
                    if backup:
                        # Remove backed up files and clean up the repair attempt
                        log.info("Repair attempt failed.  Reverting to pre-repair state.")
                        # Use backup copy to replace post-repair file.
                        shutil.copy2(backup_master_file, orig_master_file)
                        # Delete the backup
                        os.remove(backup_master_file)

                        # Iterate over parameters and revert to pre-repair state
                        for param in pdict.keys():
                            param_orig = os.path.join(orig_dir, param, "{0}.hdf5".format(param))
                            param_backup = os.path.join(orig_dir, param, "{0}.{1}.hdf5".format(param, tstamp))
                            # Use backup copy to replace post-repair file.
                            shutil.copy2(param_backup, param_orig)
                            # Delete the backup
                            os.remove(param_backup)

                    raise ValueError("Coverage repair failed! Revert to stored backup version, if possible.")

                # Remove temporary coverage
                if not keep_temp:
                    shutil.rmtree(tempcov_dir)
                else:
                    return tempcov_dir
        else:
            log.info("Coverage is not corrupt, nothing to repair!")
Example #15
    def repair(self,
               backup=True,
               copy_over=True,
               keep_temp=False,
               reanalyze=False,
               analyze_bricks=False,
               detailed_analysis=False):
        """
        Heavy repair tool that recreates a blank persisted Coverage from the broken coverage's
        original construction parameters, then reconstructs the Master and Parameter metadata
        files by inspection of the ION objects and "valid" brick files.
        @return:
        """
        if self._ar is None or reanalyze:
            self._ar = self._do_analysis(analyze_bricks=analyze_bricks,
                                         detailed_analysis=detailed_analysis)

        if self._ar.is_corrupt:
            if len(self._ar.get_brick_corruptions()) > 0:
                raise NotImplementedError(
                    'Brick corruption.  Cannot repair at this time!!!')
            else:
                # Repair the Master and Parameter metadata files

                # Need the ParameterDictionary, TemporalDomain and SpatialDomain
                pdict = ParameterDictionary.load(
                    self._dso.parameter_dictionary)
                tdom = GridDomain.load(self._dso.temporal_domain)
                sdom = GridDomain.load(self._dso.spatial_domain)

                # Set up the working directory for the recovered coverage
                tempcov_dir = tempfile.mkdtemp('covs')

                # Create the temporary Coverage
                tempcov = SimplexCoverage(root_dir=tempcov_dir,
                                          persistence_guid=self._guid,
                                          name=self._guid,
                                          parameter_dictionary=pdict,
                                          spatial_domain=sdom,
                                          temporal_domain=tdom)
                # Handle to persistence layer for tempcov
                pl = tempcov._persistence_layer

                # Set up the original and temporary coverage path strings
                orig_dir = os.path.join(self.cov_pth, self._guid)
                temp_dir = os.path.join(tempcov.persistence_dir,
                                        tempcov.persistence_guid)

                # Insert same number of timesteps into temporary coverage as in broken coverage
                brick_domains_new, new_brick_list, brick_list_spans, tD, bD, min_data_bound, max_data_bound = self.inspect_bricks(
                    self.cov_pth, self._guid, 'time')
                empty_cov = brick_list_spans is None  # If None, there are no brick files --> no timesteps, empty coverage!
                if not empty_cov:
                    bls = [s.value for s in brick_list_spans]
                    maxes = [sum(b[3]) for b in new_brick_list.values()]
                    tempcov.insert_timesteps(sum(maxes))

                    # Replace metadata in the Master file
                    pl.master_manager.brick_domains = brick_domains_new
                    pl.master_manager.brick_list = new_brick_list

                    # Repair ExternalLinks to brick files
                    f = h5py.File(pl.master_manager.file_path, 'a')
                    for param_name in pdict.keys():
                        del f[param_name]
                        f.create_group(param_name)
                        for brick in bls:
                            link_path = '/{0}/{1}'.format(param_name, brick[0])
                            brick_file_name = '{0}.hdf5'.format(brick[0])
                            brick_rel_path = os.path.join(
                                pl.parameter_metadata[param_name].root_dir.
                                replace(tempcov.persistence_dir,
                                        '.'), brick_file_name)
                            log.debug('link_path: %s', link_path)
                            log.debug('brick_rel_path: %s', brick_rel_path)
                            pl.master_manager.add_external_link(
                                link_path, brick_rel_path, brick[0])
                    f.close()

                pl.flush_values()
                pl.flush()
                tempcov.close()

                # Remove 'rtree' dataset from Master file if it already exists (post domain expansion)
                # to make way for reconstruction
                f = h5py.File(pl.master_manager.file_path, 'a')
                if 'rtree' in f.keys():
                    del f['rtree']
                f.close()

                # Reconstruct 'rtree' dataset
                # Open temporary Coverage and PersistenceLayer objects
                fixed_cov = AbstractCoverage.load(tempcov.persistence_dir,
                                                  mode='a')
                pl_fixed = fixed_cov._persistence_layer

                # Call update_rtree for each brick using PersistenceLayer builtin
                brick_count = 0

                if not empty_cov:
                    for brick in bls:
                        rtree_extents, brick_extents, brick_active_size = pl_fixed.calculate_extents(
                            brick[1][1], bD, tD)
                        pl_fixed.master_manager.update_rtree(brick_count,
                                                             rtree_extents,
                                                             obj=brick[0])
                        brick_count += 1

                # Update parameter_bounds property based on each parameter's brick data using deep inspection
                valid_bounds_types = [
                    'BooleanType', 'ConstantType', 'QuantityType',
                    'ConstantRangeType'
                ]

                if not empty_cov:
                    for param in pdict.keys():
                        ptype_name = pdict.get_context(param).param_type.__class__.__name__
                        if ptype_name in valid_bounds_types:
                            brick_domains_new, new_brick_list, brick_list_spans, tD, bD, min_data_bound, max_data_bound = self.inspect_bricks(
                                self.cov_pth, self._guid, param)
                            # Update the metadata
                            pl_fixed.update_parameter_bounds(
                                param, [min_data_bound, max_data_bound])
                pl_fixed.flush()
                fixed_cov.close()

                # Create backup copy of original Master and Parameter files
                if backup:
                    import datetime
                    orig_master_file = os.path.join(
                        self.cov_pth, '{0}_master.hdf5'.format(self._guid))

                    # Generate the timestamp
                    tstamp_format = '%Y%m%d%H%M%S'
                    tstamp = datetime.datetime.now().strftime(tstamp_format)

                    backup_master_file = os.path.join(
                        self.cov_pth,
                        '{0}_master.{1}.hdf5'.format(self._guid, tstamp))

                    shutil.copy2(orig_master_file, backup_master_file)

                    for param in pdict.keys():
                        param_orig = os.path.join(orig_dir, param,
                                                  '{0}.hdf5'.format(param))
                        param_backup = os.path.join(
                            orig_dir, param,
                            '{0}.{1}.hdf5'.format(param, tstamp))
                        shutil.copy2(param_orig, param_backup)

                # Copy Master and Parameter metadata files back to original/broken coverage (cov_pth) location
                if copy_over:
                    shutil.copy2(
                        os.path.join(tempcov.persistence_dir,
                                     '{0}_master.hdf5'.format(self._guid)),
                        os.path.join(self.cov_pth,
                                     '{0}_master.hdf5'.format(self._guid)))
                    for param in pdict.keys():
                        shutil.copy2(
                            os.path.join(temp_dir, param,
                                         '{0}.hdf5'.format(param)),
                            os.path.join(orig_dir, param,
                                         '{0}.hdf5'.format(param)))

                # Reanalyze the repaired coverage
                self._ar = self._do_analysis(analyze_bricks=True)

                # Verify repair worked, clean up if not
                if self._ar.is_corrupt:
                    # If the files were backed up then revert
                    if backup:
                        # Remove backed up files and clean up the repair attempt
                        log.info(
                            'Repair attempt failed.  Reverting to pre-repair state.'
                        )
                        # Use backup copy to replace post-repair file.
                        shutil.copy2(backup_master_file, orig_master_file)
                        # Delete the backup
                        os.remove(backup_master_file)

                        # Iterate over parameters and revert to pre-repair state
                        for param in pdict.keys():
                            param_orig = os.path.join(orig_dir, param,
                                                      '{0}.hdf5'.format(param))
                            param_backup = os.path.join(
                                orig_dir, param,
                                '{0}.{1}.hdf5'.format(param, tstamp))
                            # Use backup copy to replace post-repair file.
                            shutil.copy2(param_backup, param_orig)
                            # Delete the backup
                            os.remove(param_backup)

                    raise ValueError(
                        'Coverage repair failed! Revert to stored backup version, if possible.'
                    )

                # Remove temporary coverage
                if not keep_temp:
                    shutil.rmtree(tempcov_dir)
                else:
                    return tempcov_dir
        else:
            log.info('Coverage is not corrupt, nothing to repair!')