Example #1
    def retrieve_plot_data(self):
        '''
        retrieve default data from spec data file

        data parser for 2-D mesh and hklmesh
        '''
        label1, start1, end1, intervals1, label2, start2, end2, intervals2, time = self.scan.scanCmd.split()[1:]
        if label1 not in self.scan.data:
            label1 = self.scan.L[0]      # mnemonic v. name
        if label2 not in self.scan.data:
            label2 = self.scan.L[1]      # mnemonic v. name
        axis1 = self.scan.data.get(label1)
        axis2 = self.scan.data.get(label2)
        intervals1, intervals2 = map(int, (intervals1, intervals2))
        start1, end1, start2, end2, time = map(float, (start1, end1, start2, end2, time))

        if len(axis1) < intervals1 and min(axis2) == max(axis2):
            # stopped scan before second row started, 1-D plot is better (issue #82)
            self.axes = [label1,]
            self.signal = self.scan.column_last
            self.data[label1] = self.scan.data[label1]
            self.data[self.signal] = self.scan.data[self.signal]
            return

        axis1 = axis1[0:intervals1+1]
        self.data[label1] = axis1    # 1-D array

        axis2 = [axis2[row] for row in range(len(axis2)) if row % (intervals1+1) == 0]
        self.data[label2] = axis2    # 1-D array

        column_labels = self.scan.L
        column_labels.remove(label1)    # special handling
        column_labels.remove(label2)    # special handling
        if self.scan.scanCmd.startswith('hkl'):
            # find the reciprocal space axis held constant
            label3 = [key for key in ('H', 'K', 'L') if key in column_labels][0]
            self.data[label3] = self.scan.data.get(label3)[0]    # constant

        # build 2-D data objects (do not build label1, label2, [or label3] as 2-D objects)
        data_shape = [len(axis2), len(axis1)]
        for label in column_labels:
            if label not in self.data:
                axis = numpy.array(self.scan.data.get(label))
                self.data[label] = utils.reshape_data(axis, data_shape)

        self.signal = utils.clean_name(self.scan.column_last)
        self.axes = [label1, label2]
    
        if spec.MCA_DATA_KEY in self.scan.data:    # 3-D array(s)
            # save each spectrum
            for key, spectrum in sorted(self.scan.data[spec.MCA_DATA_KEY].items()):
                num_channels = len(spectrum[0])
                data_shape.append(num_channels)
                mca = numpy.array(spectrum)
                data = utils.reshape_data(mca, data_shape)
                channels = range(1, num_channels+1)
                ds_name = '_' + key + '_'
                self.data[ds_name] = data
                self.data[ds_name+'channel_'] = channels
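A minimal sketch of the reshape step above, assuming utils.reshape_data behaves like a plain row-major numpy.reshape for a completed scan (the motor positions and detector values are made up):

    import numpy

    # Toy mesh: intervals1 = 3 (4 points on the fast axis, label1) and
    # intervals2 = 2 (3 points on the slow axis, label2) give 12 data rows.
    axis1 = numpy.linspace(0.0, 3.0, 4)        # kept: first intervals1 + 1 values
    axis2 = numpy.array([10.0, 11.0, 12.0])    # kept: every (intervals1 + 1)-th value
    detector = numpy.arange(12, dtype=float)   # flat detector column from the file

    # Same convention as data_shape = [len(axis2), len(axis1)] above:
    # rows follow the slow axis, columns follow the fast axis.
    image = detector.reshape(len(axis2), len(axis1))
    print(image.shape)   # (3, 4)
    print(image[1, 2])   # 6.0 -> third point of the second mesh row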
Example #2
    def mesh(self, nxdata, scan):
        '''*internal*: data parser for 2-D mesh and hklmesh'''
        # 2-D parser: http://www.certif.com/spec_help/mesh.html
        # mesh motor1 start1 end1 intervals1 motor2 start2 end2 intervals2 time
        # 2-D parser: http://www.certif.com/spec_help/hklmesh.html
        #  hklmesh Q1 start1 end1 intervals1 Q2 start2 end2 intervals2 time
        # mesh:    data/33id_spec.dat  scan 22
        # hklmesh: data/33bm_spec.dat  scan 17
        signal, axes = '', ['',]
        
        label1, start1, end1, intervals1, label2, start2, end2, intervals2, time = scan.scanCmd.split()[1:]
        if label1 not in scan.data:
            label1 = scan.L[0]      # mnemonic v. name
        if label2 not in scan.data:
            label2 = scan.L[1]      # mnemonic v. name
        axis1 = scan.data.get(label1)
        axis2 = scan.data.get(label2)
        intervals1, intervals2 = map(int, (intervals1, intervals2))
        start1, end1, start2, end2, time = map(float, (start1, end1, start2, end2, time))
        if len(axis1) < intervals1:     # stopped scan before second row started
            signal, axes = self.oneD(nxdata, scan)        # fallback support
        else:
            axis1 = axis1[0:intervals1+1]
            axis2 = [axis2[row] for row in range(len(axis2)) if row % (intervals1+1) == 0]

            column_labels = scan.L
            column_labels.remove(label1)    # special handling
            column_labels.remove(label2)    # special handling
            if scan.scanCmd.startswith('hkl'):
                # find the reciprocal space axis held constant
                label3 = [key for key in ('H', 'K', 'L') if key not in (label1, label2)][0]
                axis3 = scan.data.get(label3)[0]
                self.write_ds(nxdata, label3, axis3)

            self.write_ds(nxdata, label1, axis1)    # 1-D array
            self.write_ds(nxdata, label2, axis2)    # 1-D array

            # build 2-D data objects (do not build label1, label2, [or label3] as 2-D objects)
            data_shape = [len(axis1), len(axis2)]
            for label in column_labels:
                if label not in nxdata:
                    axis = np.array(scan.data.get(label))
                    self.write_ds(nxdata, label, utils.reshape_data(axis, data_shape))

            signal = utils.clean_name(scan.column_last)
            axes = ':'.join([label1, label2])

        if '_mca_' in scan.data:    # 3-D array(s)
            # save each spectrum
            for key, spectrum in sorted(scan.data['_mca_'].items()):
                num_channels = len(spectrum[0])
                data_shape.append(num_channels)
                mca = np.array(spectrum)
                data = utils.reshape_data(mca, data_shape)
                channels = range(1, num_channels+1)
                ds_name = '_' + key + '_'
                self.write_ds(nxdata, ds_name, data, axes=axes+':'+ds_name+'channel_', units='counts')
                self.write_ds(nxdata, ds_name+'channel_', channels, units='channel')

        return signal, axes
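For reference, a self-contained sketch of how a mesh command line of the documented form unpacks; the command string is invented and only its layout comes from the comments above:

    # mesh motor1 start1 end1 intervals1 motor2 start2 end2 intervals2 time
    scanCmd = "mesh th 0.0 1.0 10 chi -0.5 0.5 20 1.0"   # hypothetical command
    label1, start1, end1, intervals1, label2, start2, end2, intervals2, time = scanCmd.split()[1:]
    intervals1, intervals2 = map(int, (intervals1, intervals2))
    start1, end1, start2, end2, time = map(float, (start1, end1, start2, end2, time))
    print(label1, intervals1, label2, intervals2, time)   # th 10 chi 20 1.0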
Example #3
    def __init__(self, imagename, psfname=None, sourcefinder_name='pybdsm',
                 makeplots=True, do_psf_corr=True, do_local_var=True,
                 psf_corr_region=2, local_var_region=10, rel_excl_src=None, 
                 pos_smooth=1.6, neg_smooth=1.6, loglevel=0, thresh_pix=5,
                 thresh_isl=3, neg_thresh_isl=3, neg_thresh_pix=5,
                 prefix=None, do_nearsources=False, **kw):

        """ Takes in image and extracts sources and makes 
            reliability estimations..
           
 
        imagename: Fits image
        psfname: PSF fits image, optional. 

        sourcefinder_name: str, optional. Default 'pybdsm'.
            Uses the source finder specified by the user.

        makeplots: bool, optional. Default is True.
            Make reliability plots.

        do_psf_corr : bool, optional. Default True.
            If True, correlation of sources with PSF will be added
            as an extra source parameter in reliability estimation.
            But the PSF fits image must be provided.

        do_local_var : bool, optional. Default is True.
            Adds local variance as an extra source parameter,
            similar to do_psf_corr but independent of the PSF image. 

        psf_corr_region : int, optional. Default value is 2. 
            Data size to correlate around a source in beam sizes.
 
        local_var_region: int, optional. Default 10.
            Data size to compute the local variance in beam sizes.

        rel_excl_src : float numbers, optional. Default is None. 
            Excludes sources in this region from the reliability
            estimations, e.g ra, dec, radius in degrees. For
            many regions: ra1, dec1, radius1: ra2, dec2, radius2.

        pos_smooth : float, optional. Default 1.6
            Data smoothing threshold in the positive side of an image.
            For default value 1.6, data peaks < 1.6 * image noise
            will be averaged out.

        neg_smooth : float, optional. Default 1.6.
            Similar to pos_smooth but applied to the negative side of
            an image.

        loglevel : int, optional. Default is 0.
            Provides Python logging options: 0, 1, 2 and 3 for info, debug,
            error and critical respectively.

        thresh_isl :  float, optional. Default is 3.
            Threshold for the island boundary in number of sigma above
            the mean. Determines extent of island used for fitting 
            [pybdsm]. For positive pixels.

        thresh_pix : float, optional. Default is 5.
            Source detection threshold: threshold for the island 
            peak in number of sigma above the mean. For positive pixels.

        neg_thresh_isl : float, optional. Default is 3. 
            Similar to thresh_isl but applied to the negative side
            of the image.

        neg_thresh_pix : float, optional. Default is 5. 
            Similar to thresh_pix but applied to the negative
            side of an image.

        do_nearsources: boolean. Default is False.
            If True, adds the number of nearest neighbours as an extra
            parameter. It looks for sources within 5 beam sizes.
   
        kw : keyword arguments for source extraction. Should be a mapping, e.g.
            kw['thresh_isl'] = 2.0 or kw['do_polarization'] = True
        """


       
        # image, psf image
        self.imagename = imagename
        self.psfname = psfname 
        # setting output file names  
     
        self.prefix = prefix
        self.poslsm = self.prefix + "_positive.lsm.html"
        self.neglsm = self.prefix + "_negative.lsm.html"

        # log level  
        self.loglevel = loglevel
        self.log = utils.logger(self.loglevel, prefix=self.prefix)

        self.log.info("Loading Image data")

        # reading imagename data
        self.imagedata, self.wcs, self.header, self.pixelsize =\
            utils.reshape_data(self.imagename, prefix=self.prefix)

        self.bmaj = numpy.deg2rad(self.header["BMAJ"])

        self.do_psf_corr = do_psf_corr
        if not self.psfname:
            self.log.info("No psf provided, do_psf_corr = False.")
            self.do_psf_corr = False
           
        # computing negative noise
        self.noise = utils.negative_noise(self.imagedata)
        
        self.log.info("The negative noise is %e"%self.noise)

        if self.noise == 0: 
            self.log.debug("The negative noise is 0, check image")

        # source finder initialization
        self.sourcefinder_name  = sourcefinder_name
        self.log.info("Using %s source finder to extract sources."%
                      self.sourcefinder_name)


        # making negative image
        self.negativeimage = utils.invert_image(
                               self.imagename, self.imagedata,
                               self.header, self.prefix)

        # boolean optionals    
        self.makeplots = makeplots
        self.do_local_var = do_local_var
        self.nearsources = do_nearsources

        # smoothing factors
        self.pos_smooth = pos_smooth
        self.neg_smooth = neg_smooth
        
        # region to evaluate
        self.psf_corr_region = psf_corr_region
        self.local_var_region = local_var_region
        self.rel_excl_src = rel_excl_src
 
        # Pybdsm or source finder fitting thresholds
        self.thresh_isl = thresh_isl
        self.thresh_pix = thresh_pix
        self.opts_pos = dict(thresh_pix=self.thresh_pix,
                             thresh_isl=self.thresh_isl)
        self.opts_pos.update(kw)
        self.opts_neg = {}
        self.neg_thresh_isl = neg_thresh_isl
        self.neg_thresh_pix = neg_thresh_pix
        self.opts_neg["thresh_isl"] = self.neg_thresh_isl
        self.opts_neg["thresh_pix"] = self.neg_thresh_pix
Example #4
    def _load_data(self):
        """Load input and output data from text files."""
        df, area = load_forcing(self.camels_root, self.basin)
        df['QObs(mm/d)'], cal_start = load_discharge(self.camels_root,
                                                     self.basin, area)

        # determine start and end dates of the calibration period and
        # restrict the data frame to the requested period
        if self.period == 'n_cal':
            end_year = cal_start.year + self.years[-1]
            cal_end = pd.to_datetime(f"{end_year}/09/30", yearfirst=True)

            # statistics of the calibration period
            self.means = df[cal_start:cal_end].mean()
            self.stds = df[cal_start:cal_end].std()

            df = df[cal_start:cal_end]

        else:
            start_year = cal_start.year + self.years[0]
            start_date = pd.to_datetime(f"{start_year}/10/01", yearfirst=True)

            # check if end date is till end of time series
            if self.years[1] < 0:
                end_date = df.index[-1]
            else:
                end_year = cal_start.year + self.years[1]
                end_date = pd.to_datetime(f"{end_year}/09/30", yearfirst=True)
            df = df[start_date:end_date]

        # store first and last date of the selected period
        self.period_start = df.index[0]
        self.period_end = df.index[-1]

        # extract data matrix from data frame
        x = np.array([
            df['prcp(mm/day)'].values, df['srad(W/m2)'].values,
            df['tmax(C)'].values, df['tmin(C)'].values, df['vp(Pa)'].values
        ]).T

        y = np.array([df['QObs(mm/d)'].values]).T

        # normalize data, reshape for LSTM training and remove invalid samples
        x = self._local_normalization(x, variable='inputs')
        x, y = reshape_data(x, y, 365)

        if self.period != "n_test":
            # Delete all records where no discharge was measured (-999)
            x = np.delete(x, np.argwhere(y < 0)[:, 0], axis=0)
            y = np.delete(y, np.argwhere(y < 0)[:, 0], axis=0)

            # Delete all samples where discharge is NaN
            if np.sum(np.isnan(y)) > 0:
                print(f"Deleted some records because of NaNs {self.basin}")
                x = np.delete(x, np.argwhere(np.isnan(y)), axis=0)
                y = np.delete(y, np.argwhere(np.isnan(y)), axis=0)

            y = self._local_normalization(y, variable='output')

        # convert arrays to torch tensors
        x = torch.from_numpy(x.astype(np.float32))
        y = torch.from_numpy(y.astype(np.float32))

        return x, y
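The call reshape_data(x, y, 365) above prepares the LSTM samples. A hedged sketch of that kind of windowing follows; the actual helper is not shown here, and this assumes each sample holds the preceding 365 days of forcings with the discharge of the window's last day as target:

    import numpy as np

    def sliding_windows(x, y, seq_length):
        """Illustrative stand-in: build (samples, seq_length, features) inputs
        and align each target with the last day of its window."""
        num_samples = x.shape[0] - seq_length + 1
        x_new = np.zeros((num_samples, seq_length, x.shape[1]), dtype=x.dtype)
        y_new = np.zeros((num_samples, 1), dtype=y.dtype)
        for i in range(num_samples):
            x_new[i] = x[i:i + seq_length, :]
            y_new[i] = y[i + seq_length - 1, 0]
        return x_new, y_new

    # Toy data: 400 days, 5 forcing variables, 1 discharge column.
    x = np.random.rand(400, 5)
    y = np.random.rand(400, 1)
    xw, yw = sliding_windows(x, y, 365)
    print(xw.shape, yw.shape)   # (36, 365, 5) (36, 1)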
Example #5
    def retrieve_plot_data(self):
        '''
        retrieve default data from spec data file

        data parser for 2-D mesh and hklmesh
        '''
        label1, start1, end1, intervals1, label2, start2, end2, intervals2, time = self.scan.scanCmd.split(
        )[1:]
        if label1 not in self.scan.data:
            label1 = self.scan.L[0]  # mnemonic v. name
        if label2 not in self.scan.data:
            label2 = self.scan.L[1]  # mnemonic v. name
        axis1 = self.scan.data.get(label1)
        axis2 = self.scan.data.get(label2)
        intervals1, intervals2 = map(int, (intervals1, intervals2))
        start1, end1, start2, end2, time = map(
            float, (start1, end1, start2, end2, time))

        if len(axis1) < intervals1 and min(axis2) == max(axis2):
            # stopped scan before second row started, 1-D plot is better (issue #82)
            self.axes = [
                label1,
            ]
            self.signal = self.scan.column_last
            self.data[label1] = self.scan.data[label1]
            self.data[self.signal] = self.scan.data[self.signal]
            return

        axis1 = axis1[0:intervals1 + 1]
        self.data[label1] = axis1  # 1-D array

        axis2 = [
            axis2[row] for row in range(len(axis2))
            if row % (intervals1 + 1) == 0
        ]
        self.data[label2] = axis2  # 1-D array

        column_labels = self.scan.L
        column_labels.remove(label1)  # special handling
        column_labels.remove(label2)  # special handling
        if self.scan.scanCmd.startswith('hkl'):
            # find the reciprocal space axis held constant
            label3 = [key for key in ('H', 'K', 'L')
                      if key in column_labels][0]
            self.data[label3] = self.scan.data.get(label3)[0]  # constant

        # build 2-D data objects (do not build label1, label2, [or label3] as 2-D objects)
        data_shape = [len(axis2), len(axis1)]
        for label in column_labels:
            if label not in self.data:
                axis = numpy.array(self.scan.data.get(label))
                self.data[label] = utils.reshape_data(axis, data_shape)

        self.signal = utils.clean_name(self.scan.column_last)
        self.axes = [label1, label2]

        if spec.MCA_DATA_KEY in self.scan.data:  # 3-D array(s)
            # save each spectrum
            for key, spectrum in sorted(
                    self.scan.data[spec.MCA_DATA_KEY].items()):
                num_channels = len(spectrum[0])
                data_shape.append(num_channels)
                mca = numpy.array(spectrum)
                data = utils.reshape_data(mca, data_shape)
                channels = range(1, num_channels + 1)
                ds_name = '_' + key + '_'
                self.data[ds_name] = data
                self.data[ds_name + 'channel_'] = channels
Example #6
    def __init__(self, imagename, psfname=None, sourcefinder_name='pybdsm',
                 makeplots=True, do_psf_corr=True, do_local_var=True,
                 psf_corr_region=5, local_var_region=10, rel_excl_src=None, 
                 pos_smooth=2, neg_smooth=2, loglevel=0, thresh_pix=5,
                 thresh_isl=3, neg_thresh_isl=3, neg_thresh_pix=5, reset_rel=None,
                 prefix=None, do_nearsources=False, savefits=False,
                 increase_beam_cluster=False, savemask_pos=False, savemask_neg=False,
                 **kw):

        """ Takes in image and extracts sources and makes 
            reliability estimations..
           
 
        imagename: Fits image
        psfname: PSF fits image, optional. 

        sourcefinder_name: str, optional. Default 'pybdsm'.
            Uses the source finder specified.

        makeplots: bool, optional. Default is True.
            Make reliability plots.

        do_psf_corr : bool, optional. Default True.
            If True, PSF correlation will be added
            as an extra parameter for density estimations.
            NB: the PSF fits image must be provided.

        do_local_var : bool, optional. Default is True.
            If True, adds local variance as an extra parameter,
            for density estimations. 
        
        do_nearsources: boolean. Default is False.
            If True, adds the number of nearest neighbours as an extra
            parameter. It looks for sources within 5 beam sizes.

        psf_corr_region : int, optional. Default value is 5. 
            Data size to correlate around a source, in beam sizes.
 
        local_var_region: int, optional. Default 10.
            Data size to compute the local variance in beam sizes.

        rel_excl_src : floats, optional. Default is None. 
            Excludes sources in a specified region
            e.g ra, dec, radius in degrees. For
            2 regions: ra1, dec1, radius1: ra2, dec2, radius2, etc.

        pos_smooth : float, optional. Default 2.
            Masking threshold for the positive image.
            For default value 2, data peaks < 2 * image noise
            are masked.

        neg_smooth : float, optional. Default 2.
            Similar to pos_smooth but applied to the negative image.

        thresh_isl :  float, optional. Default is 3.
            Threshold for forming islands in the positive image

        thresh_pix : float, optional. Default is 5.
            Threshold for model fitting, in positive image.

        neg_thresh_isl : float, optional. Default is 3. 
            Similar to thresh_isl but for the negative image.

        neg_thresh_pix : float, optional. Default is 5. 
            Similar to thresh_pix but for negative image.

        savefits: boolean. Default is False.
            If True a negative image is saved.

        reset_rel: boolean, optional. Default is None. If True,
            sources with correlation < 0.002 and rel > 0.60
            have their reliabilities set to 0.

        increase_beam_cluster: boolean, optional. If True, source
            groupings will be increased by 20% of the beam size. If False,
            the actual beam size will be used. Default is False.

        savemask_pos: boolean, optional. If True, the mask applied to
            the positive side of the image after smoothing is saved.
            
        savemask_neg: Similar to savemask_pos but for the negative
            side of an image.
        
        loglevel : int, optional. Default is 0.
            Provides Python logging options: 0, 1, 2 and 3 are for info, debug,
            error and critical respectively.
   
        kw : keyword arguments for source extraction. Should be a mapping, e.g.
            kw['thresh_isl'] = 2.0 or kw['do_polarization'] = True
        """


       
        self.prefix = prefix

        # log level  
        self.loglevel = loglevel
        self.log = utils.logger(self.loglevel, prefix=self.prefix)

        
        # image, psf image
        self.imagename = imagename
        self.psfname = psfname 
      
        # reading imagename data
        imagedata, self.wcs, self.header, self.pixelsize =\
            utils.reshape_data(self.imagename, prefix=self.prefix)

        self.imagedata = numpy.array(imagedata, dtype=numpy.float32)
        self.image2by2 = numpy.array(utils.image_data(imagedata, prefix),
                             dtype=numpy.float32)

        self.bmaj = numpy.deg2rad(self.header["BMAJ"])

        # boolean optionals    
        self.makeplots = makeplots
        self.do_local_var = do_local_var
        self.nearsources = do_nearsources
        self.do_psf_corr = do_psf_corr
        self.savemaskpos = savemask_pos
        self.savemaskneg = savemask_neg
        self.savefits = savefits
        self.derel = reset_rel

        if not self.psfname:
            self.log.info(" No psf provided, do_psf_corr is set to False.")
            self.do_psf_corr = False

        if self.psfname:
            psfdata, self.psfhdr = utils.open_psf_image(self.psfname)
            self.psfdata = utils.image_data(psfdata, prefix)
 
        # computing negative noise
        self.noise, self.mean = utils.negative_noise(self.imagedata, self.prefix) #here is 2X2 data here
        
        self.log.info(" The negative noise is %e Jy/beam"%self.noise)
        if self.noise == 0: 
            self.log.debug(" The negative noise is 0, check image")

        # source finder initialization
        self.sourcefinder_name  = sourcefinder_name
        self.log.info(" Using %s source finder to extract the sources."%
                      self.sourcefinder_name)

        self.negimage = self.prefix + "_negative.fits"
        
        negativedata =  utils.invert_image(
                               self.imagename, self.imagedata,
                               self.header, self.negimage, prefix)
        self.negimage2by2 = numpy.array(utils.image_data(negativedata, prefix),
                             dtype=numpy.float32)
        self.negativedata = numpy.array(negativedata, numpy.float32)
       
        # smoothing factors
        self.pos_smooth = pos_smooth
        self.neg_smooth = neg_smooth
        
        # region to evaluate
        self.corrstep = psf_corr_region
        self.localstep = local_var_region
        self.radiusrm = rel_excl_src
        self.do_beam = increase_beam_cluster
         
        beam_pix = int(round(numpy.rad2deg(self.bmaj)/self.pixelsize))
        self.locstep = self.localstep * beam_pix
        self.cfstep = self.corrstep * beam_pix
        self.bmin, self.bpa =  self.header["BMIN"], self.header["BPA"]

        # Pybdsm or source finder fitting thresholds
        self.thresh_isl = thresh_isl
        self.thresh_pix = thresh_pix
        self.opts_pos = dict(thresh_pix=self.thresh_pix,
                             thresh_isl=self.thresh_isl)
        if self.do_beam:
            # pass an enlarged beam (by 20%) to the source finder
            self.opts_pos["beam"] = (1.2*self.header["BMAJ"], 1.2*self.bmin, self.bpa)

        self.opts_pos.update(kw)
        self.opts_neg = {}
        self.neg_thresh_isl = neg_thresh_isl
        self.neg_thresh_pix = neg_thresh_pix
        self.opts_neg["thresh_isl"] = self.neg_thresh_isl
        self.opts_neg["thresh_pix"] = self.neg_thresh_pix
Example #7
    def __init__(self, imagename, psfname, poscatalog, negcatalog,
                 snr_thresh=100, local_thresh=0.6, local_region=10,
                 psfcorr_region=2, high_corr_thresh=0.5, negdetec_region=10, 
                 negatives_thresh=10, phasecenter_excl_radius=None,
                 prefix=None, loglevel=0):


        """ Determines sources that require direction-dependent (DD)
            calibration solutions.

        imagename: Fits data
        psfname : PSF fits data
        poscatalog : Catalog of positive detections.
        negcatalog : Catalog of negative detections.
             Sources extracted from the negative side
             of an image.
        snr_thresh : float, optional. Default is 100.
             Any source with more than 100 times the minimum SNR is
             considered a high-SNR source.
        local_thresh : float, optional. Default is 0.6.
             Sources with local variance greater than
             0.6 * negative noise are considered as 
             sources of high local variance.
        local_region : integer, optional. Default is 10.
             A region to compute the local variance in
             beam sizes.
        psfcorr_region : integer, optional. Default is 2.
             Data size to correlate, in beam sizes.
        high_corr_thresh : float, optional. Default is 0.5.
             Correlation threshold. Sources with PSF correlation
             above this value are considered highly correlated.
        negdetec_region : float, optional. Default is 10.
             Region to look for negative detections around
             a given source, in beam sizes.
        negatives_thresh : float, optional. Default is 10.
             Threshold on the number of negative detections, N. Sources
             with more than N negative detections around them require
             direction-dependent (DD) calibration solutions.
        phasecenter_excl_radius : float, optional.
             A radius from the phase center (in beam sizes) to exclude
             from the final DD source selection.
        prefix : str, optional. Sets a prefix to the output directory.
        loglevel : int, optional. Default is 0. Python logging:
             0, 1, 2, 3 for info, debug, error and critical respectively.
        """

        # image, psf image, positive and negative catalogues
        self.imagename = imagename
        self.psfname = psfname
        self.poscatalog = poscatalog
        self.negcatalog = negcatalog
        self.loglevel = loglevel
        self.prefix = prefix
        self.log = utils.logger(self.loglevel, prefix=self.prefix)
        
        # reading the imagename data
        self.imagedata, self.wcs, self.header, self.pixsize =\
                          utils.reshape_data(self.imagename, prefix=self.prefix)
        self.log.info("Loading image data")

        # computing the noise
        self.noise = utils.negative_noise(self.imagedata)
        self.log.info("The negative noise of an image is %e"%
                       self.noise)

        # tags
        self.snr_tag = "snr"
        self.high_local_tag = "high_var"
        self.high_corr_tag = "high_corr"
        self.dd_tag = "dE"

        # thresholds
        self.snr_thresh = snr_thresh
        self.local_thresh = local_thresh
        self.high_corr_thresh = high_corr_thresh
        self.negatives_thresh = negatives_thresh
        
        #regions
        self.psfcorr_region = psfcorr_region
        self.local_region = local_region
        self.phasecenter_excl_radius = phasecenter_excl_radius
        self.negdetec_region =  negdetec_region
        
        # central ra, dec, beam major axes
        self.ra0 = numpy.deg2rad(self.header["CRVAL1"])
        self.dec0 = numpy.deg2rad(self.header["CRVAL2"])
        self.bmaj_deg = self.header['BMAJ'] # in degrees
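As an illustration of the snr_thresh criterion described in the docstring (the fluxes and noise below are invented; this is not the package's selection code):

    import numpy

    noise = 1.0e-5                                  # negative noise, Jy/beam
    flux = numpy.array([2.0e-4, 5.0e-3, 8.0e-2])    # peak fluxes of three sources
    snr = flux / noise
    high_snr = snr > 100 * snr.min()                # snr_thresh = 100
    print(high_snr)                                 # [False False  True]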
Example #8
        # Check dimensionality

        print('subject %s: fusiform:  voxel by TR matrix - shape: ' % sub, masked_fusiform_data.shape)
        print('subject %s: fusiform: label list - shape: ' % sub, fusiform_TR_onsets_shifted.shape)
        print()
        print('subject %s: DFR:  voxel by TR matrix - shape: ' % sub, masked_DFR_data.shape)
        print('subject %s: DFR: label list - shape: ' % sub, DFR_TR_onsets_shifted.shape)

        # feature select based on top voxels from Faces vs Objects contrast
        spm_img_file = "spmT_0004.img"
        #spm_img_file = "1005_FFA_FvO.img"
        top_voxels = find_top_voxels(sub, spm_img_file, mask, 100)

        # extract BOLD from non-zero labels (only really matters for the fusiform)
        roi_masked_data = masked_fusiform_data[top_voxels]
        fusiform_data_masked_reduced_nonzero, fusiform_TR_onsets_shifted_nonzero = reshape_data(fusiform_TR_onsets_shifted,
                                                                                                roi_masked_data, 0)

        DFR_data_masked_reduced = masked_DFR_data[top_voxels]

        # create average trial types for DFR
        DFR_average_trials = create_trial_type_averages(DFR_data_masked_reduced, DFR_onsets)

        # run classifier
        run_ids = fusiform_TR_onsets_shifted_nonzero[:, 1] - 1

        # set up collector arrays
        sp = PredefinedSplit(run_ids)
        clf_score = np.array([])
        C_best = []
        high_corr_score = np.array([])
        high_incorr_score = np.array([])
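The fragment stops right after building the PredefinedSplit. A minimal, self-contained sketch of how such a split is typically used for leave-one-run-out cross-validation; the classifier choice and the synthetic data are assumptions, not taken from the original script:

    import numpy as np
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import PredefinedSplit, cross_val_score

    # Synthetic stand-in: 60 trials x 100 voxels, 2 conditions, 3 scanner runs.
    rng = np.random.RandomState(0)
    X = rng.randn(60, 100)
    labels = np.repeat([0, 1], 30)
    run_ids = np.tile([0, 1, 2], 20)   # fold index per trial, one fold per run

    sp = PredefinedSplit(run_ids)      # each run is held out once as the test set
    clf = LogisticRegression(max_iter=1000)
    scores = cross_val_score(clf, X, labels, cv=sp)
    print(scores.mean())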