def download_vac(self):
    """Fetch this VAC file onto the local system via rsync.

    When the data has already been retrieved (``self._data`` is set),
    the download is skipped entirely.
    """
    # nothing to do if the file is already on disk
    if self._data:
        log.info('File already downloaded.')
        return

    log.info('Downloading VAC data..')

    # drive the rsync machinery: clear prior state, point at the
    # remote, queue this file by its full path, then stream it down
    rsync = self._rsync
    rsync.reset()
    rsync.remote()
    rsync.add('', full=self._path)
    rsync.set_stream()
    rsync.commit()
def __new__(cls, *args, **kwargs):
    """Load the relevant VACMixIns upon new class creation.

    Walks every ``VACMixIn`` subclass that belongs to the configured
    release, wraps each one's summary file in a ``VACDataClass``, and
    attaches the result onto this container class.
    """
    # start from a clean slate for the current release
    cls._reset_vacs()
    cls._vacs = []
    cls.release = config.release

    for subvac in VACMixIn.__subclasses__():
        # hidden VACs are never exposed
        if subvac._hidden:
            continue

        # skip VACs that do not ship with the configured release
        if config.release not in subvac.version:
            continue

        instance = subvac()
        if instance.summary_file is None:
            log.info(
                'VAC {0} has no summary file to load. Skipping.'.format(
                    instance.name))
            continue

        # wrap the summary file in a data-access object for this VAC
        vac_data = VACDataClass(subvac.name, subvac.description,
                                instance.summary_file)

        # graft any custom module-level functions onto the VAC data
        # object; an absent 'add_methods' attribute means nothing to add
        for method in getattr(subvac, 'add_methods', []):
            assert isinstance(
                method, six.string_types), 'method name must be a string'
            svmod = importlib.import_module(subvac.__module__)
            assert hasattr(
                svmod, method
            ), 'method name must be defined in VAC module file'
            setattr(vac_data, method,
                    types.MethodType(getattr(svmod, method), vac_data))

        # register the VAC on the container class
        cls._vacs.append(subvac.name)
        setattr(cls, subvac.name, vac_data)

    return super(VACs, cls).__new__(cls, *args, **kwargs)
def get_target(self, parent_object):
    """Access VAC data for a specific target from a Marvin Tool object.

    Downloads the Firefly summary file from the SAS if it is not yet
    available locally, then wraps it in a per-target ``FFlyTarget``.
    """
    # pull target identifiers off the parent Marvin Tool
    target_id = parent_object.plateifu
    image_size = int(parent_object.header['NAXIS1'])

    # fetch the summary file from the SAS when it is not on disk yet
    if not self.file_exists(self.summary_file):
        log.info(
            'Warning: This file is ~6 GB. It may take awhile to download')
        self.summary_file = self.download_vac('mangaffly',
                                              path_params=self.path_params)

    # package the per-target access object and hand it back
    return FFlyTarget(target_id, vacfile=self.summary_file,
                      imagesz=image_size)
def get_table(self, ext=None):
    ''' Create an Astropy table for a data extension

    Parameters:
        ext (int|str):
            The HDU extension name or number

    Returns:
        An Astropy table for the given extension, or None when the
        requested extension is an image HDU and cannot be read as a
        table.
    '''
    # use "is None" so that ext=0 (the primary HDU) is honored rather
    # than being silently remapped to ext=1 by falsy-value coercion
    if ext is None:
        log.info('No HDU extension specified. Defaulting to ext=1')
        ext = 1

    # image extensions carry no table data
    if self.data[ext].is_image:
        log.info('Ext={0} is not a table extension. Cannot read.'.format(ext))
        return

    return Table.read(self._path, ext, format='fits')
def get_data(self, parent_object):
    """Return the Firefly VAC data container for a target.

    Builds the unique SAS path for the Firefly summary file, downloads
    it when missing locally, and wraps it in an ``FFLY`` object.
    """
    # parameters needed from the parent Marvin Tool object
    release = parent_object.release
    drpver = parent_object._drpver
    target_id = parent_object.plateifu
    image_size = int(parent_object.header['NAXIS1'])

    # variables that build a unique path to the VAC file
    path_params = {'ver': self.version[release], 'drpver': drpver}

    # get_path returns False when the file does not exist locally
    summary_path = self.get_path('mangaffly', path_params=path_params)
    if not summary_path:
        log.info(
            'Warning: This file is ~6 GB. It may take awhile to download')
        summary_path = self.download_vac('mangaffly',
                                         path_params=path_params)

    # container for the more complex return data
    return FFLY(target_id, allfile=summary_path, imagesz=image_size)
def has_models(self):
    """Check whether the marvin db has all the models properly loaded.

    Logs the load state of datadb, dapdb, and sampledb, and returns
    True only when every one of them is present.
    """
    # table-driven presence checks, one per model set
    checks = {
        'datadb': self.datadb is not None,
        'dapdb': self.dapdb is not None,
        'sampledb': self.sampledb is not None,
    }
    for name, loaded in checks.items():
        log.info('{0}? {1}'.format(name, loaded))
    return all(checks.values())
def get_target(self, parent_object):
    ''' Accesses VAC data for a specific target from a Marvin Tool object

    Looks up every GEMA environment measurement for the target — the
    LSS parameters (HDUs 1-11), close pairs (12), groups (13),
    overdensity (14), and LSS tensor (15) — and returns them in a
    dictionary keyed by measurement name.  Entries with no data for
    the target are left as empty strings.
    '''
    # container for GEMA VAC data; empty string marks "no data found"
    gemadata = {
        "completeness": "",
        "LSS_1_all": "",
        "LSS_1_002": "",
        "LSS_1_006": "",
        "LSS_1_010": "",
        "LSS_1_015": "",
        "LSS_5_all": "",
        "LSS_5_002": "",
        "LSS_5_006": "",
        "LSS_5_010": "",
        "LSS_5_015": "",
        "pairs": "",
        "groups": "",
        "overdensity": "",
        "LSS_tensor": "",
    }

    # get any parameters you need from the parent object
    mangaid = parent_object.mangaid

    # download the vac from the SAS if it does not already exist locally
    if not self.file_exists(self.summary_file):
        self.summary_file = self.download_vac('mangagema',
                                              path_params=self.path_params)

    # opening tables in VAC file
    gemafile = fits.open(self.summary_file)

    # LSS parameters: HDUs 1-11 share a single row selection on HDU 1
    if mangaid not in gemafile[1].data['mangaid']:
        log.warning("No LSS data exists for {0}".format(mangaid))
    else:
        log.info("LSS data exists for {0}".format(mangaid))
        log.warning(
            "Warning: Do not use LSS parameters defined in volume with z < z_manga_galaxy"
        )
        indexLSS = gemafile[1].data['mangaid'] == mangaid
        lss_keys = ["completeness", "LSS_1_all", "LSS_1_002", "LSS_1_006",
                    "LSS_1_010", "LSS_1_015", "LSS_5_all", "LSS_5_002",
                    "LSS_5_006", "LSS_5_010", "LSS_5_015"]
        for ext, key in enumerate(lss_keys, start=1):
            gemadata[key] = gemafile[ext].data[indexLSS]

    # single-extension measurements: (HDU index, output key, log label);
    # label.capitalize() reproduces the original "Pair data exists" casing
    single_exts = [
        (12, "pairs", "pair"),
        (13, "groups", "group"),
        (14, "overdensity", "overdensity"),
        (15, "LSS_tensor", "structure"),
    ]
    for ext, key, label in single_exts:
        if mangaid not in gemafile[ext].data['mangaid']:
            log.warning("No {0} data exists for {1}".format(label, mangaid))
        else:
            log.info("{0} data exists for {1}".format(label.capitalize(),
                                                      mangaid))
            index = gemafile[ext].data['mangaid'] == mangaid
            gemadata[key] = gemafile[ext].data[index]

    # closing the FITS file
    gemafile.close()

    return gemadata
def getPrevious(self, chunk=None):
    ''' Retrieve the previous chunk of results.

    Returns a previous chunk of results from the query,
    from start to end in units of chunk.  Used with getNext
    to paginate through a long list of results.

    Parameters:
        chunk (int):
            The number of objects to return

    Returns:
        results (list):
            A list of query results

    Example:
        >>> r = q.run()
        >>> r.getPrevious(5)
        >>> Retrieving previous 5, from 30 to 35
        >>> [(u'4-3988', u'1901', -9999.0),
        >>> (u'4-3862', u'1902', -9999.0),
        >>> (u'4-3293', u'1901', -9999.0),
        >>> (u'4-3602', u'1902', -9999.0),
        >>> (u'4-4602', u'1901', -9999.0)]

    '''
    # step the window backwards from the current start position
    newend = self.start
    self.chunk = chunk if chunk else self.chunk
    newstart = newend - self.chunk

    # clamp at the beginning of the result set
    if newstart < 0:
        warnings.warn('You have reached the beginning.', MarvinUserWarning)
        newstart = 0
        newend = newstart + self.chunk

    log.info('Retrieving previous {0}, from {1} to {2}'.format(
        self.chunk, newstart, newend))

    if self.mode == 'local':
        self.results = self.query.slice(newstart, newend).all()
    elif self.mode == 'remote':
        # Fail if no route map initialized
        if not config.urlmap:
            raise MarvinError('No URL Map found. Cannot make remote call')
        # Get the query route
        url = config.urlmap['api']['getsubset']['url']

        params = {'searchfilter': self.searchfilter,
                  'params': self.returnparams,
                  'start': newstart, 'end': newend, 'limit': self.limit,
                  'sort': self.sortcol, 'order': self.order}
        try:
            ii = Interaction(route=url, params=params)
        except MarvinError as e:
            # fixed: error previously reported "GetNext" from getPrevious
            raise MarvinError(
                'API Query GetPrevious call failed: {0}'.format(e))
        else:
            self.results = ii.getData()
            self._makeNamedTuple()

    self.start = newstart
    self.end = newend

    if self.returntype:
        self.convertToTool()

    return self.results
def get_data(self, parent_object):
    ''' Return the GEMA VAC data dictionary for a target.

    Builds the unique SAS path for the GEMA summary file (downloading
    it when missing locally), then looks up every GEMA environment
    measurement — the LSS parameters (HDUs 1-11), close pairs (12),
    groups (13), overdensity (14), and LSS tensor (15) — returning
    them in a dictionary keyed by measurement name.  Entries with no
    data for the target are left as empty strings.
    '''
    # container for GEMA VAC data; empty string marks "no data found"
    gemadata = {
        "completeness": "",
        "LSS_1_all": "",
        "LSS_1_002": "",
        "LSS_1_006": "",
        "LSS_1_010": "",
        "LSS_1_015": "",
        "LSS_5_all": "",
        "LSS_5_002": "",
        "LSS_5_006": "",
        "LSS_5_010": "",
        "LSS_5_015": "",
        "pairs": "",
        "groups": "",
        "overdensity": "",
        "LSS_tensor": "",
    }

    # get any parameters you need from the parent object
    mangaid = parent_object.mangaid
    release = parent_object.release

    # define the variables to build a unique path to your VAC file
    path_params = {'ver': self.version[release]}

    # get_path returns False if the files do not exist locally
    gemapath = self.get_path('mangagema', path_params=path_params)

    # download the vac from the SAS if it does not already exist locally
    if not gemapath:
        gemapath = self.download_vac('mangagema', path_params=path_params)

    # opening tables in VAC file
    gemafile = fits.open(gemapath)

    # LSS parameters: HDUs 1-11 share a single row selection on HDU 1
    if mangaid not in gemafile[1].data['mangaid']:
        log.warning("No LSS data exists for {0}".format(mangaid))
    else:
        log.info("LSS data exists for {0}".format(mangaid))
        log.warning(
            "Warning: Do not use LSS parameters defined in volume with z < z_manga_galaxy"
        )
        indexLSS = gemafile[1].data['mangaid'] == mangaid
        lss_keys = ["completeness", "LSS_1_all", "LSS_1_002", "LSS_1_006",
                    "LSS_1_010", "LSS_1_015", "LSS_5_all", "LSS_5_002",
                    "LSS_5_006", "LSS_5_010", "LSS_5_015"]
        for ext, key in enumerate(lss_keys, start=1):
            gemadata[key] = gemafile[ext].data[indexLSS]

    # single-extension measurements: (HDU index, output key, log label);
    # label.capitalize() reproduces the original "Pair data exists" casing
    single_exts = [
        (12, "pairs", "pair"),
        (13, "groups", "group"),
        (14, "overdensity", "overdensity"),
        (15, "LSS_tensor", "structure"),
    ]
    for ext, key, label in single_exts:
        if mangaid not in gemafile[ext].data['mangaid']:
            log.warning("No {0} data exists for {1}".format(label, mangaid))
        else:
            log.info("{0} data exists for {1}".format(label.capitalize(),
                                                      mangaid))
            index = gemafile[ext].data['mangaid'] == mangaid
            gemadata[key] = gemafile[ext].data[index]

    # closing the FITS file
    gemafile.close()

    return gemadata