def save_image_info(self, conn):
    """
    Write image info into images table.

    Looks up the frequency band for this dataset's frequency, determines
    pointing/size (from the header data when present, otherwise
    recalculated from the detections already loaded for this run),
    inserts a row into the images table and returns the new image id.

    :param conn: database connection exposing execute()/exec_return().
    :returns: id of the newly created image row.
    :raises SourceException: if the frequency is missing or not numeric.
    """
    frequency = self.data.get('frequency')
    # Guard against a missing frequency too: calling .isdigit() on None
    # raised AttributeError instead of the intended SourceException.
    if not frequency or not frequency.isdigit():
        raise SourceException('Frequency should be digital, %s found'
                              % frequency)
    band = conn.exec_return(
        get_sql('get frequency', frequency),
        'No matching frequency band found for frequency %s' % frequency)
    if 'pointing_ra' not in self.data or \
       'pointing_decl' not in self.data or \
       'beam_size' not in self.data:
        # Pointing/size not supplied in the header; derive them from the
        # detections stored for this run and remember to recalculate.
        data = conn.exec_return(
            """select min(ldecl), max(ldecl),
                      min(lra), max(lra),
                      avg(ldecl), avg(lra)
                 from detections
                where run_id = %s;""" % self.run_id,
            single_column=False)
        size, avg_decl, avg_ra = get_image_size(*data)
        self.recalculate_pointing = True
    else:
        size = self.data.get('beam_size')
        avg_decl = self.data.get('pointing_decl')
        avg_ra = self.data.get('pointing_ra')
    conn.execute(get_sql('insert image', self.parset_id, band,
                         avg_ra, avg_decl, size,
                         get_svn_version(), self.run_id,
                         self.get('bmaj'), self.get('bmin'),
                         self.get('bpa')))
    image_id = conn.exec_return(get_sql('get last image_id'))
    self.log.info('Image %s created' % image_id)
    return image_id
def read_and_store_data(self, conn):
    """
    Read all detections from the BBS file and bulk-insert them into the
    detections table, one multi-row insert per readlines() block.

    :param conn: database connection exposing execute().
    :returns: True on success.
    :raises SourceException: if the file does not exist or has no header.
    """
    if not os.path.isfile(self.filename):
        raise SourceException('no file %s' % self.filename)
    # 'with' guarantees the data file is closed even if parsing fails
    # (the original left the handle open).
    with open(self.filename, 'r') as datafile:
        header = None
        if self.fileformat == 'test':
            header = self.get_header_test(datafile)
        elif self.fileformat == 'default':
            header = self.get_header_default(datafile)
        if not header:
            raise SourceException('No header in file %s' % self.filename)
        self.process_header(header)
        sql_data = []
        # Switch off autocommit (if it is switched on) for better performance.
        sql_insert = 'insert into detections (run_id, image_name, '\
                     'lra, ldecl, lra_err, ldecl_err,'\
                     'lf_peak, lf_peak_err, lf_int, lf_int_err, ' \
                     'g_minor, g_minor_err, g_major, g_major_err,' \
                     'g_pa, g_pa_err, ldet_sigma, healpix_zone) values'
        while True:
            data_lines = datafile.readlines(self.BLOCK_SIZE)
            if not data_lines:
                break
            for data_line in data_lines:
                if data_line.strip() == '' or data_line.startswith('#'):
                    # skip comments and empty lines
                    continue
                self.sources = self.sources + 1
                dhash = self.process_line(data_line.split())
                # HEALPix zone (nside=32, nested) from RA/Decl in degrees.
                pix = hp.ang2pix(32, radians(90. - float(dhash[1])),
                                 radians(float(dhash[0])), nest=True)
                sql_data.append("(%s, '%s', %s, %s )" %
                                (self.run_id, self.parset_id,
                                 ','.join(dhash), pix))
            # Guard: a block consisting only of comments/blank lines would
            # otherwise produce an invalid empty values-list statement.
            if sql_data:
                sql = "%s %s;" % (sql_insert, ',\n'.join(sql_data))
                conn.execute(sql)
                self.log.info('%s sources loaded from %s'
                              % (self.sources, self.filename))
                sql_data = []
    # Restore autocommit.
    return True
def read_image(self, source):
    """
    Load the image and its detections from the given source object.

    :param source: reader object providing read_and_store_data().
    :raises SourceException: if no source is given.
    """
    if not source:
        raise SourceException('No source specified.')
    source.read_and_store_data(self.conn)
def get_header_default(self, datafile):
    """
    Get header for a 'default' data-format.

    Comments should start with #. List of columns should be in the
    header: # RA DECL...

    :param datafile: open text-file object positioned at the start.
    :returns: list of lower-cased column names from the header line.
    :raises SourceException: if no header line is found in the file.
    """
    line = datafile.readline()
    # Also stop at EOF (readline returns ''): the original looped forever
    # on a file without a header, making the raise below unreachable.
    while line and not (line.startswith('# Gaus_id') or
                        line.startswith("# RA")):
        line = datafile.readline()
    if not line:
        raise SourceException('No header in file %s' % self.filename)
    # Drop the leading '# ' and split the remaining column names.
    return line[2:].strip().lower().split(' ')
def get_header_test(self, datafile):
    """
    Get header for a 'test' data-format.

    No comments are supported. First line is a list of column-names or
    column default values, like:
        ra ra_err=0.01 decl decl_err=0.01
    In the example above two columns (ra and decl) are taken from the
    data, and for ra_err and decl_err a default value is taken.

    :raises SourceException: if the first line has no '=' separator.
    """
    first_line = datafile.readline()
    try:
        # Everything after the first '=' is the comma-separated spec.
        columns = first_line.split('=', 1)[1].strip(' ').lower().split(',')
        for idx, spec in enumerate(columns):
            pieces = spec.split('=')
            if len(pieces) != 1:
                # A default value is attached to this column.
                columns[idx] = (pieces[0], pieces[1].strip("'").strip())
    except IndexError:
        raise SourceException('Wrong header in the first line' \
            ' of file %s' % self.filename)
    return columns
def read_and_store_data(self, conn):
    """
    Read detections from a comma-separated file (one header line, then
    numeric rows) and bulk-insert them into the detections table in
    BLOCK_SIZE batches.

    :param conn: database connection exposing execute().
    :returns: True on success.
    :raises SourceException: if the file does not exist.
    """
    if not os.path.isfile(self.filename):
        raise SourceException('no file %s' % self.filename)
    # Read the header line inside 'with' so the handle is closed
    # (the original leaked an open, never-closed file object).
    with open(self.filename, 'r') as datafile:
        header = datafile.readline()
    data = sp.loadtxt(self.filename, delimiter=',', skiprows=1)
    # Rename source columns to the detections table column names.
    for fro, to in (('ra', 'lra'), ('dec', 'ldecl'),
                    ('smaj', 'g_major'), ('smin', 'g_minor'),
                    ('pa', 'g_pa'),
                    ('int_flux', 'lf_int'), ('pk_flux', 'lf_peak')):
        header = header.replace(fro, to)
    sql_data = []
    # Switch off autocommit (if it is switched on) for better performance.
    sql_insert = 'insert into detections (run_id, image_name, '\
                 '%s, ldet_sigma, healpix_zone) values' % header
    self.sources = 0
    for source in data:
        self.sources = self.sources + 1
        # HEALPix zone (nside=32, nested) from RA/Decl in degrees.
        pix = hp.ang2pix(32, radians(90. - source[2]),
                         radians(source[0]), nest=True)
        # Convert axes from arcsec to degrees:
        source[4:8] = source[4:8] / 3600.0
        sql_data.append("(%s, '%s', %s, 3, %s )" %
                        (self.run_id, self.parset_id,
                         ','.join(map(str, source)), pix))
        if self.sources % self.BLOCK_SIZE == 0:
            sql = "%s %s;" % (sql_insert, ',\n'.join(sql_data))
            conn.execute(sql)
            self.log.info('%s sources loaded from %s'
                          % (self.sources, self.filename))
            sql_data = []
    # Flush the final, partially-filled batch (if any).
    if len(sql_data) > 0:
        sql = "%s %s;" % (sql_insert, ',\n'.join(sql_data))
        conn.execute(sql)
        self.log.info('%s sources loaded from %s'
                      % (self.sources, self.filename))
    return True