Esempio n. 1
0
 def __init__(self, custom_cm=None, use_monet=None,
              profile=False,
              **params):
     """
     Set up logging, the connection manager and the database connection.

     @param custom_cm: allows to pass an object to be used as connection
     manager.
     @param use_monet: select the MonetDB backend; None keeps the
     connection manager's default.
     @param profile: enable connection profiling and SQL debug logging.
     @param params: extra keyword arguments forwarded to get_connection().
     @raise db.Error: if the database connection cannot be established.
     """
     self.log = get_gsm_logger('pipeline', 'pipeline.log')
     self.use_monet = use_monet
     if not custom_cm:
         # PEP 8: compare against None with "is/is not", never "!=".
         if use_monet is not None:
             self.conn_manager = GSMConnectionManager(use_monet=use_monet)
         else:
             self.conn_manager = GSMConnectionManager()
     else:
         self.conn_manager = custom_cm
     try:
         self.conn = self.conn_manager.get_connection(**params)
         if profile:
             self.conn.profile = True
             self.conn.log.setLevel(logging.DEBUG)
         self.conn.commit()
     except db.Error as exc:
         self.log.error("Failed to connect: %s" % exc)
         # Bare "raise" re-raises the active exception and preserves
         # the original traceback ("raise exc" would truncate it).
         raise
     self.options = load_parameters('%s/settings.ini' %
                                    os.path.dirname(__file__))
     self.log.debug('Pipeline parameters: %s' % self.options)
     self.log.info('Pipeline started.')
Esempio n. 2
0
 def __init__(self,
              custom_cm=None,
              use_monet=None,
              profile=False,
              **params):
     """
     Set up logging, the connection manager and the database connection.

     @param custom_cm: allows to pass an object to be used as connection
     manager.
     @param use_monet: select the MonetDB backend; None keeps the
     connection manager's default.
     @param profile: enable connection profiling and SQL debug logging.
     @param params: extra keyword arguments forwarded to get_connection().
     @raise db.Error: if the database connection cannot be established.
     """
     self.log = get_gsm_logger('pipeline', 'pipeline.log')
     self.use_monet = use_monet
     if not custom_cm:
         # PEP 8: compare against None with "is/is not", never "!=".
         if use_monet is not None:
             self.conn_manager = GSMConnectionManager(use_monet=use_monet)
         else:
             self.conn_manager = GSMConnectionManager()
     else:
         self.conn_manager = custom_cm
     try:
         self.conn = self.conn_manager.get_connection(**params)
         if profile:
             self.conn.profile = True
             self.conn.log.setLevel(logging.DEBUG)
         self.conn.commit()
     except db.Error as exc:
         self.log.error("Failed to connect: %s" % exc)
         # Bare "raise" re-raises the active exception and preserves
         # the original traceback ("raise exc" would truncate it).
         raise
     self.options = load_parameters('%s/settings.ini' %
                                    os.path.dirname(__file__))
     self.log.debug('Pipeline parameters: %s' % self.options)
     self.log.info('Pipeline started.')
Esempio n. 3
0
 def setUp(self):
     """
     Create a connection manager for the backend named in the test config.
     """
     # Default to MonetDB unless the config explicitly says otherwise.
     monet_enabled = True
     if 'monetdb' in config:
         monet_enabled = bool(config['monetdb'] == 'True')
     self.cm = GSMConnectionManager(use_console=False,
                                    use_monet=monet_enabled)
     self.is_monet = monet_enabled
Esempio n. 4
0
def generate_snapshot(filename):
    """
    Generate a simulated sky snapshot and write it to *filename*.

    Picks a random field center (declination rejection-sampled until it is
    above -30 degrees), queries the 'stress' database for sources in that
    field, writes them out with Gaussian noise added, and creates a
    matching .parset file next to the catalog.

    @param filename: output catalog file name; the parset name is derived
    by replacing everything after the first dot with '.parset'.
    """
    conn = GSMConnectionManager(database='stress',
                                use_monet=False).get_connection()
    # Rejection-sample a declination that is uniform on the sphere,
    # keeping only values above -30 degrees.
    decl_center = -50
    while decl_center < -30:
        decl_center = degrees(acos(2 * random.random() - 1) - 0.5 * pi)
    ra_center = degrees(2 * pi * random.random())
    # NOTE(review): "random" here is presumably numpy.random
    # (random_integers/normal are numpy APIs) -- confirm the import.
    band = random.random_integers(1, 3)
    sql = get_field(ra_center, decl_center, 3.0, 2)
    cur = conn.get_cursor(sql)
    # "with" guarantees the file is closed even if a fetch or write fails
    # (the original leaked the handle on any exception before f.close()).
    with open(filename, 'w') as out:
        out.write('# RA DEC Total_flux e_Total_flux\n\n')
        for data in iter(cur.fetchone, None):
            out.write('%s %s %s %s\n' %
                      (random.normal(data[0], ERROR),
                       random.normal(data[1], ERROR),
                       random.normal(data[2], FLUX_ERROR), data[3]))
    parsetname = path.basename(filename)
    parsetname = parsetname[:parsetname.index('.')] + '.parset'
    write_parset(parsetname, filename, FREQUENCY[band], ra_center, decl_center)
Esempio n. 5
0
File: snap.py Progetto: jjdmol/LOFAR
def generate_snapshot(filename):
    """
    Generate a simulated sky snapshot and write it to *filename*.

    Picks a random field center (declination rejection-sampled until it is
    above -30 degrees), queries the 'stress' database for sources in that
    field, writes them out with Gaussian noise added, and creates a
    matching .parset file next to the catalog.

    @param filename: output catalog file name; the parset name is derived
    by replacing everything after the first dot with '.parset'.
    """
    conn = GSMConnectionManager(database='stress',
                                use_monet=False).get_connection()
    # Rejection-sample a declination that is uniform on the sphere,
    # keeping only values above -30 degrees.
    decl_center = -50
    while decl_center < -30:
        decl_center = degrees(acos(2 * random.random() - 1) - 0.5 * pi)
    ra_center = degrees(2 * pi * random.random())
    # NOTE(review): "random" here is presumably numpy.random
    # (random_integers/normal are numpy APIs) -- confirm the import.
    band = random.random_integers(1, 3)
    sql = get_field(ra_center, decl_center, 3.0, 2)
    cur = conn.get_cursor(sql)
    # "with" guarantees the file is closed even if a fetch or write fails
    # (the original leaked the handle on any exception before f.close()).
    with open(filename, 'w') as out:
        out.write('# RA DEC Total_flux e_Total_flux\n\n')
        for data in iter(cur.fetchone, None):
            out.write('%s %s %s %s\n' %
                      (random.normal(data[0], ERROR),
                       random.normal(data[1], ERROR),
                       random.normal(data[2], FLUX_ERROR), data[3]))
    parsetname = path.basename(filename)
    parsetname = parsetname[:parsetname.index('.')] + '.parset'
    write_parset(parsetname, filename, FREQUENCY[band], ra_center, decl_center)
Esempio n. 6
0
                              min_flux=min_flux)).fetchall()
        }


if __name__ == '__main__':
    # Command-line entry point: run a single GSM API call ('image' or
    # 'field') against the 'test' database and print/save the result.
    parser = argparse.ArgumentParser(description='Run GSM API-call')
    parser.add_argument('-f', '--filename')
    parser.add_argument('-s', '--style', default=STYLE_PLAIN)
    parser.add_argument('-S', '--separator', default=' ')
    parser.add_argument('command', type=str)
    parser.add_argument('--image_id', type=int)
    parser.add_argument('--ra', default=0, type=float)
    parser.add_argument('--decl', default=0, type=float)
    parser.add_argument('--radius', default=5.0, type=float)
    parser.add_argument('-B', '--band', default=8, type=int)
    parser.add_argument('--stokes', default='I', type=str)
    parser.add_argument('--f_peak', default=None, type=float)

    args = parser.parse_args()
    connect = GSMConnectionManager(database='test').get_connection()
    api = GSMAPI(connect)
    if args.command == 'image':
        dataset = api.get_image_properties(args.image_id)
    elif args.command == 'field':
        dataset = api.get_field(args.ra, args.decl, args.radius, args.band,
                                args.f_peak)
    else:
        # Name the offending command; still a ValueError so any existing
        # callers that catch it keep working.
        raise ValueError('Unknown command: %s' % args.command)

    api.output(dataset, int(args.style), args.filename, args.separator)
Esempio n. 7
0
#!/usr/bin/python
"""
***GSM package tool.
***Created by A. Mints (2012).
Cleans all data from the database.
"""

import argparse
from src.gsmconnectionmanager import GSMConnectionManager
from tests.testlib import cleanup_db

# Command-line interface for the database cleanup tool.
parser = argparse.ArgumentParser(description="""
***GSM package tool.
***Created by A. Mints (2012).
    Cleans all data from the database.""",
formatter_class=argparse.RawDescriptionHelpFormatter)

parser.add_argument('-D', '--database', type=str, default='test',
                    help='database name to load data into')
parser.add_argument('-M', '--monetdb', action="store_true", default=False,
                    help='Use MonetDB instead of PostgreSQL')
args = parser.parse_args()

# Open a connection to the selected backend and wipe all data from it.
cm = GSMConnectionManager(use_monet=args.monetdb, database=args.database)
cleanup_db(cm.get_connection())
# NOTE: Python 2 print statements -- this script is not Python 3 compatible.
if args.monetdb:
    print "MonetDB database %s cleaned" % args.database
else:
    print "PostgreSQL database %s cleaned" % args.database
Esempio n. 8
0
class GSMPipeline(object):
    """
    General pipeline class.

    Wraps a database connection and runs the source-association pipeline
    for radio images: loading detections, matching, grouping, resolving
    and updating the running catalog.
    """
    def __init__(self,
                 custom_cm=None,
                 use_monet=None,
                 profile=False,
                 **params):
        """
        @param custom_cm: allows to pass an object to be used as connection
        manager.
        @param use_monet: select the MonetDB backend; None keeps the
        connection manager's default.
        @param profile: enable connection profiling and SQL debug logging.
        @param params: extra keyword arguments forwarded to get_connection().
        @raise db.Error: if the database connection cannot be established.
        """
        self.log = get_gsm_logger('pipeline', 'pipeline.log')
        self.use_monet = use_monet
        if not custom_cm:
            # PEP 8: compare against None with "is/is not", never "!=".
            if use_monet is not None:
                self.conn_manager = GSMConnectionManager(use_monet=use_monet)
            else:
                self.conn_manager = GSMConnectionManager()
        else:
            self.conn_manager = custom_cm
        try:
            self.conn = self.conn_manager.get_connection(**params)
            if profile:
                self.conn.profile = True
                self.conn.log.setLevel(logging.DEBUG)
            self.conn.commit()
        except db.Error as exc:
            self.log.error("Failed to connect: %s" % exc)
            # Bare "raise" preserves the original traceback.
            raise
        self.options = load_parameters('%s/settings.ini' %
                                       os.path.dirname(__file__))
        self.log.debug('Pipeline parameters: %s' % self.options)
        self.log.info('Pipeline started.')

    def reopen_connection(self, **params):
        """
        Reopen connection in case it was closed.

        @param params: forwarded to the connection manager's
        get_connection().
        @raise db.Error: if reconnecting fails.
        """
        if not self.conn or not self.conn.established():
            try:
                self.conn = self.conn_manager.get_connection(**params)
                self.log.info('Pipeline connection reopened.')
            except db.Error as exc:
                self.log.error("Failed to connect: %s" % exc)
                # Bare "raise" preserves the original traceback.
                raise

    def read_image(self, source):
        """
        Read image and detections from a given source.

        @raise SourceException: if no source is given.
        """
        if source:
            source.read_and_store_data(self.conn)
        else:
            raise SourceException('No source specified.')

    def run_parset(self, parset):
        """
        Process a single parset file and return the image id it loaded.
        """
        self.conn.start()
        parset.process(self.conn)
        self.parset = parset
        self.process_image(parset.image_id, parset.run_id)
        self.log.info('Parset %s done.' % parset.filename)
        return parset.image_id

    def run_grouper(self):
        """
        Detect/update and store groups of sources for later processing.
        """
        # Update groups by merging overlapping patches.
        cursor = self.conn.get_cursor(get_sql("GroupFinder"))
        grouper = Grouper(cursor.fetchall())
        # NOTE(review): loops while is_completed() is truthy -- the method
        # name reads like the opposite condition; confirm Grouper semantics.
        while grouper.is_completed():
            grouper.one_cycle()
            self.conn.execute_set(
                get_sql("GroupUpdate", grouper.group,
                        ",".join(map(str, grouper.runcatset))))
            grouper.cleanup()
        for resolver in [SimpleResolver]:
            self.run_resolver(resolver)
        self.conn.execute(get_sql("GroupFill"))

    def run_resolver(self, resolve_class):
        """
        Run the given resolver class over all source groups.
        """
        resolver = resolve_class(self.conn)
        for group_id in self.conn.get_cursor(get_sql("GroupCycle")):
            if not resolver.run_resolve(group_id[0]):
                # Failed to resolve: reset the group's runningcatalog links.
                self.log.debug("Group id %s not resolved by %s." %
                               (group_id[0], resolver.__class__.__name__))
                self.conn.log.debug("Group id %s not resolved." % group_id[0])
                self.conn.execute_set(
                    get_sql("GroupUpdate runcat", group_id[0]))
            else:
                self.log.debug("Group id %s resolved by %s." %
                               (group_id[0], resolver.__class__.__name__))
                self.conn.log.debug("Group id %s resolved." % group_id[0])

    def update_image_pointing(self, image_id):
        """
        Update image pointing to average ra/decl of all sources.
        """
        avg_x, avg_y, avg_z, count = self.conn.exec_return(get_sql(
            'Image properties selector', image_id),
                                                           single_column=False)
        # Mean cartesian vector of all sources, converted back to
        # spherical coordinates.
        avg_x, avg_y, avg_z = avg_x / count, avg_y / count, avg_z / count
        decl = math.asin(avg_z)
        # NOTE(review): atan2(avg_x, avg_y) -- argument order looks swapped
        # relative to the usual atan2(y, x); confirm the convention used by
        # the XYZ columns before touching this.
        ra = math.atan2(avg_x, avg_y)
        self.conn.execute(
            get_sql('Image properties updater', ra, decl, image_id))

    def process_image(self, image_id, run_id=None, sources_loaded=False):
        """
        Process single image.

        @param run_id: run to attach the image to; defaults to the run id
        stored in the images table.
        @param sources_loaded: True if there are records in the
        extractedsources already.
        @raise ImageStateError: if the image is already in state 1 (Ok).
        """
        self.conn.start()
        status, band, stokes, fov_radius, \
        centr_ra, centr_decl, run_loaded, bmaj = \
        self.conn.exec_return("""
        select status, band, stokes, fov_radius, 
               centr_ra, centr_decl, run_id, bmaj
          from images
         where imageid = %s;""" % image_id, single_column=False)
        if not run_id:
            run_id = run_loaded
        if status == 1:
            raise ImageStateError('Image %s in state 1 (Ok). Cannot process' %
                                  image_id)
        GLOBALS.update({'i': image_id, 'r': run_id, 'b': band, 's': stokes})
        if not sources_loaded:
            self.conn.execute(get_sql('insert_extractedsources'))
            self.conn.execute(get_sql('insert dummysources'))
        # Use the beam major axis as association radius when available,
        # otherwise fall back to the configured maximum.
        if bmaj:
            max_assoc = float(bmaj)
        else:
            max_assoc = float(self.options.get('maximum_association_distance'))
        self.log.debug('Using options: %s' % self.options)
        self.log.debug('Final max_assoc_dist %s' % max_assoc)

        # Now do the matching!
        if self.options.get('matcher') == 'F90':
            matcher_class = MatcherF90
        else:
            matcher_class = MatcherSQL
        matcher = matcher_class(
            self.conn, max_assoc, self.options.get('match_distance'),
            self.options.get('match_distance_extended'),
            get_pixels(centr_ra, centr_decl, fov_radius + 0.5))
        matcher.match(image_id)

        self.conn.call_procedure("fill_temp_assoc_kind(%s);" % image_id)
        # Process many-to-many associations;
        self.run_grouper()

        # Process one-to-one associations;
        self.conn.execute(get_sql('add 1 to 1'))
        # Process one-to-many associations;
        self.conn.execute(get_sql('add 1 to N'))
        self.conn.execute_set(get_sql('update flux_fraction'))
        # Process many-to-one associations;
        self.conn.execute_set(get_sql('add N to 1'))
        # Updating runningcatalog.
        run_update(self.conn, 'update runningcatalog')
        run_update(self.conn, 'update runningcatalog extended')
        self.conn.execute(get_sql('update runningcatalog XYZ'))
        # First update, then insert new (!!!)
        run_update(self.conn, 'update runningcatalog_fluxes')
        self.conn.execute(get_sql('insert new bands for point sources'))
        # Inserting new sources.
        self.conn.execute_set(get_sql('Insert new sources'))
        self.conn.execute_set(get_sql('Join extended'))
        # Update image status and save current svn version.
        self.conn.execute_set(get_sql('Cleanup', get_svn_version()))
        # NOTE(review): self.parset is only set by run_parset(); calling
        # process_image() directly may raise AttributeError here.
        if self.parset.recalculate_pointing:
            self.update_image_pointing(image_id)
        self.conn.commit()
Esempio n. 9
0
class GSMPipeline(object):
    """
    General pipeline class.

    Wraps a database connection and runs the source-association pipeline
    for radio images: loading detections, matching, grouping, resolving
    and updating the running catalog.
    """
    def __init__(self, custom_cm=None, use_monet=None,
                 profile=False,
                 **params):
        """
        @param custom_cm: allows to pass an object to be used as connection
        manager.
        @param use_monet: select the MonetDB backend; None keeps the
        connection manager's default.
        @param profile: enable connection profiling and SQL debug logging.
        @param params: extra keyword arguments forwarded to get_connection().
        @raise db.Error: if the database connection cannot be established.
        """
        self.log = get_gsm_logger('pipeline', 'pipeline.log')
        self.use_monet = use_monet
        if not custom_cm:
            # PEP 8: compare against None with "is/is not", never "!=".
            if use_monet is not None:
                self.conn_manager = GSMConnectionManager(use_monet=use_monet)
            else:
                self.conn_manager = GSMConnectionManager()
        else:
            self.conn_manager = custom_cm
        try:
            self.conn = self.conn_manager.get_connection(**params)
            if profile:
                self.conn.profile = True
                self.conn.log.setLevel(logging.DEBUG)
            self.conn.commit()
        except db.Error as exc:
            self.log.error("Failed to connect: %s" % exc)
            # Bare "raise" preserves the original traceback.
            raise
        self.options = load_parameters('%s/settings.ini' %
                                       os.path.dirname(__file__))
        self.log.debug('Pipeline parameters: %s' % self.options)
        self.log.info('Pipeline started.')

    def reopen_connection(self, **params):
        """
        Reopen connection in case it was closed.

        @param params: forwarded to the connection manager's
        get_connection().
        @raise db.Error: if reconnecting fails.
        """
        if not self.conn or not self.conn.established():
            try:
                self.conn = self.conn_manager.get_connection(**params)
                self.log.info('Pipeline connection reopened.')
            except db.Error as exc:
                self.log.error("Failed to connect: %s" % exc)
                # Bare "raise" preserves the original traceback.
                raise

    def read_image(self, source):
        """
        Read image and detections from a given source.

        @raise SourceException: if no source is given.
        """
        if source:
            source.read_and_store_data(self.conn)
        else:
            raise SourceException('No source specified.')

    def run_parset(self, parset):
        """
        Process a single parset file and return the image id it loaded.
        """
        self.conn.start()
        parset.process(self.conn)
        self.parset = parset
        self.process_image(parset.image_id, parset.run_id)
        self.log.info('Parset %s done.' % parset.filename)
        return parset.image_id

    def run_grouper(self):
        """
        Detect/update and store groups of sources for later processing.
        """
        # Update groups by merging overlapping patches.
        cursor = self.conn.get_cursor(get_sql("GroupFinder"))
        grouper = Grouper(cursor.fetchall())
        # NOTE(review): loops while is_completed() is truthy -- the method
        # name reads like the opposite condition; confirm Grouper semantics.
        while grouper.is_completed():
            grouper.one_cycle()
            self.conn.execute_set(get_sql("GroupUpdate",
                                      grouper.group,
                                      ",".join(map(str, grouper.runcatset))))
            grouper.cleanup()
        for resolver in [SimpleResolver]:
            self.run_resolver(resolver)
        self.conn.execute(get_sql("GroupFill"))

    def run_resolver(self, resolve_class):
        """
        Run the given resolver class over all source groups.
        """
        resolver = resolve_class(self.conn)
        for group_id in self.conn.get_cursor(get_sql("GroupCycle")):
            if not resolver.run_resolve(group_id[0]):
                # Failed to resolve: reset the group's runningcatalog links.
                self.log.debug("Group id %s not resolved by %s." %
                                   (group_id[0], resolver.__class__.__name__))
                self.conn.log.debug("Group id %s not resolved." % group_id[0])
                self.conn.execute_set(get_sql("GroupUpdate runcat",
                                      group_id[0]))
            else:
                self.log.debug("Group id %s resolved by %s."  %
                                   (group_id[0], resolver.__class__.__name__))
                self.conn.log.debug("Group id %s resolved." % group_id[0])

    def update_image_pointing(self, image_id):
        """
        Update image pointing to average ra/decl of all sources.
        """
        avg_x, avg_y, avg_z, count = self.conn.exec_return(
                            get_sql('Image properties selector', image_id),
                                     single_column=False)
        # Mean cartesian vector of all sources, converted back to
        # spherical coordinates.
        avg_x, avg_y, avg_z = avg_x / count, avg_y / count, avg_z / count
        decl = math.asin(avg_z)
        # NOTE(review): atan2(avg_x, avg_y) -- argument order looks swapped
        # relative to the usual atan2(y, x); confirm the convention used by
        # the XYZ columns before touching this.
        ra = math.atan2(avg_x, avg_y)
        self.conn.execute(get_sql('Image properties updater',
                                  ra, decl, image_id))

    def process_image(self, image_id, run_id=None, sources_loaded=False):
        """
        Process single image.

        @param run_id: run to attach the image to; defaults to the run id
        stored in the images table.
        @param sources_loaded: True if there are records in the
        extractedsources already.
        @raise ImageStateError: if the image is already in state 1 (Ok).
        """
        self.conn.start()
        status, band, stokes, fov_radius, \
        centr_ra, centr_decl, run_loaded, bmaj = \
        self.conn.exec_return("""
        select status, band, stokes, fov_radius, 
               centr_ra, centr_decl, run_id, bmaj
          from images
         where imageid = %s;""" % image_id, single_column=False)
        if not run_id:
            run_id = run_loaded
        if status == 1:
            raise ImageStateError('Image %s in state 1 (Ok). Cannot process' %
                                  image_id)
        GLOBALS.update({'i': image_id, 'r': run_id,
                        'b': band, 's': stokes})
        if not sources_loaded:
            self.conn.execute(get_sql('insert_extractedsources'))
            self.conn.execute(get_sql('insert dummysources'))
        # Use the beam major axis as association radius when available,
        # otherwise fall back to the configured maximum.
        if bmaj:
            max_assoc = float(bmaj)
        else:
            max_assoc = float(self.options.get('maximum_association_distance'))
        self.log.debug('Using options: %s' % self.options)
        self.log.debug('Final max_assoc_dist %s' % max_assoc)

        # Now do the matching!
        if self.options.get('matcher') == 'F90':
            matcher_class = MatcherF90
        else:
            matcher_class = MatcherSQL
        matcher = matcher_class(self.conn, max_assoc,
                  self.options.get('match_distance'),
                  self.options.get('match_distance_extended'),
                  get_pixels(centr_ra, centr_decl, fov_radius + 0.5))
        matcher.match(image_id)

        self.conn.call_procedure("fill_temp_assoc_kind(%s);" % image_id)
        # Process many-to-many associations;
        self.run_grouper()

        # Process one-to-one associations;
        self.conn.execute(get_sql('add 1 to 1'))
        # Process one-to-many associations;
        self.conn.execute(get_sql('add 1 to N'))
        self.conn.execute_set(get_sql('update flux_fraction'))
        # Process many-to-one associations;
        self.conn.execute_set(get_sql('add N to 1'))
        # Updating runningcatalog.
        run_update(self.conn, 'update runningcatalog')
        run_update(self.conn, 'update runningcatalog extended')
        self.conn.execute(get_sql('update runningcatalog XYZ'))
        # First update, then insert new (!!!)
        run_update(self.conn, 'update runningcatalog_fluxes')
        self.conn.execute(get_sql('insert new bands for point sources'))
        # Inserting new sources.
        self.conn.execute_set(get_sql('Insert new sources'))
        self.conn.execute_set(get_sql('Join extended'))
        # Update image status and save current svn version.
        self.conn.execute_set(get_sql('Cleanup', get_svn_version()))
        # NOTE(review): self.parset is only set by run_parset(); calling
        # process_image() directly may raise AttributeError here.
        if self.parset.recalculate_pointing:
            self.update_image_pointing(image_id)
        self.conn.commit()