Example #1
    def __init__(self, parent=None):
        super(SpectrumView, self).__init__(parent)

        # Connect us up to the signal handler.
        self.signals = samp.SAMPIntegratedClient(name=self.__class__.__name__,
                                                 description='Spectrum Viewer')
        try:
            self.signals.connect(hub=parent.hub)
        except AttributeError:
            self.signals.connect()

        self.spectra = sd.SpectrumDataStore()
        self.spectra_model = sd.SpectrumDataModel(self.spectra)

        self.ui = Ui_mainView()
        self.ui.setupUi(self)

        # Initialize the plotting
        self.axes = self.ui.plotView.figure.add_subplot(111)

        # Initialize the spectrum manager.
        self.ui.spectraListView.setModel(self.spectra_model)

        # There are two sets of events that need to
        # be watched for: SAMP and Matplotlib
        # events. First handle the SAMP events.
        self.signals.bind_receive_notification('samp.app.new_data', self._draw)
        self.signals.bind_receive_notification('samp.app.viewport_change',
                                               self._viewport_change)

        # Handle matplotlib events
        #self.register_callbacks()
        self.axes.callbacks.connect('xlim_changed', self._change_data_range)
        self.axes.callbacks.connect('ylim_changed', self._change_data_range)
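
The two notification handlers bound above are not shown in this example. Below is a minimal sketch, not taken from the original source, of what such handlers could look like: astropy's SAMP machinery calls a notification handler as function(private_key, sender_id, mtype, params, extra), with the message payload in the params dict. The params keys used here ('url', 'xlim', 'ylim') and the spectrum loader call are purely illustrative assumptions.

    def _draw(self, private_key, sender_id, mtype, params, extra):
        # Hypothetical handler for 'samp.app.new_data': fetch the announced
        # spectrum (the 'url' key and the load() method are assumptions) and
        # redraw the Matplotlib axes.
        spectrum = self.spectra.load(params['url'])
        self.axes.plot(spectrum.wavelength, spectrum.flux)
        self.ui.plotView.figure.canvas.draw_idle()

    def _viewport_change(self, private_key, sender_id, mtype, params, extra):
        # Hypothetical handler for 'samp.app.viewport_change': mirror the
        # axis limits announced by the other client (key names assumed).
        self.axes.set_xlim(*params['xlim'])
        self.axes.set_ylim(*params['ylim'])
        self.ui.plotView.figure.canvas.draw_idle()
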
Example #2
File: SAMP.py  Project: rajul/ginga
    def _connect_client(self):
        client = samp.SAMPIntegratedClient(
            metadata={
                "samp.name": "ginga",
                "samp.description.text": "Ginga viewer",
                "ginga.version": version
            })
        client.connect()

        # TODO: need to handle some administrative messages
        #client.bindReceiveNotification("samp.app.*", self.samp_placeholder)
        #client.bindReceiveCall("samp.app.*", self.samp_placeholder)

        # Loads a 2-dimensional FITS image.
        # Arguments:
        #   url (string): URL of the FITS image to load
        #   image-id (string) optional: Identifier which may be used
        #           to refer to the loaded image in subsequent messages
        #   name (string) optional: name which may be used to label the
        #           loaded image in the application GUI
        # Return Values: none
        client.bind_receive_call("image.load.fits", self.samp_call_load_fits)
        client.bind_receive_notification("image.load.fits",
                                         self.samp_notify_load_fits)

        # Not yet implemented.  Not sure if/how these are different
        # from the image.load.fits variants
        client.bind_receive_call("table.load.fits", self.samp_placeholder)
        client.bind_receive_notification("table.load.fits",
                                         self.samp_placeholder)

        # Directs attention (e.g. by moving a cursor or shifting the field
        #   of view) to a given point on the celestial sphere.
        # Arguments:
        #   ra (SAMP float): right ascension in degrees
        #   dec (SAMP float): declination in degrees
        # Return Values: none
        client.bind_receive_call("coord.pointAt.sky", self.samp_placeholder)
        client.bind_receive_notification("coord.pointAt.sky",
                                         self.samp_placeholder)

        # Loads a table in VOTable format. This is the usual way to
        # exchange table data between SAMP clients.
        # Arguments:
        #   url (string): URL of the VOTable document to load
        #   table-id (string) optional: identifier which may be used to
        #     refer to the loaded table in subsequent messages
        #   name (string) optional: name which may be used to label the
        #     loaded table in the application GUI
        # Return Values: none
        client.bind_receive_call("table.load.votable", self.samp_placeholder)
        client.bind_receive_notification("table.load.votable",
                                         self.samp_placeholder)
        return client
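
For reference, here is a minimal sketch of the sending side for the image.load.fits mtype documented above (not part of the ginga source): another SAMP client broadcasting a notification that ginga's samp_notify_load_fits handler would receive. The file URL and client name are placeholders; the import path is astropy.samp on current astropy (astropy.vo.samp on older releases).

from astropy.samp import SAMPIntegratedClient   # astropy.vo.samp on older astropy

sender = SAMPIntegratedClient(name="fits-sender")    # placeholder client name
sender.connect()
sender.notify_all({
    "samp.mtype": "image.load.fits",
    "samp.params": {
        "url": "file:///tmp/example.fits",   # placeholder: URL of the FITS image
        "image-id": "example-1",             # optional identifier
        "name": "Example image",             # optional GUI label
    },
})
sender.disconnect()
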
Example #3
def create_client(metadata, wait=0):

    logger = logging.getLogger("ClientMgr")

    # Create client, connect to Hub, and install message listener
    cli1 = sampy.SAMPIntegratedClient(metadata=metadata)

    try:
        cli1.connect()
    except (sampy.SAMPHubError, sampy.SAMPClientError):
        if (wait > 0): time.sleep(wait)
        return None

    return cli1
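
A minimal usage sketch (not from the original source): since create_client() returns None when the hub is unreachable, a caller might retry a few times before giving up. The metadata values below are placeholders.

my_metadata = {"samp.name": "my-client",
               "samp.description.text": "demo client"}   # placeholder metadata

client = None
for _ in range(5):
    client = create_client(my_metadata, wait=5)
    if client is not None:
        break

if client is None:
    logging.getLogger("ClientMgr").error("No SAMP hub available, giving up")
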
Example #4
def worker_slave(queue):
    """

    This function handles all work, either running collectcells locally or 
    remotely via ssh. Files to reduce are read from a queue.

    """

    logger = logging.getLogger("SAMPWorker")

    logger.info("Worker process started, ready for action...")

    if (not setup.use_ssh):
        # If we reduce frames locally, prepare the QR logging.
        options['clobber'] = False

    while (True):
        try:
            # print "\n\nWaiting for stuff to do\n\n"
            task = queue.get()
        except (KeyboardInterrupt, SystemExit) as e:
            # print "worker received termination notice"
            # Ignore the shut-down command here, and wait for the official
            # shutdown command from main task
            continue

        if (task is None):
            logger.info("Shutting down worker")
            queue.task_done()
            break

        filename, object_name, obsid = task

        logger.info("starting work on file %s" % (filename))

        ccopts = ""
        if (len(sys.argv) > 2):
            # There are some parameters to be forwarded to collectcells
            ccopts = " ".join(sys.argv[1:])
        # print "ccopts=",ccopts

        if (cmdline_arg_isset("-dryrun")):
            logger.info("DRYRUN: Sending off file %s for reduction" %
                        (filename))
            # print "task done!"
            queue.task_done()
            continue

        if (object_name.lower().find("focus") >= 0):
            #
            # This is most likely a focus exposure
            #
            n_stars = int(cmdline_arg_set_or_default("-nstars", 7))
            logger.info("New focus exposure to analyze (with %d stars)" %
                        (n_stars))

            if (setup.use_ssh):

                remote_inputfile = setup.translate_filename_local2remote(
                    filename)
                kw = {
                    'user': setup.ssh_user,
                    'host': setup.ssh_host,
                    'filename': remote_inputfile,
                    'podidir': setup.remote_podi_dir,
                    'outdir': setup.output_dir,
                    'nstars': n_stars,
                }
                ssh_command = "ssh %(user)s@%(host)s %(podidir)s/podi_focus.py -nstars=%(nstars)d %(filename)s %(outdir)s" % kw

                logger.info("Out-sourcing work to %(user)s@%(host)s via ssh" %
                            kw)
                process = subprocess.Popen(ssh_command.split(),
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE,
                                           close_fds=True)
                _stdout, _stderr = process.communicate()
                process.stdout.close()
                process.stderr.close()
                try:
                    process.terminate()
                except:
                    pass

                #print "*"*80
                if (not _stdout == "" and _stdout is not None):
                    logger.info("Received STDOUT:\n%s" % (_stdout))
                    #print "*"*80
                if (not _stderr == "" and _stderr is not None):
                    logger.info("Received STDERR:\n%s" % (_stderr))
                    #print _stderr
                    #print "*"*80

            else:
                # Run locally
                logger.info("Analyzing focus sequence (%s) locally" %
                            (filename))
                podi_focus.get_focus_measurement(filename,
                                                 n_stars=n_stars,
                                                 output_dir=setup.output_dir)

            logger.info("Done with analysis")

            # Now check if we are supposed to open/display the focus plot
            if (setup.focus_display is not None):

                remote_filename = "%s/%s_focus.png" % (setup.output_dir, obsid)
                local_filename = setup.translate_filename_remote2local(
                    filename, remote_filename)

                cmd = "%s %s &" % (setup.focus_display, local_filename)
                logger.info("Opening and displaying plot")
                os.system(cmd)

        else:
            #
            # This is NOT a focus exposure
            #

            if (setup.use_ssh):

                # This is not a focus exposure, so treat it as a normal science exposure
                remote_inputfile = setup.translate_filename_local2remote(
                    filename)
                kw = {
                    'user': setup.ssh_user,
                    'host': setup.ssh_host,
                    'collectcells': setup.ssh_executable,
                    'options': ccopts,
                    'filename': remote_inputfile,
                    'outputfile': setup.output_format,
                }

                ssh_command = "ssh %(user)s@%(host)s %(collectcells)s %(filename)s %(outputfile)s %(options)s -noclobber" % kw

                process = subprocess.Popen(ssh_command.split(),
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE,
                                           close_fds=True)
                _stdout, _stderr = process.communicate()
                process.stdout.close()
                process.stderr.close()
                try:
                    process.terminate()
                except:
                    pass

                #print "*"*80
                if (not _stdout == "" and _stdout is not None):
                    logger.info("Received STDOUT:\n%s" % (_stdout))
                    #print "*"*80
                if (not _stderr == "" and _stderr is not None):
                    logger.info("Received STDERR:\n%s" % (_stderr))
                    #print _stderr
                    #print "*"*80

            else:
                logger.info("Running collectcells (%s)" % (filename))
                podi_collectcells.collectcells_with_timeout(
                    input=filename,
                    outputfile=setup.output_format,
                    options=options,
                    timeout=300,
                    process_tracker=process_tracker)

            #
            # If requested, also send the command to ds9
            #
            local_filename = setup.translate_filename_remote2local(
                filename, setup.output_format)
            if (cmdline_arg_isset("-forward2ds9")):
                forward2ds9_option = cmdline_arg_set_or_default(
                    "-forward2ds9", "image")
                if (forward2ds9_option == "irafmosaic"):
                    cmd = "mosaicimage iraf %s" % (local_filename)
                else:
                    cmd = "fits %s" % (local_filename)

                logger.info("Forwarding file to ds9")
                logger.debug("filename: %s" % (filename))
                logger.debug("remote file: %s" % (remote_inputfile))
                logger.debug("local file: %s" % (local_filename))

                try:
                    cli1 = sampy.SAMPIntegratedClient(metadata=metadata)
                    cli1.connect()
                    cli1.enotify_all(mtype='ds9.set', cmd='frame 2')
                    cli1.enotify_all(mtype='ds9.set', cmd='scale scope global')
                    cli1.enotify_all(mtype='ds9.set', cmd=cmd)
                    cli1.disconnect()
                except Exception as err:
                    logger.error("Problems sending message to ds9: %s" % err)
                    podi_logging.log_exception()
                    pass

            # By default, also open the psf diagnostic plot, if available
            psf_plot_fn = local_filename[:-5] + ".psf.png"
            if (os.path.isfile(psf_plot_fn)):
                cmd = "%s %s &" % (setup.focus_display, psf_plot_fn)
                logger.info("Opening and displaying PSF diagnostic plot (%s)" %
                            (psf_plot_fn))
                os.system(cmd)

        #
        # Once the file is reduced, mark the current task as done.
        #
        logger.info("task done!")
        queue.task_done()

    print("Terminating worker process...")

    return
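
A minimal wiring sketch (not part of the original module, and it still relies on the module-level setup/options names used inside worker_slave): it follows the queue protocol visible above, where each task is a (filename, object_name, obsid) tuple, None is the shutdown sentinel, and the worker calls task_done() for every item it takes. All values are placeholders.

import multiprocessing

task_queue = multiprocessing.JoinableQueue()
worker = multiprocessing.Process(target=worker_slave, args=(task_queue,))
worker.daemon = True
worker.start()

# Enqueue one exposure for reduction (placeholder values).
task_queue.put(("/data/exposure.fits", "NGC 104", "obsid-0001"))

# Wait until every queued task has been marked done, then shut down cleanly.
task_queue.join()
task_queue.put(None)
worker.join()
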
Example #5
def handle_swarp_request(params, logger):

    # print "\n================"*3,params,"\n================"*3

    str_filelist = params['filelist']
    tracking_rate = params['trackrate']
    logger.debug("Received 'filelist': %s" % (str_filelist))
    logger.debug("Received 'trackrate': %s" % (tracking_rate))

    # print "starting work on file",str_filelist

    #
    # Get rid of all files that do not exist
    #
    filelist = []
    for fitsfile in str_filelist.split(","):
        if (os.path.isfile(fitsfile)):
            logger.debug("Found valid input file: %s" % (fitsfile))
            filelist.append(fitsfile)
        elif (os.path.isdir(fitsfile)):
            logger.debug("Found directory name")
            if (fitsfile[-1] == "/"):
                fitsfile = fitsfile[:-1]
            basedir, filebase = os.path.split(fitsfile)
            fitsfile = "%s/%s.33.fits" % (fitsfile, filebase)
            filelist.append(fitsfile)

    # print "filelist = ",filelist

    # We need at least one file to work on
    if (len(filelist) <= 0):
        logger.info("No valid files for stacking found!")
        return
        # queue.task_done()
        # continue

    # print datetime.datetime.now().strftime("%H:%M:%S.%f")
    # print filelist
    # print tracking_rate
    # print extra

    logger.info("Input filelist:\n%s" %
                ("\n".join([" --> %s" % fn for fn in filelist])))
    #("\n".join(filelist)))

    #
    # Open the first file in the list, get the object name
    #
    firsthdu = pyfits.open(filelist[0])
    object_name = firsthdu[0].header['OBJECT']  \
        if 'OBJECT' in firsthdu[0].header else "unknown"
    filter_name = firsthdu[0].header['FILTER'] \
        if 'FILTER' in firsthdu[0].header else "unknown"
    firsthdu.close()
    logger.debug("Reference data: object:%s, filter:%s" %
                 (object_name, filter_name))

    # Create a ODI-like timestamp
    formatted_timestamp = params['timestamp'].strftime("%Y%m%dT%H%M%S")
    logger.debug("Formatted timestamp for output file: %s" %
                 (formatted_timestamp))

    # instead of the number in the dither sequence,
    # use the number of frames in this stack
    number_of_frames = len(filelist)

    # Assemble the entire filename
    output_filename = "stack%s.%d__%s__%s.fits" % (
        formatted_timestamp, number_of_frames, object_name, filter_name)
    output_filename = escape_characters(output_filename)
    remote_output_filename = "%(outputdir)s/%(output_filename)s" % {
        "outputdir": setup.output_dir,
        "output_filename": output_filename,
    }
    logger.debug("Setting output filename: %s" % (output_filename))

    #
    # Re-format the input filelist to point to valid files
    # on the remote filesystem
    #
    # Important: The input files specified are RAW files, but
    # we need to stack based on the reduced files
    #
    remote_filelist = []
    for fn in filelist:
        remote_filename = format_filename(fn, setup.output_format)
        remote_filelist.append(remote_filename)
        #remote_filelist.append(setup.translate_filename_local2remote(fn))

    logger.debug("Filelist on remote filesystem:\n%s" %
                 ("".join(["  --> %s\n" % fn for fn in remote_filelist])))
    # "\n  --> "+"\n  --> ".join(remote_filelist)))

    # If the non-sidereal option is set, use the tracking rate and
    # configure the additional command-line flag for swarpstack
    nonsidereal_option = ""
    if (not tracking_rate == 'none'):
        items = tracking_rate.split(",")
        if (len(items) == 2):
            track_ra = float(items[0])
            track_dec = float(items[1])
            if (track_ra != 0 or track_dec != 0):
                # This fulfills all criteria for a valid non-sidereal command
                # Use first frame as MJD reference frame
                mjd_ref_frame = remote_filelist[0]
                nonsidereal_option = "-nonsidereal=%(ra)s,%(dec)s,%(refframe)s" % {
                    'ra': items[0],
                    'dec': items[1],
                    'refframe': mjd_ref_frame,
                }
    logger.debug("Non-sidereal setup: %s" % (nonsidereal_option))

    #
    # Now we have the list of input files, and the output filename,
    # let's go and initiate the ssh request and get to work
    #

    # Set options (bgsub, pixelscale) etc.
    options = "%s" % (nonsidereal_option)

    if ("bgsub" in params and params['bgsub'] == 'yes'):
        options += " -bgsub"

    if ('pixelscale' in params):
        try:
            pixelscale = float(params['pixelscale'])
            print("setting pixelscale")
            if (pixelscale >= 0.1):
                options += " -pixelscale=%s" % params['pixelscale']
        except:
            pass

    if ('skipota' in params):
        otas = params['skipota'].split(",")
        ota_list = []
        for ota in otas:
            try:
                ota_d = int(ota)
                ota_list.append("%02d" % ota_d)
            except:
                pass
        if (len(ota_list) > 0):
            options += " -skipota=%s" % (",".join(ota_list))

    # get the special swarp-settings command line
    if (cmdline_arg_isset("-swarpopts")):
        swarp_opts = cmdline_arg_set_or_default("-swarpopts", None)
        # print "\n"*5,swarp_opts,"\n"*5
        items = swarp_opts.split(":")
        for item in items:
            options += " -%s" % (item)

    # Disabled for now, until we can properly handle different
    # weight types to forward them to ds9
    if ('combine' in params):
        combine_mode = params['combine']
        options += " -combine=%s" % (combine_mode.split(",")[0])

    logger.info("Stacking %d frames, output in %s" %
                (len(filelist), output_filename))

    # print "options=",options
    ssh_command = "ssh %(username)s@%(host)s %(swarpnice)s \
                      %(podidir)s/podi_swarpstack.py \
                      %(remote_output_filename)s %(options)s %(remote_inputlist)s" % {
        'username': setup.ssh_user,
        'host': setup.ssh_host,
        'swarpnice': setup.swarp_nicelevel,
        'podidir': setup.remote_podi_dir,
        'remote_output_filename': remote_output_filename,
        'outputdir': setup.output_dir,
        'output_filename': output_filename,
        'options': options,
        'remote_inputlist': " ".join(remote_filelist)
    }
    logger.debug("SSH command:\n%s" % (" ".join(ssh_command.split())))

    #
    # Now execute the actual swarpstack command
    #
    if (not cmdline_arg_isset("-dryrun")):
        logger.info("Running swarpstack remotely on %s" % (setup.ssh_host))
        start_time = time.time()
        process = subprocess.Popen(ssh_command.split(),
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        _stdout, _stderr = process.communicate()
        logger.info(str(_stdout))
        # print _stdout
        # print _stderr
        end_time = time.time()
        logger.info("swarpstack has completed successfully (%.2f seconds)" %
                    (end_time - start_time))
    else:
        logger.info("Skipping execution (-dryrun given):\n\n%s\n\n" %
                    (" ".join(ssh_command.split())))

    #
    # Once we are here, we have the output file created
    # If requested, send it to ds9 to display
    #
    if (not cmdline_arg_isset("-dryrun")
            and cmdline_arg_isset("-forward2ds9")):
        local_filename = setup.translate_filename_remote2local(
            None, remote_output_filename)
        # adjust the combine mode part of the filename
        local_filename = local_filename[:-5] + ".WEIGHTED.fits"
        logger.debug("Commanding ds9 to display %s ..." % (local_filename))
        cmd = "fits %s" % (local_filename)
        try:
            # print "\n"*5,"sending msg to ds9",local_filename,"\n"*5
            cli_ds9 = sampy.SAMPIntegratedClient(metadata=metadata)
            cli_ds9.connect()
            cli_ds9.enotify_all(mtype='ds9.set', cmd=cmd)
            cli_ds9.disconnect()
            logger.info(
                "Sent command to display new stacked frame to ds9 (%s)" %
                (local_filename))
        except:
            logger.warning("Problems sending message to ds9")
            pass
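
A minimal invocation sketch (not from the original source; it assumes the surrounding module still provides setup, metadata and the helper functions used above). The dictionary keys are the ones handle_swarp_request() actually reads; the values are placeholders.

import datetime
import logging

params = {
    'filelist': "/data/exp0001.fits,/data/exp0002.fits",   # comma-separated inputs
    'trackrate': 'none',                  # or "ra_rate,dec_rate" for non-sidereal stacks
    'timestamp': datetime.datetime.now(), # used to build the ODI-like output name
    'bgsub': 'yes',                       # optional: enable background subtraction
    'pixelscale': '0.2',                  # optional: output pixel scale
}
handle_swarp_request(params, logging.getLogger("SwarpHandler"))
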
Example #6
        logger.info("Finishing up %d stacking jobs, please wait ..." %
                    (stacking_queue.qsize()))
    except:
        logger.info("Finishing up stacking jobs, please wait ...")
    stacking_queue.put(None)
    qr_stacking_process.join()

    logger.info("All done, goodbye!")


if __name__ == "__main__":

    if (len(sys.argv) > 1 and sys.argv[1] == "-testconnect"):

        try:
            cli1 = sampy.SAMPIntegratedClient()
            cli1.connect()
            cli1.bind_receive_message(setup.message_queue, receive_msg)
            cli1.disconnect()

            print("\nConnection successful!\n")
        except Exception as err:
            print("\nProblem connecting {0}\n ".format(err))
            pass

        sys.exit(0)

    elif (cmdline_arg_isset("-yappi")):
        print("Running with yappi profiler")
        import yappi
        yappi.start()
Example #7
import os
import astropy.coordinates as coord
from astropy.vo import samp   # note: astropy.samp in newer astropy releases
from urlparse import urljoin  # note: urllib.parse on Python 3

from gaia.tap import cone_search


# Get everything around some cluster
cluster_name = "NGC 104"

cluster = coord.SkyCoord.from_name(cluster_name)

cluster_candidates = cone_search(cluster.ra.deg, cluster.dec.deg, 0.5)
cluster_candidates.write("cluster.votable", format="votable")


# Create a SAMP client and send our new table to all other clients (incl TOPCAT).
client = samp.SAMPIntegratedClient()
client.connect()

client.notify_all({
    "samp.mtype": "table.load.votable", 
    "samp.params": {
        "name": cluster_name,
        "url": urljoin("file:", os.path.abspath("cluster.votable"))
    }
})
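
For completeness, a minimal sketch of the receiving side (not part of the original snippet): a second client binds a notification handler for table.load.votable and reads the broadcast table. The client name is a placeholder, and the handler assumes a file: URL like the one built above.

from urlparse import urlparse      # urllib.parse on Python 3
from astropy.table import Table

receiver = samp.SAMPIntegratedClient(name="table-receiver")   # placeholder name
receiver.connect()

def receive_table(private_key, sender_id, mtype, params, extra):
    # params carries the same keys we sent above: 'url' and 'name'.
    local_path = urlparse(params["url"]).path
    table = Table.read(local_path, format="votable")
    print("Received table '{0}' with {1} rows".format(params.get("name"), len(table)))

receiver.bind_receive_notification("table.load.votable", receive_table)
# The receiver must stay connected for the handler to be invoked.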