Example #1
def main(base_dir,
         processed_dir=None,
         picid=None,
         force=False,
         camera_bias=2048,
         num_workers=8,
         chunk_size=12,
         ):
    print(f'Finding similar stars for observation in {base_dir}')

    fields_dir = os.path.join(os.environ['PANDIR'], 'images', 'fields')
    source_filename = os.path.join(fields_dir, base_dir, 'point-sources-filtered.csv.bz2')
    assert os.path.isfile(source_filename)
    print(f'Using sources in {source_filename}')

    # Get the sources
    sources = pipeline.lookup_sources_for_observation(
        filename=source_filename).set_index(['picid'], append=True)

    if picid:
        print(f"Creating stamp for {picid}")
        sources = sources.query(f'picid == {picid}')

        if not len(sources):
            print(f"{picid} does not exist, exiting")
            return

    picid_list = list(sources.index.levels[1].unique())

    # Used for progress display
    num_sources = len(picid_list)

    print(f'Finding similar stars for {num_sources} sources')

    call_params = {
        'picid_list': picid_list,  # Pass the full list
        'base_dir': base_dir,
        'processed_dir': processed_dir,
        'force': force,
        'camera_bias': camera_bias,
    }

    # Build up the parameter list (NB: "clever" zip_longest usage)
    if picid:
        params = zip_longest([picid], [], fillvalue=call_params)
    else:
        params = zip_longest(picid_list, [], fillvalue=call_params)

    start_time = current_time()
    print(f'Starting at {start_time}')

    with concurrent.futures.ProcessPoolExecutor(max_workers=num_workers) as executor:
        picids = list(tqdm(executor.map(find_similar, params,
                                        chunksize=chunk_size), total=len(picid_list)))
        logging.debug(f'Found similar stars for {len(picids)} sources')

    end_time = current_time()
    print(f'Ending at {end_time}')
    total_time = (end_time - start_time).sec
    print(f'Total: {total_time:.02f} seconds')
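The "clever" zip_longest usage above pairs every picid with the same shared parameter dict: zip_longest exhausts the empty second iterable immediately and pads it with the fillvalue. A minimal self-contained sketch of the pattern (find_similar here is a stand-in for the real worker):

from itertools import zip_longest

def find_similar(args):
    # Each element is a (picid, call_params) tuple, because zip_longest
    # pads the empty second iterable with the shared fillvalue dict.
    picid, call_params = args
    return picid, call_params['base_dir']

call_params = {'base_dir': 'obs-001'}  # hypothetical observation dir
params = zip_longest([101, 102, 103], [], fillvalue=call_params)
print(list(map(find_similar, params)))
# [(101, 'obs-001'), (102, 'obs-001'), (103, 'obs-001')]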
Example #2
def test_pretty_time():
    t0 = '2016-08-13 10:00:00'
    os.environ['POCSTIME'] = t0

    t1 = current_time(pretty=True)
    assert t1 == t0

    # This will increment one second - see docs
    t2 = current_time(flatten=True)
    assert t2 != t0
    assert t2 == '20160813T100001'

    # This will increment one second - see docs
    t3 = current_time(datetime=True)
    assert t3 == dt(2016, 8, 13, 10, 0, 2, tzinfo=tz.utc)
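The contract this test relies on: while POCSTIME is set, each current_time() call returns the stored time and then advances the simulated clock by one second, which is why the later calls come back one and two seconds after t0. A rough sketch of that behavior (not the real panoptes-utils implementation; the real keyword is datetime=True rather than as_datetime):

import os
from datetime import datetime, timedelta, timezone

def fake_current_time(pretty=False, flatten=False, as_datetime=False):
    # Read the simulated clock, then advance it one second for the next caller.
    t = datetime.strptime(os.environ['POCSTIME'], '%Y-%m-%d %H:%M:%S')
    os.environ['POCSTIME'] = (t + timedelta(seconds=1)).strftime('%Y-%m-%d %H:%M:%S')
    if flatten:
        return t.strftime('%Y%m%dT%H%M%S')
    if as_datetime:
        return t.replace(tzinfo=timezone.utc)
    return t.strftime('%Y-%m-%d %H:%M:%S')  # the pretty=True format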
Example #3
def obj():
    return {
        "name": "Generic PANOPTES Unit",
        "pan_id": "PAN000",
        "location": {
            "name": "Mauna Loa Observatory",
            "latitude": 19.54 * u.degree,  # Astropy unit
            "longitude": "-155.58 deg",  # String unit
            "elevation": "3400.0 m",
            "horizon": 30 * u.degree,
            "flat_horizon": -6 * u.degree,
            "focus_horizon": -12 * u.degree,
            "observe_horizon": -18 * u.degree,
            "timezone": "US/Hawaii",
            "gmt_offset": -600,
        },
        "directories": {
            "base": "/var/panoptes",
            "images": "images",
            "data": "data",
            "resources": "POCS/resources/",
            "targets": "POCS/resources/targets",
            "mounts": "POCS/resources/mounts",
        },
        "db": {
            "name": "panoptes",
            "type": "file"
        },
        "empty": {},
        "current_time": current_time(),
        "bool": True,
    }
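The fixture mixes unit styles on purpose: a Quantity, a string with a unit, and a bare number. Astropy parses the first two into equivalent Quantity objects, as a quick check confirms:

from astropy import units as u

lat = 19.54 * u.degree           # number * unit
lon = u.Quantity('-155.58 deg')  # parsed from a string
print(lat, lon.to(u.degree))     # 19.54 deg -155.58 deg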
Example #4
    def do_last_reading(self, device):
        """ Gets the last reading from the device. """
        if not device:
            print_warning('Usage: last_reading <device>')
            return
        if not hasattr(self, device):
            print_warning('No such sensor: {!r}'.format(device))
            return

        rec = self.db.get_current(device)

        if rec is None:
            print_warning('No reading found for {!r}'.format(device))
            return

        print_info('*' * 80)
        print("{}:".format(device.upper()))
        pprint(rec)
        print_info('*' * 80)

        # Display the age in seconds of the record
        if isinstance(rec.get('date'), datetime.datetime):
            now = current_time(datetime=True).astimezone(utc)
            record_date = rec['date'].astimezone(utc)
            age = (now - record_date).total_seconds()
            if age < 120:
                print_info('{:.1f} seconds old'.format(age))
            else:
                print_info('{:.1f} minutes old'.format(age / 60.0))
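The astimezone(utc) conversions above matter because Python refuses to subtract a naive datetime from an aware one. With both sides aware, the age calculation is plain arithmetic, as this isolated version shows:

import datetime
from datetime import timezone

now = datetime.datetime(2016, 8, 13, 10, 2, 30, tzinfo=timezone.utc)
record_date = datetime.datetime(2016, 8, 13, 10, 0, 0, tzinfo=timezone.utc)
age = (now - record_date).total_seconds()
print('{:.1f} minutes old'.format(age / 60.0))  # 2.5 minutes old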
Example #5
    def send_message(self, topic, message):
        """ Responsible for actually sending message across a topic

        Args:
            topic(str):   Name of topic to send on. The name must
                match topic_name_re.
            message:   Message to be sent (a string or a dict).
        """
        if not isinstance(topic, str):
            raise ValueError('Topic name must be a string')
        elif not self.topic_name_re.fullmatch(topic):
            raise ValueError('Topic name ("{}") is not valid'.format(topic))

        if topic == 'PANCHAT':
            self.logger.info(f"{topic} {message}")

        if isinstance(message, str):
            message = to_json({
                "message": message,
                "timestamp": current_time(pretty=True),
            })
        elif isinstance(message, dict):
            message = to_json(message)
        else:
            raise ValueError('Message value must be a string or dict')

        # Build the full message with topic
        full_message = f'{topic} {message}'

        # Send the message
        # self.socket.send_string(full_message, flags=zmq.NOBLOCK)
        self.socket.send_string(full_message)
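The 'topic payload' layout of full_message fits ZeroMQ PUB/SUB, where subscribers filter messages by their leading bytes. A sketch of the receiving side this format implies (the payload is hypothetical):

import json

full_message = 'PANCHAT {"message": "hello", "timestamp": "2016-08-13 10:00:00"}'
topic, payload = full_message.split(' ', 1)  # topic prefix, then the JSON body
data = json.loads(payload)
print(topic, data['message'])  # PANCHAT hello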
Example #6
def test_countdown_timer_bad_input():
    with pytest.raises(ValueError):
        assert CountdownTimer('d')

    with pytest.raises(ValueError):
        assert CountdownTimer(current_time())

    with pytest.raises(AssertionError):
        assert CountdownTimer(-1)
Example #7
def _make_pretty_from_fits(fname=None,
                           title=None,
                           figsize=(10, 10 / 1.325),
                           dpi=150,
                           alpha=0.2,
                           number_ticks=7,
                           clip_percent=99.9,
                           **kwargs):

    with open_fits(fname) as hdu:
        header = hdu[0].header
        data = hdu[0].data
        data = focus_utils.mask_saturated(data)
        wcs = WCS(header)

    if not title:
        field = header.get('FIELD', 'Unknown field')
        exptime = header.get('EXPTIME', 'Unknown exptime')
        filter_type = header.get('FILTER', 'Unknown filter')

        try:
            date_time = header['DATE-OBS']
        except KeyError:
            # If we don't have DATE-OBS, check filename for date
            try:
                basename = os.path.splitext(os.path.basename(fname))[0]
                date_time = date_parser.parse(basename).isoformat()
            except Exception:
                # Otherwise use now
                date_time = current_time(pretty=True)

        date_time = date_time.replace('T', ' ', 1)

        title = '{} ({}s {}) {}'.format(field, exptime, filter_type, date_time)

    norm = ImageNormalize(interval=PercentileInterval(clip_percent), stretch=LogStretch())

    fig = Figure()
    FigureCanvas(fig)
    fig.set_size_inches(*figsize)
    fig.dpi = dpi

    if wcs.is_celestial:
        ax = fig.add_subplot(1, 1, 1, projection=wcs)
        ax.coords.grid(True, color='white', ls='-', alpha=alpha)

        ra_axis = ax.coords['ra']
        ra_axis.set_axislabel('Right Ascension')
        ra_axis.set_major_formatter('hh:mm')
        ra_axis.set_ticks(
            number=number_ticks,
            color='white',
            exclude_overlapping=True
        )

        dec_axis = ax.coords['dec']
        dec_axis.set_axislabel('Declination')
        dec_axis.set_major_formatter('dd:mm')
        dec_axis.set_ticks(
            number=number_ticks,
            color='white',
            exclude_overlapping=True
        )
    else:
        ax = fig.add_subplot(111)
        ax.grid(True, color='white', ls='-', alpha=alpha)

        ax.set_xlabel('X / pixels')
        ax.set_ylabel('Y / pixels')

    im = ax.imshow(data, norm=norm, cmap=palette, origin='lower')
    fig.colorbar(im)
    fig.suptitle(title)

    new_filename = fname.replace('.fits', '.jpg')
    fig.savefig(new_filename, bbox_inches='tight')

    # explicitly close and delete figure
    fig.clf()
    del fig

    return new_filename
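Hypothetical usage of the helper above (the path is illustrative): it writes a JPEG next to the input FITS file and returns the new path.

jpg = _make_pretty_from_fits('/var/panoptes/images/fields/obs-001/20160813T100000.fits',
                             clip_percent=99.5)
print(jpg)  # /var/panoptes/images/fields/obs-001/20160813T100000.jpg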
Example #8
def main(base_dir,
         processed_dir=None,
         camera_bias=2048,
         gain=1.5,
         readout_noise=10.5,
         frame_slice=None,
         table_filter=None,
         num_refs=50,
         aperture_size=5,
         make_plots=False,
         color_correction=False,
         picid=None,
         force=False,
         num_workers=8,
         chunk_size=12,
         ):

    logger.info(f'Building references for stars for observation in {base_dir}')

    if picid:
        logger.info(f'Searching for picid={picid}')
        output_dir = os.path.join(processed_dir, str(picid), base_dir)
    else:
        output_dir = os.path.join(processed_dir, '*', base_dir)

    psc_files = glob(os.path.join(output_dir, 'psc.csv'), recursive=True)

    logger.info(f'Found {len(psc_files)} PSC files')

    call_params = {
        'base_dir': base_dir,
        'output_dir': output_dir,
        'processed_dir': processed_dir,
        'force': force,
        'frame_slice': frame_slice,
        'table_filter': table_filter,
        'num_refs': num_refs,
        'camera_bias': camera_bias,
        'gain': gain,
        'readout_noise': readout_noise,
        'make_plots': make_plots,
        'color_correction': color_correction,
    }
    logger.debug(f'Call params: {call_params}')

    # Build up the parameter list (NB: "clever" zip_longest usage)
    params = zip_longest(psc_files, [], fillvalue=call_params)

    start_time = current_time()
    print(f'Starting at {start_time}')

    with concurrent.futures.ProcessPoolExecutor(max_workers=num_workers) as executor:
        picids = list(tqdm(
            executor.map(build_ref, params, chunksize=chunk_size
                         ),
            total=len(psc_files))
        )
        logger.info(f'Created {len(picids)} PSC references')

    end_time = current_time()
    print(f'Ending at {end_time}')
    total_time = (end_time - start_time).sec
    print(f'Total: {total_time:.02f} seconds')
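When no picid is given, the '*' in output_dir stands in for the per-star directory level, so a single glob collects every star's psc.csv. A sketch with hypothetical paths (note that recursive=True only changes the meaning of '**' patterns; a plain '*' still matches exactly one level):

import os
from glob import glob

processed_dir = '/var/panoptes/processed'  # hypothetical
base_dir = '20160813T100000'               # hypothetical
pattern = os.path.join(processed_dir, '*', base_dir, 'psc.csv')
print(glob(pattern, recursive=True))       # one psc.csv per <picid> directory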
Example #10
def main(base_dir=None,
         output_dir=None,
         stamp_size=10,
         picid=None,
         force=False,
         num_workers=8,
         chunk_size=12
         ):

    fields_dir = os.path.join(os.environ['PANDIR'], 'images', 'fields')

    # Get the sources from the stored file.
    source_filename = os.path.join(fields_dir, base_dir, 'point-sources-filtered.csv.bz2')

    # Check that the file exists, otherwise `lookup_sources_for_observation` will try to create it.
    if not os.path.isfile(source_filename):
        raise UserWarning('Please do a source detection and filtering first.')

    # Load the sources from the file.
    sources = pipeline.lookup_sources_for_observation(filename=source_filename)
    sources.set_index(['picid'], append=True, inplace=True)

    # Used for progress display.
    num_sources = len(list(sources.index.levels[1].unique()))

    # Add full path to filename in table.
    sources.file = sources.file.apply(lambda fn: os.path.join(fields_dir, base_dir, fn))

    if picid:
        print(f"Creating stamp for {picid}")
        sources = sources.query(f'picid == {picid}')

        if not len(sources):
            print(f"{picid} does not exist, exiting")
            return
    else:
        print(f'Building PSC for {num_sources} sources')

    start_time = current_time()

    call_params = {
        'observation_dir': base_dir,
        'output_dir': output_dir,
        'force': force,
        'stamp_size': stamp_size,
    }

    print(f'Starting at {start_time}')

    # Run everything in parallel.
    with concurrent.futures.ProcessPoolExecutor(max_workers=num_workers) as executor:
        grouped_sources = sources.groupby('picid')

        params = zip_longest(grouped_sources, [], fillvalue=call_params)

        picids = list(tqdm(executor.map(make_psc, params, chunksize=chunk_size),
                           total=len(grouped_sources)))
        print(f'Created {len(picids)} PSCs')

    end_time = current_time()
    print(f'Ending at {end_time}')
    total_time = (end_time - start_time).sec
    print(f'Total: {total_time:.02f} seconds')
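This variant of the zip_longest trick pairs groupby output instead of a flat list, so each worker call receives ((picid, group_dataframe), call_params). A minimal sketch of the shape make_psc has to unpack (make_psc here is a stand-in for the real worker):

from itertools import zip_longest
import pandas as pd

def make_psc(args):
    # ((picid, rows), call_params): the groupby pair plus the shared dict.
    (picid, rows), call_params = args
    return picid, len(rows), call_params['stamp_size']

sources = pd.DataFrame({'picid': [1, 1, 2], 'x': [10.0, 10.1, 55.2]})
params = zip_longest(sources.groupby('picid'), [], fillvalue={'stamp_size': 10})
print([make_psc(p) for p in params])  # [(1, 2, 10), (2, 1, 10)]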
Example #11
def create_storage_obj(collection, data, obj_id=None):
    """Returns the object to be stored in the database"""
    obj = dict(data=data, type=collection, date=current_time(datetime=True))
    if obj_id:
        obj['_id'] = obj_id
    return obj
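Usage is straightforward; the optional obj_id lets the caller pin the database key (collection names and data below are illustrative):

rec = create_storage_obj('weather', {'temp_C': 21.3})
print(rec['type'], rec['data'])  # weather {'temp_C': 21.3}

rec2 = create_storage_obj('config', {'site': 'Mauna Loa'}, obj_id='current')
print(rec2['_id'])               # current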