示例#1
0
    def __init__(self, endpoint_url=None, sensor_name=None, *args, **kwargs):
        """Set up a remote sensor reader.

        Args:
            endpoint_url (str, optional): The http endpoint for the sensor.
                If not provided, it is looked up from the
                ``environment.<sensor_name>.url`` config entry.
            sensor_name (str, optional): Name of the sensor, used for the
                config lookup and for logging.
            **kwargs: ``db_type`` may be given to override the configured
                database type.

        Raises:
            error.PanError: If no ``endpoint_url`` is given and none can be
                found in the config.
        """
        self.logger = get_logger()
        self.logger.info(f'Setting up remote sensor {sensor_name}')

        # Database type comes from kwargs if given, otherwise from the config.
        db_type = get_config('db.type', default='file')
        if 'db_type' in kwargs:
            db_type = kwargs['db_type']
            self.logger.info(f"Setting up {db_type} type database")

        self.db = PanDB(db_type=db_type)

        self.sensor_name = sensor_name
        self.sensor = None

        if endpoint_url is None:
            # Get the endpoint for the sensor from the config.
            endpoint_url = get_config(f'environment.{sensor_name}.url')
            if endpoint_url is None:
                raise error.PanError(f'No endpoint_url for {sensor_name}')

        # Allow bare host[:port] entries by defaulting to an http scheme.
        if not endpoint_url.startswith('http'):
            endpoint_url = f'http://{endpoint_url}'

        self.endpoint_url = endpoint_url
示例#2
0
def create_scheduler_from_config(observer=None, *args, **kwargs):
    """Set up the scheduler that will be used by the observatory.

    The scheduler type and its fields file are read from the ``scheduler``
    section of the config. Extra ``args``/``kwargs`` are passed through to
    the scheduler constructor.

    Args:
        observer (optional): The observer for the site. If not provided,
            one is created from the config.

    Returns:
        The created scheduler instance, or None if there is no ``scheduler``
        entry in the config.

    Raises:
        error.NotFound: If the fields file does not exist, or the scheduler
            module or one of its requirements can't be found.
    """
    logger = get_logger()

    scheduler_config = get_config('scheduler', default=None)
    logger.info(f'scheduler_config: {scheduler_config!r}')

    # Covers both a missing entry (None) and an empty mapping.
    if not scheduler_config:
        logger.info("No scheduler in config")
        return None

    if not observer:
        logger.debug('No Observer provided, creating from config.')
        site_details = create_location_from_config()
        observer = site_details['observer']

    scheduler_type = scheduler_config.get('type', 'dispatch')

    # Read the targets from the file.
    fields_file = scheduler_config.get('fields_file', 'simple.yaml')
    fields_path = os.path.join(get_config('directories.targets'), fields_file)
    logger.debug(f'Creating scheduler: {fields_path}')

    # Guard clause: fail fast if the targets file isn't there.
    if not os.path.exists(fields_path):
        raise error.NotFound(
            msg=f"Fields file does not exist: fields_file={fields_file!r}")

    try:
        # Load the scheduler module matching the configured type.
        module = load_module(f'panoptes.pocs.scheduler.{scheduler_type}')

        obstruction_list = get_config('location.obstructions', default=[])
        default_horizon = get_config('location.horizon',
                                     default=30 * u.degree)

        horizon_line = horizon_utils.Horizon(
            obstructions=obstruction_list,
            default_horizon=default_horizon.value)

        # Simple constraints for now.
        constraints = [
            Altitude(horizon=horizon_line),
            MoonAvoidance(),
            Duration(default_horizon, weight=5.)
        ]

        # Create the Scheduler instance. Positional extras go before the
        # keyword arguments for readability; the call is equivalent.
        scheduler = module.Scheduler(observer,
                                     *args,
                                     fields_file=fields_path,
                                     constraints=constraints,
                                     **kwargs)
        logger.debug("Scheduler created")
    except error.NotFound as e:
        # Re-raise but keep the original traceback chained for debugging.
        raise error.NotFound(msg=e) from e

    return scheduler
 def __init__(self, *args, **kwargs):
     """Set up the queues, stop event and PLC simulator for this controller.

     The simulator is constructed here but its worker thread is not
     started; ``plc_thread`` is left as None — presumably started by
     another method of this class (not visible here).
     """
     super().__init__(*args, **kwargs)
     self.logger = get_logger()
     # Worker thread placeholder; created/started elsewhere.
     self.plc_thread = None
     # Commands sent to the simulated PLC and the statuses it reports back.
     self.command_queue = queue.Queue(maxsize=50)
     self.status_queue = queue.Queue(maxsize=1000)
     # The event starts in the "set" (stopped) state; the simulator is
     # handed this event so it can observe stop requests.
     self.stop = threading.Event()
     self.stop.set()
     self.plc = AstrohavenPLCSimulator(self.command_queue,
                                       self.status_queue, self.stop,
                                       self.logger)
示例#4
0
def test_base_logger(caplog, profile, tmp_path):
    """Check that a message logged via get_logger shows up in pytest's caplog."""
    logger = get_logger(log_dir=str(tmp_path),
                        full_log_file=None)
    logger.debug('Hello')
    time.sleep(1)  # Wait for log to make it there.

    # Only inspect the most recent records to avoid unrelated log noise.
    see_log = any(rec.message == 'Hello' for rec in caplog.records[-5:])

    assert see_log
示例#5
0
    def __init__(self, config_host=None, config_port=None, *args, **kwargs):
        """Initialize the base object.

        The config host and port fall back to the ``PANOPTES_CONFIG_HOST``
        and ``PANOPTES_CONFIG_PORT`` environment variables. The database
        object is created once at module level and shared by all instances.
        """
        self.__version__ = __version__

        # Explicit arguments win over the environment variables.
        self._config_host = config_host or os.getenv('PANOPTES_CONFIG_HOST', 'localhost')
        self._config_port = config_port or os.getenv('PANOPTES_CONFIG_PORT', 6563)

        self.logger = get_logger()

        global PAN_DB_OBJ
        if PAN_DB_OBJ is None:
            # kwargs override the runtime config for each db setting.
            db_options = {
                name: kwargs.get(f'db_{name}', self.get_config(f'db.{name}', default=fallback))
                for name, fallback in (('type', 'file'),
                                       ('name', 'panoptes'),
                                       ('folder', 'json_store'))
            }
            PAN_DB_OBJ = PanDB(db_type=db_options['type'],
                               db_name=db_options['name'],
                               storage_dir=db_options['folder'])

        self.db = PAN_DB_OBJ
示例#6
0
#!/usr/bin/env python
import os
import re
from glob import glob
from contextlib import suppress

import subprocess
import shutil

from panoptes.utils import error
from panoptes.pocs.utils.logger import get_logger
from panoptes.utils.config.client import get_config
from panoptes.utils.images import fits as fits_utils
from panoptes.utils.images import make_timelapse

logger = get_logger()


def upload_observation_to_bucket(pan_id,
                                 dir_name,
                                 include_files='*.fz',
                                 bucket='panoptes-exp',
                                 **kwargs):
    """Upload an observation directory to google cloud storage.

    This is a convenience function for bulk uploading an observation folder to a
    bucket. This assumes that observations are placed within `/images/fields`
    and follow the normal naming convention for observations.

    Note:
        This requires that the command line utility `gsutil` be installed
示例#7
0
import os

import pytest
from _pytest.logging import caplog as _caplog  # noqa

from panoptes.pocs import hardware
from panoptes.pocs.utils.logger import get_logger
from panoptes.pocs.utils.logger import PanLogger
from panoptes.utils.config.client import set_config
from panoptes.utils.config.server import config_server

# TODO download IERS files.

# Database backends available to the tests.
_all_databases = ['file', 'memory']

TESTING_LOG_LEVEL = 'TRACE'
LOGGER_INFO = PanLogger()

# Console logger for the test run; enable output for the whole package.
logger = get_logger(console_log_file=TESTING_LOG_LEVEL)
logger.enable('panoptes')
# Add a level above TRACE and below DEBUG
logger.level("testing", no=15, icon="🤖", color="<LIGHT-BLUE><white>")
# Log line format (loguru-style markup): level initial, UTC timestamp,
# local time, source location, then the message.
log_fmt = "<lvl>{level:.1s}</lvl> " \
          "<light-blue>{time:MM-DD HH:mm:ss.SSS!UTC}</>" \
          "<blue> ({time:HH:mm:ss zz})</> " \
          "| <c>{name} {function}:{line}</c> | " \
          "<lvl>{message}</lvl>"

# All test logs go to a single file under $PANLOG.
log_file_path = os.path.expandvars('${PANLOG}/panoptes-testing.log')
startup_message = f' STARTING NEW PYTEST RUN - LOGS: {log_file_path} '
logger.add(log_file_path,
           enqueue=True,  # multiprocessing
           format=log_fmt,
           colorize=True,