Example #1
    def __init__(self, name: str = None, mode: str = 'rb', source: FileList = None, vfs: VirtualFileSystem = None,
                 *args, **kwargs):
        """Open/create a new TAR file.

        Args:
            name: Name of file.
            mode: Open mode.
            source: An object that provides a list of filenames to write into the TAR file.
            vfs: Virtual file system to use.
        """

        # store vfs
        self.vfs = vfs

        # mode?
        if mode != 'rb':
            raise ValueError('Modes other than rb are not supported.')

        # get list of files to add to the TAR file
        source = get_object(source)
        self._files = source(name)

        # init stream
        self._out_stream = io.BytesIO()
        self._buffer = b''

        # write to the BytesIO with no compression
        self._tarfile = tarfile.TarFile.open(fileobj=self._out_stream, mode='w')
Example #2
    def open_file(self, filename: str, mode: str) -> VFSFile:
        """Open a file. The handling class is chosen depending on the rootse in the filename.

        Args:
            filename (str): Name of file to open.
            mode (str): Opening mode.

        Returns:
            (VFSFile) File-like object for the given file.
        """
        # split root
        root, filename = VirtualFileSystem.split_root(filename)

        # does root exist?
        if root not in self._roots:
            raise ValueError("Could not find root {0} for file.".format(root))

        # create file object
        from pyobs.object import get_object

        fd = get_object(self._roots[root],
                        object_class=VFSFile,
                        name=filename,
                        mode=mode)

        # return it
        return fd
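The roots mapping consulted by open_file() is itself built from configuration dicts that get_object() can resolve into VFSFile classes. A minimal sketch of how this might be wired up and used; the root name, the LocalFile class path, its root parameter, and the roots keyword are assumptions for illustration, not part of the example above.

from pyobs.vfs import VirtualFileSystem

# Hypothetical root configuration; get_object() later turns the dict into a
# VFSFile instance, with name and mode passed in as shown in open_file() above.
vfs = VirtualFileSystem(roots={
    'cache': {'class': 'pyobs.vfs.LocalFile', 'root': '/tmp/pyobs-cache'},
})

# '/cache/' selects the root, the remainder is the filename handed to the class
with vfs.open_file('/cache/image.fits', 'rb') as fd:
    data = fd.read()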
Example #3
    def __init__(
        self,
        roof: typing.Union[str, IRoof],
        telescope: typing.Union[str, ITelescope],
        flatfield: typing.Union[str, IFlatField],
        functions: dict,
        priorities: typing.Union[dict, SkyflatPriorities],
        min_exptime: float = 0.5,
        max_exptime: float = 5,
        timespan: float = 7200,
        filter_change: float = 30,
        count: int = 20,
        readout: dict = None,
        *args,
        **kwargs,
    ):
        """Init a new SkyFlats script.

        Args:
            roof: Roof to use
            telescope: Telescope to use
            flatfield: FlatFielder to use
            functions: Dict with solalt-exptime functions for all filters/binning
            priorities: SkyflatPriorities object that returns priorities
            min_exptime: Minimum exposure time for flats
            max_exptime: Maximum exposure time for flats
            timespan: Timespan from now that should be scheduled [s]
            filter_change: Time required for filter change [s]
            count: Number of flats to schedule
            readout: Dictionary with readout times (in sec) per binning (as BxB).
        """
        Script.__init__(self, *args, **kwargs)

        # store modules
        self._roof = roof
        self._telescope = telescope
        self._flatfield = flatfield

        # stuff
        self._count = count

        # get archive and priorities
        priorities = get_object(priorities, SkyflatPriorities)

        # create scheduler
        self._scheduler = Scheduler(
            functions,
            priorities,
            self.observer,
            min_exptime=min_exptime,
            max_exptime=max_exptime,
            timespan=timespan,
            filter_change=filter_change,
            count=count,
            readout=readout,
        )
Example #4
    def __init__(
        self,
        archive: Union[Dict[str, Any], Archive],
        pipeline: Union[Dict[str, Any], Pipeline],
        worker_procs: int = 4,
        filenames_calib: str = FILENAME,
        min_flats: int = 10,
        store_local: Optional[str] = None,
        create_calibs: bool = True,
        calib_science: bool = True,
        **kwargs: Any,
    ):
        """Creates a Night object for reducing a given night.

        Args:
            archive: Archive to fetch images from and write results to.
            pipeline: Science pipeline.
            worker_procs: Number of worker processes.
            filenames_calib: Filename pattern for master calibration files.
            min_flats: Minimum number of raw frames to create flat field.
            store_local: If set, files are stored in the given local directory instead of being uploaded to the archive.
            create_calibs: If False, no calibration files are created for night.
            calib_science: If False, no science frames are calibrated.
        """

        # get archive and science pipeline
        self._archive = get_object(archive, Archive)
        self._pipeline = get_object(pipeline, Pipeline, archive=archive)

        # stuff
        self._worker_processes = worker_procs
        self._min_flats = min_flats
        self._store_local = store_local
        self._create_calibs = create_calibs
        self._calib_science = calib_science

        # cache for master calibration frames
        self._master_frames: Dict[Tuple[ImageType, str, str, Optional[str]],
                                  Image] = {}

        # default filename patterns
        self._fmt_calib = FilenameFormatter(filenames_calib)
Example #5
    def __init__(
        self,
        focuser: Union[str, IFocuser],
        camera: Union[str, IImageGrabber],
        series: FocusSeries,
        offset: bool = False,
        filters: Optional[Union[str, IFilters]] = None,
        filter_name: Optional[str] = None,
        binning: Optional[int] = None,
        **kwargs: Any,
    ):
        """Initialize a new auto focus system.

        Args:
            focuser: Name of IFocuser.
            camera: Name of IImageGrabber.
            series: Focus series class to use.
            offset: If True, offsets are used instead of absolute focus values.
            filters: Name of IFilters, if any.
            filter_name: Name of filter to set.
            binning: Binning to use for the focus series.
        """
        Module.__init__(self, **kwargs)

        # store focuser and camera
        self._focuser = focuser
        self._camera = camera
        self._filters = filters
        self._offset = offset
        self._abort = threading.Event()

        # create focus series
        self._series: FocusSeries = get_object(series, FocusSeries)

        # init camera settings mixin
        CameraSettingsMixin.__init__(self,
                                     filters=filters,
                                     filter_name=filter_name,
                                     binning=binning,
                                     **kwargs)

        # register exceptions
        if isinstance(camera, str):
            exc.register_exception(
                exc.RemoteError,
                3,
                timespan=600,
                module=camera,
                callback=self._default_remote_error_callback)
        if isinstance(focuser, str):
            exc.register_exception(
                exc.RemoteError,
                3,
                timespan=600,
                module=focuser,
                callback=self._default_remote_error_callback)
Example #6
    async def run(self, **kwargs: Any) -> None:
        """Move telescope to pointing."""

        # get telescope
        log.info("Getting proxy for telescope...")
        telescope = await self.proxy(self._telescope, ITelescope)

        # pointing
        pointing = get_object(self._pointing, SkyFlatsBasePointing, observer=self.observer)

        # point
        await pointing(telescope)
        log.info("Finished pointing telescope.")
Example #7
    def __init__(self, site: str, night: str,
                 archive: Union[dict, Archive], science: Union[dict, Pipeline], worker_procs: int = 4,
                 filenames_calib: str = '{SITEID}{TELID}-{INSTRUME}-{DAY-OBS|date:}-'
                                        '{IMAGETYP}-{XBINNING}x{YBINNING}{FILTER|filter}.fits',
                 flats_combine: Union[str, Image.CombineMethod] = Image.CombineMethod.MEDIAN, flats_min_raw: int = 10,
                 *args, **kwargs):
        """Creates a Night object for reducing a given night.

        Args:
            site: Telescope site to use.
            night: Night to reduce.
            archive: Archive to fetch images from and write results to.
            science: Science pipeline.
            worker_procs: Number of worker processes.
            filenames_calib: Filename pattern for master calibration files.
            flats_combine: Method to combine flats.
            flats_min_raw: Minimum number of raw frames to create flat field.
            *args:
            **kwargs:
        """

        # get archive and science pipeline
        self._archive = get_object(archive, Archive)
        self._science_pipeline = get_object(science, Pipeline)

        # stuff
        self._site = site
        self._night = night
        self._worker_processes = worker_procs
        self._flats_combine = Image.CombineMethod(flats_combine) if isinstance(flats_combine, str) else flats_combine
        self._flats_min_raw = flats_min_raw

        # cache for master calibration frames
        self._master_frames: Dict[Tuple[Type, str, str, Optional[str]], Image] = {}

        # default filename patterns
        self._fmt_calib = FilenameFormatter(filenames_calib)
Example #8
    def __init__(self,
                 photometry: Union[dict, Photometry] = None,
                 astrometry: Union[dict, Astrometry] = None,
                 masks: Dict[str, Union[Image, str]] = None,
                 filenames: str = None,
                 *args,
                 **kwargs):
        """Pipeline for science images.

        Args:
            photometry: Photometry object. If None, no photometry is performed.
            astrometry: Astrometry object. If None, no astrometry is performed.
            masks: Dictionary with masks to use for each binning given as, e.g., 1x1.
            filenames: Filename pattern for science images.
            *args:
            **kwargs:
        """
        # get photometry and astrometry
        self._photometry = None if photometry is None else get_object(
            photometry, Photometry)
        self._astrometry = None if astrometry is None else get_object(
            astrometry, Astrometry)

        # masks
        self._masks = {}
        if masks is not None:
            for binning, mask in masks.items():
                if isinstance(mask, Image):
                    self._masks[binning] = mask
                elif isinstance(mask, str):
                    self._masks[binning] = Image.from_file(mask)
                else:
                    raise ValueError('Unknown mask format.')

        # default filename patterns
        if filenames is None:
            filenames = '{SITEID}{TELID}-{INSTRUME}-{DAY-OBS|date:}-{FRAMENUM|string:04d}-{IMAGETYP|type}01.fits'
        self._formatter = FilenameFormatter(filenames)
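As in the other examples, the photometry and astrometry objects are typically given as configuration dicts that get_object() resolves at runtime. A possible configuration for this pipeline might look as follows; the class paths, URL, and mask filename are purely illustrative assumptions.

# Hypothetical configuration; class paths and file names are assumptions.
science_cfg = {
    'photometry': {'class': 'pyobs.images.processors.photometry.SepPhotometry'},
    'astrometry': {'class': 'pyobs.images.processors.astrometry.AstrometryDotNet',
                   'url': 'http://localhost:8080'},
    'masks': {'1x1': '/path/to/mask_1x1.fits'},  # str values are loaded via Image.from_file()
}
# the dict would then be unpacked into the pipeline constructor shown above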
Example #9
    def __init__(self,
                 source_detection: SourceDetection,
                 radius_column: str = "radius",
                 **kwargs: Any):
        """Initialize a new projection focus series.

        Args:
            source_detection: Source detection to use for estimating PSF sizes.
            radius_column: Name of the catalog column containing the radius.
        """

        # stuff
        self._source_detection: SourceDetection = get_object(
            source_detection, SourceDetection)
        self._radius_col = radius_column
        self._data: List[Dict[str, float]] = []
Example #10
    async def __call__(self, image: Image) -> Image:
        """Processes an image and stores new exposure time in exp_time attribute.

        Args:
            image: Image to process.

        Returns:
            Original image.
        """

        # get object
        source_detection = get_object(self._source_detection, SourceDetection)

        # do photometry and get copy of catalog
        catalog = (await source_detection(image)).catalog
        if catalog is None:
            log.info("No catalog found in image.")
            return image

        # sort catalog by peak flux, descending, so the brightest star comes first
        catalog.sort("peak", reverse=True)

        # saturation level
        if "DET-SATU" in image.header and "DET-GAIN" in image.header:
            saturation = image.header["DET-SATU"] / image.header["DET-GAIN"]
        else:
            saturation = 50000

        # get max peak flux that we allow
        max_peak = saturation * self._saturated

        # filter out all stars that are saturated
        catalog = catalog[catalog["peak"] <= max_peak]
        if len(catalog) == 0:
            log.info("No unsaturated stars found in image.")
            return image

        # get brightest star, get its peak flux and store its coordinates
        star = catalog[0]
        peak = star["peak"]
        log.info("Found peak of %.2f at %.1fx%.1f.", star["peak"], star["x"],
                 star["y"])
        self.coordinates = (star["x"], star["y"])

        # get exposure time of image
        exp_time = image.header["EXPTIME"]

        # calculate new exposure time and return it
        self.exp_time = exp_time / (peak - self._bias) * (max_peak - self._bias)
        return image
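The final assignment scales the old exposure time so that the bias-subtracted peak of the selected star would land at max_peak. A small worked example with made-up numbers:

# Worked example of the scaling above (all numbers are made up):
# with exp_time = 2 s, bias = 100, peak = 12600 and max_peak = 35100,
# the bias-subtracted peak should grow by (35100 - 100) / (12600 - 100) = 2.8,
# so the new exposure time is 2 s * 2.8 = 5.6 s.
exp_time, bias, peak, max_peak = 2.0, 100.0, 12600.0, 35100.0
new_exp_time = exp_time / (peak - bias) * (max_peak - bias)
assert abs(new_exp_time - 5.6) < 1e-9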
Example #11
    def __init__(
        self,
        archive: Union[Dict[str, Any], Archive],
        site: str,
        instrument: str,
        filter_names: List[str],
        binnings: List[int],
        *args,
        **kwargs,
    ):
        """Initialize new skyflat priorities based on an archive.

        Args:
            archive: Archive to use.
            site: Telescope site to use.
            instrument: Instrument to use.
            filter_names: List of filter names.
            binnings: List of binnings.
        """
        SkyflatPriorities.__init__(self)
        self._archive = get_object(archive, Archive)
        self._filter_names = filter_names
        self._binnings = binnings
        self._site = site
        self._instrument = instrument
Example #12
    def __init__(self,
                 archive: Union[Dict[str, Any], Archive],
                 max_cache_size: int = 20,
                 **kwargs: Any):
        """Init a new image calibration pipeline step.

        Args:
            archive: Archive to fetch calibration frames from.
            max_cache_size: Maximum number of master calibration frames to keep in the cache.
        """
        ImageProcessor.__init__(self, **kwargs)

        # store
        self._max_cache_size = max_cache_size

        # get archive
        self._archive = get_object(archive, Archive)
Example #13
    def __init__(self,
                 flatfield: typing.Union[str, IFlatField],
                 functions: typing.Dict[str, str],
                 priorities: typing.Union[dict, SkyflatPriorities],
                 min_exptime: float = 0.5,
                 max_exptime: float = 5,
                 timespan: float = 7200,
                 filter_change: float = 30,
                 count: int = 20,
                 *args,
                 **kwargs):
        """Initialize a new flat field scheduler.

        Args:
            flatfield: Flat field module to use
            functions: Dict with flat functions
            priorities: Class handling priorities
            min_exptime: Minimum exposure time [s]
            max_exptime: Maximum exposure time [s]
            timespan: Timespan from start that should be scheduled [s]
            filter_change: Time required for filter change [s]
            count: Number of flats to take per filter/binning
        """
        Module.__init__(self, *args, **kwargs)

        # store
        self._flatfield = flatfield
        self._count = count

        # abort
        self._abort = threading.Event()

        # priorities
        prio = get_object(priorities, SkyflatPriorities)

        # create scheduler
        self._scheduler = Scheduler(functions,
                                    prio,
                                    self.observer,
                                    min_exptime=min_exptime,
                                    max_exptime=max_exptime,
                                    timespan=timespan,
                                    filter_change=filter_change,
                                    count=count)
Example #14
    def __init__(self,
                 name: str = None,
                 label: str = None,
                 comm: Union[Comm, dict] = None,
                 *args,
                 **kwargs):
        """Initializes a new pyobs module.

        Args:
            name: Name of module. If None, ID from comm object is used.
            label: Label for module. If None, name is used.
            comm: Comm object to use
        """
        Object.__init__(self, *args, **kwargs)

        # get list of client interfaces
        self._interfaces: List[Type] = []
        self._methods: Dict[str, Tuple[Callable, inspect.Signature]] = {}
        self._get_interfaces_and_methods()

        # get configuration options, i.e. all parameters from c'tor
        self._config_options = self._get_config_options()

        # comm object
        self.comm: Comm
        if comm is None:
            self.comm = DummyComm()
        elif isinstance(comm, Comm):
            self.comm = comm
        elif isinstance(comm, dict):
            log.info('Creating comm object...')
            self.comm = get_object(comm)
        else:
            raise ValueError('Invalid Comm object')

        # name and label
        self._name: str = name if name is not None else self.comm.name
        self._label: str = label if label is not None else self._name
Example #15
    def open_file(self,
                  filename: str,
                  mode: str,
                  compression: bool = None) -> VFSFile:
        """Open a file. The handling class is chosen depending on the rootse in the filename.

        Args:
            filename (str): Name of file to open.
            mode (str): Opening mode.
            compression (bool): Automatically (de)compress data if True. Automatically determine from filename if None.

        Returns:
            (VFSFile) File-like object for the given file.
        """
        from .gzippipe import GzipReader, GzipWriter

        # split root
        root, filename = VirtualFileSystem.split_root(filename)

        # does root exist?
        if root not in self._roots:
            raise ValueError('Could not find root {0} for file.'.format(root))

        # create file object
        fd = get_object(self._roots[root], name=filename, mode=mode)

        # compression?
        if compression or (compression is None and
                           os.path.splitext(filename)[1] in self._compression):
            # create pipe
            if 'w' in mode:
                fd = GzipWriter(fd, close_fd=True)
            else:
                fd = GzipReader(fd, close_fd=True)

        # return it
        return fd
Example #16
    def __init__(self,
                 config: str,
                 log_file: Optional[str] = None,
                 log_level: str = "info",
                 **kwargs: Any):
        """Initializes a pyobs application.

        Args:
            config: Name of config file.
            log_file: Name of log file, if any.
            log_level: Logging level.
        """

        # get config name without path and extension
        self._config = config

        # formatter for logging, and list of logging handlers
        formatter = logging.Formatter(
            "%(asctime)s [%(levelname)s] %(filename)s:%(lineno)d %(message)s")
        handlers = []

        # create stdout logging handler
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(formatter)
        handlers.append(stream_handler)

        # create file logging handler, if log file is given
        if log_file is not None:
            # on Windows, use a FileHandler; otherwise use a WatchedFileHandler, which plays well with logrotate
            if platform.system() == "Windows":
                file_handler = logging.FileHandler(log_file)
            else:
                file_handler = logging.handlers.WatchedFileHandler(log_file)

            # add log file handler
            file_handler.setFormatter(formatter)
            handlers.append(file_handler)

        # basic setup
        logging.basicConfig(handlers=handlers,
                            level=logging.getLevelName(log_level.upper()))
        logging.captureWarnings(True)
        warnings.simplefilter("always", DeprecationWarning)

        # disable tornado logger
        logging.getLogger("tornado.access").disabled = True

        # set pyobs logger
        global log
        log = logging.getLogger(__name__)

        # hack threading to set thread names on OS level
        self._hack_threading()

        # load config
        log.info("Loading configuration from {0:s}...".format(self._config))
        with StringIO(pre_process_yaml(self._config)) as f:
            cfg: Dict[str, Any] = yaml.safe_load(f)

        # create module and open it
        log.info("Creating module...")
        self._module = get_object(cfg, Module)
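The dict loaded from YAML is handed directly to get_object(), which looks up the class named in it and instantiates it with the remaining keys as constructor arguments. A minimal sketch of what such a configuration might look like once parsed; the StandAlone module and its parameters are illustrative assumptions.

# What yaml.safe_load() might return for a minimal configuration; the module
# class and its parameters are assumptions for illustration.
cfg = {
    'class': 'pyobs.modules.test.StandAlone',  # class that get_object() will import
    'message': 'Hello world',                  # forwarded to its constructor
    'interval': 10,
}
# get_object(cfg, Module) would then create and return a StandAlone instance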
Example #17
    def __init__(self,
                 functions: Dict[str, Union[str, Dict[str, str]]] = None,
                 target_count: float = 30000,
                 min_exptime: float = 0.5,
                 max_exptime: float = 5,
                 test_frame: tuple = None,
                 counts_frame: tuple = None,
                 allowed_offset_frac: float = 0.2,
                 min_counts: int = 100,
                 pointing: Union[dict, SkyFlatsBasePointing] = None,
                 combine_binnings: bool = True,
                 observer: Observer = None,
                 vfs: VirtualFileSystem = None,
                 callback: Callable = None,
                 *args,
                 **kwargs):
        """Initialize a new flat fielder.

        Depending on the value of combine_binnings, functions must be in a specific format:

            1. combine_binnings=True:
                functions must be a dictionary of filter->function pairs, like
                {'clear': 'exp(-0.9*(h+3.9))'}
                In this case it is assumed that the average flux per pixel is directly correlated to the binning,
                i.e. a flat with 3x3 binning has on average 9 times as much flux per pixel.
            2. combine_binnings=False:
                functions must be nested one level deeper within the binning, like
                {'1x1': {'clear': 'exp(-0.9*(h+3.9))'}}

        Args:
            functions: Function f(h) for each filter to describe ideal exposure time as a function of solar
                elevation h, i.e. something like exp(-0.9*(h+3.9))
            target_count: Count rate to aim for.
            min_exptime: Minimum exposure time.
            max_exptime: Maximum exposure time.
            test_frame: Tuple (left, top, width, height) in percent that describes the frame for on-sky testing.
            counts_frame: Tuple (left, top, width, height) in percent that describes the frame for calculating the mean
                count rate.
            allowed_offset_frac: Offset from target_count (given as a fraction of it) that is still allowed for a good
                flat field.
            min_counts: Minimum counts in frames.
            pointing: Pointing to use.
            combine_binnings: Whether different binnings use the same functions.
            observer: Observer to use.
            vfs: VFS to use.
            callback: Callback function for statistics.
        """

        # store stuff
        self._target_count = target_count
        self._min_exptime = min_exptime
        self._max_exptime = max_exptime
        self._test_frame = (45, 45, 10, 10) if test_frame is None else test_frame
        self._counts_frame = (25, 25, 75, 75) if counts_frame is None else counts_frame
        self._allowed_offset_frac = allowed_offset_frac
        self._min_counts = min_counts
        self._combine_binnings = combine_binnings
        self._observer = observer
        self._vfs = vfs
        self._callback = callback

        # parse function
        if functions is None:
            functions = {}
        self._functions: Dict[Union[str, Tuple[str, str]], Any]
        if combine_binnings:
            # in the simple case, the key is just the filter
            self._functions = {
                filter_name: Parser().parse(func)
                for filter_name, func in functions.items()
            }
        else:
            # in case of separate binnings, the key to the functions dict is a tuple of binning and filter
            self._functions = {}
            for binning, filter_funcs in functions.items():
                # each value must be a dict of filter->function pairs
                if isinstance(filter_funcs, dict):
                    for filter_name, func in filter_funcs.items():
                        self._functions[binning, filter_name] = Parser().parse(func)
                else:
                    raise ValueError('functions must be a dict of binnings, if combine_binnings is False.')

        # abort event
        self._abort = threading.Event()

        # pointing
        self._pointing = get_object(pointing,
                                    SkyFlatsBasePointing,
                                    observer=self._observer)

        # state machine
        self._state = FlatFielder.State.INIT

        # current exposure time
        self._exptime = None

        # median of last image
        self._median = None

        # exposures to do
        self._exposures_total = 0
        self._exposures_done = 0
        self._exptime_done = 0

        # bias level
        self._bias_level = None

        # which twilight are we in?
        self._twilight = None

        # current request
        self._cur_filter: Optional[str] = None
        self._cur_binning: Optional[int] = None
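To illustrate the two formats described in the docstring, the functions argument could be passed in either of these shapes (filter names and coefficients are made-up examples):

# combine_binnings=True: one function of solar elevation h per filter
functions_combined = {
    'clear': 'exp(-0.9*(h+3.9))',
    'B': 'exp(-1.0*(h+4.2))',
}

# combine_binnings=False: functions nested one level deeper, keyed by binning
functions_per_binning = {
    '1x1': {'clear': 'exp(-0.9*(h+3.9))'},
    '2x2': {'clear': 'exp(-0.7*(h+3.5))'},
}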