def load_c_library(name, path=None, mode=ctypes.DEFAULT_MODE, **kwargs):
    """Utility function to load a shared/dynamically linked library (.so/.dylib/.dll).

    The name and location of the shared library can be manually specified with the library_path
    argument, otherwise the ctypes.util.find_library function will be used to try to locate based
    on library_name.

    Args:
        name (str): name of the library (without 'lib' prefix or any suffixes, e.g. 'fli').
        path (str, optional): path to the library e.g. '/usr/local/lib/libfli.so'.
        mode (int, optional): mode in which to load the library, see dlopen(3) man page for
            details. Should be one of ctypes.RTLD_GLOBAL, ctypes.RTLD_LOCAL, or
            ctypes.DEFAULT_MODE. Default is ctypes.DEFAULT_MODE.

    Returns:
        ctypes.CDLL

    Raises:
        pocs.utils.error.NotFound: raised if library_path not given & find_library fails to
            locate the library.
        OSError: raises if the ctypes.CDLL loader cannot load the library.
    """
    if mode is None:
        # Interpret a value of None as the default.
        mode = ctypes.DEFAULT_MODE
    # Open library
    logger.debug(f"Opening {name} library")
    if not path:
        path = ctypes.util.find_library(name)
    if not path:
        # Fixed typo in the error message ("Cound" -> "Could").
        raise error.NotFound(f"Could not find {name} library!")
    # This CDLL loader will raise OSError if the library could not be loaded
    return ctypes.CDLL(path, mode=mode)
def config_getter(context, key, parse=True, default=None):
    """Get an item from the config server by key name, using dotted notation (e.g. 'location.elevation')

    If no key is given, returns the entire config.
    """
    host = context.obj.get('host')
    port = context.obj.get('port')

    # The nargs=-1 makes this a tuple so we get first entry.
    key = key[0] if key else None

    logger.debug(f'Getting config key={key!r}')
    try:
        config_entry = get_config(key=key, host=host, port=port, parse=parse, default=default)
    except Exception as e:
        msg = f'Error while trying to get config: {e!r}'
        logger.error(msg)
        click.secho(msg, fg='red')
        return

    logger.debug(f'Config server response: config_entry={config_entry!r}')
    click.echo(config_entry)
def get_data(self):
    """Download the latest stats CSV and store a sorted dataframe on ``self.df``."""
    logger.debug(f'Getting recent stats from {BASE_URL}')
    self._stats_path = download_file(f'{BASE_URL}',
                                     cache='update',
                                     show_progress=False,
                                     pkgname='panoptes')
    recent = pd.read_csv(self._stats_path).convert_dtypes(convert_integer=False)
    self.df = recent.sort_index()
def get_devices(self):
    """Build a name->port mapping from the ``cameras.devices`` config entries.

    Falls back from ``name`` to ``model`` and from ``port`` to ``serial_number``
    when the preferred key is missing or falsy.
    """
    logger.debug(f'Getting camera device connection config for {self}')

    camera_devices = {
        (cam_info.get('name') or cam_info.get('model')):
            (cam_info.get('port') or cam_info.get('serial_number'))
        for cam_info in get_config('cameras.devices')
    }

    logger.trace(f'camera_devices={camera_devices!r}')

    return camera_devices
def send_message(self, msg, timestamp):
    """Tweet ``msg``, appending ``timestamp`` when ``self.output_timestamp`` is set."""
    status = '{} - {}'.format(msg, timestamp) if self.output_timestamp else msg
    try:
        # update_status returns a tweepy Status instance, but we
        # drop it on the floor because we don't have anything we
        # can do with it.
        self.api.update_status(status)
    except tweepy.TweepError:  # pragma: no cover
        logger.debug('Error tweeting message. Please check your Twitter configuration.')
def cr2_to_pgm(cr2_fname, pgm_fname=None, overwrite=True, *args, **kwargs):  # pragma: no cover
    """ Convert CR2 file to PGM

    Converts a raw Canon CR2 file to a netpbm PGM file via `dcraw`. Assumes
    `dcraw` is installed on the system

    Note:
        This is a blocking call

    Arguments:
        cr2_fname {str} -- Name of CR2 file to convert
        **kwargs {dict} -- Additional keywords to pass to script

    Keyword Arguments:
        pgm_fname {str} -- Name of PGM file to output, if None (default) then
                           use same name as CR2 (default: {None})
        dcraw {str} -- Path to installed `dcraw` (default: {'dcraw'})
        overwrite {bool} -- A bool indicating if existing PGM should be overwritten
                            (default: {True})

    Returns:
        str -- Filename of PGM that was created

    Raises:
        error.InvalidCommand: If `dcraw` is not installed.
        error.InvalidSystemCommand: If the `dcraw` conversion fails.
    """
    dcraw = shutil.which('dcraw')
    if dcraw is None:
        raise error.InvalidCommand('dcraw not found')

    if pgm_fname is None:
        pgm_fname = cr2_fname.replace('.cr2', '.pgm')

    if os.path.exists(pgm_fname) and not overwrite:
        logger.warning(f"PGM file exists, returning existing file: {pgm_fname}")
    else:
        try:
            # Build the command as an argument list so filenames containing
            # whitespace survive intact (the previous string-format-then-split
            # approach broke such paths).
            cmd_list = [dcraw, '-t', '0', '-D', '-4', cr2_fname]
            logger.debug("PGM Conversion command: \n {}".format(cmd_list))

            # Run the command
            if subprocess.check_call(cmd_list) == 0:
                logger.debug("PGM Conversion command successful")
        except subprocess.CalledProcessError as err:
            raise error.InvalidSystemCommand(
                msg="File: {} \n err: {}".format(cr2_fname, err))

    return pgm_fname
def fpack(fits_fname, unpack=False, overwrite=True):
    """Compress/Decompress a FITS file

    Uses `fpack` (or `funpack` if `unpack=True`) to compress a FITS file

    Args:
        fits_fname ({str}): Name of a FITS file that contains a WCS.
        unpack ({bool}, optional): file should decompressed instead of compressed, default False.
        overwrite (bool, optional): If an existing destination file may be removed, default True.

    Returns:
        str: Filename of compressed/decompressed file, or the unchanged input
            filename if the (f|fun)pack executable is not available.

    Raises:
        FileExistsError: If the destination exists and `overwrite=False`.
    """
    assert os.path.exists(fits_fname), warn(
        "No file exists at: {}".format(fits_fname))

    if unpack:
        fpack = shutil.which('funpack')
        run_cmd = [fpack, '-D', fits_fname]
        out_file = fits_fname.replace('.fz', '')
    else:
        fpack = shutil.which('fpack')
        run_cmd = [fpack, '-D', '-Y', fits_fname]
        out_file = fits_fname.replace('.fits', '.fits.fz')

    # Bail out *before* touching the destination file. Previously this check
    # ran after the os.remove() below, so a missing executable could still
    # delete an existing output file.
    if fpack is None:
        warn(
            "fpack not found (try installing cfitsio). File has not been changed"
        )
        return fits_fname

    if os.path.exists(out_file):
        if overwrite is False:
            raise FileExistsError(
                'Destination file already exists at location and overwrite=False'
            )
        else:
            os.remove(out_file)

    logger.debug("fpack command: {}".format(run_cmd))

    proc = subprocess.Popen(run_cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    try:
        output, errs = proc.communicate(timeout=5)
    except subprocess.TimeoutExpired:
        # Best effort: kill the hung process and collect whatever output exists.
        proc.kill()
        output, errs = proc.communicate()

    return out_file
def config_setter(context, key, new_value, parse=True):
    """Set an item in the config server. """
    server = {name: context.obj.get(name) for name in ('host', 'port')}
    logger.debug(
        f"Setting config key={key!r} new_value={new_value!r} on {server['host']}:{server['port']}"
    )
    click.echo(set_config(key, new_value, parse=parse, **server))
def _make_pretty_from_cr2(fname, title=None, **kwargs):
    """Create a pretty JPG from a CR2 file via the external `cr2-to-jpg` script.

    Args:
        fname (str): Path to the CR2 file.
        title (str, optional): Title passed to the conversion script.

    Returns:
        str: Path of the generated JPG (input path with a `.jpg` extension).

    Raises:
        error.InvalidCommand: If the conversion script exits non-zero.
    """
    script_name = shutil.which('cr2-to-jpg')
    cmd = [script_name, fname]
    if title:
        cmd.append(title)

    logger.debug(f'Pretty cr2 command: {cmd!r}')

    try:
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        logger.debug(f'Pretty CR2 output={output!r}')
    except subprocess.CalledProcessError as e:
        raise error.InvalidCommand(
            f"Error executing {script_name}: {e.output!r}\nCommand: {cmd}")

    # Swap only the extension. A bare fname.replace('cr2', 'jpg') would also
    # rewrite any 'cr2' occurring earlier in the path (e.g. a 'cr2' directory).
    return os.path.splitext(fname)[0] + '.jpg'
def crop_data(data, box_width=200, center=None, data_only=True, wcs=None, **kwargs):
    """Return a cropped portion of the image

    Shape is a box centered around the middle of the data

    Args:
        data (`numpy.array`): Array of data.
        box_width (int, optional): Size of box width in pixels, defaults to 200px.
        center (tuple(int, int), optional): Crop around set of coords, default to image center.
        data_only (bool, optional): If True (default), return only data. If False
            return the `Cutout2D` object.
        wcs (None|`astropy.wcs.WCS`, optional): A valid World Coordinate System (WCS) that
            will be cropped along with the data if provided.

    Returns:
        np.array: A clipped (thumbnailed) version of the data if `data_only=True`, otherwise
            a `astropy.nddata.Cutout2D` object.
    """
    # NOTE(review): only axis 0 is checked against box_width; data that is too
    # narrow along axis 1 passes this assert — confirm whether intentional.
    assert data.shape[
        0] >= box_width, f"Can't clip data, it's smaller than {box_width} ({data.shape})"
    # Get the center
    if center is None:
        x_len, y_len = data.shape
        x_center = int(x_len / 2)
        y_center = int(y_len / 2)
    else:
        # center is interpreted as (row, column): center[0] -> y, center[1] -> x.
        y_center = int(center[0])
        x_center = int(center[1])

    logger.debug(f"Using center: {x_center} {y_center}")
    logger.debug(f"Box width: {box_width}")

    # NOTE(review): Cutout2D's position argument is documented as (x, y); the
    # (y_center, x_center) ordering here mirrors the swapped naming above and is
    # presumably deliberate for this project's convention — verify with callers.
    cutout = Cutout2D(data, (y_center, x_center), box_width, wcs=wcs)

    if data_only:
        return cutout.data

    return cutout
def write_fits(data, header, filename, exposure_event=None, **kwargs):
    """Write FITS file to requested location.

    >>> from panoptes.utils.images import fits as fits_utils
    >>> data = np.random.normal(size=100)
    >>> header = { 'FILE': 'delete_me', 'TEST': True }
    >>> filename = str(getfixture('tmpdir').join('temp.fits'))
    >>> fits_utils.write_fits(data, header, filename)
    >>> assert os.path.exists(filename)
    >>> fits_utils.getval(filename, 'FILE')
    'delete_me'
    >>> data2 = fits_utils.getdata(filename)
    >>> assert np.array_equal(data, data2)

    Args:
        data (array_like): The data to be written.
        header (dict): Dictionary of items to be saved in header.
        filename (str): Path to filename for output.
        exposure_event (None|`threading.Event`, optional): A `threading.Event` that
            can be triggered when the image is written.
        kwargs (dict): Options that are passed to the `astropy.io.fits.PrimaryHDU.writeto`
            method.
    """
    if not isinstance(header, fits.Header):
        header = fits.Header(header)

    hdu = fits.PrimaryHDU(data, header=header)

    # Create directories if required.
    if os.path.dirname(filename):
        os.makedirs(os.path.dirname(filename), mode=0o775, exist_ok=True)

    try:
        hdu.writeto(filename, **kwargs)
    except OSError as err:
        # Include the destination path (the message previously contained the
        # literal placeholder "(unknown)" and no interpolation).
        logger.error(f'Error writing image to {filename}: {err!r}')
    else:
        logger.debug(f'Image written to {filename}')
    finally:
        # Signal any waiting watcher whether or not the write succeeded.
        if exposure_event:
            exposure_event.set()
def __init__(self, **kwargs):
    """Download the latest observations CSV and initialize the widget parameters."""
    super().__init__(**kwargs)
    logger.debug(f'Getting recent stats from {BASE_URL}')
    # cache='update' refreshes the cached copy of the download — see astropy
    # `download_file` docs.
    self._observations_path = download_file(f'{BASE_URL}',
                                            cache='update',
                                            show_progress=False,
                                            pkgname='panoptes')
    self._observations_df = pd.read_csv(
        self._observations_path).convert_dtypes()

    # Set up widgets.

    # Set some defaults for the params now that we have data.
    units = sorted(self._observations_df.unit_id.unique())
    # Sentinel first entry meaning "all units" (no unit filter).
    units.insert(0, 'The Whole World! 🌎')

    self.param.unit_id.objects = units
    self.unit_id = [units[0]]

    # Create the source objects.
    self.update_dataset()
def sleep(self, max_sleep=None):
    """Sleep until the timer expires, or for max_sleep, whichever is sooner.

    Args:
        max_sleep: Number of seconds to wait for, or None.
    Returns:
        True if slept for less than time_left(), False otherwise.
    """
    remaining = self.time_left()
    if not remaining:
        # Timer already expired; nothing to wait for.
        return False

    # Cap the wait at max_sleep when one was requested; otherwise wait out
    # the full remaining time.
    if max_sleep and max_sleep < remaining:
        assert max_sleep > 0
        sleep_time = max_sleep
    else:
        sleep_time = remaining

    logger.debug(f'Sleeping for {sleep_time:.02f} seconds')
    time.sleep(sleep_time)

    return sleep_time < remaining
def restart(self):
    """Restart the timed duration."""
    # Reset the deadline relative to now.
    self.target_time = self.duration + time.monotonic()
    logger.debug(f'Restarting {self}')
def load_config(config_files=None, parse=True, load_local=True):
    """Load configuration information.

    .. note::

        This function is used by the config server and normal config usage should
        be via a running config server.

    This function supports loading of a number of different files. If no options
    are passed to ``config_files`` then the default ``$PANDIR/conf_files/pocs.yaml``
    will be loaded.

    ``config_files`` is a list and loaded in order, so the second entry will overwrite
    any values specified by similarly named keys in the first entry.

    ``config_files`` should be specified by an absolute path, which can exist anywhere
    on the filesystem.

    Local versions of files can override built-in versions and are automatically loaded if
    they exist alongside the specified config path. Local files have a ``<>_local.yaml`` name, where
    ``<>`` is the built-in file.

    Given the following path:

    ::

        /path/to/dir
        |- my_conf.yaml
        |- my_conf_local.yaml

    You can do a ``load_config('/path/to/dir/my_conf.yaml')`` and both versions of the file will
    be loaded, with the values in the local file overriding the non-local. Typically the local
    file would also be ignored by ``git``, etc.

    For example, the ``panoptes.utils.config.server.config_server`` will always save values to
    a local version of the file so the default settings can always be recovered if necessary.

    Local files can be ignored (mostly for testing purposes or for recovering default values)
    with the ``load_local=False`` parameter.

    Args:
        config_files (list, optional): A list of files to load as config,
            see Notes for details of how to specify files.
        parse (bool, optional): If the config file should attempt to create
            objects such as dates, astropy units, etc.
        load_local (bool, optional): If local files should be used, see
            Notes for details.

    Returns:
        dict: A dictionary of config items.
    """
    config = dict()

    config_files = listify(config_files)
    logger.debug(f'Loading config files: config_files={config_files!r}')
    for config_file in config_files:
        try:
            logger.debug(f'Adding config_file={config_file!r} to config dict')
            _add_to_conf(config, config_file, parse=parse)
        except Exception as e:  # pragma: no cover
            logger.warning(f"Problem with config_file={config_file!r}, skipping. {e!r}")

        # Load local version of config
        if load_local:
            # Insert '_local' before the file extension. The previous
            # config_file.replace('.', '_local.') spliced at the *first* dot,
            # which mangled paths containing dots in directory names.
            base, ext = os.path.splitext(config_file)
            local_version = f'{base}_local{ext}'
            if os.path.exists(local_version):
                try:
                    _add_to_conf(config, local_version, parse=parse)
                except Exception as e:  # pragma: no cover
                    logger.warning(f"Problem with local_version={local_version!r}, skipping: {e!r}")

    # parse_config_directories currently only corrects directory names.
    if parse:
        logger.trace(f'Parsing config={config!r}')
        with suppress(KeyError):
            config['directories'] = parse_config_directories(config['directories'])
            logger.trace(f'Config directories parsed: config={config!r}')

    return config
def config_server(config_file,
                  host=None,
                  port=None,
                  load_local=True,
                  save_local=False,
                  auto_start=True,
                  access_logs=None,
                  error_logs='logger',
                  ):
    """Start the config server in a separate process.

    A convenience function to start the config server.

    Args:
        config_file (str or None): The absolute path to the config file to load. Checks for
            PANOPTES_CONFIG_FILE env var and fails if not provided.
        host (str, optional): The config server host. First checks for PANOPTES_CONFIG_HOST
            env var, defaults to 'localhost'.
        port (str or int, optional): The config server port. First checks for PANOPTES_CONFIG_PORT
            env var, defaults to 6563.
        load_local (bool, optional): If local config files should be used when loading, default
            True.
        save_local (bool, optional): If setting new values should auto-save to local file, default
            False.
        auto_start (bool, optional): If server process should be started automatically, default
            True.
        access_logs ('default' or `logger` or `File`-like or None, optional): Controls access logs
            for the gevent WSGIServer. The `default` string will cause access logs to go to stderr.
            The string `logger` will use the panoptes logger. A File-like will write to file. The
            default `None` will turn off all access logs.
        error_logs ('default' or 'logger' or `File`-like or None, optional): Same as `access_logs`
            except we use our `logger` as the default.

    Returns:
        multiprocessing.Process: The process running the config server.
    """
    # Raises KeyError if neither the argument nor the env var is set.
    config_file = config_file or os.environ['PANOPTES_CONFIG_FILE']
    logger.info(f'Starting panoptes-config-server with config_file={config_file!r}')
    config = load_config(config_files=config_file, load_local=load_local)
    logger.success(f'Config server Loaded {len(config)} top-level items')

    # Add an entry to control running of the server.
    config['config_server'] = dict(running=True)

    logger.success(f'{config!r}')

    cut_config = Cut(config)

    # Stash everything the Flask app needs on its config object.
    app.config['config_file'] = config_file
    app.config['save_local'] = save_local
    app.config['load_local'] = load_local
    app.config['POCS'] = config
    app.config['POCS_cut'] = cut_config
    logger.info(f'Config items saved to flask config-server')

    # Set up access and error logs for server.
    access_logs = logger if access_logs == 'logger' else access_logs
    error_logs = logger if error_logs == 'logger' else error_logs

    def start_server(host='localhost', port=6563):
        # Runs in the child process; blocks on serve_forever until killed.
        try:
            logger.info(f'Starting panoptes config server with {host}:{port}')
            http_server = WSGIServer((host, int(port)), app, log=access_logs, error_log=error_logs)
            http_server.serve_forever()
        except OSError:
            logger.warning(f'Problem starting config server, is another config server already running?')
            return None
        except Exception as e:
            logger.warning(f'Problem starting config server: {e!r}')
            return None

    # Explicit arguments win over the environment variables.
    host = host or os.getenv('PANOPTES_CONFIG_HOST', 'localhost')
    port = port or os.getenv('PANOPTES_CONFIG_PORT', 6563)
    cmd_kwargs = dict(host=host, port=port)
    logger.debug(f'Setting up config server process with cmd_kwargs={cmd_kwargs!r}')
    # daemon=True so the server dies with the parent process.
    server_process = Process(target=start_server,
                             daemon=True,
                             kwargs=cmd_kwargs)

    if auto_start:
        server_process.start()

    return server_process
def get_rgb_background(fits_fn,
                       box_size=(84, 84),
                       filter_size=(3, 3),
                       camera_bias=0,
                       estimator='mean',
                       interpolator='zoom',
                       sigma=5,
                       iters=5,
                       exclude_percentile=100,
                       return_separate=False,
                       *args,
                       **kwargs):
    """Get the background for each color channel.

    Most of the options are described in the `photutils.Background2D` page:
    https://photutils.readthedocs.io/en/stable/background.html#d-background-and-noise-estimation

    >>> from panoptes.utils.images import fits as fits_utils
    >>> fits_fn = getfixture('solved_fits_file')

    >>> data = fits_utils.getdata(fits_fn)
    >>> data.mean()
    2236.816...

    >>> rgb_back = get_rgb_background(fits_fn)
    >>> rgb_back.mean()
    2202.392...

    >>> rgb_backs = get_rgb_background(fits_fn, return_separate=True)
    >>> rgb_backs[0]
    <photutils.background.background_2d.Background2D...>

    >>> {color:data.background_rms_median for color, data in zip('rgb', rgb_backs)}
    {'r': 20.566..., 'g': 32.787..., 'b': 23.820...}

    Args:
        fits_fn (str): The filename of the FITS image.
        box_size (tuple, optional): The box size over which to compute the
            2D-Background, default (84, 84).
        filter_size (tuple, optional): The filter size for determining the median,
            default (3, 3).
        camera_bias (int, optional): The built-in camera bias, default 0. A zero camera
            bias means the bias will be considered as part of the background.
        estimator (str, optional): The estimator object to use, default 'mean'.
        interpolator (str, optional): The interpolator object to use, default 'zoom'.
        sigma (int, optional): The sigma on which to filter values, default 5.
        iters (int, optional): The number of iterations to sigma filter, default 5.
        exclude_percentile (int, optional): The percentage of the data (per channel)
            that can be masked, default 100 (i.e. all).
        return_separate (bool, optional): If the function should return a separate array
            for color channel, default False.
        *args: Description
        **kwargs: Description

    Returns:
        `numpy.array`|list: Either a single numpy array representing the entire
        background, or a list of masked numpy arrays in RGB order. The background
        for each channel has full interpolation across all pixels, but the mask covers
        them.
    """
    logger.info(f"Getting background for {fits_fn}")
    logger.debug(
        f"{estimator} {interpolator} {box_size} {filter_size} {camera_bias} σ={sigma} n={iters}"
    )

    # Map the string options to the photutils classes.
    estimators = {
        'sexb': SExtractorBackground,
        'median': MedianBackground,
        'mean': MeanBackground,
        'mmm': MMMBackground
    }
    interpolators = {
        'zoom': BkgZoomInterpolator,
    }

    bkg_estimator = estimators[estimator]()
    interp = interpolators[interpolator]()

    # Remove the fixed bias before estimating the background.
    data = fits_utils.getdata(fits_fn) - camera_bias

    # Get the data per color channel.
    rgb_data = get_rgb_data(data)

    backgrounds = list()
    for color, color_data in zip(['R', 'G', 'B'], rgb_data):
        logger.debug(f'Performing background {color} for {fits_fn}')

        bkg = Background2D(color_data,
                           box_size,
                           filter_size=filter_size,
                           sigma_clip=SigmaClip(sigma=sigma, maxiters=iters),
                           bkg_estimator=bkg_estimator,
                           exclude_percentile=exclude_percentile,
                           mask=color_data.mask,
                           interpolator=interp)

        # Create a masked array for the background
        if return_separate:
            backgrounds.append(bkg)
        else:
            backgrounds.append(
                np.ma.array(data=bkg.background, mask=color_data.mask))
        logger.debug(
            f"{color} Value: {bkg.background_median:.02f} RMS: {bkg.background_rms_median:.02f}"
        )

    if return_separate:
        return backgrounds

    # Create one array for the backgrounds, where any holes are filled with zeros.
    full_background = np.ma.array(backgrounds).sum(0).filled(0)

    return full_background
def get_stamp_slice(x, y, stamp_size=(14, 14), ignore_superpixel=False):
    """Get the slice around a given position with fixed Bayer pattern.

    Given an x,y pixel position, get the slice object for a stamp of a given size
    but make sure the first position corresponds to a red-pixel. This means that
    x,y will not necessarily be at the center of the resulting stamp.

    .. doctest::

        >>> from panoptes.utils.images import bayer
        >>> # Make a super-pixel as represented in numpy (see full stamp below).
        >>> superpixel = np.array(['G2', 'B', 'R', 'G1']).reshape(2, 2)
        >>> superpixel
        array([['G2', 'B'],
               ['R', 'G1']], dtype='<U2')
        >>> # Tile it into a 5x5 grid of super-pixels, i.e. a 10x10 stamp.
        >>> stamp0 = np.tile(superpixel, (5, 5))
        >>> stamp0
        array([['G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1'],
               ['G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1'],
               ['G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1'],
               ['G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1'],
               ['G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1', 'R', 'G1']], dtype='<U2')
        >>> stamp1 = np.arange(100).reshape(10, 10)
        >>> stamp1
        array([[ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9],
               [10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
               [20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
               [30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
               [40, 41, 42, 43, 44, 45, 46, 47, 48, 49],
               [50, 51, 52, 53, 54, 55, 56, 57, 58, 59],
               [60, 61, 62, 63, 64, 65, 66, 67, 68, 69],
               [70, 71, 72, 73, 74, 75, 76, 77, 78, 79],
               [80, 81, 82, 83, 84, 85, 86, 87, 88, 89],
               [90, 91, 92, 93, 94, 95, 96, 97, 98, 99]])
        >>> x = 7
        >>> y = 5
        >>> pixel_index = (y, x)  # y=rows, x=columns
        >>> stamp0[pixel_index]
        'G1'
        >>> stamp1[pixel_index]
        57
        >>> slice0 = bayer.get_stamp_slice(x, y, stamp_size=(6, 6))
        >>> slice0
        (slice(2, 8, None), slice(4, 10, None))
        >>> stamp0[slice0]
        array([['G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1'],
               ['G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1'],
               ['G2', 'B', 'G2', 'B', 'G2', 'B'],
               ['R', 'G1', 'R', 'G1', 'R', 'G1']], dtype='<U2')
        >>> stamp1[slice0]
        array([[24, 25, 26, 27, 28, 29],
               [34, 35, 36, 37, 38, 39],
               [44, 45, 46, 47, 48, 49],
               [54, 55, 56, 57, 58, 59],
               [64, 65, 66, 67, 68, 69],
               [74, 75, 76, 77, 78, 79]])

    The original index had a value of `57`, which is within the center superpixel.

    Notice that the resulting stamp has a super-pixel in the center and is bordered on all sides
    by a complete superpixel. This is required by default; an invalid size raises a RuntimeError.
    We can use `ignore_superpixel=True` to get an odd-sized stamp.

    .. doctest::

        >>> slice1 = bayer.get_stamp_slice(x, y, stamp_size=(5, 5), ignore_superpixel=True)
        >>> slice1
        (slice(3, 8, None), slice(5, 10, None))
        >>> stamp0[slice1]
        array([['G1', 'R', 'G1', 'R', 'G1'],
               ['B', 'G2', 'B', 'G2', 'B'],
               ['G1', 'R', 'G1', 'R', 'G1'],
               ['B', 'G2', 'B', 'G2', 'B'],
               ['G1', 'R', 'G1', 'R', 'G1']], dtype='<U2')
        >>> stamp1[slice1]
        array([[35, 36, 37, 38, 39],
               [45, 46, 47, 48, 49],
               [55, 56, 57, 58, 59],
               [65, 66, 67, 68, 69],
               [75, 76, 77, 78, 79]])

    This puts the requested pixel in the center but does not offer any
    guarantees about the RGGB pattern.

    Args:
        x (float): X pixel position.
        y (float): Y pixel position.
        stamp_size (tuple, optional): The size of the cutout, default (14, 14).
        ignore_superpixel (bool): If superpixels should be ignored, default False.

    Returns:
        `slice`: A slice object for the data.
    """
    # Make sure requested size can have superpixels on each side.
    if not ignore_superpixel:
        for side_length in stamp_size:
            side_length -= 2  # Subtract center superpixel
            # Each side of the center superpixel must hold a whole number of
            # superpixels, i.e. valid sizes are 6, 10, 14, 18, ...
            if side_length / 2 % 2 != 0:
                raise RuntimeError(
                    f"Invalid slice size: {side_length + 2} "
                    f"Slice must have even number of pixels on each side"
                    f"of center superpixel. i.e. 6, 10, 14, 18...")

    # Pixels have nasty 0.5 rounding issues
    x = Decimal(float(x)).to_integral()
    y = Decimal(float(y)).to_integral()
    color = get_pixel_color(x, y)
    logger.debug(f'Found color={color} for x={x} y={y}')

    x_half = int(stamp_size[0] / 2)
    y_half = int(stamp_size[1] / 2)

    x_min = int(x - x_half)
    x_max = int(x + x_half)

    y_min = int(y - y_half)
    y_max = int(y + y_half)

    # Alter the bounds depending on the identified center pixel so the center
    # superpixel is always laid out as:
    #   G2 B
    #   R  G1
    if color == 'R':
        x_min += 1
        x_max += 1
    elif color == 'G2':
        x_min += 1
        x_max += 1
        y_min += 1
        y_max += 1
    elif color == 'B':
        y_min += 1
        y_max += 1

    # if stamp_size is odd add extra
    # NOTE(review): both axes are extended based on stamp_size[0] only — for a
    # non-square odd stamp_size this looks suspect; confirm intended behavior.
    if stamp_size[0] % 2 == 1:
        x_max += 1
        y_max += 1

    logger.debug(f'x_min={x_min}, x_max={x_max}, y_min={y_min}, y_max={y_max}')

    # Numpy indexing is (row, column), so y comes first.
    return (slice(y_min, y_max), slice(x_min, x_max))
def get_solve_field(fname, replace=True, overwrite=True, timeout=30, **kwargs):
    """Convenience function to wait for `solve_field` to finish.

    This function merely passes the `fname` of the image to be solved along to `solve_field`,
    which returns a subprocess.Popen object. This function then waits for that command
    to complete, populates a dictonary with the EXIF informaiton and returns. This is often
    more useful than the raw `solve_field` function.

    Example:

    >>> from panoptes.utils.images import fits as fits_utils

    >>> # Get our fits filename.
    >>> fits_fn = getfixture('unsolved_fits_file')

    >>> # Perform the solve.
    >>> solve_info = fits_utils.get_solve_field(fits_fn)

    >>> # Show solved filename.
    >>> solve_info['solved_fits_file']
    '.../unsolved.fits'

    >>> # Pass a suggested location.
    >>> ra = 15.23
    >>> dec = 90
    >>> radius = 5  # deg
    >>> solve_info = fits_utils.solve_field(fits_fn, ra=ra, dec=dec, radius=radius)

    >>> # Pass kwargs to `solve-field` program.
    >>> solve_kwargs = {'--pnm': '/tmp/awesome.bmp', '--overwrite': True}
    >>> solve_info = fits_utils.get_solve_field(fits_fn, **solve_kwargs, skip_solved=False)
    >>> assert os.path.exists('/tmp/awesome.bmp')

    Args:
        fname ({str}): Name of FITS file to be solved.
        replace (bool, optional): Saves the WCS back to the original file,
            otherwise output base filename with `.new` extension. Default True.
        overwrite (bool, optional): Clobber file, default True. Required if `replace=True`.
        timeout (int, optional): The timeout for solving, default 30 seconds.
        **kwargs ({dict}): Options to pass to `solve_field` should start with `--`.

    Returns:
        dict: Keyword information from the solved field.
    """
    skip_solved = kwargs.get('skip_solved', True)

    out_dict = {}
    output = None
    errs = None

    header = getheader(fname)
    wcs = WCS(header)

    # Check for solved file
    if skip_solved and wcs.is_celestial:
        logger.info(
            f"Skipping solved file (use skip_solved=False to solve again): {fname}"
        )

        out_dict.update(header)
        out_dict['solved_fits_file'] = fname
        return out_dict

    # Pass --overwrite through to the underlying solve-field call.
    if overwrite:
        kwargs['--overwrite'] = True

    # Use unpacked version of file.
    was_compressed = False
    if fname.endswith('.fz'):
        logger.debug(f'Uncompressing {fname}')
        fname = funpack(fname)
        logger.debug(f'Using {fname} for solving')
        was_compressed = True

    logger.debug(f'Solving with: {kwargs!r}')
    proc = solve_field(fname, **kwargs)
    try:
        output, errs = proc.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        proc.kill()
        output, errs = proc.communicate()
        raise error.Timeout(f'Timeout while solving: {output!r} {errs!r}')
    else:
        if proc.returncode != 0:
            logger.debug(f'Returncode: {proc.returncode}')
        for log in [output, errs]:
            # NOTE(review): `log > ''` is an odd way to test for non-empty
            # output — `if log:` already covers it; confirm before changing.
            if log and log > '':
                logger.debug(f'Output on {fname}: {log}')

        if proc.returncode == 3:
            raise error.SolveError(f'solve-field not found: {output}')

    # solve-field writes the solved image alongside with a `.new` extension.
    new_fname = fname.replace('.fits', '.new')
    if replace:
        logger.debug(f'Overwriting original {fname}')
        os.replace(new_fname, fname)
    else:
        fname = new_fname

    try:
        header = getheader(fname)
        header.remove('COMMENT', ignore_missing=True, remove_all=True)
        header.remove('HISTORY', ignore_missing=True, remove_all=True)
        out_dict.update(header)
    except OSError:
        logger.warning(f"Can't read fits header for: {fname}")

    # Check it was solved.
    if WCS(header).is_celestial is False:
        raise error.SolveError(
            'File not properly solved, no WCS header present.')

    # Remove WCS file.
    # NOTE(review): unconditional remove raises FileNotFoundError if the
    # sidecar `.wcs` file is absent — presumably it always exists after a
    # successful solve; verify.
    os.remove(fname.replace('.fits', '.wcs'))

    if was_compressed and replace:
        logger.debug(f'Compressing plate-solved {fname}')
        fname = fpack(fname)

    out_dict['solved_fits_file'] = fname

    return out_dict
def solve_field(fname, timeout=15, solve_opts=None, *args, **kwargs):
    """ Plate solves an image.

    Note: This is a low-level wrapper around the underlying `solve-field`
        program. See `get_solve_field` for more typical usage and examples.

    Args:
        fname(str, required):       Filename to solve in .fits extension.
        timeout(int, optional):     Timeout for the solve-field command,
                                    defaults to 15 seconds.
        solve_opts(list, optional): List of options for solve-field.

    Returns:
        subprocess.Popen: The running `solve-field` process.

    Raises:
        error.InvalidSystemCommand: If `solve-field` cannot be found.
        error.PanError: If the subprocess cannot be started.
    """
    solve_field_script = shutil.which('solve-field')

    if solve_field_script is None:  # pragma: no cover
        raise error.InvalidSystemCommand(
            f"Can't find solve-field, is astrometry.net installed?")

    # Add the options for solving the field
    if solve_opts is not None:
        options = solve_opts
    else:
        # Default options
        options = [
            '--guess-scale',
            '--cpulimit', str(timeout),
            '--no-verify',
            '--crpix-center',
            '--temp-axy',
            '--index-xyls', 'none',
            '--solved', 'none',
            '--match', 'none',
            '--rdls', 'none',
            '--corr', 'none',
            '--downsample', '4',
            '--no-plots',
        ]

        if 'ra' in kwargs:
            options.append('--ra')
            options.append(str(kwargs.get('ra')))
        if 'dec' in kwargs:
            options.append('--dec')
            options.append(str(kwargs.get('dec')))
        if 'radius' in kwargs:
            options.append('--radius')
            options.append(str(kwargs.get('radius')))

    # Gather all the kwargs that start with `--` and are not already present.
    logger.debug(f'Adding kwargs: {kwargs!r}')
    for opt, val in kwargs.items():
        if not opt.startswith('--') or opt in options:
            continue
        if isinstance(val, bool):
            # Boolean options are flags: pass the bare flag when True, omit it
            # entirely when False. (Previously the filter excluded all boolean
            # values, so e.g. '--overwrite': True was silently dropped.)
            if val:
                options.append(opt)
        else:
            options.append(f'{opt}={val}')

    cmd = [solve_field_script] + options + [fname]

    logger.debug(f'Solving with: {cmd}')
    try:
        proc = subprocess.Popen(cmd,
                                universal_newlines=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    except Exception as e:
        raise error.PanError(f"Problem plate-solving in solve_field: {e!r}")

    return proc
def mask_saturated(data, saturation_level=None, threshold=0.9, bit_depth=None, dtype=None):
    """Convert data to a masked array with saturated values masked.

    Args:
        data (array_like): The numpy data array.
        saturation_level (scalar, optional): The saturation level. If not given
            then the saturation level will be set to threshold times the
            maximum pixel value.
        threshold (float, optional): The fraction of the maximum pixel value to
            use as the saturation level, default 0.9.
        bit_depth (astropy.units.Quantity or int, optional): The effective bit
            depth of the data. If given the maximum pixel value will be assumed
            to be 2**bit_depth, otherwise an attempt will be made to infer the
            maximum pixel value from the data type of the data. If data is not
            an integer type the maximum pixel value cannot be inferred and an
            IllegalValue exception will be raised.
        dtype (numpy.dtype, optional): The requested dtype for the masked
            array. If not given the dtype of the masked array will be same as
            data.

    Returns:
        numpy.ma.array: The masked numpy array.

    Raises:
        error.IllegalValue: Raised if bit_depth is an astropy.units.Quantity
            object but the units are not compatible with either bits or
            bits/pixel.
        error.IllegalValue: Raised if neither saturation level or bit_depth are
            given, and data has a non integer data type.
    """
    # Use an explicit None check so a deliberately passed saturation_level of
    # 0 (mask everything positive) is honoured rather than silently replaced
    # by an inferred level.
    if saturation_level is None:
        if bit_depth is not None:
            # bit_depth may be a plain int (no .to_value -> AttributeError,
            # suppressed) or a Quantity in bits or bits/pixel.
            try:
                with suppress(AttributeError):
                    bit_depth = bit_depth.to_value(unit=u.bit)
            except u.UnitConversionError:
                try:
                    bit_depth = bit_depth.to_value(unit=u.bit / u.pixel)
                except u.UnitConversionError:
                    raise error.IllegalValue(
                        "bit_depth must have units of bits or bits/pixel, " +
                        f"got {bit_depth!r}")

            bit_depth = int(bit_depth)
            logger.trace(f"Using bit depth {bit_depth!r}")
            saturation_level = threshold * (2**bit_depth - 1)
        else:
            # No bit depth specified, try to guess.
            logger.trace(f"Inferring bit_depth from data type, {data.dtype!r}")
            try:
                # Try to use np.iinfo to compute machine limits. Will work for
                # integer types.
                saturation_level = threshold * np.iinfo(data.dtype).max
            except ValueError:
                # ValueError from np.iinfo means not an integer type.
                raise error.IllegalValue(
                    "Neither saturation_level or bit_depth given, and data " +
                    "is not an integer type. Cannot determine correct " +
                    "saturation level.")

    logger.debug(f"Masking image using saturation level {saturation_level!r}")
    # Convert data to masked array of requested dtype, mask values above
    # saturation level.
    return np.ma.array(data, mask=(data > saturation_level), dtype=dtype)
def cr2_to_fits(cr2_fname,
                fits_fname=None,
                overwrite=False,
                headers=None,
                fits_headers=None,
                remove_cr2=False,
                **kwargs):  # pragma: no cover
    """Convert a CR2 file to FITS.

    This is a convenience function that first converts the CR2 to PGM via
    ~cr2_to_pgm. Also adds keyword headers to the FITS file.

    Note:
        The intermediate PGM file is automatically removed

    Arguments:
        cr2_fname (str): Name of the CR2 file to be converted.
        fits_fname (str, optional): Name of the FITS file to output. Default
            is `None`, in which case the `cr2_fname` is used as the base.
        overwrite (bool, optional): Overwrite existing FITS, default False.
        headers (dict, optional): Header data added to the FITS file. Default
            `None` is treated as an empty dict.
        fits_headers (dict, optional): Header data added to the FITS file
            without filtering. Default `None` is treated as an empty dict.
        remove_cr2 (bool, optional): If CR2 should be removed after
            processing, default False.
        **kwargs: Description

    Returns:
        str: The full path to the generated FITS file.
    """
    # Avoid mutable default arguments: fresh dicts per call.
    if headers is None:
        headers = {}
    if fits_headers is None:
        fits_headers = {}

    if fits_fname is None:
        fits_fname = cr2_fname.replace('.cr2', '.fits')

    if not os.path.exists(fits_fname) or overwrite:
        logger.debug("Converting CR2 to PGM: {}".format(cr2_fname))

        # Convert the CR2 to a PGM file then delete PGM
        pgm = read_pgm(cr2_to_pgm(cr2_fname), remove_after=True)

        # Add the EXIF information from the CR2 file
        exif = read_exif(cr2_fname)

        # Set the PGM as the primary data for the FITS file
        hdu = fits.PrimaryHDU(pgm)

        obs_date = date_parse(
            exif.get('DateTimeOriginal', '').replace(':', '-', 2)).isoformat()

        # Set some default headers
        hdu.header.set('FILTER', 'RGGB')
        hdu.header.set('ISO', exif.get('ISO', ''))
        # Bugfix: 'Seconds' was previously passed as the *default value* for a
        # missing ExposureTime key; it is a unit comment (third argument),
        # matching the style of the other cards below.
        hdu.header.set('EXPTIME', exif.get('ExposureTime', ''), 'Seconds')
        hdu.header.set('CAMTEMP', exif.get('CameraTemperature', ''), 'Celsius - From CR2')
        hdu.header.set('CIRCCONF', exif.get('CircleOfConfusion', ''), 'From CR2')
        hdu.header.set('COLORTMP', exif.get('ColorTempMeasured', ''), 'From CR2')
        hdu.header.set('FILENAME', exif.get('FileName', ''), 'From CR2')
        hdu.header.set('INTSN', exif.get('InternalSerialNumber', ''), 'From CR2')
        hdu.header.set('CAMSN', exif.get('SerialNumber', ''), 'From CR2')
        hdu.header.set('MEASEV', exif.get('MeasuredEV', ''), 'From CR2')
        hdu.header.set('MEASEV2', exif.get('MeasuredEV2', ''), 'From CR2')
        hdu.header.set('MEASRGGB', exif.get('MeasuredRGGB', ''), 'From CR2')
        hdu.header.set('WHTLVLN', exif.get('NormalWhiteLevel', ''), 'From CR2')
        hdu.header.set('WHTLVLS', exif.get('SpecularWhiteLevel', ''), 'From CR2')
        hdu.header.set('REDBAL', exif.get('RedBalance', ''), 'From CR2')
        hdu.header.set('BLUEBAL', exif.get('BlueBalance', ''), 'From CR2')
        hdu.header.set('WBRGGB', exif.get('WB RGGBLevelAsShot', ''), 'From CR2')
        hdu.header.set('DATE-OBS', obs_date)

        for key, value in fits_headers.items():
            try:
                # FITS keywords are limited to 8 characters; best-effort, a
                # card that astropy rejects is simply skipped.
                hdu.header.set(key.upper()[0:8], value)
            except Exception:
                pass

        try:
            logger.debug("Saving fits file to: {}".format(fits_fname))

            hdu.writeto(fits_fname, output_verify='silentfix', overwrite=overwrite)
        except Exception as e:
            warn("Problem writing FITS file: {}".format(e))
        else:
            if remove_cr2:
                os.unlink(cr2_fname)

        fits_utils.update_observation_headers(fits_fname, headers)

    return fits_fname
def make_timelapse(directory,
                   fn_out=None,
                   glob_pattern='20[1-9][0-9]*T[0-9]*.jpg',
                   overwrite=False,
                   timeout=60,
                   **kwargs):
    """Create a timelapse.

    A timelapse is created from all the images in given ``directory``

    Args:
        directory (str): Directory containing image files.
        fn_out (str, optional): Full path to output file name, if not
            provided, defaults to `directory` basename.
        glob_pattern (str, optional): A glob file pattern of images to
            include, default '20[1-9][0-9]*T[0-9]*.jpg', which corresponds to
            the observation images but excludes any pointing images. The
            pattern should be relative to the local directory.
        overwrite (bool, optional): Overwrite timelapse if exists, default
            False.
        timeout (int): Timeout for making movie, default 60 seconds.
        **kwargs (dict):

    Returns:
        str: Name of output file, or `None` if ffmpeg ran but produced no
            file.

    Raises:
        error.InvalidSystemCommand: Raised if ffmpeg command is not found.
        FileExistsError: Raised if fn_out already exists and overwrite=False.
        error.PanError: Raised if the ffmpeg subprocess fails unexpectedly.
    """
    if fn_out is None:
        head, tail = os.path.split(directory)
        if tail == '':
            head, tail = os.path.split(head)

        field_name = head.split('/')[-2]
        cam_name = head.split('/')[-1]
        fname = f'{field_name}_{cam_name}_{tail}.mp4'
        fn_out = os.path.normpath(os.path.join(directory, fname))

    if os.path.exists(fn_out) and not overwrite:
        raise FileExistsError("Timelapse exists. Set overwrite=True if needed")

    ffmpeg = shutil.which('ffmpeg')
    if ffmpeg is None:
        raise error.InvalidSystemCommand("ffmpeg not found, can't make timelapse")

    inputs_glob = os.path.join(directory, glob_pattern)

    # Bugfix: define before the try block so the finally-logging below cannot
    # raise NameError (masking the real exception) if Popen/communicate fails
    # before assigning them.
    outs = None
    errs = None

    try:
        ffmpeg_cmd = [
            ffmpeg,
            '-r', '3',
            '-pattern_type', 'glob',
            '-i', inputs_glob,
            '-s', 'hd1080',
            '-vcodec', 'libx264',
        ]

        if overwrite:
            ffmpeg_cmd.append('-y')

        ffmpeg_cmd.append(fn_out)

        logger.debug(ffmpeg_cmd)

        proc = subprocess.Popen(ffmpeg_cmd, universal_newlines=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        try:
            # Don't wait forever
            outs, errs = proc.communicate(timeout=timeout)
        except subprocess.TimeoutExpired:
            proc.kill()
            outs, errs = proc.communicate()
        finally:
            logger.debug(f"Output: {outs}")
            logger.debug(f"Errors: {errs}")

            # Double-check for file existence
            if not os.path.exists(fn_out):
                fn_out = None
    except Exception as e:
        raise error.PanError(f"Problem creating timelapse in {fn_out}: {e!r}")

    return fn_out