Example #1
    def monitorDir(self):
        """ Monitor the given directory and show new FF files on the screen. """

        # Create a list of FF files in the given directory
        ff_list = []


        showing_empty = False

        # Repeat until the process is killed from the outside
        while not self.exit.is_set():

            # Monitor the given folder for new FF files
            new_ffs = [file_name for file_name in sorted(os.listdir(self.dir_path)) \
                if validFFName(file_name) and (file_name not in ff_list)]


            # If there are no FF files in the directory, show an empty image
            if (not len(ff_list)) and (not len(new_ffs)) and (not showing_empty):
                text = "No FF files found in the given directory as of yet: {:s}".format(self.dir_path)
                img = np.zeros((720, 1280))
                # Mark the empty image as pending display (None = created but not yet shown)
                showing_empty = None


            # If there are new FF files, update the image
            if len(new_ffs):

                new_ff = new_ffs[-1]
                text = new_ff

                # Load the new FF
                ff = readFF(self.dir_path, new_ff, verbose=False)

                if ff is not None:
                    img = ff.maxpixel

                else:
                    time.sleep(self.update_interval)
                    continue

                showing_empty = False

                # Add new FF files to the list
                ff_list += new_ffs

            # If there are no FF files, wait
            else:
                if showing_empty is not None:
                    time.sleep(self.update_interval)
                    continue


            if showing_empty is not True:
                self.updateImage(img, text, self.update_interval, banner_text=self.banner_text)


            # Set the proper flag if not showing any FF files
            if showing_empty is None:
                showing_empty = True
Example #2
    def startSlideshow(self):
        """ Start a slideshow. 
        """

        # Make a list of FF files in the given directory
        ff_list = [
            file_name for file_name in sorted(os.listdir(self.dir_path))
            if validFFName(file_name)
        ]

        # Exit if no FF files were found
        if not ff_list:
            print("No FF files in the given directory to use for a slideshow!")
            self.exit.set()
            return None

        # Go through the list of FF files and show them on the screen
        first_run = True
        while not self.exit.is_set():
            for ff_name in ff_list:

                # Stop the loop if the slideshow should stop
                if self.exit.is_set():
                    break

                # Load the FF file
                ff = readFF(self.dir_path, ff_name, verbose=False)
                text = ff_name

                # If the FF file was loaded, show the maxpixel
                if ff is not None:
                    img = ff.maxpixel

                else:

                    # If an FF file could not be loaded on the first run, show an empty image
                    if first_run:
                        img = np.zeros((720, 1280))
                        text = "The FF file {:s} could not be loaded.".format(
                            ff_name)

                    # Otherwise, just wait one more pause interval
                    else:
                        time.sleep(self.slideshow_pause)
                        continue

                # Update the image on the screen
                self.updateImage(img,
                                 text,
                                 self.slideshow_pause,
                                 banner_text=self.banner_text)

                first_run = False
Example #3
def batchFFtoImage(dir_path, fmt):
    # Go through all files in the given folder
    for file_name in os.listdir(dir_path):

        # Check if the file is an FF file
        if validFFName(file_name):

            # Read the FF file
            ff = readFF(dir_path, file_name)

            # Skip the file if it could not be read
            if ff is None:
                continue

            # Make a filename for the image
            img_file_name = file_name.replace('fits', '') + fmt

            print('Saving: ', img_file_name)

            # Save the maxpixel to disk
            saveImage(os.path.join(dir_path, img_file_name), ff.maxpixel)
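A minimal usage sketch for the helper above; the path is a placeholder and batchFFtoImage is assumed to be imported as in the example (this sketch is not part of the original source):

# Hypothetical usage sketch: convert every FF file in a night folder to JPEG
# images saved alongside the originals. The path below is a placeholder.
night_dir = "/path/to/night/folder"
batchFFtoImage(night_dir, "jpg")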
Example #4
def makeFlat(dir_path, config, nostars=False, use_images=False):
    """ Makes a flat field from the files in the given folder. CALSTARS file is needed to estimate the
        quality of every image by counting the number of detected stars.

    Arguments:
        dir_path: [str] Path to the directory which contains the FF files and a CALSTARS file.
        config: [config object]

    Keyword arguments:
        nostars: [bool] If True, all files will be taken regardless of whether or not they have stars on them.
        use_images: [bool] Use image files instead of FF files. False by default.

    Return:
        [2d ndarray] Flat field image as a numpy array. If the flat generation failed, None will be returned.
        
    """

    # If only images are used, then don't look for a CALSTARS file
    if use_images:
        nostars = True

    # Load the calstars file if it should be used
    if not nostars:

        # Find the CALSTARS file in the given folder
        calstars_file = None
        for calstars_file in os.listdir(dir_path):
            if ('CALSTARS' in calstars_file) and ('.txt' in calstars_file):
                break

        if calstars_file is None:
            print('CALSTARS file could not be found in the given directory!')
            return None

        # Load the calstars file
        calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)

        # Convert the list to a dictionary
        calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

        print('CALSTARS file: ' + calstars_file + ' loaded!')

        # A list of FF files which have any stars on them
        calstars_ff_files = [line[0] for line in calstars_list]

    else:
        calstars = {}



    # Use image files
    if use_images:

        # Find the file type with the highest file frequency in the given folder
        file_extensions = []
        for file_name in os.listdir(dir_path):
            file_ext = file_name.split('.')[-1]
            if file_ext.lower() in ['jpg', 'png', 'bmp']:
                file_extensions.append(file_ext)
            
        # Get only the most frequent file type
        file_freqs = np.unique(file_extensions, return_counts=True)
        most_freq_type = file_freqs[0][0]

        print('Using image type:', most_freq_type)

        # Take only files of that file type
        ff_list = [file_name for file_name in sorted(os.listdir(dir_path)) \
            if file_name.lower().endswith(most_freq_type)]


    # Use FF files
    else:
        ff_list = []

        # Get a list of FF files in the folder
        for file_name in os.listdir(dir_path):
            if validFFName(file_name) and ((file_name in calstars_ff_files) or nostars):
                ff_list.append(file_name)
                

        # Check that there are any FF files in the folder
        if not ff_list:
            print('No valid FF files in the selected folder!')
            return None



    ff_list_good = []
    ff_times = []

    # Take only those FF files with enough stars on them
    for ff_name in ff_list:

        if (ff_name in calstars) or nostars:

            # Disable requiring minimum number of stars if specified
            if not nostars:
                
                # Get the number of stars detected on the FF image
                ff_nstars = len(calstars[ff_name])

            else:
                ff_nstars = 0

            
            # Check if the number of stars on the image is over the detection threshold
            if (ff_nstars > config.ff_min_stars) or nostars:

                # Add the FF file to the list of FF files to be used to make a flat
                ff_list_good.append(ff_name)


                # If images are used, don't compute the time
                if use_images:
                    ff_time = 0

                else:
                    # Calculate the time of the FF file
                    ff_time = date2JD(*getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True))


                ff_times.append(ff_time)


    # Check that there are enough good FF files in the folder
    if (len(ff_times) < config.flat_min_imgs) and (not nostars):
        print('Not enough FF files have enough stars on them!')
        return None
        
    
    # Make sure the files cover at least 2 hours
    if (not (max(ff_times) - min(ff_times))*24 > 2) and (not nostars):
        print('Good FF files cover less than 2 hours!')
        return None


    # Sample FF files if there are more than 200
    max_ff_flat = 200
    if len(ff_list_good) > max_ff_flat:
        ff_list_good = sorted(random.sample(ff_list_good, max_ff_flat))


    print('Using {:d} files for flat...'.format(len(ff_list_good)))


    c = 0
    img_list = []
    median_list = []

    # Median combine all good FF files
    for i in range(len(ff_list_good)):

        # Load 10 files at a time and median combine them, which conserves memory
        if c < 10:

            # Use images
            if use_images:
                img = scipy.ndimage.imread(os.path.join(dir_path, ff_list_good[i]), -1)


            # Use FF files
            else:
                ff = readFF(dir_path, ff_list_good[i])

                # Skip the file if it is corrupted
                if ff is None:
                    continue

                img = ff.avepixel

            
            img_list.append(img)

            c += 1


        else:

            img_list = np.array(img_list)

            # Median combine the loaded 10 (or less) images
            ff_median = np.median(img_list, axis=0)
            median_list.append(ff_median)

            img_list = []
            c = 0


    # If there are more than 1 calculated median image, combine them
    if len(median_list) > 1:

        # Median combine all median images
        median_list = np.array(median_list)
        ff_median = np.median(median_list, axis=0)

    else:
        if len(median_list) > 0:
            ff_median = median_list[0]
        else:
            ff_median = np.median(np.array(img_list), axis=0)


    # Stretch flat to 0-255
    ff_median = ff_median/np.max(ff_median)*255

    # Convert the flat to 8 bits
    ff_median = ff_median.astype(np.uint8)

    return ff_median
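As an illustration only, here is a hedged sketch of how makeFlat might be combined with the saveImage helper seen in Example #3. The wrapper name and flat file name are hypothetical, and a loaded RMS config object is assumed to be supplied by the caller:

import os

# Hypothetical wrapper: build a flat from a night folder and save it next to
# the FF files. makeFlat and saveImage are assumed to be in scope.
def makeAndSaveFlat(night_dir, config, flat_name="flat.bmp"):

    flat = makeFlat(night_dir, config)

    # makeFlat returns None if there are not enough good FF files
    if flat is None:
        return None

    saveImage(os.path.join(night_dir, flat_name), flat)

    return flat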
Example #5
   def __init__(self, dir_path, img_type):

      # Take FF files if the image type was not given
      if img_type is None:
         
         # Get all FF files in the given folder
         self.filenames = sorted([os.path.abspath(os.path.join(dir_path, filename)) for filename \
            in os.listdir(dir_path) if validFFName(filename)])

      else:

         # Get all images of the given extension
         self.filenames = sorted([os.path.abspath(os.path.join(dir_path, filename)) for filename \
            in os.listdir(dir_path) if filename.lower().endswith(img_type.lower())])


      # If no matching files were found, exit
      if (self.filenames is None) or (len(self.filenames) == 0):
         print('No files in the directory that match the pattern!')
         sys.exit()


      self.files = self.filenames


      # Load an image
      for filename in self.filenames:

         if validFFName(os.path.basename(filename)):
            self.im8u = readFF(*os.path.split(filename)).maxpixel

         else:
            self.im8u = cv2.imread(filename, cv2.IMREAD_COLOR)

         break


      if VERBOSE: 
         print(self.im8u.shape)


      self.HEIGHT = self.im8u.shape[0]
      self.WIDTH  = self.im8u.shape[1]

      if VERBOSE: 
         print("Width =", self.WIDTH, "Height = ", self.HEIGHT)

      if self.WIDTH > 2600:   
         self.scale = 0.25
      elif self.WIDTH > 1280:
         self.scale = 0.5
      else:                    
         self.scale = 1.0



      if len(self.im8u.shape) == 3:
         self.im8u_grey      = cv2.cvtColor(self.im8u, cv2.COLOR_BGR2GRAY)
         self.last_im8u      = cv2.cvtColor(self.im8u, cv2.COLOR_BGR2GRAY)
      else:
         self.im8u_grey      = np.copy(self.im8u)
         self.last_im8u      = np.copy(self.im8u)

      self.diff           = np.copy(self.im8u)
      self.prev_image     = np.copy(self.im8u)
      self.short_max_im8u = np.copy(self.im8u)
      self.long_max_im8u  = np.copy(self.im8u)
      self.short_coadd    = np.copy(self.im8u)
      self.short_coadd_scaled = np.copy(self.im8u)
      self.trigger_list   = []

      self.flip = False
      self.contrast = False
      self.clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))

      # Set the font for overlay
      self.font = cv2.FONT_HERSHEY_SIMPLEX

      self.index = 0
      self.pause = False
      self.short_max_im8u.fill(0)

      cv2.imshow('CheckNight', cv2.resize(self.short_max_im8u, (0,0), fx=self.scale, fy=self.scale))
      cv2.moveWindow("CheckNight", 0, 0)
Example #6
   def __init__(self, dir_path, img_type):

      # Take FF files if the image type was not given
      if img_type is None:
         
         # Get all FF files in the given folder
         self.filenames = [os.path.abspath(os.path.join(dir_path, filename)) for filename \
            in os.listdir(dir_path) if validFFName(filename)]

      else:

         # Get all images of the given extension
         self.filenames = [os.path.abspath(os.path.join(dir_path, filename)) for filename \
            in os.listdir(dir_path) if filename.lower().endswith(img_type.lower())]


      # If no matching files were found, exit
      if (self.filenames is None) or (len(self.filenames) == 0):
         print('No files in the directory that match the pattern!')
         sys.exit()


      self.files = self.filenames


      # Load an image
      for filename in self.filenames:

         if validFFName(os.path.basename(filename)):
            self.im8u = readFF(*os.path.split(filename)).maxpixel

         else:
            self.im8u = cv2.imread(filename, cv2.IMREAD_COLOR)

         break


      if VERBOSE: 
         print(self.im8u.shape)


      self.HEIGHT = self.im8u.shape[0]
      self.WIDTH  = self.im8u.shape[1]

      if VERBOSE: 
         print("Width =", self.WIDTH, "Height = ", self.HEIGHT)

      if self.WIDTH > 2600:   
         self.scale = 0.25
      elif self.WIDTH > 1280:
         self.scale = 0.5
      else:                    
         self.scale = 1.0



      if len(self.im8u.shape) == 3:
         self.im8u_grey      = cv2.cvtColor(self.im8u, cv2.COLOR_BGR2GRAY)
         self.last_im8u      = cv2.cvtColor(self.im8u, cv2.COLOR_BGR2GRAY)
      else:
         self.im8u_grey      = np.copy(self.im8u)
         self.last_im8u      = np.copy(self.im8u)

      self.diff           = np.copy(self.im8u)
      self.prev_image     = np.copy(self.im8u)
      self.short_max_im8u = np.copy(self.im8u)
      self.long_max_im8u  = np.copy(self.im8u)
      self.short_coadd    = np.copy(self.im8u)
      self.short_coadd_scaled = np.copy(self.im8u)
      self.trigger_list   = []

      self.flip = False
      self.contrast = False
      self.clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,8))

      # Set the font for overlay
      self.font = cv2.FONT_HERSHEY_SIMPLEX

      self.index = 0
      self.pause = False
      self.short_max_im8u.fill(0)

      cv2.imshow('CheckNight', cv2.resize(self.short_max_im8u, (0,0), fx=self.scale, fy=self.scale))
      cv2.moveWindow("CheckNight", 0, 0)
Example #7
def trackStack(dir_path,
               config,
               border=5,
               background_compensation=True,
               hide_plot=False):
    """ Generate a stack with aligned stars, so the sky appears static. The folder should have a
        platepars_all_recalibrated.json file.

    Arguments:
        dir_path: [str] Path to the directory with image files.
        config: [Config instance]

    Keyword arguments:
        border: [int] Border around the image to exclude (px).
        background_compensation: [bool] Normalize the background by applying a median filter to avepixel and
            use it as a flat field. Slows down the procedure and may sometimes introduce artifacts. True
            by default.
    """

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file is not None:
        with open(os.path.join(dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print(
                'Loaded recalibrated platepars JSON file for the calibration report...'
            )

    ### ###

    # If the recalib platepars is not found, stop
    if recalibrated_platepars is None:
        print("The {:s} file was not found!".format(
            config.platepars_recalibrated_name))
        return False

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Take the platepar with the middle time as the reference one
    ff_found_list = []
    jd_list = []
    for ff_name_temp in recalibrated_platepars:

        if ff_name_temp in ff_list:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp,
                                 config.fps,
                                 ret_milliseconds=True)
            jd = date2JD(*dt)

            jd_list.append(jd)
            ff_found_list.append(ff_name_temp)

    if len(jd_list) < 2:
        print("Not more than 1 FF image!")
        return False

    # Take the FF file with the middle JD
    jd_list = np.array(jd_list)
    jd_middle = np.mean(jd_list)
    jd_mean_index = np.argmin(np.abs(jd_list - jd_middle))
    ff_mid = ff_found_list[jd_mean_index]

    # Load the middle platepar as the reference one
    pp_ref = Platepar()
    pp_ref.loadFromDict(recalibrated_platepars[ff_mid],
                        use_flat=config.use_flat)

    # Try loading the mask
    mask_path = None
    if os.path.exists(os.path.join(dir_path, config.mask_file)):
        mask_path = os.path.join(dir_path, config.mask_file)

    # Try loading the default mask
    elif os.path.exists(config.mask_file):
        mask_path = os.path.abspath(config.mask_file)

    # Load the mask if given
    mask = None
    if mask_path is not None:
        mask = loadMask(mask_path)
        print("Loaded mask:", mask_path)

    # If the shape of the mask doesn't fit, init an empty mask
    if mask is not None:
        if (mask.img.shape[0] != pp_ref.Y_res) or (mask.img.shape[1] !=
                                                   pp_ref.X_res):
            print("Mask is of wrong shape!")
            mask = None

    if mask is None:
        mask = MaskStructure(255 + np.zeros(
            (pp_ref.Y_res, pp_ref.X_res), dtype=np.uint8))

    # Compute the middle RA/Dec of the reference platepar
    _, ra_temp, dec_temp, _ = xyToRaDecPP([jd2Date(jd_middle)],
                                          [pp_ref.X_res / 2],
                                          [pp_ref.Y_res / 2], [1],
                                          pp_ref,
                                          extinction_correction=False)

    ra_mid, dec_mid = ra_temp[0], dec_temp[0]

    ### Go through all FF files and find RA/Dec of image corners to find the size of the stack image ###

    # List of corners
    x_corns = [0, pp_ref.X_res, 0, pp_ref.X_res]
    y_corns = [0, 0, pp_ref.Y_res, pp_ref.Y_res]

    ra_list = []
    dec_list = []

    for ff_temp in ff_found_list:

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_temp],
                             use_flat=config.use_flat)

        for x_c, y_c in zip(x_corns, y_corns):
            _, ra_temp, dec_temp, _ = xyToRaDecPP(
                [getMiddleTimeFF(ff_temp, config.fps, ret_milliseconds=True)],
                [x_c], [y_c], [1],
                pp_ref,
                extinction_correction=False)
            ra_c, dec_c = ra_temp[0], dec_temp[0]

            ra_list.append(ra_c)
            dec_list.append(dec_c)

    # Compute the angular separation from the middle equatorial coordinates of the reference image to all
    #   RA/Dec corner coordinates
    ang_sep_list = []
    for ra_c, dec_c in zip(ra_list, dec_list):
        ang_sep = np.degrees(
            angularSeparation(np.radians(ra_mid), np.radians(dec_mid),
                              np.radians(ra_c), np.radians(dec_c)))

        ang_sep_list.append(ang_sep)

    # Find the maximum angular separation and compute the image size using the plate scale
    #   The image size will be resampled to 1/2 of the original size to avoid interpolation
    scale = 0.5
    ang_sep_max = np.max(ang_sep_list)
    img_size = int(scale * 2 * ang_sep_max * pp_ref.F_scale)

    ### ###

    # Create the stack platepar with no distortion and a large image size
    pp_stack = copy.deepcopy(pp_ref)
    pp_stack.resetDistortionParameters()
    pp_stack.X_res = img_size
    pp_stack.Y_res = img_size
    pp_stack.F_scale *= scale
    pp_stack.refraction = False

    # Init the image
    avg_stack_sum = np.zeros((img_size, img_size), dtype=float)
    avg_stack_count = np.zeros((img_size, img_size), dtype=int)
    max_deaveraged = np.zeros((img_size, img_size), dtype=np.uint8)

    # Load individual FFs and map them to the stack
    for i, ff_name in enumerate(ff_found_list):

        print("Stacking {:s}, {:.1f}% done".format(
            ff_name, 100 * i / len(ff_found_list)))

        # Read the FF file
        ff = readFF(dir_path, ff_name)

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_name],
                             use_flat=config.use_flat)

        # Make a list of X and Y image coordinates
        x_coords, y_coords = np.meshgrid(
            np.arange(border, pp_ref.X_res - border),
            np.arange(border, pp_ref.Y_res - border))
        x_coords = x_coords.ravel()
        y_coords = y_coords.ravel()

        # Map image pixels to sky
        jd_arr, ra_coords, dec_coords, _ = xyToRaDecPP(
            len(x_coords) *
            [getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)],
            x_coords,
            y_coords,
            len(x_coords) * [1],
            pp_temp,
            extinction_correction=False)

        # Map sky coordinates to stack image coordinates
        stack_x, stack_y = raDecToXYPP(ra_coords, dec_coords, jd_middle,
                                       pp_stack)

        # Round pixel coordinates
        stack_x = np.round(stack_x, decimals=0).astype(int)
        stack_y = np.round(stack_y, decimals=0).astype(int)

        # Cut the image to limits
        filter_arr = (stack_x > 0) & (stack_x < img_size) & (stack_y > 0) & (
            stack_y < img_size)
        x_coords = x_coords[filter_arr].astype(int)
        y_coords = y_coords[filter_arr].astype(int)
        stack_x = stack_x[filter_arr]
        stack_y = stack_y[filter_arr]

        # Apply the mask to maxpixel and avepixel
        maxpixel = copy.deepcopy(ff.maxpixel)
        maxpixel[mask.img == 0] = 0
        avepixel = copy.deepcopy(ff.avepixel)
        avepixel[mask.img == 0] = 0

        # Compute deaveraged maxpixel
        max_deavg = maxpixel - avepixel

        # Normalize the background brightness by applying a large-kernel median filter to avepixel
        if background_compensation:

            # # Apply a median filter to the avepixel to get an estimate of the background brightness
            # avepixel_median = scipy.ndimage.median_filter(ff.avepixel, size=101)
            avepixel_median = cv2.medianBlur(ff.avepixel, 301)

            # Make sure to avoid zero division
            avepixel_median[avepixel_median < 1] = 1

            # Normalize the avepixel by subtracting out the background brightness
            avepixel = avepixel.astype(float)
            avepixel /= avepixel_median
            avepixel *= 50  # Normalize to a good background value, which is usually 50
            avepixel = np.clip(avepixel, 0, 255)
            avepixel = avepixel.astype(np.uint8)

            # plt.imshow(avepixel, cmap='gray', vmin=0, vmax=255)
            # plt.show()

        # Add the average pixel to the sum
        avg_stack_sum[stack_y, stack_x] += avepixel[y_coords, x_coords]

        # Increment the counter image where the avepixel is not zero
        ones_img = np.ones_like(avepixel)
        ones_img[avepixel == 0] = 0
        avg_stack_count[stack_y, stack_x] += ones_img[y_coords, x_coords]

        # Set pixel values to the stack, only take the max values
        max_deaveraged[stack_y, stack_x] = np.max(np.dstack(
            [max_deaveraged[stack_y, stack_x], max_deavg[y_coords, x_coords]]),
                                                  axis=2)

    # Compute the blended avepixel background
    stack_img = avg_stack_sum
    stack_img[avg_stack_count > 0] /= avg_stack_count[avg_stack_count > 0]
    stack_img += max_deaveraged
    stack_img = np.clip(stack_img, 0, 255)
    stack_img = stack_img.astype(np.uint8)

    # Crop image
    non_empty_columns = np.where(stack_img.max(axis=0) > 0)[0]
    non_empty_rows = np.where(stack_img.max(axis=1) > 0)[0]
    crop_box = (np.min(non_empty_rows), np.max(non_empty_rows),
                np.min(non_empty_columns), np.max(non_empty_columns))
    stack_img = stack_img[crop_box[0]:crop_box[1] + 1,
                          crop_box[2]:crop_box[3] + 1]

    ### Plot and save the stack ###

    dpi = 200
    plt.figure(figsize=(stack_img.shape[1] / dpi, stack_img.shape[0] / dpi),
               dpi=dpi)

    plt.imshow(stack_img,
               cmap='gray',
               vmin=0,
               vmax=256,
               interpolation='nearest')

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, stack_img.shape[1]])
    plt.ylim([stack_img.shape[0], 0])

    # Remove the margins (top and right are set to 0.9999, as setting them to 1.0 makes the image blank in
    #   some matplotlib versions)
    plt.subplots_adjust(left=0,
                        bottom=0,
                        right=0.9999,
                        top=0.9999,
                        wspace=0,
                        hspace=0)

    file_path = os.path.join(dir_path,
                             os.path.basename(dir_path) + "_track_stack.jpg")
    plt.savefig(file_path, bbox_inches='tight', pad_inches=0, dpi=dpi)

    ### ###

    if hide_plot is False:
        plt.show()
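A short, hypothetical call sketch for trackStack; the wrapper name is made up, a loaded config object is assumed, and the folder must contain a platepars_all_recalibrated.json file, as the docstring above notes:

# Hypothetical sketch: produce a star-aligned stack of a night folder without
# opening the interactive plot window. trackStack is assumed to be in scope.
def stackNightTracked(night_dir, config):

    # trackStack returns False if the recalibrated platepars file is missing
    return trackStack(night_dir, config, border=5,
                      background_compensation=True, hide_plot=True)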
Example #8
    flat = None
    if cml_args.flat:
        flat = loadFlat(*os.path.split(cml_args.flat))

    corrected_meteor_list = []

    # Find matching FF files in the directory
    for entry in meteor_list:

        ftp_ff_name, meteor_No, rho, phi, meteor_meas = entry

        # Find the FF file matching the FTPdetectinfo entry in the directory
        for ff_name in sorted(os.listdir(dir_path)):

            # Reject all non-FF files
            if not validFFName(ff_name):
                continue

            # Reject all FF files which do not match the name in the FTPdetectinfo
            if ff_name != ftp_ff_name:
                continue

            print('Correcting for saturation:', ff_name)

            # Load the FF file
            ff = readFF(dir_path, ff_name)

            # Apply the flat to avepixel
            if flat:
                avepixel = applyFlat(ff.avepixel, flat)
Example #9
        font = ImageFont.load_default()

    # Create temporary directory
    dir_tmp_path = os.path.join(dir_path, "temp_img_dir")

    if os.path.exists(dir_tmp_path):
        shutil.rmtree(dir_tmp_path)
        print("Deleted directory : " + dir_tmp_path)
		
    mkdirP(dir_tmp_path)
    print("Created directory : " + dir_tmp_path)
    
    print("Preparing files for the timelapse...")
    c = 0

    ff_list = [ff_name for ff_name in sorted(os.listdir(dir_path)) if validFFName(ff_name)]

    for file_name in ff_list:

        # Read the FF file
        ff = readFF(dir_path, file_name)

        # Skip the file if it could not be read
        if ff is None:
            continue

        # Get the timestamp from the FF name
        timestamp = filenameToDatetime(file_name).strftime("%Y-%m-%d %H:%M:%S")
		
        # Get the camera ID from the file name
        # e.g.  FF499_20170626_020520_353_0005120.bin
Example #10
    arg_parser.add_argument('file_format', nargs=1, metavar='FILE_FORMAT', type=str, \
        help='File format of the image, e.g. jpg or png.')

    # Parse the command line arguments
    cml_args = arg_parser.parse_args()

    #########################


    dir_path = cml_args.dir_path[0]

    # Go through all files in the given folder
    for file_name in os.listdir(dir_path):

        # Check if the file is an FF file
        if validFFName(file_name):

            # Read the FF file
            ff = readFF(dir_path, file_name)

            # Skip the file if it could not be read
            if ff is None:
                continue

            # Make a filename for the image
            img_file_name = file_name.replace('fits', '') + cml_args.file_format[0]

            print('Saving: ', img_file_name)

            # Save the maxpixel to disk
            scipy.misc.imsave(os.path.join(dir_path, img_file_name), ff.maxpixel)
Example #11
    initLogging('detection_')

    log = logging.getLogger("logger")

    ######


    if not len(sys.argv) == 2:
        print("Usage: python -m RMS.ExtractStars /path/to/bin/files/")
        sys.exit()
    

    # Get paths to every FF bin file in a directory 
    ff_dir = sys.argv[1].replace('"', '')
    ff_dir = os.path.abspath(ff_dir)
    ff_list = [ff_name for ff_name in sorted(os.listdir(ff_dir)) if validFFName(ff_name)]


    # Check if there are any files in the directory
    if len(ff_list) == 0:
        print("No files found!")
        sys.exit()


    # Try loading a flat field image
    flat_struct = None

    if config.use_flat:
        
        # Check if there is flat in the data directory
        if os.path.exists(os.path.join(ff_dir, config.flat_file)):
Example #12
def makeFlat(dir_path, config):
    """ Makes a flat field from the files in the given folder. CALSTARS file is needed to estimate the
        quality of every image by counting the number of detected stars.

    Arguments:
        dir_path: [str] Path to the directory which contains the FF files and a CALSTARS file.
        config: [config object]

    Return:
        [2d ndarray] Flat field image as a numpy array. If the flat generation failed, None will be returned.
        
    """

    # Find the CALSTARS file in the given folder
    calstars_file = None
    for calstars_file in os.listdir(dir_path):
        if ('CALSTARS' in calstars_file) and ('.txt' in calstars_file):
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None

    # Load the calstars file
    calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)

    # Convert the list to a dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    print('CALSTARS file: ' + calstars_file + ' loaded!')

    # A list of FF files which have any stars on them
    calstars_ff_files = [line[0] for line in calstars_list]

    ff_list = []

    # Get a list of FF files in the folder
    for file_name in os.listdir(dir_path):
        if validFFName(file_name) and (file_name in calstars_ff_files):
            ff_list.append(file_name)

    # Check that there are any FF files in the folder
    if not ff_list:
        print('No FF files in the selected folder!')
        return None

    ff_list_good = []
    ff_times = []

    # Take only those FF files with enough stars on them
    for ff_name in ff_list:

        if not validFFName(ff_name):
            continue

        if ff_name in calstars:

            # Get the number of stars detected on the FF image
            ff_nstars = len(calstars[ff_name])

            # Check if the number of stars on the image is over the detection threshold
            if ff_nstars > config.ff_min_stars:

                # Add the FF file to the list of FF files to be used to make a flat
                ff_list_good.append(ff_name)

                # Calculate the time of the FF file
                ff_time = date2JD(*getMiddleTimeFF(
                    ff_name, config.fps, ret_milliseconds=True))
                ff_times.append(ff_time)

    # Check that there are enough good FF files in the folder
    if len(ff_times) < config.flat_min_imgs:
        print('Not enough FF files have enough stars on them!')
        return None

    # Make sure the files cover at least 2 hours
    if not (max(ff_times) - min(ff_times)) * 24 > 2:
        print('Good FF files cover less than 2 hours!')
        return None

    # Sample FF files if there are more than 200
    max_ff_flat = 200
    if len(ff_list_good) > max_ff_flat:
        ff_list_good = sorted(random.sample(ff_list_good, max_ff_flat))

    print('Using {:d} files for flat...'.format(len(ff_list_good)))

    c = 0
    ff_avg_list = []
    median_list = []

    # Median combine all good FF files
    for i in range(len(ff_list_good)):

        # Load 10 files at a time and median combine them, which conserves memory
        if c < 10:

            ff = readFF(dir_path, ff_list_good[i])
            ff_avg_list.append(ff.avepixel)

            c += 1

        else:

            ff_avg_list = np.array(ff_avg_list)

            # Median combine the loaded 10 (or less) images
            ff_median = np.median(ff_avg_list, axis=0)
            median_list.append(ff_median)

            ff_avg_list = []
            c = 0

    # If there are more than 1 calculated median image, combine them
    if len(median_list) > 1:

        # Median combine all median images
        median_list = np.array(median_list)
        ff_median = np.median(median_list, axis=0)

    else:
        ff_median = median_list[0]

    # Stretch flat to 0-255
    ff_median = ff_median / np.max(ff_median) * 255

    # Convert the flat to 8 bits
    ff_median = ff_median.astype(np.uint8)

    return ff_median
Example #13
def generateTimelapse(dir_path, nodel):

    t1 = datetime.datetime.utcnow()

    # Load the font for labeling
    try:
        font = ImageFont.truetype("/usr/share/fonts/dejavu/DejaVuSans.ttf", 18)
    except:
        font = ImageFont.load_default()

    # Create temporary directory
    dir_tmp_path = os.path.join(dir_path, "temp_img_dir")

    if os.path.exists(dir_tmp_path):
        shutil.rmtree(dir_tmp_path)
        print("Deleted directory : " + dir_tmp_path)

    mkdirP(dir_tmp_path)
    print("Created directory : " + dir_tmp_path)

    print("Preparing files for the timelapse...")
    c = 0

    ff_list = [
        ff_name for ff_name in sorted(os.listdir(dir_path))
        if validFFName(ff_name)
    ]

    for file_name in ff_list:

        # Read the FF file
        ff = readFF(dir_path, file_name)

        # Skip the file if it could not be read
        if ff is None:
            continue

        # Get the timestamp from the FF name
        timestamp = filenameToDatetime(file_name).strftime("%Y-%m-%d %H:%M:%S")

        # Get the camera ID from the file name
        # e.g.  FF499_20170626_020520_353_0005120.bin
        # or FF_CA0001_20170626_020520_353_0005120.fits

        file_split = file_name.split('_')

        # Check the number of name components; the new FITS format has one more underscore
        i = 0
        if len(file_split[0]) == 2:
            i = 1
        camid = file_split[i]

        # Make a filename for the image, continuous count %04d
        img_file_name = 'temp_{:04d}.jpg'.format(c)

        img = ff.maxpixel

        # Draw text to image
        font = cv2.FONT_HERSHEY_SIMPLEX
        text = camid + " " + timestamp + " UTC"
        cv2.putText(img, text, (10, ff.nrows - 6), font, 0.4, (255, 255, 255),
                    1, cv2.LINE_AA)

        # Save the labelled image to disk
        cv2.imwrite(os.path.join(dir_tmp_path, img_file_name), img,
                    [cv2.IMWRITE_JPEG_QUALITY, 100])

        c = c + 1

        # Print elapsed time
        if c % 30 == 0:
            print("{:>5d}/{:>5d}, Elapsed: {:s}".format(c + 1, len(ff_list), \
                str(datetime.datetime.utcnow() - t1)), end="\r")
            sys.stdout.flush()

    # If running on Linux, use avconv
    if platform.system() == 'Linux':

        # If avconv is not found, try using ffmpeg. In case of using ffmpeg,
        # use parameter -nostdin to avoid it being stuck waiting for user input
        software_name = "avconv"
        nostdin = ""
        print("Checking if avconv is available...")
        if os.system(software_name + " --help > /dev/null"):
            software_name = "ffmpeg"
            nostdin = " -nostdin "

        # Construct the command for avconv
        mp4_path = os.path.join(dir_path, os.path.basename(dir_path) + ".mp4")
        temp_img_path = os.path.basename(
            dir_tmp_path) + os.sep + "temp_%04d.jpg"
        com = "cd " + dir_path + ";" \
            + software_name + nostdin + " -v quiet -r "+ str(fps) +" -y -i " + temp_img_path \
            + " -vcodec libx264 -pix_fmt yuv420p -crf 25 -movflags faststart -g 15 -vf \"hqdn3d=4:3:6:4.5,lutyuv=y=gammaval(0.77)\" " \
            + mp4_path

        print("Creating timelapse using {:s}...".format(software_name))
        print(com)
        subprocess.call([com], shell=True)

    # If running on Windows, use ffmpeg.exe
    elif platform.system() == 'Windows':

        # ffmpeg.exe path
        root = os.path.dirname(__file__)
        ffmpeg_path = os.path.join(root, "ffmpeg.exe")

        # Construct the command for ffmpeg
        mp4_path = os.path.basename(dir_path) + ".mp4"
        temp_img_path = os.path.join(os.path.basename(dir_tmp_path),
                                     "temp_%04d.jpg")
        com = ffmpeg_path + " -v quiet -r " + str(
            fps
        ) + " -i " + temp_img_path + " -c:v libx264 -pix_fmt yuv420p -an -crf 25 -g 15 -vf \"hqdn3d=4:3:6:4.5,lutyuv=y=gammaval(0.77)\" -movflags faststart -y " + mp4_path

        print("Creating timelapse using ffmpeg...")
        print(com)
        subprocess.call(com, shell=True, cwd=dir_path)

    else:
        print(
            "generateTimelapse only works on Linux or Windows; the video could not be encoded"
        )

    # Delete the temporary directory and the files inside
    if os.path.exists(dir_tmp_path) and not nodel:
        shutil.rmtree(dir_tmp_path)
        print("Deleted temporary directory : " + dir_tmp_path)

    print("Total time:", datetime.datetime.utcnow() - t1)
Example #14

if __name__ == '__main__':

    if len(sys.argv) < 2:
        print('Usage: python -m Utils.MergeMaxpixels /dir/with/FF/files')

        sys.exit()

    dir_path = sys.argv[1].replace('"', '')

    first_img = True

    # List all FF files in the current dir
    for ff_name in os.listdir(dir_path):
        if validFFName(ff_name):

            print('Stacking: ', ff_name)

            # Load FF file
            ff = readFF(dir_path, ff_name)

            # Take only the detection (max - avg pixel image)
            img = deinterlaceBlend(ff.maxpixel) - deinterlaceBlend(ff.avepixel)

            if first_img:
                merge_img = np.copy(img)
                first_img = False
                continue

            # Blend images 'if lighter'
Example #15
def stackFFs(dir_path, file_format, deinterlace=False, subavg=False, filter_bright=False, flat_path=None,
    file_list=None, mask=None):
    """ Stack FF files in the given folder. 

    Arguments:
        dir_path: [str] Path to the directory with FF files.
        file_format: [str] Image format for the stack. E.g. jpg, png, bmp

    Keyword arguments:
        deinterlace: [bool] True if the image should be deinterlaced prior to stacking. False by default.
        subavg: [bool] Whether the average pixel image should be subtracted from the max pixel image. False
            by default.
        filter_bright: [bool] Whether images with bright backgrounds (after average subtraction) should be
            skipped. False by default.
        flat_path: [str] Path to the flat calibration file. None by default. Will only be used if subavg is
            False.
        file_list: [list] A list of files for stacking. None by default, in which case all FF files in the
            given directory will be used.
        mask: [MaskStructure] Mask to apply to the stack. None by default.

    Return:
        stack_path, merge_img:
            - stack_path: [str] Path to the saved stack.
            - merge_img: [ndarray] Numpy array of the stacked image.
    """

    # Load the flat if it was given
    flat = None
    if flat_path != '':

        # Try finding the default flat
        if flat_path is None:
            flat_path = dir_path
            flat_file = 'flat.bmp'

        else:
            flat_path, flat_file = os.path.split(flat_path)

        flat_full_path = os.path.join(flat_path, flat_file)
        if os.path.isfile(flat_full_path):

            # Load the flat
            flat = loadFlat(flat_path, flat_file)

            print('Loaded flat:', flat_full_path)


    first_img = True

    n_stacked = 0
    total_ff_files = 0
    merge_img = None

    # If the list of files was not given, take all files in the given folder
    if file_list is None:
        file_list = sorted(os.listdir(dir_path))


    # List all FF files in the current dir
    for ff_name in file_list:
        if validFFName(ff_name):

            # Load FF file
            ff = readFF(dir_path, ff_name)

            # Skip the file if it is corrupted
            if ff is None:
                continue

            total_ff_files += 1

            maxpixel = ff.maxpixel
            avepixel = ff.avepixel

            # Deinterlace the images
            if deinterlace:
                maxpixel = deinterlaceBlend(maxpixel)
                avepixel = deinterlaceBlend(avepixel)

            # If the flat was given, apply it to the image, only if no subtraction is done
            if (flat is not None) and not subavg:
                maxpixel = applyFlat(maxpixel, flat)
                avepixel = applyFlat(avepixel, flat)


            # Reject the image if the median subtracted image is too bright. This usually means that there
            #   are clouds on the image which can ruin the stack
            if filter_bright:

                img = maxpixel - avepixel

                # Compute surface brightness
                median = np.median(img)

                # Compute top detection pixels
                top_brightness = np.percentile(img, 99.9)

                # Reject all images where the median brightness is high
                # Preserve images with very bright detections
                if (median > 10) and (top_brightness < (2**(8*img.itemsize) - 10)):
                    print('Skipping: ', ff_name, 'median:', median, 'top brightness:', top_brightness)
                    continue


            # Subtract the average from maxpixel
            if subavg:
                img = maxpixel - avepixel

            else:
                img = maxpixel

            if first_img:
                merge_img = np.copy(img)
                first_img = False
                continue

            print('Stacking: ', ff_name)

            # Blend images 'if lighter'
            merge_img = blendLighten(merge_img, img)

            n_stacked += 1


    # If the number of stacked images is less than 20% of the given images, stack without filtering
    if filter_bright and (n_stacked < 0.2*total_ff_files):
        return stackFFs(dir_path, file_format, deinterlace=deinterlace, subavg=subavg, 
            filter_bright=False, flat_path=flat_path, file_list=file_list)

    # If no images were stacked, do nothing
    if n_stacked == 0:
        return None, None


    # Extract the name of the night directory which contains the FF files
    night_dir = os.path.basename(dir_path)

    stack_path = os.path.join(dir_path, night_dir + '_stack_{:d}_meteors.'.format(n_stacked) + file_format)

    print("Saving stack to:", stack_path)

    # Stretch the levels
    merge_img = adjustLevels(merge_img, np.percentile(merge_img, 0.5), 1.3, np.percentile(merge_img, 99.9))


    # Apply the mask, if given
    if mask is not None:
        merge_img = MaskImage.applyMask(merge_img, mask)

    
    # Save the blended image
    scipy.misc.imsave(stack_path, merge_img)


    return stack_path, merge_img
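For illustration, a hedged sketch of calling stackFFs on a whole night; the wrapper name is hypothetical and stackFFs is assumed to be imported as in the example:

# Hypothetical sketch: stack all FF files in a folder into one JPEG, subtracting
# the average image and skipping frames with bright (cloudy) backgrounds.
def stackNightMeteors(night_dir):

    stack_path, stack_img = stackFFs(night_dir, 'jpg', deinterlace=False,
                                     subavg=True, filter_bright=True)

    # stack_path is None if nothing could be stacked
    return stack_path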
Example #17
def makeFlat(dir_path, config, nostars=False, use_images=False):
    """ Makes a flat field from the files in the given folder. CALSTARS file is needed to estimate the
        quality of every image by counting the number of detected stars.

    Arguments:
        dir_path: [str] Path to the directory which contains the FF files and a CALSTARS file.
        config: [config object]

    Keyword arguments:
        nostars: [bool] If True, all files will be taken regardless of whether or not they have stars on them.
        use_images: [bool] Use image files instead of FF files. False by default.

    Return:
        [2d ndarray] Flat field image as a numpy array. If the flat generation failed, None will be returned.
        
    """

    # If only images are used, then don't look for a CALSTARS file
    if use_images:
        nostars = True

    # Load the calstars file if it should be used
    if not nostars:

        # Find the CALSTARS file in the given folder
        calstars_file = None
        for calstars_file in os.listdir(dir_path):
            if ('CALSTARS' in calstars_file) and ('.txt' in calstars_file):
                break

        if calstars_file is None:
            print('CALSTARS file could not be found in the given directory!')
            return None

        # Load the calstars file
        calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)

        # Convert the list to a dictionary
        calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

        print('CALSTARS file: ' + calstars_file + ' loaded!')

        # A list of FF files which have any stars on them
        calstars_ff_files = [line[0] for line in calstars_list]

    else:
        calstars = {}
        calstars_ff_files = []

    # Use image files
    if use_images:

        # Find the file type with the highest file frequency in the given folder
        file_extensions = []
        for file_name in os.listdir(dir_path):
            file_ext = file_name.split('.')[-1]
            if file_ext.lower() in ['jpg', 'png', 'bmp']:
                file_extensions.append(file_ext)

        # Get only the most frequent file type
        file_freqs = np.unique(file_extensions, return_counts=True)
        most_freq_type = file_freqs[0][0]

        print('Using image type:', most_freq_type)

        # Take only files of that file type
        ff_list = [file_name for file_name in sorted(os.listdir(dir_path)) \
            if file_name.lower().endswith(most_freq_type)]

    # Use FF files
    else:
        ff_list = []

        # Get a list of FF files in the folder
        for file_name in os.listdir(dir_path):
            if validFFName(file_name) and ((file_name in calstars_ff_files)
                                           or nostars):
                ff_list.append(file_name)

        # Check that there are any FF files in the folder
        if not ff_list:
            print('No valid FF files in the selected folder!')
            return None

    ff_list_good = []
    ff_times = []

    # Take only those FF files with enough stars on them
    for ff_name in ff_list:

        if (ff_name in calstars) or nostars:

            # Disable requiring minimum number of stars if specified
            if not nostars:

                # Get the number of stars detected on the FF image
                ff_nstars = len(calstars[ff_name])

            else:
                ff_nstars = 0

            # Check if the number of stars on the image is over the detection threshold
            if (ff_nstars > config.ff_min_stars) or nostars:

                # Add the FF file to the list of FF files to be used to make a flat
                ff_list_good.append(ff_name)

                # If images are used, don't compute the time
                if use_images:
                    ff_time = 0

                else:
                    # Calculate the time of the FF file
                    ff_time = date2JD(*getMiddleTimeFF(
                        ff_name, config.fps, ret_milliseconds=True))

                ff_times.append(ff_time)

    # Check that there are enough good FF files in the folder
    if (len(ff_times) < config.flat_min_imgs) and (not nostars):
        print('Not enough FF files have enough stars on them!')
        return None

    # Make sure the files cover at least 2 hours
    if (not (max(ff_times) - min(ff_times)) * 24 > 2) and (not nostars):
        print('Good FF files cover less than 2 hours!')
        return None

    # Sample FF files if there are more than 200
    max_ff_flat = 200
    if len(ff_list_good) > max_ff_flat:
        ff_list_good = sorted(random.sample(ff_list_good, max_ff_flat))

    print('Using {:d} files for flat...'.format(len(ff_list_good)))

    c = 0
    img_list = []
    median_list = []

    # Median combine all good FF files
    for i in range(len(ff_list_good)):

        # Load 10 files at a time and median combine them, which conserves memory
        if c < 10:

            # Use images
            if use_images:
                img = loadImage(os.path.join(dir_path, ff_list_good[i]), -1)

            # Use FF files
            else:
                ff = readFF(dir_path, ff_list_good[i])

                # Skip the file if it is corrupted
                if ff is None:
                    continue

                img = ff.avepixel

            img_list.append(img)

            c += 1

        else:

            img_list = np.array(img_list)

            # Median combine the loaded 10 (or less) images
            ff_median = np.median(img_list, axis=0)
            median_list.append(ff_median)

            img_list = []
            c = 0

    # If there are more than 1 calculated median image, combine them
    if len(median_list) > 1:

        # Median combine all median images
        median_list = np.array(median_list)
        ff_median = np.median(median_list, axis=0)

    else:
        if len(median_list) > 0:
            ff_median = median_list[0]
        else:
            ff_median = np.median(np.array(img_list), axis=0)

    # Stretch flat to 0-255
    ff_median = ff_median / np.max(ff_median) * 255

    # Convert the flat to 8 bits
    ff_median = ff_median.astype(np.uint8)

    return ff_median
Example #18
def detectStarsAndMeteorsDirectory(dir_path, config):
    """ Extract stars and detect meteors on all FF files in the given folder. 

    Arguments:
        dir_path: [str] Path to the directory with FF files.
        config: [Config obj]

    Return:
        calstars_name: [str] Name of the CALSTARS file.
        ftpdetectinfo_name: [str] Name of the FTPdetectinfo file.
        ff_detected: [list] A list of FF files with detections.
        detector: [QueuedPool instance] Handle to the detector.
    """

    # Get paths to every FF bin file in a directory 
    ff_dir = dir_path
    ff_dir = os.path.abspath(ff_dir)
    ff_list = [ff_name for ff_name in sorted(os.listdir(ff_dir)) if validFFName(ff_name)]


    # Check if there are any files in the directory
    if not len(ff_list):

        print("No files for processing found!")
        return None, None, None, None


    print('Starting detection...')

    # Initialize the detector
    detector = QueuedPool(detectStarsAndMeteors, cores=-1, log=log, backup_dir=ff_dir)

    # Start the detection
    detector.startPool()

    # Give detector jobs
    for ff_name in ff_list:

        while True:
            
            # Add a job as long as there are available workers to receive it
            if detector.available_workers.value() > 0:
                print('Adding for detection:', ff_name)
                detector.addJob([ff_dir, ff_name, config], wait_time=0)
                break
            else:
                time.sleep(0.1)



    log.info('Waiting for the detection to finish...')

    # Wait for the detector to finish and close it
    detector.closePool()

    log.info('Detection finished!')

    log.info('Collecting results...')

    # Get the detection results from the queue
    detection_results = detector.getResults()


    # Save detection to disk
    calstars_name, ftpdetectinfo_name, ff_detected = saveDetections(detection_results, ff_dir, config)


    return calstars_name, ftpdetectinfo_name, ff_detected, detector
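A small, hypothetical wrapper around the function above, just to show the call shape; a loaded config object is assumed and the wrapper name is made up:

# Hypothetical sketch: run star extraction and meteor detection on one folder.
# detectStarsAndMeteorsDirectory is assumed to be in scope.
def detectNight(night_dir, config):

    calstars_name, ftpdetectinfo_name, ff_detected, detector = \
        detectStarsAndMeteorsDirectory(night_dir, config)

    # The QueuedPool handle is returned as well so the caller can do further
    # bookkeeping with it (see processNight in Example #20)
    return calstars_name, ftpdetectinfo_name, ff_detected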
Example #19
def selectFiles(config, dir_path, ff_detected):
    """ Make a list of all files which should be zipped in the given night directory. 
    
        In the list are included:
            - all TXT files
            - all FR bin files and their parent FF bin files
            - all FF bin files with detections

    Arguments:
        config: [conf object] Configuration.
        dir_path: [str] Path to the night directory.
        ff_detected: [list] A list of FF bin files with detections on them.

    Return:
        selected_files: [list] A list of files selected for compression.

    """

    ### Decide what to upload, given the upload mode ###

    upload_ffs = True
    upload_frs = True

    if config.upload_mode == 2:
        upload_ffs = False

    elif config.upload_mode == 3:
        upload_ffs = False
        upload_frs = False

    elif config.upload_mode == 4:
        upload_frs = False

    ### ###

    selected_list = []

    # Go through all files in the night directory
    for file_name in os.listdir(dir_path):

        # Take all .txt and .csv files
        if (file_name.lower().endswith('.txt')) or (
                file_name.lower().endswith('.csv')):
            selected_list.append(file_name)

        # Take all PNG, JPG, BMP images
        if ('.png' in file_name) or ('.jpg' in file_name) or ('.bmp'
                                                              in file_name):
            selected_list.append(file_name)

        # Take all field sum files
        if ('FS' in file_name) and ('fieldsum' in file_name):
            selected_list.append(file_name)

        # Take all FR bin files, and their parent FF bin files
        if upload_frs and ('FR' in file_name) and ('.bin' in file_name):

            fr_split = file_name.split('_')

            # FR file identifier which it shares with the FF bin file
            fr_id = '_'.join(fr_split[1:3])

            ff_match = None

            # Locate the parent FF bin file
            for ff_file_name in os.listdir(dir_path):

                if validFFName(ff_file_name) and (fr_id in ff_file_name):

                    ff_match = ff_file_name
                    break

            # Add the FR bin file and its parent FF file to the list
            selected_list.append(file_name)

            if ff_match is not None:
                selected_list.append(ff_match)

        # Add FF files which contain detections to the list
        if upload_ffs and (ff_detected is not None) and (file_name
                                                         in ff_detected):
            selected_list.append(file_name)

    # Take only the unique elements in the list, sorted by name
    selected_list = sorted(list(set(selected_list)))

    return selected_list
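
A rough usage sketch under assumed inputs: the directory path and the ff_detected list below are placeholders, and config is assumed to be an RMS configuration object loaded elsewhere (e.g. via cr.loadConfigFromDirectory as in Example #22) with upload_mode set.

# Hypothetical example values; in practice these come from the capture/processing pipeline
dir_path = "/home/pi/RMS_data/CapturedFiles/XX0001_20200801_203000_000000"
ff_detected = ["FF_XX0001_20200801_221030_123_0000256.fits"]

# Build the list of files that would be included in the night archive
selected_files = selectFiles(config, dir_path, ff_detected)

for file_name in selected_files:
    print(file_name)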
Example #20
0
def processNight(night_data_dir, config, detection_results=None, nodetect=False):
    """ Given the directory with FF files, run detection and archiving. 
    
    Arguments:
        night_data_dir: [str] Path to the directory with FF files.
        config: [Config obj]

    Keyword arguments:
        detection_results: [list] An optional list of detections. If None (default), detection will be run
            on the files in the folder.
        nodetect: [bool] True if detection should be skipped. False by default.

    Return:
        night_archive_dir: [str] Path to the night directory in ArchivedFiles.
        archive_name: [str] Path to the archive.
        detector: [QueuedPool instance] Handle to the detector.
    """

    # Remove final slash in the night dir
    if night_data_dir.endswith(os.sep):
        night_data_dir = night_data_dir[:-1]

    # Extract the name of the night
    night_data_dir_name = os.path.basename(os.path.abspath(night_data_dir))

    platepar = None
    kml_files = []
    recalibrated_platepars = None
    
    # If the detection should be run
    if (not nodetect):

        # If no detection was performed, run it
        if detection_results is None:

            # Run detection on the given directory
            calstars_name, ftpdetectinfo_name, ff_detected, \
                detector = detectStarsAndMeteorsDirectory(night_data_dir, config)

        # Otherwise, save detection results
        else:

            # Save CALSTARS and FTPdetectinfo to disk
            calstars_name, ftpdetectinfo_name, ff_detected = saveDetections(detection_results, \
                night_data_dir, config)

            # If the files were previously detected, there is no detector
            detector = None


        # Get the platepar file
        platepar, platepar_path, platepar_fmt = getPlatepar(config, night_data_dir)


        # Run calibration check and auto astrometry refinement
        if (platepar is not None) and (calstars_name is not None):

            # Read in the CALSTARS file
            calstars_list = CALSTARS.readCALSTARS(night_data_dir, calstars_name)

            # Run astrometry check and refinement
            platepar, fit_status = autoCheckFit(config, platepar, calstars_list)

            # If the fit was successful, apply the astrometry to detected meteors
            if fit_status:

                log.info('Astrometric calibration SUCCESSFUL!')

                # Save the refined platepar to the night directory and as default
                platepar.write(os.path.join(night_data_dir, config.platepar_name), fmt=platepar_fmt)
                platepar.write(platepar_path, fmt=platepar_fmt)

            else:
                log.info('Astrometric calibration FAILED! Using old platepar for calibration...')


            # # Calculate astrometry for meteor detections
            # applyAstrometryFTPdetectinfo(night_data_dir, ftpdetectinfo_name, platepar_path)

            # If a flat is used, disable vignetting correction
            if config.use_flat:
                platepar.vignetting_coeff = 0.0

            log.info("Recalibrating astrometry on FF files with detections...")

            # Recalibrate astrometry on every FF file and apply the calibration to detections
            recalibrated_platepars = recalibrateIndividualFFsAndApplyAstrometry(night_data_dir, \
                os.path.join(night_data_dir, ftpdetectinfo_name), calstars_list, config, platepar)

            

            log.info("Converting RMS format to UFOOrbit format...")

            # Convert the FTPdetectinfo into UFOOrbit input file
            FTPdetectinfo2UFOOrbitInput(night_data_dir, ftpdetectinfo_name, platepar_path)



            # Generate a calibration report
            log.info("Generating a calibration report...")
            try:
                generateCalibrationReport(config, night_data_dir, platepar=platepar)

            except Exception as e:
                log.debug('Generating calibration report failed with the message:\n' + repr(e))
                log.debug(repr(traceback.format_exception(*sys.exc_info())))


            # Perform single station shower association
            log.info("Performing single station shower association...")
            try:
                showerAssociation(config, [os.path.join(night_data_dir, ftpdetectinfo_name)], \
                    save_plot=True, plot_activity=True)

            except Exception as e:
                log.debug('Shower association failed with the message:\n' + repr(e))
                log.debug(repr(traceback.format_exception(*sys.exc_info())))



            # Generate the FOV KML file
            log.info("Generating a FOV KML file...")
            try:

                mask_path = None
                mask = None

                # Try loading the mask
                if os.path.exists(os.path.join(night_data_dir, config.mask_file)):
                    mask_path = os.path.join(night_data_dir, config.mask_file)

                # Try loading the default mask
                elif os.path.exists(config.mask_file):
                    mask_path = os.path.abspath(config.mask_file)

                # Load the mask if given
                if mask_path:
                    mask = loadMask(mask_path)

                if mask is not None:
                    log.info("Loaded mask: {:s}".format(mask_path))

                # Generate the KML (only the FOV is shown, without the station) - 100 km
                kml_file100 = fovKML(config, night_data_dir, platepar, mask=mask, plot_station=False, \
                    area_ht=100000)
                kml_files.append(kml_file100)


                # Generate the KML (only the FOV is shown, without the station) - 70 km
                kml_file70 = fovKML(config, night_data_dir, platepar, mask=mask, plot_station=False, \
                    area_ht=70000)
                kml_files.append(kml_file70)

                # Generate the KML (only the FOV is shown, without the station) - 25 km
                kml_file25 = fovKML(config, night_data_dir, platepar, mask=mask, plot_station=False, \
                    area_ht=25000)
                kml_files.append(kml_file25)



            except Exception as e:
                log.debug("Generating a FOV KML file failed with the message:\n" + repr(e))
                log.debug(repr(traceback.format_exception(*sys.exc_info())))


    else:
        ff_detected = []
        detector = None



    log.info('Plotting field sums...')

    # Plot field sums
    try:
        plotFieldsums(night_data_dir, config)

    except Exception as e:
        log.debug('Plotting field sums failed with message:\n' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))



    # Archive all fieldsums to one archive
    archiveFieldsums(night_data_dir)


    # List for any extra files which will be copied to the night archive directory. Full paths have to be 
    #   given
    extra_files = []


    log.info('Making a flat...')

    # Make a new flat field image
    try:
        flat_img = makeFlat(night_data_dir, config)

    except Exception as e:
        log.debug('Making a flat failed with message:\n' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))
        flat_img = None
        

    # If making the flat was successful, save it
    if flat_img is not None:

        # Save the flat in the night directory, to keep the operational flat updated
        flat_path = os.path.join(night_data_dir, os.path.basename(config.flat_file))
        saveImage(flat_path, flat_img)
        log.info('Flat saved to: ' + flat_path)

        # Copy the flat to the night's directory as well
        extra_files.append(flat_path)

    else:
        log.info('Making flat image FAILED!')


    ### Add extra files to archive

    # Add the config file to the archive too
    extra_files.append(config.config_file_name)

    # Add the mask
    if (not nodetect):
        if os.path.exists(config.mask_file):
            mask_path = os.path.abspath(config.mask_file)
            extra_files.append(mask_path)


    # Add the platepar to the archive if it exists
    if (not nodetect):
        if os.path.exists(platepar_path):
            extra_files.append(platepar_path)


    # Add the json file with recalibrated platepars to the archive
    if (not nodetect):
        recalibrated_platepars_path = os.path.join(night_data_dir, config.platepars_recalibrated_name)
        if os.path.exists(recalibrated_platepars_path):
            extra_files.append(recalibrated_platepars_path)

    # Add the FOV KML files
    if len(kml_files):
        extra_files += kml_files



    # If FFs are not uploaded, choose two to upload
    if config.upload_mode > 1:
    
        # If all FF files are not uploaded, add two FF files which were successfully recalibrated
        recalibrated_ffs = []

        # Detection may have been skipped, in which case there are no recalibrated platepars
        if recalibrated_platepars is not None:

            for ff_name in recalibrated_platepars:

                pp = recalibrated_platepars[ff_name]

                # Check if the FF was recalibrated
                if pp.auto_recalibrated:
                    recalibrated_ffs.append(os.path.join(night_data_dir, ff_name))

        # Choose two files randomly
        if len(recalibrated_ffs) > 2:
            extra_files += random.sample(recalibrated_ffs, 2)

        elif len(recalibrated_ffs) > 0:
            extra_files += recalibrated_ffs


        # If none were recalibrated
        else:

            # Create a list of all FF files
            ff_list = [os.path.join(night_data_dir, ff_name) for ff_name in os.listdir(night_data_dir) \
                if validFFName(ff_name)]

            # Add any two FF files
            extra_files += random.sample(ff_list, 2)
        

    ### ###



    # If the detection should be run
    if (not nodetect):

        # Make a CAL file and a special CAMS FTPdetectinfo if full CAMS compatibility is desired
        if (config.cams_code > 0) and (platepar is not None):

            log.info('Generating a CAMS FTPdetectinfo file...')

            # Write the CAL file to disk
            cal_file_name = writeCAL(night_data_dir, config, platepar)

            # Check if the CAL file was successfully generated
            if cal_file_name is not None:

                cams_code_formatted = "{:06d}".format(int(config.cams_code))

                # Load the FTPdetectinfo
                _, fps, meteor_list = readFTPdetectinfo(night_data_dir, ftpdetectinfo_name, \
                    ret_input_format=True)

                # Replace the camera code with the CAMS code
                for met in meteor_list:

                    # Replace the station name and the FF file format
                    ff_name = met[0]
                    ff_name = ff_name.replace('.fits', '.bin')
                    ff_name = ff_name.replace(config.stationID, cams_code_formatted)
                    met[0] = ff_name


                # Write the CAMS compatible FTPdetectinfo file
                writeFTPdetectinfo(meteor_list, night_data_dir, \
                    ftpdetectinfo_name.replace(config.stationID, cams_code_formatted),\
                    night_data_dir, cams_code_formatted, fps, calibration=cal_file_name, \
                    celestial_coords_given=(platepar is not None))



    night_archive_dir = os.path.join(os.path.abspath(config.data_dir), config.archived_dir, 
        night_data_dir_name)


    log.info('Archiving detections to ' + night_archive_dir)
    
    # Archive the detections
    archive_name = archiveDetections(night_data_dir, night_archive_dir, ff_detected, config, \
        extra_files=extra_files)


    return night_archive_dir, archive_name, detector
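
A minimal sketch of calling this routine on a finished night directory. The path below is a placeholder, config is assumed to be a loaded RMS configuration object, and logging is assumed to be set up beforehand, since processNight writes to the module-level log.

# Hypothetical night directory containing the captured FF files
night_data_dir = "/home/pi/RMS_data/CapturedFiles/XX0001_20200801_203000_000000"

# Run detection, calibration and archiving for the night
night_archive_dir, archive_name, detector = processNight(night_data_dir, config)

print("Archive written to:", archive_name)

# A detector handle is only returned if detection was actually run here
if detector is not None:
    print("Detection was run as part of processing.")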
Example #21
0
def generateCalibrationReport(config, night_dir_path, match_radius=2.0, platepar=None, show_graphs=False):
    """ Given the folder of the night, find the Calstars file, check the star fit and generate a report
        with the quality of the calibration. The report contains information about both the astrometry and
        the photometry calibration. Graphs will be saved in the given directory of the night.
    
    Arguments:
        config: [Config instance]
        night_dir_path: [str] Full path to the directory of the night.

    Keyword arguments:
        match_radius: [float] Match radius for star matching between image and catalog stars (px).
        platepar: [Platepar instance] Use this platepar instead of finding one in the folder.
        show_graphs: [bool] Show the graphs on the screen. False by default.

    Return:
        None
    """

    # Find the CALSTARS file in the given folder
    calstars_file = None
    for file_name in os.listdir(night_dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None


    # Load the calstars file
    star_list = readCALSTARS(night_dir_path, calstars_file)



    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(night_dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break


    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file:
        with open(os.path.join(night_dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print('Loaded recalibrated platepars JSON file for the calibration report...')

    ### ###


    ### Load the platepar file ###

    # Find the platepar file in the given directory if it was not given
    if platepar is None:

        # Find the platepar file
        platepar_file = None
        for file_name in os.listdir(night_dir_path):
            if file_name == config.platepar_name:
                platepar_file = file_name
                break

        if platepar_file is None:
            print('The platepar cannot be found in the night directory!')
            return None

        # Load the platepar file
        platepar = Platepar()
        platepar.read(os.path.join(night_dir_path, platepar_file))


    ### ###


    night_name = os.path.split(night_dir_path.strip(os.sep))[1]


    # Go one mag deeper than in the config
    lim_mag = config.catalog_mag_limit + 1

    # Load catalog stars (load one magnitude deeper)
    catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \
        mag_band_ratios=config.star_catalog_band_ratios)

    
    ### Take only those CALSTARS entries for which FF files exist in the folder ###

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(night_dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)


    # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs
    star_dict = {}
    ff_dict = {}
    for entry in star_list:

        ff_name, star_data = entry

        # Check if the FF from CALSTARS exists in the folder
        if ff_name not in ff_list:
            continue


        dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*dt)

        # Add the time and the stars to the dict
        star_dict[jd] = star_data
        ff_dict[jd] = ff_name

    ### ###

    # If there are no FF files in the directory, don't generate a report
    if len(star_dict) == 0:
        print('No FF files from the CALSTARS file in the directory!')
        return None


    # If the recalibrated platepars file exists, take the one with the most stars
    max_jd = 0
    using_recalib_platepars = False
    if recalibrated_platepars is not None:
        max_stars = 0
        for ff_name_temp in recalibrated_platepars:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)

            # Check that this file exists in CALSTARS and the list of FF files
            if (jd not in star_dict) or (jd not in ff_dict):
                continue

            # Check if the number of stars on this FF file is larger than before
            if len(star_dict[jd]) > max_stars:
                max_jd = jd
                max_stars = len(star_dict[jd])


        # Set a flag to indicate if using recalibrated platepars has failed
        if max_jd == 0:
            using_recalib_platepars = False
        else:

            print('Using recalibrated platepars, file:', ff_dict[max_jd])
            using_recalib_platepars = True

            # Select the platepar where the FF file has the most stars
            platepar_dict = recalibrated_platepars[ff_dict[max_jd]]
            platepar = Platepar()
            platepar.loadFromDict(platepar_dict)

            filtered_star_dict = {max_jd: star_dict[max_jd]}

            # Match stars on the image with the stars in the catalog
            n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
                filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

            max_matched_stars = n_matched


    # Otherwise take the optimal FF file for evaluation
    if (recalibrated_platepars is None) or (not using_recalib_platepars):

        # If there are more than a set number of FF files to evaluate, choose only the ones with most stars on
        #   the image
        if len(star_dict) > config.calstars_files_N:

            # Find JDs of FF files with most stars on them
            top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \
                - 1]

            filtered_star_dict = {}
            for i in top_nstars_indices:
                filtered_star_dict[list(star_dict.keys())[i]] = list(star_dict.values())[i]

            star_dict = filtered_star_dict


        # Match stars on the image with the stars in the catalog
        n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)



    # If no recalibrated platepars were found, find the image with the largest number of matched stars
    if (not using_recalib_platepars) or (max_jd == 0):

        max_jd = 0
        max_matched_stars = 0
        for jd in matched_stars:
            _, _, distances = matched_stars[jd]
            if len(distances) > max_matched_stars:
                max_jd = jd
                max_matched_stars = len(distances)

        
        # If there are no matched stars, use the image with the largest number of detected stars
        if max_matched_stars <= 2:
            max_jd = max(star_dict, key=lambda x: len(star_dict[x]))
            distances = [np.inf]



    # Take the FF file with the largest number of matched stars
    ff_name = ff_dict[max_jd]

    # Load the FF file
    ff = readFF(night_dir_path, ff_name)
    img_h, img_w = ff.avepixel.shape

    dpi = 200
    plt.figure(figsize=(ff.avepixel.shape[1]/dpi, ff.avepixel.shape[0]/dpi), dpi=dpi)

    # Take the average pixel
    img = ff.avepixel

    # Slightly adjust the levels
    img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.2, np.percentile(img, 99.99))

    plt.imshow(img, cmap='gray', interpolation='nearest')

    legend_handles = []


    # Plot detected stars
    for img_star in star_dict[max_jd]:

        y, x, _, _ = img_star

        rect_side = 5*match_radius
        square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, color='g', \
            fill=False, label='Image stars')

        plt.gca().add_artist(square_patch)

    legend_handles.append(square_patch)



    # If there are matched stars, plot them
    if max_matched_stars > 2:

        # Take the solution with the largest number of matched stars
        image_stars, matched_catalog_stars, distances = matched_stars[max_jd]

        # Plot matched stars
        for img_star in image_stars:
            x, y, _, _ = img_star

            circle_patch = plt.Circle((y, x), radius=3*match_radius, color='y', fill=False, \
                label='Matched stars')

            plt.gca().add_artist(circle_patch)

        legend_handles.append(circle_patch)


        ### Plot match residuals ###

        # Compute predicted positions of matched image stars from the catalog
        x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \
            matched_catalog_stars[:, 1], max_jd, platepar)

        img_y, img_x, _, _ = image_stars.T

        delta_x = x_predicted - img_x
        delta_y = y_predicted - img_y

        # Compute image residual and angle of the error
        res_angle = np.arctan2(delta_y, delta_x)
        res_distance = np.sqrt(delta_x**2 + delta_y**2)


        # Calculate coordinates of the beginning of the residual line
        res_x_beg = img_x + 3*match_radius*np.cos(res_angle)
        res_y_beg = img_y + 3*match_radius*np.sin(res_angle)

        # Calculate coordinates of the end of the residual line
        res_x_end = img_x + 100*np.cos(res_angle)*res_distance
        res_y_end = img_y + 100*np.sin(res_angle)*res_distance

        # Plot the 100x residuals
        for i in range(len(x_predicted)):
            res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], color='orange', \
                lw=0.5, label='100x residuals')

        legend_handles.append(res_plot[0])

        ### ###

    else:

        distances = [np.inf]
        
        # If there are no matched stars, plot large text in the middle of the screen
        plt.text(img_w/2, img_h/2, "NO MATCHED STARS!", color='r', alpha=0.5, fontsize=20, ha='center',
            va='center')


    ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ###

    # Find the faintest magnitude among matched stars
    if max_matched_stars > 2:
        faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1

    else:
        # If there are no matched stars, use the limiting magnitude from config
        faintest_mag = config.catalog_mag_limit + 1


    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], 
        platepar)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Get stars from the catalog around the defined center in a given radius
    _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, fov_radius, faintest_mag)
    ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

    # Compute image positions of all catalog stars that should be on the image
    x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd, platepar)

    # Filter all catalog stars outside the image
    temp_arr = np.c_[x_catalog, y_catalog, mag_catalog]
    temp_arr = temp_arr[temp_arr[:, 0] >= 0]
    temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]]
    temp_arr = temp_arr[temp_arr[:, 1] >= 0]
    temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]]
    x_catalog, y_catalog, mag_catalog = temp_arr.T

    # Plot catalog stars on the image
    cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \
        s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars')

    legend_handles.append(cat_stars_handle)

    ### ###


    # Add info text
    info_text = ff_dict[max_jd] + '\n' \
        + "Matched stars: {:d}/{:d}\n".format(max_matched_stars, len(star_dict[max_jd])) \
        + "Median distance: {:.2f} px\n".format(np.median(distances)) \
        + "Catalog limiting magnitude: {:.1f}".format(lim_mag)

    plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', fontsize=4, \
        color='w')

    legend = plt.legend(handles=legend_handles, prop={'size': 4}, loc='upper right')
    legend.get_frame().set_facecolor('k')
    legend.get_frame().set_edgecolor('k')
    for txt in legend.get_texts():
        txt.set_color('w')


    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, ff.avepixel.shape[1]])
    plt.ylim([ff.avepixel.shape[0], 0])

    # Remove the margins
    plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)

    plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \
        bbox_inches='tight', pad_inches=0, dpi=dpi)


    if show_graphs:
        plt.show()

    else:
        plt.clf()
        plt.close()



    if max_matched_stars > 2:

        ### Plot the photometry ###

        plt.figure(dpi=dpi)

        # Take only those stars which are within one third of the shorter image axis from the center
        photom_selection_radius = np.min([img_h, img_w])/3
        filter_indices = ((image_stars[:, 0] - img_h/2)**2 + (image_stars[:, 1] \
            - img_w/2)**2) <= photom_selection_radius**2
        star_intensities = image_stars[filter_indices, 2]
        catalog_mags = matched_catalog_stars[filter_indices, 2]

        # Plot intensities of image stars
        #star_intensities = image_stars[:, 2]
        plt.scatter(-2.5*np.log10(star_intensities), catalog_mags, s=5, c='r')

        # Fit the photometry on automated star intensities
        photom_offset, fit_stddev, _ = photometryFit(np.log10(star_intensities), catalog_mags)


        # Plot photometric offset from the platepar
        x_min, x_max = plt.gca().get_xlim()
        y_min, y_max = plt.gca().get_ylim()

        x_min_w = x_min - 3
        x_max_w = x_max + 3
        y_min_w = y_min - 3
        y_max_w = y_max + 3

        photometry_info = 'Platepar: {:+.2f}LSP {:+.2f} +/- {:.2f} \nGamma = {:.2f}'.format(platepar.mag_0, \
            platepar.mag_lev, platepar.mag_lev_stddev, platepar.gamma)

        # Plot the photometry calibration from the platepar
        logsum_arr = np.linspace(x_min_w, x_max_w, 10)
        plt.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \
            color='k', alpha=0.5)

        # Plot the fitted photometry calibration
        fit_info = "Fit: {:+.2f}LSP {:+.2f} +/- {:.2f}".format(-2.5, photom_offset, fit_stddev)
        plt.plot(logsum_arr, logsum_arr + photom_offset, label=fit_info, linestyle='--', color='red', 
            alpha=0.5)

        plt.legend()

        plt.ylabel("Catalog magnitude ({:s})".format(mag_band_str))
        plt.xlabel("Uncalibrated magnitude")

        # Set wider axis limits
        plt.xlim(x_min_w, x_max_w)
        plt.ylim(y_min_w, y_max_w)

        plt.gca().invert_yaxis()
        plt.gca().invert_xaxis()

        plt.grid()

        plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_photometry.png'), dpi=150)


        if show_graphs:
            plt.show()

        else:
            plt.clf()
            plt.close()
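
A short usage sketch under assumed inputs: the path below is a placeholder, config is assumed to be a loaded RMS configuration, and the night directory is assumed to already contain a CALSTARS file, a platepar and the FF files. The report images are written into the night directory itself.

# Hypothetical archived night directory with CALSTARS, platepar and FF files present
night_dir_path = "/home/pi/RMS_data/ArchivedFiles/XX0001_20200801_203000_000000"

# Writes *_calib_report_astrometry.jpg (and the photometry plot if enough stars matched)
generateCalibrationReport(config, night_dir_path, match_radius=2.0, show_graphs=False)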
Example #22
0
        print('FireballDetection')

    else:
        print('MeteorDetection')

    # Load the config file
    config = cr.loadConfigFromDirectory(cml_args.config, cml_args.dir_path)

    if not os.path.exists(cml_args.dir_path):
        print('{:s} directory does not exist!'.format(cml_args.dir_path))

    # Load all FF files in the given directory
    for file_name in os.listdir(cml_args.dir_path):

        # Check if the file is an FF file
        if validFFName(file_name):

            # Read the FF file
            ff = readFF(cml_args.dir_path, file_name)

            # Skip the file if it is corrupted
            if ff is None:
                continue

            # Use the fireball thresholding
            if cml_args.fireball:
                k1 = config.k1
                j1 = config.j1

            # Meteor detection
            else:
Example #23
0
def selectFiles(dir_path, ff_detected):
    """ Make a list of all files which should be zipped in the given night directory. 
    
        In the list are included:
            - all TXT files
            - all FR bin files and their parent FF bin files
            - all FF bin files with detections

    Arguments:
        dir_path: [str] Path to the night directory.
        ff_detected: [list] A list of FF bin files with detections on them.

    Return:
        selected_files: [list] A list of files selected for compression.

    """


    selected_list = []

    # Go through all files in the night directory
    for file_name in os.listdir(dir_path):

        # Take all .txt and .csv files
        if (file_name.lower().endswith('.txt')) or (file_name.lower().endswith('.csv')):
            selected_list.append(file_name)


        # Take all PNG, JPG, BMP images
        if ('.png' in file_name) or ('.jpg' in file_name) or ('.bmp' in file_name):
            selected_list.append(file_name)


        # Take all field sum files
        if ('FS' in file_name) and ('fieldsum' in file_name):
            selected_list.append(file_name)


        # Take all FR bin files, and their parent FF bin files
        if ('FR' in file_name) and ('.bin' in file_name):

            fr_split = file_name.split('_')

            # FR file identifier which it shares with the FF bin file
            fr_id = '_'.join(fr_split[1:3])

            ff_match = None

            # Locate the parent FF bin file
            for ff_file_name in os.listdir(dir_path):

                if validFFName(ff_file_name) and (fr_id in ff_file_name):
                    
                    ff_match = ff_file_name
                    break


            # Add the FR bin file and its parent FF file to the list
            selected_list.append(file_name)

            if ff_match is not None:
                selected_list.append(ff_match)


        # Add FF files which contain detections to the list
        if file_name in ff_detected:
            selected_list.append(file_name)


    # Take only the unique elements in the list, sorted by name
    selected_list = sorted(list(set(selected_list)))


    return selected_list
def detectStarsAndMeteorsDirectory(dir_path, config):
    """ Extract stars and detect meteors on all FF files in the given folder. 

    Arguments:
        dir_path: [str] Path to the directory with FF files.
        config: [Config obj]

    Return:
        calstars_name: [str] Name of the CALSTARS file.
        ftpdetectinfo_name: [str] Name of the FTPdetectinfo file.
        ff_detected: [list] A list of FF files with detections.
        detector: [QueuedPool instance] Handle to the detector.
    """

    # Get paths to every FF bin file in a directory 
    ff_dir = dir_path
    ff_dir = os.path.abspath(ff_dir)
    ff_list = [ff_name for ff_name in sorted(os.listdir(ff_dir)) if validFFName(ff_name)]


    # Check if there are any files in the directory
    if not len(ff_list):

        print("No files for processing found!")
        return None, None, None, None


    print('Starting detection...')

    # Initialize the detector
    detector = QueuedPool(detectStarsAndMeteors, cores=-1, log=log, backup_dir=ff_dir)

    # Start the detection
    detector.startPool()

    # Give detector jobs
    for ff_name in ff_list:

        while True:
            
            # Add a job as long as there are available workers to receive it
            if detector.available_workers.value() > 0:
                print('Adding for detection:', ff_name)
                detector.addJob([ff_dir, ff_name, config], wait_time=0)
                break
            else:
                time.sleep(0.1)



    log.info('Waiting for the detection to finish...')

    # Wait for the detector to finish and close it
    detector.closePool()

    log.info('Detection finished!')

    log.info('Collecting results...')

    # Get the detection results from the queue
    detection_results = detector.getResults()


    # Save detection to disk
    calstars_name, ftpdetectinfo_name, ff_detected = saveDetections(detection_results, ff_dir, config)


    return calstars_name, ftpdetectinfo_name, ff_detected, detector
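
A sketch of a typical call, assuming config was loaded beforehand and the path below is a placeholder. Note that the function returns the detector handle as a fourth value so the caller can keep track of the pool.

# Hypothetical directory of FF files from one night
dir_path = "/home/pi/RMS_data/CapturedFiles/XX0001_20200801_203000_000000"

calstars_name, ftpdetectinfo_name, ff_detected, detector = \
    detectStarsAndMeteorsDirectory(dir_path, config)

if ftpdetectinfo_name is not None:
    print("Detections saved to:", ftpdetectinfo_name)
    print("FF files with detections:", len(ff_detected))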
Example #25
0
def selectFiles(dir_path, ff_detected):
    """ Make a list of all files which should be zipped in the given night directory. 
    
        In the list are included:
            - all TXT files
            - all FR bin files and their parent FF bin files
            - all FF bin files with detections

    Arguments:
        dir_path: [str] Path to the night directory.
        ff_detected: [list] A list of FF bin files with detections on them.

    Return:
        selected_files: [list] A list of files selected for compression.

    """


    selected_list = []

    # Go through all files in the night directory
    for file_name in os.listdir(dir_path):

        # Take all .txt files
        if '.txt' in file_name:
            selected_list.append(file_name)


        # Take all PNG, JPG, BMP images
        if ('.png' in file_name) or ('.jpg' in file_name) or ('.bmp' in file_name):
            selected_list.append(file_name)


        # Take all field sum files
        if ('FS' in file_name) and ('fieldsum' in file_name):
            selected_list.append(file_name)


        # Take all FR bin files, and their parent FF bin files
        if ('FR' in file_name) and ('.bin' in file_name):

            fr_split = file_name.split('_')

            # FR file identifier which it shares with the FF bin file
            fr_id = '_'.join(fr_split[1:3])

            ff_match = None

            # Locate the parent FF bin file
            for ff_file_name in os.listdir(dir_path):

                if validFFName(ff_file_name) and (fr_id in ff_file_name):
                    
                    ff_match = ff_file_name
                    break


            # Add the FR bin file and its parent FF file to the list
            selected_list.append(file_name)

            if ff_match is not None:
                selected_list.append(ff_match)


        # Add FF files which contain detections to the list
        if file_name in ff_detected:
            selected_list.append(file_name)


    # Take only the unique elements in the list, sorted by name
    selected_list = sorted(list(set(selected_list)))


    return selected_list
Example #26
0
def stackFFs(dir_path,
             file_format,
             deinterlace=False,
             subavg=False,
             filter_bright=False,
             flat_path=None,
             file_list=None,
             mask=None):
    """ Stack FF files in the given folder. 

    Arguments:
        dir_path: [str] Path to the directory with FF files.
        file_format: [str] Image format for the stack. E.g. jpg, png, bmp

    Keyword arguments:
        deinterlace: [bool] True if the image should be deinterlaced prior to stacking. False by default.
        subavg: [bool] Whether the average pixel image should be subtracted from the max pixel image. False
            by default.
        filter_bright: [bool] Whether images with bright backgrounds (after average subtraction) should be
            skipped. False by default.
        flat_path: [str] Path to the flat calibration file. None by default. Will only be used if subavg is
            False.
        file_list: [list] A list of files for stacking. None by default, in which case all FF files in the
            given directory will be used.
        mask: [MaskStructure] Mask to apply to the stack. None by default.

    Return:
        stack_path, merge_img:
            - stack_path: [str] Path of the saved stack.
            - merge_img: [ndarray] Numpy array of the stacked image.
    """

    # Load the flat if it was given
    flat = None
    if flat_path != '':

        # Try finding the default flat
        if flat_path is None:
            flat_path = dir_path
            flat_file = 'flat.bmp'

        else:
            flat_path, flat_file = os.path.split(flat_path)

        flat_full_path = os.path.join(flat_path, flat_file)
        if os.path.isfile(flat_full_path):

            # Load the flat
            flat = loadFlat(flat_path, flat_file)

            print('Loaded flat:', flat_full_path)

    first_img = True

    n_stacked = 0
    total_ff_files = 0
    merge_img = None

    # If the list of files was not given, take all files in the given folder
    if file_list is None:
        file_list = sorted(os.listdir(dir_path))

    # List all FF files in the current dir
    for ff_name in file_list:
        if validFFName(ff_name):

            # Load FF file
            ff = readFF(dir_path, ff_name)

            # Skip the file if it is corrupted
            if ff is None:
                continue

            total_ff_files += 1

            maxpixel = ff.maxpixel
            avepixel = ff.avepixel

            # Deinterlace the images
            if deinterlace:
                maxpixel = deinterlaceBlend(maxpixel)
                avepixel = deinterlaceBlend(avepixel)

            # If the flat was given, apply it to the image, only if no subtraction is done
            if (flat is not None) and not subavg:
                maxpixel = applyFlat(maxpixel, flat)
                avepixel = applyFlat(avepixel, flat)

            # Reject the image if the median subtracted image is too bright. This usually means that there
            #   are clouds on the image which can ruin the stack
            if filter_bright:

                img = maxpixel - avepixel

                # Compute surface brightness
                median = np.median(img)

                # Compute top detection pixels
                top_brightness = np.percentile(img, 99.9)

                # Reject all images where the median brightness is high
                # Preserve images with very bright detections
                if (median > 10) and (top_brightness <
                                      (2**(8 * img.itemsize) - 10)):
                    print('Skipping: ', ff_name, 'median:', median,
                          'top brightness:', top_brightness)
                    continue

            # Subtract the average from maxpixel
            if subavg:
                img = maxpixel - avepixel

            else:
                img = maxpixel

            if first_img:
                merge_img = np.copy(img)
                first_img = False
                continue

            print('Stacking: ', ff_name)

            # Blend images 'if lighter'
            merge_img = blendLighten(merge_img, img)

            n_stacked += 1

    # If the number of stacked images is less than 20% of the given images, stack without filtering
    if filter_bright and (n_stacked < 0.2 * total_ff_files):
        return stackFFs(dir_path,
                        file_format,
                        deinterlace=deinterlace,
                        subavg=subavg,
                        filter_bright=False,
                        flat_path=flat_path,
                        file_list=file_list)

    # If no images were stacked, do nothing
    if n_stacked == 0:
        return None, None

    # Extract the name of the night directory which contains the FF files
    night_dir = os.path.basename(dir_path)

    stack_path = os.path.join(
        dir_path,
        night_dir + '_stack_{:d}_meteors.'.format(n_stacked) + file_format)

    print("Saving stack to:", stack_path)

    # Stretch the levels
    merge_img = adjustLevels(merge_img, np.percentile(merge_img, 0.5), 1.3,
                             np.percentile(merge_img, 99.9))

    # Apply the mask, if given
    if mask is not None:
        merge_img = MaskImage.applyMask(merge_img, mask)

    # Save the blended image
    saveImage(stack_path, merge_img)

    return stack_path, merge_img
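
A usage sketch under the assumption that the (placeholder) directory below contains FF files. Here filter_bright is used to skip cloudy frames, the result is saved as a JPG, and with the default flat_path a flat.bmp in the same directory is picked up automatically if present.

# Hypothetical night directory with FF files
dir_path = "/home/pi/RMS_data/ArchivedFiles/XX0001_20200801_203000_000000"

stack_path, merge_img = stackFFs(dir_path, 'jpg', subavg=False, filter_bright=True)

if stack_path is not None:
    print("Stack saved to:", stack_path)
else:
    print("No FF files could be stacked.")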
Example #27
0
                    if archive_dir_name.startswith(config.stationID):
                        if os.path.isdir(os.path.join(config.data_dir, config.archived_dir, \
                            archive_dir_name)):

                            archive_dir_list.append(archive_dir_name)

                # If there are any archived dirs, choose the last one
                if archive_dir_list:

                    latest_night_archive_dir = os.path.join(config.data_dir, config.archived_dir, \
                        archive_dir_list[-1])

                    # Make sure there are FF files in the chosen archived dir
                    ffs_latest_night_archive = [ff_name for ff_name \
                        in os.listdir(latest_night_archive_dir) if validFFName(ff_name)]

                    if len(ffs_latest_night_archive):

                        log.info(
                            "Starting a slideshow of {:d} detections from the previous night."
                            .format(len(ffs_latest_night_archive)))

                        # Start the slide show
                        slideshow_view = LiveViewer(latest_night_archive_dir, slideshow=True, \
                            banner_text="Last night's detections")
                        slideshow_view.start()

                    else:
                        log.info(
                            "No detections from the previous night to show as a slideshow!"
Example #28
0
def generateCalibrationReport(config,
                              night_dir_path,
                              match_radius=2.0,
                              platepar=None,
                              show_graphs=False):
    """ Given the folder of the night, find the Calstars file, check the star fit and generate a report
        with the quality of the calibration. The report contains information about both the astrometry and
        the photometry calibration. Graphs will be saved in the given directory of the night.
    
    Arguments:
        config: [Config instance]
        night_dir_path: [str] Full path to the directory of the night.

    Keyword arguments:
        match_radius: [float] Match radius for star matching between image and catalog stars (px).
        platepar: [Platepar instance] Use this platepar instead of finding one in the folder.
        show_graphs: [bool] Show the graphs on the screen. False by default.

    Return:
        None
    """

    # Find the CALSTARS file in the given folder
    calstars_file = None
    for file_name in os.listdir(night_dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None

    # Load the calstars file
    star_list = readCALSTARS(night_dir_path, calstars_file)

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(night_dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file:
        with open(os.path.join(night_dir_path,
                               platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print(
                'Loaded recalibrated platepars JSON file for the calibration report...'
            )

    ### ###

    ### Load the platepar file ###

    # Find the platepar file in the given directory if it was not given
    if platepar is None:

        # Find the platepar file
        platepar_file = None
        for file_name in os.listdir(night_dir_path):
            if file_name == config.platepar_name:
                platepar_file = file_name
                break

        if platepar_file is None:
            print('The platepar cannot be found in the night directory!')
            return None

        # Load the platepar file
        platepar = Platepar()
        platepar.read(os.path.join(night_dir_path, platepar_file),
                      use_flat=config.use_flat)

    ### ###

    night_name = os.path.split(night_dir_path.strip(os.sep))[1]

    # Go one mag deeper than in the config
    lim_mag = config.catalog_mag_limit + 1

    # Load catalog stars (load one magnitude deeper)
    catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \
        mag_band_ratios=config.star_catalog_band_ratios)

    ### Take only those CALSTARS entries for which FF files exist in the folder ###

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(night_dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs
    star_dict = {}
    ff_dict = {}
    for entry in star_list:

        ff_name, star_data = entry

        # Check if the FF from CALSTARS exists in the folder
        if ff_name not in ff_list:
            continue

        dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*dt)

        # Add the time and the stars to the dict
        star_dict[jd] = star_data
        ff_dict[jd] = ff_name

    ### ###

    # If there are no FF files in the directory, don't generate a report
    if len(star_dict) == 0:
        print('No FF files from the CALSTARS file in the directory!')
        return None

    # If the recalibrated platepars file exists, take the one with the most stars
    max_jd = 0
    using_recalib_platepars = False
    if recalibrated_platepars is not None:
        max_stars = 0
        for ff_name_temp in recalibrated_platepars:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp,
                                 config.fps,
                                 ret_milliseconds=True)
            jd = date2JD(*dt)

            # Check that this file exists in CALSTARS and the list of FF files
            if (jd not in star_dict) or (jd not in ff_dict):
                continue

            # Check if the number of stars on this FF file is larger than before
            if len(star_dict[jd]) > max_stars:
                max_jd = jd
                max_stars = len(star_dict[jd])

        # Set a flag to indicate if using recalibrated platepars has failed
        if max_jd == 0:
            using_recalib_platepars = False
        else:

            print('Using recalibrated platepars, file:', ff_dict[max_jd])
            using_recalib_platepars = True

            # Select the platepar where the FF file has the most stars
            platepar_dict = recalibrated_platepars[ff_dict[max_jd]]
            platepar = Platepar()
            platepar.loadFromDict(platepar_dict, use_flat=config.use_flat)

            filtered_star_dict = {max_jd: star_dict[max_jd]}

            # Match stars on the image with the stars in the catalog
            n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
                filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

            max_matched_stars = n_matched

    # Otherwise take the optimal FF file for evaluation
    if (recalibrated_platepars is None) or (not using_recalib_platepars):

        # If there are more than a set number of FF files to evaluate, choose only the ones with most stars on
        #   the image
        if len(star_dict) > config.calstars_files_N:

            # Find JDs of FF files with most stars on them
            top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \
                - 1]

            filtered_star_dict = {}
            for i in top_nstars_indices:
                filtered_star_dict[list(star_dict.keys())[i]] = list(
                    star_dict.values())[i]

            star_dict = filtered_star_dict

        # Match stars on the image with the stars in the catalog
        n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

    # If no recalibrated platepars were found, find the image with the largest number of matched stars
    if (not using_recalib_platepars) or (max_jd == 0):

        max_jd = 0
        max_matched_stars = 0
        for jd in matched_stars:
            _, _, distances = matched_stars[jd]
            if len(distances) > max_matched_stars:
                max_jd = jd
                max_matched_stars = len(distances)

        # If there are no matched stars, use the image with the largest number of detected stars
        if max_matched_stars <= 2:
            max_jd = max(star_dict, key=lambda x: len(star_dict[x]))
            distances = [np.inf]

    # Take the FF file with the largest number of matched stars
    ff_name = ff_dict[max_jd]

    # Load the FF file
    ff = readFF(night_dir_path, ff_name)
    img_h, img_w = ff.avepixel.shape

    dpi = 200
    plt.figure(figsize=(ff.avepixel.shape[1] / dpi,
                        ff.avepixel.shape[0] / dpi),
               dpi=dpi)

    # Take the average pixel
    img = ff.avepixel

    # Slightly adjust the levels
    img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.3,
                             np.percentile(img, 99.99))

    plt.imshow(img, cmap='gray', interpolation='nearest')

    legend_handles = []

    # Plot detected stars
    for img_star in star_dict[max_jd]:

        y, x, _, _ = img_star

        rect_side = 5 * match_radius
        square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, color='g', \
            fill=False, label='Image stars')

        plt.gca().add_artist(square_patch)

    legend_handles.append(square_patch)

    # If there are matched stars, plot them
    if max_matched_stars > 2:

        # Take the solution with the largest number of matched stars
        image_stars, matched_catalog_stars, distances = matched_stars[max_jd]

        # Plot matched stars
        for img_star in image_stars:
            x, y, _, _ = img_star

            circle_patch = plt.Circle((y, x), radius=3*match_radius, color='y', fill=False, \
                label='Matched stars')

            plt.gca().add_artist(circle_patch)

        legend_handles.append(circle_patch)

        ### Plot match residuals ###

        # Compute predicted positions of matched image stars from the catalog
        x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \
            matched_catalog_stars[:, 1], max_jd, platepar)

        img_y, img_x, _, _ = image_stars.T

        delta_x = x_predicted - img_x
        delta_y = y_predicted - img_y

        # Compute image residual and angle of the error
        res_angle = np.arctan2(delta_y, delta_x)
        res_distance = np.sqrt(delta_x**2 + delta_y**2)

        # Calculate coordinates of the beginning of the residual line
        res_x_beg = img_x + 3 * match_radius * np.cos(res_angle)
        res_y_beg = img_y + 3 * match_radius * np.sin(res_angle)

        # Calculate coordinates of the end of the residual line
        res_x_end = img_x + 100 * np.cos(res_angle) * res_distance
        res_y_end = img_y + 100 * np.sin(res_angle) * res_distance

        # Plot the 100x residuals
        for i in range(len(x_predicted)):
            res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], color='orange', \
                lw=0.5, label='100x residuals')

        legend_handles.append(res_plot[0])

        ### ###

    else:

        distances = [np.inf]

        # If there are no matched stars, plot large text in the middle of the screen
        plt.text(img_w / 2,
                 img_h / 2,
                 "NO MATCHED STARS!",
                 color='r',
                 alpha=0.5,
                 fontsize=20,
                 ha='center',
                 va='center')

    ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ###

    # Find the faintest magnitude among matched stars
    if max_matched_stars > 2:
        faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1

    else:
        # If there are no matched stars, use the limiting magnitude from config
        faintest_mag = config.catalog_mag_limit + 1

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res / 2],
                                    [platepar.Y_res / 2], [1], platepar)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Get stars from the catalog around the defined center in a given radius
    _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c,
                                         fov_radius, faintest_mag)
    ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

    # Compute image positions of all catalog stars that should be on the image
    x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd,
                                       platepar)

    # Filter all catalog stars outside the image
    temp_arr = np.c_[x_catalog, y_catalog, mag_catalog]
    temp_arr = temp_arr[temp_arr[:, 0] >= 0]
    temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]]
    temp_arr = temp_arr[temp_arr[:, 1] >= 0]
    temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]]
    x_catalog, y_catalog, mag_catalog = temp_arr.T

    # Plot catalog stars on the image
    cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \
        s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars')

    legend_handles.append(cat_stars_handle)

    ### ###

    # Add info text in the corner
    info_text = ff_dict[max_jd] + '\n' \
        + "Matched stars within {:.1f} px radius: {:d}/{:d} \n".format(match_radius, max_matched_stars, \
            len(star_dict[max_jd])) \
        + "Median distance = {:.2f} px\n".format(np.median(distances)) \
        + "Catalog lim mag = {:.1f}".format(lim_mag)

    plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', fontsize=4, \
        color='w', family='monospace')

    legend = plt.legend(handles=legend_handles,
                        prop={'size': 4},
                        loc='upper right')
    legend.get_frame().set_facecolor('k')
    legend.get_frame().set_edgecolor('k')
    for txt in legend.get_texts():
        txt.set_color('w')

    ### Add FOV info (centre, size) ###

    # Mark FOV centre
    plt.scatter(platepar.X_res / 2,
                platepar.Y_res / 2,
                marker='+',
                s=20,
                c='r',
                zorder=4)

    # Compute FOV centre alt/az
    azim_centre, alt_centre = raDec2AltAz(max_jd, platepar.lon, platepar.lat,
                                          RA_c, dec_c)

    # Compute FOV size
    fov_h, fov_v = computeFOVSize(platepar)

    # Compute the rotation wrt. horizon
    rot_horizon = rotationWrtHorizon(platepar)

    fov_centre_text = "Azim  = {:6.2f}$\\degree$\n".format(azim_centre) \
                    + "Alt   = {:6.2f}$\\degree$\n".format(alt_centre) \
                    + "Rot h = {:6.2f}$\\degree$\n".format(rot_horizon) \
                    + "FOV h = {:6.2f}$\\degree$\n".format(fov_h) \
                    + "FOV v = {:6.2f}$\\degree$".format(fov_v) \

    plt.text(10, platepar.Y_res - 10, fov_centre_text, bbox=dict(facecolor='black', alpha=0.5), \
        va='bottom', ha='left', fontsize=4, color='w', family='monospace')

    ### ###

    # Plot RA/Dec gridlines #
    addEquatorialGrid(plt, platepar, max_jd)

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, ff.avepixel.shape[1]])
    plt.ylim([ff.avepixel.shape[0], 0])

    # Remove the margins
    plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)

    plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \
        bbox_inches='tight', pad_inches=0, dpi=dpi)

    if show_graphs:
        plt.show()

    else:
        plt.clf()
        plt.close()

    if max_matched_stars > 2:

        ### PHOTOMETRY FIT ###

        # If a flat is used, set the vignetting coeff to 0
        if config.use_flat:
            platepar.vignetting_coeff = 0.0

        # Extract intensities and magnitudes
        star_intensities = image_stars[:, 2]
        catalog_mags = matched_catalog_stars[:, 2]

        # Compute radius of every star from image centre
        radius_arr = np.hypot(image_stars[:, 0] - img_h / 2,
                              image_stars[:, 1] - img_w / 2)

        # Fit the photometry on automated star intensities (use the fixed vignetting coeff, use robust fit)
        photom_params, fit_stddev, fit_resid, star_intensities, radius_arr, catalog_mags = \
            photometryFitRobust(star_intensities, radius_arr, catalog_mags, \
            fixed_vignetting=platepar.vignetting_coeff)

        photom_offset, _ = photom_params

        ### ###
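        # The fit above is a simple zero-point calibration of the form
        #   catalog_mag ~ -2.5*log10(intensity corrected for vignetting) + photom_offset,
        # so photom_offset is the magnitude zero-point plotted as the fitted line below,
        # and fit_stddev is the scatter of the catalog magnitudes around that line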

        ### PLOT PHOTOMETRY ###
        # Note: An almost identical code exists in RMS.Astrometry.SkyFit in the PlateTool.photometry function

        dpi = 130
        fig_p, (ax_p, ax_r) = plt.subplots(nrows=2, facecolor=None, figsize=(6.0, 7.0), dpi=dpi, \
            gridspec_kw={'height_ratios':[2, 1]})

        # Plot raw star intensities
        ax_p.scatter(-2.5 * np.log10(star_intensities),
                     catalog_mags,
                     s=5,
                     c='r',
                     alpha=0.5,
                     label="Raw")

        # If no flat is used, also show the intensities corrected for vignetting
        if not config.use_flat:

            # Plot intensities of image stars corrected for vignetting
            lsp_corr_arr = np.log10(correctVignetting(star_intensities, radius_arr, \
                platepar.vignetting_coeff))
            ax_p.scatter(-2.5*lsp_corr_arr, catalog_mags, s=5, c='b', alpha=0.5, \
                label="Corrected for vignetting")

        # Plot photometric offset from the platepar
        x_min, x_max = ax_p.get_xlim()
        y_min, y_max = ax_p.get_ylim()

        x_min_w = x_min - 3
        x_max_w = x_max + 3
        y_min_w = y_min - 3
        y_max_w = y_max + 3

        photometry_info = "Platepar: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(platepar.mag_0, \
            platepar.mag_lev, platepar.mag_lev_stddev) \
            + "\nVignetting coeff = {:.5f}".format(platepar.vignetting_coeff) \
            + "\nGamma = {:.2f}".format(platepar.gamma)

        # Plot the photometry calibration from the platepar
        logsum_arr = np.linspace(x_min_w, x_max_w, 10)
        ax_p.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \
            color='k', alpha=0.5)

        # Plot the fitted photometry calibration
        fit_info = "Fit: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(
            -2.5, photom_offset, fit_stddev)
        ax_p.plot(logsum_arr,
                  logsum_arr + photom_offset,
                  label=fit_info,
                  linestyle='--',
                  color='b',
                  alpha=0.75)

        ax_p.legend()

        ax_p.set_ylabel("Catalog magnitude ({:s})".format(mag_band_str))
        ax_p.set_xlabel("Uncalibrated magnitude")

        # Set wider axis limits
        ax_p.set_xlim(x_min_w, x_max_w)
        ax_p.set_ylim(y_min_w, y_max_w)

        ax_p.invert_yaxis()
        ax_p.invert_xaxis()

        ax_p.grid()

        ### Plot photometry vs radius ###

        img_diagonal = np.hypot(img_h / 2, img_w / 2)

        # Plot photometry residuals (including vignetting)
        ax_r.scatter(radius_arr, fit_resid, c='b', alpha=0.75, s=5, zorder=3)

        # Plot a zero line
        ax_r.plot(np.linspace(0, img_diagonal, 10), np.zeros(10), linestyle='dashed', alpha=0.5, \
            color='k')

        # Plot only when no flat is used
        if not config.use_flat:

            # Compute and plot the fit residuals without the vignetting correction (vs. radius from centre)
            fit_resids_novignetting = catalog_mags - photomLine((np.array(star_intensities), \
                np.array(radius_arr)), photom_offset, 0.0)
            ax_r.scatter(radius_arr,
                         fit_resids_novignetting,
                         s=5,
                         c='r',
                         alpha=0.5,
                         zorder=3)

            px_sum_tmp = 1000
            radius_arr_tmp = np.linspace(0, img_diagonal, 50)

            # Plot vignetting loss curve
            vignetting_loss = 2.5*np.log10(px_sum_tmp) \
                - 2.5*np.log10(correctVignetting(px_sum_tmp, radius_arr_tmp, \
                    platepar.vignetting_coeff))
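            # The dotted curve traces the magnitude difference between an uncorrected and a
            # vignetting-corrected intensity as a function of radius, i.e. the trend the
            # uncorrected residuals (red) are expected to follow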

            ax_r.plot(radius_arr_tmp,
                      vignetting_loss,
                      linestyle='dotted',
                      alpha=0.5,
                      color='k')

        ax_r.grid()

        ax_r.set_ylabel("Fit residuals (mag)")
        ax_r.set_xlabel("Radius from centre (px)")

        ax_r.set_xlim(0, img_diagonal)

        ### ###

        plt.tight_layout()

        plt.savefig(os.path.join(night_dir_path,
                                 night_name + '_calib_report_photometry.png'),
                    dpi=150)

        if show_graphs:
            plt.show()

        else:
            plt.clf()
            plt.close()
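
A minimal standalone sketch of the zero-point fit used in the photometry report above, assuming the simple model catalog_mag ~ -2.5*log10(intensity) + offset and ignoring vignetting. The function name and the numbers in the usage example are illustrative only, not part of the RMS API.

import numpy as np


def fitPhotometricOffsetSketch(star_intensities, catalog_mags):
    """ Estimate the photometric zero-point as the median offset between catalog magnitudes
        and uncalibrated instrumental magnitudes (-2.5*log10 of the summed pixel intensity).
        Returns the offset and the standard deviation of the residuals. """

    instrumental_mags = -2.5*np.log10(np.array(star_intensities, dtype=np.float64))

    # Use the median rather than the mean so a few misidentified stars don't skew the fit
    photom_offset = np.median(np.array(catalog_mags) - instrumental_mags)

    residuals = np.array(catalog_mags) - (instrumental_mags + photom_offset)

    return photom_offset, np.std(residuals)


# Usage with made-up intensities and magnitudes
offset, stddev = fitPhotometricOffsetSketch([12000, 8500, 30000, 4200], [8.1, 8.5, 7.1, 9.3])
print("Offset = {:+.2f} +/- {:.2f}".format(offset, stddev))
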
Example #29
0
def runCapture(config, duration=None, video_file=None, nodetect=False, detect_end=False, \
    upload_manager=None, resume_capture=False):
    """ Run capture and compression for the given time.given
    
    Arguments:
        config: [config object] Configuration read from the .config file.

    Keyword arguments:
        duration: [float] Time in seconds to capture. None by default.
        video_file: [str] Path to the video file, if it was given as the video source. None by default.
        nodetect: [bool] If True, detection will not be performed. False by default.
        detect_end: [bool] If True, detection will be performed at the end of the night, when capture
            finishes. False by default.
        upload_manager: [UploadManager object] A handle to the UploadManager, which handles uploading files to
            the central server. None by default.
        resume_capture: [bool] Resume capture in the last data directory in CapturedFiles. False by default.

    Return:
        night_archive_dir: [str] Path to the archive folder of the processed night.
    """

    global STOP_CAPTURE

    # Check if resuming capture to the last capture directory
    night_data_dir_name = None
    if resume_capture:

        log.info("Resuming capture in the last capture directory...")

        # Find the latest capture directory
        capturedfiles_path = os.path.join(os.path.abspath(config.data_dir),
                                          config.captured_dir)
        most_recent_dir_time = 0
        for dir_name in sorted(os.listdir(capturedfiles_path)):

            dir_path_check = os.path.join(capturedfiles_path, dir_name)

            # Check it's a directory
            if os.path.isdir(dir_path_check):

                # Check if it starts with the correct station code
                if dir_name.startswith(str(config.stationID)):

                    dir_mod_time = os.path.getmtime(dir_path_check)

                    # Check that it is the most recent directory
                    if (night_data_dir_name is None) or (dir_mod_time >
                                                         most_recent_dir_time):
                        night_data_dir_name = dir_name
                        night_data_dir = dir_path_check
                        most_recent_dir_time = dir_mod_time

        if night_data_dir_name is None:
            log.info(
                "Previous capture directory could not be found! Creating a new one..."
            )

        else:
            log.info("Previous capture directory found: {:s}".format(
                night_data_dir))

        # Resume run is finished now, reset resume flag
        cml_args.resume = False
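        # Note: cml_args is assumed to be a module-level argparse namespace defined by the
        # calling script; it is not an argument of this function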

    # Make a name for the capture data directory
    if night_data_dir_name is None:

        # Create a directory for captured files
        night_data_dir_name = str(config.stationID) + '_' \
            + datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S_%f')

        # Full path to the data directory
        night_data_dir = os.path.join(os.path.abspath(config.data_dir), config.captured_dir, \
            night_data_dir_name)

    # Wait before the capture starts, unless resuming or processing a video file
    if (not resume_capture) and (video_file is None):
        log.info("Waiting {:d} seconds before capture start...".format(
            int(config.capture_wait_seconds)))
        time.sleep(config.capture_wait_seconds)

    # Add a note about Patreon supporters
    print("################################################################")
    print("Thanks to our Patreon supporters in the 'Dinosaur Killer' class:")
    print("- Myron Valenta")
    print("https://www.patreon.com/globalmeteornetwork")
    print("\n\n\n" \
        + "       .:'       .:'        .:'       .:'  \n"\
        + "   _.::'     _.::'      _.::'     _.::'    \n"\
        + "  (_.'      (_.'       (_.'      (_.'      \n"\
        + "                         __                \n"\
        + "                        / _)               \n"\
        + "_\\/_          _/\\/\\/\\_/ /             _\\/_ \n"\
        + "/o\\         _|         /              //o\\ \n"\
        + " |         _|  (  | (  |                |  \n"\
        + "_|____    /__.-'|_|--|_|          ______|__\n")
    print("################################################################")

    # Make a directory for the night
    mkdirP(night_data_dir)

    log.info('Data directory: ' + night_data_dir)

    # Copy the used config file to the capture directory
    if os.path.isfile(config.config_file_name):
        try:
            shutil.copy2(config.config_file_name,
                         os.path.join(night_data_dir, ".config"))
        except:
            log.error("Cannot copy the config file to the capture directory!")

    # Get the platepar file
    platepar, platepar_path, platepar_fmt = getPlatepar(config, night_data_dir)

    # If the platepar is not None, set the FOV from it
    if platepar is not None:
        config.fov_w = platepar.fov_h
        config.fov_h = platepar.fov_v

    log.info('Initializing frame buffers...')
    ### For some reason, the RPi 3 does not like memory chunks whose size is a multiple of its L2
    ### cache size (512 kB). When such a memory chunk is provided, the compression becomes 10x slower
    ### than usual. We are applying a dirty fix here where we just add an extra image row and column
    ### if such a memory chunk would be created. The compression is performed, and the image is cropped
    ### back to its original dimensions.
    array_pad = 0

    # Check if the total frame buffer size is a multiple of the RPi3 L2 cache size, and add padding if so
    if (256 * config.width * config.height) % (512 * 1024) == 0:
        array_pad = 1
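    # e.g. a 1280x720 sensor hits this case exactly: 256*1280*720 = 235,929,600 bytes,
    # which is 450 x 524,288 bytes (512 kB), so one extra row and column is added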

    # Init arrays for parallel compression on 2 cores
    sharedArrayBase = multiprocessing.Array(
        ctypes.c_uint8,
        256 * (config.width + array_pad) * (config.height + array_pad))
    sharedArray = np.ctypeslib.as_array(sharedArrayBase.get_obj())
    sharedArray = sharedArray.reshape(256, (config.height + array_pad),
                                      (config.width + array_pad))
    startTime = multiprocessing.Value('d', 0.0)

    sharedArrayBase2 = multiprocessing.Array(
        ctypes.c_uint8,
        256 * (config.width + array_pad) * (config.height + array_pad))
    sharedArray2 = np.ctypeslib.as_array(sharedArrayBase2.get_obj())
    sharedArray2 = sharedArray2.reshape(256, (config.height + array_pad),
                                        (config.width + array_pad))
    startTime2 = multiprocessing.Value('d', 0.0)
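    # Two independent 256-frame buffers (each with its own start time) are used so the
    # capture process can fill one block while the compressor works on the other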

    log.info('Initializing frame buffers done!')

    # Check if the detection should be performed or not
    if nodetect:
        detector = None

    else:

        if detect_end:

            # Delay detection until the end of the night
            delay_detection = duration

        else:
            # Delay the detection for 2 minutes after capture start (helps stability)
            delay_detection = 120

        # Add an additional postprocessing delay
        delay_detection += config.postprocess_delay

        # Set a flag file to indicate that previous files are being loaded (if any)
        capture_resume_file_path = os.path.join(
            config.data_dir, config.capture_resume_flag_file)
        with open(capture_resume_file_path, 'w') as f:
            pass

        # Initialize the detector
        detector = QueuedPool(detectStarsAndMeteors, cores=1, log=log, delay_start=delay_detection, \
            backup_dir=night_data_dir)
        detector.startPool()

        # If the capture is being resumed into the directory, load all previously saved FF files
        if resume_capture:

            # Load all previously processed FF files
            for i, ff_name in enumerate(sorted(os.listdir(night_data_dir))):

                # Every 50 files loaded, update the flag file
                if i % 50 == 0:
                    with open(capture_resume_file_path, 'a') as f:
                        f.write("{:d}\n".format(i))

                # Check if the file is a valid FF file
                ff_path = os.path.join(night_data_dir, ff_name)
                if os.path.isfile(ff_path) and (str(
                        config.stationID) in ff_name) and validFFName(ff_name):

                    # Add the FF file to the detector
                    detector.addJob([night_data_dir, ff_name, config],
                                    wait_time=0.005)
                    log.info("Added existing FF file for detection: {:s}".format(ff_name))

        # Remove the flag file
        if os.path.isfile(capture_resume_file_path):
            try:
                os.remove(capture_resume_file_path)
            except:
                log.error("There was an error during removing the capture resume flag file: " \
                    + capture_resume_file_path)

    # Initialize buffered capture
    bc = BufferedCapture(sharedArray,
                         startTime,
                         sharedArray2,
                         startTime2,
                         config,
                         video_file=video_file)

    # Initialize the live image viewer
    if config.live_maxpixel_enable:

        # Enable showing the live JPG
        config.live_jpg = True

        live_jpg_path = os.path.join(config.data_dir, 'live.jpg')

        live_view = LiveViewer(live_jpg_path,
                               image=True,
                               slideshow=False,
                               banner_text="Live")
        live_view.start()

    else:
        live_view = None

    # Initialize compression
    compressor = Compressor(night_data_dir,
                            sharedArray,
                            startTime,
                            sharedArray2,
                            startTime2,
                            config,
                            detector=detector)

    # Start buffered capture
    bc.startCapture()

    # Init and start the compression
    compressor.start()

    # Capture until Ctrl+C is pressed
    wait(duration, compressor)

    # If capture was manually stopped, end capture
    if STOP_CAPTURE:
        log.info('Ending capture...')

    # Stop the capture
    log.debug('Stopping capture...')
    bc.stopCapture()
    log.debug('Capture stopped')

    dropped_frames = bc.dropped_frames
    log.info('Total number of late or dropped frames: ' + str(dropped_frames))

    # Stop the compressor
    log.debug('Stopping compression...')
    detector = compressor.stop()

    # Free shared memory after the compressor is done
    try:
        log.debug('Freeing frame buffers...')
        del sharedArrayBase
        del sharedArray
        del sharedArrayBase2
        del sharedArray2

    except Exception as e:
        log.debug('Freeing frame buffers failed with error: ' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))

    log.debug('Compression stopped')

    if live_view is not None:

        # Stop the live viewer
        log.debug('Stopping live viewer...')

        live_view.stop()
        live_view.join()
        del live_view
        live_view = None

        log.debug('Live view stopped')

    # If detection should be performed
    if not nodetect:

        try:
            log.info('Finishing up the detection, ' + str(detector.input_queue.qsize()) \
                + ' files to process...')
        except:
            print(
                'Finishing up the detection... error when getting input queue size!'
            )

        # Reset the Ctrl+C to KeyboardInterrupt
        resetSIGINT()

        try:

            # If there are some more files to process, process them on more cores
            if detector.input_queue.qsize() > 0:

                # Let the detector use all cores, but leave 2 free
                available_cores = multiprocessing.cpu_count() - 2

                if available_cores > 1:

                    log.info('Running the detection on {:d} cores...'.format(
                        available_cores))

                    # Start the detector
                    detector.updateCoreNumber(cores=available_cores)

            log.info('Waiting for the detection to finish...')

            # Wait for the detector to finish and close it
            detector.closePool()

            log.info('Detection finished!')

        except KeyboardInterrupt:

            log.info('Ctrl + C pressed, exiting...')

            if upload_manager is not None:

                # Stop the upload manager
                if upload_manager.is_alive():
                    log.debug('Closing upload manager...')
                    upload_manager.stop()
                    del upload_manager

            # Terminate the detector
            if detector is not None:
                del detector

            sys.exit()

        # Set the Ctrl+C back to 'soft' program kill
        setSIGINT()

        ### SAVE DETECTIONS TO FILE

        log.info('Collecting results...')

        # Get the detection results from the queue
        detection_results = detector.getResults()

    else:

        detection_results = []

    # Save detection to disk and archive detection
    night_archive_dir, archive_name, _ = processNight(night_data_dir, config, \
        detection_results=detection_results, nodetect=nodetect)

    # Put the archive up for upload
    if upload_manager is not None:
        log.info('Adding file to upload list: ' + archive_name)
        upload_manager.addFiles([archive_name])
        log.info('File added...')

        # Delay the upload, if the delay is given
        upload_manager.delayNextUpload(delay=60 * config.upload_delay)

    # Delete detector backup files
    if detector is not None:
        detector.deleteBackupFiles()

    # If the capture was run for a limited time, run the upload right away
    if (duration is not None) and (upload_manager is not None):
        log.info('Uploading data before exiting...')
        upload_manager.uploadData()

    # Run the external script
    runExternalScript(night_data_dir, night_archive_dir, config)

    # If capture was manually stopped, end program
    if STOP_CAPTURE:

        log.info('Ending program')

        # Stop the upload manager
        if upload_manager is not None:
            if upload_manager.is_alive():
                upload_manager.stop()
                log.info('Closing upload manager...')

        sys.exit()

    return night_archive_dir
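
The capture-resume scan at the top of runCapture can also be expressed more compactly. A minimal sketch of the same idea, assuming the same directory layout (a CapturedFiles folder containing <stationID>_... subdirectories) and using the modification time as the recency key; the function name is illustrative, not part of the RMS API.

import os


def findLatestCaptureDir(data_dir, captured_dir, station_id):
    """ Return the path of the most recently modified capture directory for the given station,
        or None if no matching directory exists. """

    captured_path = os.path.join(os.path.abspath(data_dir), captured_dir)

    candidates = [
        os.path.join(captured_path, name) for name in sorted(os.listdir(captured_path))
        if os.path.isdir(os.path.join(captured_path, name)) and name.startswith(str(station_id))
    ]

    if not candidates:
        return None

    # The most recently modified directory wins, matching the loop in runCapture
    return max(candidates, key=os.path.getmtime)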